Integrates sbt-org-policies plugin (#115)
* Integrates sbt-org-policies plugin
* Bumps scala and plugin versions
* Removes the unneeded scalaz library from the examples module
* Releases new patch version
juanpedromoreno authored Apr 19, 2017
1 parent 5e26f7c commit d4f389c
Showing 30 changed files with 262 additions and 249 deletions.
22 changes: 21 additions & 1 deletion .scalafmt.conf
@@ -1,2 +1,22 @@
style = defaultWithAlign
maxColumn = 100

continuationIndent.callSite = 2

newlines {
sometimesBeforeColonInMethodReturnType = false
}

align {
arrowEnumeratorGenerator = false
ifWhileOpenParen = false
openParenCallSite = false
openParenDefnSite = false
}

docstrings = JavaDoc

rewrite {
rules = [SortImports, RedundantBraces]
redundantBraces.maxLines = 1
}
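
The most consequential additions are the `rewrite.rules`. As an illustration only (this snippet is made up, not taken from the repository), here is roughly how the two new rules behave, with `redundantBraces.maxLines = 1` limiting brace removal to single-line bodies:

```scala
// Hypothetical input, for illustration:
import cats.{Monad, Applicative, Functor}

def label(n: Int): String = {
  n.toString
}

// Roughly what scalafmt is expected to produce with the new rules:
import cats.{Applicative, Functor, Monad} // SortImports orders the selectors

def label(n: Int): String =
  n.toString // RedundantBraces drops braces around a one-line body
```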
11 changes: 5 additions & 6 deletions .travis.yml
@@ -1,21 +1,20 @@
language: scala
scala:
- 2.10.6
- 2.11.8
- 2.12.0
- 2.11.11
- 2.12.2
jdk:
- oraclejdk8
before_install:
- if [ "$TRAVIS_BRANCH" = "master" -a "$TRAVIS_PULL_REQUEST" = "false" ]; then openssl aes-256-cbc
-K $encrypted_37b99bd39158_key -iv $encrypted_37b99bd39158_iv
-in secring.gpg.enc -out secring.gpg -d; fi
script:
- sbt coverage 'fetchJVM/test' 'fetchJVM/coverageReport'
- sbt ++$TRAVIS_SCALA_VERSION 'tests/test'
- sbt ++$TRAVIS_SCALA_VERSION compile test
- sbt ++$TRAVIS_SCALA_VERSION coverage 'fetchJVM/test' 'fetchJVM/coverageReport'
- sbt ++$TRAVIS_SCALA_VERSION 'docs/tut'
- sbt ++$TRAVIS_SCALA_VERSION 'readme/tut'
- sbt 'examples/test'
after_success:
- bash <(curl -s https://codecov.io/bash) -t 47609994-e0cd-4f3b-a28d-eb558142c3bb
- if [ "$TRAVIS_BRANCH" = "master" -a "$TRAVIS_PULL_REQUEST" = "false" ]; then sbt ++$TRAVIS_SCALA_VERSION
publishSigned; fi
- sbt ++$TRAVIS_SCALA_VERSION orgAfterCISuccess
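
The removed `after_success` step published signed artifacts by hand on non-PR master builds; that responsibility now moves to `orgAfterCISuccess`, a task presumably provided by the sbt-org-policies plugin being integrated. Purely as a sketch of the behavior this diff delegates (reconstructed from the deleted lines, not from the plugin's actual code, and the real task may do more, e.g. docs publishing), the manual step amounted to an sbt task along these lines:

```scala
// Sketch only: approximates the removed after_success logic; the names here are
// made up, and PgpKeys comes from sbt-pgp, which this build already uses.
lazy val afterCISuccessSketch = taskKey[Unit]("publish signed artifacts from master, non-PR builds")

afterCISuccessSketch := Def.taskDyn {
  val isMaster = sys.env.get("TRAVIS_BRANCH").contains("master")
  val isPR     = sys.env.get("TRAVIS_PULL_REQUEST").exists(_ != "false")
  if (isMaster && !isPR)
    Def.task(PgpKeys.publishSigned.value) // what the old script ran explicitly
  else
    Def.task(())
}.value
```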
7 changes: 2 additions & 5 deletions CHANGELOG.md
@@ -1,13 +1,10 @@
## Changelog
# Changelog

## Version 0.6.0

Date: 2017-03-17
## 2017-03-17 - Version 0.6.0

- Add `DataSource#batchingOnly` for batch-only data sources (thanks @aleczorab)
- Add `DataSource#batchExecution` for controlling how batches are executed


## Version 0.5.0

Date: 2017-01-26
10 changes: 5 additions & 5 deletions README.md
@@ -100,7 +100,7 @@ Let's run it and wait for the fetch to complete:

```scala
fetchOne.runA[Id]
// [554] One ToString 1
// [132] One ToString 1
// res3: cats.Id[String] = 1
```

@@ -118,7 +118,7 @@ When executing the above fetch, note how the three identities get batched and th

```scala
fetchThree.runA[Id]
// [554] Many ToString NonEmptyList(1, 2, 3)
// [132] Many ToString NonEmptyList(3, 1, 2)
// res5: cats.Id[(String, String, String)] = (1,2,3)
```

@@ -159,8 +159,8 @@ Note how the two independent data fetches run in parallel, minimizing the latenc

```scala
fetchMulti.runA[Id]
// [554] One ToString 1
// [555] One Length one
// [133] One Length one
// [132] One ToString 1
// res7: cats.Id[(String, Int)] = (1,3)
```

@@ -179,6 +179,6 @@ While running it, notice that the data source is only queried once. The next tim

```scala
fetchTwice.runA[Id]
// [554] One ToString 1
// [132] One ToString 1
// res8: cats.Id[(String, String)] = (1,1)
```
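
The README excerpts above rely on a `ToString` data source defined earlier in the README, outside this diff. As a best-effort reconstruction only (inferred from the `[thread-id] One ToString …` log lines above and from the `DataSource` shape visible in the DoobieExample change below; the exact signatures are assumed, and an optional `name` override is omitted), such a source looks roughly like this:

```scala
import cats.data.NonEmptyList
import fetch._

// Reconstruction for illustration; the real definition lives in the README.
implicit object ToStringSource extends DataSource[Int, String] {
  override def fetchOne(id: Int): Query[Option[String]] =
    Query.sync {
      println(s"[${Thread.currentThread.getId}] One ToString $id")
      Option(id.toString)
    }
  override def fetchMany(ids: NonEmptyList[Int]): Query[Map[Int, String]] =
    Query.sync {
      println(s"[${Thread.currentThread.getId}] Many ToString $ids")
      ids.toList.map(i => i -> i.toString).toMap
    }
}

val fetchOne: Fetch[String] = Fetch(1) // picks up the implicit source above
```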
186 changes: 27 additions & 159 deletions build.sbt
@@ -1,187 +1,55 @@
import de.heikoseeberger.sbtheader.AutomateHeaderPlugin
import de.heikoseeberger.sbtheader.license.Apache2_0
import catext.Dependencies._

val dev = Seq(Dev("47 Degrees (twitter: @47deg)", "47 Degrees"))
val gh = GitHubSettings("com.47deg", "fetch", "47 Degrees", apache)
pgpPassphrase := Some(getEnvVar("PGP_PASSPHRASE").getOrElse("").toCharArray)
pgpPublicRing := file(s"$gpgFolder/pubring.gpg")
pgpSecretRing := file(s"$gpgFolder/secring.gpg")

addCommandAlias("makeDocs", ";docs/makeMicrosite")

pgpPassphrase := Some(sys.env.getOrElse("PGP_PASSPHRASE", "").toCharArray)
pgpPublicRing := file(s"${sys.env.getOrElse("PGP_FOLDER", ".")}/pubring.gpg")
pgpSecretRing := file(s"${sys.env.getOrElse("PGP_FOLDER", ".")}/secring.gpg")

lazy val buildSettings = Seq(
organization := gh.org,
organizationName := gh.publishOrg,
description := "Simple & Efficient data access for Scala and Scala.js",
startYear := Option(2016),
homepage := Option(url("http://47deg.github.io/fetch/")),
organizationHomepage := Option(new URL("http://47deg.com")),
scalaVersion := "2.12.1",
crossScalaVersions := Seq("2.10.6", "2.11.8", "2.12.1"),
libraryDependencies ++= (scalaBinaryVersion.value match {
case "2.10" =>
compilerPlugin("org.scalamacros" % "paradise" % versions("paradise") cross CrossVersion.full) :: Nil
case _ =>
Nil
}),
headers := Map(
"scala" -> Apache2_0("2016", "47 Degrees, LLC. <http://www.47deg.com>")
)
)

lazy val commonSettings = Seq(
libraryDependencies ++= Seq(
"org.typelevel" %%% "cats-free" % versions("cats"),
"org.scalatest" %%% "scalatest" % versions("scalatest") % "test",
compilerPlugin(
"org.spire-math" %% "kind-projector" % versions("kind-projector")
)
),
scalacOptions ++= Seq(
"-unchecked",
"-deprecation",
"-feature",
"-Ywarn-dead-code",
"-language:higherKinds",
"-language:existentials",
"-language:postfixOps"
),
scalafmtConfig := Some(file(".scalafmt.conf"))
) ++ reformatOnCompileSettings

lazy val allSettings = buildSettings ++
commonSettings ++
sharedCommonSettings ++
miscSettings ++
sharedReleaseProcess ++
credentialSettings ++
sharedPublishSettings(gh, dev)
lazy val root = project.in(file("."))
.settings(name := "fetch")
.settings(moduleName := "root")
.aggregate(fetchJS, fetchJVM, fetchMonixJVM, fetchMonixJS, debugJVM, debugJS)

lazy val fetch = crossProject
.in(file("."))
.settings(moduleName := "fetch")
.settings(allSettings: _*)
lazy val fetch = crossProject.in(file("."))
.settings(name := "fetch")
.jsSettings(sharedJsSettings: _*)
.enablePlugins(AutomateHeaderPlugin)
.crossDepSettings(commonCrossDependencies: _*)

lazy val fetchJVM = fetch.jvm
lazy val fetchJS = fetch.js

lazy val root = project
.in(file("."))
.aggregate(fetchJS, fetchJVM, fetchMonixJVM, fetchMonixJS, debugJVM, debugJS)
.settings(allSettings)
.settings(noPublishSettings)

lazy val micrositeSettings = Seq(
micrositeName := "Fetch",
micrositeDescription := "Simple & Efficient data access for Scala and Scala.js",
micrositeBaseUrl := "fetch",
micrositeDocumentationUrl := "/fetch/docs.html",
micrositeGithubOwner := "47deg",
micrositeGithubRepo := "fetch",
micrositeHighlightTheme := "tomorrow",
micrositePalette := Map("brand-primary" -> "#FF518C",
"brand-secondary" -> "#2F2859",
"brand-tertiary" -> "#28224C",
"gray-dark" -> "#48474C",
"gray" -> "#8D8C92",
"gray-light" -> "#E3E2E3",
"gray-lighter" -> "#F4F3F9",
"white-color" -> "#FFFFFF"),
includeFilter in makeSite := "*.html" | "*.css" | "*.png" | "*.jpg" | "*.gif" | "*.js" | "*.swf" | "*.md"
)

lazy val docsSettings = buildSettings ++ micrositeSettings ++ Seq(
tutScalacOptions ~= (_.filterNot(Set("-Ywarn-unused-import", "-Ywarn-dead-code"))),
tutScalacOptions ++= (scalaBinaryVersion.value match {
case "2.10" => Seq("-Xdivergence211")
case _ => Nil
}),
aggregate in doc := true
)

lazy val docs = (project in file("docs"))
.dependsOn(fetchJVM, fetchMonixJVM, debugJVM)
.settings(
moduleName := "fetch-docs"
)
.settings(docsSettings: _*)
.settings(noPublishSettings)
.enablePlugins(MicrositesPlugin)

lazy val readmeSettings = buildSettings ++ tutSettings ++ Seq(
tutSourceDirectory := baseDirectory.value,
tutTargetDirectory := baseDirectory.value.getParentFile,
tutScalacOptions ~= (_.filterNot(Set("-Ywarn-unused-import", "-Ywarn-dead-code"))),
tutScalacOptions ++= (scalaBinaryVersion.value match {
case "2.10" => Seq("-Xdivergence211")
case _ => Nil
}),
tutNameFilter := """README.md""".r
)

lazy val readme = (project in file("tut"))
.settings(
moduleName := "fetch-readme"
)
.dependsOn(fetchJVM)
.settings(readmeSettings: _*)
.settings(noPublishSettings)

lazy val monixSettings = (
libraryDependencies ++= Seq(
"io.monix" %%% "monix-eval" % versions("monix"),
"io.monix" %%% "monix-cats" % versions("monix")
)
)

lazy val monix = crossProject
.in(file("monix"))
lazy val monix = crossProject.in(file("monix"))
.dependsOn(fetch)
.settings(moduleName := "fetch-monix")
.settings(allSettings: _*)
.settings(name := "fetch-monix")
.jsSettings(sharedJsSettings: _*)
.settings(monixSettings: _*)
.enablePlugins(AutomateHeaderPlugin)
.crossDepSettings(commonCrossDependencies ++ monixCrossDependencies: _*)

lazy val fetchMonixJVM = monix.jvm
lazy val fetchMonixJS = monix.js

lazy val debug = (crossProject in file("debug"))
.settings(
moduleName := "fetch-debug"
)
.settings(name := "fetch-debug")
.dependsOn(fetch)
.settings(allSettings: _*)
.jsSettings(sharedJsSettings: _*)
.enablePlugins(AutomateHeaderPlugin)
.crossDepSettings(commonCrossDependencies: _*)

lazy val debugJVM = debug.jvm
lazy val debugJS = debug.js

lazy val examplesSettings = Seq(
scalaVersion := "2.12.1",
libraryDependencies ++= Seq(
"org.tpolecat" %% "doobie-core-cats" % versions("doobie"),
"org.tpolecat" %% "doobie-h2-cats" % versions("doobie"),
"org.http4s" %% "http4s-blaze-client" % versions("http4s"),
"org.http4s" %% "http4s-circe" % versions("http4s"),
"io.circe" %% "circe-generic" % versions("circe")
)
)

lazy val examples = (project in file("examples"))
.settings(moduleName := "fetch-examples")
.settings(name := "fetch-examples")
.dependsOn(fetchJVM)
.settings(commonSettings: _*)
.settings(noPublishSettings: _*)
.settings(examplesSettings: _*)

lazy val tests = (project in file("."))
.aggregate(fetchJVM, fetchMonixJVM, debugJVM)
.settings(buildSettings)
.settings(commonSettings)
lazy val docs = (project in file("docs"))
.dependsOn(fetchJVM, fetchMonixJVM, debugJVM)
.settings(name := "fetch-docs")
.settings(docsSettings: _*)
.settings(noPublishSettings)
.enablePlugins(MicrositesPlugin)

lazy val readme = (project in file("tut"))
.settings(name := "fetch-readme")
.dependsOn(fetchJVM)
.settings(readmeSettings: _*)
.settings(noPublishSettings)
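
For orientation, since the whole file is built around it: `crossProject` (used for `fetch`, `monix`, and `debug` above) produces paired JVM and Scala.js projects, and `%%%` resolves a dependency to the artifact for whichever platform is being built. A minimal standalone sketch, not taken from this build (the project names and the cats version are illustrative):

```scala
// Minimal cross-project sketch, independent of this build:
lazy val mylib = crossProject
  .in(file("mylib"))
  .settings(
    // %%% picks the JVM or Scala.js artifact depending on the platform
    libraryDependencies += "org.typelevel" %%% "cats-free" % "0.9.0"
  )

lazy val mylibJVM = mylib.jvm // regular JVM project
lazy val mylibJS  = mylib.js  // Scala.js project
```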
2 changes: 1 addition & 1 deletion debug/shared/src/main/scala/debug.scala
@@ -1,5 +1,5 @@
/*
* Copyright 2016 47 Degrees, LLC. <http://www.47deg.com>
* Copyright 2016-2017 47 Degrees, LLC. <http://www.47deg.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
2 changes: 1 addition & 1 deletion debug/shared/src/main/scala/document.scala
@@ -1,5 +1,5 @@
/*
* Copyright 2016 47 Degrees, LLC. <http://www.47deg.com>
* Copyright 2016-2017 47 Degrees, LLC. <http://www.47deg.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
25 changes: 22 additions & 3 deletions examples/src/test/scala/DoobieExample.scala
@@ -1,3 +1,19 @@
/*
* Copyright 2016-2017 47 Degrees, LLC. <http://www.47deg.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

import cats.data.NonEmptyList
import cats.instances.list._
import cats.syntax.cartesian._
@@ -54,9 +70,12 @@ class DoobieExample extends AsyncWordSpec with Matchers {
}
override def fetchMany(ids: NonEmptyList[AuthorId]): Query[Map[AuthorId, Author]] =
Query.async { (ok, fail) =>
fetchByIds(ids).map { authors =>
authors.map(a => AuthorId(a.id) -> a).toMap
}.transact(xa).unsafeRunAsync(_.fold(fail, ok))
fetchByIds(ids)
.map { authors =>
authors.map(a => AuthorId(a.id) -> a).toMap
}
.transact(xa)
.unsafeRunAsync(_.fold(fail, ok))
}

def fetchById(id: AuthorId): ConnectionIO[Option[Author]] =
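
The hunk above shows only the `fetchMany` side of the author data source; the rest of the file is truncated in this view. For symmetry, a hypothetical `fetchOne` written in the same style and placed in the same data source would look roughly as follows; `fetchById` and the transactor `xa` are assumed from the elided parts of the example, and the exact `fetchOne` signature is an assumption, not shown in this diff:

```scala
// Hypothetical counterpart to the fetchMany shown above, for illustration only:
override def fetchOne(id: AuthorId): Query[Option[Author]] =
  Query.async { (ok, fail) =>
    fetchById(id)                       // ConnectionIO[Option[Author]], declared above
      .transact(xa)                     // run against the example's transactor
      .unsafeRunAsync(_.fold(fail, ok)) // deliver success/failure to the Query callbacks
  }
```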