diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 6f9bbb3ea..0dbe25e83 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -24,6 +24,23 @@ jobs: - name: Check code and docs formatting run: scalafmt --check + check-snippets: + + runs-on: ubuntu-latest + if: github.event_name == 'push' || github.event.action != 'labeled' # run for 'opened', 'reopened' and 'synchronize' + + steps: + - uses: actions/checkout@v3 + with: + fetch-depth: 0 + - uses: extractions/setup-just@v2 + - uses: coursier/setup-action@v1.3.0 + with: + apps: scala-cli sbt + - name: Run snippets from documentation + working-directory: docs + run: just test-snippets + build: runs-on: ubuntu-latest diff --git a/.scalafmt.conf b/.scalafmt.conf index 58d04cbb4..e685e5f37 100644 --- a/.scalafmt.conf +++ b/.scalafmt.conf @@ -6,6 +6,7 @@ runner.dialect = Scala213Source3 fileOverride { "glob:**/src/main/scala-3/**" { runner.dialect = scala3 } "glob:**/src/test/scala-3/**" { runner.dialect = scala3 } + "glob:**/scripts/**" { runner.dialect = scala3 } // for we we have to: // - replace ```scala with ```scala mdoc (MkDocs does NOT support these suffixes) // - run scalafmt --check docs/docs diff --git a/build.sbt b/build.sbt index 65ef8c109..1675aa754 100644 --- a/build.sbt +++ b/build.sbt @@ -297,6 +297,13 @@ val ciCommand = (platform: String, scalaSuffix: String) => { tasks.mkString(" ; ") } +val publishLocalForTests = { + for { + module <- Vector("chimneyMacroCommons", "chimney", "chimneyCats", "chimneyProtobufs", "chimneyJavaCollections") + moduleVersion <- Vector(module, module + "3") + } yield moduleVersion + "/publishLocal" +}.mkString(" ; ") + val releaseCommand = (tag: Seq[String]) => if (tag.nonEmpty) "publishSigned ; sonatypeBundleRelease" else "publishSigned" @@ -328,7 +335,7 @@ lazy val root = project | |When working with IntelliJ or Scala Metals, edit "val ideScala = ..." and "val idePlatform = ..." 
within "val versions" in build.sbt to control which Scala version you're currently working with. | - |If you need to test library locally in a different project, use publishLocal: + |If you need to test library locally in a different project, use publish-local-for-tests or manually publishLocal: | - chimney-macro-commons (obligatory) | - chimney | - cats/java-collections/protobufs integration (optional) @@ -360,7 +367,13 @@ lazy val root = project .alias("ci-native-2_13"), sbtwelcome .UsefulTask(ciCommand("Native", "2_12"), "CI pipeline for Scala 2.12 on Scala Native") - .alias("ci-native-2_12") + .alias("ci-native-2_12"), + sbtwelcome + .UsefulTask( + publishLocalForTests, + "Publishes all Scala 2.13 and Scala 3 JVM artifacts to test snippets in documentation" + ) + .alias("publish-local-for-tests") ) ) diff --git a/docs/Justfile b/docs/Justfile index c61024758..0fb0808ab 100644 --- a/docs/Justfile +++ b/docs/Justfile @@ -3,3 +3,7 @@ build: serve: build docker run --rm -it -p 8000:8000 -v ${PWD}:/docs --env "CI_LATEST_TAG=$(git describe --tags)" mkdocs-chimney-docs + +test-snippets: + cd .. && sbt publish-local-for-tests + cd .. && scala-cli run scripts/test-snippets.scala -- "$PWD/docs" "$(sbt -batch -error 'print chimney/version')" "" -1 -1 diff --git a/docs/docs/cookbook.md b/docs/docs/cookbook.md index eabc3b852..eed37ad8e 100644 --- a/docs/docs/cookbook.md +++ b/docs/docs/cookbook.md @@ -31,7 +31,7 @@ If we do not want to enable the same flag(s) in several places, we can define sh TransformerConfiguration.default.enableMethodAccessors.enableMacrosLogging transparent inline given PatcherConfiguration[?] = - PatcherConfiguration.ignoreNoneInPatch.enableMacrosLogging + PatcherConfiguration.default.ignoreNoneInPatch.enableMacrosLogging ``` !!! tip @@ -347,6 +347,7 @@ new extension methods: `asValidatedNec`, `asValidatedNel`, `asValidatedChain` an !!! 
example ```scala + //> using dep org.typelevel::cats-core::2.10.0 //> using dep io.scalaland::chimney-cats::{{ chimney_version() }} case class RegistrationForm( @@ -424,6 +425,7 @@ explanation: !!! example ```scala + //> using dep org.typelevel::cats-core::2.10.0 //> using dep io.scalaland::chimney-cats::{{ chimney_version() }} import cats.syntax.all._ import io.scalaland.chimney.Transformer @@ -442,6 +444,7 @@ Similarly, there exists instances for `PartialTransformer` and `partial.Result`: !!! example ```scala + //> using dep org.typelevel::cats-core::2.10.0 //> using dep io.scalaland::chimney-cats::{{ chimney_version() }} import cats.syntax.all._ import io.scalaland.chimney.PartialTransformer @@ -520,6 +523,7 @@ What does it means for us? but to NOT disable parallel semantics for some transformations when we would pass `failFast = false` later on ```scala + //> using dep org.typelevel::cats-core::2.10.0 //> using dep io.scalaland::chimney-cats::{{ chimney_version() }} import cats.syntax.all._ import io.scalaland.chimney.PartialTransformer @@ -541,6 +545,7 @@ What does it means for us? 
And `partial.Result`s have to use explicit combinators to decide whether it's sequential or parallel semantics: ```scala + //> using dep org.typelevel::cats-core::2.10.0 //> using dep io.scalaland::chimney-cats::{{ chimney_version() }} import cats.syntax.all._ import io.scalaland.chimney.partial @@ -593,6 +598,10 @@ The automatic conversion into a protobuf with such a field can be problematic: //> using dep io.scalaland::chimney::{{ chimney_version() }} import io.scalaland.chimney.dsl._ + object scalapb { + case class UnknownFieldSet() + } + object domain { case class Address(line1: String, line2: String) } @@ -600,7 +609,7 @@ The automatic conversion into a protobuf with such a field can be problematic: case class Address( line1: String = "", line2: String = "", - unknownFields: UnknownFieldSet = UnknownFieldSet() + unknownFields: scalapb.UnknownFieldSet = scalapb.UnknownFieldSet() ) } @@ -608,8 +617,7 @@ The automatic conversion into a protobuf with such a field can be problematic: // error: Chimney can't derive transformation from domain.Address to protobuf.Address // // protobuf.Address - // unknownFields: UnknownFieldSet - no accessor named unknownFields in source type domain.Address - // + // unknownFields: scalapb.UnknownFieldSet - no accessor named unknownFields in source type domain.Address // // Consult https://scalalandio.github.io/chimney for usage examples. ``` @@ -621,10 +629,11 @@ There are 2 ways in which Chimney could handle this issue: !!! example ```scala - //> using dep io.scalaland::chimney::{{ chimney_version() }} - import io.scalaland.chimney.dsl._ - - domain.Address("a", "b").into[protobuf.Address].enableDefaultValues.transform + domain + .Address("a", "b") + .into[protobuf.Address] + .enableDefaultValues + .transform ``` - manually [setting this one field](supported-transformations.md#wiring-constructors-parameter-to-raw-value)_ @@ -632,9 +641,6 @@ There are 2 ways in which Chimney could handle this issue: !!! 
example ```scala - //> using dep io.scalaland::chimney::{{ chimney_version() }} - import io.scalaland.chimney.dsl._ - domain .Address("a", "b") .into[protobuf.Address] @@ -1007,11 +1013,12 @@ If there is no common interface that could be summoned as implicit for performin !!! example Assuming Scala 3 or `-Xsource:3` for fixed `private` constructors so that `Username.apply` and `.copy` would - be private. + be private. (Newest versions of Scala 2.13 additionally require us to acknowledge this change in the behavior by + manually suppressing an error/warning). ```scala //> using scala {{ scala.2_13 }} - //> using options -Xsource:3 + //> using options -Xsource:3 -Wconf:cat=scala3-migration:s final case class Username private (value: String) object Username { def parse(value: String): Either[String, Username] = @@ -1035,7 +1042,17 @@ then Partial Transformer would have to be created manually: !!! example ```scala + //> using scala {{ scala.2_13 }} + //> using options -Xsource:3 -Wconf:cat=scala3-migration:s //> using dep io.scalaland::chimney::{{ chimney_version() }} + + final case class Username private (value: String) + object Username { + def parse(value: String): Either[String, Username] = + if (value.isEmpty) Left("Username cannot be empty") + else Right(Username(value)) + } + import io.scalaland.chimney.PartialTransformer import io.scalaland.chimney.partial @@ -1068,16 +1085,29 @@ we could use it to construct `PartialTransformer` automatically: !!! 
example ```scala + //> using scala {{ scala.2_13 }} + //> using options -Xsource:3 -Wconf:cat=scala3-migration:s //> using dep io.scalaland::chimney::{{ chimney_version() }} import io.scalaland.chimney.PartialTransformer import io.scalaland.chimney.partial + trait SmartConstructor[From, To] { + def parse(from: From): Either[String, To] + } + implicit def smartConstructedPartial[From, To](implicit smartConstructor: SmartConstructor[From, To] ): PartialTransformer[From, To] = PartialTransformer[From, To] { value => partial.Result.fromEitherString(smartConstructor.parse(value)) } + + final case class Username private (value: String) + object Username extends SmartConstructor[String, Username] { + def parse(value: String): Either[String, Username] = + if (value.isEmpty) Left("Username cannot be empty") + else Right(Username(value)) + } ``` The same would be true about extracting values from smart-constructed types @@ -1164,7 +1194,7 @@ We can use them to provide unwrapping `Transformer` and wrapping ): Transformer[Outer, Inner] = extractor.extract(_) implicit def wrapNewType[Inner, Outer](implicit - builder: HasBuilder.Aux[Inner, Outer] + builder: HasBuilder.Aux[Outer, Inner] ): PartialTransformer[Inner, Outer] = PartialTransformer[Inner, Outer] { value => partial.Result.fromEitherString( builder.build(value).left.map(_.toReadableString) @@ -1180,11 +1210,11 @@ popular constraints as long as we express them in the value's type. !!! example ```scala - //> using dep eu.timepit::refined::0.11.0 + //> using dep eu.timepit::refined::0.11.1 import eu.timepit.refined._ import eu.timepit.refined.api.Refined import eu.timepit.refined.auto._ - import eu.timepit.refined.collections._ + import eu.timepit.refined.collection._ type Username = String Refined NonEmpty ``` @@ -1194,8 +1224,9 @@ We can validate using the dedicated type class (`Validate`), while extraction is !!! 
example ```scala - //> using dep eu.timepit::refined::0.11.0 + //> using dep eu.timepit::refined::0.11.1 //> using dep io.scalaland::chimney::{{ chimney_version() }} + import eu.timepit.refined.refineV import eu.timepit.refined.api.{Refined, Validate} import io.scalaland.chimney.{PartialTransformer, Transformer} import io.scalaland.chimney.partial @@ -1207,9 +1238,7 @@ We can validate using the dedicated type class (`Validate`), while extraction is validate: Validate.Plain[Type, Refinement] ): PartialTransformer[Type, Type Refined Refinement] = PartialTransformer[Type, Type Refined Refinement] { value => - partial.Result.fromOption( - validate.validate(value).fold(Some(_), _ => None) - ) + partial.Result.fromEitherString(refineV[Refinement](value)) } ``` @@ -1307,12 +1336,13 @@ Most of the time a collection doesn't perform any sort of validations, and you c } object MyCollection { - def of[A](as: A*): MyCollection[A] = new MyCollection(Vector(as*)) + def of[A](as: A*): MyCollection[A] = new MyCollection(as.toVector) def from[A](vector: Vector[A]): MyCollection[A] = new MyCollection(vector) } // ...you can provide Chimney support for it... 
import io.scalaland.chimney.integrations.{ FactoryCompat, TotallyBuildIterable } + import scala.collection.compat._ import scala.collection.mutable implicit def myCollectionIsTotallyBuildIterable[A]: TotallyBuildIterable[MyCollection[A], A] = @@ -1340,8 +1370,8 @@ Most of the time a collection doesn't perform any sort of validations, and you c import io.scalaland.chimney.dsl._ // for converting to and from standard library collection (or any other type supported this way) - MyCollection("a", "b").transformInto[List[String]] // List("a", "b") - List("a", "b").transformInto[MyCollection[String]] // MyCollection("a", "b") + MyCollection.of("a", "b").transformInto[List[String]] // List("a", "b") + List("a", "b").transformInto[MyCollection[String]] // MyCollection.of("a", "b") case class Foo(value: String) case class Bar(value: String, another: Double) @@ -1350,7 +1380,7 @@ Most of the time a collection doesn't perform any sort of validations, and you c List(Foo("test")) .into[MyCollection[Bar]] .withFieldConst(_.everyItem.another, 3.14) - .transform // MyCollection(Bar("test", 3.14)) + .transform // MyCollection.of(Bar("test", 3.14)) ``` If your collection performs some sort of validation, you integrate it with Chimney as well: @@ -1373,7 +1403,7 @@ If your collection performs some sort of validation, you integrate it with Chimn } object NonEmptyCollection { - def of[A](a: A, as: A*): NonEmptyCollection[A] = new NonEmptyCollection(Vector((a +: as)*)) + def of[A](a: A, as: A*): NonEmptyCollection[A] = new NonEmptyCollection(a +: as.toVector) def from[A](vector: Vector[A]): Option[NonEmptyCollection[A]] = if (vector.nonEmpty) Some(new NonEmptyCollection(vector)) else None } @@ -1381,6 +1411,7 @@ If your collection performs some sort of validation, you integrate it with Chimn // ...you can provide Chimney support for it... 
import io.scalaland.chimney.integrations.{ FactoryCompat, PartiallyBuildIterable } import io.scalaland.chimney.partial + import scala.collection.compat._ import scala.collection.mutable implicit def nonEmptyCollectionIsPartiallyBuildIterable[A]: PartiallyBuildIterable[NonEmptyCollection[A], A] = @@ -1410,7 +1441,7 @@ If your collection performs some sort of validation, you integrate it with Chimn import io.scalaland.chimney.dsl._ // for validating that your collection can be created once all items have been put into Builder - List("a").transformIntoPartial[NonEmptyCollection[String]].asOption // Some(NonEmptyCollection("a")) + List("a").transformIntoPartial[NonEmptyCollection[String]].asOption // Some(NonEmptyCollection.of("a")) List.empty[String].transformIntoPartial[NonEmptyCollection[String]].asOption // None ``` @@ -1423,6 +1454,7 @@ For map types there are specialized versions of these type classes: import io.scalaland.chimney.integrations._ import io.scalaland.chimney.partial + import scala.collection.compat._ import scala.collection.mutable class MyMap[+K, +V] private (private val impl: Vector[(K, V)]) { @@ -1437,7 +1469,7 @@ For map types there are specialized versions of these type classes: } object MyMap { - def of[K, V](pairs: (K, V)*): MyMap[K, V] = new MyMap(Vector(pairs*)) + def of[K, V](pairs: (K, V)*): MyMap[K, V] = new MyMap(pairs.toVector) def from[K, V](vector: Vector[(K, V)]): MyMap[K, V] = new MyMap(vector) } @@ -1473,7 +1505,7 @@ For map types there are specialized versions of these type classes: } object NonEmptyMap { - def of[K, V](pair: (K, V), pairs: (K, V)*): NonEmptyMap[K, V] = new NonEmptyMap(Vector((pair +: pairs)*)) + def of[K, V](pair: (K, V), pairs: (K, V)*): NonEmptyMap[K, V] = new NonEmptyMap(pair +: pairs.toVector) def from[K, V](vector: Vector[(K, V)]): Option[NonEmptyMap[K, V]] = if (vector.nonEmpty) Some(new NonEmptyMap(vector)) else None } diff --git a/docs/docs/supported-patching.md b/docs/docs/supported-patching.md index 
9b6addc4b..487f53b1b 100644 --- a/docs/docs/supported-patching.md +++ b/docs/docs/supported-patching.md @@ -50,9 +50,11 @@ When the patch `case class` contains a field that does not exist in patched obje val user = User(10, "abc@@domain.com", 1234567890L) user.patchUsing(UserUpdateForm("xyz@@domain.com", 123123123L, "some address")) - // Chimney can't derive patcher for User with patch type UserUpdateForm + // error: + // Chimney can't derive patching for User with patch type UserUpdateForm + // + // Field named 'address' not found in target patching type snippet.User! // - // Field named 'address' not found in target patching type User! // Consult https://chimney.readthedocs.io for usage examples. ``` @@ -109,6 +111,7 @@ If the flag was enabled in the implicit config it can be disabled with `.failRed .using(UserUpdateForm("xyz@@domain.com", 123123123L, "some address")) .failRedundantPatcherFields .patch + // error: // Chimney can't derive patcher for User with patch type UserUpdateForm // // Field named 'address' not found in target patching type User! 
diff --git a/docs/docs/supported-transformations.md b/docs/docs/supported-transformations.md index b2f01ab1a..7f00ded53 100644 --- a/docs/docs/supported-transformations.md +++ b/docs/docs/supported-transformations.md @@ -1563,6 +1563,7 @@ We are also able to compute values in nested structure: ```scala //> using dep io.scalaland::chimney::{{ chimney_version() }} import io.scalaland.chimney.dsl._ + import io.scalaland.chimney.partial case class Foo(a: String, b: Int) case class Bar(a: String, b: Int, c: Long) @@ -2109,7 +2110,8 @@ If the computation needs to allow failure, there is `.withSealedSubtypeHandledPa ```scala //> using scala {{ scala.3 }} //> using dep io.scalaland::chimney::{{ chimney_version() }} - import io.scalaland.chimney.dsl._ + import io.scalaland.chimney.dsl.* + import io.scalaland.chimney.partial enum Foo { case Baz(a: String) @@ -2496,10 +2498,10 @@ automatically only with `PartialTransformer`: ### Controlling automatic `Option` unwrapping -Automatic unwrapping of `Option`s by `PartialTransformer`s allows for seemless decoding of many PTO types into domain +Automatic unwrapping of `Option`s by `PartialTransformer`s allows for seamless decoding of many PTO types into domain types and provides a nice symmetry with encoding values using `Transformer`s (wrapping values with `Option`). -However, sometimes you might prefer to opt out of such benavior. You can disable it with a flag: +However, sometimes you might prefer to opt out of such behavior. You can disable it with a flag: !!! 
example @@ -2810,7 +2812,7 @@ Then Chimney will try to match the source type's getters against the method's pa def make(value: Int): Bar = Bar(value.toString) } - Foo(10).into[Bar].withConstructor(Bar.make).transform // Bar("10") + Foo(10).into[Bar].withConstructor(Bar.make _).transform // Bar("10") Foo(10) .into[Bar] @@ -2850,7 +2852,7 @@ constructor for `PartialTransformer`: Foo("10") .intoPartial[Bar] - .withConstructorPartial(smartConstructor) + .withConstructorPartial(smartConstructor _) .transform .asEither // Right(Bar(10)) @@ -2866,7 +2868,7 @@ constructor for `PartialTransformer`: Foo("10") .intoPartial[Bar] - .withConstructorEither(Bar.parse) + .withConstructorEither(Bar.parse _) .transform .asEither // Right(Bar(1000)) ``` @@ -3204,11 +3206,27 @@ The Chimney does not decide and in the presence of 2 implicits it will fail and // Transformer[java.lang.String, scala.Int]: stringToIntUnsafe // // Please eliminate ambiguity from implicit scope or use enableImplicitConflictResolution/withFieldComputed/withFieldComputedPartial to decide which one should be used - "aa" - .intoPartial[Int] - .enableImplicitConflictResolution(PreferTotalTransformer) - .transform // throws NumberFormatException: For input string: "aa" - "aa".intoPartial[Int].enableImplicitConflictResolution(PreferPartialTransformer).transform.asOption // None + ``` + + When we provide a way of resolving implicits, the error disappears: + + ```scala + //> using dep io.scalaland::chimney::{{ chimney_version() }} + import io.scalaland.chimney.{partial, PartialTransformer, Transformer} + import io.scalaland.chimney.dsl._ + + implicit val stringToIntUnsafe: Transformer[String, Int] = _.toInt // throws!!!
+ implicit val stringToIntSafe: PartialTransformer[String, Int] = + PartialTransformer(str => partial.Result.fromCatching(str.toInt)) + + locally { + implicit val cfg = TransformerConfiguration.default.enableImplicitConflictResolution(PreferTotalTransformer) + "aa".transformIntoPartial[Int] // throws NumberFormatException: For input string: "aa" + } + locally { + implicit val cfg = TransformerConfiguration.default.enableImplicitConflictResolution(PreferPartialTransformer) + "aa".transformIntoPartial[Int] // None + } ``` ## Recursive transformation @@ -3241,7 +3259,7 @@ Since we are talking about recursion then there is one troublesome issue - recur case class Bar(a: Int, b: Option[Bar]) val foo = Foo(10, Some(Foo(20, None))) - val bar = ??? + // val bar = ??? // how to implement it? ``` We cannot derive an expression that would handle such data without any recursion (or other form of backtracking). @@ -3310,7 +3328,7 @@ However, these 3 does not exhaust all possible comparisons and you might need to This is an advanced feature! Due to macros' limitations this feature requires several conditions to work. -The challenge is that the function you'd like to provie has to be called within macro, so it has to be defined in such +The challenge is that the function you'd like to provide has to be called within macro, so it has to be defined in such a way that the macro will be able to access it. Normally, there is no way to inject a custom login into existing macro, but Chimney has a specific solution for this: @@ -3319,7 +3337,7 @@ but Chimney has a specific solution for this: - your have to define this `object` as top-level definition or within another object - object defined within a `class`, a `trait` or locally, does need some logic for instantiation - you have to define your `object` in a module/subproject that is compiled _before_ the module where you need to use - it, so that the bytecode would already be accesible on classpath. 
+ it, so that the bytecode would already be accessible on the classpath. !!! example diff --git a/docs/docs/troubleshooting.md b/docs/docs/troubleshooting.md index 3ace4e282..7aebac28f 100644 --- a/docs/docs/troubleshooting.md +++ b/docs/docs/troubleshooting.md @@ -150,14 +150,18 @@ This option allowed calling `.get` on `Option` to enable conversion from `Option !!! example ```scala - //> using dep io.scalaland::chimney::{{ chimney_version() }} + //> using dep io.scalaland::chimney::0.7.5 import io.scalaland.chimney.dsl._ case class Foo(a: Option[String]) case class Bar(a: String) Foo(Some("value")).into[Bar].enableUnsafeOption.transform // Bar("value") - Foo(None).into[Bar].enableUnsafeOption.transform // throws Exception + try { + Foo(None).into[Bar].enableUnsafeOption.transform // throws Exception + } catch { + case e: Throwable => println(e) + } ``` Throwing exceptions made sense as a workaround in simpler times, when `Transformer`s were the only option. However, @@ -205,7 +209,8 @@ another implicit `Transformer`.
```scala //> using dep io.scalaland::chimney::{{ chimney_version() }} - import io.scalaland.chimney._ + import io.scalaland.chimney.dsl._ + import io.scalaland.chimney.Transformer class MyType[A](private val a: A) { def map[B](f: A => B): MyType[B] = @@ -226,7 +231,8 @@ there the automatic instances as well, they need to use `Transformer.AutoDerived ```scala //> using dep io.scalaland::chimney::{{ chimney_version() }} - import io.scalaland.chimney._ + import io.scalaland.chimney.dsl._ + import io.scalaland.chimney.Transformer class MyOtherType[A](private val a: A) { def map[B](f: A => B): MyOtherType[B] = @@ -247,6 +253,27 @@ The difference is shown in this example: ```scala //> using dep io.scalaland::chimney::{{ chimney_version() }} import io.scalaland.chimney.dsl._ + import io.scalaland.chimney.Transformer + + class MyType[A](private val a: A) { + def map[B](f: A => B): MyType[B] = + new MyType(f(a)) + } + + implicit def provideMyType[A, B](implicit + a2b: Transformer[A, B] + ): Transformer[MyType[A], MyType[B]] = + myA => myA.map(_.transformInto[B]) + + class MyOtherType[A](private val a: A) { + def map[B](f: A => B): MyOtherType[B] = + new MyOtherType(f(a)) + } + + implicit def provideMyOtherType[A, B](implicit + a2b: Transformer.AutoDerived[A, B] + ): Transformer[MyOtherType[A], MyOtherType[B]] = + myA => myA.map(_.transformInto[B]) // implicit provided by the user implicit val int2str: Transformer[Int, String] = _.toString @@ -270,7 +297,7 @@ The difference is shown in this example: // myType2.transformInto[MyType[Either[String, String]]] // uses provideMyOtherType(Transformer.derive): - myOtherType2.transformInto[Either[String, String]] + myOtherType2.transformInto[MyOtherType[Either[String, String]]] ``` ### Default values no longer are used as fallback if the source field exists @@ -338,7 +365,7 @@ Here are some features it shares with Chimney (Automapper's code based on exampl !!! 
example "The simplest in-place mapping" ```scala - //> using scala {{ scala.213 }} + //> using scala {{ scala.2_13 }} //> using dep io.bfil::automapper::{{ libraries.scala_automapper }} case class SourceClass(label: String, value: Int) @@ -367,7 +394,7 @@ Here are some features it shares with Chimney (Automapper's code based on exampl !!! example "Defining transformation in one place as implicit" ```scala - //> using scala {{ scala.213 }} + //> using scala {{ scala.2_13 }} //> using dep io.bfil::automapper::{{ libraries.scala_automapper }} case class SourceClass(label: String, value: Int) @@ -417,7 +444,7 @@ Here are some features it shares with Chimney (Automapper's code based on exampl !!! example "Automapper's dynamic mappings" ```scala - //> using scala {{ scala.213 }} + //> using scala {{ scala.2_13 }} //> using dep io.bfil::automapper::{{ libraries.scala_automapper }} case class SourceClass(label: String, field: String, list: List[Int]) @@ -453,7 +480,7 @@ Here are some features it shares with Chimney (Automapper's code based on exampl val target = source.into[TargetClass] .withFieldRenamed(_.field, _.renamedField) // rename - .withFieldConst(_.total, sum(source.values)) // value provision + .withFieldConst(_.total, sum(values)) // value provision .transform // TargetClass("label", "field", 6) // alternatively we don't need intermediate `values` and `sum`: val target2 = source.into[TargetClass] @@ -465,7 +492,7 @@ Here are some features it shares with Chimney (Automapper's code based on exampl !!! example "Implicit conversion and polymorphic types" ```scala - //> using scala {{ scala.213 }} + //> using scala {{ scala.2_13 }} //> using dep io.bfil::automapper::{{ libraries.scala_automapper }} trait SourceTrait @@ -567,7 +594,7 @@ Here are some features it shares with Chimney (Henkan's code based on README): !!! 
example "Transform between case classes" ```scala - //> using scala {{ scala.213 }} + //> using scala {{ scala.2_13 }} //> using dep com.kailuowang::henkan-convert::{{ libraries.henkan }} import java.time.LocalDate @@ -608,7 +635,7 @@ Here are some features it shares with Chimney (Henkan's code based on README): !!! example "Transform between case classes with optional field" ```scala - //> using scala {{ scala.213 }} + //> using scala {{ scala.2_13 }} //> using dep com.kailuowang::henkan-optional::{{ libraries.henkan }} case class Message(a: Option[String], b: Option[Int]) @@ -630,6 +657,7 @@ Here are some features it shares with Chimney (Henkan's code based on README): type you want: ```scala + //> using dep org.typelevel::cats-core::2.10.0 //> using dep io.scalaland::chimney::{{ chimney_version() }} //> using dep io.scalaland::chimney-cats::{{ chimney_version() }} @@ -846,7 +874,7 @@ Here are some features it shares with Chimney (Ducktape's code based on GitHub P wirePerson .into[domain.Person] - .withConstructor(domain.Person) + .withConstructor(domain.Person.apply) .transform // Person( // firstName = "John", @@ -860,7 +888,7 @@ Here are some features it shares with Chimney (Ducktape's code based on GitHub P wirePerson .into[domain.Person] - .withConstructor(domain.Person) + .withConstructor(domain.Person.apply) .withFieldConst(_.paymentMethods.everyItem.matching[domain.PaymentMethod.PayPal].email, "overridden@email.com") .transform // Person = Person( @@ -920,13 +948,12 @@ Here are some features it shares with Chimney (Ducktape's code based on GitHub P wirePerson .into[domain.Person] .transform( - Field.const(_.age, 24), + Field.const(_.firstName, "Jane"), Case.const(_.paymentMethods.element.at[wire.PaymentMethod.Transfer], domain.PaymentMethod.Cash) ) // Person( - // firstName = "John", + // firstName = "Jane", // lastName = "Doe", - // age = 24, // paymentMethods = Vector( // Cash, // PayPal(email = "john@doe.com"), @@ -986,20 +1013,19 @@ Here are some 
features it shares with Chimney (Ducktape's code based on GitHub P // Case.const(_.paymentMethods.element.at[wire.PaymentMethod.Transfer], domain.PaymentMethod.Cash) // so this has to be handled "top level" by creating implicit/given. given Transformer[wire.PaymentMethod, domain.PaymentMethod] = Transformer - .derive[wire.PaymentMethod, domain.PaymentMethod] + .define[wire.PaymentMethod, domain.PaymentMethod] .withEnumCaseHandled[wire.PaymentMethod.Transfer](_ => domain.PaymentMethod.Cash) .buildTransformer wirePerson .into[domain.Person] - .withFieldConst(_.age, 24) + .withFieldConst(_.firstName, "Jane") // implicit instead of nested handling for withEnumCaseHandled .transform } // Person( - // firstName = "John", + // firstName = "Jane", // lastName = "Doe", - // age = 24, // paymentMethods = Vector( // Cash, // PayPal(email = "john@doe.com"), @@ -1077,7 +1103,7 @@ Here are some features it shares with Chimney (Ducktape's code based on GitHub P card .into[PaymentBand] - .transform(Field.fallBackToDefault) + .transform(Field.fallbackToDefault) // PaymentBand( // name = "J. Doe", // digits = 213712345L, @@ -1234,7 +1260,7 @@ Here are some features it shares with Chimney (Ducktape's code based on GitHub P transfer .into[domain.PaymentMethod] .transform( - Case.computed(_.at[wire.PaymentMethod.Transfer], transfer => domain.PaymentMethod.Card("J. Doe", transfer.accountNo.toLong)) + Case.computed(_.at[wire.PaymentMethod.Transfer], transfer => domain.PaymentMethod.Card(name = "J. Doe", digits = transfer.accountNo.toLong)) ) // PaymentMethod = Card(name = "J. Doe", digits = 2764262L) ``` @@ -1298,7 +1324,7 @@ Here are some features it shares with Chimney (Ducktape's code based on GitHub P transfer .into[domain.PaymentMethod] - .withEnumCaseHandled[wire.PaymentMethod.Transfer](transfer => domain.PaymentMethod.Card("J. Doe", transfer.accountNo.toLong)) + .withEnumCaseHandled[wire.PaymentMethod.Transfer](transfer => domain.PaymentMethod.Card(name = "J. 
Doe", digits = transfer.accountNo.toLong)) .transform // PaymentMethod = Card(name = "J. Doe", digits = 2764262L) ``` @@ -1346,14 +1372,14 @@ deciding between error accumulating and fail-fast in runtime. It provides utilit object domain: final case class Person( - firstName: NonEmptyString, - lastName: NonEmptyString, + firstName: newtypes.NonEmptyString, + lastName: newtypes.NonEmptyString, paymentMethods: Vector[domain.PaymentMethod] ) enum PaymentMethod: - case PayPal(email: NonEmptyString) - case Card(digits: Positive, name: NonEmptyString) + case PayPal(email: newtypes.NonEmptyString) + case Card(digits: newtypes.Positive, name: newtypes.NonEmptyString) case Cash val wirePerson = wire.Person( @@ -1370,11 +1396,11 @@ deciding between error accumulating and fail-fast in runtime. It provides utilit // expand the 'create' method into an instance of Transformer.Fallible // this is a key component in making those transformations automatic - given failFastNonEmptyString: Transformer.Fallible[[a] =>> Either[String, a], String, NonEmptyString] = - create + given failFastNonEmptyString: Transformer.Fallible[[a] =>> Either[String, a], String, newtypes.NonEmptyString] = + newtypes.NonEmptyString.create - given failFastPositive: Transformer.Fallible[[a] =>> Either[String, a], Long, Positive] = - create + given failFastPositive: Transformer.Fallible[[a] =>> Either[String, a], Long, newtypes.Positive] = + newtypes.Positive.create locally { given Mode.FailFast.Either[String] with {} @@ -1391,11 +1417,11 @@ deciding between error accumulating and fail-fast in runtime. 
It provides utilit } // also declare the same fallible transformer but make it ready for error accumulation - given accumulatingNonEmptyString: Transformer.Fallible[[a] =>> Either[List[String], a], String, NonEmptyString] = - create(_).left.map(_ :: Nil) + given accumulatingNonEmptyString: Transformer.Fallible[[a] =>> Either[List[String], a], String, newtypes.NonEmptyString] = + newtypes.NonEmptyString.create(_).left.map(_ :: Nil) - given accumulatingPositive: Transformer.Fallible[[a] =>> Either[List[String], a], Long, Positive] = - create(_).left.map(_ :: Nil) + given accumulatingPositive: Transformer.Fallible[[a] =>> Either[List[String], a], Long, newtypes.Positive] = + newtypes.Positive.create(_).left.map(_ :: Nil) locally { given Mode.Accumulating.Either[String, List] with {} @@ -1435,14 +1461,14 @@ deciding between error accumulating and fail-fast in runtime. It provides utilit object domain: final case class Person( - firstName: NonEmptyString, - lastName: NonEmptyString, + firstName: newtypes.NonEmptyString, + lastName: newtypes.NonEmptyString, paymentMethods: Vector[domain.PaymentMethod] ) enum PaymentMethod: - case PayPal(email: NonEmptyString) - case Card(digits: Positive, name: NonEmptyString) + case PayPal(email: newtypes.NonEmptyString) + case Card(digits: newtypes.Positive, name: newtypes.NonEmptyString) case Cash val wirePerson = wire.Person( @@ -1457,13 +1483,14 @@ deciding between error accumulating and fail-fast in runtime. 
It provides utilit import io.scalaland.chimney.dsl.* import io.scalaland.chimney.{partial, PartialTransformer} + // TODO: partial.syntax._ -> asResult - given PartialTransformer[String, newtypes.NonEmptyString]: PartialTransformer[String, newtypes.NonEmptyString](str => - partial.Result.fromEitherString(newtypes.NonEmptyString.create) + given PartialTransformer[String, newtypes.NonEmptyString] = PartialTransformer[String, newtypes.NonEmptyString](str => + partial.Result.fromEitherString(newtypes.NonEmptyString.create(str)) ) - given PartialTransformer[String, newtypes.Positive]: PartialTransformer[String, newtypes.Positive](str => - partial.Result.fromEitherString(newtypes.Positive.create) + given PartialTransformer[Long, newtypes.Positive] = PartialTransformer[Long, newtypes.Positive](str => + partial.Result.fromEitherString(newtypes.Positive.create(str)) ) wirePerson.transformIntoPartial[domain.Person].asEitherErrorPathMessageStrings @@ -1472,14 +1499,14 @@ deciding between error accumulating and fail-fast in runtime. 
It provides utilit wirePerson.intoPartial[domain.Person] .withFieldConstPartial( _.paymentMethods.everyItem.matching[domain.PaymentMethod.PayPal].email, - newtypes.NonEmptyString.create("overridden@email.com") + partial.Result.fromEitherString(newtypes.NonEmptyString.create("overridden@email.com")) ) .transform .asEitherErrorPathMessageStrings wirePerson.intoPartial[domain.Person] .withFieldConstPartial( _.paymentMethods.everyItem.matching[domain.PaymentMethod.PayPal].email, - newtypes.NonEmptyString.create("overridden@email.com") + partial.Result.fromEitherString(newtypes.NonEmptyString.create("overridden@email.com")) ) .transformFailFast .asEitherErrorPathMessageStrings diff --git a/docs/main.py b/docs/main.py index cb65a6446..15d1819d6 100644 --- a/docs/main.py +++ b/docs/main.py @@ -30,7 +30,7 @@ def define_env(env): """ If git describe tells us that this is NOT a git tag but git tag + some offset, we need to add -SNAPSHOT to match sbt """ - if re.compile('.+-[0-9]+-[0-9a-z]{8}').match(chimney_version_string): + if re.compile('.+-[0-9]+-g[0-9a-z]{8}').match(chimney_version_string): chimney_version_string = chimney_version_string + '-SNAPSHOT' @env.macro diff --git a/scripts/test-snippets.scala b/scripts/test-snippets.scala new file mode 100644 index 000000000..8f4e34974 --- /dev/null +++ b/scripts/test-snippets.scala @@ -0,0 +1,379 @@ +//> using scala 3.3.3 +//> using dep org.virtuslab::scala-yaml:0.0.8 + +import java.io.File +import java.nio.file.{Files, Paths} +import scala.Console.{MAGENTA, RESET} +import scala.collection.immutable.ListMap +import scala.util.chaining.* +import scala.util.matching.Regex +import scala.util.{Try, Using} +import scala.sys.process.* + +// config + +case class Config(extra: Map[String, String]) +object Config { + + def parse(cfgFile: File): Either[String, Config] = { + import org.virtuslab.yaml.* + def decode(any: Any): Map[String, String] = any match { + case map: Map[?, ?] 
=> + map.flatMap { + case (k, v: Map[?, ?]) => decode(v).map { case (k2, v2) => s"$k.$k2" -> v2 } + case (k, v: List[?]) => decode(v).map { case (k2, v2) => s"$k.$k2" -> v2 } + case (k, v) => Map(k.toString -> v.toString) + }.toMap + case list: List[?] => + list.zipWithIndex.flatMap { + case (i: Map[?, ?], idx) => decode(i).map { case (k, v) => s"[$idx].$k" -> v } + case (i: List[?], idx) => decode(i).map { case (k, v) => s"[$idx].$k" -> v } + case (i, idx) => Map(s"[$idx]" -> i.toString) + }.toMap + case _ => throw new IllegalArgumentException(s"$any is not an expected YAML") + } + for { + cfgStr <- Using(io.Source.fromFile(cfgFile))(_.getLines().toList.mkString("\n")).toEither.left + .map(_.getMessage()) + cfgRaw <- cfgStr.as[Any].left.map(_.toString) + extra <- Try(decode(cfgRaw.asInstanceOf[Map[Any, Any]].apply("extra"))).toEither.left.map(_.getMessage) + } yield Config(extra) + } +} + +enum SpecialHandling: + case NotExample(reason: String) + case NeedManual(reason: String) + case TestErrors + +val specialHandling: ListMap[String, SpecialHandling] = ListMap( + "cookbook_Reusing-flags-for-several-transformationspatchings_3" -> SpecialHandling.NotExample("pseudocode"), + "cookbook_Automatic-vs-semiautomatic_1" -> SpecialHandling.NotExample("pseudocode"), + "cookbook_Automatic-vs-semiautomatic_2" -> SpecialHandling.NotExample("pseudocode"), + "cookbook_Automatic-vs-semiautomatic_3" -> SpecialHandling.NotExample("pseudocode"), + "cookbook_Automatic-vs-semiautomatic_4" -> SpecialHandling.NotExample("pseudocode"), + "cookbook_Automatic-vs-semiautomatic_5" -> SpecialHandling.NotExample("pseudocode"), + "cookbook_Performance-concerns_2" -> SpecialHandling.NotExample("example of code generated by macro"), + "cookbook_Performance-concerns_3" -> SpecialHandling.NotExample("example of code generated by macro"), + "cookbook_UnknownFieldSet_1" -> SpecialHandling.TestErrors, + "cookbook_UnknownFieldSet_2" -> SpecialHandling.NeedManual("continuation from 
cookbook_UnknownFieldSet_1"), + "cookbook_UnknownFieldSet_3" -> SpecialHandling.NeedManual("continuation from cookbook_UnknownFieldSet_1"), + "cookbook_oneof-fields_1" -> SpecialHandling.NeedManual("depends on code generated by codegen"), + "cookbook_oneof-fields_2" -> SpecialHandling.NeedManual("depends on code generated by codegen"), + "cookbook_oneof-fields_3" -> SpecialHandling.NeedManual("depends on code generated by codegen"), + "cookbook_oneof-fields_4" -> SpecialHandling.NeedManual("depends on code generated by codegen"), + "cookbook_oneof-fields_5" -> SpecialHandling.NeedManual("depends on code generated by codegen"), + "cookbook_sealed_value-oneof-fields_1" -> SpecialHandling.NeedManual("depends on code generated by codegen"), + "cookbook_sealed_value-oneof-fields_2" -> SpecialHandling.NeedManual("depends on code generated by codegen"), + "cookbook_sealed_value-oneof-fields_3" -> SpecialHandling.NeedManual("depends on code generated by codegen"), + "cookbook_sealed_value_optional-oneof-fields_1" -> SpecialHandling.NeedManual("depends on code generated by codegen"), + "cookbook_sealed_value_optional-oneof-fields_2" -> SpecialHandling.NeedManual("depends on code generated by codegen"), + "cookbook_Libraries-with-smart-constructors_5" -> SpecialHandling.NotExample("pseudocode"), + "index__2" -> SpecialHandling.NeedManual("landing page"), + "index__3" -> SpecialHandling.NeedManual("landing page"), + "index__4" -> SpecialHandling.NeedManual("landing page"), + "index__5" -> SpecialHandling.NeedManual("landing page"), + "index__6" -> SpecialHandling.NeedManual("landing page"), + "quickstart_Quick-Start_1" -> SpecialHandling.NotExample("sbt example"), + "quickstart_Quick-Start_2" -> SpecialHandling.NotExample("sbt example"), + "quickstart_Java-collections-integration_1" -> SpecialHandling.NotExample("sbt example"), + "quickstart_Java-collections-integration_2" -> SpecialHandling.NotExample("sbt example"), + "quickstart_Cats-integration_1" -> 
SpecialHandling.NotExample("sbt example"), + "quickstart_Cats-integration_2" -> SpecialHandling.NotExample("sbt example"), + "quickstart_Protocol-Buffers-integration_1" -> SpecialHandling.NotExample("sbt example"), + "quickstart_Protocol-Buffers-integration_2" -> SpecialHandling.NotExample("sbt example"), + "quickstart_Protocol-Buffers-integration_1" -> SpecialHandling.NotExample("sbt example"), + "quickstart_Protocol-Buffers-integration_2" -> SpecialHandling.NotExample("sbt example"), + "supported-patching_Ignoring-fields-in-patches_1" -> SpecialHandling.TestErrors, + "supported-patching_Ignoring-fields-in-patches_3" -> SpecialHandling.TestErrors, + "supported-transformations_Reading-from-methods_2" -> SpecialHandling.TestErrors, + "supported-transformations_Reading-from-inherited-valuesmethods_2" -> SpecialHandling.TestErrors, + "supported-transformations_Reading-from-Bean-getters_2" -> SpecialHandling.TestErrors, + "supported-transformations_Writing-to-Bean-setters_2" -> SpecialHandling.TestErrors, + "supported-transformations_Ignoring-unmatched-Bean-setters_2" -> SpecialHandling.TestErrors, + "supported-transformations_Allowing-fallback-to-the-constructors-default-values_2" -> SpecialHandling.TestErrors, + "supported-transformations_Allowing-fallback-to-None-as-the-constructors-argument_3" -> SpecialHandling.TestErrors, + "supported-transformations_Customizing-field-name-matching_2" -> SpecialHandling.TestErrors, + "supported-transformations_Frominto-an-AnyVal_2" -> SpecialHandling.TestErrors, + "supported-transformations_Between-sealedenums_2" -> SpecialHandling.NeedManual( + "snippet fails!!! investigate later" + ), // FIXME + "supported-transformations_Between-sealedenums_3" -> SpecialHandling.NeedManual( + "snippet throws exception!!! investigate later" + ), // FIXME + "supported-transformations_Between-sealedenums_4" -> SpecialHandling.NeedManual( + "snippet throws exception!!! 
investigate later" + ), // FIXME + "supported-transformations_Javas-enums_1" -> SpecialHandling.NeedManual("requires previous snippet with Java code"), + "supported-transformations_Javas-enums_2" -> SpecialHandling.NeedManual("requires previous snippet with Java code"), + "supported-transformations_Handling-a-specific-sealed-subtype-with-a-computed-value_3" -> SpecialHandling.NeedManual( + "snippet throws exception!!! investigate later" + ), // FIXME + "supported-transformations_Handling-a-specific-sealed-subtype-with-a-computed-value_4" -> SpecialHandling.NeedManual( + "requires previous snippet with Java code" + ), + "supported-transformations_Handling-a-specific-sealed-subtype-with-a-computed-value_5" -> SpecialHandling.NeedManual( + "requires previous snippet with Java code" + ), + "supported-transformations_Handling-a-specific-sealed-subtype-with-a-computed-value_6" -> SpecialHandling.NeedManual( + "requires previous snippet with Java code" + ), + "supported-transformations_Customizing-subtype-name-matching_3" -> SpecialHandling.TestErrors, + "supported-transformations_Controlling-automatic-Option-unwrapping_1" -> SpecialHandling.TestErrors, + "supported-transformations_Types-with-manually-provided-constructors_3" -> SpecialHandling.NeedManual( + "example split into multiple files" + ), + "supported-transformations_Types-with-manually-provided-constructors_4" -> SpecialHandling.NeedManual( + "continuation from the previous snippet" + ), + "supported-transformations_Types-with-manually-provided-constructors_5" -> SpecialHandling.NeedManual( + "example split into multiple files" + ), + "supported-transformations_Types-with-manually-provided-constructors_6" -> SpecialHandling.NeedManual( + "continuation from the previous snippet" + ), + "supported-transformations_Resolving-priority-of-implicit-Total-vs-Partial-Transformers_1" -> SpecialHandling.TestErrors, + "supported-transformations_Defining-custom-name-matching-predicate_1" -> SpecialHandling.NeedManual( + 
"example split into multiple files" + ), + "supported-transformations_Defining-custom-name-matching-predicate_2" -> SpecialHandling.NeedManual( + "continuation from the previous snippet" + ), + "troubleshooting_Replacing-Lifted-Transformers-TransformerF-with-PartialTransformers_1" -> SpecialHandling.NotExample( + "pseudocode" + ), + "troubleshooting_Explicit-enabling-of-default-values_1" -> SpecialHandling.NotExample("pseudocode"), + "troubleshooting_Ducktape_2" -> SpecialHandling.NeedManual("snippet throws exception!!! investigate later"), // FIXME + "troubleshooting_Ducktape_4" -> SpecialHandling.NeedManual("snippet throws exception!!! investigate later"), // FIXME + "troubleshooting_Ducktape_8" -> SpecialHandling.NeedManual("snippet throws exception!!! investigate later"), // FIXME + "troubleshooting_Ducktape_10" -> SpecialHandling.NeedManual("snippet throws exception!!! investigate later"), // FIXME + "troubleshooting_Recursive-types-fail-to-compile_1" -> SpecialHandling.NotExample("pseudocode"), + "troubleshooting_Recursive-types-fail-to-compile_2" -> SpecialHandling.NotExample("pseudocode"), + "troubleshooting_Recursive-calls-on-implicits_1" -> SpecialHandling.NotExample("pseudocode"), + "troubleshooting_Recursive-calls-on-implicits_2" -> SpecialHandling.NotExample("pseudocode"), + "troubleshooting_Recursive-calls-on-implicits_3" -> SpecialHandling.NotExample("pseudocode"), + "under-the-hood_How-DSL-summons-Transformer-instance_1" -> SpecialHandling.NotExample("pseudocode"), + "under-the-hood_How-DSL-summons-Transformer-instance_2" -> SpecialHandling.NotExample("pseudocode"), + "under-the-hood_How-DSL-summons-Transformer-instance_3" -> SpecialHandling.NotExample("pseudocode"), + "under-the-hood_How-DSL-summons-Transformer-instance_4" -> SpecialHandling.NotExample("pseudocode"), + "under-the-hood_How-DSL-summons-Transformer-instance_5" -> SpecialHandling.NotExample("pseudocode"), + "under-the-hood_How-DSL-summons-Transformer-instance_6" -> 
SpecialHandling.NotExample("pseudocode"), + "under-the-hood_How-DSL-manages-customizations_1" -> SpecialHandling.NotExample("pseudocode"), + "under-the-hood_Carrying-around-the-runtime-configuration_1" -> SpecialHandling.NotExample("pseudocode"), + "under-the-hood_Carrying-around-the-runtime-configuration_2" -> SpecialHandling.NotExample("pseudocode"), + "under-the-hood_Carrying-around-the-runtime-configuration_3" -> SpecialHandling.NotExample("pseudocode"), + "under-the-hood_Carrying-around-the-runtime-configuration_4" -> SpecialHandling.NotExample("pseudocode"), + "under-the-hood_Carrying-around-the-type-level-configuration_1" -> SpecialHandling.NotExample("pseudocode"), + "under-the-hood_Total-vs-Partial_1" -> SpecialHandling.NotExample("pseudocode"), + "under-the-hood_Total-vs-Partial_2" -> SpecialHandling.NotExample("pseudocode"), + "under-the-hood_Total-vs-Partial_3" -> SpecialHandling.NotExample("pseudocode"), + "under-the-hood_Summoning-implicits_1" -> SpecialHandling.NotExample("pseudocode"), + "under-the-hood_Sealed-hierarchies_1" -> SpecialHandling.NotExample("pseudocode"), + "under-the-hood_Sealed-hierarchies_2" -> SpecialHandling.NotExample("pseudocode"), + "under-the-hood_Scala-2-vs-Scala-3-in-derivation_1" -> SpecialHandling.NotExample("pseudocode"), + "under-the-hood_Scala-2-vs-Scala-3-in-derivation_3" -> SpecialHandling.NotExample("pseudocode"), + "under-the-hood_Scala-2-vs-Scala-3-in-derivation_4" -> SpecialHandling.NotExample("pseudocode"), + "under-the-hood_Scala-2-vs-Scala-3-in-derivation_5" -> SpecialHandling.NotExample("pseudocode"), + "under-the-hood_Scala-2-vs-Scala-3-in-derivation_6" -> SpecialHandling.NotExample("pseudocode"), + "under-the-hood_Scala-2-vs-Scala-3-in-derivation_7" -> SpecialHandling.NotExample("pseudocode"), + "under-the-hood_Scala-2-vs-Scala-3-in-derivation_8" -> SpecialHandling.NotExample("pseudocode"), + "under-the-hood_Scala-2-vs-Scala-3-in-derivation_9" -> SpecialHandling.NotExample("pseudocode") +) + +val ignored: 
Set[String] = specialHandling.keySet + +// models + +case class Markdown(name: String, content: List[String]) { + + def extractAll(replacePatterns: Map[String, String]): List[Snippet] = Snippet.extractAll(this, replacePatterns) +} +object Markdown { + + def readAllInDir(dir: File): List[Markdown] = + for { + files <- Option(dir.listFiles()).toList + markdownFile <- files.sortBy(_.getName()) if markdownFile.getAbsolutePath().endsWith(".md") + } yield Using(io.Source.fromFile(markdownFile)) { src => + val name = markdownFile.getName() + Markdown(name.substring(0, name.length() - ".md".length()), src.getLines().toList) + }.get +} + +case class Snippet(name: String, hint: String, content: String) { + + def expectedErrors: List[String] = + List.empty + + def isIgnored: Boolean = ignored(name) // TODO: make it smarter + + def save(tmpDir: File): File = { + val snippetFile: File = File(s"${tmpDir.getPath()}/$name/snippet.sc") + snippetFile.getParentFile().mkdirs() + Files.writeString(snippetFile.toPath(), content) + snippetFile + } + + def run(tmpDir: File): Unit = { + val snippetDir = File(s"${tmpDir.getPath()}/$name/snippet.sc").getParent() + s"scala-cli run '$snippetDir'".!! 
+ } +} +object Snippet { + + def extractAll(markdown: Markdown, replacePatterns: Map[String, String]): List[Snippet] = { + val name = markdown.name + + case class Example(section: String, ordinal: Int = 0) { + + def next: Example = copy(ordinal = ordinal + 1) + + def toName: String = s"${name}_${section}_$ordinal".replaceAll(" +", "-").replaceAll("[^A-Za-z0-9_-]+", "") + } + + enum Mode: + case Reading(lineNo: Int, indent: Int, contentReverse: List[String]) + case Awaiting + + import Mode.* + + val start = "```scala" + val end = "```" + val sectionName = "#+(.+)".r + + def adjustLine(line: String, indent: Int): String = { + val stripIndent = if line.length() > indent then line.substring(indent) else line + replacePatterns.foldLeft(stripIndent) { case (s, (k, v)) => + s.replaceAll(k, v) + } + } + + def mkSnippet(example: Example, lineNo: Int, contentReverse: List[String]): Snippet = { + val content0 = contentReverse.reverse.mkString("\n") + val content = + if content0.contains("//> using scala") then content0 + else "//> using scala 2.13.13\n" + content0 + Snippet(example.toName, s"$name.md:$lineNo", content) + } + + def loop(content: List[(String, Int)], example: Example, mode: Mode, reverseResult: List[Snippet]): List[Snippet] = + content match { + case (line, lineNo) :: lines => + mode match { + case Reading(lineNo, indent, contentReverse) => + if line.trim() == end then + loop(lines, example, Awaiting, mkSnippet(example, lineNo, contentReverse) :: reverseResult) + else + loop(lines, example, Reading(lineNo, indent, adjustLine(line, indent) :: contentReverse), reverseResult) + case Awaiting => + line.trim() match { + case `start` => loop(lines, example.next, Reading(lineNo + 1, line.indexOf(start), Nil), reverseResult) + case sectionName(section) => loop(lines, Example(section.trim()), Awaiting, reverseResult) + case _ => loop(lines, example, Awaiting, reverseResult) + } + } + case Nil => reverseResult.reverse + } + + loop(markdown.content.zipWithIndex, 
Example(""), Awaiting, Nil) + } +} + +// program + +/** Usage: + * + * From the project root (if called from other directory, adapt path after PWD accordingly): + * + * on CI: + * {{{ + * # run all tests, use artifacts published locally from current tag + * scala-cli run scripts/test-snippets.scala -- "$PWD/docs" "$(sbt -batch -error 'print chimney/version')" "" -1 -1 + * }}} + * + * during development: + * {{{ + * # fix: version to use, tmp directory, drop and take from snippets list (the ordering is deterministic) + * scala-cli run scripts/test-snippets.scala -- "$PWD/docs" "1.0.0-RC1" /var/folders/m_/sm90t09d5591cgz5h242bkm80000gn/T/docs-snippets13141962741435068727 0 44 + * }}} + */ +@main def testExamples( + path: String, + providedVersion: String, + providedTmpDir: String, + providedSnippetsDrop: Int, + providedSnippetsTake: Int +): Unit = { + extension (s: StringContext) def hl(args: Any*): String = s"$MAGENTA${s.s(args*)}$RESET" + + val chimneyVersion = providedVersion.trim + .pipe("\u001b\\[([0-9]+)m".r.replaceAllIn(_, "")) // remove possible console coloring from sbt + .pipe(raw"(?U)\s".r.replaceAllIn(_, "")) // remove possible ESC characters + .replaceAll("\u001B\\[0J", "") // replace this one offending thing + + val cfgFile = File(s"$path/mkdocs.yml") + val cfg = Config.parse(cfgFile).right.get + val replacePatterns = (cfg.extra + (raw"chimney_version\(\)" -> chimneyVersion)).map { case (k, v) => + (raw"\{\{\s*" + k + raw"\s*\}\}") -> v + } + val tmpDir = + if providedTmpDir.isEmpty() then Files.createTempDirectory(s"docs-snippets").toFile() else File(providedTmpDir) + val snippetsDrop = Option(providedSnippetsDrop).filter(_ >= 0).getOrElse(0) + val snippetsTake = Option(providedSnippetsTake).filter(_ > 0).getOrElse(Int.MaxValue) + println(hl"Generation for: version=$chimneyVersion, tmp=$tmpDir, cfg=$cfg") + println() + val docsDir = File(s"$path/docs") + println(hl"Started reading from ${docsDir.getAbsolutePath()}") + println() + val markdowns = 
Markdown.readAllInDir(docsDir) + println(hl"Read files: ${markdowns.map(_.name)}") + println() + val snippets = markdowns.flatMap(_.extractAll(replacePatterns)).drop(snippetsDrop).take(snippetsTake) + println( + hl"Found snippets" + ":\n" + snippets.map(s => hl"\n${s.hint} (${s.name})" + ":\n" + s.content).mkString("\n") + ) + println() + val (ignoredSnippets, testedSnippets) = snippets.partition(_.isIgnored) + println(hl"Ignoring snippets" + ":\n" + ignoredSnippets.map(s => hl"${s.hint} (${s.name})").mkString("\n")) + println() + val ignoredNotFound = ignored.filterNot(i => snippets.exists(_.name == i)).toList.sorted + if ignoredNotFound.nonEmpty && providedSnippetsDrop == -1 && providedSnippetsTake == -1 then { + println( + hl"Some ignored snippets have been moved, their indices changed and cannot be matched" + ":\n" + ignoredNotFound + .mkString("\n") + ) + sys.exit(1) + } + val failed = snippets.flatMap { snippet => + println() + import snippet.{hint, name} + if snippet.isIgnored then { + println(hl"Snippet $hint (stable name: $name) was ignored") + List.empty[String] + } else { + val snippetDir = snippet.save(tmpDir) + println(hl"Snippet: $hint (stable name: $name) saved in $snippetDir, testing" + ":\n" + snippet.content) + try { + snippet.run(tmpDir) + println(hl"Snippet: $hint (stable name: $name) succeeded") + List.empty[String] + } catch { + case _: Throwable => + println(hl"Snippet: $hint (stable name: $name) failed") + List(s"$hint (stable name: $name)") + } + } + } + + println() + if failed.nonEmpty then { + println( + hl"Failed snippets (${failed.length}/${testedSnippets.length}, ignored: ${ignoredSnippets.length})" + s":\n${failed + .mkString("\n")}" + ) + println(hl"Fix them or add to ignored list (name in parenthesis is less subject to change)") + sys.exit(1) + } else { + println(hl"All snippets (${testedSnippets.length}, ignored: ${ignoredSnippets.length}) run successfully!") + } +}