From 54d90165e9ee3428fba2dce7f950ed849897b16b Mon Sep 17 00:00:00 2001 From: Julien Tournay Date: Tue, 1 Dec 2020 15:14:19 +0100 Subject: [PATCH 01/56] Scala3 port IsJavaBean --- build.sbt | 21 +++-- project/ScalacOptions.scala | 6 +- project/plugins.sbt | 1 + .../com/spotify/scio/IsJava.scala | 0 .../com/spotify/scio/MacroSettings.scala | 0 .../com/spotify/scio/MagnoliaMacros.scala | 0 .../com/spotify/scio/SysPropsMacros.scala | 0 .../spotify/scio/coders/AvroCoderMacros.scala | 0 .../com/spotify/scio/coders/CoderMacros.scala | 0 .../spotify/scio/coders/KryoRegistrar.scala | 0 .../scala-3/com/spotify/scio/IsJava.scala | 88 +++++++++++++++++++ 11 files changed, 108 insertions(+), 8 deletions(-) rename scio-macros/src/main/{scala => scala-2}/com/spotify/scio/IsJava.scala (100%) rename scio-macros/src/main/{scala => scala-2}/com/spotify/scio/MacroSettings.scala (100%) rename scio-macros/src/main/{scala => scala-2}/com/spotify/scio/MagnoliaMacros.scala (100%) rename scio-macros/src/main/{scala => scala-2}/com/spotify/scio/SysPropsMacros.scala (100%) rename scio-macros/src/main/{scala => scala-2}/com/spotify/scio/coders/AvroCoderMacros.scala (100%) rename scio-macros/src/main/{scala => scala-2}/com/spotify/scio/coders/CoderMacros.scala (100%) rename scio-macros/src/main/{scala => scala-2}/com/spotify/scio/coders/KryoRegistrar.scala (100%) create mode 100644 scio-macros/src/main/scala-3/com/spotify/scio/IsJava.scala diff --git a/build.sbt b/build.sbt index f0c9da7091..0b30af2e61 100644 --- a/build.sbt +++ b/build.sbt @@ -274,7 +274,11 @@ lazy val assemblySettings = Seq( ) lazy val macroSettings = Def.settings( - libraryDependencies += "org.scala-lang" % "scala-reflect" % scalaVersion.value, + libraryDependencies ++= { + if (!isDotty.value) + Seq("org.scala-lang" % "scala-reflect" % scalaVersion.value) + else Nil + }, libraryDependencies ++= { VersionNumber(scalaVersion.value) match { case v if v.matchesSemVer(SemanticSelector("2.12.x")) => @@ -542,12 +546,19 @@ lazy val `scio-macros`: Project = project .settings( description := "Scio macros", libraryDependencies ++= Seq( - "com.chuusai" %% "shapeless" % shapelessVersion, "com.esotericsoftware" % "kryo-shaded" % kryoVersion, "org.apache.beam" % "beam-sdks-java-extensions-sql" % beamVersion, - "org.apache.avro" % "avro" % avroVersion, - "com.propensive" %% "magnolia" % magnoliaVersion - ) + "org.apache.avro" % "avro" % avroVersion + ), + libraryDependencies ++= { + if (!isDotty.value) + Seq( + "com.chuusai" %% "shapeless" % shapelessVersion, + "com.propensive" %% "magnolia" % magnoliaVersion + ) + else Nil + }, + crossScalaVersions += "3.0.0-M2" ) lazy val `scio-avro`: Project = project diff --git a/project/ScalacOptions.scala b/project/ScalacOptions.scala index 43aade402c..e2012c24a5 100644 --- a/project/ScalacOptions.scala +++ b/project/ScalacOptions.scala @@ -59,9 +59,9 @@ object Scalac { // "-Ywarn-unused:privates", // Warn if a private member is unused. "-Ywarn-value-discard", // Warn when non-Unit expression results are unused. 
"-Xmacro-settings:show-coder-fallback=true", - "-Ydelambdafy:inline", // Set the strategy used for translating lambdas into JVM code to "inline" - "-Ybackend-parallelism", - parallelism.toString + "-Ydelambdafy:inline" // Set the strategy used for translating lambdas into JVM code to "inline" + // "-Ybackend-parallelism", + // parallelism.toString ) } diff --git a/project/plugins.sbt b/project/plugins.sbt index 5ffb63776d..84aadb0356 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -20,6 +20,7 @@ addSbtPlugin("com.thoughtworks.sbt-api-mappings" % "sbt-api-mappings" % "3.0.0") addSbtPlugin("net.virtual-void" % "sbt-dependency-graph" % "0.10.0-RC1") addSbtPlugin("de.heikoseeberger" % "sbt-header" % "5.6.0") addSbtPlugin("com.geirsson" % "sbt-ci-release" % "1.5.7") +addSbtPlugin("ch.epfl.lamp" % "sbt-dotty" % "0.4.4") libraryDependencies ++= Seq( "com.github.os72" % "protoc-jar" % "3.11.4", diff --git a/scio-macros/src/main/scala/com/spotify/scio/IsJava.scala b/scio-macros/src/main/scala-2/com/spotify/scio/IsJava.scala similarity index 100% rename from scio-macros/src/main/scala/com/spotify/scio/IsJava.scala rename to scio-macros/src/main/scala-2/com/spotify/scio/IsJava.scala diff --git a/scio-macros/src/main/scala/com/spotify/scio/MacroSettings.scala b/scio-macros/src/main/scala-2/com/spotify/scio/MacroSettings.scala similarity index 100% rename from scio-macros/src/main/scala/com/spotify/scio/MacroSettings.scala rename to scio-macros/src/main/scala-2/com/spotify/scio/MacroSettings.scala diff --git a/scio-macros/src/main/scala/com/spotify/scio/MagnoliaMacros.scala b/scio-macros/src/main/scala-2/com/spotify/scio/MagnoliaMacros.scala similarity index 100% rename from scio-macros/src/main/scala/com/spotify/scio/MagnoliaMacros.scala rename to scio-macros/src/main/scala-2/com/spotify/scio/MagnoliaMacros.scala diff --git a/scio-macros/src/main/scala/com/spotify/scio/SysPropsMacros.scala b/scio-macros/src/main/scala-2/com/spotify/scio/SysPropsMacros.scala similarity index 100% rename from scio-macros/src/main/scala/com/spotify/scio/SysPropsMacros.scala rename to scio-macros/src/main/scala-2/com/spotify/scio/SysPropsMacros.scala diff --git a/scio-macros/src/main/scala/com/spotify/scio/coders/AvroCoderMacros.scala b/scio-macros/src/main/scala-2/com/spotify/scio/coders/AvroCoderMacros.scala similarity index 100% rename from scio-macros/src/main/scala/com/spotify/scio/coders/AvroCoderMacros.scala rename to scio-macros/src/main/scala-2/com/spotify/scio/coders/AvroCoderMacros.scala diff --git a/scio-macros/src/main/scala/com/spotify/scio/coders/CoderMacros.scala b/scio-macros/src/main/scala-2/com/spotify/scio/coders/CoderMacros.scala similarity index 100% rename from scio-macros/src/main/scala/com/spotify/scio/coders/CoderMacros.scala rename to scio-macros/src/main/scala-2/com/spotify/scio/coders/CoderMacros.scala diff --git a/scio-macros/src/main/scala/com/spotify/scio/coders/KryoRegistrar.scala b/scio-macros/src/main/scala-2/com/spotify/scio/coders/KryoRegistrar.scala similarity index 100% rename from scio-macros/src/main/scala/com/spotify/scio/coders/KryoRegistrar.scala rename to scio-macros/src/main/scala-2/com/spotify/scio/coders/KryoRegistrar.scala diff --git a/scio-macros/src/main/scala-3/com/spotify/scio/IsJava.scala b/scio-macros/src/main/scala-3/com/spotify/scio/IsJava.scala new file mode 100644 index 0000000000..7e60555f38 --- /dev/null +++ b/scio-macros/src/main/scala-3/com/spotify/scio/IsJava.scala @@ -0,0 +1,88 @@ +/* + * Copyright 2019 Spotify AB. 
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.spotify.scio
+
+import scala.compiletime._
+import scala.deriving._
+import scala.quoted._
+
+/** Proof that a type is implemented in Java */
+sealed trait IsJavaBean[T]
+
+object IsJavaBean {
+
+  private def checkGetterAndSetters(using q: Quotes)(sym: q.reflect.Symbol): Unit = {
+    import q.reflect._
+    val methods: List[Symbol] = sym.classMethods
+
+    val getters =
+      methods.collect {
+        case s if s.name.toString.startsWith("get") =>
+          (s.name.toString.drop(3), s.tree.asInstanceOf[DefDef])
+      }
+
+    val setters =
+      methods.collect {
+        case s if s.name.toString.startsWith("set") =>
+          (s.name.toString.drop(3), s.tree.asInstanceOf[DefDef])
+      }.toMap
+
+    if (getters.isEmpty) {
+      val mess =
+        s"""Class ${sym.name} has no getter"""
+      report.error(mess)
+    }
+
+    getters.foreach { case (name, info) =>
+      val setter: DefDef =
+        setters // Map[String, DefDef]
+          .get(name)
+          .getOrElse {
+            val mess =
+              s"""JavaBean contained a getter for field $name""" +
+                """ but did not contain a matching setter."""
+            report.throwError(mess)
+          }
+
+      val resType = info.returnTpt.tpe
+      val paramType = setter.paramss.head.head.tpt.tpe
+
+      if (resType != paramType) {
+        val mess =
+          s"""JavaBean contained setter for field $name that had a mismatching type.
+             | found: $paramType
+             | expected: $resType""".stripMargin
+        report.throwError(mess)
+      }
+    }
+  }
+
+  private def isJavaBeanImpl[T](using Quotes, Type[T]): Expr[IsJavaBean[T]] = {
+    import quotes.reflect._
+    val sym = TypeTree.of[T].symbol
+    // TODO: check if symbol is a Java class ?
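+    // One possible shape for the TODO above (a sketch, assuming the reflection
+    // API's Flags.JavaDefined flag; not verified against this exact Dotty milestone):
+    //   if (!sym.flags.is(Flags.JavaDefined))
+    //     report.error(s"${sym.fullName} is not a Java class")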
+ checkGetterAndSetters(sym) + '{new IsJavaBean[T]{}} + } + + inline given isJavaBean[T] as IsJavaBean[T] = { + ${ isJavaBeanImpl[T] } + } + + def apply[T](using i: IsJavaBean[T]): IsJavaBean[T] = i +} From 5b0ca6edd777cbf4ad27b26af5280291c8a50a4f Mon Sep 17 00:00:00 2001 From: Julien Tournay Date: Wed, 2 Dec 2020 09:56:54 +0100 Subject: [PATCH 02/56] WIP: Upgrade scio-core to Scala 3 --- build.sbt | 37 +- .../spotify/scio/coders/DerivedCoder.scala | 0 .../spotify/scio/coders/FallbackCoder.scala | 6 + .../coders/instances/AvroCoderMacros.scala | 26 + .../scio/schemas/SchemaMacroHelpers.scala | 48 + .../scala-2/com/spotify/scio/schemas/To.scala | 63 + .../LowPrioritySchemaDerivation.scala | 0 .../spotify/scio/coders/DerivedCoder.scala | 116 + .../spotify/scio/coders/FallbackCoder.scala | 9 + .../coders/instances/AvroCoderMacros.scala | 57 + .../scio/coders/instances/TupleCoders.scala | 4746 +++++++++++++++++ .../kryo/JTraversableSerializer.scala | 91 + .../coders/macros/FallbackCoderMacros.scala | 104 + .../scala-3/com/spotify/scio/schemas/To.scala | 42 + .../LowPrioritySchemaDerivation.scala | 56 + .../com/spotify/scio/util/JMapWrapper.scala | 71 + .../scala/collection/compat/extra.scala | 24 + .../scala/com/spotify/scio/VersionUtil.scala | 6 +- .../scala/com/spotify/scio/coders/Coder.scala | 4 +- .../spotify/scio/coders/KryoAtomicCoder.scala | 2 +- .../scio/coders/instances/AvroCoders.scala | 8 +- .../scio/coders/instances/ScalaCoders.scala | 36 +- .../estimators/ApproxDistinctCounter.scala | 1 + .../com/spotify/scio/hash/ApproxFilter.scala | 2 +- .../com/spotify/scio/io/dynamic/package.scala | 4 +- .../com/spotify/scio/schemas/Schema.scala | 51 +- .../scio/schemas/SchemaMaterializer.scala | 8 +- .../scala/com/spotify/scio/schemas/To.scala | 35 +- .../com/spotify/scio/util/ArtisanJoin.scala | 3 +- .../com/spotify/scio/util/MultiJoin.scala | 338 +- .../values/PairHashSCollectionFunctions.scala | 2 +- .../values/PairSCollectionFunctions.scala | 21 +- .../PairSkewedSCollectionFunctions.scala | 4 +- .../com/spotify/scio/values/SCollection.scala | 6 +- .../spotify/scio/coders/AvroCoderMacros.scala | 4 +- .../com/spotify/scio/DerivationUtils.scala | 38 + .../scala-3/com/spotify/scio/IsJava.scala | 4 +- .../com/spotify/scio/SysPropsMacros.scala | 29 + .../com/spotify/scio/coders/CoderMacros.scala | 30 + .../spotify/scio/coders/KryoRegistrar.scala | 33 + scripts/multijoin.py | 6 +- 41 files changed, 5851 insertions(+), 320 deletions(-) rename scio-core/src/main/{scala => scala-2}/com/spotify/scio/coders/DerivedCoder.scala (100%) create mode 100644 scio-core/src/main/scala-2/com/spotify/scio/coders/FallbackCoder.scala create mode 100644 scio-core/src/main/scala-2/com/spotify/scio/coders/instances/AvroCoderMacros.scala create mode 100644 scio-core/src/main/scala-2/com/spotify/scio/schemas/SchemaMacroHelpers.scala create mode 100644 scio-core/src/main/scala-2/com/spotify/scio/schemas/To.scala rename scio-core/src/main/{scala => scala-2}/com/spotify/scio/schemas/instances/LowPrioritySchemaDerivation.scala (100%) create mode 100644 scio-core/src/main/scala-3/com/spotify/scio/coders/DerivedCoder.scala create mode 100644 scio-core/src/main/scala-3/com/spotify/scio/coders/FallbackCoder.scala create mode 100644 scio-core/src/main/scala-3/com/spotify/scio/coders/instances/AvroCoderMacros.scala create mode 100644 scio-core/src/main/scala-3/com/spotify/scio/coders/instances/TupleCoders.scala create mode 100644 scio-core/src/main/scala-3/com/spotify/scio/coders/instances/kryo/JTraversableSerializer.scala 
create mode 100644 scio-core/src/main/scala-3/com/spotify/scio/coders/macros/FallbackCoderMacros.scala create mode 100644 scio-core/src/main/scala-3/com/spotify/scio/schemas/To.scala create mode 100644 scio-core/src/main/scala-3/com/spotify/scio/schemas/instances/LowPrioritySchemaDerivation.scala create mode 100644 scio-core/src/main/scala-3/com/spotify/scio/util/JMapWrapper.scala create mode 100644 scio-core/src/main/scala-3/scala/collection/compat/extra.scala create mode 100644 scio-macros/src/main/scala-3/com/spotify/scio/DerivationUtils.scala create mode 100644 scio-macros/src/main/scala-3/com/spotify/scio/SysPropsMacros.scala create mode 100644 scio-macros/src/main/scala-3/com/spotify/scio/coders/CoderMacros.scala create mode 100644 scio-macros/src/main/scala-3/com/spotify/scio/coders/KryoRegistrar.scala diff --git a/build.sbt b/build.sbt index 0b30af2e61..b608eb884d 100644 --- a/build.sbt +++ b/build.sbt @@ -397,12 +397,11 @@ lazy val `scio-core`: Project = project (ThisBuild / baseDirectory).value / "version.sbt" ), libraryDependencies ++= Seq( - "com.chuusai" %% "shapeless" % shapelessVersion, "com.esotericsoftware" % "kryo-shaded" % kryoVersion, "com.fasterxml.jackson.core" % "jackson-databind" % jacksonVersion, - "com.fasterxml.jackson.module" %% "jackson-module-scala" % jacksonVersion, - "com.github.alexarchambault" %% "case-app" % caseappVersion, - "com.github.alexarchambault" %% "case-app-annotations" % caseappVersion, + ("com.fasterxml.jackson.module" %% "jackson-module-scala" % jacksonVersion).withDottyCompat(scalaVersion.value), + ("com.github.alexarchambault" %% "case-app" % caseappVersion).withDottyCompat(scalaVersion.value), + ("com.github.alexarchambault" %% "case-app-annotations" % caseappVersion).withDottyCompat(scalaVersion.value), "com.github.ben-manes.caffeine" % "caffeine" % caffeineVersion % "provided", "com.google.api-client" % "google-api-client" % googleClientsVersion, "com.google.apis" % "google-api-services-dataflow" % googleApiServicesDataflow, @@ -413,9 +412,9 @@ lazy val `scio-core`: Project = project "com.google.protobuf" % "protobuf-java" % protobufVersion, "com.twitter" % "chill-java" % chillVersion, "com.twitter" % "chill-protobuf" % chillVersion, - "com.twitter" %% "algebird-core" % algebirdVersion, - "com.twitter" %% "chill" % chillVersion, - "com.twitter" %% "chill-algebird" % chillVersion, + ("com.twitter" %% "algebird-core" % algebirdVersion).withDottyCompat(scalaVersion.value), + ("com.twitter" %% "chill" % chillVersion).withDottyCompat(scalaVersion.value), + ("com.twitter" %% "chill-algebird" % chillVersion).withDottyCompat(scalaVersion.value), "commons-io" % "commons-io" % commonsIoVersion, "io.grpc" % "grpc-auth" % grpcVersion, "io.grpc" % "grpc-core" % grpcVersion, @@ -424,7 +423,7 @@ lazy val `scio-core`: Project = project "io.grpc" % "grpc-stub" % grpcVersion, "io.netty" % "netty-handler" % nettyVersion, "joda-time" % "joda-time" % jodaTimeVersion, - "me.lyh" %% "protobuf-generic" % protobufGenericVersion, + ("me.lyh" %% "protobuf-generic" % protobufGenericVersion).withDottyCompat(scalaVersion.value), "org.apache.avro" % "avro" % avroVersion, "org.apache.beam" % "beam-runners-core-construction-java" % beamVersion, "org.apache.beam" % "beam-runners-google-cloud-dataflow-java" % beamVersion % Provided, @@ -444,12 +443,25 @@ lazy val `scio-core`: Project = project "org.apache.commons" % "commons-math3" % commonsMath3Version, "org.scalatest" %% "scalatest" % scalatestVersion % Test, "org.slf4j" % "slf4j-api" % slf4jVersion, - "org.typelevel" 
%% "algebra" % algebraVersion, - "org.scala-lang.modules" %% "scala-collection-compat" % scalaCollectionCompatVersion, - "com.propensive" %% "magnolia" % magnoliaVersion + ("org.typelevel" %% "algebra" % algebraVersion).withDottyCompat(scalaVersion.value), + ("org.scala-lang.modules" %% "scala-collection-compat" % scalaCollectionCompatVersion).withDottyCompat(scalaVersion.value) ), buildInfoKeys := Seq[BuildInfoKey](scalaVersion, version, "beamVersion" -> beamVersion), - buildInfoPackage := "com.spotify.scio" + buildInfoPackage := "com.spotify.scio", + // Scala3 setting + crossScalaVersions += "3.0.0-M2", + libraryDependencies ++= { + if (!isDotty.value) + Seq( + "com.chuusai" %% "shapeless" % shapelessVersion, + "com.propensive" %% "magnolia" % magnoliaVersion + ) + else Nil + }, + scalacOptions ++= { + if (isDotty.value) Seq("-source:3.0-migration") else Nil + }, + compileOrder := CompileOrder.JavaThenScala, ) .dependsOn( `scio-schemas` % "test->test", @@ -558,6 +570,7 @@ lazy val `scio-macros`: Project = project ) else Nil }, + // Scala3 setting crossScalaVersions += "3.0.0-M2" ) diff --git a/scio-core/src/main/scala/com/spotify/scio/coders/DerivedCoder.scala b/scio-core/src/main/scala-2/com/spotify/scio/coders/DerivedCoder.scala similarity index 100% rename from scio-core/src/main/scala/com/spotify/scio/coders/DerivedCoder.scala rename to scio-core/src/main/scala-2/com/spotify/scio/coders/DerivedCoder.scala diff --git a/scio-core/src/main/scala-2/com/spotify/scio/coders/FallbackCoder.scala b/scio-core/src/main/scala-2/com/spotify/scio/coders/FallbackCoder.scala new file mode 100644 index 0000000000..bd89032a00 --- /dev/null +++ b/scio-core/src/main/scala-2/com/spotify/scio/coders/FallbackCoder.scala @@ -0,0 +1,6 @@ +package com.spotify.scio.coders + +trait FallbackCoder { + def fallback[T](implicit lp: shapeless.LowPriority): Coder[T] = + macro CoderMacros.issueFallbackWarning[T] +} diff --git a/scio-core/src/main/scala-2/com/spotify/scio/coders/instances/AvroCoderMacros.scala b/scio-core/src/main/scala-2/com/spotify/scio/coders/instances/AvroCoderMacros.scala new file mode 100644 index 0000000000..c669dc2902 --- /dev/null +++ b/scio-core/src/main/scala-2/com/spotify/scio/coders/instances/AvroCoderMacros.scala @@ -0,0 +1,26 @@ +/* + * Copyright 2019 Spotify AB. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package com.spotify.scio.coders.instances + +import com.spotify.scio.coders.Coder +import org.apache.avro.specific.SpecificRecordBase + +private[instances] trait AvroCodersMacros { + implicit def genAvro[T <: SpecificRecordBase]: Coder[T] = + macro AvroCoderMacros.staticInvokeCoder[T] +} diff --git a/scio-core/src/main/scala-2/com/spotify/scio/schemas/SchemaMacroHelpers.scala b/scio-core/src/main/scala-2/com/spotify/scio/schemas/SchemaMacroHelpers.scala new file mode 100644 index 0000000000..63eaa1139e --- /dev/null +++ b/scio-core/src/main/scala-2/com/spotify/scio/schemas/SchemaMacroHelpers.scala @@ -0,0 +1,48 @@ +package com.spotify.scio.schemas + +import scala.reflect.ClassTag +import org.apache.beam.sdk.values.TupleTag + +private[scio] trait SchemaMacroHelpers { + import scala.reflect.macros._ + import com.spotify.scio.{FeatureFlag, MacroSettings} + + val ctx: blackbox.Context + import ctx.universe._ + + val cacheImplicitSchemas: FeatureFlag = MacroSettings.cacheImplicitSchemas(ctx) + + def untyped[A: ctx.WeakTypeTag](expr: ctx.Expr[Schema[A]]): ctx.Expr[Schema[A]] = + ctx.Expr[Schema[A]](ctx.untypecheck(expr.tree.duplicate)) + + def inferImplicitSchema[A: ctx.WeakTypeTag]: ctx.Expr[Schema[A]] = + inferImplicitSchema(weakTypeOf[A]).asInstanceOf[ctx.Expr[Schema[A]]] + + def inferImplicitSchema(t: ctx.Type): ctx.Expr[Schema[_]] = { + val tpe = + cacheImplicitSchemas match { + case FeatureFlag.Enable => + tq"_root_.shapeless.Cached[_root_.com.spotify.scio.schemas.Schema[$t]]" + case _ => + tq"_root_.com.spotify.scio.schemas.Schema[$t]" + } + + val tp = ctx.typecheck(tpe, ctx.TYPEmode).tpe + val typedTree = ctx.inferImplicitValue(tp, silent = false) + val untypedTree = ctx.untypecheck(typedTree.duplicate) + + cacheImplicitSchemas match { + case FeatureFlag.Enable => + ctx.Expr[Schema[_]](q"$untypedTree.value") + case _ => + ctx.Expr[Schema[_]](untypedTree) + } + } + + def inferClassTag(t: ctx.Type): ctx.Expr[ClassTag[_]] = + ctx.Expr[ClassTag[_]](q"implicitly[_root_.scala.reflect.ClassTag[$t]]") + + implicit def liftTupleTag[A: ctx.WeakTypeTag]: Liftable[TupleTag[A]] = Liftable[TupleTag[A]] { + x => q"new _root_.org.apache.beam.sdk.values.TupleTag[${weakTypeOf[A]}](${x.getId()})" + } +} diff --git a/scio-core/src/main/scala-2/com/spotify/scio/schemas/To.scala b/scio-core/src/main/scala-2/com/spotify/scio/schemas/To.scala new file mode 100644 index 0000000000..cb124c05e8 --- /dev/null +++ b/scio-core/src/main/scala-2/com/spotify/scio/schemas/To.scala @@ -0,0 +1,63 @@ +/* + * Copyright 2019 Spotify AB. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */
+
+package com.spotify.scio.schemas
+
+import com.spotify.scio.values._
+import com.spotify.scio.coders._
+import com.spotify.scio.util.ScioUtil
+import org.apache.beam.sdk.values._
+import org.apache.beam.sdk.schemas.{SchemaCoder, Schema => BSchema}
+
+import scala.jdk.CollectionConverters._
+import scala.annotation.tailrec
+import scala.reflect.ClassTag
+
+trait ToMacro {
+  /**
+   * Convert instances of ${T} in this SCollection into instances of ${O}
+   * based on the Schemas of the two classes. The compatibility of those classes is checked
+   * at compile time.
+   * @see To#unsafe
+   */
+  def safe[I: Schema, O: Schema]: To[I, O] =
+    macro ToMacro.safeImpl[I, O]
+}
+
+object ToMacro {
+  import scala.reflect.macros._
+  def safeImpl[I: c.WeakTypeTag, O: c.WeakTypeTag](
+    c: blackbox.Context
+  )(iSchema: c.Expr[Schema[I]], oSchema: c.Expr[Schema[O]]): c.Expr[To[I, O]] = {
+    val h = new { val ctx: c.type = c } with SchemaMacroHelpers
+    import h._
+    import c.universe._
+
+    val tpeI = weakTypeOf[I]
+    val tpeO = weakTypeOf[O]
+
+    val expr = c.Expr[(Schema[I], Schema[O])](q"(${untyped(iSchema)}, ${untyped(oSchema)})")
+    val (sIn, sOut) = c.eval(expr)
+
+    val schemaOut: BSchema = SchemaMaterializer.fieldType(sOut).getRowSchema()
+    val schemaIn: BSchema = SchemaMaterializer.fieldType(sIn).getRowSchema()
+
+    To.checkCompatibility(schemaIn, schemaOut) {
+      q"""_root_.com.spotify.scio.schemas.To.unchecked[$tpeI, $tpeO]"""
+    }.fold(message => c.abort(c.enclosingPosition, message), t => c.Expr[To[I, O]](t))
+  }
+}
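+
+// A hedged usage sketch (assumption: `UserV1` and `UserV2` are hypothetical case
+// classes with Schema instances whose fields are compatible):
+//
+//   val to: To[UserV1, UserV2] = To.safe[UserV1, UserV2] // checked at compile time
+//
+// If the schemas do not line up, compilation aborts with the message produced by
+// To.checkCompatibility; To#unsafe defers the same check to runtime.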
diff --git a/scio-core/src/main/scala/com/spotify/scio/schemas/instances/LowPrioritySchemaDerivation.scala b/scio-core/src/main/scala-2/com/spotify/scio/schemas/instances/LowPrioritySchemaDerivation.scala
similarity index 100%
rename from scio-core/src/main/scala/com/spotify/scio/schemas/instances/LowPrioritySchemaDerivation.scala
rename to scio-core/src/main/scala-2/com/spotify/scio/schemas/instances/LowPrioritySchemaDerivation.scala
diff --git a/scio-core/src/main/scala-3/com/spotify/scio/coders/DerivedCoder.scala b/scio-core/src/main/scala-3/com/spotify/scio/coders/DerivedCoder.scala
new file mode 100644
index 0000000000..bd63546718
--- /dev/null
+++ b/scio-core/src/main/scala-3/com/spotify/scio/coders/DerivedCoder.scala
@@ -0,0 +1,116 @@
+/*
+ * Copyright 2019 Spotify AB.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.spotify.scio.coders
+
+import scala.deriving._
+import scala.compiletime._
+import scala.quoted._
+import com.spotify.scio.macros.DerivationUtils
+
+private object Derived extends Serializable {
+  @inline private def catching[T](msg: => String, stack: Array[StackTraceElement])(v: => T): T =
+    try {
+      v
+    } catch {
+      case e: Exception =>
+        /* prior to scio 0.8, a wrapped exception was thrown. It is no longer the case, as some
+         backends (e.g. Flink) use exceptions as a way to signal from the Coder to the layers
+         above; we therefore must alter the type of exceptions passing through this block.
+         */
+        throw CoderStackTrace.append(e, Some(msg), stack)
+    }
+
+  def coderProduct[T](
+    p: Mirror.ProductOf[T],
+    typeName: String,
+    cs: Array[(String, Coder[Any])]
+  ): Coder[T] = {
+    val rawConstruct: Seq[Any] => T =
+      vs => p.fromProduct(Tuple.fromArray(vs.toArray))
+
+    // TODO: scala3 - check stack trace correctness
+    val materializationStack = CoderStackTrace.prepare
+
+    val constructor: Seq[Any] => T =
+      ps =>
+        catching(s"Error while constructing object from parameters $ps", materializationStack)(
+          rawConstruct(ps)
+        )
+
+    // TODO: error handling ? (can it even fail ?)
+    val destruct: T => Array[Any] =
+      // XXX: scala3 - I should probably not need to cast T to Product
+      t => Tuple.fromProduct(t.asInstanceOf[Product]).toArray.asInstanceOf[Array[Any]]
+
+    Coder.record[T](typeName, cs, constructor, destruct)
+  }
+
+  def coderSum[T](
+    s: Mirror.SumOf[T],
+    typeName: String,
+    coders: Map[Int, Coder[T]]
+  ): Coder[T] = {
+    if (coders.size <= 2) { // trait has two or fewer implementations
+      val booleanId: Int => Boolean = _ != 0
+      val cs = coders.map { case (key, v) => (booleanId(key), v) }
+      Coder.disjunction[T, Boolean](typeName, cs) { t =>
+        booleanId(s.ordinal(t))
+      }
+    } else {
+      Coder.disjunction[T, Int](typeName, coders) { t =>
+        s.ordinal(t)
+      }
+    }
+  }
+}
+
+trait LowPriorityCoderDerivation {
+  import Derived._
+
+  type Typeclass[T] = Coder[T]
+
+  /**
+   * Derive a Coder for a type T, given that implicit coders for all parameters in the
+   * constructor of T are in scope. For a sealed trait, implicit coders for the parameters
+   * of the constructors of all subtypes should be in scope.
+   *
+   * In case of a missing [[shapeless.LowPriority]] implicit error when calling this method as
+   * [[Coder.gen[Type] ]], it means that Scio is unable to derive a BeamCoder for some parameter
+   * [P] in the constructor of Type. This happens when no implicit Coder instance for type P is
+   * in scope.
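+   * For example, derivation in this hedged sketch fails because no `Coder[Timestamp]`
+   * is in scope (`MyRecord` and `Timestamp` are hypothetical):
+   * {{{
+   *   case class MyRecord(id: Long, ts: Timestamp)
+   *   val coder: Coder[MyRecord] = Coder.gen[MyRecord]
+   * }}}
+   *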
This is fixed by placing an implicit Coder of type P in scope, using + * [[Coder.kryo[P] ]] or defining the Coder manually (see also [[Coder.xmap]]) + */ + inline def gen[T](implicit m: Mirror.Of[T]): Coder[T] = + derived[T] + + inline implicit def derived[T](implicit m: Mirror.Of[T]): Coder[T] = { + inline m match { + case p: Mirror.ProductOf[T] => + val instances = DerivationUtils.summonAllF[Coder, p.MirroredElemTypes].toList.asInstanceOf[List[Coder[Any]]] + val typeName: String = constValue[p.MirroredLabel] // XXX: scala3 - How do I get the FULL class name + val fields = DerivationUtils.mirrorFields[p.MirroredElemLabels] + val cs = fields.zip(instances).toArray + coderProduct[T](p, typeName, cs) + case s: Mirror.SumOf[T] => + val instances = DerivationUtils.summonAllF[Coder, s.MirroredElemTypes].toList + val typeName: String = constValue[s.MirroredLabel] // XXX: scala3 - How do I get the FULL class name + val coders: Map[Int, Coder[T]] = instances.asInstanceOf[List[Coder[T]]].zipWithIndex.map((v, i) => (i, v)).toMap + coderSum[T](s, typeName, coders) + } + } +} diff --git a/scio-core/src/main/scala-3/com/spotify/scio/coders/FallbackCoder.scala b/scio-core/src/main/scala-3/com/spotify/scio/coders/FallbackCoder.scala new file mode 100644 index 0000000000..4ba29dbf2e --- /dev/null +++ b/scio-core/src/main/scala-3/com/spotify/scio/coders/FallbackCoder.scala @@ -0,0 +1,9 @@ +package com.spotify.scio.coders + +import scala.reflect.ClassTag +import com.spotify.scio.coders.macros.FallbackCoderMacros + +trait FallbackCoder { + inline def fallback[T](implicit lp: shapeless.LowPriority): Coder[T] = + ${ FallbackCoderMacros.issueFallbackWarning[T] } +} diff --git a/scio-core/src/main/scala-3/com/spotify/scio/coders/instances/AvroCoderMacros.scala b/scio-core/src/main/scala-3/com/spotify/scio/coders/instances/AvroCoderMacros.scala new file mode 100644 index 0000000000..b7b4ead58e --- /dev/null +++ b/scio-core/src/main/scala-3/com/spotify/scio/coders/instances/AvroCoderMacros.scala @@ -0,0 +1,57 @@ +/* + * Copyright 2019 Spotify AB. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package com.spotify.scio.coders.instances + +import com.spotify.scio.coders.Coder +import scala.reflect.ClassTag +import scala.compiletime._ +import scala.deriving._ +import scala.quoted._ +import org.apache.beam.sdk.coders.AvroCoder +import org.apache.avro.specific.SpecificRecordBase + +private object AvroCodersMacros { + /** Generate a coder which does not serialize the schema and relies exclusively on types. 
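+   *
+   * For a concrete record type R, the quoted expansion below amounts to the following
+   * sketch (R is a stand-in; SpecificRecordBase subclasses have a no-arg constructor):
+   *
+   *   Coder.beam(AvroCoder.of(classOf[R], new R().getSchema))
+   *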
+   */
+  def staticInvokeCoder[T <: SpecificRecordBase](implicit q: Quotes, t: Type[T]): Expr[Coder[T]] = {
+    import quotes.reflect._
+    val Some(tag) = Expr.summon[ClassTag[T]]
+    val tt = TypeTree.of[T]
+    // https://gitter.im/lampepfl/dotty?at=5dbe55a07477946bad1bcbd7
+    val const = tt.symbol.primaryConstructor
+    val inst = New(tt).select(const).appliedToNone.asExprOf[T]
+    '{
+      Coder.beam(
+        AvroCoder.of[T](
+          $tag.runtimeClass.asInstanceOf[Class[T]],
+          $inst.getSchema))
+    }
+  }
+}
+
+private[instances] trait AvroCodersMacros {
+  // XXX: scala3 - this should not need to be transparent, but for some reason
+  // it throws the following exception otherwise:
+  // [error] java.lang.AssertionError: assertion failed: asTerm called on not-a-Term val
+  // [error] scala.runtime.Scala3RunTime$.assertFailed(Scala3RunTime.scala:8)
+  // [error] dotty.tools.dotc.core.Symbols$Symbol.asTerm(Symbols.scala:156)
+  // [error] dotty.tools.dotc.sbt.ExtractAPICollector.apiDefinition(ExtractAPI.scala:338)
+  // [error] dotty.tools.dotc.sbt.ExtractAPICollector.apiDefinitions$$anonfun$1(ExtractAPI.scala:323)
+  // [error] scala.collection.immutable.List.map(List.scala:250)
+  transparent inline implicit def genAvro[T <: SpecificRecordBase]: Any =
+    ${ AvroCodersMacros.staticInvokeCoder[T] }
+}
diff --git a/scio-core/src/main/scala-3/com/spotify/scio/coders/instances/TupleCoders.scala b/scio-core/src/main/scala-3/com/spotify/scio/coders/instances/TupleCoders.scala
new file mode 100644
index 0000000000..31007864a3
--- /dev/null
+++ b/scio-core/src/main/scala-3/com/spotify/scio/coders/instances/TupleCoders.scala
@@ -0,0 +1,4746 @@
+/*
+ * Copyright 2020 Spotify AB.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+// !! generated with tuplecoders.py
+// !! DO NOT EDIT MANUALLY
+// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+
+package com.spotify.scio.coders.instances
+
+import java.io.{InputStream, OutputStream}
+
+import com.spotify.scio.coders.{Coder, CoderStackTrace}
+import org.apache.beam.sdk.coders.Coder.NonDeterministicException
+import org.apache.beam.sdk.coders.{Coder => BCoder, _}
+import org.apache.beam.sdk.util.common.ElementByteSizeObserver
+
+import scala.jdk.CollectionConverters._
+
+final private[coders] class Tuple2Coder[A, B](val ac: BCoder[A], val bc: BCoder[B])
+  extends AtomicCoder[(A, B)] {
+  private[this] val materializationStackTrace: Array[StackTraceElement] = CoderStackTrace.prepare
+
+  @inline def onErrorMsg[TB](msg: => (String, String))(f: => TB): TB =
+    try {
+      f
+    } catch {
+      case e: Exception =>
+        // allow Flink memory management, see WrappedBCoder#catching comment.
+ throw CoderStackTrace.append( + e, + Some( + s"Exception while trying to `${msg._1}` an instance" + + s" of Tuple2: Can't decode field ${msg._2}" + ), + materializationStackTrace + ) + } + + override def encode(value: (A, B), os: OutputStream): Unit = { + onErrorMsg("encode" -> "_1")(ac.encode(value._1, os)) + onErrorMsg("encode" -> "_2")(bc.encode(value._2, os)) + } + override def decode(is: InputStream): (A, B) = + (onErrorMsg("decode" -> "_1")(ac.decode(is)), onErrorMsg("decode" -> "_2")(bc.decode(is))) + + override def toString: String = + s"Tuple2Coder(_1 -> $ac, _2 -> $bc)" + + // delegate methods for determinism and equality checks + + override def verifyDeterministic(): Unit = { + val cs = List("_1" -> ac, "_2" -> bc) + val problems = cs.flatMap { case (label, c) => + try { + c.verifyDeterministic() + Nil + } catch { + case e: NonDeterministicException => + val reason = s"field $label is using non-deterministic $c" + List(reason -> e) + } + } + + problems match { + case (_, e) :: _ => + val reasons = problems.map { case (reason, _) => reason } + throw new NonDeterministicException(this, reasons.asJava, e) + case Nil => + } + } + + override def consistentWithEquals(): Boolean = + ac.consistentWithEquals() && bc.consistentWithEquals() + + override def structuralValue(value: (A, B)): AnyRef = + if (consistentWithEquals()) { + value.asInstanceOf[AnyRef] + } else { + (ac.structuralValue(value._1), bc.structuralValue(value._2)) + } + + // delegate methods for byte size estimation + override def isRegisterByteSizeObserverCheap(value: (A, B)): Boolean = + ac.isRegisterByteSizeObserverCheap(value._1) && bc.isRegisterByteSizeObserverCheap(value._2) + + override def registerByteSizeObserver(value: (A, B), observer: ElementByteSizeObserver): Unit = { + ac.registerByteSizeObserver(value._1, observer) + bc.registerByteSizeObserver(value._2, observer) + } +} + +final private[coders] class Tuple3Coder[A, B, C]( + val ac: BCoder[A], + val bc: BCoder[B], + val cc: BCoder[C] +) extends AtomicCoder[(A, B, C)] { + private[this] val materializationStackTrace: Array[StackTraceElement] = CoderStackTrace.prepare + + @inline def onErrorMsg[TC](msg: => (String, String))(f: => TC): TC = + try { + f + } catch { + case e: Exception => + // allow Flink memory management, see WrappedBCoder#catching comment. 
+ throw CoderStackTrace.append( + e, + Some( + s"Exception while trying to `${msg._1}` an instance" + + s" of Tuple3: Can't decode field ${msg._2}" + ), + materializationStackTrace + ) + } + + override def encode(value: (A, B, C), os: OutputStream): Unit = { + onErrorMsg("encode" -> "_1")(ac.encode(value._1, os)) + onErrorMsg("encode" -> "_2")(bc.encode(value._2, os)) + onErrorMsg("encode" -> "_3")(cc.encode(value._3, os)) + } + override def decode(is: InputStream): (A, B, C) = + ( + onErrorMsg("decode" -> "_1")(ac.decode(is)), + onErrorMsg("decode" -> "_2")(bc.decode(is)), + onErrorMsg("decode" -> "_3")(cc.decode(is)) + ) + + override def toString: String = + s"Tuple3Coder(_1 -> $ac, _2 -> $bc, _3 -> $cc)" + + // delegate methods for determinism and equality checks + + override def verifyDeterministic(): Unit = { + val cs = List("_1" -> ac, "_2" -> bc, "_3" -> cc) + val problems = cs.flatMap { case (label, c) => + try { + c.verifyDeterministic() + Nil + } catch { + case e: NonDeterministicException => + val reason = s"field $label is using non-deterministic $c" + List(reason -> e) + } + } + + problems match { + case (_, e) :: _ => + val reasons = problems.map { case (reason, _) => reason } + throw new NonDeterministicException(this, reasons.asJava, e) + case Nil => + } + } + + override def consistentWithEquals(): Boolean = + ac.consistentWithEquals() && bc.consistentWithEquals() && cc.consistentWithEquals() + + override def structuralValue(value: (A, B, C)): AnyRef = + if (consistentWithEquals()) { + value.asInstanceOf[AnyRef] + } else { + (ac.structuralValue(value._1), bc.structuralValue(value._2), cc.structuralValue(value._3)) + } + + // delegate methods for byte size estimation + override def isRegisterByteSizeObserverCheap(value: (A, B, C)): Boolean = + ac.isRegisterByteSizeObserverCheap(value._1) && bc.isRegisterByteSizeObserverCheap( + value._2 + ) && cc.isRegisterByteSizeObserverCheap(value._3) + + override def registerByteSizeObserver( + value: (A, B, C), + observer: ElementByteSizeObserver + ): Unit = { + ac.registerByteSizeObserver(value._1, observer) + bc.registerByteSizeObserver(value._2, observer) + cc.registerByteSizeObserver(value._3, observer) + } +} + +final private[coders] class Tuple4Coder[A, B, C, D]( + val ac: BCoder[A], + val bc: BCoder[B], + val cc: BCoder[C], + val dc: BCoder[D] +) extends AtomicCoder[(A, B, C, D)] { + private[this] val materializationStackTrace: Array[StackTraceElement] = CoderStackTrace.prepare + + @inline def onErrorMsg[TD](msg: => (String, String))(f: => TD): TD = + try { + f + } catch { + case e: Exception => + // allow Flink memory management, see WrappedBCoder#catching comment. 
+ throw CoderStackTrace.append( + e, + Some( + s"Exception while trying to `${msg._1}` an instance" + + s" of Tuple4: Can't decode field ${msg._2}" + ), + materializationStackTrace + ) + } + + override def encode(value: (A, B, C, D), os: OutputStream): Unit = { + onErrorMsg("encode" -> "_1")(ac.encode(value._1, os)) + onErrorMsg("encode" -> "_2")(bc.encode(value._2, os)) + onErrorMsg("encode" -> "_3")(cc.encode(value._3, os)) + onErrorMsg("encode" -> "_4")(dc.encode(value._4, os)) + } + override def decode(is: InputStream): (A, B, C, D) = + ( + onErrorMsg("decode" -> "_1")(ac.decode(is)), + onErrorMsg("decode" -> "_2")(bc.decode(is)), + onErrorMsg("decode" -> "_3")(cc.decode(is)), + onErrorMsg("decode" -> "_4")(dc.decode(is)) + ) + + override def toString: String = + s"Tuple4Coder(_1 -> $ac, _2 -> $bc, _3 -> $cc, _4 -> $dc)" + + // delegate methods for determinism and equality checks + + override def verifyDeterministic(): Unit = { + val cs = List("_1" -> ac, "_2" -> bc, "_3" -> cc, "_4" -> dc) + val problems = cs.flatMap { case (label, c) => + try { + c.verifyDeterministic() + Nil + } catch { + case e: NonDeterministicException => + val reason = s"field $label is using non-deterministic $c" + List(reason -> e) + } + } + + problems match { + case (_, e) :: _ => + val reasons = problems.map { case (reason, _) => reason } + throw new NonDeterministicException(this, reasons.asJava, e) + case Nil => + } + } + + override def consistentWithEquals(): Boolean = + ac.consistentWithEquals() && bc.consistentWithEquals() && cc.consistentWithEquals() && dc + .consistentWithEquals() + + override def structuralValue(value: (A, B, C, D)): AnyRef = + if (consistentWithEquals()) { + value.asInstanceOf[AnyRef] + } else { + ( + ac.structuralValue(value._1), + bc.structuralValue(value._2), + cc.structuralValue(value._3), + dc.structuralValue(value._4) + ) + } + + // delegate methods for byte size estimation + override def isRegisterByteSizeObserverCheap(value: (A, B, C, D)): Boolean = + ac.isRegisterByteSizeObserverCheap(value._1) && bc.isRegisterByteSizeObserverCheap( + value._2 + ) && cc.isRegisterByteSizeObserverCheap(value._3) && dc.isRegisterByteSizeObserverCheap( + value._4 + ) + + override def registerByteSizeObserver( + value: (A, B, C, D), + observer: ElementByteSizeObserver + ): Unit = { + ac.registerByteSizeObserver(value._1, observer) + bc.registerByteSizeObserver(value._2, observer) + cc.registerByteSizeObserver(value._3, observer) + dc.registerByteSizeObserver(value._4, observer) + } +} + +final private[coders] class Tuple5Coder[A, B, C, D, E]( + val ac: BCoder[A], + val bc: BCoder[B], + val cc: BCoder[C], + val dc: BCoder[D], + val ec: BCoder[E] +) extends AtomicCoder[(A, B, C, D, E)] { + private[this] val materializationStackTrace: Array[StackTraceElement] = CoderStackTrace.prepare + + @inline def onErrorMsg[TE](msg: => (String, String))(f: => TE): TE = + try { + f + } catch { + case e: Exception => + // allow Flink memory management, see WrappedBCoder#catching comment. 
+ throw CoderStackTrace.append( + e, + Some( + s"Exception while trying to `${msg._1}` an instance" + + s" of Tuple5: Can't decode field ${msg._2}" + ), + materializationStackTrace + ) + } + + override def encode(value: (A, B, C, D, E), os: OutputStream): Unit = { + onErrorMsg("encode" -> "_1")(ac.encode(value._1, os)) + onErrorMsg("encode" -> "_2")(bc.encode(value._2, os)) + onErrorMsg("encode" -> "_3")(cc.encode(value._3, os)) + onErrorMsg("encode" -> "_4")(dc.encode(value._4, os)) + onErrorMsg("encode" -> "_5")(ec.encode(value._5, os)) + } + override def decode(is: InputStream): (A, B, C, D, E) = + ( + onErrorMsg("decode" -> "_1")(ac.decode(is)), + onErrorMsg("decode" -> "_2")(bc.decode(is)), + onErrorMsg("decode" -> "_3")(cc.decode(is)), + onErrorMsg("decode" -> "_4")(dc.decode(is)), + onErrorMsg("decode" -> "_5")(ec.decode(is)) + ) + + override def toString: String = + s"Tuple5Coder(_1 -> $ac, _2 -> $bc, _3 -> $cc, _4 -> $dc, _5 -> $ec)" + + // delegate methods for determinism and equality checks + + override def verifyDeterministic(): Unit = { + val cs = List("_1" -> ac, "_2" -> bc, "_3" -> cc, "_4" -> dc, "_5" -> ec) + val problems = cs.flatMap { case (label, c) => + try { + c.verifyDeterministic() + Nil + } catch { + case e: NonDeterministicException => + val reason = s"field $label is using non-deterministic $c" + List(reason -> e) + } + } + + problems match { + case (_, e) :: _ => + val reasons = problems.map { case (reason, _) => reason } + throw new NonDeterministicException(this, reasons.asJava, e) + case Nil => + } + } + + override def consistentWithEquals(): Boolean = + ac.consistentWithEquals() && bc.consistentWithEquals() && cc.consistentWithEquals() && dc + .consistentWithEquals() && ec.consistentWithEquals() + + override def structuralValue(value: (A, B, C, D, E)): AnyRef = + if (consistentWithEquals()) { + value.asInstanceOf[AnyRef] + } else { + ( + ac.structuralValue(value._1), + bc.structuralValue(value._2), + cc.structuralValue(value._3), + dc.structuralValue(value._4), + ec.structuralValue(value._5) + ) + } + + // delegate methods for byte size estimation + override def isRegisterByteSizeObserverCheap(value: (A, B, C, D, E)): Boolean = + ac.isRegisterByteSizeObserverCheap(value._1) && bc.isRegisterByteSizeObserverCheap( + value._2 + ) && cc.isRegisterByteSizeObserverCheap(value._3) && dc.isRegisterByteSizeObserverCheap( + value._4 + ) && ec.isRegisterByteSizeObserverCheap(value._5) + + override def registerByteSizeObserver( + value: (A, B, C, D, E), + observer: ElementByteSizeObserver + ): Unit = { + ac.registerByteSizeObserver(value._1, observer) + bc.registerByteSizeObserver(value._2, observer) + cc.registerByteSizeObserver(value._3, observer) + dc.registerByteSizeObserver(value._4, observer) + ec.registerByteSizeObserver(value._5, observer) + } +} + +final private[coders] class Tuple6Coder[A, B, C, D, E, G]( + val ac: BCoder[A], + val bc: BCoder[B], + val cc: BCoder[C], + val dc: BCoder[D], + val ec: BCoder[E], + val gc: BCoder[G] +) extends AtomicCoder[(A, B, C, D, E, G)] { + private[this] val materializationStackTrace: Array[StackTraceElement] = CoderStackTrace.prepare + + @inline def onErrorMsg[TF](msg: => (String, String))(f: => TF): TF = + try { + f + } catch { + case e: Exception => + // allow Flink memory management, see WrappedBCoder#catching comment. 
+ throw CoderStackTrace.append( + e, + Some( + s"Exception while trying to `${msg._1}` an instance" + + s" of Tuple6: Can't decode field ${msg._2}" + ), + materializationStackTrace + ) + } + + override def encode(value: (A, B, C, D, E, G), os: OutputStream): Unit = { + onErrorMsg("encode" -> "_1")(ac.encode(value._1, os)) + onErrorMsg("encode" -> "_2")(bc.encode(value._2, os)) + onErrorMsg("encode" -> "_3")(cc.encode(value._3, os)) + onErrorMsg("encode" -> "_4")(dc.encode(value._4, os)) + onErrorMsg("encode" -> "_5")(ec.encode(value._5, os)) + onErrorMsg("encode" -> "_6")(gc.encode(value._6, os)) + } + override def decode(is: InputStream): (A, B, C, D, E, G) = + ( + onErrorMsg("decode" -> "_1")(ac.decode(is)), + onErrorMsg("decode" -> "_2")(bc.decode(is)), + onErrorMsg("decode" -> "_3")(cc.decode(is)), + onErrorMsg("decode" -> "_4")(dc.decode(is)), + onErrorMsg("decode" -> "_5")(ec.decode(is)), + onErrorMsg("decode" -> "_6")(gc.decode(is)) + ) + + override def toString: String = + s"Tuple6Coder(_1 -> $ac, _2 -> $bc, _3 -> $cc, _4 -> $dc, _5 -> $ec, _6 -> $gc)" + + // delegate methods for determinism and equality checks + + override def verifyDeterministic(): Unit = { + val cs = List("_1" -> ac, "_2" -> bc, "_3" -> cc, "_4" -> dc, "_5" -> ec, "_6" -> gc) + val problems = cs.flatMap { case (label, c) => + try { + c.verifyDeterministic() + Nil + } catch { + case e: NonDeterministicException => + val reason = s"field $label is using non-deterministic $c" + List(reason -> e) + } + } + + problems match { + case (_, e) :: _ => + val reasons = problems.map { case (reason, _) => reason } + throw new NonDeterministicException(this, reasons.asJava, e) + case Nil => + } + } + + override def consistentWithEquals(): Boolean = + ac.consistentWithEquals() && bc.consistentWithEquals() && cc.consistentWithEquals() && dc + .consistentWithEquals() && ec.consistentWithEquals() && gc.consistentWithEquals() + + override def structuralValue(value: (A, B, C, D, E, G)): AnyRef = + if (consistentWithEquals()) { + value.asInstanceOf[AnyRef] + } else { + ( + ac.structuralValue(value._1), + bc.structuralValue(value._2), + cc.structuralValue(value._3), + dc.structuralValue(value._4), + ec.structuralValue(value._5), + gc.structuralValue(value._6) + ) + } + + // delegate methods for byte size estimation + override def isRegisterByteSizeObserverCheap(value: (A, B, C, D, E, G)): Boolean = + ac.isRegisterByteSizeObserverCheap(value._1) && bc.isRegisterByteSizeObserverCheap( + value._2 + ) && cc.isRegisterByteSizeObserverCheap(value._3) && dc.isRegisterByteSizeObserverCheap( + value._4 + ) && ec.isRegisterByteSizeObserverCheap(value._5) && gc.isRegisterByteSizeObserverCheap( + value._6 + ) + + override def registerByteSizeObserver( + value: (A, B, C, D, E, G), + observer: ElementByteSizeObserver + ): Unit = { + ac.registerByteSizeObserver(value._1, observer) + bc.registerByteSizeObserver(value._2, observer) + cc.registerByteSizeObserver(value._3, observer) + dc.registerByteSizeObserver(value._4, observer) + ec.registerByteSizeObserver(value._5, observer) + gc.registerByteSizeObserver(value._6, observer) + } +} + +final private[coders] class Tuple7Coder[A, B, C, D, E, G, H]( + val ac: BCoder[A], + val bc: BCoder[B], + val cc: BCoder[C], + val dc: BCoder[D], + val ec: BCoder[E], + val gc: BCoder[G], + val hc: BCoder[H] +) extends AtomicCoder[(A, B, C, D, E, G, H)] { + private[this] val materializationStackTrace: Array[StackTraceElement] = CoderStackTrace.prepare + + @inline def onErrorMsg[TG](msg: => (String, String))(f: => 
TG): TG = + try { + f + } catch { + case e: Exception => + // allow Flink memory management, see WrappedBCoder#catching comment. + throw CoderStackTrace.append( + e, + Some( + s"Exception while trying to `${msg._1}` an instance" + + s" of Tuple7: Can't decode field ${msg._2}" + ), + materializationStackTrace + ) + } + + override def encode(value: (A, B, C, D, E, G, H), os: OutputStream): Unit = { + onErrorMsg("encode" -> "_1")(ac.encode(value._1, os)) + onErrorMsg("encode" -> "_2")(bc.encode(value._2, os)) + onErrorMsg("encode" -> "_3")(cc.encode(value._3, os)) + onErrorMsg("encode" -> "_4")(dc.encode(value._4, os)) + onErrorMsg("encode" -> "_5")(ec.encode(value._5, os)) + onErrorMsg("encode" -> "_6")(gc.encode(value._6, os)) + onErrorMsg("encode" -> "_7")(hc.encode(value._7, os)) + } + override def decode(is: InputStream): (A, B, C, D, E, G, H) = + ( + onErrorMsg("decode" -> "_1")(ac.decode(is)), + onErrorMsg("decode" -> "_2")(bc.decode(is)), + onErrorMsg("decode" -> "_3")(cc.decode(is)), + onErrorMsg("decode" -> "_4")(dc.decode(is)), + onErrorMsg("decode" -> "_5")(ec.decode(is)), + onErrorMsg("decode" -> "_6")(gc.decode(is)), + onErrorMsg("decode" -> "_7")(hc.decode(is)) + ) + + override def toString: String = + s"Tuple7Coder(_1 -> $ac, _2 -> $bc, _3 -> $cc, _4 -> $dc, _5 -> $ec, _6 -> $gc, _7 -> $hc)" + + // delegate methods for determinism and equality checks + + override def verifyDeterministic(): Unit = { + val cs = + List("_1" -> ac, "_2" -> bc, "_3" -> cc, "_4" -> dc, "_5" -> ec, "_6" -> gc, "_7" -> hc) + val problems = cs.flatMap { case (label, c) => + try { + c.verifyDeterministic() + Nil + } catch { + case e: NonDeterministicException => + val reason = s"field $label is using non-deterministic $c" + List(reason -> e) + } + } + + problems match { + case (_, e) :: _ => + val reasons = problems.map { case (reason, _) => reason } + throw new NonDeterministicException(this, reasons.asJava, e) + case Nil => + } + } + + override def consistentWithEquals(): Boolean = + ac.consistentWithEquals() && bc.consistentWithEquals() && cc.consistentWithEquals() && dc + .consistentWithEquals() && ec.consistentWithEquals() && gc.consistentWithEquals() && hc + .consistentWithEquals() + + override def structuralValue(value: (A, B, C, D, E, G, H)): AnyRef = + if (consistentWithEquals()) { + value.asInstanceOf[AnyRef] + } else { + ( + ac.structuralValue(value._1), + bc.structuralValue(value._2), + cc.structuralValue(value._3), + dc.structuralValue(value._4), + ec.structuralValue(value._5), + gc.structuralValue(value._6), + hc.structuralValue(value._7) + ) + } + + // delegate methods for byte size estimation + override def isRegisterByteSizeObserverCheap(value: (A, B, C, D, E, G, H)): Boolean = + ac.isRegisterByteSizeObserverCheap(value._1) && bc.isRegisterByteSizeObserverCheap( + value._2 + ) && cc.isRegisterByteSizeObserverCheap(value._3) && dc.isRegisterByteSizeObserverCheap( + value._4 + ) && ec.isRegisterByteSizeObserverCheap(value._5) && gc.isRegisterByteSizeObserverCheap( + value._6 + ) && hc.isRegisterByteSizeObserverCheap(value._7) + + override def registerByteSizeObserver( + value: (A, B, C, D, E, G, H), + observer: ElementByteSizeObserver + ): Unit = { + ac.registerByteSizeObserver(value._1, observer) + bc.registerByteSizeObserver(value._2, observer) + cc.registerByteSizeObserver(value._3, observer) + dc.registerByteSizeObserver(value._4, observer) + ec.registerByteSizeObserver(value._5, observer) + gc.registerByteSizeObserver(value._6, observer) + hc.registerByteSizeObserver(value._7, 
observer) + } +} + +final private[coders] class Tuple8Coder[A, B, C, D, E, G, H, I]( + val ac: BCoder[A], + val bc: BCoder[B], + val cc: BCoder[C], + val dc: BCoder[D], + val ec: BCoder[E], + val gc: BCoder[G], + val hc: BCoder[H], + val ic: BCoder[I] +) extends AtomicCoder[(A, B, C, D, E, G, H, I)] { + private[this] val materializationStackTrace: Array[StackTraceElement] = CoderStackTrace.prepare + + @inline def onErrorMsg[TH](msg: => (String, String))(f: => TH): TH = + try { + f + } catch { + case e: Exception => + // allow Flink memory management, see WrappedBCoder#catching comment. + throw CoderStackTrace.append( + e, + Some( + s"Exception while trying to `${msg._1}` an instance" + + s" of Tuple8: Can't decode field ${msg._2}" + ), + materializationStackTrace + ) + } + + override def encode(value: (A, B, C, D, E, G, H, I), os: OutputStream): Unit = { + onErrorMsg("encode" -> "_1")(ac.encode(value._1, os)) + onErrorMsg("encode" -> "_2")(bc.encode(value._2, os)) + onErrorMsg("encode" -> "_3")(cc.encode(value._3, os)) + onErrorMsg("encode" -> "_4")(dc.encode(value._4, os)) + onErrorMsg("encode" -> "_5")(ec.encode(value._5, os)) + onErrorMsg("encode" -> "_6")(gc.encode(value._6, os)) + onErrorMsg("encode" -> "_7")(hc.encode(value._7, os)) + onErrorMsg("encode" -> "_8")(ic.encode(value._8, os)) + } + override def decode(is: InputStream): (A, B, C, D, E, G, H, I) = + ( + onErrorMsg("decode" -> "_1")(ac.decode(is)), + onErrorMsg("decode" -> "_2")(bc.decode(is)), + onErrorMsg("decode" -> "_3")(cc.decode(is)), + onErrorMsg("decode" -> "_4")(dc.decode(is)), + onErrorMsg("decode" -> "_5")(ec.decode(is)), + onErrorMsg("decode" -> "_6")(gc.decode(is)), + onErrorMsg("decode" -> "_7")(hc.decode(is)), + onErrorMsg("decode" -> "_8")(ic.decode(is)) + ) + + override def toString: String = + s"Tuple8Coder(_1 -> $ac, _2 -> $bc, _3 -> $cc, _4 -> $dc, _5 -> $ec, _6 -> $gc, _7 -> $hc, _8 -> $ic)" + + // delegate methods for determinism and equality checks + + override def verifyDeterministic(): Unit = { + val cs = List( + "_1" -> ac, + "_2" -> bc, + "_3" -> cc, + "_4" -> dc, + "_5" -> ec, + "_6" -> gc, + "_7" -> hc, + "_8" -> ic + ) + val problems = cs.flatMap { case (label, c) => + try { + c.verifyDeterministic() + Nil + } catch { + case e: NonDeterministicException => + val reason = s"field $label is using non-deterministic $c" + List(reason -> e) + } + } + + problems match { + case (_, e) :: _ => + val reasons = problems.map { case (reason, _) => reason } + throw new NonDeterministicException(this, reasons.asJava, e) + case Nil => + } + } + + override def consistentWithEquals(): Boolean = + ac.consistentWithEquals() && bc.consistentWithEquals() && cc.consistentWithEquals() && dc + .consistentWithEquals() && ec.consistentWithEquals() && gc.consistentWithEquals() && hc + .consistentWithEquals() && ic.consistentWithEquals() + + override def structuralValue(value: (A, B, C, D, E, G, H, I)): AnyRef = + if (consistentWithEquals()) { + value.asInstanceOf[AnyRef] + } else { + ( + ac.structuralValue(value._1), + bc.structuralValue(value._2), + cc.structuralValue(value._3), + dc.structuralValue(value._4), + ec.structuralValue(value._5), + gc.structuralValue(value._6), + hc.structuralValue(value._7), + ic.structuralValue(value._8) + ) + } + + // delegate methods for byte size estimation + override def isRegisterByteSizeObserverCheap(value: (A, B, C, D, E, G, H, I)): Boolean = + ac.isRegisterByteSizeObserverCheap(value._1) && bc.isRegisterByteSizeObserverCheap( + value._2 + ) && 
cc.isRegisterByteSizeObserverCheap(value._3) && dc.isRegisterByteSizeObserverCheap( + value._4 + ) && ec.isRegisterByteSizeObserverCheap(value._5) && gc.isRegisterByteSizeObserverCheap( + value._6 + ) && hc.isRegisterByteSizeObserverCheap(value._7) && ic.isRegisterByteSizeObserverCheap( + value._8 + ) + + override def registerByteSizeObserver( + value: (A, B, C, D, E, G, H, I), + observer: ElementByteSizeObserver + ): Unit = { + ac.registerByteSizeObserver(value._1, observer) + bc.registerByteSizeObserver(value._2, observer) + cc.registerByteSizeObserver(value._3, observer) + dc.registerByteSizeObserver(value._4, observer) + ec.registerByteSizeObserver(value._5, observer) + gc.registerByteSizeObserver(value._6, observer) + hc.registerByteSizeObserver(value._7, observer) + ic.registerByteSizeObserver(value._8, observer) + } +} + +final private[coders] class Tuple9Coder[A, B, C, D, E, G, H, I, J]( + val ac: BCoder[A], + val bc: BCoder[B], + val cc: BCoder[C], + val dc: BCoder[D], + val ec: BCoder[E], + val gc: BCoder[G], + val hc: BCoder[H], + val ic: BCoder[I], + val jc: BCoder[J] +) extends AtomicCoder[(A, B, C, D, E, G, H, I, J)] { + private[this] val materializationStackTrace: Array[StackTraceElement] = CoderStackTrace.prepare + + @inline def onErrorMsg[TI](msg: => (String, String))(f: => TI): TI = + try { + f + } catch { + case e: Exception => + // allow Flink memory management, see WrappedBCoder#catching comment. + throw CoderStackTrace.append( + e, + Some( + s"Exception while trying to `${msg._1}` an instance" + + s" of Tuple9: Can't decode field ${msg._2}" + ), + materializationStackTrace + ) + } + + override def encode(value: (A, B, C, D, E, G, H, I, J), os: OutputStream): Unit = { + onErrorMsg("encode" -> "_1")(ac.encode(value._1, os)) + onErrorMsg("encode" -> "_2")(bc.encode(value._2, os)) + onErrorMsg("encode" -> "_3")(cc.encode(value._3, os)) + onErrorMsg("encode" -> "_4")(dc.encode(value._4, os)) + onErrorMsg("encode" -> "_5")(ec.encode(value._5, os)) + onErrorMsg("encode" -> "_6")(gc.encode(value._6, os)) + onErrorMsg("encode" -> "_7")(hc.encode(value._7, os)) + onErrorMsg("encode" -> "_8")(ic.encode(value._8, os)) + onErrorMsg("encode" -> "_9")(jc.encode(value._9, os)) + } + override def decode(is: InputStream): (A, B, C, D, E, G, H, I, J) = + ( + onErrorMsg("decode" -> "_1")(ac.decode(is)), + onErrorMsg("decode" -> "_2")(bc.decode(is)), + onErrorMsg("decode" -> "_3")(cc.decode(is)), + onErrorMsg("decode" -> "_4")(dc.decode(is)), + onErrorMsg("decode" -> "_5")(ec.decode(is)), + onErrorMsg("decode" -> "_6")(gc.decode(is)), + onErrorMsg("decode" -> "_7")(hc.decode(is)), + onErrorMsg("decode" -> "_8")(ic.decode(is)), + onErrorMsg("decode" -> "_9")(jc.decode(is)) + ) + + override def toString: String = + s"Tuple9Coder(_1 -> $ac, _2 -> $bc, _3 -> $cc, _4 -> $dc, _5 -> $ec, _6 -> $gc, _7 -> $hc, _8 -> $ic, _9 -> $jc)" + + // delegate methods for determinism and equality checks + + override def verifyDeterministic(): Unit = { + val cs = List( + "_1" -> ac, + "_2" -> bc, + "_3" -> cc, + "_4" -> dc, + "_5" -> ec, + "_6" -> gc, + "_7" -> hc, + "_8" -> ic, + "_9" -> jc + ) + val problems = cs.flatMap { case (label, c) => + try { + c.verifyDeterministic() + Nil + } catch { + case e: NonDeterministicException => + val reason = s"field $label is using non-deterministic $c" + List(reason -> e) + } + } + + problems match { + case (_, e) :: _ => + val reasons = problems.map { case (reason, _) => reason } + throw new NonDeterministicException(this, reasons.asJava, e) + case Nil => + } + } + + 
override def consistentWithEquals(): Boolean = + ac.consistentWithEquals() && bc.consistentWithEquals() && cc.consistentWithEquals() && dc + .consistentWithEquals() && ec.consistentWithEquals() && gc.consistentWithEquals() && hc + .consistentWithEquals() && ic.consistentWithEquals() && jc.consistentWithEquals() + + override def structuralValue(value: (A, B, C, D, E, G, H, I, J)): AnyRef = + if (consistentWithEquals()) { + value.asInstanceOf[AnyRef] + } else { + ( + ac.structuralValue(value._1), + bc.structuralValue(value._2), + cc.structuralValue(value._3), + dc.structuralValue(value._4), + ec.structuralValue(value._5), + gc.structuralValue(value._6), + hc.structuralValue(value._7), + ic.structuralValue(value._8), + jc.structuralValue(value._9) + ) + } + + // delegate methods for byte size estimation + override def isRegisterByteSizeObserverCheap(value: (A, B, C, D, E, G, H, I, J)): Boolean = + ac.isRegisterByteSizeObserverCheap(value._1) && bc.isRegisterByteSizeObserverCheap( + value._2 + ) && cc.isRegisterByteSizeObserverCheap(value._3) && dc.isRegisterByteSizeObserverCheap( + value._4 + ) && ec.isRegisterByteSizeObserverCheap(value._5) && gc.isRegisterByteSizeObserverCheap( + value._6 + ) && hc.isRegisterByteSizeObserverCheap(value._7) && ic.isRegisterByteSizeObserverCheap( + value._8 + ) && jc.isRegisterByteSizeObserverCheap(value._9) + + override def registerByteSizeObserver( + value: (A, B, C, D, E, G, H, I, J), + observer: ElementByteSizeObserver + ): Unit = { + ac.registerByteSizeObserver(value._1, observer) + bc.registerByteSizeObserver(value._2, observer) + cc.registerByteSizeObserver(value._3, observer) + dc.registerByteSizeObserver(value._4, observer) + ec.registerByteSizeObserver(value._5, observer) + gc.registerByteSizeObserver(value._6, observer) + hc.registerByteSizeObserver(value._7, observer) + ic.registerByteSizeObserver(value._8, observer) + jc.registerByteSizeObserver(value._9, observer) + } +} + +final private[coders] class Tuple10Coder[A, B, C, D, E, G, H, I, J, K]( + val ac: BCoder[A], + val bc: BCoder[B], + val cc: BCoder[C], + val dc: BCoder[D], + val ec: BCoder[E], + val gc: BCoder[G], + val hc: BCoder[H], + val ic: BCoder[I], + val jc: BCoder[J], + val kc: BCoder[K] +) extends AtomicCoder[(A, B, C, D, E, G, H, I, J, K)] { + private[this] val materializationStackTrace: Array[StackTraceElement] = CoderStackTrace.prepare + + @inline def onErrorMsg[TJ](msg: => (String, String))(f: => TJ): TJ = + try { + f + } catch { + case e: Exception => + // allow Flink memory management, see WrappedBCoder#catching comment. 
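+        // `CoderStackTrace.append` returns `e` annotated with the message below
+        // and with `materializationStackTrace` (captured when this coder was
+        // created), so a failure can be traced back to the pipeline site that
+        // built the coder, not only to the worker that hit it. Note the wording
+        // is always "Can't decode field", even when `msg._1` is "encode";
+        // `msg._2` is what identifies the failing tuple position.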
+ throw CoderStackTrace.append( + e, + Some( + s"Exception while trying to `${msg._1}` an instance" + + s" of Tuple10: Can't decode field ${msg._2}" + ), + materializationStackTrace + ) + } + + override def encode(value: (A, B, C, D, E, G, H, I, J, K), os: OutputStream): Unit = { + onErrorMsg("encode" -> "_1")(ac.encode(value._1, os)) + onErrorMsg("encode" -> "_2")(bc.encode(value._2, os)) + onErrorMsg("encode" -> "_3")(cc.encode(value._3, os)) + onErrorMsg("encode" -> "_4")(dc.encode(value._4, os)) + onErrorMsg("encode" -> "_5")(ec.encode(value._5, os)) + onErrorMsg("encode" -> "_6")(gc.encode(value._6, os)) + onErrorMsg("encode" -> "_7")(hc.encode(value._7, os)) + onErrorMsg("encode" -> "_8")(ic.encode(value._8, os)) + onErrorMsg("encode" -> "_9")(jc.encode(value._9, os)) + onErrorMsg("encode" -> "_10")(kc.encode(value._10, os)) + } + override def decode(is: InputStream): (A, B, C, D, E, G, H, I, J, K) = + ( + onErrorMsg("decode" -> "_1")(ac.decode(is)), + onErrorMsg("decode" -> "_2")(bc.decode(is)), + onErrorMsg("decode" -> "_3")(cc.decode(is)), + onErrorMsg("decode" -> "_4")(dc.decode(is)), + onErrorMsg("decode" -> "_5")(ec.decode(is)), + onErrorMsg("decode" -> "_6")(gc.decode(is)), + onErrorMsg("decode" -> "_7")(hc.decode(is)), + onErrorMsg("decode" -> "_8")(ic.decode(is)), + onErrorMsg("decode" -> "_9")(jc.decode(is)), + onErrorMsg("decode" -> "_10")(kc.decode(is)) + ) + + override def toString: String = + s"Tuple10Coder(_1 -> $ac, _2 -> $bc, _3 -> $cc, _4 -> $dc, _5 -> $ec, _6 -> $gc, _7 -> $hc, _8 -> $ic, _9 -> $jc, _10 -> $kc)" + + // delegate methods for determinism and equality checks + + override def verifyDeterministic(): Unit = { + val cs = List( + "_1" -> ac, + "_2" -> bc, + "_3" -> cc, + "_4" -> dc, + "_5" -> ec, + "_6" -> gc, + "_7" -> hc, + "_8" -> ic, + "_9" -> jc, + "_10" -> kc + ) + val problems = cs.flatMap { case (label, c) => + try { + c.verifyDeterministic() + Nil + } catch { + case e: NonDeterministicException => + val reason = s"field $label is using non-deterministic $c" + List(reason -> e) + } + } + + problems match { + case (_, e) :: _ => + val reasons = problems.map { case (reason, _) => reason } + throw new NonDeterministicException(this, reasons.asJava, e) + case Nil => + } + } + + override def consistentWithEquals(): Boolean = + ac.consistentWithEquals() && bc.consistentWithEquals() && cc.consistentWithEquals() && dc + .consistentWithEquals() && ec.consistentWithEquals() && gc.consistentWithEquals() && hc + .consistentWithEquals() && ic.consistentWithEquals() && jc.consistentWithEquals() && kc + .consistentWithEquals() + + override def structuralValue(value: (A, B, C, D, E, G, H, I, J, K)): AnyRef = + if (consistentWithEquals()) { + value.asInstanceOf[AnyRef] + } else { + ( + ac.structuralValue(value._1), + bc.structuralValue(value._2), + cc.structuralValue(value._3), + dc.structuralValue(value._4), + ec.structuralValue(value._5), + gc.structuralValue(value._6), + hc.structuralValue(value._7), + ic.structuralValue(value._8), + jc.structuralValue(value._9), + kc.structuralValue(value._10) + ) + } + + // delegate methods for byte size estimation + override def isRegisterByteSizeObserverCheap(value: (A, B, C, D, E, G, H, I, J, K)): Boolean = + ac.isRegisterByteSizeObserverCheap(value._1) && bc.isRegisterByteSizeObserverCheap( + value._2 + ) && cc.isRegisterByteSizeObserverCheap(value._3) && dc.isRegisterByteSizeObserverCheap( + value._4 + ) && ec.isRegisterByteSizeObserverCheap(value._5) && gc.isRegisterByteSizeObserverCheap( + value._6 + ) && 
hc.isRegisterByteSizeObserverCheap(value._7) && ic.isRegisterByteSizeObserverCheap( + value._8 + ) && jc.isRegisterByteSizeObserverCheap(value._9) && kc.isRegisterByteSizeObserverCheap( + value._10 + ) + + override def registerByteSizeObserver( + value: (A, B, C, D, E, G, H, I, J, K), + observer: ElementByteSizeObserver + ): Unit = { + ac.registerByteSizeObserver(value._1, observer) + bc.registerByteSizeObserver(value._2, observer) + cc.registerByteSizeObserver(value._3, observer) + dc.registerByteSizeObserver(value._4, observer) + ec.registerByteSizeObserver(value._5, observer) + gc.registerByteSizeObserver(value._6, observer) + hc.registerByteSizeObserver(value._7, observer) + ic.registerByteSizeObserver(value._8, observer) + jc.registerByteSizeObserver(value._9, observer) + kc.registerByteSizeObserver(value._10, observer) + } +} + +final private[coders] class Tuple11Coder[A, B, C, D, E, G, H, I, J, K, L]( + val ac: BCoder[A], + val bc: BCoder[B], + val cc: BCoder[C], + val dc: BCoder[D], + val ec: BCoder[E], + val gc: BCoder[G], + val hc: BCoder[H], + val ic: BCoder[I], + val jc: BCoder[J], + val kc: BCoder[K], + val lc: BCoder[L] +) extends AtomicCoder[(A, B, C, D, E, G, H, I, J, K, L)] { + private[this] val materializationStackTrace: Array[StackTraceElement] = CoderStackTrace.prepare + + @inline def onErrorMsg[TK](msg: => (String, String))(f: => TK): TK = + try { + f + } catch { + case e: Exception => + // allow Flink memory management, see WrappedBCoder#catching comment. + throw CoderStackTrace.append( + e, + Some( + s"Exception while trying to `${msg._1}` an instance" + + s" of Tuple11: Can't decode field ${msg._2}" + ), + materializationStackTrace + ) + } + + override def encode(value: (A, B, C, D, E, G, H, I, J, K, L), os: OutputStream): Unit = { + onErrorMsg("encode" -> "_1")(ac.encode(value._1, os)) + onErrorMsg("encode" -> "_2")(bc.encode(value._2, os)) + onErrorMsg("encode" -> "_3")(cc.encode(value._3, os)) + onErrorMsg("encode" -> "_4")(dc.encode(value._4, os)) + onErrorMsg("encode" -> "_5")(ec.encode(value._5, os)) + onErrorMsg("encode" -> "_6")(gc.encode(value._6, os)) + onErrorMsg("encode" -> "_7")(hc.encode(value._7, os)) + onErrorMsg("encode" -> "_8")(ic.encode(value._8, os)) + onErrorMsg("encode" -> "_9")(jc.encode(value._9, os)) + onErrorMsg("encode" -> "_10")(kc.encode(value._10, os)) + onErrorMsg("encode" -> "_11")(lc.encode(value._11, os)) + } + override def decode(is: InputStream): (A, B, C, D, E, G, H, I, J, K, L) = + ( + onErrorMsg("decode" -> "_1")(ac.decode(is)), + onErrorMsg("decode" -> "_2")(bc.decode(is)), + onErrorMsg("decode" -> "_3")(cc.decode(is)), + onErrorMsg("decode" -> "_4")(dc.decode(is)), + onErrorMsg("decode" -> "_5")(ec.decode(is)), + onErrorMsg("decode" -> "_6")(gc.decode(is)), + onErrorMsg("decode" -> "_7")(hc.decode(is)), + onErrorMsg("decode" -> "_8")(ic.decode(is)), + onErrorMsg("decode" -> "_9")(jc.decode(is)), + onErrorMsg("decode" -> "_10")(kc.decode(is)), + onErrorMsg("decode" -> "_11")(lc.decode(is)) + ) + + override def toString: String = + s"Tuple11Coder(_1 -> $ac, _2 -> $bc, _3 -> $cc, _4 -> $dc, _5 -> $ec, _6 -> $gc, _7 -> $hc, _8 -> $ic, _9 -> $jc, _10 -> $kc, _11 -> $lc)" + + // delegate methods for determinism and equality checks + + override def verifyDeterministic(): Unit = { + val cs = List( + "_1" -> ac, + "_2" -> bc, + "_3" -> cc, + "_4" -> dc, + "_5" -> ec, + "_6" -> gc, + "_7" -> hc, + "_8" -> ic, + "_9" -> jc, + "_10" -> kc, + "_11" -> lc + ) + val problems = cs.flatMap { case (label, c) => + try { + 
c.verifyDeterministic() + Nil + } catch { + case e: NonDeterministicException => + val reason = s"field $label is using non-deterministic $c" + List(reason -> e) + } + } + + problems match { + case (_, e) :: _ => + val reasons = problems.map { case (reason, _) => reason } + throw new NonDeterministicException(this, reasons.asJava, e) + case Nil => + } + } + + override def consistentWithEquals(): Boolean = + ac.consistentWithEquals() && bc.consistentWithEquals() && cc.consistentWithEquals() && dc + .consistentWithEquals() && ec.consistentWithEquals() && gc.consistentWithEquals() && hc + .consistentWithEquals() && ic.consistentWithEquals() && jc.consistentWithEquals() && kc + .consistentWithEquals() && lc.consistentWithEquals() + + override def structuralValue(value: (A, B, C, D, E, G, H, I, J, K, L)): AnyRef = + if (consistentWithEquals()) { + value.asInstanceOf[AnyRef] + } else { + ( + ac.structuralValue(value._1), + bc.structuralValue(value._2), + cc.structuralValue(value._3), + dc.structuralValue(value._4), + ec.structuralValue(value._5), + gc.structuralValue(value._6), + hc.structuralValue(value._7), + ic.structuralValue(value._8), + jc.structuralValue(value._9), + kc.structuralValue(value._10), + lc.structuralValue(value._11) + ) + } + + // delegate methods for byte size estimation + override def isRegisterByteSizeObserverCheap(value: (A, B, C, D, E, G, H, I, J, K, L)): Boolean = + ac.isRegisterByteSizeObserverCheap(value._1) && bc.isRegisterByteSizeObserverCheap( + value._2 + ) && cc.isRegisterByteSizeObserverCheap(value._3) && dc.isRegisterByteSizeObserverCheap( + value._4 + ) && ec.isRegisterByteSizeObserverCheap(value._5) && gc.isRegisterByteSizeObserverCheap( + value._6 + ) && hc.isRegisterByteSizeObserverCheap(value._7) && ic.isRegisterByteSizeObserverCheap( + value._8 + ) && jc.isRegisterByteSizeObserverCheap(value._9) && kc.isRegisterByteSizeObserverCheap( + value._10 + ) && lc.isRegisterByteSizeObserverCheap(value._11) + + override def registerByteSizeObserver( + value: (A, B, C, D, E, G, H, I, J, K, L), + observer: ElementByteSizeObserver + ): Unit = { + ac.registerByteSizeObserver(value._1, observer) + bc.registerByteSizeObserver(value._2, observer) + cc.registerByteSizeObserver(value._3, observer) + dc.registerByteSizeObserver(value._4, observer) + ec.registerByteSizeObserver(value._5, observer) + gc.registerByteSizeObserver(value._6, observer) + hc.registerByteSizeObserver(value._7, observer) + ic.registerByteSizeObserver(value._8, observer) + jc.registerByteSizeObserver(value._9, observer) + kc.registerByteSizeObserver(value._10, observer) + lc.registerByteSizeObserver(value._11, observer) + } +} + +final private[coders] class Tuple12Coder[A, B, C, D, E, G, H, I, J, K, L, M]( + val ac: BCoder[A], + val bc: BCoder[B], + val cc: BCoder[C], + val dc: BCoder[D], + val ec: BCoder[E], + val gc: BCoder[G], + val hc: BCoder[H], + val ic: BCoder[I], + val jc: BCoder[J], + val kc: BCoder[K], + val lc: BCoder[L], + val mc: BCoder[M] +) extends AtomicCoder[(A, B, C, D, E, G, H, I, J, K, L, M)] { + private[this] val materializationStackTrace: Array[StackTraceElement] = CoderStackTrace.prepare + + @inline def onErrorMsg[TL](msg: => (String, String))(f: => TL): TL = + try { + f + } catch { + case e: Exception => + // allow Flink memory management, see WrappedBCoder#catching comment. 
+ throw CoderStackTrace.append( + e, + Some( + s"Exception while trying to `${msg._1}` an instance" + + s" of Tuple12: Can't decode field ${msg._2}" + ), + materializationStackTrace + ) + } + + override def encode(value: (A, B, C, D, E, G, H, I, J, K, L, M), os: OutputStream): Unit = { + onErrorMsg("encode" -> "_1")(ac.encode(value._1, os)) + onErrorMsg("encode" -> "_2")(bc.encode(value._2, os)) + onErrorMsg("encode" -> "_3")(cc.encode(value._3, os)) + onErrorMsg("encode" -> "_4")(dc.encode(value._4, os)) + onErrorMsg("encode" -> "_5")(ec.encode(value._5, os)) + onErrorMsg("encode" -> "_6")(gc.encode(value._6, os)) + onErrorMsg("encode" -> "_7")(hc.encode(value._7, os)) + onErrorMsg("encode" -> "_8")(ic.encode(value._8, os)) + onErrorMsg("encode" -> "_9")(jc.encode(value._9, os)) + onErrorMsg("encode" -> "_10")(kc.encode(value._10, os)) + onErrorMsg("encode" -> "_11")(lc.encode(value._11, os)) + onErrorMsg("encode" -> "_12")(mc.encode(value._12, os)) + } + override def decode(is: InputStream): (A, B, C, D, E, G, H, I, J, K, L, M) = + ( + onErrorMsg("decode" -> "_1")(ac.decode(is)), + onErrorMsg("decode" -> "_2")(bc.decode(is)), + onErrorMsg("decode" -> "_3")(cc.decode(is)), + onErrorMsg("decode" -> "_4")(dc.decode(is)), + onErrorMsg("decode" -> "_5")(ec.decode(is)), + onErrorMsg("decode" -> "_6")(gc.decode(is)), + onErrorMsg("decode" -> "_7")(hc.decode(is)), + onErrorMsg("decode" -> "_8")(ic.decode(is)), + onErrorMsg("decode" -> "_9")(jc.decode(is)), + onErrorMsg("decode" -> "_10")(kc.decode(is)), + onErrorMsg("decode" -> "_11")(lc.decode(is)), + onErrorMsg("decode" -> "_12")(mc.decode(is)) + ) + + override def toString: String = + s"Tuple12Coder(_1 -> $ac, _2 -> $bc, _3 -> $cc, _4 -> $dc, _5 -> $ec, _6 -> $gc, _7 -> $hc, _8 -> $ic, _9 -> $jc, _10 -> $kc, _11 -> $lc, _12 -> $mc)" + + // delegate methods for determinism and equality checks + + override def verifyDeterministic(): Unit = { + val cs = List( + "_1" -> ac, + "_2" -> bc, + "_3" -> cc, + "_4" -> dc, + "_5" -> ec, + "_6" -> gc, + "_7" -> hc, + "_8" -> ic, + "_9" -> jc, + "_10" -> kc, + "_11" -> lc, + "_12" -> mc + ) + val problems = cs.flatMap { case (label, c) => + try { + c.verifyDeterministic() + Nil + } catch { + case e: NonDeterministicException => + val reason = s"field $label is using non-deterministic $c" + List(reason -> e) + } + } + + problems match { + case (_, e) :: _ => + val reasons = problems.map { case (reason, _) => reason } + throw new NonDeterministicException(this, reasons.asJava, e) + case Nil => + } + } + + override def consistentWithEquals(): Boolean = + ac.consistentWithEquals() && bc.consistentWithEquals() && cc.consistentWithEquals() && dc + .consistentWithEquals() && ec.consistentWithEquals() && gc.consistentWithEquals() && hc + .consistentWithEquals() && ic.consistentWithEquals() && jc.consistentWithEquals() && kc + .consistentWithEquals() && lc.consistentWithEquals() && mc.consistentWithEquals() + + override def structuralValue(value: (A, B, C, D, E, G, H, I, J, K, L, M)): AnyRef = + if (consistentWithEquals()) { + value.asInstanceOf[AnyRef] + } else { + ( + ac.structuralValue(value._1), + bc.structuralValue(value._2), + cc.structuralValue(value._3), + dc.structuralValue(value._4), + ec.structuralValue(value._5), + gc.structuralValue(value._6), + hc.structuralValue(value._7), + ic.structuralValue(value._8), + jc.structuralValue(value._9), + kc.structuralValue(value._10), + lc.structuralValue(value._11), + mc.structuralValue(value._12) + ) + } + + // delegate methods for byte size estimation + override 
def isRegisterByteSizeObserverCheap( + value: (A, B, C, D, E, G, H, I, J, K, L, M) + ): Boolean = + ac.isRegisterByteSizeObserverCheap(value._1) && bc.isRegisterByteSizeObserverCheap( + value._2 + ) && cc.isRegisterByteSizeObserverCheap(value._3) && dc.isRegisterByteSizeObserverCheap( + value._4 + ) && ec.isRegisterByteSizeObserverCheap(value._5) && gc.isRegisterByteSizeObserverCheap( + value._6 + ) && hc.isRegisterByteSizeObserverCheap(value._7) && ic.isRegisterByteSizeObserverCheap( + value._8 + ) && jc.isRegisterByteSizeObserverCheap(value._9) && kc.isRegisterByteSizeObserverCheap( + value._10 + ) && lc.isRegisterByteSizeObserverCheap(value._11) && mc.isRegisterByteSizeObserverCheap( + value._12 + ) + + override def registerByteSizeObserver( + value: (A, B, C, D, E, G, H, I, J, K, L, M), + observer: ElementByteSizeObserver + ): Unit = { + ac.registerByteSizeObserver(value._1, observer) + bc.registerByteSizeObserver(value._2, observer) + cc.registerByteSizeObserver(value._3, observer) + dc.registerByteSizeObserver(value._4, observer) + ec.registerByteSizeObserver(value._5, observer) + gc.registerByteSizeObserver(value._6, observer) + hc.registerByteSizeObserver(value._7, observer) + ic.registerByteSizeObserver(value._8, observer) + jc.registerByteSizeObserver(value._9, observer) + kc.registerByteSizeObserver(value._10, observer) + lc.registerByteSizeObserver(value._11, observer) + mc.registerByteSizeObserver(value._12, observer) + } +} + +final private[coders] class Tuple13Coder[A, B, C, D, E, G, H, I, J, K, L, M, N]( + val ac: BCoder[A], + val bc: BCoder[B], + val cc: BCoder[C], + val dc: BCoder[D], + val ec: BCoder[E], + val gc: BCoder[G], + val hc: BCoder[H], + val ic: BCoder[I], + val jc: BCoder[J], + val kc: BCoder[K], + val lc: BCoder[L], + val mc: BCoder[M], + val nc: BCoder[N] +) extends AtomicCoder[(A, B, C, D, E, G, H, I, J, K, L, M, N)] { + private[this] val materializationStackTrace: Array[StackTraceElement] = CoderStackTrace.prepare + + @inline def onErrorMsg[TM](msg: => (String, String))(f: => TM): TM = + try { + f + } catch { + case e: Exception => + // allow Flink memory management, see WrappedBCoder#catching comment. 
+ throw CoderStackTrace.append( + e, + Some( + s"Exception while trying to `${msg._1}` an instance" + + s" of Tuple13: Can't decode field ${msg._2}" + ), + materializationStackTrace + ) + } + + override def encode(value: (A, B, C, D, E, G, H, I, J, K, L, M, N), os: OutputStream): Unit = { + onErrorMsg("encode" -> "_1")(ac.encode(value._1, os)) + onErrorMsg("encode" -> "_2")(bc.encode(value._2, os)) + onErrorMsg("encode" -> "_3")(cc.encode(value._3, os)) + onErrorMsg("encode" -> "_4")(dc.encode(value._4, os)) + onErrorMsg("encode" -> "_5")(ec.encode(value._5, os)) + onErrorMsg("encode" -> "_6")(gc.encode(value._6, os)) + onErrorMsg("encode" -> "_7")(hc.encode(value._7, os)) + onErrorMsg("encode" -> "_8")(ic.encode(value._8, os)) + onErrorMsg("encode" -> "_9")(jc.encode(value._9, os)) + onErrorMsg("encode" -> "_10")(kc.encode(value._10, os)) + onErrorMsg("encode" -> "_11")(lc.encode(value._11, os)) + onErrorMsg("encode" -> "_12")(mc.encode(value._12, os)) + onErrorMsg("encode" -> "_13")(nc.encode(value._13, os)) + } + override def decode(is: InputStream): (A, B, C, D, E, G, H, I, J, K, L, M, N) = + ( + onErrorMsg("decode" -> "_1")(ac.decode(is)), + onErrorMsg("decode" -> "_2")(bc.decode(is)), + onErrorMsg("decode" -> "_3")(cc.decode(is)), + onErrorMsg("decode" -> "_4")(dc.decode(is)), + onErrorMsg("decode" -> "_5")(ec.decode(is)), + onErrorMsg("decode" -> "_6")(gc.decode(is)), + onErrorMsg("decode" -> "_7")(hc.decode(is)), + onErrorMsg("decode" -> "_8")(ic.decode(is)), + onErrorMsg("decode" -> "_9")(jc.decode(is)), + onErrorMsg("decode" -> "_10")(kc.decode(is)), + onErrorMsg("decode" -> "_11")(lc.decode(is)), + onErrorMsg("decode" -> "_12")(mc.decode(is)), + onErrorMsg("decode" -> "_13")(nc.decode(is)) + ) + + override def toString: String = + s"Tuple13Coder(_1 -> $ac, _2 -> $bc, _3 -> $cc, _4 -> $dc, _5 -> $ec, _6 -> $gc, _7 -> $hc, _8 -> $ic, _9 -> $jc, _10 -> $kc, _11 -> $lc, _12 -> $mc, _13 -> $nc)" + + // delegate methods for determinism and equality checks + + override def verifyDeterministic(): Unit = { + val cs = List( + "_1" -> ac, + "_2" -> bc, + "_3" -> cc, + "_4" -> dc, + "_5" -> ec, + "_6" -> gc, + "_7" -> hc, + "_8" -> ic, + "_9" -> jc, + "_10" -> kc, + "_11" -> lc, + "_12" -> mc, + "_13" -> nc + ) + val problems = cs.flatMap { case (label, c) => + try { + c.verifyDeterministic() + Nil + } catch { + case e: NonDeterministicException => + val reason = s"field $label is using non-deterministic $c" + List(reason -> e) + } + } + + problems match { + case (_, e) :: _ => + val reasons = problems.map { case (reason, _) => reason } + throw new NonDeterministicException(this, reasons.asJava, e) + case Nil => + } + } + + override def consistentWithEquals(): Boolean = + ac.consistentWithEquals() && bc.consistentWithEquals() && cc.consistentWithEquals() && dc + .consistentWithEquals() && ec.consistentWithEquals() && gc.consistentWithEquals() && hc + .consistentWithEquals() && ic.consistentWithEquals() && jc.consistentWithEquals() && kc + .consistentWithEquals() && lc.consistentWithEquals() && mc.consistentWithEquals() && nc + .consistentWithEquals() + + override def structuralValue(value: (A, B, C, D, E, G, H, I, J, K, L, M, N)): AnyRef = + if (consistentWithEquals()) { + value.asInstanceOf[AnyRef] + } else { + ( + ac.structuralValue(value._1), + bc.structuralValue(value._2), + cc.structuralValue(value._3), + dc.structuralValue(value._4), + ec.structuralValue(value._5), + gc.structuralValue(value._6), + hc.structuralValue(value._7), + ic.structuralValue(value._8), + 
jc.structuralValue(value._9), + kc.structuralValue(value._10), + lc.structuralValue(value._11), + mc.structuralValue(value._12), + nc.structuralValue(value._13) + ) + } + + // delegate methods for byte size estimation + override def isRegisterByteSizeObserverCheap( + value: (A, B, C, D, E, G, H, I, J, K, L, M, N) + ): Boolean = + ac.isRegisterByteSizeObserverCheap(value._1) && bc.isRegisterByteSizeObserverCheap( + value._2 + ) && cc.isRegisterByteSizeObserverCheap(value._3) && dc.isRegisterByteSizeObserverCheap( + value._4 + ) && ec.isRegisterByteSizeObserverCheap(value._5) && gc.isRegisterByteSizeObserverCheap( + value._6 + ) && hc.isRegisterByteSizeObserverCheap(value._7) && ic.isRegisterByteSizeObserverCheap( + value._8 + ) && jc.isRegisterByteSizeObserverCheap(value._9) && kc.isRegisterByteSizeObserverCheap( + value._10 + ) && lc.isRegisterByteSizeObserverCheap(value._11) && mc.isRegisterByteSizeObserverCheap( + value._12 + ) && nc.isRegisterByteSizeObserverCheap(value._13) + + override def registerByteSizeObserver( + value: (A, B, C, D, E, G, H, I, J, K, L, M, N), + observer: ElementByteSizeObserver + ): Unit = { + ac.registerByteSizeObserver(value._1, observer) + bc.registerByteSizeObserver(value._2, observer) + cc.registerByteSizeObserver(value._3, observer) + dc.registerByteSizeObserver(value._4, observer) + ec.registerByteSizeObserver(value._5, observer) + gc.registerByteSizeObserver(value._6, observer) + hc.registerByteSizeObserver(value._7, observer) + ic.registerByteSizeObserver(value._8, observer) + jc.registerByteSizeObserver(value._9, observer) + kc.registerByteSizeObserver(value._10, observer) + lc.registerByteSizeObserver(value._11, observer) + mc.registerByteSizeObserver(value._12, observer) + nc.registerByteSizeObserver(value._13, observer) + } +} + +final private[coders] class Tuple14Coder[A, B, C, D, E, G, H, I, J, K, L, M, N, O]( + val ac: BCoder[A], + val bc: BCoder[B], + val cc: BCoder[C], + val dc: BCoder[D], + val ec: BCoder[E], + val gc: BCoder[G], + val hc: BCoder[H], + val ic: BCoder[I], + val jc: BCoder[J], + val kc: BCoder[K], + val lc: BCoder[L], + val mc: BCoder[M], + val nc: BCoder[N], + val oc: BCoder[O] +) extends AtomicCoder[(A, B, C, D, E, G, H, I, J, K, L, M, N, O)] { + private[this] val materializationStackTrace: Array[StackTraceElement] = CoderStackTrace.prepare + + @inline def onErrorMsg[TN](msg: => (String, String))(f: => TN): TN = + try { + f + } catch { + case e: Exception => + // allow Flink memory management, see WrappedBCoder#catching comment. 
+ throw CoderStackTrace.append( + e, + Some( + s"Exception while trying to `${msg._1}` an instance" + + s" of Tuple14: Can't decode field ${msg._2}" + ), + materializationStackTrace + ) + } + + override def encode(value: (A, B, C, D, E, G, H, I, J, K, L, M, N, O), os: OutputStream): Unit = { + onErrorMsg("encode" -> "_1")(ac.encode(value._1, os)) + onErrorMsg("encode" -> "_2")(bc.encode(value._2, os)) + onErrorMsg("encode" -> "_3")(cc.encode(value._3, os)) + onErrorMsg("encode" -> "_4")(dc.encode(value._4, os)) + onErrorMsg("encode" -> "_5")(ec.encode(value._5, os)) + onErrorMsg("encode" -> "_6")(gc.encode(value._6, os)) + onErrorMsg("encode" -> "_7")(hc.encode(value._7, os)) + onErrorMsg("encode" -> "_8")(ic.encode(value._8, os)) + onErrorMsg("encode" -> "_9")(jc.encode(value._9, os)) + onErrorMsg("encode" -> "_10")(kc.encode(value._10, os)) + onErrorMsg("encode" -> "_11")(lc.encode(value._11, os)) + onErrorMsg("encode" -> "_12")(mc.encode(value._12, os)) + onErrorMsg("encode" -> "_13")(nc.encode(value._13, os)) + onErrorMsg("encode" -> "_14")(oc.encode(value._14, os)) + } + override def decode(is: InputStream): (A, B, C, D, E, G, H, I, J, K, L, M, N, O) = + ( + onErrorMsg("decode" -> "_1")(ac.decode(is)), + onErrorMsg("decode" -> "_2")(bc.decode(is)), + onErrorMsg("decode" -> "_3")(cc.decode(is)), + onErrorMsg("decode" -> "_4")(dc.decode(is)), + onErrorMsg("decode" -> "_5")(ec.decode(is)), + onErrorMsg("decode" -> "_6")(gc.decode(is)), + onErrorMsg("decode" -> "_7")(hc.decode(is)), + onErrorMsg("decode" -> "_8")(ic.decode(is)), + onErrorMsg("decode" -> "_9")(jc.decode(is)), + onErrorMsg("decode" -> "_10")(kc.decode(is)), + onErrorMsg("decode" -> "_11")(lc.decode(is)), + onErrorMsg("decode" -> "_12")(mc.decode(is)), + onErrorMsg("decode" -> "_13")(nc.decode(is)), + onErrorMsg("decode" -> "_14")(oc.decode(is)) + ) + + override def toString: String = + s"Tuple14Coder(_1 -> $ac, _2 -> $bc, _3 -> $cc, _4 -> $dc, _5 -> $ec, _6 -> $gc, _7 -> $hc, _8 -> $ic, _9 -> $jc, _10 -> $kc, _11 -> $lc, _12 -> $mc, _13 -> $nc, _14 -> $oc)" + + // delegate methods for determinism and equality checks + + override def verifyDeterministic(): Unit = { + val cs = List( + "_1" -> ac, + "_2" -> bc, + "_3" -> cc, + "_4" -> dc, + "_5" -> ec, + "_6" -> gc, + "_7" -> hc, + "_8" -> ic, + "_9" -> jc, + "_10" -> kc, + "_11" -> lc, + "_12" -> mc, + "_13" -> nc, + "_14" -> oc + ) + val problems = cs.flatMap { case (label, c) => + try { + c.verifyDeterministic() + Nil + } catch { + case e: NonDeterministicException => + val reason = s"field $label is using non-deterministic $c" + List(reason -> e) + } + } + + problems match { + case (_, e) :: _ => + val reasons = problems.map { case (reason, _) => reason } + throw new NonDeterministicException(this, reasons.asJava, e) + case Nil => + } + } + + override def consistentWithEquals(): Boolean = + ac.consistentWithEquals() && bc.consistentWithEquals() && cc.consistentWithEquals() && dc + .consistentWithEquals() && ec.consistentWithEquals() && gc.consistentWithEquals() && hc + .consistentWithEquals() && ic.consistentWithEquals() && jc.consistentWithEquals() && kc + .consistentWithEquals() && lc.consistentWithEquals() && mc.consistentWithEquals() && nc + .consistentWithEquals() && oc.consistentWithEquals() + + override def structuralValue(value: (A, B, C, D, E, G, H, I, J, K, L, M, N, O)): AnyRef = + if (consistentWithEquals()) { + value.asInstanceOf[AnyRef] + } else { + ( + ac.structuralValue(value._1), + bc.structuralValue(value._2), + cc.structuralValue(value._3), + 
dc.structuralValue(value._4), + ec.structuralValue(value._5), + gc.structuralValue(value._6), + hc.structuralValue(value._7), + ic.structuralValue(value._8), + jc.structuralValue(value._9), + kc.structuralValue(value._10), + lc.structuralValue(value._11), + mc.structuralValue(value._12), + nc.structuralValue(value._13), + oc.structuralValue(value._14) + ) + } + + // delegate methods for byte size estimation + override def isRegisterByteSizeObserverCheap( + value: (A, B, C, D, E, G, H, I, J, K, L, M, N, O) + ): Boolean = + ac.isRegisterByteSizeObserverCheap(value._1) && bc.isRegisterByteSizeObserverCheap( + value._2 + ) && cc.isRegisterByteSizeObserverCheap(value._3) && dc.isRegisterByteSizeObserverCheap( + value._4 + ) && ec.isRegisterByteSizeObserverCheap(value._5) && gc.isRegisterByteSizeObserverCheap( + value._6 + ) && hc.isRegisterByteSizeObserverCheap(value._7) && ic.isRegisterByteSizeObserverCheap( + value._8 + ) && jc.isRegisterByteSizeObserverCheap(value._9) && kc.isRegisterByteSizeObserverCheap( + value._10 + ) && lc.isRegisterByteSizeObserverCheap(value._11) && mc.isRegisterByteSizeObserverCheap( + value._12 + ) && nc.isRegisterByteSizeObserverCheap(value._13) && oc.isRegisterByteSizeObserverCheap( + value._14 + ) + + override def registerByteSizeObserver( + value: (A, B, C, D, E, G, H, I, J, K, L, M, N, O), + observer: ElementByteSizeObserver + ): Unit = { + ac.registerByteSizeObserver(value._1, observer) + bc.registerByteSizeObserver(value._2, observer) + cc.registerByteSizeObserver(value._3, observer) + dc.registerByteSizeObserver(value._4, observer) + ec.registerByteSizeObserver(value._5, observer) + gc.registerByteSizeObserver(value._6, observer) + hc.registerByteSizeObserver(value._7, observer) + ic.registerByteSizeObserver(value._8, observer) + jc.registerByteSizeObserver(value._9, observer) + kc.registerByteSizeObserver(value._10, observer) + lc.registerByteSizeObserver(value._11, observer) + mc.registerByteSizeObserver(value._12, observer) + nc.registerByteSizeObserver(value._13, observer) + oc.registerByteSizeObserver(value._14, observer) + } +} + +final private[coders] class Tuple15Coder[A, B, C, D, E, G, H, I, J, K, L, M, N, O, P]( + val ac: BCoder[A], + val bc: BCoder[B], + val cc: BCoder[C], + val dc: BCoder[D], + val ec: BCoder[E], + val gc: BCoder[G], + val hc: BCoder[H], + val ic: BCoder[I], + val jc: BCoder[J], + val kc: BCoder[K], + val lc: BCoder[L], + val mc: BCoder[M], + val nc: BCoder[N], + val oc: BCoder[O], + val pc: BCoder[P] +) extends AtomicCoder[(A, B, C, D, E, G, H, I, J, K, L, M, N, O, P)] { + private[this] val materializationStackTrace: Array[StackTraceElement] = CoderStackTrace.prepare + + @inline def onErrorMsg[TO](msg: => (String, String))(f: => TO): TO = + try { + f + } catch { + case e: Exception => + // allow Flink memory management, see WrappedBCoder#catching comment. 
+ throw CoderStackTrace.append( + e, + Some( + s"Exception while trying to `${msg._1}` an instance" + + s" of Tuple15: Can't decode field ${msg._2}" + ), + materializationStackTrace + ) + } + + override def encode( + value: (A, B, C, D, E, G, H, I, J, K, L, M, N, O, P), + os: OutputStream + ): Unit = { + onErrorMsg("encode" -> "_1")(ac.encode(value._1, os)) + onErrorMsg("encode" -> "_2")(bc.encode(value._2, os)) + onErrorMsg("encode" -> "_3")(cc.encode(value._3, os)) + onErrorMsg("encode" -> "_4")(dc.encode(value._4, os)) + onErrorMsg("encode" -> "_5")(ec.encode(value._5, os)) + onErrorMsg("encode" -> "_6")(gc.encode(value._6, os)) + onErrorMsg("encode" -> "_7")(hc.encode(value._7, os)) + onErrorMsg("encode" -> "_8")(ic.encode(value._8, os)) + onErrorMsg("encode" -> "_9")(jc.encode(value._9, os)) + onErrorMsg("encode" -> "_10")(kc.encode(value._10, os)) + onErrorMsg("encode" -> "_11")(lc.encode(value._11, os)) + onErrorMsg("encode" -> "_12")(mc.encode(value._12, os)) + onErrorMsg("encode" -> "_13")(nc.encode(value._13, os)) + onErrorMsg("encode" -> "_14")(oc.encode(value._14, os)) + onErrorMsg("encode" -> "_15")(pc.encode(value._15, os)) + } + override def decode(is: InputStream): (A, B, C, D, E, G, H, I, J, K, L, M, N, O, P) = + ( + onErrorMsg("decode" -> "_1")(ac.decode(is)), + onErrorMsg("decode" -> "_2")(bc.decode(is)), + onErrorMsg("decode" -> "_3")(cc.decode(is)), + onErrorMsg("decode" -> "_4")(dc.decode(is)), + onErrorMsg("decode" -> "_5")(ec.decode(is)), + onErrorMsg("decode" -> "_6")(gc.decode(is)), + onErrorMsg("decode" -> "_7")(hc.decode(is)), + onErrorMsg("decode" -> "_8")(ic.decode(is)), + onErrorMsg("decode" -> "_9")(jc.decode(is)), + onErrorMsg("decode" -> "_10")(kc.decode(is)), + onErrorMsg("decode" -> "_11")(lc.decode(is)), + onErrorMsg("decode" -> "_12")(mc.decode(is)), + onErrorMsg("decode" -> "_13")(nc.decode(is)), + onErrorMsg("decode" -> "_14")(oc.decode(is)), + onErrorMsg("decode" -> "_15")(pc.decode(is)) + ) + + override def toString: String = + s"Tuple15Coder(_1 -> $ac, _2 -> $bc, _3 -> $cc, _4 -> $dc, _5 -> $ec, _6 -> $gc, _7 -> $hc, _8 -> $ic, _9 -> $jc, _10 -> $kc, _11 -> $lc, _12 -> $mc, _13 -> $nc, _14 -> $oc, _15 -> $pc)" + + // delegate methods for determinism and equality checks + + override def verifyDeterministic(): Unit = { + val cs = List( + "_1" -> ac, + "_2" -> bc, + "_3" -> cc, + "_4" -> dc, + "_5" -> ec, + "_6" -> gc, + "_7" -> hc, + "_8" -> ic, + "_9" -> jc, + "_10" -> kc, + "_11" -> lc, + "_12" -> mc, + "_13" -> nc, + "_14" -> oc, + "_15" -> pc + ) + val problems = cs.flatMap { case (label, c) => + try { + c.verifyDeterministic() + Nil + } catch { + case e: NonDeterministicException => + val reason = s"field $label is using non-deterministic $c" + List(reason -> e) + } + } + + problems match { + case (_, e) :: _ => + val reasons = problems.map { case (reason, _) => reason } + throw new NonDeterministicException(this, reasons.asJava, e) + case Nil => + } + } + + override def consistentWithEquals(): Boolean = + ac.consistentWithEquals() && bc.consistentWithEquals() && cc.consistentWithEquals() && dc + .consistentWithEquals() && ec.consistentWithEquals() && gc.consistentWithEquals() && hc + .consistentWithEquals() && ic.consistentWithEquals() && jc.consistentWithEquals() && kc + .consistentWithEquals() && lc.consistentWithEquals() && mc.consistentWithEquals() && nc + .consistentWithEquals() && oc.consistentWithEquals() && pc.consistentWithEquals() + + override def structuralValue(value: (A, B, C, D, E, G, H, I, J, K, L, M, N, O, P)): AnyRef = + if 
(consistentWithEquals()) { + value.asInstanceOf[AnyRef] + } else { + ( + ac.structuralValue(value._1), + bc.structuralValue(value._2), + cc.structuralValue(value._3), + dc.structuralValue(value._4), + ec.structuralValue(value._5), + gc.structuralValue(value._6), + hc.structuralValue(value._7), + ic.structuralValue(value._8), + jc.structuralValue(value._9), + kc.structuralValue(value._10), + lc.structuralValue(value._11), + mc.structuralValue(value._12), + nc.structuralValue(value._13), + oc.structuralValue(value._14), + pc.structuralValue(value._15) + ) + } + + // delegate methods for byte size estimation + override def isRegisterByteSizeObserverCheap( + value: (A, B, C, D, E, G, H, I, J, K, L, M, N, O, P) + ): Boolean = + ac.isRegisterByteSizeObserverCheap(value._1) && bc.isRegisterByteSizeObserverCheap( + value._2 + ) && cc.isRegisterByteSizeObserverCheap(value._3) && dc.isRegisterByteSizeObserverCheap( + value._4 + ) && ec.isRegisterByteSizeObserverCheap(value._5) && gc.isRegisterByteSizeObserverCheap( + value._6 + ) && hc.isRegisterByteSizeObserverCheap(value._7) && ic.isRegisterByteSizeObserverCheap( + value._8 + ) && jc.isRegisterByteSizeObserverCheap(value._9) && kc.isRegisterByteSizeObserverCheap( + value._10 + ) && lc.isRegisterByteSizeObserverCheap(value._11) && mc.isRegisterByteSizeObserverCheap( + value._12 + ) && nc.isRegisterByteSizeObserverCheap(value._13) && oc.isRegisterByteSizeObserverCheap( + value._14 + ) && pc.isRegisterByteSizeObserverCheap(value._15) + + override def registerByteSizeObserver( + value: (A, B, C, D, E, G, H, I, J, K, L, M, N, O, P), + observer: ElementByteSizeObserver + ): Unit = { + ac.registerByteSizeObserver(value._1, observer) + bc.registerByteSizeObserver(value._2, observer) + cc.registerByteSizeObserver(value._3, observer) + dc.registerByteSizeObserver(value._4, observer) + ec.registerByteSizeObserver(value._5, observer) + gc.registerByteSizeObserver(value._6, observer) + hc.registerByteSizeObserver(value._7, observer) + ic.registerByteSizeObserver(value._8, observer) + jc.registerByteSizeObserver(value._9, observer) + kc.registerByteSizeObserver(value._10, observer) + lc.registerByteSizeObserver(value._11, observer) + mc.registerByteSizeObserver(value._12, observer) + nc.registerByteSizeObserver(value._13, observer) + oc.registerByteSizeObserver(value._14, observer) + pc.registerByteSizeObserver(value._15, observer) + } +} + +final private[coders] class Tuple16Coder[A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q]( + val ac: BCoder[A], + val bc: BCoder[B], + val cc: BCoder[C], + val dc: BCoder[D], + val ec: BCoder[E], + val gc: BCoder[G], + val hc: BCoder[H], + val ic: BCoder[I], + val jc: BCoder[J], + val kc: BCoder[K], + val lc: BCoder[L], + val mc: BCoder[M], + val nc: BCoder[N], + val oc: BCoder[O], + val pc: BCoder[P], + val qc: BCoder[Q] +) extends AtomicCoder[(A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q)] { + private[this] val materializationStackTrace: Array[StackTraceElement] = CoderStackTrace.prepare + + @inline def onErrorMsg[TP](msg: => (String, String))(f: => TP): TP = + try { + f + } catch { + case e: Exception => + // allow Flink memory management, see WrappedBCoder#catching comment. 
+ throw CoderStackTrace.append( + e, + Some( + s"Exception while trying to `${msg._1}` an instance" + + s" of Tuple16: Can't decode field ${msg._2}" + ), + materializationStackTrace + ) + } + + override def encode( + value: (A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q), + os: OutputStream + ): Unit = { + onErrorMsg("encode" -> "_1")(ac.encode(value._1, os)) + onErrorMsg("encode" -> "_2")(bc.encode(value._2, os)) + onErrorMsg("encode" -> "_3")(cc.encode(value._3, os)) + onErrorMsg("encode" -> "_4")(dc.encode(value._4, os)) + onErrorMsg("encode" -> "_5")(ec.encode(value._5, os)) + onErrorMsg("encode" -> "_6")(gc.encode(value._6, os)) + onErrorMsg("encode" -> "_7")(hc.encode(value._7, os)) + onErrorMsg("encode" -> "_8")(ic.encode(value._8, os)) + onErrorMsg("encode" -> "_9")(jc.encode(value._9, os)) + onErrorMsg("encode" -> "_10")(kc.encode(value._10, os)) + onErrorMsg("encode" -> "_11")(lc.encode(value._11, os)) + onErrorMsg("encode" -> "_12")(mc.encode(value._12, os)) + onErrorMsg("encode" -> "_13")(nc.encode(value._13, os)) + onErrorMsg("encode" -> "_14")(oc.encode(value._14, os)) + onErrorMsg("encode" -> "_15")(pc.encode(value._15, os)) + onErrorMsg("encode" -> "_16")(qc.encode(value._16, os)) + } + override def decode(is: InputStream): (A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q) = + ( + onErrorMsg("decode" -> "_1")(ac.decode(is)), + onErrorMsg("decode" -> "_2")(bc.decode(is)), + onErrorMsg("decode" -> "_3")(cc.decode(is)), + onErrorMsg("decode" -> "_4")(dc.decode(is)), + onErrorMsg("decode" -> "_5")(ec.decode(is)), + onErrorMsg("decode" -> "_6")(gc.decode(is)), + onErrorMsg("decode" -> "_7")(hc.decode(is)), + onErrorMsg("decode" -> "_8")(ic.decode(is)), + onErrorMsg("decode" -> "_9")(jc.decode(is)), + onErrorMsg("decode" -> "_10")(kc.decode(is)), + onErrorMsg("decode" -> "_11")(lc.decode(is)), + onErrorMsg("decode" -> "_12")(mc.decode(is)), + onErrorMsg("decode" -> "_13")(nc.decode(is)), + onErrorMsg("decode" -> "_14")(oc.decode(is)), + onErrorMsg("decode" -> "_15")(pc.decode(is)), + onErrorMsg("decode" -> "_16")(qc.decode(is)) + ) + + override def toString: String = + s"Tuple16Coder(_1 -> $ac, _2 -> $bc, _3 -> $cc, _4 -> $dc, _5 -> $ec, _6 -> $gc, _7 -> $hc, _8 -> $ic, _9 -> $jc, _10 -> $kc, _11 -> $lc, _12 -> $mc, _13 -> $nc, _14 -> $oc, _15 -> $pc, _16 -> $qc)" + + // delegate methods for determinism and equality checks + + override def verifyDeterministic(): Unit = { + val cs = List( + "_1" -> ac, + "_2" -> bc, + "_3" -> cc, + "_4" -> dc, + "_5" -> ec, + "_6" -> gc, + "_7" -> hc, + "_8" -> ic, + "_9" -> jc, + "_10" -> kc, + "_11" -> lc, + "_12" -> mc, + "_13" -> nc, + "_14" -> oc, + "_15" -> pc, + "_16" -> qc + ) + val problems = cs.flatMap { case (label, c) => + try { + c.verifyDeterministic() + Nil + } catch { + case e: NonDeterministicException => + val reason = s"field $label is using non-deterministic $c" + List(reason -> e) + } + } + + problems match { + case (_, e) :: _ => + val reasons = problems.map { case (reason, _) => reason } + throw new NonDeterministicException(this, reasons.asJava, e) + case Nil => + } + } + + override def consistentWithEquals(): Boolean = + ac.consistentWithEquals() && bc.consistentWithEquals() && cc.consistentWithEquals() && dc + .consistentWithEquals() && ec.consistentWithEquals() && gc.consistentWithEquals() && hc + .consistentWithEquals() && ic.consistentWithEquals() && jc.consistentWithEquals() && kc + .consistentWithEquals() && lc.consistentWithEquals() && mc.consistentWithEquals() && nc + .consistentWithEquals() && 
oc.consistentWithEquals() && pc.consistentWithEquals() && qc + .consistentWithEquals() + + override def structuralValue(value: (A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q)): AnyRef = + if (consistentWithEquals()) { + value.asInstanceOf[AnyRef] + } else { + ( + ac.structuralValue(value._1), + bc.structuralValue(value._2), + cc.structuralValue(value._3), + dc.structuralValue(value._4), + ec.structuralValue(value._5), + gc.structuralValue(value._6), + hc.structuralValue(value._7), + ic.structuralValue(value._8), + jc.structuralValue(value._9), + kc.structuralValue(value._10), + lc.structuralValue(value._11), + mc.structuralValue(value._12), + nc.structuralValue(value._13), + oc.structuralValue(value._14), + pc.structuralValue(value._15), + qc.structuralValue(value._16) + ) + } + + // delegate methods for byte size estimation + override def isRegisterByteSizeObserverCheap( + value: (A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q) + ): Boolean = + ac.isRegisterByteSizeObserverCheap(value._1) && bc.isRegisterByteSizeObserverCheap( + value._2 + ) && cc.isRegisterByteSizeObserverCheap(value._3) && dc.isRegisterByteSizeObserverCheap( + value._4 + ) && ec.isRegisterByteSizeObserverCheap(value._5) && gc.isRegisterByteSizeObserverCheap( + value._6 + ) && hc.isRegisterByteSizeObserverCheap(value._7) && ic.isRegisterByteSizeObserverCheap( + value._8 + ) && jc.isRegisterByteSizeObserverCheap(value._9) && kc.isRegisterByteSizeObserverCheap( + value._10 + ) && lc.isRegisterByteSizeObserverCheap(value._11) && mc.isRegisterByteSizeObserverCheap( + value._12 + ) && nc.isRegisterByteSizeObserverCheap(value._13) && oc.isRegisterByteSizeObserverCheap( + value._14 + ) && pc.isRegisterByteSizeObserverCheap(value._15) && qc.isRegisterByteSizeObserverCheap( + value._16 + ) + + override def registerByteSizeObserver( + value: (A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q), + observer: ElementByteSizeObserver + ): Unit = { + ac.registerByteSizeObserver(value._1, observer) + bc.registerByteSizeObserver(value._2, observer) + cc.registerByteSizeObserver(value._3, observer) + dc.registerByteSizeObserver(value._4, observer) + ec.registerByteSizeObserver(value._5, observer) + gc.registerByteSizeObserver(value._6, observer) + hc.registerByteSizeObserver(value._7, observer) + ic.registerByteSizeObserver(value._8, observer) + jc.registerByteSizeObserver(value._9, observer) + kc.registerByteSizeObserver(value._10, observer) + lc.registerByteSizeObserver(value._11, observer) + mc.registerByteSizeObserver(value._12, observer) + nc.registerByteSizeObserver(value._13, observer) + oc.registerByteSizeObserver(value._14, observer) + pc.registerByteSizeObserver(value._15, observer) + qc.registerByteSizeObserver(value._16, observer) + } +} + +final private[coders] class Tuple17Coder[A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q, R]( + val ac: BCoder[A], + val bc: BCoder[B], + val cc: BCoder[C], + val dc: BCoder[D], + val ec: BCoder[E], + val gc: BCoder[G], + val hc: BCoder[H], + val ic: BCoder[I], + val jc: BCoder[J], + val kc: BCoder[K], + val lc: BCoder[L], + val mc: BCoder[M], + val nc: BCoder[N], + val oc: BCoder[O], + val pc: BCoder[P], + val qc: BCoder[Q], + val rc: BCoder[R] +) extends AtomicCoder[(A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q, R)] { + private[this] val materializationStackTrace: Array[StackTraceElement] = CoderStackTrace.prepare + + @inline def onErrorMsg[TQ](msg: => (String, String))(f: => TQ): TQ = + try { + f + } catch { + case e: Exception => + // allow Flink memory management, see 
WrappedBCoder#catching comment. + throw CoderStackTrace.append( + e, + Some( + s"Exception while trying to `${msg._1}` an instance" + + s" of Tuple17: Can't decode field ${msg._2}" + ), + materializationStackTrace + ) + } + + override def encode( + value: (A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q, R), + os: OutputStream + ): Unit = { + onErrorMsg("encode" -> "_1")(ac.encode(value._1, os)) + onErrorMsg("encode" -> "_2")(bc.encode(value._2, os)) + onErrorMsg("encode" -> "_3")(cc.encode(value._3, os)) + onErrorMsg("encode" -> "_4")(dc.encode(value._4, os)) + onErrorMsg("encode" -> "_5")(ec.encode(value._5, os)) + onErrorMsg("encode" -> "_6")(gc.encode(value._6, os)) + onErrorMsg("encode" -> "_7")(hc.encode(value._7, os)) + onErrorMsg("encode" -> "_8")(ic.encode(value._8, os)) + onErrorMsg("encode" -> "_9")(jc.encode(value._9, os)) + onErrorMsg("encode" -> "_10")(kc.encode(value._10, os)) + onErrorMsg("encode" -> "_11")(lc.encode(value._11, os)) + onErrorMsg("encode" -> "_12")(mc.encode(value._12, os)) + onErrorMsg("encode" -> "_13")(nc.encode(value._13, os)) + onErrorMsg("encode" -> "_14")(oc.encode(value._14, os)) + onErrorMsg("encode" -> "_15")(pc.encode(value._15, os)) + onErrorMsg("encode" -> "_16")(qc.encode(value._16, os)) + onErrorMsg("encode" -> "_17")(rc.encode(value._17, os)) + } + override def decode(is: InputStream): (A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q, R) = + ( + onErrorMsg("decode" -> "_1")(ac.decode(is)), + onErrorMsg("decode" -> "_2")(bc.decode(is)), + onErrorMsg("decode" -> "_3")(cc.decode(is)), + onErrorMsg("decode" -> "_4")(dc.decode(is)), + onErrorMsg("decode" -> "_5")(ec.decode(is)), + onErrorMsg("decode" -> "_6")(gc.decode(is)), + onErrorMsg("decode" -> "_7")(hc.decode(is)), + onErrorMsg("decode" -> "_8")(ic.decode(is)), + onErrorMsg("decode" -> "_9")(jc.decode(is)), + onErrorMsg("decode" -> "_10")(kc.decode(is)), + onErrorMsg("decode" -> "_11")(lc.decode(is)), + onErrorMsg("decode" -> "_12")(mc.decode(is)), + onErrorMsg("decode" -> "_13")(nc.decode(is)), + onErrorMsg("decode" -> "_14")(oc.decode(is)), + onErrorMsg("decode" -> "_15")(pc.decode(is)), + onErrorMsg("decode" -> "_16")(qc.decode(is)), + onErrorMsg("decode" -> "_17")(rc.decode(is)) + ) + + override def toString: String = + s"Tuple17Coder(_1 -> $ac, _2 -> $bc, _3 -> $cc, _4 -> $dc, _5 -> $ec, _6 -> $gc, _7 -> $hc, _8 -> $ic, _9 -> $jc, _10 -> $kc, _11 -> $lc, _12 -> $mc, _13 -> $nc, _14 -> $oc, _15 -> $pc, _16 -> $qc, _17 -> $rc)" + + // delegate methods for determinism and equality checks + + override def verifyDeterministic(): Unit = { + val cs = List( + "_1" -> ac, + "_2" -> bc, + "_3" -> cc, + "_4" -> dc, + "_5" -> ec, + "_6" -> gc, + "_7" -> hc, + "_8" -> ic, + "_9" -> jc, + "_10" -> kc, + "_11" -> lc, + "_12" -> mc, + "_13" -> nc, + "_14" -> oc, + "_15" -> pc, + "_16" -> qc, + "_17" -> rc + ) + val problems = cs.flatMap { case (label, c) => + try { + c.verifyDeterministic() + Nil + } catch { + case e: NonDeterministicException => + val reason = s"field $label is using non-deterministic $c" + List(reason -> e) + } + } + + problems match { + case (_, e) :: _ => + val reasons = problems.map { case (reason, _) => reason } + throw new NonDeterministicException(this, reasons.asJava, e) + case Nil => + } + } + + override def consistentWithEquals(): Boolean = + ac.consistentWithEquals() && bc.consistentWithEquals() && cc.consistentWithEquals() && dc + .consistentWithEquals() && ec.consistentWithEquals() && gc.consistentWithEquals() && hc + .consistentWithEquals() && ic.consistentWithEquals() && 
jc.consistentWithEquals() && kc + .consistentWithEquals() && lc.consistentWithEquals() && mc.consistentWithEquals() && nc + .consistentWithEquals() && oc.consistentWithEquals() && pc.consistentWithEquals() && qc + .consistentWithEquals() && rc.consistentWithEquals() + + override def structuralValue(value: (A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q, R)): AnyRef = + if (consistentWithEquals()) { + value.asInstanceOf[AnyRef] + } else { + ( + ac.structuralValue(value._1), + bc.structuralValue(value._2), + cc.structuralValue(value._3), + dc.structuralValue(value._4), + ec.structuralValue(value._5), + gc.structuralValue(value._6), + hc.structuralValue(value._7), + ic.structuralValue(value._8), + jc.structuralValue(value._9), + kc.structuralValue(value._10), + lc.structuralValue(value._11), + mc.structuralValue(value._12), + nc.structuralValue(value._13), + oc.structuralValue(value._14), + pc.structuralValue(value._15), + qc.structuralValue(value._16), + rc.structuralValue(value._17) + ) + } + + // delegate methods for byte size estimation + override def isRegisterByteSizeObserverCheap( + value: (A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q, R) + ): Boolean = + ac.isRegisterByteSizeObserverCheap(value._1) && bc.isRegisterByteSizeObserverCheap( + value._2 + ) && cc.isRegisterByteSizeObserverCheap(value._3) && dc.isRegisterByteSizeObserverCheap( + value._4 + ) && ec.isRegisterByteSizeObserverCheap(value._5) && gc.isRegisterByteSizeObserverCheap( + value._6 + ) && hc.isRegisterByteSizeObserverCheap(value._7) && ic.isRegisterByteSizeObserverCheap( + value._8 + ) && jc.isRegisterByteSizeObserverCheap(value._9) && kc.isRegisterByteSizeObserverCheap( + value._10 + ) && lc.isRegisterByteSizeObserverCheap(value._11) && mc.isRegisterByteSizeObserverCheap( + value._12 + ) && nc.isRegisterByteSizeObserverCheap(value._13) && oc.isRegisterByteSizeObserverCheap( + value._14 + ) && pc.isRegisterByteSizeObserverCheap(value._15) && qc.isRegisterByteSizeObserverCheap( + value._16 + ) && rc.isRegisterByteSizeObserverCheap(value._17) + + override def registerByteSizeObserver( + value: (A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q, R), + observer: ElementByteSizeObserver + ): Unit = { + ac.registerByteSizeObserver(value._1, observer) + bc.registerByteSizeObserver(value._2, observer) + cc.registerByteSizeObserver(value._3, observer) + dc.registerByteSizeObserver(value._4, observer) + ec.registerByteSizeObserver(value._5, observer) + gc.registerByteSizeObserver(value._6, observer) + hc.registerByteSizeObserver(value._7, observer) + ic.registerByteSizeObserver(value._8, observer) + jc.registerByteSizeObserver(value._9, observer) + kc.registerByteSizeObserver(value._10, observer) + lc.registerByteSizeObserver(value._11, observer) + mc.registerByteSizeObserver(value._12, observer) + nc.registerByteSizeObserver(value._13, observer) + oc.registerByteSizeObserver(value._14, observer) + pc.registerByteSizeObserver(value._15, observer) + qc.registerByteSizeObserver(value._16, observer) + rc.registerByteSizeObserver(value._17, observer) + } +} + +final private[coders] class Tuple18Coder[A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q, R, S]( + val ac: BCoder[A], + val bc: BCoder[B], + val cc: BCoder[C], + val dc: BCoder[D], + val ec: BCoder[E], + val gc: BCoder[G], + val hc: BCoder[H], + val ic: BCoder[I], + val jc: BCoder[J], + val kc: BCoder[K], + val lc: BCoder[L], + val mc: BCoder[M], + val nc: BCoder[N], + val oc: BCoder[O], + val pc: BCoder[P], + val qc: BCoder[Q], + val rc: BCoder[R], + val sc: BCoder[S] +) 
extends AtomicCoder[(A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q, R, S)] { + private[this] val materializationStackTrace: Array[StackTraceElement] = CoderStackTrace.prepare + + @inline def onErrorMsg[TR](msg: => (String, String))(f: => TR): TR = + try { + f + } catch { + case e: Exception => + // allow Flink memory management, see WrappedBCoder#catching comment. + throw CoderStackTrace.append( + e, + Some( + s"Exception while trying to `${msg._1}` an instance" + + s" of Tuple18: Can't decode field ${msg._2}" + ), + materializationStackTrace + ) + } + + override def encode( + value: (A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q, R, S), + os: OutputStream + ): Unit = { + onErrorMsg("encode" -> "_1")(ac.encode(value._1, os)) + onErrorMsg("encode" -> "_2")(bc.encode(value._2, os)) + onErrorMsg("encode" -> "_3")(cc.encode(value._3, os)) + onErrorMsg("encode" -> "_4")(dc.encode(value._4, os)) + onErrorMsg("encode" -> "_5")(ec.encode(value._5, os)) + onErrorMsg("encode" -> "_6")(gc.encode(value._6, os)) + onErrorMsg("encode" -> "_7")(hc.encode(value._7, os)) + onErrorMsg("encode" -> "_8")(ic.encode(value._8, os)) + onErrorMsg("encode" -> "_9")(jc.encode(value._9, os)) + onErrorMsg("encode" -> "_10")(kc.encode(value._10, os)) + onErrorMsg("encode" -> "_11")(lc.encode(value._11, os)) + onErrorMsg("encode" -> "_12")(mc.encode(value._12, os)) + onErrorMsg("encode" -> "_13")(nc.encode(value._13, os)) + onErrorMsg("encode" -> "_14")(oc.encode(value._14, os)) + onErrorMsg("encode" -> "_15")(pc.encode(value._15, os)) + onErrorMsg("encode" -> "_16")(qc.encode(value._16, os)) + onErrorMsg("encode" -> "_17")(rc.encode(value._17, os)) + onErrorMsg("encode" -> "_18")(sc.encode(value._18, os)) + } + override def decode(is: InputStream): (A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q, R, S) = + ( + onErrorMsg("decode" -> "_1")(ac.decode(is)), + onErrorMsg("decode" -> "_2")(bc.decode(is)), + onErrorMsg("decode" -> "_3")(cc.decode(is)), + onErrorMsg("decode" -> "_4")(dc.decode(is)), + onErrorMsg("decode" -> "_5")(ec.decode(is)), + onErrorMsg("decode" -> "_6")(gc.decode(is)), + onErrorMsg("decode" -> "_7")(hc.decode(is)), + onErrorMsg("decode" -> "_8")(ic.decode(is)), + onErrorMsg("decode" -> "_9")(jc.decode(is)), + onErrorMsg("decode" -> "_10")(kc.decode(is)), + onErrorMsg("decode" -> "_11")(lc.decode(is)), + onErrorMsg("decode" -> "_12")(mc.decode(is)), + onErrorMsg("decode" -> "_13")(nc.decode(is)), + onErrorMsg("decode" -> "_14")(oc.decode(is)), + onErrorMsg("decode" -> "_15")(pc.decode(is)), + onErrorMsg("decode" -> "_16")(qc.decode(is)), + onErrorMsg("decode" -> "_17")(rc.decode(is)), + onErrorMsg("decode" -> "_18")(sc.decode(is)) + ) + + override def toString: String = + s"Tuple18Coder(_1 -> $ac, _2 -> $bc, _3 -> $cc, _4 -> $dc, _5 -> $ec, _6 -> $gc, _7 -> $hc, _8 -> $ic, _9 -> $jc, _10 -> $kc, _11 -> $lc, _12 -> $mc, _13 -> $nc, _14 -> $oc, _15 -> $pc, _16 -> $qc, _17 -> $rc, _18 -> $sc)" + + // delegate methods for determinism and equality checks + + override def verifyDeterministic(): Unit = { + val cs = List( + "_1" -> ac, + "_2" -> bc, + "_3" -> cc, + "_4" -> dc, + "_5" -> ec, + "_6" -> gc, + "_7" -> hc, + "_8" -> ic, + "_9" -> jc, + "_10" -> kc, + "_11" -> lc, + "_12" -> mc, + "_13" -> nc, + "_14" -> oc, + "_15" -> pc, + "_16" -> qc, + "_17" -> rc, + "_18" -> sc + ) + val problems = cs.flatMap { case (label, c) => + try { + c.verifyDeterministic() + Nil + } catch { + case e: NonDeterministicException => + val reason = s"field $label is using non-deterministic $c" + List(reason -> e) + } + } + + 
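+    // `problems` holds one (reason, cause) pair per non-deterministic field;
+    // the match below reports every reason but propagates only the first
+    // caught NonDeterministicException as the cause of the rethrown error.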
problems match { + case (_, e) :: _ => + val reasons = problems.map { case (reason, _) => reason } + throw new NonDeterministicException(this, reasons.asJava, e) + case Nil => + } + } + + override def consistentWithEquals(): Boolean = + ac.consistentWithEquals() && bc.consistentWithEquals() && cc.consistentWithEquals() && dc + .consistentWithEquals() && ec.consistentWithEquals() && gc.consistentWithEquals() && hc + .consistentWithEquals() && ic.consistentWithEquals() && jc.consistentWithEquals() && kc + .consistentWithEquals() && lc.consistentWithEquals() && mc.consistentWithEquals() && nc + .consistentWithEquals() && oc.consistentWithEquals() && pc.consistentWithEquals() && qc + .consistentWithEquals() && rc.consistentWithEquals() && sc.consistentWithEquals() + + override def structuralValue( + value: (A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q, R, S) + ): AnyRef = + if (consistentWithEquals()) { + value.asInstanceOf[AnyRef] + } else { + ( + ac.structuralValue(value._1), + bc.structuralValue(value._2), + cc.structuralValue(value._3), + dc.structuralValue(value._4), + ec.structuralValue(value._5), + gc.structuralValue(value._6), + hc.structuralValue(value._7), + ic.structuralValue(value._8), + jc.structuralValue(value._9), + kc.structuralValue(value._10), + lc.structuralValue(value._11), + mc.structuralValue(value._12), + nc.structuralValue(value._13), + oc.structuralValue(value._14), + pc.structuralValue(value._15), + qc.structuralValue(value._16), + rc.structuralValue(value._17), + sc.structuralValue(value._18) + ) + } + + // delegate methods for byte size estimation + override def isRegisterByteSizeObserverCheap( + value: (A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q, R, S) + ): Boolean = + ac.isRegisterByteSizeObserverCheap(value._1) && bc.isRegisterByteSizeObserverCheap( + value._2 + ) && cc.isRegisterByteSizeObserverCheap(value._3) && dc.isRegisterByteSizeObserverCheap( + value._4 + ) && ec.isRegisterByteSizeObserverCheap(value._5) && gc.isRegisterByteSizeObserverCheap( + value._6 + ) && hc.isRegisterByteSizeObserverCheap(value._7) && ic.isRegisterByteSizeObserverCheap( + value._8 + ) && jc.isRegisterByteSizeObserverCheap(value._9) && kc.isRegisterByteSizeObserverCheap( + value._10 + ) && lc.isRegisterByteSizeObserverCheap(value._11) && mc.isRegisterByteSizeObserverCheap( + value._12 + ) && nc.isRegisterByteSizeObserverCheap(value._13) && oc.isRegisterByteSizeObserverCheap( + value._14 + ) && pc.isRegisterByteSizeObserverCheap(value._15) && qc.isRegisterByteSizeObserverCheap( + value._16 + ) && rc.isRegisterByteSizeObserverCheap(value._17) && sc.isRegisterByteSizeObserverCheap( + value._18 + ) + + override def registerByteSizeObserver( + value: (A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q, R, S), + observer: ElementByteSizeObserver + ): Unit = { + ac.registerByteSizeObserver(value._1, observer) + bc.registerByteSizeObserver(value._2, observer) + cc.registerByteSizeObserver(value._3, observer) + dc.registerByteSizeObserver(value._4, observer) + ec.registerByteSizeObserver(value._5, observer) + gc.registerByteSizeObserver(value._6, observer) + hc.registerByteSizeObserver(value._7, observer) + ic.registerByteSizeObserver(value._8, observer) + jc.registerByteSizeObserver(value._9, observer) + kc.registerByteSizeObserver(value._10, observer) + lc.registerByteSizeObserver(value._11, observer) + mc.registerByteSizeObserver(value._12, observer) + nc.registerByteSizeObserver(value._13, observer) + oc.registerByteSizeObserver(value._14, observer) + 
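+    // Remaining fields follow in encode order; every component registers with
+    // the same observer, so the byte-size estimate covers the whole tuple.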
pc.registerByteSizeObserver(value._15, observer) + qc.registerByteSizeObserver(value._16, observer) + rc.registerByteSizeObserver(value._17, observer) + sc.registerByteSizeObserver(value._18, observer) + } +} + +final private[coders] class Tuple19Coder[A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q, R, S, T]( + val ac: BCoder[A], + val bc: BCoder[B], + val cc: BCoder[C], + val dc: BCoder[D], + val ec: BCoder[E], + val gc: BCoder[G], + val hc: BCoder[H], + val ic: BCoder[I], + val jc: BCoder[J], + val kc: BCoder[K], + val lc: BCoder[L], + val mc: BCoder[M], + val nc: BCoder[N], + val oc: BCoder[O], + val pc: BCoder[P], + val qc: BCoder[Q], + val rc: BCoder[R], + val sc: BCoder[S], + val tc: BCoder[T] +) extends AtomicCoder[(A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q, R, S, T)] { + private[this] val materializationStackTrace: Array[StackTraceElement] = CoderStackTrace.prepare + + @inline def onErrorMsg[TS](msg: => (String, String))(f: => TS): TS = + try { + f + } catch { + case e: Exception => + // allow Flink memory management, see WrappedBCoder#catching comment. + throw CoderStackTrace.append( + e, + Some( + s"Exception while trying to `${msg._1}` an instance" + + s" of Tuple19: Can't decode field ${msg._2}" + ), + materializationStackTrace + ) + } + + override def encode( + value: (A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q, R, S, T), + os: OutputStream + ): Unit = { + onErrorMsg("encode" -> "_1")(ac.encode(value._1, os)) + onErrorMsg("encode" -> "_2")(bc.encode(value._2, os)) + onErrorMsg("encode" -> "_3")(cc.encode(value._3, os)) + onErrorMsg("encode" -> "_4")(dc.encode(value._4, os)) + onErrorMsg("encode" -> "_5")(ec.encode(value._5, os)) + onErrorMsg("encode" -> "_6")(gc.encode(value._6, os)) + onErrorMsg("encode" -> "_7")(hc.encode(value._7, os)) + onErrorMsg("encode" -> "_8")(ic.encode(value._8, os)) + onErrorMsg("encode" -> "_9")(jc.encode(value._9, os)) + onErrorMsg("encode" -> "_10")(kc.encode(value._10, os)) + onErrorMsg("encode" -> "_11")(lc.encode(value._11, os)) + onErrorMsg("encode" -> "_12")(mc.encode(value._12, os)) + onErrorMsg("encode" -> "_13")(nc.encode(value._13, os)) + onErrorMsg("encode" -> "_14")(oc.encode(value._14, os)) + onErrorMsg("encode" -> "_15")(pc.encode(value._15, os)) + onErrorMsg("encode" -> "_16")(qc.encode(value._16, os)) + onErrorMsg("encode" -> "_17")(rc.encode(value._17, os)) + onErrorMsg("encode" -> "_18")(sc.encode(value._18, os)) + onErrorMsg("encode" -> "_19")(tc.encode(value._19, os)) + } + override def decode(is: InputStream): (A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q, R, S, T) = + ( + onErrorMsg("decode" -> "_1")(ac.decode(is)), + onErrorMsg("decode" -> "_2")(bc.decode(is)), + onErrorMsg("decode" -> "_3")(cc.decode(is)), + onErrorMsg("decode" -> "_4")(dc.decode(is)), + onErrorMsg("decode" -> "_5")(ec.decode(is)), + onErrorMsg("decode" -> "_6")(gc.decode(is)), + onErrorMsg("decode" -> "_7")(hc.decode(is)), + onErrorMsg("decode" -> "_8")(ic.decode(is)), + onErrorMsg("decode" -> "_9")(jc.decode(is)), + onErrorMsg("decode" -> "_10")(kc.decode(is)), + onErrorMsg("decode" -> "_11")(lc.decode(is)), + onErrorMsg("decode" -> "_12")(mc.decode(is)), + onErrorMsg("decode" -> "_13")(nc.decode(is)), + onErrorMsg("decode" -> "_14")(oc.decode(is)), + onErrorMsg("decode" -> "_15")(pc.decode(is)), + onErrorMsg("decode" -> "_16")(qc.decode(is)), + onErrorMsg("decode" -> "_17")(rc.decode(is)), + onErrorMsg("decode" -> "_18")(sc.decode(is)), + onErrorMsg("decode" -> "_19")(tc.decode(is)) + ) + + override def toString: String = + s"Tuple19Coder(_1 
-> $ac, _2 -> $bc, _3 -> $cc, _4 -> $dc, _5 -> $ec, _6 -> $gc, _7 -> $hc, _8 -> $ic, _9 -> $jc, _10 -> $kc, _11 -> $lc, _12 -> $mc, _13 -> $nc, _14 -> $oc, _15 -> $pc, _16 -> $qc, _17 -> $rc, _18 -> $sc, _19 -> $tc)" + + // delegate methods for determinism and equality checks + + override def verifyDeterministic(): Unit = { + val cs = List( + "_1" -> ac, + "_2" -> bc, + "_3" -> cc, + "_4" -> dc, + "_5" -> ec, + "_6" -> gc, + "_7" -> hc, + "_8" -> ic, + "_9" -> jc, + "_10" -> kc, + "_11" -> lc, + "_12" -> mc, + "_13" -> nc, + "_14" -> oc, + "_15" -> pc, + "_16" -> qc, + "_17" -> rc, + "_18" -> sc, + "_19" -> tc + ) + val problems = cs.flatMap { case (label, c) => + try { + c.verifyDeterministic() + Nil + } catch { + case e: NonDeterministicException => + val reason = s"field $label is using non-deterministic $c" + List(reason -> e) + } + } + + problems match { + case (_, e) :: _ => + val reasons = problems.map { case (reason, _) => reason } + throw new NonDeterministicException(this, reasons.asJava, e) + case Nil => + } + } + + override def consistentWithEquals(): Boolean = + ac.consistentWithEquals() && bc.consistentWithEquals() && cc.consistentWithEquals() && dc + .consistentWithEquals() && ec.consistentWithEquals() && gc.consistentWithEquals() && hc + .consistentWithEquals() && ic.consistentWithEquals() && jc.consistentWithEquals() && kc + .consistentWithEquals() && lc.consistentWithEquals() && mc.consistentWithEquals() && nc + .consistentWithEquals() && oc.consistentWithEquals() && pc.consistentWithEquals() && qc + .consistentWithEquals() && rc.consistentWithEquals() && sc.consistentWithEquals() && tc + .consistentWithEquals() + + override def structuralValue( + value: (A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q, R, S, T) + ): AnyRef = + if (consistentWithEquals()) { + value.asInstanceOf[AnyRef] + } else { + ( + ac.structuralValue(value._1), + bc.structuralValue(value._2), + cc.structuralValue(value._3), + dc.structuralValue(value._4), + ec.structuralValue(value._5), + gc.structuralValue(value._6), + hc.structuralValue(value._7), + ic.structuralValue(value._8), + jc.structuralValue(value._9), + kc.structuralValue(value._10), + lc.structuralValue(value._11), + mc.structuralValue(value._12), + nc.structuralValue(value._13), + oc.structuralValue(value._14), + pc.structuralValue(value._15), + qc.structuralValue(value._16), + rc.structuralValue(value._17), + sc.structuralValue(value._18), + tc.structuralValue(value._19) + ) + } + + // delegate methods for byte size estimation + override def isRegisterByteSizeObserverCheap( + value: (A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q, R, S, T) + ): Boolean = + ac.isRegisterByteSizeObserverCheap(value._1) && bc.isRegisterByteSizeObserverCheap( + value._2 + ) && cc.isRegisterByteSizeObserverCheap(value._3) && dc.isRegisterByteSizeObserverCheap( + value._4 + ) && ec.isRegisterByteSizeObserverCheap(value._5) && gc.isRegisterByteSizeObserverCheap( + value._6 + ) && hc.isRegisterByteSizeObserverCheap(value._7) && ic.isRegisterByteSizeObserverCheap( + value._8 + ) && jc.isRegisterByteSizeObserverCheap(value._9) && kc.isRegisterByteSizeObserverCheap( + value._10 + ) && lc.isRegisterByteSizeObserverCheap(value._11) && mc.isRegisterByteSizeObserverCheap( + value._12 + ) && nc.isRegisterByteSizeObserverCheap(value._13) && oc.isRegisterByteSizeObserverCheap( + value._14 + ) && pc.isRegisterByteSizeObserverCheap(value._15) && qc.isRegisterByteSizeObserverCheap( + value._16 + ) && rc.isRegisterByteSizeObserverCheap(value._17) && 
sc.isRegisterByteSizeObserverCheap( + value._18 + ) && tc.isRegisterByteSizeObserverCheap(value._19) + + override def registerByteSizeObserver( + value: (A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q, R, S, T), + observer: ElementByteSizeObserver + ): Unit = { + ac.registerByteSizeObserver(value._1, observer) + bc.registerByteSizeObserver(value._2, observer) + cc.registerByteSizeObserver(value._3, observer) + dc.registerByteSizeObserver(value._4, observer) + ec.registerByteSizeObserver(value._5, observer) + gc.registerByteSizeObserver(value._6, observer) + hc.registerByteSizeObserver(value._7, observer) + ic.registerByteSizeObserver(value._8, observer) + jc.registerByteSizeObserver(value._9, observer) + kc.registerByteSizeObserver(value._10, observer) + lc.registerByteSizeObserver(value._11, observer) + mc.registerByteSizeObserver(value._12, observer) + nc.registerByteSizeObserver(value._13, observer) + oc.registerByteSizeObserver(value._14, observer) + pc.registerByteSizeObserver(value._15, observer) + qc.registerByteSizeObserver(value._16, observer) + rc.registerByteSizeObserver(value._17, observer) + sc.registerByteSizeObserver(value._18, observer) + tc.registerByteSizeObserver(value._19, observer) + } +} + +final private[coders] class Tuple20Coder[ + A, + B, + C, + D, + E, + G, + H, + I, + J, + K, + L, + M, + N, + O, + P, + Q, + R, + S, + T, + U +]( + val ac: BCoder[A], + val bc: BCoder[B], + val cc: BCoder[C], + val dc: BCoder[D], + val ec: BCoder[E], + val gc: BCoder[G], + val hc: BCoder[H], + val ic: BCoder[I], + val jc: BCoder[J], + val kc: BCoder[K], + val lc: BCoder[L], + val mc: BCoder[M], + val nc: BCoder[N], + val oc: BCoder[O], + val pc: BCoder[P], + val qc: BCoder[Q], + val rc: BCoder[R], + val sc: BCoder[S], + val tc: BCoder[T], + val uc: BCoder[U] +) extends AtomicCoder[(A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U)] { + private[this] val materializationStackTrace: Array[StackTraceElement] = CoderStackTrace.prepare + + @inline def onErrorMsg[TT](msg: => (String, String))(f: => TT): TT = + try { + f + } catch { + case e: Exception => + // allow Flink memory management, see WrappedBCoder#catching comment. 
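+        // Append the stack captured when this coder was materialized (see
+        // materializationStackTrace above) so the failure points back to where
+        // the coder was created, not just the worker call site.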
+ throw CoderStackTrace.append( + e, + Some( + s"Exception while trying to `${msg._1}` an instance" + + s" of Tuple20: Can't decode field ${msg._2}" + ), + materializationStackTrace + ) + } + + override def encode( + value: (A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U), + os: OutputStream + ): Unit = { + onErrorMsg("encode" -> "_1")(ac.encode(value._1, os)) + onErrorMsg("encode" -> "_2")(bc.encode(value._2, os)) + onErrorMsg("encode" -> "_3")(cc.encode(value._3, os)) + onErrorMsg("encode" -> "_4")(dc.encode(value._4, os)) + onErrorMsg("encode" -> "_5")(ec.encode(value._5, os)) + onErrorMsg("encode" -> "_6")(gc.encode(value._6, os)) + onErrorMsg("encode" -> "_7")(hc.encode(value._7, os)) + onErrorMsg("encode" -> "_8")(ic.encode(value._8, os)) + onErrorMsg("encode" -> "_9")(jc.encode(value._9, os)) + onErrorMsg("encode" -> "_10")(kc.encode(value._10, os)) + onErrorMsg("encode" -> "_11")(lc.encode(value._11, os)) + onErrorMsg("encode" -> "_12")(mc.encode(value._12, os)) + onErrorMsg("encode" -> "_13")(nc.encode(value._13, os)) + onErrorMsg("encode" -> "_14")(oc.encode(value._14, os)) + onErrorMsg("encode" -> "_15")(pc.encode(value._15, os)) + onErrorMsg("encode" -> "_16")(qc.encode(value._16, os)) + onErrorMsg("encode" -> "_17")(rc.encode(value._17, os)) + onErrorMsg("encode" -> "_18")(sc.encode(value._18, os)) + onErrorMsg("encode" -> "_19")(tc.encode(value._19, os)) + onErrorMsg("encode" -> "_20")(uc.encode(value._20, os)) + } + override def decode( + is: InputStream + ): (A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U) = + ( + onErrorMsg("decode" -> "_1")(ac.decode(is)), + onErrorMsg("decode" -> "_2")(bc.decode(is)), + onErrorMsg("decode" -> "_3")(cc.decode(is)), + onErrorMsg("decode" -> "_4")(dc.decode(is)), + onErrorMsg("decode" -> "_5")(ec.decode(is)), + onErrorMsg("decode" -> "_6")(gc.decode(is)), + onErrorMsg("decode" -> "_7")(hc.decode(is)), + onErrorMsg("decode" -> "_8")(ic.decode(is)), + onErrorMsg("decode" -> "_9")(jc.decode(is)), + onErrorMsg("decode" -> "_10")(kc.decode(is)), + onErrorMsg("decode" -> "_11")(lc.decode(is)), + onErrorMsg("decode" -> "_12")(mc.decode(is)), + onErrorMsg("decode" -> "_13")(nc.decode(is)), + onErrorMsg("decode" -> "_14")(oc.decode(is)), + onErrorMsg("decode" -> "_15")(pc.decode(is)), + onErrorMsg("decode" -> "_16")(qc.decode(is)), + onErrorMsg("decode" -> "_17")(rc.decode(is)), + onErrorMsg("decode" -> "_18")(sc.decode(is)), + onErrorMsg("decode" -> "_19")(tc.decode(is)), + onErrorMsg("decode" -> "_20")(uc.decode(is)) + ) + + override def toString: String = + s"Tuple20Coder(_1 -> $ac, _2 -> $bc, _3 -> $cc, _4 -> $dc, _5 -> $ec, _6 -> $gc, _7 -> $hc, _8 -> $ic, _9 -> $jc, _10 -> $kc, _11 -> $lc, _12 -> $mc, _13 -> $nc, _14 -> $oc, _15 -> $pc, _16 -> $qc, _17 -> $rc, _18 -> $sc, _19 -> $tc, _20 -> $uc)" + + // delegate methods for determinism and equality checks + + override def verifyDeterministic(): Unit = { + val cs = List( + "_1" -> ac, + "_2" -> bc, + "_3" -> cc, + "_4" -> dc, + "_5" -> ec, + "_6" -> gc, + "_7" -> hc, + "_8" -> ic, + "_9" -> jc, + "_10" -> kc, + "_11" -> lc, + "_12" -> mc, + "_13" -> nc, + "_14" -> oc, + "_15" -> pc, + "_16" -> qc, + "_17" -> rc, + "_18" -> sc, + "_19" -> tc, + "_20" -> uc + ) + val problems = cs.flatMap { case (label, c) => + try { + c.verifyDeterministic() + Nil + } catch { + case e: NonDeterministicException => + val reason = s"field $label is using non-deterministic $c" + List(reason -> e) + } + } + + problems match { + case (_, e) :: _ => + val reasons = problems.map { case (reason, 
_) => reason } + throw new NonDeterministicException(this, reasons.asJava, e) + case Nil => + } + } + + override def consistentWithEquals(): Boolean = + ac.consistentWithEquals() && bc.consistentWithEquals() && cc.consistentWithEquals() && dc + .consistentWithEquals() && ec.consistentWithEquals() && gc.consistentWithEquals() && hc + .consistentWithEquals() && ic.consistentWithEquals() && jc.consistentWithEquals() && kc + .consistentWithEquals() && lc.consistentWithEquals() && mc.consistentWithEquals() && nc + .consistentWithEquals() && oc.consistentWithEquals() && pc.consistentWithEquals() && qc + .consistentWithEquals() && rc.consistentWithEquals() && sc.consistentWithEquals() && tc + .consistentWithEquals() && uc.consistentWithEquals() + + override def structuralValue( + value: (A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U) + ): AnyRef = + if (consistentWithEquals()) { + value.asInstanceOf[AnyRef] + } else { + ( + ac.structuralValue(value._1), + bc.structuralValue(value._2), + cc.structuralValue(value._3), + dc.structuralValue(value._4), + ec.structuralValue(value._5), + gc.structuralValue(value._6), + hc.structuralValue(value._7), + ic.structuralValue(value._8), + jc.structuralValue(value._9), + kc.structuralValue(value._10), + lc.structuralValue(value._11), + mc.structuralValue(value._12), + nc.structuralValue(value._13), + oc.structuralValue(value._14), + pc.structuralValue(value._15), + qc.structuralValue(value._16), + rc.structuralValue(value._17), + sc.structuralValue(value._18), + tc.structuralValue(value._19), + uc.structuralValue(value._20) + ) + } + + // delegate methods for byte size estimation + override def isRegisterByteSizeObserverCheap( + value: (A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U) + ): Boolean = + ac.isRegisterByteSizeObserverCheap(value._1) && bc.isRegisterByteSizeObserverCheap( + value._2 + ) && cc.isRegisterByteSizeObserverCheap(value._3) && dc.isRegisterByteSizeObserverCheap( + value._4 + ) && ec.isRegisterByteSizeObserverCheap(value._5) && gc.isRegisterByteSizeObserverCheap( + value._6 + ) && hc.isRegisterByteSizeObserverCheap(value._7) && ic.isRegisterByteSizeObserverCheap( + value._8 + ) && jc.isRegisterByteSizeObserverCheap(value._9) && kc.isRegisterByteSizeObserverCheap( + value._10 + ) && lc.isRegisterByteSizeObserverCheap(value._11) && mc.isRegisterByteSizeObserverCheap( + value._12 + ) && nc.isRegisterByteSizeObserverCheap(value._13) && oc.isRegisterByteSizeObserverCheap( + value._14 + ) && pc.isRegisterByteSizeObserverCheap(value._15) && qc.isRegisterByteSizeObserverCheap( + value._16 + ) && rc.isRegisterByteSizeObserverCheap(value._17) && sc.isRegisterByteSizeObserverCheap( + value._18 + ) && tc.isRegisterByteSizeObserverCheap(value._19) && uc.isRegisterByteSizeObserverCheap( + value._20 + ) + + override def registerByteSizeObserver( + value: (A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U), + observer: ElementByteSizeObserver + ): Unit = { + ac.registerByteSizeObserver(value._1, observer) + bc.registerByteSizeObserver(value._2, observer) + cc.registerByteSizeObserver(value._3, observer) + dc.registerByteSizeObserver(value._4, observer) + ec.registerByteSizeObserver(value._5, observer) + gc.registerByteSizeObserver(value._6, observer) + hc.registerByteSizeObserver(value._7, observer) + ic.registerByteSizeObserver(value._8, observer) + jc.registerByteSizeObserver(value._9, observer) + kc.registerByteSizeObserver(value._10, observer) + lc.registerByteSizeObserver(value._11, observer) + 
mc.registerByteSizeObserver(value._12, observer) + nc.registerByteSizeObserver(value._13, observer) + oc.registerByteSizeObserver(value._14, observer) + pc.registerByteSizeObserver(value._15, observer) + qc.registerByteSizeObserver(value._16, observer) + rc.registerByteSizeObserver(value._17, observer) + sc.registerByteSizeObserver(value._18, observer) + tc.registerByteSizeObserver(value._19, observer) + uc.registerByteSizeObserver(value._20, observer) + } +} + +final private[coders] class Tuple21Coder[ + A, + B, + C, + D, + E, + G, + H, + I, + J, + K, + L, + M, + N, + O, + P, + Q, + R, + S, + T, + U, + V +]( + val ac: BCoder[A], + val bc: BCoder[B], + val cc: BCoder[C], + val dc: BCoder[D], + val ec: BCoder[E], + val gc: BCoder[G], + val hc: BCoder[H], + val ic: BCoder[I], + val jc: BCoder[J], + val kc: BCoder[K], + val lc: BCoder[L], + val mc: BCoder[M], + val nc: BCoder[N], + val oc: BCoder[O], + val pc: BCoder[P], + val qc: BCoder[Q], + val rc: BCoder[R], + val sc: BCoder[S], + val tc: BCoder[T], + val uc: BCoder[U], + val vc: BCoder[V] +) extends AtomicCoder[(A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V)] { + private[this] val materializationStackTrace: Array[StackTraceElement] = CoderStackTrace.prepare + + @inline def onErrorMsg[TU](msg: => (String, String))(f: => TU): TU = + try { + f + } catch { + case e: Exception => + // allow Flink memory management, see WrappedBCoder#catching comment. + throw CoderStackTrace.append( + e, + Some( + s"Exception while trying to `${msg._1}` an instance" + + s" of Tuple21: Can't decode field ${msg._2}" + ), + materializationStackTrace + ) + } + + override def encode( + value: (A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V), + os: OutputStream + ): Unit = { + onErrorMsg("encode" -> "_1")(ac.encode(value._1, os)) + onErrorMsg("encode" -> "_2")(bc.encode(value._2, os)) + onErrorMsg("encode" -> "_3")(cc.encode(value._3, os)) + onErrorMsg("encode" -> "_4")(dc.encode(value._4, os)) + onErrorMsg("encode" -> "_5")(ec.encode(value._5, os)) + onErrorMsg("encode" -> "_6")(gc.encode(value._6, os)) + onErrorMsg("encode" -> "_7")(hc.encode(value._7, os)) + onErrorMsg("encode" -> "_8")(ic.encode(value._8, os)) + onErrorMsg("encode" -> "_9")(jc.encode(value._9, os)) + onErrorMsg("encode" -> "_10")(kc.encode(value._10, os)) + onErrorMsg("encode" -> "_11")(lc.encode(value._11, os)) + onErrorMsg("encode" -> "_12")(mc.encode(value._12, os)) + onErrorMsg("encode" -> "_13")(nc.encode(value._13, os)) + onErrorMsg("encode" -> "_14")(oc.encode(value._14, os)) + onErrorMsg("encode" -> "_15")(pc.encode(value._15, os)) + onErrorMsg("encode" -> "_16")(qc.encode(value._16, os)) + onErrorMsg("encode" -> "_17")(rc.encode(value._17, os)) + onErrorMsg("encode" -> "_18")(sc.encode(value._18, os)) + onErrorMsg("encode" -> "_19")(tc.encode(value._19, os)) + onErrorMsg("encode" -> "_20")(uc.encode(value._20, os)) + onErrorMsg("encode" -> "_21")(vc.encode(value._21, os)) + } + override def decode( + is: InputStream + ): (A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V) = + ( + onErrorMsg("decode" -> "_1")(ac.decode(is)), + onErrorMsg("decode" -> "_2")(bc.decode(is)), + onErrorMsg("decode" -> "_3")(cc.decode(is)), + onErrorMsg("decode" -> "_4")(dc.decode(is)), + onErrorMsg("decode" -> "_5")(ec.decode(is)), + onErrorMsg("decode" -> "_6")(gc.decode(is)), + onErrorMsg("decode" -> "_7")(hc.decode(is)), + onErrorMsg("decode" -> "_8")(ic.decode(is)), + onErrorMsg("decode" -> "_9")(jc.decode(is)), + onErrorMsg("decode" -> "_10")(kc.decode(is)), + 
onErrorMsg("decode" -> "_11")(lc.decode(is)), + onErrorMsg("decode" -> "_12")(mc.decode(is)), + onErrorMsg("decode" -> "_13")(nc.decode(is)), + onErrorMsg("decode" -> "_14")(oc.decode(is)), + onErrorMsg("decode" -> "_15")(pc.decode(is)), + onErrorMsg("decode" -> "_16")(qc.decode(is)), + onErrorMsg("decode" -> "_17")(rc.decode(is)), + onErrorMsg("decode" -> "_18")(sc.decode(is)), + onErrorMsg("decode" -> "_19")(tc.decode(is)), + onErrorMsg("decode" -> "_20")(uc.decode(is)), + onErrorMsg("decode" -> "_21")(vc.decode(is)) + ) + + override def toString: String = + s"Tuple21Coder(_1 -> $ac, _2 -> $bc, _3 -> $cc, _4 -> $dc, _5 -> $ec, _6 -> $gc, _7 -> $hc, _8 -> $ic, _9 -> $jc, _10 -> $kc, _11 -> $lc, _12 -> $mc, _13 -> $nc, _14 -> $oc, _15 -> $pc, _16 -> $qc, _17 -> $rc, _18 -> $sc, _19 -> $tc, _20 -> $uc, _21 -> $vc)" + + // delegate methods for determinism and equality checks + + override def verifyDeterministic(): Unit = { + val cs = List( + "_1" -> ac, + "_2" -> bc, + "_3" -> cc, + "_4" -> dc, + "_5" -> ec, + "_6" -> gc, + "_7" -> hc, + "_8" -> ic, + "_9" -> jc, + "_10" -> kc, + "_11" -> lc, + "_12" -> mc, + "_13" -> nc, + "_14" -> oc, + "_15" -> pc, + "_16" -> qc, + "_17" -> rc, + "_18" -> sc, + "_19" -> tc, + "_20" -> uc, + "_21" -> vc + ) + val problems = cs.flatMap { case (label, c) => + try { + c.verifyDeterministic() + Nil + } catch { + case e: NonDeterministicException => + val reason = s"field $label is using non-deterministic $c" + List(reason -> e) + } + } + + problems match { + case (_, e) :: _ => + val reasons = problems.map { case (reason, _) => reason } + throw new NonDeterministicException(this, reasons.asJava, e) + case Nil => + } + } + + override def consistentWithEquals(): Boolean = + ac.consistentWithEquals() && bc.consistentWithEquals() && cc.consistentWithEquals() && dc + .consistentWithEquals() && ec.consistentWithEquals() && gc.consistentWithEquals() && hc + .consistentWithEquals() && ic.consistentWithEquals() && jc.consistentWithEquals() && kc + .consistentWithEquals() && lc.consistentWithEquals() && mc.consistentWithEquals() && nc + .consistentWithEquals() && oc.consistentWithEquals() && pc.consistentWithEquals() && qc + .consistentWithEquals() && rc.consistentWithEquals() && sc.consistentWithEquals() && tc + .consistentWithEquals() && uc.consistentWithEquals() && vc.consistentWithEquals() + + override def structuralValue( + value: (A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V) + ): AnyRef = + if (consistentWithEquals()) { + value.asInstanceOf[AnyRef] + } else { + ( + ac.structuralValue(value._1), + bc.structuralValue(value._2), + cc.structuralValue(value._3), + dc.structuralValue(value._4), + ec.structuralValue(value._5), + gc.structuralValue(value._6), + hc.structuralValue(value._7), + ic.structuralValue(value._8), + jc.structuralValue(value._9), + kc.structuralValue(value._10), + lc.structuralValue(value._11), + mc.structuralValue(value._12), + nc.structuralValue(value._13), + oc.structuralValue(value._14), + pc.structuralValue(value._15), + qc.structuralValue(value._16), + rc.structuralValue(value._17), + sc.structuralValue(value._18), + tc.structuralValue(value._19), + uc.structuralValue(value._20), + vc.structuralValue(value._21) + ) + } + + // delegate methods for byte size estimation + override def isRegisterByteSizeObserverCheap( + value: (A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V) + ): Boolean = + ac.isRegisterByteSizeObserverCheap(value._1) && bc.isRegisterByteSizeObserverCheap( + value._2 + ) && 
cc.isRegisterByteSizeObserverCheap(value._3) && dc.isRegisterByteSizeObserverCheap( + value._4 + ) && ec.isRegisterByteSizeObserverCheap(value._5) && gc.isRegisterByteSizeObserverCheap( + value._6 + ) && hc.isRegisterByteSizeObserverCheap(value._7) && ic.isRegisterByteSizeObserverCheap( + value._8 + ) && jc.isRegisterByteSizeObserverCheap(value._9) && kc.isRegisterByteSizeObserverCheap( + value._10 + ) && lc.isRegisterByteSizeObserverCheap(value._11) && mc.isRegisterByteSizeObserverCheap( + value._12 + ) && nc.isRegisterByteSizeObserverCheap(value._13) && oc.isRegisterByteSizeObserverCheap( + value._14 + ) && pc.isRegisterByteSizeObserverCheap(value._15) && qc.isRegisterByteSizeObserverCheap( + value._16 + ) && rc.isRegisterByteSizeObserverCheap(value._17) && sc.isRegisterByteSizeObserverCheap( + value._18 + ) && tc.isRegisterByteSizeObserverCheap(value._19) && uc.isRegisterByteSizeObserverCheap( + value._20 + ) && vc.isRegisterByteSizeObserverCheap(value._21) + + override def registerByteSizeObserver( + value: (A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V), + observer: ElementByteSizeObserver + ): Unit = { + ac.registerByteSizeObserver(value._1, observer) + bc.registerByteSizeObserver(value._2, observer) + cc.registerByteSizeObserver(value._3, observer) + dc.registerByteSizeObserver(value._4, observer) + ec.registerByteSizeObserver(value._5, observer) + gc.registerByteSizeObserver(value._6, observer) + hc.registerByteSizeObserver(value._7, observer) + ic.registerByteSizeObserver(value._8, observer) + jc.registerByteSizeObserver(value._9, observer) + kc.registerByteSizeObserver(value._10, observer) + lc.registerByteSizeObserver(value._11, observer) + mc.registerByteSizeObserver(value._12, observer) + nc.registerByteSizeObserver(value._13, observer) + oc.registerByteSizeObserver(value._14, observer) + pc.registerByteSizeObserver(value._15, observer) + qc.registerByteSizeObserver(value._16, observer) + rc.registerByteSizeObserver(value._17, observer) + sc.registerByteSizeObserver(value._18, observer) + tc.registerByteSizeObserver(value._19, observer) + uc.registerByteSizeObserver(value._20, observer) + vc.registerByteSizeObserver(value._21, observer) + } +} + +final private[coders] class Tuple22Coder[ + A, + B, + C, + D, + E, + G, + H, + I, + J, + K, + L, + M, + N, + O, + P, + Q, + R, + S, + T, + U, + V, + W +]( + val ac: BCoder[A], + val bc: BCoder[B], + val cc: BCoder[C], + val dc: BCoder[D], + val ec: BCoder[E], + val gc: BCoder[G], + val hc: BCoder[H], + val ic: BCoder[I], + val jc: BCoder[J], + val kc: BCoder[K], + val lc: BCoder[L], + val mc: BCoder[M], + val nc: BCoder[N], + val oc: BCoder[O], + val pc: BCoder[P], + val qc: BCoder[Q], + val rc: BCoder[R], + val sc: BCoder[S], + val tc: BCoder[T], + val uc: BCoder[U], + val vc: BCoder[V], + val wc: BCoder[W] +) extends AtomicCoder[(A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V, W)] { + private[this] val materializationStackTrace: Array[StackTraceElement] = CoderStackTrace.prepare + + @inline def onErrorMsg[TV](msg: => (String, String))(f: => TV): TV = + try { + f + } catch { + case e: Exception => + // allow Flink memory management, see WrappedBCoder#catching comment. 
+ throw CoderStackTrace.append( + e, + Some( + s"Exception while trying to `${msg._1}` an instance" + + s" of Tuple22: Can't decode field ${msg._2}" + ), + materializationStackTrace + ) + } + + override def encode( + value: (A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V, W), + os: OutputStream + ): Unit = { + onErrorMsg("encode" -> "_1")(ac.encode(value._1, os)) + onErrorMsg("encode" -> "_2")(bc.encode(value._2, os)) + onErrorMsg("encode" -> "_3")(cc.encode(value._3, os)) + onErrorMsg("encode" -> "_4")(dc.encode(value._4, os)) + onErrorMsg("encode" -> "_5")(ec.encode(value._5, os)) + onErrorMsg("encode" -> "_6")(gc.encode(value._6, os)) + onErrorMsg("encode" -> "_7")(hc.encode(value._7, os)) + onErrorMsg("encode" -> "_8")(ic.encode(value._8, os)) + onErrorMsg("encode" -> "_9")(jc.encode(value._9, os)) + onErrorMsg("encode" -> "_10")(kc.encode(value._10, os)) + onErrorMsg("encode" -> "_11")(lc.encode(value._11, os)) + onErrorMsg("encode" -> "_12")(mc.encode(value._12, os)) + onErrorMsg("encode" -> "_13")(nc.encode(value._13, os)) + onErrorMsg("encode" -> "_14")(oc.encode(value._14, os)) + onErrorMsg("encode" -> "_15")(pc.encode(value._15, os)) + onErrorMsg("encode" -> "_16")(qc.encode(value._16, os)) + onErrorMsg("encode" -> "_17")(rc.encode(value._17, os)) + onErrorMsg("encode" -> "_18")(sc.encode(value._18, os)) + onErrorMsg("encode" -> "_19")(tc.encode(value._19, os)) + onErrorMsg("encode" -> "_20")(uc.encode(value._20, os)) + onErrorMsg("encode" -> "_21")(vc.encode(value._21, os)) + onErrorMsg("encode" -> "_22")(wc.encode(value._22, os)) + } + override def decode( + is: InputStream + ): (A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V, W) = + ( + onErrorMsg("decode" -> "_1")(ac.decode(is)), + onErrorMsg("decode" -> "_2")(bc.decode(is)), + onErrorMsg("decode" -> "_3")(cc.decode(is)), + onErrorMsg("decode" -> "_4")(dc.decode(is)), + onErrorMsg("decode" -> "_5")(ec.decode(is)), + onErrorMsg("decode" -> "_6")(gc.decode(is)), + onErrorMsg("decode" -> "_7")(hc.decode(is)), + onErrorMsg("decode" -> "_8")(ic.decode(is)), + onErrorMsg("decode" -> "_9")(jc.decode(is)), + onErrorMsg("decode" -> "_10")(kc.decode(is)), + onErrorMsg("decode" -> "_11")(lc.decode(is)), + onErrorMsg("decode" -> "_12")(mc.decode(is)), + onErrorMsg("decode" -> "_13")(nc.decode(is)), + onErrorMsg("decode" -> "_14")(oc.decode(is)), + onErrorMsg("decode" -> "_15")(pc.decode(is)), + onErrorMsg("decode" -> "_16")(qc.decode(is)), + onErrorMsg("decode" -> "_17")(rc.decode(is)), + onErrorMsg("decode" -> "_18")(sc.decode(is)), + onErrorMsg("decode" -> "_19")(tc.decode(is)), + onErrorMsg("decode" -> "_20")(uc.decode(is)), + onErrorMsg("decode" -> "_21")(vc.decode(is)), + onErrorMsg("decode" -> "_22")(wc.decode(is)) + ) + + override def toString: String = + s"Tuple22Coder(_1 -> $ac, _2 -> $bc, _3 -> $cc, _4 -> $dc, _5 -> $ec, _6 -> $gc, _7 -> $hc, _8 -> $ic, _9 -> $jc, _10 -> $kc, _11 -> $lc, _12 -> $mc, _13 -> $nc, _14 -> $oc, _15 -> $pc, _16 -> $qc, _17 -> $rc, _18 -> $sc, _19 -> $tc, _20 -> $uc, _21 -> $vc, _22 -> $wc)" + + // delegate methods for determinism and equality checks + + override def verifyDeterministic(): Unit = { + val cs = List( + "_1" -> ac, + "_2" -> bc, + "_3" -> cc, + "_4" -> dc, + "_5" -> ec, + "_6" -> gc, + "_7" -> hc, + "_8" -> ic, + "_9" -> jc, + "_10" -> kc, + "_11" -> lc, + "_12" -> mc, + "_13" -> nc, + "_14" -> oc, + "_15" -> pc, + "_16" -> qc, + "_17" -> rc, + "_18" -> sc, + "_19" -> tc, + "_20" -> uc, + "_21" -> vc, + "_22" -> wc + ) + val problems = cs.flatMap { case 
(label, c) => + try { + c.verifyDeterministic() + Nil + } catch { + case e: NonDeterministicException => + val reason = s"field $label is using non-deterministic $c" + List(reason -> e) + } + } + + problems match { + case (_, e) :: _ => + val reasons = problems.map { case (reason, _) => reason } + throw new NonDeterministicException(this, reasons.asJava, e) + case Nil => + } + } + + override def consistentWithEquals(): Boolean = + ac.consistentWithEquals() && bc.consistentWithEquals() && cc.consistentWithEquals() && dc + .consistentWithEquals() && ec.consistentWithEquals() && gc.consistentWithEquals() && hc + .consistentWithEquals() && ic.consistentWithEquals() && jc.consistentWithEquals() && kc + .consistentWithEquals() && lc.consistentWithEquals() && mc.consistentWithEquals() && nc + .consistentWithEquals() && oc.consistentWithEquals() && pc.consistentWithEquals() && qc + .consistentWithEquals() && rc.consistentWithEquals() && sc.consistentWithEquals() && tc + .consistentWithEquals() && uc.consistentWithEquals() && vc.consistentWithEquals() && wc + .consistentWithEquals() + + override def structuralValue( + value: (A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V, W) + ): AnyRef = + if (consistentWithEquals()) { + value.asInstanceOf[AnyRef] + } else { + ( + ac.structuralValue(value._1), + bc.structuralValue(value._2), + cc.structuralValue(value._3), + dc.structuralValue(value._4), + ec.structuralValue(value._5), + gc.structuralValue(value._6), + hc.structuralValue(value._7), + ic.structuralValue(value._8), + jc.structuralValue(value._9), + kc.structuralValue(value._10), + lc.structuralValue(value._11), + mc.structuralValue(value._12), + nc.structuralValue(value._13), + oc.structuralValue(value._14), + pc.structuralValue(value._15), + qc.structuralValue(value._16), + rc.structuralValue(value._17), + sc.structuralValue(value._18), + tc.structuralValue(value._19), + uc.structuralValue(value._20), + vc.structuralValue(value._21), + wc.structuralValue(value._22) + ) + } + + // delegate methods for byte size estimation + override def isRegisterByteSizeObserverCheap( + value: (A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V, W) + ): Boolean = + ac.isRegisterByteSizeObserverCheap(value._1) && bc.isRegisterByteSizeObserverCheap( + value._2 + ) && cc.isRegisterByteSizeObserverCheap(value._3) && dc.isRegisterByteSizeObserverCheap( + value._4 + ) && ec.isRegisterByteSizeObserverCheap(value._5) && gc.isRegisterByteSizeObserverCheap( + value._6 + ) && hc.isRegisterByteSizeObserverCheap(value._7) && ic.isRegisterByteSizeObserverCheap( + value._8 + ) && jc.isRegisterByteSizeObserverCheap(value._9) && kc.isRegisterByteSizeObserverCheap( + value._10 + ) && lc.isRegisterByteSizeObserverCheap(value._11) && mc.isRegisterByteSizeObserverCheap( + value._12 + ) && nc.isRegisterByteSizeObserverCheap(value._13) && oc.isRegisterByteSizeObserverCheap( + value._14 + ) && pc.isRegisterByteSizeObserverCheap(value._15) && qc.isRegisterByteSizeObserverCheap( + value._16 + ) && rc.isRegisterByteSizeObserverCheap(value._17) && sc.isRegisterByteSizeObserverCheap( + value._18 + ) && tc.isRegisterByteSizeObserverCheap(value._19) && uc.isRegisterByteSizeObserverCheap( + value._20 + ) && vc.isRegisterByteSizeObserverCheap(value._21) && wc.isRegisterByteSizeObserverCheap( + value._22 + ) + + override def registerByteSizeObserver( + value: (A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V, W), + observer: ElementByteSizeObserver + ): Unit = { + ac.registerByteSizeObserver(value._1, 
observer) + bc.registerByteSizeObserver(value._2, observer) + cc.registerByteSizeObserver(value._3, observer) + dc.registerByteSizeObserver(value._4, observer) + ec.registerByteSizeObserver(value._5, observer) + gc.registerByteSizeObserver(value._6, observer) + hc.registerByteSizeObserver(value._7, observer) + ic.registerByteSizeObserver(value._8, observer) + jc.registerByteSizeObserver(value._9, observer) + kc.registerByteSizeObserver(value._10, observer) + lc.registerByteSizeObserver(value._11, observer) + mc.registerByteSizeObserver(value._12, observer) + nc.registerByteSizeObserver(value._13, observer) + oc.registerByteSizeObserver(value._14, observer) + pc.registerByteSizeObserver(value._15, observer) + qc.registerByteSizeObserver(value._16, observer) + rc.registerByteSizeObserver(value._17, observer) + sc.registerByteSizeObserver(value._18, observer) + tc.registerByteSizeObserver(value._19, observer) + uc.registerByteSizeObserver(value._20, observer) + vc.registerByteSizeObserver(value._21, observer) + wc.registerByteSizeObserver(value._22, observer) + } +} + +trait TupleCoders { + + implicit def tuple2Coder[A, B](implicit CA: Coder[A], CB: Coder[B]): Coder[(A, B)] = + Coder.transform(CA) { ac => + Coder.transform(CB)(bc => Coder.beam(new Tuple2Coder[A, B](ac, bc))) + } + + implicit def tuple3Coder[A, B, C](implicit + CA: Coder[A], + CB: Coder[B], + CC: Coder[C] + ): Coder[(A, B, C)] = + Coder.transform(CA) { ac => + Coder.transform(CB) { bc => + Coder.transform(CC)(cc => Coder.beam(new Tuple3Coder[A, B, C](ac, bc, cc))) + } + } + + implicit def tuple4Coder[A, B, C, D](implicit + CA: Coder[A], + CB: Coder[B], + CC: Coder[C], + CD: Coder[D] + ): Coder[(A, B, C, D)] = + Coder.transform(CA) { ac => + Coder.transform(CB) { bc => + Coder.transform(CC) { cc => + Coder.transform(CD)(dc => Coder.beam(new Tuple4Coder[A, B, C, D](ac, bc, cc, dc))) + } + } + } + + implicit def tuple5Coder[A, B, C, D, E](implicit + CA: Coder[A], + CB: Coder[B], + CC: Coder[C], + CD: Coder[D], + CE: Coder[E] + ): Coder[(A, B, C, D, E)] = + Coder.transform(CA) { ac => + Coder.transform(CB) { bc => + Coder.transform(CC) { cc => + Coder.transform(CD) { dc => + Coder.transform(CE)(ec => + Coder.beam(new Tuple5Coder[A, B, C, D, E](ac, bc, cc, dc, ec)) + ) + } + } + } + } + + implicit def tuple6Coder[A, B, C, D, E, G](implicit + CA: Coder[A], + CB: Coder[B], + CC: Coder[C], + CD: Coder[D], + CE: Coder[E], + CG: Coder[G] + ): Coder[(A, B, C, D, E, G)] = + Coder.transform(CA) { ac => + Coder.transform(CB) { bc => + Coder.transform(CC) { cc => + Coder.transform(CD) { dc => + Coder.transform(CE) { ec => + Coder.transform(CG)(gc => + Coder.beam(new Tuple6Coder[A, B, C, D, E, G](ac, bc, cc, dc, ec, gc)) + ) + } + } + } + } + } + + implicit def tuple7Coder[A, B, C, D, E, G, H](implicit + CA: Coder[A], + CB: Coder[B], + CC: Coder[C], + CD: Coder[D], + CE: Coder[E], + CG: Coder[G], + CH: Coder[H] + ): Coder[(A, B, C, D, E, G, H)] = + Coder.transform(CA) { ac => + Coder.transform(CB) { bc => + Coder.transform(CC) { cc => + Coder.transform(CD) { dc => + Coder.transform(CE) { ec => + Coder.transform(CG) { gc => + Coder.transform(CH)(hc => + Coder.beam(new Tuple7Coder[A, B, C, D, E, G, H](ac, bc, cc, dc, ec, gc, hc)) + ) + } + } + } + } + } + } + + implicit def tuple8Coder[A, B, C, D, E, G, H, I](implicit + CA: Coder[A], + CB: Coder[B], + CC: Coder[C], + CD: Coder[D], + CE: Coder[E], + CG: Coder[G], + CH: Coder[H], + CI: Coder[I] + ): Coder[(A, B, C, D, E, G, H, I)] = + Coder.transform(CA) { ac => + Coder.transform(CB) { bc => + 
Coder.transform(CC) { cc => + Coder.transform(CD) { dc => + Coder.transform(CE) { ec => + Coder.transform(CG) { gc => + Coder.transform(CH) { hc => + Coder.transform(CI)(ic => + Coder.beam( + new Tuple8Coder[A, B, C, D, E, G, H, I](ac, bc, cc, dc, ec, gc, hc, ic) + ) + ) + } + } + } + } + } + } + } + + implicit def tuple9Coder[A, B, C, D, E, G, H, I, J](implicit + CA: Coder[A], + CB: Coder[B], + CC: Coder[C], + CD: Coder[D], + CE: Coder[E], + CG: Coder[G], + CH: Coder[H], + CI: Coder[I], + CJ: Coder[J] + ): Coder[(A, B, C, D, E, G, H, I, J)] = + Coder.transform(CA) { ac => + Coder.transform(CB) { bc => + Coder.transform(CC) { cc => + Coder.transform(CD) { dc => + Coder.transform(CE) { ec => + Coder.transform(CG) { gc => + Coder.transform(CH) { hc => + Coder.transform(CI) { ic => + Coder.transform(CJ)(jc => + Coder.beam( + new Tuple9Coder[A, B, C, D, E, G, H, I, J]( + ac, + bc, + cc, + dc, + ec, + gc, + hc, + ic, + jc + ) + ) + ) + } + } + } + } + } + } + } + } + + implicit def tuple10Coder[A, B, C, D, E, G, H, I, J, K](implicit + CA: Coder[A], + CB: Coder[B], + CC: Coder[C], + CD: Coder[D], + CE: Coder[E], + CG: Coder[G], + CH: Coder[H], + CI: Coder[I], + CJ: Coder[J], + CK: Coder[K] + ): Coder[(A, B, C, D, E, G, H, I, J, K)] = + Coder.transform(CA) { ac => + Coder.transform(CB) { bc => + Coder.transform(CC) { cc => + Coder.transform(CD) { dc => + Coder.transform(CE) { ec => + Coder.transform(CG) { gc => + Coder.transform(CH) { hc => + Coder.transform(CI) { ic => + Coder.transform(CJ) { jc => + Coder.transform(CK)(kc => + Coder.beam( + new Tuple10Coder[A, B, C, D, E, G, H, I, J, K]( + ac, + bc, + cc, + dc, + ec, + gc, + hc, + ic, + jc, + kc + ) + ) + ) + } + } + } + } + } + } + } + } + } + + implicit def tuple11Coder[A, B, C, D, E, G, H, I, J, K, L](implicit + CA: Coder[A], + CB: Coder[B], + CC: Coder[C], + CD: Coder[D], + CE: Coder[E], + CG: Coder[G], + CH: Coder[H], + CI: Coder[I], + CJ: Coder[J], + CK: Coder[K], + CL: Coder[L] + ): Coder[(A, B, C, D, E, G, H, I, J, K, L)] = + Coder.transform(CA) { ac => + Coder.transform(CB) { bc => + Coder.transform(CC) { cc => + Coder.transform(CD) { dc => + Coder.transform(CE) { ec => + Coder.transform(CG) { gc => + Coder.transform(CH) { hc => + Coder.transform(CI) { ic => + Coder.transform(CJ) { jc => + Coder.transform(CK) { kc => + Coder.transform(CL)(lc => + Coder.beam( + new Tuple11Coder[A, B, C, D, E, G, H, I, J, K, L]( + ac, + bc, + cc, + dc, + ec, + gc, + hc, + ic, + jc, + kc, + lc + ) + ) + ) + } + } + } + } + } + } + } + } + } + } + + implicit def tuple12Coder[A, B, C, D, E, G, H, I, J, K, L, M](implicit + CA: Coder[A], + CB: Coder[B], + CC: Coder[C], + CD: Coder[D], + CE: Coder[E], + CG: Coder[G], + CH: Coder[H], + CI: Coder[I], + CJ: Coder[J], + CK: Coder[K], + CL: Coder[L], + CM: Coder[M] + ): Coder[(A, B, C, D, E, G, H, I, J, K, L, M)] = + Coder.transform(CA) { ac => + Coder.transform(CB) { bc => + Coder.transform(CC) { cc => + Coder.transform(CD) { dc => + Coder.transform(CE) { ec => + Coder.transform(CG) { gc => + Coder.transform(CH) { hc => + Coder.transform(CI) { ic => + Coder.transform(CJ) { jc => + Coder.transform(CK) { kc => + Coder.transform(CL) { lc => + Coder.transform(CM)(mc => + Coder.beam( + new Tuple12Coder[A, B, C, D, E, G, H, I, J, K, L, M]( + ac, + bc, + cc, + dc, + ec, + gc, + hc, + ic, + jc, + kc, + lc, + mc + ) + ) + ) + } + } + } + } + } + } + } + } + } + } + } + + implicit def tuple13Coder[A, B, C, D, E, G, H, I, J, K, L, M, N](implicit + CA: Coder[A], + CB: Coder[B], + CC: Coder[C], + CD: Coder[D], + CE: Coder[E], 
+ CG: Coder[G], + CH: Coder[H], + CI: Coder[I], + CJ: Coder[J], + CK: Coder[K], + CL: Coder[L], + CM: Coder[M], + CN: Coder[N] + ): Coder[(A, B, C, D, E, G, H, I, J, K, L, M, N)] = + Coder.transform(CA) { ac => + Coder.transform(CB) { bc => + Coder.transform(CC) { cc => + Coder.transform(CD) { dc => + Coder.transform(CE) { ec => + Coder.transform(CG) { gc => + Coder.transform(CH) { hc => + Coder.transform(CI) { ic => + Coder.transform(CJ) { jc => + Coder.transform(CK) { kc => + Coder.transform(CL) { lc => + Coder.transform(CM) { mc => + Coder.transform(CN)(nc => + Coder.beam( + new Tuple13Coder[A, B, C, D, E, G, H, I, J, K, L, M, N]( + ac, + bc, + cc, + dc, + ec, + gc, + hc, + ic, + jc, + kc, + lc, + mc, + nc + ) + ) + ) + } + } + } + } + } + } + } + } + } + } + } + } + + implicit def tuple14Coder[A, B, C, D, E, G, H, I, J, K, L, M, N, O](implicit + CA: Coder[A], + CB: Coder[B], + CC: Coder[C], + CD: Coder[D], + CE: Coder[E], + CG: Coder[G], + CH: Coder[H], + CI: Coder[I], + CJ: Coder[J], + CK: Coder[K], + CL: Coder[L], + CM: Coder[M], + CN: Coder[N], + CO: Coder[O] + ): Coder[(A, B, C, D, E, G, H, I, J, K, L, M, N, O)] = + Coder.transform(CA) { ac => + Coder.transform(CB) { bc => + Coder.transform(CC) { cc => + Coder.transform(CD) { dc => + Coder.transform(CE) { ec => + Coder.transform(CG) { gc => + Coder.transform(CH) { hc => + Coder.transform(CI) { ic => + Coder.transform(CJ) { jc => + Coder.transform(CK) { kc => + Coder.transform(CL) { lc => + Coder.transform(CM) { mc => + Coder.transform(CN) { nc => + Coder.transform(CO)(oc => + Coder.beam( + new Tuple14Coder[A, B, C, D, E, G, H, I, J, K, L, M, N, O]( + ac, + bc, + cc, + dc, + ec, + gc, + hc, + ic, + jc, + kc, + lc, + mc, + nc, + oc + ) + ) + ) + } + } + } + } + } + } + } + } + } + } + } + } + } + + implicit def tuple15Coder[A, B, C, D, E, G, H, I, J, K, L, M, N, O, P](implicit + CA: Coder[A], + CB: Coder[B], + CC: Coder[C], + CD: Coder[D], + CE: Coder[E], + CG: Coder[G], + CH: Coder[H], + CI: Coder[I], + CJ: Coder[J], + CK: Coder[K], + CL: Coder[L], + CM: Coder[M], + CN: Coder[N], + CO: Coder[O], + CP: Coder[P] + ): Coder[(A, B, C, D, E, G, H, I, J, K, L, M, N, O, P)] = + Coder.transform(CA) { ac => + Coder.transform(CB) { bc => + Coder.transform(CC) { cc => + Coder.transform(CD) { dc => + Coder.transform(CE) { ec => + Coder.transform(CG) { gc => + Coder.transform(CH) { hc => + Coder.transform(CI) { ic => + Coder.transform(CJ) { jc => + Coder.transform(CK) { kc => + Coder.transform(CL) { lc => + Coder.transform(CM) { mc => + Coder.transform(CN) { nc => + Coder.transform(CO) { oc => + Coder.transform(CP)(pc => + Coder.beam( + new Tuple15Coder[A, B, C, D, E, G, H, I, J, K, L, M, N, O, P]( + ac, + bc, + cc, + dc, + ec, + gc, + hc, + ic, + jc, + kc, + lc, + mc, + nc, + oc, + pc + ) + ) + ) + } + } + } + } + } + } + } + } + } + } + } + } + } + } + + implicit def tuple16Coder[A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q](implicit + CA: Coder[A], + CB: Coder[B], + CC: Coder[C], + CD: Coder[D], + CE: Coder[E], + CG: Coder[G], + CH: Coder[H], + CI: Coder[I], + CJ: Coder[J], + CK: Coder[K], + CL: Coder[L], + CM: Coder[M], + CN: Coder[N], + CO: Coder[O], + CP: Coder[P], + CQ: Coder[Q] + ): Coder[(A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q)] = + Coder.transform(CA) { ac => + Coder.transform(CB) { bc => + Coder.transform(CC) { cc => + Coder.transform(CD) { dc => + Coder.transform(CE) { ec => + Coder.transform(CG) { gc => + Coder.transform(CH) { hc => + Coder.transform(CI) { ic => + Coder.transform(CJ) { jc => + Coder.transform(CK) { kc => + 
Coder.transform(CL) { lc => + Coder.transform(CM) { mc => + Coder.transform(CN) { nc => + Coder.transform(CO) { oc => + Coder.transform(CP) { pc => + Coder.transform(CQ)(qc => + Coder.beam( + new Tuple16Coder[ + A, + B, + C, + D, + E, + G, + H, + I, + J, + K, + L, + M, + N, + O, + P, + Q + ]( + ac, + bc, + cc, + dc, + ec, + gc, + hc, + ic, + jc, + kc, + lc, + mc, + nc, + oc, + pc, + qc + ) + ) + ) + } + } + } + } + } + } + } + } + } + } + } + } + } + } + } + + implicit def tuple17Coder[A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q, R](implicit + CA: Coder[A], + CB: Coder[B], + CC: Coder[C], + CD: Coder[D], + CE: Coder[E], + CG: Coder[G], + CH: Coder[H], + CI: Coder[I], + CJ: Coder[J], + CK: Coder[K], + CL: Coder[L], + CM: Coder[M], + CN: Coder[N], + CO: Coder[O], + CP: Coder[P], + CQ: Coder[Q], + CR: Coder[R] + ): Coder[(A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q, R)] = + Coder.transform(CA) { ac => + Coder.transform(CB) { bc => + Coder.transform(CC) { cc => + Coder.transform(CD) { dc => + Coder.transform(CE) { ec => + Coder.transform(CG) { gc => + Coder.transform(CH) { hc => + Coder.transform(CI) { ic => + Coder.transform(CJ) { jc => + Coder.transform(CK) { kc => + Coder.transform(CL) { lc => + Coder.transform(CM) { mc => + Coder.transform(CN) { nc => + Coder.transform(CO) { oc => + Coder.transform(CP) { pc => + Coder.transform(CQ) { qc => + Coder.transform(CR)(rc => + Coder.beam( + new Tuple17Coder[ + A, + B, + C, + D, + E, + G, + H, + I, + J, + K, + L, + M, + N, + O, + P, + Q, + R + ]( + ac, + bc, + cc, + dc, + ec, + gc, + hc, + ic, + jc, + kc, + lc, + mc, + nc, + oc, + pc, + qc, + rc + ) + ) + ) + } + } + } + } + } + } + } + } + } + } + } + } + } + } + } + } + + implicit def tuple18Coder[A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q, R, S](implicit + CA: Coder[A], + CB: Coder[B], + CC: Coder[C], + CD: Coder[D], + CE: Coder[E], + CG: Coder[G], + CH: Coder[H], + CI: Coder[I], + CJ: Coder[J], + CK: Coder[K], + CL: Coder[L], + CM: Coder[M], + CN: Coder[N], + CO: Coder[O], + CP: Coder[P], + CQ: Coder[Q], + CR: Coder[R], + CS: Coder[S] + ): Coder[(A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q, R, S)] = + Coder.transform(CA) { ac => + Coder.transform(CB) { bc => + Coder.transform(CC) { cc => + Coder.transform(CD) { dc => + Coder.transform(CE) { ec => + Coder.transform(CG) { gc => + Coder.transform(CH) { hc => + Coder.transform(CI) { ic => + Coder.transform(CJ) { jc => + Coder.transform(CK) { kc => + Coder.transform(CL) { lc => + Coder.transform(CM) { mc => + Coder.transform(CN) { nc => + Coder.transform(CO) { oc => + Coder.transform(CP) { pc => + Coder.transform(CQ) { qc => + Coder.transform(CR) { rc => + Coder.transform(CS)(sc => + Coder.beam( + new Tuple18Coder[ + A, + B, + C, + D, + E, + G, + H, + I, + J, + K, + L, + M, + N, + O, + P, + Q, + R, + S + ]( + ac, + bc, + cc, + dc, + ec, + gc, + hc, + ic, + jc, + kc, + lc, + mc, + nc, + oc, + pc, + qc, + rc, + sc + ) + ) + ) + } + } + } + } + } + } + } + } + } + } + } + } + } + } + } + } + } + + implicit def tuple19Coder[A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q, R, S, T](implicit + CA: Coder[A], + CB: Coder[B], + CC: Coder[C], + CD: Coder[D], + CE: Coder[E], + CG: Coder[G], + CH: Coder[H], + CI: Coder[I], + CJ: Coder[J], + CK: Coder[K], + CL: Coder[L], + CM: Coder[M], + CN: Coder[N], + CO: Coder[O], + CP: Coder[P], + CQ: Coder[Q], + CR: Coder[R], + CS: Coder[S], + CT: Coder[T] + ): Coder[(A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q, R, S, T)] = + Coder.transform(CA) { ac => + Coder.transform(CB) { bc => + Coder.transform(CC) { 
cc => + Coder.transform(CD) { dc => + Coder.transform(CE) { ec => + Coder.transform(CG) { gc => + Coder.transform(CH) { hc => + Coder.transform(CI) { ic => + Coder.transform(CJ) { jc => + Coder.transform(CK) { kc => + Coder.transform(CL) { lc => + Coder.transform(CM) { mc => + Coder.transform(CN) { nc => + Coder.transform(CO) { oc => + Coder.transform(CP) { pc => + Coder.transform(CQ) { qc => + Coder.transform(CR) { rc => + Coder.transform(CS) { sc => + Coder.transform(CT)(tc => + Coder.beam( + new Tuple19Coder[ + A, + B, + C, + D, + E, + G, + H, + I, + J, + K, + L, + M, + N, + O, + P, + Q, + R, + S, + T + ]( + ac, + bc, + cc, + dc, + ec, + gc, + hc, + ic, + jc, + kc, + lc, + mc, + nc, + oc, + pc, + qc, + rc, + sc, + tc + ) + ) + ) + } + } + } + } + } + } + } + } + } + } + } + } + } + } + } + } + } + } + + implicit def tuple20Coder[A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U](implicit + CA: Coder[A], + CB: Coder[B], + CC: Coder[C], + CD: Coder[D], + CE: Coder[E], + CG: Coder[G], + CH: Coder[H], + CI: Coder[I], + CJ: Coder[J], + CK: Coder[K], + CL: Coder[L], + CM: Coder[M], + CN: Coder[N], + CO: Coder[O], + CP: Coder[P], + CQ: Coder[Q], + CR: Coder[R], + CS: Coder[S], + CT: Coder[T], + CU: Coder[U] + ): Coder[(A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U)] = + Coder.transform(CA) { ac => + Coder.transform(CB) { bc => + Coder.transform(CC) { cc => + Coder.transform(CD) { dc => + Coder.transform(CE) { ec => + Coder.transform(CG) { gc => + Coder.transform(CH) { hc => + Coder.transform(CI) { ic => + Coder.transform(CJ) { jc => + Coder.transform(CK) { kc => + Coder.transform(CL) { lc => + Coder.transform(CM) { mc => + Coder.transform(CN) { nc => + Coder.transform(CO) { oc => + Coder.transform(CP) { pc => + Coder.transform(CQ) { qc => + Coder.transform(CR) { rc => + Coder.transform(CS) { sc => + Coder.transform(CT) { tc => + Coder.transform(CU)(uc => + Coder.beam( + new Tuple20Coder[ + A, + B, + C, + D, + E, + G, + H, + I, + J, + K, + L, + M, + N, + O, + P, + Q, + R, + S, + T, + U + ]( + ac, + bc, + cc, + dc, + ec, + gc, + hc, + ic, + jc, + kc, + lc, + mc, + nc, + oc, + pc, + qc, + rc, + sc, + tc, + uc + ) + ) + ) + } + } + } + } + } + } + } + } + } + } + } + } + } + } + } + } + } + } + } + + implicit def tuple21Coder[A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V](implicit + CA: Coder[A], + CB: Coder[B], + CC: Coder[C], + CD: Coder[D], + CE: Coder[E], + CG: Coder[G], + CH: Coder[H], + CI: Coder[I], + CJ: Coder[J], + CK: Coder[K], + CL: Coder[L], + CM: Coder[M], + CN: Coder[N], + CO: Coder[O], + CP: Coder[P], + CQ: Coder[Q], + CR: Coder[R], + CS: Coder[S], + CT: Coder[T], + CU: Coder[U], + CV: Coder[V] + ): Coder[(A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V)] = + Coder.transform(CA) { ac => + Coder.transform(CB) { bc => + Coder.transform(CC) { cc => + Coder.transform(CD) { dc => + Coder.transform(CE) { ec => + Coder.transform(CG) { gc => + Coder.transform(CH) { hc => + Coder.transform(CI) { ic => + Coder.transform(CJ) { jc => + Coder.transform(CK) { kc => + Coder.transform(CL) { lc => + Coder.transform(CM) { mc => + Coder.transform(CN) { nc => + Coder.transform(CO) { oc => + Coder.transform(CP) { pc => + Coder.transform(CQ) { qc => + Coder.transform(CR) { rc => + Coder.transform(CS) { sc => + Coder.transform(CT) { tc => + Coder.transform(CU) { uc => + Coder.transform(CV)(vc => + Coder.beam( + new Tuple21Coder[ + A, + B, + C, + D, + E, + G, + H, + I, + J, + K, + L, + M, + N, + O, + P, + Q, + R, + S, + T, + U, + V + ]( + ac, + bc, + cc, 
+ dc, + ec, + gc, + hc, + ic, + jc, + kc, + lc, + mc, + nc, + oc, + pc, + qc, + rc, + sc, + tc, + uc, + vc + ) + ) + ) + } + } + } + } + } + } + } + } + } + } + } + } + } + } + } + } + } + } + } + } + + implicit def tuple22Coder[A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V, W]( + implicit + CA: Coder[A], + CB: Coder[B], + CC: Coder[C], + CD: Coder[D], + CE: Coder[E], + CG: Coder[G], + CH: Coder[H], + CI: Coder[I], + CJ: Coder[J], + CK: Coder[K], + CL: Coder[L], + CM: Coder[M], + CN: Coder[N], + CO: Coder[O], + CP: Coder[P], + CQ: Coder[Q], + CR: Coder[R], + CS: Coder[S], + CT: Coder[T], + CU: Coder[U], + CV: Coder[V], + CW: Coder[W] + ): Coder[(A, B, C, D, E, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V, W)] = + Coder.transform(CA) { ac => + Coder.transform(CB) { bc => + Coder.transform(CC) { cc => + Coder.transform(CD) { dc => + Coder.transform(CE) { ec => + Coder.transform(CG) { gc => + Coder.transform(CH) { hc => + Coder.transform(CI) { ic => + Coder.transform(CJ) { jc => + Coder.transform(CK) { kc => + Coder.transform(CL) { lc => + Coder.transform(CM) { mc => + Coder.transform(CN) { nc => + Coder.transform(CO) { oc => + Coder.transform(CP) { pc => + Coder.transform(CQ) { qc => + Coder.transform(CR) { rc => + Coder.transform(CS) { sc => + Coder.transform(CT) { tc => + Coder.transform(CU) { uc => + Coder.transform(CV) { vc => + Coder.transform(CW)(wc => + Coder.beam( + new Tuple22Coder[ + A, + B, + C, + D, + E, + G, + H, + I, + J, + K, + L, + M, + N, + O, + P, + Q, + R, + S, + T, + U, + V, + W + ]( + ac, + bc, + cc, + dc, + ec, + gc, + hc, + ic, + jc, + kc, + lc, + mc, + nc, + oc, + pc, + qc, + rc, + sc, + tc, + uc, + vc, + wc + ) + ) + ) + } + } + } + } + } + } + } + } + } + } + } + } + } + } + } + } + } + } + } + } + } +} diff --git a/scio-core/src/main/scala-3/com/spotify/scio/coders/instances/kryo/JTraversableSerializer.scala b/scio-core/src/main/scala-3/com/spotify/scio/coders/instances/kryo/JTraversableSerializer.scala new file mode 100644 index 0000000000..e7fc5c1ef3 --- /dev/null +++ b/scio-core/src/main/scala-3/com/spotify/scio/coders/instances/kryo/JTraversableSerializer.scala @@ -0,0 +1,91 @@ +/* + * Copyright 2019 Spotify AB. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package com.spotify.scio.coders.instances.kryo + +import com.esotericsoftware.kryo.Kryo +import com.esotericsoftware.kryo.io.{Input, InputChunked, Output, OutputChunked} +import com.twitter.chill.KSerializer + +import scala.jdk.CollectionConverters._ +import scala.collection.mutable +import scala.collection.compat._ + +/** + * Based on [[org.apache.beam.sdk.coders.IterableLikeCoder]] and + * [[org.apache.beam.sdk.util.BufferedElementCountingOutputStream]]. 
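+ * Elements are written through an `OutputChunked` stream as a sequence of
+ * `(hasNext: Boolean, element)` pairs terminated by `false`, so the reader
+ * can rebuild a collection of unknown size without a length prefix.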
+ */ +private[coders] class JTraversableSerializer[T, C <: Traversable[T]]( + val bufferSize: Int = 64 * 1024 +)(implicit cbf: Factory[T, C]) + extends KSerializer[C] { + override def write(kser: Kryo, out: Output, obj: C): Unit = { + val i = obj.iterator + val chunked = new OutputChunked(out, bufferSize) + while (i.hasNext) { + chunked.writeBoolean(true) + kser.writeClassAndObject(chunked, i.next()) + } + chunked.writeBoolean(false) + chunked.endChunks() + chunked.flush() + } + + override def read(kser: Kryo, in: Input, cls: Class[C]): C = { + val b = cbf.newBuilder + val chunked = new InputChunked(in, bufferSize) + while (chunked.readBoolean()) { + b += kser.readClassAndObject(chunked).asInstanceOf[T] + } + b.result() + } +} + +// workaround for Java Iterable/Collection missing proper equality check +abstract private[coders] class JWrapperCBF[T] extends Factory[T, Iterable[T]] { + def asScala(xs: java.util.List[T]): Iterable[T] + + class JIterableWrapperBuilder extends mutable.Builder[T, Iterable[T]] { + private val xs = new java.util.ArrayList[T]() + + override def addOne(elem: T): this.type = { + xs.add(elem) + this + } + + override def clear(): Unit = xs.clear() + override def result(): Iterable[T] = asScala(xs) + } + + override def fromSpecific(it: IterableOnce[T]): Iterable[T] = { + val b = new JIterableWrapperBuilder + it.foreach(b += _) + b.result() + } + + override def newBuilder: mutable.Builder[T, Iterable[T]] = new JIterableWrapperBuilder +} + +private[coders] class JIterableWrapperCBF[T] extends JWrapperCBF[T] { + override def asScala(xs: java.util.List[T]): Iterable[T] = + xs.asInstanceOf[java.lang.Iterable[T]].asScala +} + +private[coders] class JCollectionWrapperCBF[T] extends JWrapperCBF[T] { + override def asScala(xs: java.util.List[T]): Iterable[T] = + xs.asInstanceOf[java.util.Collection[T]].asScala +} diff --git a/scio-core/src/main/scala-3/com/spotify/scio/coders/macros/FallbackCoderMacros.scala b/scio-core/src/main/scala-3/com/spotify/scio/coders/macros/FallbackCoderMacros.scala new file mode 100644 index 0000000000..180b4e0cbb --- /dev/null +++ b/scio-core/src/main/scala-3/com/spotify/scio/coders/macros/FallbackCoderMacros.scala @@ -0,0 +1,104 @@ +package com.spotify.scio.coders.macros + +import com.spotify.scio.coders.Coder +import scala.compiletime._ +import scala.deriving._ +import scala.quoted._ +import scala.reflect.ClassTag + +object FallbackCoderMacros { + + private[this] var verbose = true + private[this] val reported: scala.collection.mutable.Set[(String, String)] = + scala.collection.mutable.Set.empty + + private[this] val BlacklistedTypes = List("org.apache.beam.sdk.values.Row") + + private[this] val Warnings = + Map( + "org.apache.avro.generic.GenericRecord" -> + """ + |Using a fallback coder for Avro's GenericRecord is discouraged as it is VERY inefficient. + |It is highly recommended to define a proper Coder[GenericRecord] using: + | + | Coder.avroGenericRecordCoder(schema) + """.stripMargin + ) + + def issueFallbackWarning[T](using Quotes, Type[T]): Expr[Coder[T]] = { + import quotes.reflect._ + // TODO: scala3 implement macro settings + // val show = MacroSettings.showCoderFallback(c) == FeatureFlag.Enable + val show = true + + val fullTypeColored = Type.showAnsiColored[T] + val fullType = Type.show[T] + val typeName: String = fullType.split('.').last // TODO: Type.showShort[T] ? 
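+    // Taking the last '.'-separated segment is a rough short-name heuristic; it
+    // can mangle parameterized types (e.g. Map[String, Int]), hence the TODO above.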
+
+    val toReport = Position.ofMacroExpansion.toString -> fullType
+    val alreadyReported = reported.contains(toReport)
+    if (!alreadyReported) reported += toReport
+
+    val shortMessage =
+      s"""
+      | Warning: No implicit Coder found for the following type:
+      |
+      |   >> $fullTypeColored
+      |
+      | using Kryo fallback instead.
+      """
+
+    val longMessage =
+      shortMessage +
+        s"""
+        |
+        | Scio will use a fallback Kryo coder instead.
+        |
+        | If a type is not supported, consider implementing your own implicit Coder for this type.
+        | It is recommended to declare this Coder in your class companion object:
+        |
+        |       object $typeName {
+        |         import com.spotify.scio.coders.Coder
+        |         import org.apache.beam.sdk.coders.AtomicCoder
+        |
+        |         implicit def coder$typeName: Coder[$fullType] =
+        |           Coder.beam(new AtomicCoder[$fullType] {
+        |             def decode(in: InputStream): $fullType = ???
+        |             def encode(ts: $fullType, out: OutputStream): Unit = ???
+        |           })
+        |       }
+        |
+        | If you do want to use a Kryo coder, be explicit about it:
+        |
+        |       implicit def coder$typeName: Coder[$fullType] = Coder.kryo[$fullType]
+        |
+        | Additional info at:
+        |  - https://spotify.github.io/scio/internals/Coders
+        |
+        """
+
+    val Some(tag) = Expr.summon[ClassTag[T]]
+    val fallback = '{ Coder.kryo[T]($tag) }
+
+    (verbose, alreadyReported) match {
+      case _ if BlacklistedTypes.contains(fullType) =>
+        val msg =
+          s"Can't use a Kryo coder for $fullType. You need to explicitly set the Coder for this type"
+        report.throwError(msg)
+      case _ if Warnings.contains(fullType) =>
+        report.warning(Warnings(fullType))
+        fallback
+      case (false, false) =>
+        if (show) report.warning(shortMessage.stripMargin)
+        fallback
+      case (true, false) =>
+        if (show) report.warning(longMessage.stripMargin)
+        verbose = false
+        fallback
+      case (_, _) =>
+        fallback
+    }
+
+  }
+
+}
diff --git a/scio-core/src/main/scala-3/com/spotify/scio/schemas/To.scala b/scio-core/src/main/scala-3/com/spotify/scio/schemas/To.scala
new file mode 100644
index 0000000000..1809cef330
--- /dev/null
+++ b/scio-core/src/main/scala-3/com/spotify/scio/schemas/To.scala
@@ -0,0 +1,42 @@
+/*
+ * Copyright 2019 Spotify AB.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.spotify.scio.schemas
+
+import org.apache.beam.sdk.schemas.{SchemaCoder, Schema => BSchema}
+
+import scala.compiletime._
+import scala.deriving._
+import scala.quoted._
+
+object ToMacro {
+  def safeImpl[I, O](si: Expr[Schema[I]])(implicit q: Quotes): Expr[To[I, O]] = {
+    ???
+  }
+}
+
+trait ToMacro {
+  /**
+   * Convert instances of ${I} in this SCollection into instances of ${O}
+   * based on the Schemas of the 2 classes. The compatibility of those classes is checked
+   * at compile time.
+   * @see To#unsafe
+   */
+  // TODO: scala3
+  inline def safe[I, O](inline si: Schema[I], inline so: Schema[O]): To[I, O] =
+    ???
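+
+  // A possible Scala 3 wiring once safeImpl is ported (hypothetical, untested sketch):
+  //
+  //   inline def safe[I, O](inline si: Schema[I], inline so: Schema[O]): To[I, O] =
+  //     ${ ToMacro.safeImpl[I, O]('si, 'so) }
+  //
+  // with safeImpl extended to (si: Expr[Schema[I]], so: Expr[Schema[O]])(using Quotes, Type[I],
+  // Type[O]): it would materialize both schemas, run To.checkCompatibility, and expand to
+  // To.unchecked[I, O] or abort with the mismatch message, like the Scala 2 macro removed below.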
+}
diff --git a/scio-core/src/main/scala-3/com/spotify/scio/schemas/instances/LowPrioritySchemaDerivation.scala b/scio-core/src/main/scala-3/com/spotify/scio/schemas/instances/LowPrioritySchemaDerivation.scala
new file mode 100644
index 0000000000..91e1b0b015
--- /dev/null
+++ b/scio-core/src/main/scala-3/com/spotify/scio/schemas/instances/LowPrioritySchemaDerivation.scala
@@ -0,0 +1,56 @@
+/*
+ * Copyright 2019 Spotify AB.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.spotify.scio.schemas.instances
+
+import com.spotify.scio.schemas._
+import com.spotify.scio.macros.DerivationUtils
+
+import scala.deriving._
+import scala.quoted._
+import scala.compiletime._
+
+trait LowPrioritySchemaDerivation {
+  type Typeclass[T] = Schema[T]
+
+  // back compat
+  inline def gen[T <: Product](implicit m: Mirror.Of[T]): Schema[T] =
+    derived[T]
+
+  def coderProduct[T <: Product](p: Mirror.ProductOf[T], schemas: List[(String, Schema[Any])]): Schema[T] = {
+    val rawConstruct: Seq[Any] => T =
+      vs => p.fromProduct(Tuple.fromArray(vs.toArray))
+
+    val destruct: T => Array[Any] =
+      t => Tuple.fromProduct(t).toArray.asInstanceOf[Array[Any]]
+
+    Record(schemas.toArray, rawConstruct, destruct)
+  }
+
+  inline implicit def derived[T <: Product](implicit m: Mirror.Of[T]): Schema[T] = {
+    inline m match {
+      case p: Mirror.ProductOf[T] =>
+        val elemInstances =
+          DerivationUtils.summonAllF[Schema, p.MirroredElemTypes]
+            .toList
+            .asInstanceOf[List[Schema[Any]]] // YOLO
+        val fields = DerivationUtils.mirrorFields[p.MirroredElemLabels]
+        val schemas = fields.zip(elemInstances)
+        coderProduct(p, schemas)
+    }
+  }
+}
diff --git a/scio-core/src/main/scala-3/com/spotify/scio/util/JMapWrapper.scala b/scio-core/src/main/scala-3/com/spotify/scio/util/JMapWrapper.scala
new file mode 100644
index 0000000000..0900b741a2
--- /dev/null
+++ b/scio-core/src/main/scala-3/com/spotify/scio/util/JMapWrapper.scala
@@ -0,0 +1,71 @@
+/*
+ * Copyright 2019 Spotify AB.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.spotify.scio.util
+
+import java.lang.{Iterable => JIterable}
+import java.util.{Map => JMap}
+
+import scala.jdk.CollectionConverters._
+
+/**
+ * Immutable wrappers for [[java.util.Map]].
+ * Java `Map`s are mutable and `.asScala` returns `mutable.Map[K, V]` which is inconsistent and not
+ * idiomatic Scala. When wrapping Beam API, in many cases the underlying [[java.util.Map]] is
+ * immutable in nature and it's safe to wrap them with this.
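+ *
+ * For example (hypothetical usage, assuming `m: java.util.Map[String, String]` handed out by a
+ * Beam API): `JMapWrapper.of(m)` yields an immutable `Map[String, String]` whose `get` and
+ * `iterator` read through to the underlying Java map without copying.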
+ */ +private[scio] object JMapWrapper { + def ofMultiMap[A, B](self: JMap[A, JIterable[B]]): Map[A, Iterable[B]] = + new Map[A, Iterable[B]] { + // make eager copies when necessary + + override def removed(key: A): Map[A, Iterable[B]] = + self.asScala.iterator + .filter { case (k, _) => k != key } + .map { case (k, v) => + (k, v.asScala) + } + .toMap + + // lazy transform underlying j.u.Map + override def get(key: A): Option[Iterable[B]] = + Option(self.get(key)).map(_.asScala) + override def iterator: Iterator[(A, Iterable[B])] = + self.asScala.iterator.map(kv => (kv._1, kv._2.asScala)) + + override def updated[V1 >: Iterable[B]](key: A, value: V1): Map[A, V1] = + self.asScala.iterator + .map { case (k, v) => (k, v.asScala) } + .toMap + .updated(key, value) + } + + def of[K, V](self: JMap[K, V]): Map[K, V] = + new Map[K, V] { + // make eager copies when necessary + + override def removed(key: K): Map[K, V] = + self.asScala.iterator.filter { case (k, _) => k != key }.toMap + + // lazy transform underlying j.u.Map + override def get(key: K): Option[V] = Option(self.get(key)) + override def iterator: Iterator[(K, V)] = self.asScala.iterator + + override def updated[V1 >: V](key: K, value: V1): Map[K, V1] = + self.asScala.toMap.updated(key, value) + } +} diff --git a/scio-core/src/main/scala-3/scala/collection/compat/extra.scala b/scio-core/src/main/scala-3/scala/collection/compat/extra.scala new file mode 100644 index 0000000000..03b3f1cf70 --- /dev/null +++ b/scio-core/src/main/scala-3/scala/collection/compat/extra.scala @@ -0,0 +1,24 @@ +/* + * Copyright 2020 Spotify AB + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package scala.collection.compat + +object extra { + type Wrappers = scala.collection.convert.JavaCollectionWrappers.type + val Wrappers = scala.collection.convert.JavaCollectionWrappers + + val CollectionConverters = scala.jdk.javaapi.CollectionConverters +} diff --git a/scio-core/src/main/scala/com/spotify/scio/VersionUtil.scala b/scio-core/src/main/scala/com/spotify/scio/VersionUtil.scala index 0fa12b4216..4a81a37461 100644 --- a/scio-core/src/main/scala/com/spotify/scio/VersionUtil.scala +++ b/scio-core/src/main/scala/com/spotify/scio/VersionUtil.scala @@ -32,8 +32,10 @@ import scala.util.Try private[scio] object VersionUtil { case class SemVer(major: Int, minor: Int, rev: Int, suffix: String) extends Ordered[SemVer] { - def compare(that: SemVer): Int = - Ordering[(Int, Int, Int, String)].compare(SemVer.unapply(this).get, SemVer.unapply(that).get) + def compare(that: SemVer): Int = (this, that) match { + case (SemVer(ma0, mi0, rev0, suf0), SemVer(ma1, mi1, rev1, suf1)) => + Ordering[(Int, Int, Int, String)].compare((ma0, mi0, rev0, suf0), (ma1, mi1, rev1, suf1)) + } } private[this] val Timeout = 3000 diff --git a/scio-core/src/main/scala/com/spotify/scio/coders/Coder.scala b/scio-core/src/main/scala/com/spotify/scio/coders/Coder.scala index b6a1820d85..e5997e46b1 100644 --- a/scio-core/src/main/scala/com/spotify/scio/coders/Coder.scala +++ b/scio-core/src/main/scala/com/spotify/scio/coders/Coder.scala @@ -525,6 +525,7 @@ object Coder with GuavaCoders with JodaCoders with BeamTypeCoders + with FallbackCoder with LowPriorityCoders { @inline final def apply[T](implicit c: Coder[T]): Coder[T] = c @@ -595,9 +596,6 @@ object Coder implicit val jPeriodCoder: Coder[java.time.Period] = JavaCoders.jPeriodCoder implicit val jSqlTimestamp: Coder[java.sql.Timestamp] = JavaCoders.jSqlTimestamp implicit def coderJEnum[E <: java.lang.Enum[E]: ClassTag]: Coder[E] = JavaCoders.coderJEnum - - def fallback[T](implicit lp: shapeless.LowPriority): Coder[T] = - macro CoderMacros.issueFallbackWarning[T] } trait LowPriorityCoders extends LowPriorityCoderDerivation { diff --git a/scio-core/src/main/scala/com/spotify/scio/coders/KryoAtomicCoder.scala b/scio-core/src/main/scala/com/spotify/scio/coders/KryoAtomicCoder.scala index 70b2d869a2..a152b0a304 100644 --- a/scio-core/src/main/scala/com/spotify/scio/coders/KryoAtomicCoder.scala +++ b/scio-core/src/main/scala/com/spotify/scio/coders/KryoAtomicCoder.scala @@ -231,7 +231,7 @@ final private[scio] class KryoAtomicCoder[T](private val options: KryoOptions) } private def kryoEncodedElementByteSize(obj: Any): Long = - withKryoState(instanceId, options) { kryoState: KryoState => + withKryoState(instanceId, options) { (kryoState: KryoState) => val s = new CountingOutputStream(ByteStreams.nullOutputStream()) val output = new Output(options.bufferSize, options.maxBufferSize) output.setOutputStream(s) diff --git a/scio-core/src/main/scala/com/spotify/scio/coders/instances/AvroCoders.scala b/scio-core/src/main/scala/com/spotify/scio/coders/instances/AvroCoders.scala index 3a0c048613..18f2d076f7 100644 --- a/scio-core/src/main/scala/com/spotify/scio/coders/instances/AvroCoders.scala +++ b/scio-core/src/main/scala/com/spotify/scio/coders/instances/AvroCoders.scala @@ -19,7 +19,7 @@ package com.spotify.scio.coders.instances import java.io.{InputStream, OutputStream} -import com.spotify.scio.coders.{AvroCoderMacros, Coder} +import com.spotify.scio.coders.Coder import org.apache.avro.Schema import org.apache.avro.generic.GenericRecord import 
org.apache.avro.specific.{SpecificData, SpecificFixed} @@ -104,7 +104,7 @@ private object SpecificFixedCoder { } } -trait AvroCoders { +trait AvroCoders extends AvroCodersMacros { /** * Create a Coder for Avro GenericRecord given the schema of the GenericRecord. @@ -120,10 +120,6 @@ trait AvroCoders { def avroGenericRecordCoder: Coder[GenericRecord] = Coder.beam(new SlowGenericRecordCoder) - import org.apache.avro.specific.SpecificRecordBase - implicit def genAvro[T <: SpecificRecordBase]: Coder[T] = - macro AvroCoderMacros.staticInvokeCoder[T] - implicit def avroSpecificFixedCoder[T <: SpecificFixed: ClassTag]: Coder[T] = SpecificFixedCoder[T] } diff --git a/scio-core/src/main/scala/com/spotify/scio/coders/instances/ScalaCoders.scala b/scio-core/src/main/scala/com/spotify/scio/coders/instances/ScalaCoders.scala index 19171f7de3..9a373ea3d9 100644 --- a/scio-core/src/main/scala/com/spotify/scio/coders/instances/ScalaCoders.scala +++ b/scio-core/src/main/scala/com/spotify/scio/coders/instances/ScalaCoders.scala @@ -189,21 +189,22 @@ private class VectorCoder[T](bc: BCoder[T]) extends SeqLikeCoder[Vector, T](bc) override def decode(inStream: InputStream): Vector[T] = decode(inStream, Vector.newBuilder[T]) } -private class ArrayCoder[@specialized(Short, Int, Long, Float, Double, Boolean, Char) T: ClassTag]( - bc: BCoder[T] -) extends SeqLikeCoder[Array, T](bc) { - override def decode(inStream: InputStream): Array[T] = { - val size = VarInt.decodeInt(inStream) - val arr = new Array[T](size) - var i = 0 - while (i < size) { - arr(i) = bc.decode(inStream) - i += 1 - } - arr - } - override def consistentWithEquals(): Boolean = false -} +// TODO: restore once https://github.com/lampepfl/dotty/issues/10599 is fixed +// private class ArrayCoder[@specialized(Short, Int, Long, Float, Double, Boolean, Char) T: ClassTag]( +// bc: BCoder[T] +// ) extends SeqLikeCoder[Array, T](bc) { +// override def decode(inStream: InputStream): Array[T] = { +// val size = VarInt.decodeInt(inStream) +// val arr = new Array[T](size) +// var i = 0 +// while (i < size) { +// arr(i) = bc.decode(inStream) +// i += 1 +// } +// arr +// } +// override def consistentWithEquals(): Boolean = false +// } private class ArrayBufferCoder[T](bc: BCoder[T]) extends SeqLikeCoder[m.ArrayBuffer, T](bc) { override def decode(inStream: InputStream): m.ArrayBuffer[T] = @@ -488,8 +489,9 @@ trait ScalaCoders { implicit def listBufferCoder[T: Coder]: Coder[m.ListBuffer[T]] = Coder.xmap(bufferCoder[T])(x => m.ListBuffer(x.toSeq: _*), identity) - implicit def arrayCoder[T: Coder: ClassTag]: Coder[Array[T]] = - Coder.transform(Coder[T])(bc => Coder.beam(new ArrayCoder[T](bc))) + // TODO: scala3 restore previous implementation (see ArrayCoder) + // Coder.transform(Coder[T])(bc => Coder.beam(new ArrayCoder[T](bc))) + implicit def arrayCoder[T: Coder: ClassTag]: Coder[Array[T]] = ??? 
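+  // One possible stopgap until the dotty issue above is fixed (hypothetical, untested sketch):
+  // route through List[T] with Coder.xmap, as listBufferCoder does, trading the specialized
+  // element handling of ArrayCoder for a coder that compiles on Scala 3:
+  //
+  //   implicit def arrayCoder[T: Coder: ClassTag]: Coder[Array[T]] =
+  //     Coder.xmap(Coder[List[T]])(_.toArray, _.toList)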
implicit def arrayByteCoder: Coder[Array[Byte]] = Coder.beam(ByteArrayCoder.of()) diff --git a/scio-core/src/main/scala/com/spotify/scio/estimators/ApproxDistinctCounter.scala b/scio-core/src/main/scala/com/spotify/scio/estimators/ApproxDistinctCounter.scala index 202d4154c7..376a7be83c 100644 --- a/scio-core/src/main/scala/com/spotify/scio/estimators/ApproxDistinctCounter.scala +++ b/scio-core/src/main/scala/com/spotify/scio/estimators/ApproxDistinctCounter.scala @@ -21,6 +21,7 @@ import com.spotify.scio.coders.{BeamCoders, Coder, CoderMaterializer} import com.spotify.scio.util.TupleFunctions._ import com.spotify.scio.values.SCollection import org.apache.beam.sdk.{transforms => beam} + import com.spotify.scio.values.SCollection.makePairSCollectionFunctions /** * Approximate distinct element counter for type `T`, e.g. HyperLogLog or HyperLogLog++. This has two APIs one diff --git a/scio-core/src/main/scala/com/spotify/scio/hash/ApproxFilter.scala b/scio-core/src/main/scala/com/spotify/scio/hash/ApproxFilter.scala index d04ae75207..5b45742bbd 100644 --- a/scio-core/src/main/scala/com/spotify/scio/hash/ApproxFilter.scala +++ b/scio-core/src/main/scala/com/spotify/scio/hash/ApproxFilter.scala @@ -196,7 +196,7 @@ sealed trait ApproxFilterCompanion { elems.transform { _.groupBy(_ => ()).values .map { xs => - val n = if (expectedInsertions > 0) expectedInsertions else xs.size + val n: Long = if (expectedInsertions > 0) expectedInsertions else xs.size create(xs, n, fpp) } } diff --git a/scio-core/src/main/scala/com/spotify/scio/io/dynamic/package.scala b/scio-core/src/main/scala/com/spotify/scio/io/dynamic/package.scala index 4fee32a0e0..b96ef775d8 100644 --- a/scio-core/src/main/scala/com/spotify/scio/io/dynamic/package.scala +++ b/scio-core/src/main/scala/com/spotify/scio/io/dynamic/package.scala @@ -17,8 +17,6 @@ package com.spotify.scio.io -import com.spotify.scio.io.dynamic.syntax.AllSyntax - /** * IO package for dynamic destinations. Import All. 
* @@ -26,4 +24,4 @@ import com.spotify.scio.io.dynamic.syntax.AllSyntax * import com.spotify.scio.io.dynamic._ * }}} */ -package object dynamic extends AllSyntax +package object dynamic extends com.spotify.scio.io.dynamic.syntax.AllSyntax diff --git a/scio-core/src/main/scala/com/spotify/scio/schemas/Schema.scala b/scio-core/src/main/scala/com/spotify/scio/schemas/Schema.scala index 201dd1fac1..1a4b938de0 100644 --- a/scio-core/src/main/scala/com/spotify/scio/schemas/Schema.scala +++ b/scio-core/src/main/scala/com/spotify/scio/schemas/Schema.scala @@ -19,7 +19,6 @@ package com.spotify.scio.schemas import java.util.{List => jList, Map => jMap} -import com.spotify.scio.{FeatureFlag, IsJavaBean, MacroSettings} import com.spotify.scio.schemas.instances.{ AvroInstances, JavaInstances, @@ -36,9 +35,9 @@ import com.twitter.chill.ClosureCleaner import scala.jdk.CollectionConverters._ import scala.reflect.ClassTag -import org.apache.beam.sdk.values.TupleTag import scala.collection.{mutable, SortedSet} +import com.spotify.scio.IsJavaBean object Schema extends JodaInstances with AvroInstances with LowPrioritySchemaDerivation { @inline final def apply[T](implicit c: Schema[T]): Schema[T] = c @@ -210,7 +209,10 @@ final case class MapType[F[_, _], K, V]( private[scio] case class ScalarWrapper[T](value: T) extends AnyVal object ScalarWrapper { implicit def schemaScalarWrapper[T: Schema]: Schema[ScalarWrapper[T]] = - Schema.gen[ScalarWrapper[T]] + Record( + Array("value" -> Schema[T].asInstanceOf[Schema[Any]]), + vs => ScalarWrapper(vs.head.asInstanceOf[T]), + w => Array(w.value)) } private[scio] object SchemaTypes { @@ -233,46 +235,3 @@ private[scio] object SchemaTypes { case _ => false }) } - -private[scio] trait SchemaMacroHelpers { - import scala.reflect.macros._ - - val ctx: blackbox.Context - import ctx.universe._ - - val cacheImplicitSchemas: FeatureFlag = MacroSettings.cacheImplicitSchemas(ctx) - - def untyped[A](expr: ctx.Expr[Schema[A]]): ctx.Expr[Schema[A]] = - ctx.Expr[Schema[A]](ctx.untypecheck(expr.tree.duplicate)) - - def inferImplicitSchema[A: ctx.WeakTypeTag]: ctx.Expr[Schema[A]] = - inferImplicitSchema(weakTypeOf[A]).asInstanceOf[ctx.Expr[Schema[A]]] - - def inferImplicitSchema(t: ctx.Type): ctx.Expr[Schema[_]] = { - val tpe = - cacheImplicitSchemas match { - case FeatureFlag.Enable => - tq"_root_.shapeless.Cached[_root_.com.spotify.scio.schemas.Schema[$t]]" - case _ => - tq"_root_.com.spotify.scio.schemas.Schema[$t]" - } - - val tp = ctx.typecheck(tpe, ctx.TYPEmode).tpe - val typedTree = ctx.inferImplicitValue(tp, silent = false) - val untypedTree = ctx.untypecheck(typedTree.duplicate) - - cacheImplicitSchemas match { - case FeatureFlag.Enable => - ctx.Expr[Schema[_]](q"$untypedTree.value") - case _ => - ctx.Expr[Schema[_]](untypedTree) - } - } - - def inferClassTag(t: ctx.Type): ctx.Expr[ClassTag[_]] = - ctx.Expr[ClassTag[_]](q"implicitly[_root_.scala.reflect.ClassTag[$t]]") - - implicit def liftTupleTag[A: ctx.WeakTypeTag]: Liftable[TupleTag[A]] = Liftable[TupleTag[A]] { - x => q"new _root_.org.apache.beam.sdk.values.TupleTag[${weakTypeOf[A]}](${x.getId()})" - } -} diff --git a/scio-core/src/main/scala/com/spotify/scio/schemas/SchemaMaterializer.scala b/scio-core/src/main/scala/com/spotify/scio/schemas/SchemaMaterializer.scala index f889083e6f..24d9da116a 100644 --- a/scio-core/src/main/scala/com/spotify/scio/schemas/SchemaMaterializer.scala +++ b/scio-core/src/main/scala/com/spotify/scio/schemas/SchemaMaterializer.scala @@ -83,14 +83,14 @@ object SchemaMaterializer { private def 
decode[A](schema: LogicalType[A])(v: schema.Repr): A = schema.fromBase(v) - private def decode[F[_], A: ClassTag](schema: ArrayType[F, A])(v: schema.Repr): F[A] = { - val values = new Array[A](v.size) + private def decode[F[_], A](schema: ArrayType[F, A])(v: schema.Repr): F[A] = { + val values = new Array[Any](v.size) var i = 0 while (i < v.size) { values.update(i, dispatchDecode[A](schema.schema)(v.get(i))) i = i + 1 } - schema.fromList(java.util.Arrays.asList(values: _*)) + schema.fromList(java.util.Arrays.asList(values.asInstanceOf[Array[A]]: _*)) } private def decode[F[_, _], A, B](schema: MapType[F, A, B])(v: schema.Repr): F[A, B] = { @@ -150,7 +150,7 @@ object SchemaMaterializer { schema .toList(v) .asScala - .map(dispatchEncode(schema.schema, fieldType.getCollectionElementType)) + .map[schema.schema.Repr](dispatchEncode(schema.schema, fieldType.getCollectionElementType)) .asJava private def encode[F[_, _], A, B](schema: MapType[F, A, B], fieldType: BFieldType)( diff --git a/scio-core/src/main/scala/com/spotify/scio/schemas/To.scala b/scio-core/src/main/scala/com/spotify/scio/schemas/To.scala index 9b2d05f670..e93bd5e6d6 100644 --- a/scio-core/src/main/scala/com/spotify/scio/schemas/To.scala +++ b/scio-core/src/main/scala/com/spotify/scio/schemas/To.scala @@ -35,7 +35,7 @@ sealed trait To[I, O] extends (SCollection[I] => SCollection[O]) with Serializab coll.map(i => convert(i))(coder) } -object To { +object To extends ToMacro { @tailrec @inline private def getBaseType(t: BSchema.FieldType): BSchema.FieldType = { @@ -207,37 +207,4 @@ object To { val coder = Coder.beam(SchemaCoder.of(bso, td, toO, fromO)) def convert(i: I): O = f.curried(bso).andThen(fromO(_))(i) } - - /** - * Convert instance of ${T} in this SCollection into instances of ${O} - * based on the Schemas on the 2 classes. The compatibility of thoses classes is checked - * at compile time. 
- * @see To#unsafe - */ - def safe[I: Schema, O: Schema]: To[I, O] = - macro ToMacro.safeImpl[I, O] -} - -object ToMacro { - import scala.reflect.macros._ - def safeImpl[I: c.WeakTypeTag, O: c.WeakTypeTag]( - c: blackbox.Context - )(iSchema: c.Expr[Schema[I]], oSchema: c.Expr[Schema[O]]): c.Expr[To[I, O]] = { - val h = new { val ctx: c.type = c } with SchemaMacroHelpers - import h._ - import c.universe._ - - val tpeI = weakTypeOf[I] - val tpeO = weakTypeOf[O] - - val expr = c.Expr[(Schema[I], Schema[O])](q"(${untyped(iSchema)}, ${untyped(oSchema)})") - val (sIn, sOut) = c.eval(expr) - - val schemaOut: BSchema = SchemaMaterializer.fieldType(sOut).getRowSchema() - val schemaIn: BSchema = SchemaMaterializer.fieldType(sIn).getRowSchema() - - To.checkCompatibility(schemaIn, schemaOut) { - q"""_root_.com.spotify.scio.schemas.To.unchecked[$tpeI, $tpeO]""" - }.fold(message => c.abort(c.enclosingPosition, message), t => c.Expr[To[I, O]](t)) - } } diff --git a/scio-core/src/main/scala/com/spotify/scio/util/ArtisanJoin.scala b/scio-core/src/main/scala/com/spotify/scio/util/ArtisanJoin.scala index 70e504e19c..966046188d 100644 --- a/scio-core/src/main/scala/com/spotify/scio/util/ArtisanJoin.scala +++ b/scio-core/src/main/scala/com/spotify/scio/util/ArtisanJoin.scala @@ -31,6 +31,7 @@ import org.apache.beam.sdk.values.{KV, TupleTag} import org.slf4j.LoggerFactory import scala.jdk.CollectionConverters._ +import com.spotify.scio.values.SCollection.makePairSCollectionFunctions private[scio] object ArtisanJoin { private val log = LoggerFactory.getLogger(this.getClass) @@ -72,7 +73,7 @@ private[scio] object ArtisanJoin { .and(tagB, b.toKV.internal) .apply(s"CoGroupByKey@$name", CoGroupByKey.create()) - implicit val (kCoder, aCoder, bCoder) = (a.keyCoder, a.valueCoder, b.valueCoder) + implicit val (kCoder: Coder[KEY], aCoder: Coder[A], bCoder: Coder[B]) = (a.keyCoder, a.valueCoder, b.valueCoder) type DF = DoFn[KV[KEY, CoGbkResult], (KEY, (A1, B1))] a.context diff --git a/scio-core/src/main/scala/com/spotify/scio/util/MultiJoin.scala b/scio-core/src/main/scala/com/spotify/scio/util/MultiJoin.scala index 18ee1ee15c..40476b04a6 100644 --- a/scio-core/src/main/scala/com/spotify/scio/util/MultiJoin.scala +++ b/scio-core/src/main/scala/com/spotify/scio/util/MultiJoin.scala @@ -20,10 +20,12 @@ package com.spotify.scio.util import com.spotify.scio.values.SCollection +import com.spotify.scio.coders.Coder import org.apache.beam.sdk.transforms.join.{CoGroupByKey, KeyedPCollectionTuple} import org.apache.beam.sdk.values.TupleTag import scala.jdk.CollectionConverters._ +import com.spotify.scio.values.SCollection.makePairSCollectionFunctions trait MultiJoin extends Serializable { @@ -32,8 +34,8 @@ trait MultiJoin extends Serializable { def toOptions[T](xs: Iterator[T]): Iterator[Option[T]] = if (xs.isEmpty) Iterator(None) else xs.map(Option(_)) def cogroup[KEY, A, B](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)]): SCollection[(KEY, (Iterable[A], Iterable[B]))] = { - implicit val keyCoder = a.keyCoder - implicit val (coderA, coderB) = (a.valueCoder, b.valueCoder) + implicit val keyCoder: Coder[KEY] = a.keyCoder + implicit val (coderA: Coder[A], coderB: Coder[B]) = (a.valueCoder, b.valueCoder) val (tagA, tagB) = (new TupleTag[A](), new TupleTag[B]()) val keyed = KeyedPCollectionTuple .of(tagA, a.toKV.internal) @@ -46,8 +48,8 @@ trait MultiJoin extends Serializable { } def cogroup[KEY, A, B, C](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)]): SCollection[(KEY, (Iterable[A], Iterable[B], 
Iterable[C]))] = { - implicit val keyCoder = a.keyCoder - implicit val (coderA, coderB, coderC) = (a.valueCoder, b.valueCoder, c.valueCoder) + implicit val keyCoder: Coder[KEY] = a.keyCoder + implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C]) = (a.valueCoder, b.valueCoder, c.valueCoder) val (tagA, tagB, tagC) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C]()) val keyed = KeyedPCollectionTuple .of(tagA, a.toKV.internal) @@ -61,8 +63,8 @@ trait MultiJoin extends Serializable { } def cogroup[KEY, A, B, C, D](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)]): SCollection[(KEY, (Iterable[A], Iterable[B], Iterable[C], Iterable[D]))] = { - implicit val keyCoder = a.keyCoder - implicit val (coderA, coderB, coderC, coderD) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder) + implicit val keyCoder: Coder[KEY] = a.keyCoder + implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder) val (tagA, tagB, tagC, tagD) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D]()) val keyed = KeyedPCollectionTuple .of(tagA, a.toKV.internal) @@ -77,8 +79,8 @@ trait MultiJoin extends Serializable { } def cogroup[KEY, A, B, C, D, E](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)]): SCollection[(KEY, (Iterable[A], Iterable[B], Iterable[C], Iterable[D], Iterable[E]))] = { - implicit val keyCoder = a.keyCoder - implicit val (coderA, coderB, coderC, coderD, coderE) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder) + implicit val keyCoder: Coder[KEY] = a.keyCoder + implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder) val (tagA, tagB, tagC, tagD, tagE) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E]()) val keyed = KeyedPCollectionTuple .of(tagA, a.toKV.internal) @@ -94,8 +96,8 @@ trait MultiJoin extends Serializable { } def cogroup[KEY, A, B, C, D, E, F](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)], f: SCollection[(KEY, F)]): SCollection[(KEY, (Iterable[A], Iterable[B], Iterable[C], Iterable[D], Iterable[E], Iterable[F]))] = { - implicit val keyCoder = a.keyCoder - implicit val (coderA, coderB, coderC, coderD, coderE, coderF) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder) + implicit val keyCoder: Coder[KEY] = a.keyCoder + implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E], coderF: Coder[F]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder) val (tagA, tagB, tagC, tagD, tagE, tagF) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E](), new TupleTag[F]()) val keyed = KeyedPCollectionTuple .of(tagA, a.toKV.internal) @@ -112,8 +114,8 @@ trait MultiJoin extends Serializable { } def cogroup[KEY, A, B, C, D, E, F, G](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)], f: SCollection[(KEY, F)], g: SCollection[(KEY, G)]): SCollection[(KEY, (Iterable[A], Iterable[B], Iterable[C], Iterable[D], Iterable[E], Iterable[F], Iterable[G]))] = { - 
implicit val keyCoder = a.keyCoder - implicit val (coderA, coderB, coderC, coderD, coderE, coderF, coderG) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder) + implicit val keyCoder: Coder[KEY] = a.keyCoder + implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E], coderF: Coder[F], coderG: Coder[G]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder) val (tagA, tagB, tagC, tagD, tagE, tagF, tagG) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E](), new TupleTag[F](), new TupleTag[G]()) val keyed = KeyedPCollectionTuple .of(tagA, a.toKV.internal) @@ -131,8 +133,8 @@ trait MultiJoin extends Serializable { } def cogroup[KEY, A, B, C, D, E, F, G, H](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)], f: SCollection[(KEY, F)], g: SCollection[(KEY, G)], h: SCollection[(KEY, H)]): SCollection[(KEY, (Iterable[A], Iterable[B], Iterable[C], Iterable[D], Iterable[E], Iterable[F], Iterable[G], Iterable[H]))] = { - implicit val keyCoder = a.keyCoder - implicit val (coderA, coderB, coderC, coderD, coderE, coderF, coderG, coderH) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder) + implicit val keyCoder: Coder[KEY] = a.keyCoder + implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E], coderF: Coder[F], coderG: Coder[G], coderH: Coder[H]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder) val (tagA, tagB, tagC, tagD, tagE, tagF, tagG, tagH) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E](), new TupleTag[F](), new TupleTag[G](), new TupleTag[H]()) val keyed = KeyedPCollectionTuple .of(tagA, a.toKV.internal) @@ -151,8 +153,8 @@ trait MultiJoin extends Serializable { } def cogroup[KEY, A, B, C, D, E, F, G, H, I](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)], f: SCollection[(KEY, F)], g: SCollection[(KEY, G)], h: SCollection[(KEY, H)], i: SCollection[(KEY, I)]): SCollection[(KEY, (Iterable[A], Iterable[B], Iterable[C], Iterable[D], Iterable[E], Iterable[F], Iterable[G], Iterable[H], Iterable[I]))] = { - implicit val keyCoder = a.keyCoder - implicit val (coderA, coderB, coderC, coderD, coderE, coderF, coderG, coderH, coderI) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder) + implicit val keyCoder: Coder[KEY] = a.keyCoder + implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E], coderF: Coder[F], coderG: Coder[G], coderH: Coder[H], coderI: Coder[I]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder) val (tagA, tagB, tagC, tagD, tagE, tagF, tagG, tagH, tagI) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E](), new TupleTag[F](), new TupleTag[G](), new TupleTag[H](), new TupleTag[I]()) val keyed = KeyedPCollectionTuple .of(tagA, a.toKV.internal) @@ -172,8 +174,8 @@ trait MultiJoin extends Serializable { } def cogroup[KEY, A, B, C, D, E, F, G, H, I, J](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], 
c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)], f: SCollection[(KEY, F)], g: SCollection[(KEY, G)], h: SCollection[(KEY, H)], i: SCollection[(KEY, I)], j: SCollection[(KEY, J)]): SCollection[(KEY, (Iterable[A], Iterable[B], Iterable[C], Iterable[D], Iterable[E], Iterable[F], Iterable[G], Iterable[H], Iterable[I], Iterable[J]))] = { - implicit val keyCoder = a.keyCoder - implicit val (coderA, coderB, coderC, coderD, coderE, coderF, coderG, coderH, coderI, coderJ) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder) + implicit val keyCoder: Coder[KEY] = a.keyCoder + implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E], coderF: Coder[F], coderG: Coder[G], coderH: Coder[H], coderI: Coder[I], coderJ: Coder[J]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder) val (tagA, tagB, tagC, tagD, tagE, tagF, tagG, tagH, tagI, tagJ) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E](), new TupleTag[F](), new TupleTag[G](), new TupleTag[H](), new TupleTag[I](), new TupleTag[J]()) val keyed = KeyedPCollectionTuple .of(tagA, a.toKV.internal) @@ -194,8 +196,8 @@ trait MultiJoin extends Serializable { } def cogroup[KEY, A, B, C, D, E, F, G, H, I, J, K](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)], f: SCollection[(KEY, F)], g: SCollection[(KEY, G)], h: SCollection[(KEY, H)], i: SCollection[(KEY, I)], j: SCollection[(KEY, J)], k: SCollection[(KEY, K)]): SCollection[(KEY, (Iterable[A], Iterable[B], Iterable[C], Iterable[D], Iterable[E], Iterable[F], Iterable[G], Iterable[H], Iterable[I], Iterable[J], Iterable[K]))] = { - implicit val keyCoder = a.keyCoder - implicit val (coderA, coderB, coderC, coderD, coderE, coderF, coderG, coderH, coderI, coderJ, coderK) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder) + implicit val keyCoder: Coder[KEY] = a.keyCoder + implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E], coderF: Coder[F], coderG: Coder[G], coderH: Coder[H], coderI: Coder[I], coderJ: Coder[J], coderK: Coder[K]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder) val (tagA, tagB, tagC, tagD, tagE, tagF, tagG, tagH, tagI, tagJ, tagK) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E](), new TupleTag[F](), new TupleTag[G](), new TupleTag[H](), new TupleTag[I](), new TupleTag[J](), new TupleTag[K]()) val keyed = KeyedPCollectionTuple .of(tagA, a.toKV.internal) @@ -217,8 +219,8 @@ trait MultiJoin extends Serializable { } def cogroup[KEY, A, B, C, D, E, F, G, H, I, J, K, L](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)], f: SCollection[(KEY, F)], g: SCollection[(KEY, G)], h: SCollection[(KEY, H)], i: SCollection[(KEY, I)], j: SCollection[(KEY, J)], k: SCollection[(KEY, K)], l: SCollection[(KEY, L)]): SCollection[(KEY, (Iterable[A], Iterable[B], Iterable[C], Iterable[D], Iterable[E], Iterable[F], Iterable[G], Iterable[H], Iterable[I], Iterable[J], 
Iterable[K], Iterable[L]))] = { - implicit val keyCoder = a.keyCoder - implicit val (coderA, coderB, coderC, coderD, coderE, coderF, coderG, coderH, coderI, coderJ, coderK, coderL) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder) + implicit val keyCoder: Coder[KEY] = a.keyCoder + implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E], coderF: Coder[F], coderG: Coder[G], coderH: Coder[H], coderI: Coder[I], coderJ: Coder[J], coderK: Coder[K], coderL: Coder[L]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder) val (tagA, tagB, tagC, tagD, tagE, tagF, tagG, tagH, tagI, tagJ, tagK, tagL) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E](), new TupleTag[F](), new TupleTag[G](), new TupleTag[H](), new TupleTag[I](), new TupleTag[J](), new TupleTag[K](), new TupleTag[L]()) val keyed = KeyedPCollectionTuple .of(tagA, a.toKV.internal) @@ -241,8 +243,8 @@ trait MultiJoin extends Serializable { } def cogroup[KEY, A, B, C, D, E, F, G, H, I, J, K, L, M](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)], f: SCollection[(KEY, F)], g: SCollection[(KEY, G)], h: SCollection[(KEY, H)], i: SCollection[(KEY, I)], j: SCollection[(KEY, J)], k: SCollection[(KEY, K)], l: SCollection[(KEY, L)], m: SCollection[(KEY, M)]): SCollection[(KEY, (Iterable[A], Iterable[B], Iterable[C], Iterable[D], Iterable[E], Iterable[F], Iterable[G], Iterable[H], Iterable[I], Iterable[J], Iterable[K], Iterable[L], Iterable[M]))] = { - implicit val keyCoder = a.keyCoder - implicit val (coderA, coderB, coderC, coderD, coderE, coderF, coderG, coderH, coderI, coderJ, coderK, coderL, coderM) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder) + implicit val keyCoder: Coder[KEY] = a.keyCoder + implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E], coderF: Coder[F], coderG: Coder[G], coderH: Coder[H], coderI: Coder[I], coderJ: Coder[J], coderK: Coder[K], coderL: Coder[L], coderM: Coder[M]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder) val (tagA, tagB, tagC, tagD, tagE, tagF, tagG, tagH, tagI, tagJ, tagK, tagL, tagM) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E](), new TupleTag[F](), new TupleTag[G](), new TupleTag[H](), new TupleTag[I](), new TupleTag[J](), new TupleTag[K](), new TupleTag[L](), new TupleTag[M]()) val keyed = KeyedPCollectionTuple .of(tagA, a.toKV.internal) @@ -266,8 +268,8 @@ trait MultiJoin extends Serializable { } def cogroup[KEY, A, B, C, D, E, F, G, H, I, J, K, L, M, N](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)], f: SCollection[(KEY, F)], g: SCollection[(KEY, G)], h: SCollection[(KEY, H)], i: SCollection[(KEY, I)], j: SCollection[(KEY, J)], k: SCollection[(KEY, K)], l: SCollection[(KEY, L)], m: SCollection[(KEY, M)], n: SCollection[(KEY, N)]): SCollection[(KEY, 
(Iterable[A], Iterable[B], Iterable[C], Iterable[D], Iterable[E], Iterable[F], Iterable[G], Iterable[H], Iterable[I], Iterable[J], Iterable[K], Iterable[L], Iterable[M], Iterable[N]))] = { - implicit val keyCoder = a.keyCoder - implicit val (coderA, coderB, coderC, coderD, coderE, coderF, coderG, coderH, coderI, coderJ, coderK, coderL, coderM, coderN) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder) + implicit val keyCoder: Coder[KEY] = a.keyCoder + implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E], coderF: Coder[F], coderG: Coder[G], coderH: Coder[H], coderI: Coder[I], coderJ: Coder[J], coderK: Coder[K], coderL: Coder[L], coderM: Coder[M], coderN: Coder[N]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder) val (tagA, tagB, tagC, tagD, tagE, tagF, tagG, tagH, tagI, tagJ, tagK, tagL, tagM, tagN) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E](), new TupleTag[F](), new TupleTag[G](), new TupleTag[H](), new TupleTag[I](), new TupleTag[J](), new TupleTag[K](), new TupleTag[L](), new TupleTag[M](), new TupleTag[N]()) val keyed = KeyedPCollectionTuple .of(tagA, a.toKV.internal) @@ -292,8 +294,8 @@ trait MultiJoin extends Serializable { } def cogroup[KEY, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)], f: SCollection[(KEY, F)], g: SCollection[(KEY, G)], h: SCollection[(KEY, H)], i: SCollection[(KEY, I)], j: SCollection[(KEY, J)], k: SCollection[(KEY, K)], l: SCollection[(KEY, L)], m: SCollection[(KEY, M)], n: SCollection[(KEY, N)], o: SCollection[(KEY, O)]): SCollection[(KEY, (Iterable[A], Iterable[B], Iterable[C], Iterable[D], Iterable[E], Iterable[F], Iterable[G], Iterable[H], Iterable[I], Iterable[J], Iterable[K], Iterable[L], Iterable[M], Iterable[N], Iterable[O]))] = { - implicit val keyCoder = a.keyCoder - implicit val (coderA, coderB, coderC, coderD, coderE, coderF, coderG, coderH, coderI, coderJ, coderK, coderL, coderM, coderN, coderO) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder, o.valueCoder) + implicit val keyCoder: Coder[KEY] = a.keyCoder + implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E], coderF: Coder[F], coderG: Coder[G], coderH: Coder[H], coderI: Coder[I], coderJ: Coder[J], coderK: Coder[K], coderL: Coder[L], coderM: Coder[M], coderN: Coder[N], coderO: Coder[O]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder, o.valueCoder) val (tagA, tagB, tagC, tagD, tagE, tagF, tagG, tagH, tagI, tagJ, tagK, tagL, tagM, tagN, tagO) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E](), new TupleTag[F](), new TupleTag[G](), new TupleTag[H](), new TupleTag[I](), new TupleTag[J](), new TupleTag[K](), new TupleTag[L](), new TupleTag[M](), new TupleTag[N](), new TupleTag[O]()) val keyed = 
KeyedPCollectionTuple .of(tagA, a.toKV.internal) @@ -319,8 +321,8 @@ trait MultiJoin extends Serializable { } def cogroup[KEY, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)], f: SCollection[(KEY, F)], g: SCollection[(KEY, G)], h: SCollection[(KEY, H)], i: SCollection[(KEY, I)], j: SCollection[(KEY, J)], k: SCollection[(KEY, K)], l: SCollection[(KEY, L)], m: SCollection[(KEY, M)], n: SCollection[(KEY, N)], o: SCollection[(KEY, O)], p: SCollection[(KEY, P)]): SCollection[(KEY, (Iterable[A], Iterable[B], Iterable[C], Iterable[D], Iterable[E], Iterable[F], Iterable[G], Iterable[H], Iterable[I], Iterable[J], Iterable[K], Iterable[L], Iterable[M], Iterable[N], Iterable[O], Iterable[P]))] = { - implicit val keyCoder = a.keyCoder - implicit val (coderA, coderB, coderC, coderD, coderE, coderF, coderG, coderH, coderI, coderJ, coderK, coderL, coderM, coderN, coderO, coderP) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder, o.valueCoder, p.valueCoder) + implicit val keyCoder: Coder[KEY] = a.keyCoder + implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E], coderF: Coder[F], coderG: Coder[G], coderH: Coder[H], coderI: Coder[I], coderJ: Coder[J], coderK: Coder[K], coderL: Coder[L], coderM: Coder[M], coderN: Coder[N], coderO: Coder[O], coderP: Coder[P]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder, o.valueCoder, p.valueCoder) val (tagA, tagB, tagC, tagD, tagE, tagF, tagG, tagH, tagI, tagJ, tagK, tagL, tagM, tagN, tagO, tagP) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E](), new TupleTag[F](), new TupleTag[G](), new TupleTag[H](), new TupleTag[I](), new TupleTag[J](), new TupleTag[K](), new TupleTag[L](), new TupleTag[M](), new TupleTag[N](), new TupleTag[O](), new TupleTag[P]()) val keyed = KeyedPCollectionTuple .of(tagA, a.toKV.internal) @@ -347,8 +349,8 @@ trait MultiJoin extends Serializable { } def cogroup[KEY, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)], f: SCollection[(KEY, F)], g: SCollection[(KEY, G)], h: SCollection[(KEY, H)], i: SCollection[(KEY, I)], j: SCollection[(KEY, J)], k: SCollection[(KEY, K)], l: SCollection[(KEY, L)], m: SCollection[(KEY, M)], n: SCollection[(KEY, N)], o: SCollection[(KEY, O)], p: SCollection[(KEY, P)], q: SCollection[(KEY, Q)]): SCollection[(KEY, (Iterable[A], Iterable[B], Iterable[C], Iterable[D], Iterable[E], Iterable[F], Iterable[G], Iterable[H], Iterable[I], Iterable[J], Iterable[K], Iterable[L], Iterable[M], Iterable[N], Iterable[O], Iterable[P], Iterable[Q]))] = { - implicit val keyCoder = a.keyCoder - implicit val (coderA, coderB, coderC, coderD, coderE, coderF, coderG, coderH, coderI, coderJ, coderK, coderL, coderM, coderN, coderO, coderP, coderQ) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder, o.valueCoder, p.valueCoder, q.valueCoder) + implicit val keyCoder: 
Coder[KEY] = a.keyCoder + implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E], coderF: Coder[F], coderG: Coder[G], coderH: Coder[H], coderI: Coder[I], coderJ: Coder[J], coderK: Coder[K], coderL: Coder[L], coderM: Coder[M], coderN: Coder[N], coderO: Coder[O], coderP: Coder[P], coderQ: Coder[Q]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder, o.valueCoder, p.valueCoder, q.valueCoder) val (tagA, tagB, tagC, tagD, tagE, tagF, tagG, tagH, tagI, tagJ, tagK, tagL, tagM, tagN, tagO, tagP, tagQ) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E](), new TupleTag[F](), new TupleTag[G](), new TupleTag[H](), new TupleTag[I](), new TupleTag[J](), new TupleTag[K](), new TupleTag[L](), new TupleTag[M](), new TupleTag[N](), new TupleTag[O](), new TupleTag[P](), new TupleTag[Q]()) val keyed = KeyedPCollectionTuple .of(tagA, a.toKV.internal) @@ -376,8 +378,8 @@ trait MultiJoin extends Serializable { } def cogroup[KEY, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)], f: SCollection[(KEY, F)], g: SCollection[(KEY, G)], h: SCollection[(KEY, H)], i: SCollection[(KEY, I)], j: SCollection[(KEY, J)], k: SCollection[(KEY, K)], l: SCollection[(KEY, L)], m: SCollection[(KEY, M)], n: SCollection[(KEY, N)], o: SCollection[(KEY, O)], p: SCollection[(KEY, P)], q: SCollection[(KEY, Q)], r: SCollection[(KEY, R)]): SCollection[(KEY, (Iterable[A], Iterable[B], Iterable[C], Iterable[D], Iterable[E], Iterable[F], Iterable[G], Iterable[H], Iterable[I], Iterable[J], Iterable[K], Iterable[L], Iterable[M], Iterable[N], Iterable[O], Iterable[P], Iterable[Q], Iterable[R]))] = { - implicit val keyCoder = a.keyCoder - implicit val (coderA, coderB, coderC, coderD, coderE, coderF, coderG, coderH, coderI, coderJ, coderK, coderL, coderM, coderN, coderO, coderP, coderQ, coderR) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder, o.valueCoder, p.valueCoder, q.valueCoder, r.valueCoder) + implicit val keyCoder: Coder[KEY] = a.keyCoder + implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E], coderF: Coder[F], coderG: Coder[G], coderH: Coder[H], coderI: Coder[I], coderJ: Coder[J], coderK: Coder[K], coderL: Coder[L], coderM: Coder[M], coderN: Coder[N], coderO: Coder[O], coderP: Coder[P], coderQ: Coder[Q], coderR: Coder[R]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder, o.valueCoder, p.valueCoder, q.valueCoder, r.valueCoder) val (tagA, tagB, tagC, tagD, tagE, tagF, tagG, tagH, tagI, tagJ, tagK, tagL, tagM, tagN, tagO, tagP, tagQ, tagR) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E](), new TupleTag[F](), new TupleTag[G](), new TupleTag[H](), new TupleTag[I](), new TupleTag[J](), new TupleTag[K](), new TupleTag[L](), new TupleTag[M](), new TupleTag[N](), new TupleTag[O](), new TupleTag[P](), new TupleTag[Q](), new TupleTag[R]()) val keyed = KeyedPCollectionTuple .of(tagA, 
a.toKV.internal) @@ -406,8 +408,8 @@ trait MultiJoin extends Serializable { } def cogroup[KEY, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)], f: SCollection[(KEY, F)], g: SCollection[(KEY, G)], h: SCollection[(KEY, H)], i: SCollection[(KEY, I)], j: SCollection[(KEY, J)], k: SCollection[(KEY, K)], l: SCollection[(KEY, L)], m: SCollection[(KEY, M)], n: SCollection[(KEY, N)], o: SCollection[(KEY, O)], p: SCollection[(KEY, P)], q: SCollection[(KEY, Q)], r: SCollection[(KEY, R)], s: SCollection[(KEY, S)]): SCollection[(KEY, (Iterable[A], Iterable[B], Iterable[C], Iterable[D], Iterable[E], Iterable[F], Iterable[G], Iterable[H], Iterable[I], Iterable[J], Iterable[K], Iterable[L], Iterable[M], Iterable[N], Iterable[O], Iterable[P], Iterable[Q], Iterable[R], Iterable[S]))] = { - implicit val keyCoder = a.keyCoder - implicit val (coderA, coderB, coderC, coderD, coderE, coderF, coderG, coderH, coderI, coderJ, coderK, coderL, coderM, coderN, coderO, coderP, coderQ, coderR, coderS) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder, o.valueCoder, p.valueCoder, q.valueCoder, r.valueCoder, s.valueCoder) + implicit val keyCoder: Coder[KEY] = a.keyCoder + implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E], coderF: Coder[F], coderG: Coder[G], coderH: Coder[H], coderI: Coder[I], coderJ: Coder[J], coderK: Coder[K], coderL: Coder[L], coderM: Coder[M], coderN: Coder[N], coderO: Coder[O], coderP: Coder[P], coderQ: Coder[Q], coderR: Coder[R], coderS: Coder[S]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder, o.valueCoder, p.valueCoder, q.valueCoder, r.valueCoder, s.valueCoder) val (tagA, tagB, tagC, tagD, tagE, tagF, tagG, tagH, tagI, tagJ, tagK, tagL, tagM, tagN, tagO, tagP, tagQ, tagR, tagS) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E](), new TupleTag[F](), new TupleTag[G](), new TupleTag[H](), new TupleTag[I](), new TupleTag[J](), new TupleTag[K](), new TupleTag[L](), new TupleTag[M](), new TupleTag[N](), new TupleTag[O](), new TupleTag[P](), new TupleTag[Q](), new TupleTag[R](), new TupleTag[S]()) val keyed = KeyedPCollectionTuple .of(tagA, a.toKV.internal) @@ -437,8 +439,8 @@ trait MultiJoin extends Serializable { } def cogroup[KEY, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)], f: SCollection[(KEY, F)], g: SCollection[(KEY, G)], h: SCollection[(KEY, H)], i: SCollection[(KEY, I)], j: SCollection[(KEY, J)], k: SCollection[(KEY, K)], l: SCollection[(KEY, L)], m: SCollection[(KEY, M)], n: SCollection[(KEY, N)], o: SCollection[(KEY, O)], p: SCollection[(KEY, P)], q: SCollection[(KEY, Q)], r: SCollection[(KEY, R)], s: SCollection[(KEY, S)], t: SCollection[(KEY, T)]): SCollection[(KEY, (Iterable[A], Iterable[B], Iterable[C], Iterable[D], Iterable[E], Iterable[F], Iterable[G], Iterable[H], Iterable[I], Iterable[J], Iterable[K], Iterable[L], Iterable[M], Iterable[N], Iterable[O], Iterable[P], Iterable[Q], Iterable[R], Iterable[S], Iterable[T]))] = 
{ - implicit val keyCoder = a.keyCoder - implicit val (coderA, coderB, coderC, coderD, coderE, coderF, coderG, coderH, coderI, coderJ, coderK, coderL, coderM, coderN, coderO, coderP, coderQ, coderR, coderS, coderT) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder, o.valueCoder, p.valueCoder, q.valueCoder, r.valueCoder, s.valueCoder, t.valueCoder) + implicit val keyCoder: Coder[KEY] = a.keyCoder + implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E], coderF: Coder[F], coderG: Coder[G], coderH: Coder[H], coderI: Coder[I], coderJ: Coder[J], coderK: Coder[K], coderL: Coder[L], coderM: Coder[M], coderN: Coder[N], coderO: Coder[O], coderP: Coder[P], coderQ: Coder[Q], coderR: Coder[R], coderS: Coder[S], coderT: Coder[T]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder, o.valueCoder, p.valueCoder, q.valueCoder, r.valueCoder, s.valueCoder, t.valueCoder) val (tagA, tagB, tagC, tagD, tagE, tagF, tagG, tagH, tagI, tagJ, tagK, tagL, tagM, tagN, tagO, tagP, tagQ, tagR, tagS, tagT) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E](), new TupleTag[F](), new TupleTag[G](), new TupleTag[H](), new TupleTag[I](), new TupleTag[J](), new TupleTag[K](), new TupleTag[L](), new TupleTag[M](), new TupleTag[N](), new TupleTag[O](), new TupleTag[P](), new TupleTag[Q](), new TupleTag[R](), new TupleTag[S](), new TupleTag[T]()) val keyed = KeyedPCollectionTuple .of(tagA, a.toKV.internal) @@ -469,8 +471,8 @@ trait MultiJoin extends Serializable { } def cogroup[KEY, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)], f: SCollection[(KEY, F)], g: SCollection[(KEY, G)], h: SCollection[(KEY, H)], i: SCollection[(KEY, I)], j: SCollection[(KEY, J)], k: SCollection[(KEY, K)], l: SCollection[(KEY, L)], m: SCollection[(KEY, M)], n: SCollection[(KEY, N)], o: SCollection[(KEY, O)], p: SCollection[(KEY, P)], q: SCollection[(KEY, Q)], r: SCollection[(KEY, R)], s: SCollection[(KEY, S)], t: SCollection[(KEY, T)], u: SCollection[(KEY, U)]): SCollection[(KEY, (Iterable[A], Iterable[B], Iterable[C], Iterable[D], Iterable[E], Iterable[F], Iterable[G], Iterable[H], Iterable[I], Iterable[J], Iterable[K], Iterable[L], Iterable[M], Iterable[N], Iterable[O], Iterable[P], Iterable[Q], Iterable[R], Iterable[S], Iterable[T], Iterable[U]))] = { - implicit val keyCoder = a.keyCoder - implicit val (coderA, coderB, coderC, coderD, coderE, coderF, coderG, coderH, coderI, coderJ, coderK, coderL, coderM, coderN, coderO, coderP, coderQ, coderR, coderS, coderT, coderU) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder, o.valueCoder, p.valueCoder, q.valueCoder, r.valueCoder, s.valueCoder, t.valueCoder, u.valueCoder) + implicit val keyCoder: Coder[KEY] = a.keyCoder + implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E], coderF: Coder[F], coderG: Coder[G], coderH: Coder[H], coderI: Coder[I], coderJ: Coder[J], coderK: Coder[K], coderL: 
Coder[L], coderM: Coder[M], coderN: Coder[N], coderO: Coder[O], coderP: Coder[P], coderQ: Coder[Q], coderR: Coder[R], coderS: Coder[S], coderT: Coder[T], coderU: Coder[U]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder, o.valueCoder, p.valueCoder, q.valueCoder, r.valueCoder, s.valueCoder, t.valueCoder, u.valueCoder) val (tagA, tagB, tagC, tagD, tagE, tagF, tagG, tagH, tagI, tagJ, tagK, tagL, tagM, tagN, tagO, tagP, tagQ, tagR, tagS, tagT, tagU) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E](), new TupleTag[F](), new TupleTag[G](), new TupleTag[H](), new TupleTag[I](), new TupleTag[J](), new TupleTag[K](), new TupleTag[L](), new TupleTag[M](), new TupleTag[N](), new TupleTag[O](), new TupleTag[P](), new TupleTag[Q](), new TupleTag[R](), new TupleTag[S](), new TupleTag[T](), new TupleTag[U]()) val keyed = KeyedPCollectionTuple .of(tagA, a.toKV.internal) @@ -502,8 +504,8 @@ trait MultiJoin extends Serializable { } def cogroup[KEY, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)], f: SCollection[(KEY, F)], g: SCollection[(KEY, G)], h: SCollection[(KEY, H)], i: SCollection[(KEY, I)], j: SCollection[(KEY, J)], k: SCollection[(KEY, K)], l: SCollection[(KEY, L)], m: SCollection[(KEY, M)], n: SCollection[(KEY, N)], o: SCollection[(KEY, O)], p: SCollection[(KEY, P)], q: SCollection[(KEY, Q)], r: SCollection[(KEY, R)], s: SCollection[(KEY, S)], t: SCollection[(KEY, T)], u: SCollection[(KEY, U)], v: SCollection[(KEY, V)]): SCollection[(KEY, (Iterable[A], Iterable[B], Iterable[C], Iterable[D], Iterable[E], Iterable[F], Iterable[G], Iterable[H], Iterable[I], Iterable[J], Iterable[K], Iterable[L], Iterable[M], Iterable[N], Iterable[O], Iterable[P], Iterable[Q], Iterable[R], Iterable[S], Iterable[T], Iterable[U], Iterable[V]))] = { - implicit val keyCoder = a.keyCoder - implicit val (coderA, coderB, coderC, coderD, coderE, coderF, coderG, coderH, coderI, coderJ, coderK, coderL, coderM, coderN, coderO, coderP, coderQ, coderR, coderS, coderT, coderU, coderV) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder, o.valueCoder, p.valueCoder, q.valueCoder, r.valueCoder, s.valueCoder, t.valueCoder, u.valueCoder, v.valueCoder) + implicit val keyCoder: Coder[KEY] = a.keyCoder + implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E], coderF: Coder[F], coderG: Coder[G], coderH: Coder[H], coderI: Coder[I], coderJ: Coder[J], coderK: Coder[K], coderL: Coder[L], coderM: Coder[M], coderN: Coder[N], coderO: Coder[O], coderP: Coder[P], coderQ: Coder[Q], coderR: Coder[R], coderS: Coder[S], coderT: Coder[T], coderU: Coder[U], coderV: Coder[V]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder, o.valueCoder, p.valueCoder, q.valueCoder, r.valueCoder, s.valueCoder, t.valueCoder, u.valueCoder, v.valueCoder) val (tagA, tagB, tagC, tagD, tagE, tagF, tagG, tagH, tagI, tagJ, tagK, tagL, tagM, tagN, tagO, tagP, tagQ, tagR, tagS, tagT, tagU, tagV) = (new 
TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E](), new TupleTag[F](), new TupleTag[G](), new TupleTag[H](), new TupleTag[I](), new TupleTag[J](), new TupleTag[K](), new TupleTag[L](), new TupleTag[M](), new TupleTag[N](), new TupleTag[O](), new TupleTag[P](), new TupleTag[Q](), new TupleTag[R](), new TupleTag[S](), new TupleTag[T](), new TupleTag[U](), new TupleTag[V]()) val keyed = KeyedPCollectionTuple .of(tagA, a.toKV.internal) @@ -536,8 +538,8 @@ trait MultiJoin extends Serializable { } def apply[KEY, A, B](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)]): SCollection[(KEY, (A, B))] = { - implicit val keyCoder = a.keyCoder - implicit val (coderA, coderB) = (a.valueCoder, b.valueCoder) + implicit val keyCoder: Coder[KEY] = a.keyCoder + implicit val (coderA: Coder[A], coderB: Coder[B]) = (a.valueCoder, b.valueCoder) val (tagA, tagB) = (new TupleTag[A](), new TupleTag[B]()) val keyed = KeyedPCollectionTuple .of(tagA, a.toKV.internal) @@ -553,8 +555,8 @@ trait MultiJoin extends Serializable { } def apply[KEY, A, B, C](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)]): SCollection[(KEY, (A, B, C))] = { - implicit val keyCoder = a.keyCoder - implicit val (coderA, coderB, coderC) = (a.valueCoder, b.valueCoder, c.valueCoder) + implicit val keyCoder: Coder[KEY] = a.keyCoder + implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C]) = (a.valueCoder, b.valueCoder, c.valueCoder) val (tagA, tagB, tagC) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C]()) val keyed = KeyedPCollectionTuple .of(tagA, a.toKV.internal) @@ -572,8 +574,8 @@ trait MultiJoin extends Serializable { } def apply[KEY, A, B, C, D](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)]): SCollection[(KEY, (A, B, C, D))] = { - implicit val keyCoder = a.keyCoder - implicit val (coderA, coderB, coderC, coderD) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder) + implicit val keyCoder: Coder[KEY] = a.keyCoder + implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder) val (tagA, tagB, tagC, tagD) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D]()) val keyed = KeyedPCollectionTuple .of(tagA, a.toKV.internal) @@ -593,8 +595,8 @@ trait MultiJoin extends Serializable { } def apply[KEY, A, B, C, D, E](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)]): SCollection[(KEY, (A, B, C, D, E))] = { - implicit val keyCoder = a.keyCoder - implicit val (coderA, coderB, coderC, coderD, coderE) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder) + implicit val keyCoder: Coder[KEY] = a.keyCoder + implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder) val (tagA, tagB, tagC, tagD, tagE) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E]()) val keyed = KeyedPCollectionTuple .of(tagA, a.toKV.internal) @@ -616,8 +618,8 @@ trait MultiJoin extends Serializable { } def apply[KEY, A, B, C, D, E, F](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)], f: SCollection[(KEY, F)]): SCollection[(KEY, (A, B, C, D, E, F))] = { - implicit val keyCoder = a.keyCoder - implicit val 
(coderA, coderB, coderC, coderD, coderE, coderF) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder) + implicit val keyCoder: Coder[KEY] = a.keyCoder + implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E], coderF: Coder[F]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder) val (tagA, tagB, tagC, tagD, tagE, tagF) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E](), new TupleTag[F]()) val keyed = KeyedPCollectionTuple .of(tagA, a.toKV.internal) @@ -641,8 +643,8 @@ trait MultiJoin extends Serializable { } def apply[KEY, A, B, C, D, E, F, G](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)], f: SCollection[(KEY, F)], g: SCollection[(KEY, G)]): SCollection[(KEY, (A, B, C, D, E, F, G))] = { - implicit val keyCoder = a.keyCoder - implicit val (coderA, coderB, coderC, coderD, coderE, coderF, coderG) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder) + implicit val keyCoder: Coder[KEY] = a.keyCoder + implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E], coderF: Coder[F], coderG: Coder[G]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder) val (tagA, tagB, tagC, tagD, tagE, tagF, tagG) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E](), new TupleTag[F](), new TupleTag[G]()) val keyed = KeyedPCollectionTuple .of(tagA, a.toKV.internal) @@ -668,8 +670,8 @@ trait MultiJoin extends Serializable { } def apply[KEY, A, B, C, D, E, F, G, H](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)], f: SCollection[(KEY, F)], g: SCollection[(KEY, G)], h: SCollection[(KEY, H)]): SCollection[(KEY, (A, B, C, D, E, F, G, H))] = { - implicit val keyCoder = a.keyCoder - implicit val (coderA, coderB, coderC, coderD, coderE, coderF, coderG, coderH) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder) + implicit val keyCoder: Coder[KEY] = a.keyCoder + implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E], coderF: Coder[F], coderG: Coder[G], coderH: Coder[H]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder) val (tagA, tagB, tagC, tagD, tagE, tagF, tagG, tagH) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E](), new TupleTag[F](), new TupleTag[G](), new TupleTag[H]()) val keyed = KeyedPCollectionTuple .of(tagA, a.toKV.internal) @@ -697,8 +699,8 @@ trait MultiJoin extends Serializable { } def apply[KEY, A, B, C, D, E, F, G, H, I](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)], f: SCollection[(KEY, F)], g: SCollection[(KEY, G)], h: SCollection[(KEY, H)], i: SCollection[(KEY, I)]): SCollection[(KEY, (A, B, C, D, E, F, G, H, I))] = { - implicit val keyCoder = a.keyCoder - implicit val (coderA, coderB, coderC, coderD, coderE, coderF, coderG, coderH, coderI) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder) + implicit val keyCoder: Coder[KEY] = 
a.keyCoder + implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E], coderF: Coder[F], coderG: Coder[G], coderH: Coder[H], coderI: Coder[I]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder) val (tagA, tagB, tagC, tagD, tagE, tagF, tagG, tagH, tagI) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E](), new TupleTag[F](), new TupleTag[G](), new TupleTag[H](), new TupleTag[I]()) val keyed = KeyedPCollectionTuple .of(tagA, a.toKV.internal) @@ -728,8 +730,8 @@ trait MultiJoin extends Serializable { } def apply[KEY, A, B, C, D, E, F, G, H, I, J](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)], f: SCollection[(KEY, F)], g: SCollection[(KEY, G)], h: SCollection[(KEY, H)], i: SCollection[(KEY, I)], j: SCollection[(KEY, J)]): SCollection[(KEY, (A, B, C, D, E, F, G, H, I, J))] = { - implicit val keyCoder = a.keyCoder - implicit val (coderA, coderB, coderC, coderD, coderE, coderF, coderG, coderH, coderI, coderJ) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder) + implicit val keyCoder: Coder[KEY] = a.keyCoder + implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E], coderF: Coder[F], coderG: Coder[G], coderH: Coder[H], coderI: Coder[I], coderJ: Coder[J]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder) val (tagA, tagB, tagC, tagD, tagE, tagF, tagG, tagH, tagI, tagJ) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E](), new TupleTag[F](), new TupleTag[G](), new TupleTag[H](), new TupleTag[I](), new TupleTag[J]()) val keyed = KeyedPCollectionTuple .of(tagA, a.toKV.internal) @@ -761,8 +763,8 @@ trait MultiJoin extends Serializable { } def apply[KEY, A, B, C, D, E, F, G, H, I, J, K](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)], f: SCollection[(KEY, F)], g: SCollection[(KEY, G)], h: SCollection[(KEY, H)], i: SCollection[(KEY, I)], j: SCollection[(KEY, J)], k: SCollection[(KEY, K)]): SCollection[(KEY, (A, B, C, D, E, F, G, H, I, J, K))] = { - implicit val keyCoder = a.keyCoder - implicit val (coderA, coderB, coderC, coderD, coderE, coderF, coderG, coderH, coderI, coderJ, coderK) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder) + implicit val keyCoder: Coder[KEY] = a.keyCoder + implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E], coderF: Coder[F], coderG: Coder[G], coderH: Coder[H], coderI: Coder[I], coderJ: Coder[J], coderK: Coder[K]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder) val (tagA, tagB, tagC, tagD, tagE, tagF, tagG, tagH, tagI, tagJ, tagK) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E](), new TupleTag[F](), new TupleTag[G](), new TupleTag[H](), new TupleTag[I](), new TupleTag[J](), new TupleTag[K]()) val keyed = KeyedPCollectionTuple .of(tagA, a.toKV.internal) @@ -796,8 +798,8 @@ 
trait MultiJoin extends Serializable { } def apply[KEY, A, B, C, D, E, F, G, H, I, J, K, L](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)], f: SCollection[(KEY, F)], g: SCollection[(KEY, G)], h: SCollection[(KEY, H)], i: SCollection[(KEY, I)], j: SCollection[(KEY, J)], k: SCollection[(KEY, K)], l: SCollection[(KEY, L)]): SCollection[(KEY, (A, B, C, D, E, F, G, H, I, J, K, L))] = { - implicit val keyCoder = a.keyCoder - implicit val (coderA, coderB, coderC, coderD, coderE, coderF, coderG, coderH, coderI, coderJ, coderK, coderL) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder) + implicit val keyCoder: Coder[KEY] = a.keyCoder + implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E], coderF: Coder[F], coderG: Coder[G], coderH: Coder[H], coderI: Coder[I], coderJ: Coder[J], coderK: Coder[K], coderL: Coder[L]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder) val (tagA, tagB, tagC, tagD, tagE, tagF, tagG, tagH, tagI, tagJ, tagK, tagL) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E](), new TupleTag[F](), new TupleTag[G](), new TupleTag[H](), new TupleTag[I](), new TupleTag[J](), new TupleTag[K](), new TupleTag[L]()) val keyed = KeyedPCollectionTuple .of(tagA, a.toKV.internal) @@ -833,8 +835,8 @@ trait MultiJoin extends Serializable { } def apply[KEY, A, B, C, D, E, F, G, H, I, J, K, L, M](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)], f: SCollection[(KEY, F)], g: SCollection[(KEY, G)], h: SCollection[(KEY, H)], i: SCollection[(KEY, I)], j: SCollection[(KEY, J)], k: SCollection[(KEY, K)], l: SCollection[(KEY, L)], m: SCollection[(KEY, M)]): SCollection[(KEY, (A, B, C, D, E, F, G, H, I, J, K, L, M))] = { - implicit val keyCoder = a.keyCoder - implicit val (coderA, coderB, coderC, coderD, coderE, coderF, coderG, coderH, coderI, coderJ, coderK, coderL, coderM) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder) + implicit val keyCoder: Coder[KEY] = a.keyCoder + implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E], coderF: Coder[F], coderG: Coder[G], coderH: Coder[H], coderI: Coder[I], coderJ: Coder[J], coderK: Coder[K], coderL: Coder[L], coderM: Coder[M]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder) val (tagA, tagB, tagC, tagD, tagE, tagF, tagG, tagH, tagI, tagJ, tagK, tagL, tagM) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E](), new TupleTag[F](), new TupleTag[G](), new TupleTag[H](), new TupleTag[I](), new TupleTag[J](), new TupleTag[K](), new TupleTag[L](), new TupleTag[M]()) val keyed = KeyedPCollectionTuple .of(tagA, a.toKV.internal) @@ -872,8 +874,8 @@ trait MultiJoin extends Serializable { } def apply[KEY, A, B, C, D, E, F, G, H, I, J, K, L, M, N](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: 
SCollection[(KEY, D)], e: SCollection[(KEY, E)], f: SCollection[(KEY, F)], g: SCollection[(KEY, G)], h: SCollection[(KEY, H)], i: SCollection[(KEY, I)], j: SCollection[(KEY, J)], k: SCollection[(KEY, K)], l: SCollection[(KEY, L)], m: SCollection[(KEY, M)], n: SCollection[(KEY, N)]): SCollection[(KEY, (A, B, C, D, E, F, G, H, I, J, K, L, M, N))] = { - implicit val keyCoder = a.keyCoder - implicit val (coderA, coderB, coderC, coderD, coderE, coderF, coderG, coderH, coderI, coderJ, coderK, coderL, coderM, coderN) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder) + implicit val keyCoder: Coder[KEY] = a.keyCoder + implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E], coderF: Coder[F], coderG: Coder[G], coderH: Coder[H], coderI: Coder[I], coderJ: Coder[J], coderK: Coder[K], coderL: Coder[L], coderM: Coder[M], coderN: Coder[N]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder) val (tagA, tagB, tagC, tagD, tagE, tagF, tagG, tagH, tagI, tagJ, tagK, tagL, tagM, tagN) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E](), new TupleTag[F](), new TupleTag[G](), new TupleTag[H](), new TupleTag[I](), new TupleTag[J](), new TupleTag[K](), new TupleTag[L](), new TupleTag[M](), new TupleTag[N]()) val keyed = KeyedPCollectionTuple .of(tagA, a.toKV.internal) @@ -913,8 +915,8 @@ trait MultiJoin extends Serializable { } def apply[KEY, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)], f: SCollection[(KEY, F)], g: SCollection[(KEY, G)], h: SCollection[(KEY, H)], i: SCollection[(KEY, I)], j: SCollection[(KEY, J)], k: SCollection[(KEY, K)], l: SCollection[(KEY, L)], m: SCollection[(KEY, M)], n: SCollection[(KEY, N)], o: SCollection[(KEY, O)]): SCollection[(KEY, (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O))] = { - implicit val keyCoder = a.keyCoder - implicit val (coderA, coderB, coderC, coderD, coderE, coderF, coderG, coderH, coderI, coderJ, coderK, coderL, coderM, coderN, coderO) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder, o.valueCoder) + implicit val keyCoder: Coder[KEY] = a.keyCoder + implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E], coderF: Coder[F], coderG: Coder[G], coderH: Coder[H], coderI: Coder[I], coderJ: Coder[J], coderK: Coder[K], coderL: Coder[L], coderM: Coder[M], coderN: Coder[N], coderO: Coder[O]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder, o.valueCoder) val (tagA, tagB, tagC, tagD, tagE, tagF, tagG, tagH, tagI, tagJ, tagK, tagL, tagM, tagN, tagO) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E](), new TupleTag[F](), new TupleTag[G](), new TupleTag[H](), new TupleTag[I](), new TupleTag[J](), new TupleTag[K](), new TupleTag[L](), new TupleTag[M](), new TupleTag[N](), new TupleTag[O]()) val 
keyed = KeyedPCollectionTuple .of(tagA, a.toKV.internal) @@ -956,8 +958,8 @@ trait MultiJoin extends Serializable { } def apply[KEY, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)], f: SCollection[(KEY, F)], g: SCollection[(KEY, G)], h: SCollection[(KEY, H)], i: SCollection[(KEY, I)], j: SCollection[(KEY, J)], k: SCollection[(KEY, K)], l: SCollection[(KEY, L)], m: SCollection[(KEY, M)], n: SCollection[(KEY, N)], o: SCollection[(KEY, O)], p: SCollection[(KEY, P)]): SCollection[(KEY, (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P))] = { - implicit val keyCoder = a.keyCoder - implicit val (coderA, coderB, coderC, coderD, coderE, coderF, coderG, coderH, coderI, coderJ, coderK, coderL, coderM, coderN, coderO, coderP) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder, o.valueCoder, p.valueCoder) + implicit val keyCoder: Coder[KEY] = a.keyCoder + implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E], coderF: Coder[F], coderG: Coder[G], coderH: Coder[H], coderI: Coder[I], coderJ: Coder[J], coderK: Coder[K], coderL: Coder[L], coderM: Coder[M], coderN: Coder[N], coderO: Coder[O], coderP: Coder[P]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder, o.valueCoder, p.valueCoder) val (tagA, tagB, tagC, tagD, tagE, tagF, tagG, tagH, tagI, tagJ, tagK, tagL, tagM, tagN, tagO, tagP) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E](), new TupleTag[F](), new TupleTag[G](), new TupleTag[H](), new TupleTag[I](), new TupleTag[J](), new TupleTag[K](), new TupleTag[L](), new TupleTag[M](), new TupleTag[N](), new TupleTag[O](), new TupleTag[P]()) val keyed = KeyedPCollectionTuple .of(tagA, a.toKV.internal) @@ -1001,8 +1003,8 @@ trait MultiJoin extends Serializable { } def apply[KEY, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)], f: SCollection[(KEY, F)], g: SCollection[(KEY, G)], h: SCollection[(KEY, H)], i: SCollection[(KEY, I)], j: SCollection[(KEY, J)], k: SCollection[(KEY, K)], l: SCollection[(KEY, L)], m: SCollection[(KEY, M)], n: SCollection[(KEY, N)], o: SCollection[(KEY, O)], p: SCollection[(KEY, P)], q: SCollection[(KEY, Q)]): SCollection[(KEY, (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q))] = { - implicit val keyCoder = a.keyCoder - implicit val (coderA, coderB, coderC, coderD, coderE, coderF, coderG, coderH, coderI, coderJ, coderK, coderL, coderM, coderN, coderO, coderP, coderQ) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder, o.valueCoder, p.valueCoder, q.valueCoder) + implicit val keyCoder: Coder[KEY] = a.keyCoder + implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E], coderF: Coder[F], coderG: Coder[G], coderH: Coder[H], coderI: Coder[I], coderJ: Coder[J], coderK: Coder[K], coderL: Coder[L], coderM: Coder[M], coderN: Coder[N], coderO: Coder[O], coderP: 
Coder[P], coderQ: Coder[Q]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder, o.valueCoder, p.valueCoder, q.valueCoder) val (tagA, tagB, tagC, tagD, tagE, tagF, tagG, tagH, tagI, tagJ, tagK, tagL, tagM, tagN, tagO, tagP, tagQ) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E](), new TupleTag[F](), new TupleTag[G](), new TupleTag[H](), new TupleTag[I](), new TupleTag[J](), new TupleTag[K](), new TupleTag[L](), new TupleTag[M](), new TupleTag[N](), new TupleTag[O](), new TupleTag[P](), new TupleTag[Q]()) val keyed = KeyedPCollectionTuple .of(tagA, a.toKV.internal) @@ -1048,8 +1050,8 @@ trait MultiJoin extends Serializable { } def apply[KEY, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)], f: SCollection[(KEY, F)], g: SCollection[(KEY, G)], h: SCollection[(KEY, H)], i: SCollection[(KEY, I)], j: SCollection[(KEY, J)], k: SCollection[(KEY, K)], l: SCollection[(KEY, L)], m: SCollection[(KEY, M)], n: SCollection[(KEY, N)], o: SCollection[(KEY, O)], p: SCollection[(KEY, P)], q: SCollection[(KEY, Q)], r: SCollection[(KEY, R)]): SCollection[(KEY, (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R))] = { - implicit val keyCoder = a.keyCoder - implicit val (coderA, coderB, coderC, coderD, coderE, coderF, coderG, coderH, coderI, coderJ, coderK, coderL, coderM, coderN, coderO, coderP, coderQ, coderR) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder, o.valueCoder, p.valueCoder, q.valueCoder, r.valueCoder) + implicit val keyCoder: Coder[KEY] = a.keyCoder + implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E], coderF: Coder[F], coderG: Coder[G], coderH: Coder[H], coderI: Coder[I], coderJ: Coder[J], coderK: Coder[K], coderL: Coder[L], coderM: Coder[M], coderN: Coder[N], coderO: Coder[O], coderP: Coder[P], coderQ: Coder[Q], coderR: Coder[R]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder, o.valueCoder, p.valueCoder, q.valueCoder, r.valueCoder) val (tagA, tagB, tagC, tagD, tagE, tagF, tagG, tagH, tagI, tagJ, tagK, tagL, tagM, tagN, tagO, tagP, tagQ, tagR) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E](), new TupleTag[F](), new TupleTag[G](), new TupleTag[H](), new TupleTag[I](), new TupleTag[J](), new TupleTag[K](), new TupleTag[L](), new TupleTag[M](), new TupleTag[N](), new TupleTag[O](), new TupleTag[P](), new TupleTag[Q](), new TupleTag[R]()) val keyed = KeyedPCollectionTuple .of(tagA, a.toKV.internal) @@ -1097,8 +1099,8 @@ trait MultiJoin extends Serializable { } def apply[KEY, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)], f: SCollection[(KEY, F)], g: SCollection[(KEY, G)], h: SCollection[(KEY, H)], i: SCollection[(KEY, I)], j: SCollection[(KEY, J)], k: SCollection[(KEY, K)], l: SCollection[(KEY, L)], m: SCollection[(KEY, M)], n: SCollection[(KEY, 
N)], o: SCollection[(KEY, O)], p: SCollection[(KEY, P)], q: SCollection[(KEY, Q)], r: SCollection[(KEY, R)], s: SCollection[(KEY, S)]): SCollection[(KEY, (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S))] = { - implicit val keyCoder = a.keyCoder - implicit val (coderA, coderB, coderC, coderD, coderE, coderF, coderG, coderH, coderI, coderJ, coderK, coderL, coderM, coderN, coderO, coderP, coderQ, coderR, coderS) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder, o.valueCoder, p.valueCoder, q.valueCoder, r.valueCoder, s.valueCoder) + implicit val keyCoder: Coder[KEY] = a.keyCoder + implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E], coderF: Coder[F], coderG: Coder[G], coderH: Coder[H], coderI: Coder[I], coderJ: Coder[J], coderK: Coder[K], coderL: Coder[L], coderM: Coder[M], coderN: Coder[N], coderO: Coder[O], coderP: Coder[P], coderQ: Coder[Q], coderR: Coder[R], coderS: Coder[S]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder, o.valueCoder, p.valueCoder, q.valueCoder, r.valueCoder, s.valueCoder) val (tagA, tagB, tagC, tagD, tagE, tagF, tagG, tagH, tagI, tagJ, tagK, tagL, tagM, tagN, tagO, tagP, tagQ, tagR, tagS) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E](), new TupleTag[F](), new TupleTag[G](), new TupleTag[H](), new TupleTag[I](), new TupleTag[J](), new TupleTag[K](), new TupleTag[L](), new TupleTag[M](), new TupleTag[N](), new TupleTag[O](), new TupleTag[P](), new TupleTag[Q](), new TupleTag[R](), new TupleTag[S]()) val keyed = KeyedPCollectionTuple .of(tagA, a.toKV.internal) @@ -1148,8 +1150,8 @@ trait MultiJoin extends Serializable { } def apply[KEY, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)], f: SCollection[(KEY, F)], g: SCollection[(KEY, G)], h: SCollection[(KEY, H)], i: SCollection[(KEY, I)], j: SCollection[(KEY, J)], k: SCollection[(KEY, K)], l: SCollection[(KEY, L)], m: SCollection[(KEY, M)], n: SCollection[(KEY, N)], o: SCollection[(KEY, O)], p: SCollection[(KEY, P)], q: SCollection[(KEY, Q)], r: SCollection[(KEY, R)], s: SCollection[(KEY, S)], t: SCollection[(KEY, T)]): SCollection[(KEY, (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T))] = { - implicit val keyCoder = a.keyCoder - implicit val (coderA, coderB, coderC, coderD, coderE, coderF, coderG, coderH, coderI, coderJ, coderK, coderL, coderM, coderN, coderO, coderP, coderQ, coderR, coderS, coderT) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder, o.valueCoder, p.valueCoder, q.valueCoder, r.valueCoder, s.valueCoder, t.valueCoder) + implicit val keyCoder: Coder[KEY] = a.keyCoder + implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E], coderF: Coder[F], coderG: Coder[G], coderH: Coder[H], coderI: Coder[I], coderJ: Coder[J], coderK: Coder[K], coderL: Coder[L], coderM: Coder[M], coderN: Coder[N], coderO: Coder[O], coderP: Coder[P], coderQ: Coder[Q], coderR: Coder[R], coderS: 
Coder[S], coderT: Coder[T]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder, o.valueCoder, p.valueCoder, q.valueCoder, r.valueCoder, s.valueCoder, t.valueCoder) val (tagA, tagB, tagC, tagD, tagE, tagF, tagG, tagH, tagI, tagJ, tagK, tagL, tagM, tagN, tagO, tagP, tagQ, tagR, tagS, tagT) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E](), new TupleTag[F](), new TupleTag[G](), new TupleTag[H](), new TupleTag[I](), new TupleTag[J](), new TupleTag[K](), new TupleTag[L](), new TupleTag[M](), new TupleTag[N](), new TupleTag[O](), new TupleTag[P](), new TupleTag[Q](), new TupleTag[R](), new TupleTag[S](), new TupleTag[T]()) val keyed = KeyedPCollectionTuple .of(tagA, a.toKV.internal) @@ -1201,8 +1203,8 @@ trait MultiJoin extends Serializable { } def apply[KEY, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)], f: SCollection[(KEY, F)], g: SCollection[(KEY, G)], h: SCollection[(KEY, H)], i: SCollection[(KEY, I)], j: SCollection[(KEY, J)], k: SCollection[(KEY, K)], l: SCollection[(KEY, L)], m: SCollection[(KEY, M)], n: SCollection[(KEY, N)], o: SCollection[(KEY, O)], p: SCollection[(KEY, P)], q: SCollection[(KEY, Q)], r: SCollection[(KEY, R)], s: SCollection[(KEY, S)], t: SCollection[(KEY, T)], u: SCollection[(KEY, U)]): SCollection[(KEY, (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U))] = { - implicit val keyCoder = a.keyCoder - implicit val (coderA, coderB, coderC, coderD, coderE, coderF, coderG, coderH, coderI, coderJ, coderK, coderL, coderM, coderN, coderO, coderP, coderQ, coderR, coderS, coderT, coderU) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder, o.valueCoder, p.valueCoder, q.valueCoder, r.valueCoder, s.valueCoder, t.valueCoder, u.valueCoder) + implicit val keyCoder: Coder[KEY] = a.keyCoder + implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E], coderF: Coder[F], coderG: Coder[G], coderH: Coder[H], coderI: Coder[I], coderJ: Coder[J], coderK: Coder[K], coderL: Coder[L], coderM: Coder[M], coderN: Coder[N], coderO: Coder[O], coderP: Coder[P], coderQ: Coder[Q], coderR: Coder[R], coderS: Coder[S], coderT: Coder[T], coderU: Coder[U]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder, o.valueCoder, p.valueCoder, q.valueCoder, r.valueCoder, s.valueCoder, t.valueCoder, u.valueCoder) val (tagA, tagB, tagC, tagD, tagE, tagF, tagG, tagH, tagI, tagJ, tagK, tagL, tagM, tagN, tagO, tagP, tagQ, tagR, tagS, tagT, tagU) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E](), new TupleTag[F](), new TupleTag[G](), new TupleTag[H](), new TupleTag[I](), new TupleTag[J](), new TupleTag[K](), new TupleTag[L](), new TupleTag[M](), new TupleTag[N](), new TupleTag[O](), new TupleTag[P](), new TupleTag[Q](), new TupleTag[R](), new TupleTag[S](), new TupleTag[T](), new TupleTag[U]()) val keyed = KeyedPCollectionTuple .of(tagA, a.toKV.internal) @@ -1256,8 +1258,8 @@ trait MultiJoin extends 
Serializable { } def apply[KEY, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)], f: SCollection[(KEY, F)], g: SCollection[(KEY, G)], h: SCollection[(KEY, H)], i: SCollection[(KEY, I)], j: SCollection[(KEY, J)], k: SCollection[(KEY, K)], l: SCollection[(KEY, L)], m: SCollection[(KEY, M)], n: SCollection[(KEY, N)], o: SCollection[(KEY, O)], p: SCollection[(KEY, P)], q: SCollection[(KEY, Q)], r: SCollection[(KEY, R)], s: SCollection[(KEY, S)], t: SCollection[(KEY, T)], u: SCollection[(KEY, U)], v: SCollection[(KEY, V)]): SCollection[(KEY, (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V))] = { - implicit val keyCoder = a.keyCoder - implicit val (coderA, coderB, coderC, coderD, coderE, coderF, coderG, coderH, coderI, coderJ, coderK, coderL, coderM, coderN, coderO, coderP, coderQ, coderR, coderS, coderT, coderU, coderV) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder, o.valueCoder, p.valueCoder, q.valueCoder, r.valueCoder, s.valueCoder, t.valueCoder, u.valueCoder, v.valueCoder) + implicit val keyCoder: Coder[KEY] = a.keyCoder + implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E], coderF: Coder[F], coderG: Coder[G], coderH: Coder[H], coderI: Coder[I], coderJ: Coder[J], coderK: Coder[K], coderL: Coder[L], coderM: Coder[M], coderN: Coder[N], coderO: Coder[O], coderP: Coder[P], coderQ: Coder[Q], coderR: Coder[R], coderS: Coder[S], coderT: Coder[T], coderU: Coder[U], coderV: Coder[V]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder, o.valueCoder, p.valueCoder, q.valueCoder, r.valueCoder, s.valueCoder, t.valueCoder, u.valueCoder, v.valueCoder) val (tagA, tagB, tagC, tagD, tagE, tagF, tagG, tagH, tagI, tagJ, tagK, tagL, tagM, tagN, tagO, tagP, tagQ, tagR, tagS, tagT, tagU, tagV) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E](), new TupleTag[F](), new TupleTag[G](), new TupleTag[H](), new TupleTag[I](), new TupleTag[J](), new TupleTag[K](), new TupleTag[L](), new TupleTag[M](), new TupleTag[N](), new TupleTag[O](), new TupleTag[P](), new TupleTag[Q](), new TupleTag[R](), new TupleTag[S](), new TupleTag[T](), new TupleTag[U](), new TupleTag[V]()) val keyed = KeyedPCollectionTuple .of(tagA, a.toKV.internal) @@ -1313,8 +1315,8 @@ trait MultiJoin extends Serializable { } def left[KEY, A, B](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)]): SCollection[(KEY, (A, Option[B]))] = { - implicit val keyCoder = a.keyCoder - implicit val (coderA, coderB) = (a.valueCoder, b.valueCoder) + implicit val keyCoder: Coder[KEY] = a.keyCoder + implicit val (coderA: Coder[A], coderB: Coder[B]) = (a.valueCoder, b.valueCoder) val (tagA, tagB) = (new TupleTag[A](), new TupleTag[B]()) val keyed = KeyedPCollectionTuple .of(tagA, a.toKV.internal) @@ -1330,8 +1332,8 @@ trait MultiJoin extends Serializable { } def left[KEY, A, B, C](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)]): SCollection[(KEY, (A, Option[B], Option[C]))] = { - implicit val keyCoder = a.keyCoder - implicit val (coderA, coderB, coderC) = (a.valueCoder, b.valueCoder, 
c.valueCoder) + implicit val keyCoder: Coder[KEY] = a.keyCoder + implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C]) = (a.valueCoder, b.valueCoder, c.valueCoder) val (tagA, tagB, tagC) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C]()) val keyed = KeyedPCollectionTuple .of(tagA, a.toKV.internal) @@ -1349,8 +1351,8 @@ trait MultiJoin extends Serializable { } def left[KEY, A, B, C, D](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)]): SCollection[(KEY, (A, Option[B], Option[C], Option[D]))] = { - implicit val keyCoder = a.keyCoder - implicit val (coderA, coderB, coderC, coderD) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder) + implicit val keyCoder: Coder[KEY] = a.keyCoder + implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder) val (tagA, tagB, tagC, tagD) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D]()) val keyed = KeyedPCollectionTuple .of(tagA, a.toKV.internal) @@ -1370,8 +1372,8 @@ trait MultiJoin extends Serializable { } def left[KEY, A, B, C, D, E](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)]): SCollection[(KEY, (A, Option[B], Option[C], Option[D], Option[E]))] = { - implicit val keyCoder = a.keyCoder - implicit val (coderA, coderB, coderC, coderD, coderE) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder) + implicit val keyCoder: Coder[KEY] = a.keyCoder + implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder) val (tagA, tagB, tagC, tagD, tagE) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E]()) val keyed = KeyedPCollectionTuple .of(tagA, a.toKV.internal) @@ -1393,8 +1395,8 @@ trait MultiJoin extends Serializable { } def left[KEY, A, B, C, D, E, F](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)], f: SCollection[(KEY, F)]): SCollection[(KEY, (A, Option[B], Option[C], Option[D], Option[E], Option[F]))] = { - implicit val keyCoder = a.keyCoder - implicit val (coderA, coderB, coderC, coderD, coderE, coderF) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder) + implicit val keyCoder: Coder[KEY] = a.keyCoder + implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E], coderF: Coder[F]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder) val (tagA, tagB, tagC, tagD, tagE, tagF) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E](), new TupleTag[F]()) val keyed = KeyedPCollectionTuple .of(tagA, a.toKV.internal) @@ -1418,8 +1420,8 @@ trait MultiJoin extends Serializable { } def left[KEY, A, B, C, D, E, F, G](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)], f: SCollection[(KEY, F)], g: SCollection[(KEY, G)]): SCollection[(KEY, (A, Option[B], Option[C], Option[D], Option[E], Option[F], Option[G]))] = { - implicit val keyCoder = a.keyCoder - implicit val (coderA, coderB, coderC, coderD, coderE, coderF, coderG) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, 
g.valueCoder) + implicit val keyCoder: Coder[KEY] = a.keyCoder + implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E], coderF: Coder[F], coderG: Coder[G]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder) val (tagA, tagB, tagC, tagD, tagE, tagF, tagG) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E](), new TupleTag[F](), new TupleTag[G]()) val keyed = KeyedPCollectionTuple .of(tagA, a.toKV.internal) @@ -1445,8 +1447,8 @@ trait MultiJoin extends Serializable { } def left[KEY, A, B, C, D, E, F, G, H](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)], f: SCollection[(KEY, F)], g: SCollection[(KEY, G)], h: SCollection[(KEY, H)]): SCollection[(KEY, (A, Option[B], Option[C], Option[D], Option[E], Option[F], Option[G], Option[H]))] = { - implicit val keyCoder = a.keyCoder - implicit val (coderA, coderB, coderC, coderD, coderE, coderF, coderG, coderH) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder) + implicit val keyCoder: Coder[KEY] = a.keyCoder + implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E], coderF: Coder[F], coderG: Coder[G], coderH: Coder[H]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder) val (tagA, tagB, tagC, tagD, tagE, tagF, tagG, tagH) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E](), new TupleTag[F](), new TupleTag[G](), new TupleTag[H]()) val keyed = KeyedPCollectionTuple .of(tagA, a.toKV.internal) @@ -1474,8 +1476,8 @@ trait MultiJoin extends Serializable { } def left[KEY, A, B, C, D, E, F, G, H, I](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)], f: SCollection[(KEY, F)], g: SCollection[(KEY, G)], h: SCollection[(KEY, H)], i: SCollection[(KEY, I)]): SCollection[(KEY, (A, Option[B], Option[C], Option[D], Option[E], Option[F], Option[G], Option[H], Option[I]))] = { - implicit val keyCoder = a.keyCoder - implicit val (coderA, coderB, coderC, coderD, coderE, coderF, coderG, coderH, coderI) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder) + implicit val keyCoder: Coder[KEY] = a.keyCoder + implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E], coderF: Coder[F], coderG: Coder[G], coderH: Coder[H], coderI: Coder[I]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder) val (tagA, tagB, tagC, tagD, tagE, tagF, tagG, tagH, tagI) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E](), new TupleTag[F](), new TupleTag[G](), new TupleTag[H](), new TupleTag[I]()) val keyed = KeyedPCollectionTuple .of(tagA, a.toKV.internal) @@ -1505,8 +1507,8 @@ trait MultiJoin extends Serializable { } def left[KEY, A, B, C, D, E, F, G, H, I, J](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)], f: SCollection[(KEY, F)], g: SCollection[(KEY, G)], h: SCollection[(KEY, H)], i: SCollection[(KEY, I)], j: SCollection[(KEY, J)]): SCollection[(KEY, (A, Option[B], 
Option[C], Option[D], Option[E], Option[F], Option[G], Option[H], Option[I], Option[J]))] = { - implicit val keyCoder = a.keyCoder - implicit val (coderA, coderB, coderC, coderD, coderE, coderF, coderG, coderH, coderI, coderJ) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder) + implicit val keyCoder: Coder[KEY] = a.keyCoder + implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E], coderF: Coder[F], coderG: Coder[G], coderH: Coder[H], coderI: Coder[I], coderJ: Coder[J]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder) val (tagA, tagB, tagC, tagD, tagE, tagF, tagG, tagH, tagI, tagJ) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E](), new TupleTag[F](), new TupleTag[G](), new TupleTag[H](), new TupleTag[I](), new TupleTag[J]()) val keyed = KeyedPCollectionTuple .of(tagA, a.toKV.internal) @@ -1538,8 +1540,8 @@ trait MultiJoin extends Serializable { } def left[KEY, A, B, C, D, E, F, G, H, I, J, K](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)], f: SCollection[(KEY, F)], g: SCollection[(KEY, G)], h: SCollection[(KEY, H)], i: SCollection[(KEY, I)], j: SCollection[(KEY, J)], k: SCollection[(KEY, K)]): SCollection[(KEY, (A, Option[B], Option[C], Option[D], Option[E], Option[F], Option[G], Option[H], Option[I], Option[J], Option[K]))] = { - implicit val keyCoder = a.keyCoder - implicit val (coderA, coderB, coderC, coderD, coderE, coderF, coderG, coderH, coderI, coderJ, coderK) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder) + implicit val keyCoder: Coder[KEY] = a.keyCoder + implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E], coderF: Coder[F], coderG: Coder[G], coderH: Coder[H], coderI: Coder[I], coderJ: Coder[J], coderK: Coder[K]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder) val (tagA, tagB, tagC, tagD, tagE, tagF, tagG, tagH, tagI, tagJ, tagK) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E](), new TupleTag[F](), new TupleTag[G](), new TupleTag[H](), new TupleTag[I](), new TupleTag[J](), new TupleTag[K]()) val keyed = KeyedPCollectionTuple .of(tagA, a.toKV.internal) @@ -1573,8 +1575,8 @@ trait MultiJoin extends Serializable { } def left[KEY, A, B, C, D, E, F, G, H, I, J, K, L](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)], f: SCollection[(KEY, F)], g: SCollection[(KEY, G)], h: SCollection[(KEY, H)], i: SCollection[(KEY, I)], j: SCollection[(KEY, J)], k: SCollection[(KEY, K)], l: SCollection[(KEY, L)]): SCollection[(KEY, (A, Option[B], Option[C], Option[D], Option[E], Option[F], Option[G], Option[H], Option[I], Option[J], Option[K], Option[L]))] = { - implicit val keyCoder = a.keyCoder - implicit val (coderA, coderB, coderC, coderD, coderE, coderF, coderG, coderH, coderI, coderJ, coderK, coderL) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, 
l.valueCoder) + implicit val keyCoder: Coder[KEY] = a.keyCoder + implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E], coderF: Coder[F], coderG: Coder[G], coderH: Coder[H], coderI: Coder[I], coderJ: Coder[J], coderK: Coder[K], coderL: Coder[L]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder) val (tagA, tagB, tagC, tagD, tagE, tagF, tagG, tagH, tagI, tagJ, tagK, tagL) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E](), new TupleTag[F](), new TupleTag[G](), new TupleTag[H](), new TupleTag[I](), new TupleTag[J](), new TupleTag[K](), new TupleTag[L]()) val keyed = KeyedPCollectionTuple .of(tagA, a.toKV.internal) @@ -1610,8 +1612,8 @@ trait MultiJoin extends Serializable { } def left[KEY, A, B, C, D, E, F, G, H, I, J, K, L, M](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)], f: SCollection[(KEY, F)], g: SCollection[(KEY, G)], h: SCollection[(KEY, H)], i: SCollection[(KEY, I)], j: SCollection[(KEY, J)], k: SCollection[(KEY, K)], l: SCollection[(KEY, L)], m: SCollection[(KEY, M)]): SCollection[(KEY, (A, Option[B], Option[C], Option[D], Option[E], Option[F], Option[G], Option[H], Option[I], Option[J], Option[K], Option[L], Option[M]))] = { - implicit val keyCoder = a.keyCoder - implicit val (coderA, coderB, coderC, coderD, coderE, coderF, coderG, coderH, coderI, coderJ, coderK, coderL, coderM) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder) + implicit val keyCoder: Coder[KEY] = a.keyCoder + implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E], coderF: Coder[F], coderG: Coder[G], coderH: Coder[H], coderI: Coder[I], coderJ: Coder[J], coderK: Coder[K], coderL: Coder[L], coderM: Coder[M]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder) val (tagA, tagB, tagC, tagD, tagE, tagF, tagG, tagH, tagI, tagJ, tagK, tagL, tagM) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E](), new TupleTag[F](), new TupleTag[G](), new TupleTag[H](), new TupleTag[I](), new TupleTag[J](), new TupleTag[K](), new TupleTag[L](), new TupleTag[M]()) val keyed = KeyedPCollectionTuple .of(tagA, a.toKV.internal) @@ -1649,8 +1651,8 @@ trait MultiJoin extends Serializable { } def left[KEY, A, B, C, D, E, F, G, H, I, J, K, L, M, N](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)], f: SCollection[(KEY, F)], g: SCollection[(KEY, G)], h: SCollection[(KEY, H)], i: SCollection[(KEY, I)], j: SCollection[(KEY, J)], k: SCollection[(KEY, K)], l: SCollection[(KEY, L)], m: SCollection[(KEY, M)], n: SCollection[(KEY, N)]): SCollection[(KEY, (A, Option[B], Option[C], Option[D], Option[E], Option[F], Option[G], Option[H], Option[I], Option[J], Option[K], Option[L], Option[M], Option[N]))] = { - implicit val keyCoder = a.keyCoder - implicit val (coderA, coderB, coderC, coderD, coderE, coderF, coderG, coderH, coderI, coderJ, coderK, coderL, coderM, coderN) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, 
e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder)
+    implicit val keyCoder: Coder[KEY] = a.keyCoder
+    implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E], coderF: Coder[F], coderG: Coder[G], coderH: Coder[H], coderI: Coder[I], coderJ: Coder[J], coderK: Coder[K], coderL: Coder[L], coderM: Coder[M], coderN: Coder[N]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder)
     val (tagA, tagB, tagC, tagD, tagE, tagF, tagG, tagH, tagI, tagJ, tagK, tagL, tagM, tagN) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E](), new TupleTag[F](), new TupleTag[G](), new TupleTag[H](), new TupleTag[I](), new TupleTag[J](), new TupleTag[K](), new TupleTag[L](), new TupleTag[M](), new TupleTag[N]())
     val keyed = KeyedPCollectionTuple
       .of(tagA, a.toKV.internal)
@@ -1690,8 +1692,8 @@ trait MultiJoin extends Serializable {
   }
 
   def left[KEY, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)], f: SCollection[(KEY, F)], g: SCollection[(KEY, G)], h: SCollection[(KEY, H)], i: SCollection[(KEY, I)], j: SCollection[(KEY, J)], k: SCollection[(KEY, K)], l: SCollection[(KEY, L)], m: SCollection[(KEY, M)], n: SCollection[(KEY, N)], o: SCollection[(KEY, O)]): SCollection[(KEY, (A, Option[B], Option[C], Option[D], Option[E], Option[F], Option[G], Option[H], Option[I], Option[J], Option[K], Option[L], Option[M], Option[N], Option[O]))] = {
-    implicit val keyCoder = a.keyCoder
-    implicit val (coderA, coderB, coderC, coderD, coderE, coderF, coderG, coderH, coderI, coderJ, coderK, coderL, coderM, coderN, coderO) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder, o.valueCoder)
+    implicit val keyCoder: Coder[KEY] = a.keyCoder
+    implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E], coderF: Coder[F], coderG: Coder[G], coderH: Coder[H], coderI: Coder[I], coderJ: Coder[J], coderK: Coder[K], coderL: Coder[L], coderM: Coder[M], coderN: Coder[N], coderO: Coder[O]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder, o.valueCoder)
     val (tagA, tagB, tagC, tagD, tagE, tagF, tagG, tagH, tagI, tagJ, tagK, tagL, tagM, tagN, tagO) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E](), new TupleTag[F](), new TupleTag[G](), new TupleTag[H](), new TupleTag[I](), new TupleTag[J](), new TupleTag[K](), new TupleTag[L](), new TupleTag[M](), new TupleTag[N](), new TupleTag[O]())
     val keyed = KeyedPCollectionTuple
       .of(tagA, a.toKV.internal)
@@ -1733,8 +1735,8 @@ trait MultiJoin extends Serializable {
   }
 
   def left[KEY, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)], f: SCollection[(KEY, F)], g: SCollection[(KEY, G)], h: SCollection[(KEY, H)], i: SCollection[(KEY, I)], j: SCollection[(KEY, J)], k: SCollection[(KEY, K)], l: SCollection[(KEY, L)], m: SCollection[(KEY, M)], n: SCollection[(KEY, N)], o: SCollection[(KEY, O)], p: SCollection[(KEY, P)]): SCollection[(KEY, (A, Option[B], Option[C], Option[D], Option[E], Option[F], Option[G], Option[H], Option[I], Option[J], Option[K], Option[L], Option[M], Option[N], Option[O], Option[P]))] = {
-    implicit val keyCoder = a.keyCoder
-    implicit val (coderA, coderB, coderC, coderD, coderE, coderF, coderG, coderH, coderI, coderJ, coderK, coderL, coderM, coderN, coderO, coderP) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder, o.valueCoder, p.valueCoder)
+    implicit val keyCoder: Coder[KEY] = a.keyCoder
+    implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E], coderF: Coder[F], coderG: Coder[G], coderH: Coder[H], coderI: Coder[I], coderJ: Coder[J], coderK: Coder[K], coderL: Coder[L], coderM: Coder[M], coderN: Coder[N], coderO: Coder[O], coderP: Coder[P]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder, o.valueCoder, p.valueCoder)
     val (tagA, tagB, tagC, tagD, tagE, tagF, tagG, tagH, tagI, tagJ, tagK, tagL, tagM, tagN, tagO, tagP) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E](), new TupleTag[F](), new TupleTag[G](), new TupleTag[H](), new TupleTag[I](), new TupleTag[J](), new TupleTag[K](), new TupleTag[L](), new TupleTag[M](), new TupleTag[N](), new TupleTag[O](), new TupleTag[P]())
     val keyed = KeyedPCollectionTuple
       .of(tagA, a.toKV.internal)
@@ -1778,8 +1780,8 @@ trait MultiJoin extends Serializable {
   }
 
   def left[KEY, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)], f: SCollection[(KEY, F)], g: SCollection[(KEY, G)], h: SCollection[(KEY, H)], i: SCollection[(KEY, I)], j: SCollection[(KEY, J)], k: SCollection[(KEY, K)], l: SCollection[(KEY, L)], m: SCollection[(KEY, M)], n: SCollection[(KEY, N)], o: SCollection[(KEY, O)], p: SCollection[(KEY, P)], q: SCollection[(KEY, Q)]): SCollection[(KEY, (A, Option[B], Option[C], Option[D], Option[E], Option[F], Option[G], Option[H], Option[I], Option[J], Option[K], Option[L], Option[M], Option[N], Option[O], Option[P], Option[Q]))] = {
-    implicit val keyCoder = a.keyCoder
-    implicit val (coderA, coderB, coderC, coderD, coderE, coderF, coderG, coderH, coderI, coderJ, coderK, coderL, coderM, coderN, coderO, coderP, coderQ) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder, o.valueCoder, p.valueCoder, q.valueCoder)
+    implicit val keyCoder: Coder[KEY] = a.keyCoder
+    implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E], coderF: Coder[F], coderG: Coder[G], coderH: Coder[H], coderI: Coder[I], coderJ: Coder[J], coderK: Coder[K], coderL: Coder[L], coderM: Coder[M], coderN: Coder[N], coderO: Coder[O], coderP: Coder[P], coderQ: Coder[Q]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder, o.valueCoder, p.valueCoder, q.valueCoder)
     val (tagA, tagB, tagC, tagD, tagE, tagF, tagG, tagH, tagI, tagJ, tagK, tagL, tagM, tagN, tagO, tagP, tagQ) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E](), new TupleTag[F](), new TupleTag[G](), new TupleTag[H](), new TupleTag[I](), new TupleTag[J](), new TupleTag[K](), new TupleTag[L](), new TupleTag[M](), new TupleTag[N](), new TupleTag[O](), new TupleTag[P](), new TupleTag[Q]())
     val keyed = KeyedPCollectionTuple
       .of(tagA, a.toKV.internal)
@@ -1825,8 +1827,8 @@ trait MultiJoin extends Serializable {
   }
 
   def left[KEY, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)], f: SCollection[(KEY, F)], g: SCollection[(KEY, G)], h: SCollection[(KEY, H)], i: SCollection[(KEY, I)], j: SCollection[(KEY, J)], k: SCollection[(KEY, K)], l: SCollection[(KEY, L)], m: SCollection[(KEY, M)], n: SCollection[(KEY, N)], o: SCollection[(KEY, O)], p: SCollection[(KEY, P)], q: SCollection[(KEY, Q)], r: SCollection[(KEY, R)]): SCollection[(KEY, (A, Option[B], Option[C], Option[D], Option[E], Option[F], Option[G], Option[H], Option[I], Option[J], Option[K], Option[L], Option[M], Option[N], Option[O], Option[P], Option[Q], Option[R]))] = {
-    implicit val keyCoder = a.keyCoder
-    implicit val (coderA, coderB, coderC, coderD, coderE, coderF, coderG, coderH, coderI, coderJ, coderK, coderL, coderM, coderN, coderO, coderP, coderQ, coderR) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder, o.valueCoder, p.valueCoder, q.valueCoder, r.valueCoder)
+    implicit val keyCoder: Coder[KEY] = a.keyCoder
+    implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E], coderF: Coder[F], coderG: Coder[G], coderH: Coder[H], coderI: Coder[I], coderJ: Coder[J], coderK: Coder[K], coderL: Coder[L], coderM: Coder[M], coderN: Coder[N], coderO: Coder[O], coderP: Coder[P], coderQ: Coder[Q], coderR: Coder[R]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder, o.valueCoder, p.valueCoder, q.valueCoder, r.valueCoder)
     val (tagA, tagB, tagC, tagD, tagE, tagF, tagG, tagH, tagI, tagJ, tagK, tagL, tagM, tagN, tagO, tagP, tagQ, tagR) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E](), new TupleTag[F](), new TupleTag[G](), new TupleTag[H](), new TupleTag[I](), new TupleTag[J](), new TupleTag[K](), new TupleTag[L](), new TupleTag[M](), new TupleTag[N](), new TupleTag[O](), new TupleTag[P](), new TupleTag[Q](), new TupleTag[R]())
     val keyed = KeyedPCollectionTuple
       .of(tagA, a.toKV.internal)
@@ -1874,8 +1876,8 @@ trait MultiJoin extends Serializable {
   }
 
   def left[KEY, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)], f: SCollection[(KEY, F)], g: SCollection[(KEY, G)], h: SCollection[(KEY, H)], i: SCollection[(KEY, I)], j: SCollection[(KEY, J)], k: SCollection[(KEY, K)], l: SCollection[(KEY, L)], m: SCollection[(KEY, M)], n: SCollection[(KEY, N)], o: SCollection[(KEY, O)], p: SCollection[(KEY, P)], q: SCollection[(KEY, Q)], r: SCollection[(KEY, R)], s: SCollection[(KEY, S)]): SCollection[(KEY, (A, Option[B], Option[C], Option[D], Option[E], Option[F], Option[G], Option[H], Option[I], Option[J], Option[K], Option[L], Option[M], Option[N], Option[O], Option[P], Option[Q], Option[R], Option[S]))] = {
-    implicit val keyCoder = a.keyCoder
-    implicit val (coderA, coderB, coderC, coderD, coderE, coderF, coderG, coderH, coderI, coderJ, coderK, coderL, coderM, coderN, coderO, coderP, coderQ, coderR, coderS) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder, o.valueCoder, p.valueCoder, q.valueCoder, r.valueCoder, s.valueCoder)
+    implicit val keyCoder: Coder[KEY] = a.keyCoder
+    implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E], coderF: Coder[F], coderG: Coder[G], coderH: Coder[H], coderI: Coder[I], coderJ: Coder[J], coderK: Coder[K], coderL: Coder[L], coderM: Coder[M], coderN: Coder[N], coderO: Coder[O], coderP: Coder[P], coderQ: Coder[Q], coderR: Coder[R], coderS: Coder[S]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder, o.valueCoder, p.valueCoder, q.valueCoder, r.valueCoder, s.valueCoder)
     val (tagA, tagB, tagC, tagD, tagE, tagF, tagG, tagH, tagI, tagJ, tagK, tagL, tagM, tagN, tagO, tagP, tagQ, tagR, tagS) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E](), new TupleTag[F](), new TupleTag[G](), new TupleTag[H](), new TupleTag[I](), new TupleTag[J](), new TupleTag[K](), new TupleTag[L](), new TupleTag[M](), new TupleTag[N](), new TupleTag[O](), new TupleTag[P](), new TupleTag[Q](), new TupleTag[R](), new TupleTag[S]())
     val keyed = KeyedPCollectionTuple
       .of(tagA, a.toKV.internal)
@@ -1925,8 +1927,8 @@ trait MultiJoin extends Serializable {
   }
 
   def left[KEY, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)], f: SCollection[(KEY, F)], g: SCollection[(KEY, G)], h: SCollection[(KEY, H)], i: SCollection[(KEY, I)], j: SCollection[(KEY, J)], k: SCollection[(KEY, K)], l: SCollection[(KEY, L)], m: SCollection[(KEY, M)], n: SCollection[(KEY, N)], o: SCollection[(KEY, O)], p: SCollection[(KEY, P)], q: SCollection[(KEY, Q)], r: SCollection[(KEY, R)], s: SCollection[(KEY, S)], t: SCollection[(KEY, T)]): SCollection[(KEY, (A, Option[B], Option[C], Option[D], Option[E], Option[F], Option[G], Option[H], Option[I], Option[J], Option[K], Option[L], Option[M], Option[N], Option[O], Option[P], Option[Q], Option[R], Option[S], Option[T]))] = {
-    implicit val keyCoder = a.keyCoder
-    implicit val (coderA, coderB, coderC, coderD, coderE, coderF, coderG, coderH, coderI, coderJ, coderK, coderL, coderM, coderN, coderO, coderP, coderQ, coderR, coderS, coderT) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder, o.valueCoder, p.valueCoder, q.valueCoder, r.valueCoder, s.valueCoder, t.valueCoder)
+    implicit val keyCoder: Coder[KEY] = a.keyCoder
+    implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E], coderF: Coder[F], coderG: Coder[G], coderH: Coder[H], coderI: Coder[I], coderJ: Coder[J], coderK: Coder[K], coderL: Coder[L], coderM: Coder[M], coderN: Coder[N], coderO: Coder[O], coderP: Coder[P], coderQ: Coder[Q], coderR: Coder[R], coderS: Coder[S], coderT: Coder[T]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder, o.valueCoder, p.valueCoder, q.valueCoder, r.valueCoder, s.valueCoder, t.valueCoder)
     val (tagA, tagB, tagC, tagD, tagE, tagF, tagG, tagH, tagI, tagJ, tagK, tagL, tagM, tagN, tagO, tagP, tagQ, tagR, tagS, tagT) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E](), new TupleTag[F](), new TupleTag[G](), new TupleTag[H](), new TupleTag[I](), new TupleTag[J](), new TupleTag[K](), new TupleTag[L](), new TupleTag[M](), new TupleTag[N](), new TupleTag[O](), new TupleTag[P](), new TupleTag[Q](), new TupleTag[R](), new TupleTag[S](), new TupleTag[T]())
     val keyed = KeyedPCollectionTuple
       .of(tagA, a.toKV.internal)
@@ -1978,8 +1980,8 @@ trait MultiJoin extends Serializable {
   }
 
   def left[KEY, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)], f: SCollection[(KEY, F)], g: SCollection[(KEY, G)], h: SCollection[(KEY, H)], i: SCollection[(KEY, I)], j: SCollection[(KEY, J)], k: SCollection[(KEY, K)], l: SCollection[(KEY, L)], m: SCollection[(KEY, M)], n: SCollection[(KEY, N)], o: SCollection[(KEY, O)], p: SCollection[(KEY, P)], q: SCollection[(KEY, Q)], r: SCollection[(KEY, R)], s: SCollection[(KEY, S)], t: SCollection[(KEY, T)], u: SCollection[(KEY, U)]): SCollection[(KEY, (A, Option[B], Option[C], Option[D], Option[E], Option[F], Option[G], Option[H], Option[I], Option[J], Option[K], Option[L], Option[M], Option[N], Option[O], Option[P], Option[Q], Option[R], Option[S], Option[T], Option[U]))] = {
-    implicit val keyCoder = a.keyCoder
-    implicit val (coderA, coderB, coderC, coderD, coderE, coderF, coderG, coderH, coderI, coderJ, coderK, coderL, coderM, coderN, coderO, coderP, coderQ, coderR, coderS, coderT, coderU) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder, o.valueCoder, p.valueCoder, q.valueCoder, r.valueCoder, s.valueCoder, t.valueCoder, u.valueCoder)
+    implicit val keyCoder: Coder[KEY] = a.keyCoder
+    implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E], coderF: Coder[F], coderG: Coder[G], coderH: Coder[H], coderI: Coder[I], coderJ: Coder[J], coderK: Coder[K], coderL: Coder[L], coderM: Coder[M], coderN: Coder[N], coderO: Coder[O], coderP: Coder[P], coderQ: Coder[Q], coderR: Coder[R], coderS: Coder[S], coderT: Coder[T], coderU: Coder[U]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder, o.valueCoder, p.valueCoder, q.valueCoder, r.valueCoder, s.valueCoder, t.valueCoder, u.valueCoder)
     val (tagA, tagB, tagC, tagD, tagE, tagF, tagG, tagH, tagI, tagJ, tagK, tagL, tagM, tagN, tagO, tagP, tagQ, tagR, tagS, tagT, tagU) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E](), new TupleTag[F](), new TupleTag[G](), new TupleTag[H](), new TupleTag[I](), new TupleTag[J](), new TupleTag[K](), new TupleTag[L](), new TupleTag[M](), new TupleTag[N](), new TupleTag[O](), new TupleTag[P](), new TupleTag[Q](), new TupleTag[R](), new TupleTag[S](), new TupleTag[T](), new TupleTag[U]())
     val keyed = KeyedPCollectionTuple
       .of(tagA, a.toKV.internal)
@@ -2033,8 +2035,8 @@ trait MultiJoin extends Serializable {
   }
 
   def left[KEY, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)], f: SCollection[(KEY, F)], g: SCollection[(KEY, G)], h: SCollection[(KEY, H)], i: SCollection[(KEY, I)], j: SCollection[(KEY, J)], k: SCollection[(KEY, K)], l: SCollection[(KEY, L)], m: SCollection[(KEY, M)], n: SCollection[(KEY, N)], o: SCollection[(KEY, O)], p: SCollection[(KEY, P)], q: SCollection[(KEY, Q)], r: SCollection[(KEY, R)], s: SCollection[(KEY, S)], t: SCollection[(KEY, T)], u: SCollection[(KEY, U)], v: SCollection[(KEY, V)]): SCollection[(KEY, (A, Option[B], Option[C], Option[D], Option[E], Option[F], Option[G], Option[H], Option[I], Option[J], Option[K], Option[L], Option[M], Option[N], Option[O], Option[P], Option[Q], Option[R], Option[S], Option[T], Option[U], Option[V]))] = {
-    implicit val keyCoder = a.keyCoder
-    implicit val (coderA, coderB, coderC, coderD, coderE, coderF, coderG, coderH, coderI, coderJ, coderK, coderL, coderM, coderN, coderO, coderP, coderQ, coderR, coderS, coderT, coderU, coderV) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder, o.valueCoder, p.valueCoder, q.valueCoder, r.valueCoder, s.valueCoder, t.valueCoder, u.valueCoder, v.valueCoder)
+    implicit val keyCoder: Coder[KEY] = a.keyCoder
+    implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E], coderF: Coder[F], coderG: Coder[G], coderH: Coder[H], coderI: Coder[I], coderJ: Coder[J], coderK: Coder[K], coderL: Coder[L], coderM: Coder[M], coderN: Coder[N], coderO: Coder[O], coderP: Coder[P], coderQ: Coder[Q], coderR: Coder[R], coderS: Coder[S], coderT: Coder[T], coderU: Coder[U], coderV: Coder[V]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder, o.valueCoder, p.valueCoder, q.valueCoder, r.valueCoder, s.valueCoder, t.valueCoder, u.valueCoder, v.valueCoder)
     val (tagA, tagB, tagC, tagD, tagE, tagF, tagG, tagH, tagI, tagJ, tagK, tagL, tagM, tagN, tagO, tagP, tagQ, tagR, tagS, tagT, tagU, tagV) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E](), new TupleTag[F](), new TupleTag[G](), new TupleTag[H](), new TupleTag[I](), new TupleTag[J](), new TupleTag[K](), new TupleTag[L](), new TupleTag[M](), new TupleTag[N](), new TupleTag[O](), new TupleTag[P](), new TupleTag[Q](), new TupleTag[R](), new TupleTag[S](), new TupleTag[T](), new TupleTag[U](), new TupleTag[V]())
     val keyed = KeyedPCollectionTuple
       .of(tagA, a.toKV.internal)
@@ -2090,8 +2092,8 @@ trait MultiJoin extends Serializable {
   }
 
   def outer[KEY, A, B](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)]): SCollection[(KEY, (Option[A], Option[B]))] = {
-    implicit val keyCoder = a.keyCoder
-    implicit val (coderA, coderB) = (a.valueCoder, b.valueCoder)
+    implicit val keyCoder: Coder[KEY] = a.keyCoder
+    implicit val (coderA: Coder[A], coderB: Coder[B]) = (a.valueCoder, b.valueCoder)
     val (tagA, tagB) = (new TupleTag[A](), new TupleTag[B]())
     val keyed = KeyedPCollectionTuple
       .of(tagA, a.toKV.internal)
@@ -2107,8 +2109,8 @@ trait MultiJoin extends Serializable {
   }
 
   def outer[KEY, A, B, C](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)]): SCollection[(KEY, (Option[A], Option[B], Option[C]))] = {
-    implicit val keyCoder = a.keyCoder
-    implicit val (coderA, coderB, coderC) = (a.valueCoder, b.valueCoder, c.valueCoder)
+    implicit val keyCoder: Coder[KEY] = a.keyCoder
+    implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C]) = (a.valueCoder, b.valueCoder, c.valueCoder)
     val (tagA, tagB, tagC) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C]())
     val keyed = KeyedPCollectionTuple
       .of(tagA, a.toKV.internal)
@@ -2126,8 +2128,8 @@ trait MultiJoin extends Serializable {
   }
 
   def outer[KEY, A, B, C, D](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)]): SCollection[(KEY, (Option[A], Option[B], Option[C], Option[D]))] = {
-    implicit val keyCoder = a.keyCoder
-    implicit val (coderA, coderB, coderC, coderD) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder)
+    implicit val keyCoder: Coder[KEY] = a.keyCoder
+    implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder)
     val (tagA, tagB, tagC, tagD) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D]())
     val keyed = KeyedPCollectionTuple
       .of(tagA, a.toKV.internal)
@@ -2147,8 +2149,8 @@ trait MultiJoin extends Serializable {
   }
 
   def outer[KEY, A, B, C, D, E](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)]): SCollection[(KEY, (Option[A], Option[B], Option[C], Option[D], Option[E]))] = {
-    implicit val keyCoder = a.keyCoder
-    implicit val (coderA, coderB, coderC, coderD, coderE) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder)
+    implicit val keyCoder: Coder[KEY] = a.keyCoder
+    implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder)
     val (tagA, tagB, tagC, tagD, tagE) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E]())
     val keyed = KeyedPCollectionTuple
       .of(tagA, a.toKV.internal)
@@ -2170,8 +2172,8 @@ trait MultiJoin extends Serializable {
   }
 
   def outer[KEY, A, B, C, D, E, F](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)], f: SCollection[(KEY, F)]): SCollection[(KEY, (Option[A], Option[B], Option[C], Option[D], Option[E], Option[F]))] = {
-    implicit val keyCoder = a.keyCoder
-    implicit val (coderA, coderB, coderC, coderD, coderE, coderF) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder)
+    implicit val keyCoder: Coder[KEY] = a.keyCoder
+    implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E], coderF: Coder[F]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder)
     val (tagA, tagB, tagC, tagD, tagE, tagF) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E](), new TupleTag[F]())
     val keyed = KeyedPCollectionTuple
       .of(tagA, a.toKV.internal)
@@ -2195,8 +2197,8 @@ trait MultiJoin extends Serializable {
   }
 
   def outer[KEY, A, B, C, D, E, F, G](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)], f: SCollection[(KEY, F)], g: SCollection[(KEY, G)]): SCollection[(KEY, (Option[A], Option[B], Option[C], Option[D], Option[E], Option[F], Option[G]))] = {
-    implicit val keyCoder = a.keyCoder
-    implicit val (coderA, coderB, coderC, coderD, coderE, coderF, coderG) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder)
+    implicit val keyCoder: Coder[KEY] = a.keyCoder
+    implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E], coderF: Coder[F], coderG: Coder[G]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder)
     val (tagA, tagB, tagC, tagD, tagE, tagF, tagG) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E](), new TupleTag[F](), new TupleTag[G]())
     val keyed = KeyedPCollectionTuple
       .of(tagA, a.toKV.internal)
@@ -2222,8 +2224,8 @@ trait MultiJoin extends Serializable {
   }
 
   def outer[KEY, A, B, C, D, E, F, G, H](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)], f: SCollection[(KEY, F)], g: SCollection[(KEY, G)], h: SCollection[(KEY, H)]): SCollection[(KEY, (Option[A], Option[B], Option[C], Option[D], Option[E], Option[F], Option[G], Option[H]))] = {
-    implicit val keyCoder = a.keyCoder
-    implicit val (coderA, coderB, coderC, coderD, coderE, coderF, coderG, coderH) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder)
+    implicit val keyCoder: Coder[KEY] = a.keyCoder
+    implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E], coderF: Coder[F], coderG: Coder[G], coderH: Coder[H]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder)
     val (tagA, tagB, tagC, tagD, tagE, tagF, tagG, tagH) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E](), new TupleTag[F](), new TupleTag[G](), new TupleTag[H]())
     val keyed = KeyedPCollectionTuple
       .of(tagA, a.toKV.internal)
@@ -2251,8 +2253,8 @@ trait MultiJoin extends Serializable {
   }
 
   def outer[KEY, A, B, C, D, E, F, G, H, I](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)], f: SCollection[(KEY, F)], g: SCollection[(KEY, G)], h: SCollection[(KEY, H)], i: SCollection[(KEY, I)]): SCollection[(KEY, (Option[A], Option[B], Option[C], Option[D], Option[E], Option[F], Option[G], Option[H], Option[I]))] = {
-    implicit val keyCoder = a.keyCoder
-    implicit val (coderA, coderB, coderC, coderD, coderE, coderF, coderG, coderH, coderI) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder)
+    implicit val keyCoder: Coder[KEY] = a.keyCoder
+    implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E], coderF: Coder[F], coderG: Coder[G], coderH: Coder[H], coderI: Coder[I]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder)
     val (tagA, tagB, tagC, tagD, tagE, tagF, tagG, tagH, tagI) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E](), new TupleTag[F](), new TupleTag[G](), new TupleTag[H](), new TupleTag[I]())
     val keyed = KeyedPCollectionTuple
       .of(tagA, a.toKV.internal)
@@ -2282,8 +2284,8 @@ trait MultiJoin extends Serializable {
   }
 
   def outer[KEY, A, B, C, D, E, F, G, H, I, J](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)], f: SCollection[(KEY, F)], g: SCollection[(KEY, G)], h: SCollection[(KEY, H)], i: SCollection[(KEY, I)], j: SCollection[(KEY, J)]): SCollection[(KEY, (Option[A], Option[B], Option[C], Option[D], Option[E], Option[F], Option[G], Option[H], Option[I], Option[J]))] = {
-    implicit val keyCoder = a.keyCoder
-    implicit val (coderA, coderB, coderC, coderD, coderE, coderF, coderG, coderH, coderI, coderJ) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder)
+    implicit val keyCoder: Coder[KEY] = a.keyCoder
+    implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E], coderF: Coder[F], coderG: Coder[G], coderH: Coder[H], coderI: Coder[I], coderJ: Coder[J]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder)
     val (tagA, tagB, tagC, tagD, tagE, tagF, tagG, tagH, tagI, tagJ) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E](), new TupleTag[F](), new TupleTag[G](), new TupleTag[H](), new TupleTag[I](), new TupleTag[J]())
     val keyed = KeyedPCollectionTuple
       .of(tagA, a.toKV.internal)
@@ -2315,8 +2317,8 @@ trait MultiJoin extends Serializable {
   }
 
   def outer[KEY, A, B, C, D, E, F, G, H, I, J, K](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)], f: SCollection[(KEY, F)], g: SCollection[(KEY, G)], h: SCollection[(KEY, H)], i: SCollection[(KEY, I)], j: SCollection[(KEY, J)], k: SCollection[(KEY, K)]): SCollection[(KEY, (Option[A], Option[B], Option[C], Option[D], Option[E], Option[F], Option[G], Option[H], Option[I], Option[J], Option[K]))] = {
-    implicit val keyCoder = a.keyCoder
-    implicit val (coderA, coderB, coderC, coderD, coderE, coderF, coderG, coderH, coderI, coderJ, coderK) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder)
+    implicit val keyCoder: Coder[KEY] = a.keyCoder
+    implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E], coderF: Coder[F], coderG: Coder[G], coderH: Coder[H], coderI: Coder[I], coderJ: Coder[J], coderK: Coder[K]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder)
     val (tagA, tagB, tagC, tagD, tagE, tagF, tagG, tagH, tagI, tagJ, tagK) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E](), new TupleTag[F](), new TupleTag[G](), new TupleTag[H](), new TupleTag[I](), new TupleTag[J](), new TupleTag[K]())
     val keyed = KeyedPCollectionTuple
       .of(tagA, a.toKV.internal)
@@ -2350,8 +2352,8 @@ trait MultiJoin extends Serializable {
   }
 
   def outer[KEY, A, B, C, D, E, F, G, H, I, J, K, L](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)], f: SCollection[(KEY, F)], g: SCollection[(KEY, G)], h: SCollection[(KEY, H)], i: SCollection[(KEY, I)], j: SCollection[(KEY, J)], k: SCollection[(KEY, K)], l: SCollection[(KEY, L)]): SCollection[(KEY, (Option[A], Option[B], Option[C], Option[D], Option[E], Option[F], Option[G], Option[H], Option[I], Option[J], Option[K], Option[L]))] = {
-    implicit val keyCoder = a.keyCoder
-    implicit val (coderA, coderB, coderC, coderD, coderE, coderF, coderG, coderH, coderI, coderJ, coderK, coderL) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder)
+    implicit val keyCoder: Coder[KEY] = a.keyCoder
+    implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E], coderF: Coder[F], coderG: Coder[G], coderH: Coder[H], coderI: Coder[I], coderJ: Coder[J], coderK: Coder[K], coderL: Coder[L]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder)
     val (tagA, tagB, tagC, tagD, tagE, tagF, tagG, tagH, tagI, tagJ, tagK, tagL) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E](), new TupleTag[F](), new TupleTag[G](), new TupleTag[H](), new TupleTag[I](), new TupleTag[J](), new TupleTag[K](), new TupleTag[L]())
     val keyed = KeyedPCollectionTuple
       .of(tagA, a.toKV.internal)
@@ -2387,8 +2389,8 @@ trait MultiJoin extends Serializable {
   }
 
   def outer[KEY, A, B, C, D, E, F, G, H, I, J, K, L, M](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)], f: SCollection[(KEY, F)], g: SCollection[(KEY, G)], h: SCollection[(KEY, H)], i: SCollection[(KEY, I)], j: SCollection[(KEY, J)], k: SCollection[(KEY, K)], l: SCollection[(KEY, L)], m: SCollection[(KEY, M)]): SCollection[(KEY, (Option[A], Option[B], Option[C], Option[D], Option[E], Option[F], Option[G], Option[H], Option[I], Option[J], Option[K], Option[L], Option[M]))] = {
-    implicit val keyCoder = a.keyCoder
-    implicit val (coderA, coderB, coderC, coderD, coderE, coderF, coderG, coderH, coderI, coderJ, coderK, coderL, coderM) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder)
+    implicit val keyCoder: Coder[KEY] = a.keyCoder
+    implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E], coderF: Coder[F], coderG: Coder[G], coderH: Coder[H], coderI: Coder[I], coderJ: Coder[J], coderK: Coder[K], coderL: Coder[L], coderM: Coder[M]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder)
     val (tagA, tagB, tagC, tagD, tagE, tagF, tagG, tagH, tagI, tagJ, tagK, tagL, tagM) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E](), new TupleTag[F](), new TupleTag[G](), new TupleTag[H](), new TupleTag[I](), new TupleTag[J](), new TupleTag[K](), new TupleTag[L](), new TupleTag[M]())
     val keyed = KeyedPCollectionTuple
       .of(tagA, a.toKV.internal)
@@ -2426,8 +2428,8 @@ trait MultiJoin extends Serializable {
   }
 
   def outer[KEY, A, B, C, D, E, F, G, H, I, J, K, L, M, N](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)], f: SCollection[(KEY, F)], g: SCollection[(KEY, G)], h: SCollection[(KEY, H)], i: SCollection[(KEY, I)], j: SCollection[(KEY, J)], k: SCollection[(KEY, K)], l: SCollection[(KEY, L)], m: SCollection[(KEY, M)], n: SCollection[(KEY, N)]): SCollection[(KEY, (Option[A], Option[B], Option[C], Option[D], Option[E], Option[F], Option[G], Option[H], Option[I], Option[J], Option[K], Option[L], Option[M], Option[N]))] = {
-    implicit val keyCoder = a.keyCoder
-    implicit val (coderA, coderB, coderC, coderD, coderE, coderF, coderG, coderH, coderI, coderJ, coderK, coderL, coderM, coderN) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder)
+    implicit val keyCoder: Coder[KEY] = a.keyCoder
+    implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E], coderF: Coder[F], coderG: Coder[G], coderH: Coder[H], coderI: Coder[I], coderJ: Coder[J], coderK: Coder[K], coderL: Coder[L], coderM: Coder[M], coderN: Coder[N]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder)
     val (tagA, tagB, tagC, tagD, tagE, tagF, tagG, tagH, tagI, tagJ, tagK, tagL, tagM, tagN) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E](), new TupleTag[F](), new TupleTag[G](), new TupleTag[H](), new TupleTag[I](), new TupleTag[J](), new TupleTag[K](), new TupleTag[L](), new TupleTag[M](), new TupleTag[N]())
     val keyed = KeyedPCollectionTuple
       .of(tagA, a.toKV.internal)
@@ -2467,8 +2469,8 @@ trait MultiJoin extends Serializable {
   }
 
   def outer[KEY, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)], f: SCollection[(KEY, F)], g: SCollection[(KEY, G)], h: SCollection[(KEY, H)], i: SCollection[(KEY, I)], j: SCollection[(KEY, J)], k: SCollection[(KEY, K)], l: SCollection[(KEY, L)], m: SCollection[(KEY, M)], n: SCollection[(KEY, N)], o: SCollection[(KEY, O)]): SCollection[(KEY, (Option[A], Option[B], Option[C], Option[D], Option[E], Option[F], Option[G], Option[H], Option[I], Option[J], Option[K], Option[L], Option[M], Option[N], Option[O]))] = {
-    implicit val keyCoder = a.keyCoder
-    implicit val (coderA, coderB, coderC, coderD, coderE, coderF, coderG, coderH, coderI, coderJ, coderK, coderL, coderM, coderN, coderO) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder, o.valueCoder)
+    implicit val keyCoder: Coder[KEY] = a.keyCoder
+    implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E], coderF: Coder[F], coderG: Coder[G], coderH: Coder[H], coderI: Coder[I], coderJ: Coder[J], coderK: Coder[K], coderL: Coder[L], coderM: Coder[M], coderN: Coder[N], coderO: Coder[O]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder, o.valueCoder)
     val (tagA, tagB, tagC, tagD, tagE, tagF, tagG, tagH, tagI, tagJ, tagK, tagL, tagM, tagN, tagO) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E](), new TupleTag[F](), new TupleTag[G](), new TupleTag[H](), new TupleTag[I](), new TupleTag[J](), new TupleTag[K](), new TupleTag[L](), new TupleTag[M](), new TupleTag[N](), new TupleTag[O]())
     val keyed = KeyedPCollectionTuple
       .of(tagA, a.toKV.internal)
@@ -2510,8 +2512,8 @@ trait MultiJoin extends Serializable {
   }
 
   def outer[KEY, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)], f: SCollection[(KEY, F)], g: SCollection[(KEY, G)], h: SCollection[(KEY, H)], i: SCollection[(KEY, I)], j: SCollection[(KEY, J)], k: SCollection[(KEY, K)], l: SCollection[(KEY, L)], m: SCollection[(KEY, M)], n: SCollection[(KEY, N)], o: SCollection[(KEY, O)], p: SCollection[(KEY, P)]): SCollection[(KEY, (Option[A], Option[B], Option[C], Option[D], Option[E], Option[F], Option[G], Option[H], Option[I], Option[J], Option[K], Option[L], Option[M], Option[N], Option[O], Option[P]))] = {
-    implicit val keyCoder = a.keyCoder
-    implicit val (coderA, coderB, coderC, coderD, coderE, coderF, coderG, coderH, coderI, coderJ, coderK, coderL, coderM, coderN, coderO, coderP) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder, o.valueCoder, p.valueCoder)
+    implicit val keyCoder: Coder[KEY] = a.keyCoder
+    implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E], coderF: Coder[F], coderG: Coder[G], coderH: Coder[H], coderI: Coder[I], coderJ: Coder[J], coderK: Coder[K], coderL: Coder[L], coderM: Coder[M], coderN: Coder[N], coderO: Coder[O], coderP: Coder[P]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder, o.valueCoder, p.valueCoder)
     val (tagA, tagB, tagC, tagD, tagE, tagF, tagG, tagH, tagI, tagJ, tagK, tagL, tagM, tagN, tagO, tagP) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E](), new TupleTag[F](), new TupleTag[G](), new TupleTag[H](), new TupleTag[I](), new TupleTag[J](), new TupleTag[K](), new TupleTag[L](), new TupleTag[M](), new TupleTag[N](), new TupleTag[O](), new TupleTag[P]())
     val keyed = KeyedPCollectionTuple
       .of(tagA, a.toKV.internal)
@@ -2555,8 +2557,8 @@ trait MultiJoin extends Serializable {
   }
 
   def outer[KEY, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)], f: SCollection[(KEY, F)], g: SCollection[(KEY, G)], h: SCollection[(KEY, H)], i: SCollection[(KEY, I)], j: SCollection[(KEY, J)], k: SCollection[(KEY, K)], l: SCollection[(KEY, L)], m: SCollection[(KEY, M)], n: SCollection[(KEY, N)], o: SCollection[(KEY, O)], p: SCollection[(KEY, P)], q: SCollection[(KEY, Q)]): SCollection[(KEY, (Option[A], Option[B], Option[C], Option[D], Option[E], Option[F], Option[G], Option[H], Option[I], Option[J], Option[K], Option[L], Option[M], Option[N], Option[O], Option[P], Option[Q]))] = {
-    implicit val keyCoder = a.keyCoder
-    implicit val (coderA, coderB, coderC, coderD, coderE, coderF, coderG, coderH, coderI, coderJ, coderK, coderL, coderM, coderN, coderO, coderP, coderQ) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder, o.valueCoder, p.valueCoder, q.valueCoder)
+    implicit val keyCoder: Coder[KEY] = a.keyCoder
+    implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E], coderF: Coder[F], coderG: Coder[G], coderH: Coder[H], coderI: Coder[I], coderJ: Coder[J], coderK: Coder[K], coderL: Coder[L], coderM: Coder[M], coderN: Coder[N], coderO: Coder[O], coderP: Coder[P], coderQ: Coder[Q]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder, o.valueCoder, p.valueCoder, q.valueCoder)
     val (tagA, tagB, tagC, tagD, tagE, tagF, tagG, tagH, tagI, tagJ, tagK, tagL, tagM, tagN, tagO, tagP, tagQ) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E](), new TupleTag[F](), new TupleTag[G](), new TupleTag[H](), new TupleTag[I](), new TupleTag[J](), new TupleTag[K](), new TupleTag[L](), new TupleTag[M](), new TupleTag[N](), new TupleTag[O](), new TupleTag[P](), new TupleTag[Q]())
     val keyed = KeyedPCollectionTuple
       .of(tagA, a.toKV.internal)
@@ -2602,8 +2604,8 @@ trait MultiJoin extends Serializable {
   }
 
   def outer[KEY, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)], f: SCollection[(KEY, F)], g: SCollection[(KEY, G)], h: SCollection[(KEY, H)], i: SCollection[(KEY, I)], j: SCollection[(KEY, J)], k: SCollection[(KEY, K)], l: SCollection[(KEY, L)], m: SCollection[(KEY, M)], n: SCollection[(KEY, N)], o: SCollection[(KEY, O)], p: SCollection[(KEY, P)], q: SCollection[(KEY, Q)], r: SCollection[(KEY, R)]): SCollection[(KEY, (Option[A], Option[B], Option[C], Option[D], Option[E], Option[F], Option[G], Option[H], Option[I], Option[J], Option[K], Option[L], Option[M], Option[N], Option[O], Option[P], Option[Q], Option[R]))] = {
-    implicit val keyCoder = a.keyCoder
-    implicit val (coderA, coderB, coderC, coderD, coderE, coderF, coderG, coderH, coderI, coderJ, coderK, coderL, coderM, coderN, coderO, coderP, coderQ, coderR) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder, o.valueCoder, p.valueCoder, q.valueCoder, r.valueCoder)
+    implicit val keyCoder: Coder[KEY] = a.keyCoder
+    implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E], coderF: Coder[F], coderG: Coder[G], coderH: Coder[H], coderI: Coder[I], coderJ: Coder[J], coderK: Coder[K], coderL: Coder[L], coderM: Coder[M], coderN: Coder[N], coderO: Coder[O], coderP: Coder[P], coderQ: Coder[Q], coderR: Coder[R]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder, o.valueCoder, p.valueCoder, q.valueCoder, r.valueCoder)
     val (tagA, tagB, tagC, tagD, tagE, tagF, tagG, tagH, tagI, tagJ, tagK, tagL, tagM, tagN, tagO, tagP, tagQ, tagR) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E](), new TupleTag[F](), new TupleTag[G](), new TupleTag[H](), new TupleTag[I](), new TupleTag[J](), new TupleTag[K](), new TupleTag[L](), new TupleTag[M](), new TupleTag[N](), new TupleTag[O](), new TupleTag[P](), new TupleTag[Q](), new TupleTag[R]())
     val keyed = KeyedPCollectionTuple
       .of(tagA, a.toKV.internal)
@@ -2651,8 +2653,8 @@ trait MultiJoin extends Serializable {
   }
 
   def outer[KEY, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)], f: SCollection[(KEY, F)], g: SCollection[(KEY, G)], h: SCollection[(KEY, H)], i: SCollection[(KEY, I)], j: SCollection[(KEY, J)], k: SCollection[(KEY, K)], l: SCollection[(KEY, L)], m: SCollection[(KEY, M)], n: SCollection[(KEY, N)], o: SCollection[(KEY, O)], p: SCollection[(KEY, P)], q: SCollection[(KEY, Q)], r: SCollection[(KEY, R)], s: SCollection[(KEY, S)]): SCollection[(KEY, (Option[A], Option[B], Option[C], Option[D], Option[E], Option[F], Option[G], Option[H], Option[I], Option[J], Option[K], Option[L], Option[M], Option[N], Option[O], Option[P], Option[Q], Option[R], Option[S]))] = {
-    implicit val keyCoder = a.keyCoder
-    implicit val (coderA, coderB, coderC, coderD, coderE, coderF, coderG, coderH, coderI, coderJ, coderK, coderL, coderM, coderN, coderO, coderP, coderQ, coderR, coderS) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder, o.valueCoder, p.valueCoder, q.valueCoder, r.valueCoder, s.valueCoder)
+    implicit val keyCoder: Coder[KEY] = a.keyCoder
+    implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E], coderF: Coder[F], coderG: Coder[G], coderH: Coder[H], coderI: Coder[I], coderJ: Coder[J], coderK: Coder[K], coderL: Coder[L], coderM: Coder[M], coderN: Coder[N], coderO: Coder[O], coderP: Coder[P], coderQ: Coder[Q], coderR: Coder[R], coderS: Coder[S]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder, o.valueCoder, p.valueCoder, q.valueCoder, r.valueCoder, s.valueCoder)
     val (tagA, tagB, tagC, tagD, tagE, tagF, tagG, tagH, tagI, tagJ, tagK, tagL, tagM, tagN, tagO, tagP, tagQ, tagR, tagS) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E](), new TupleTag[F](), new TupleTag[G](), new TupleTag[H](), new TupleTag[I](), new TupleTag[J](), new TupleTag[K](), new TupleTag[L](), new TupleTag[M](), new TupleTag[N](), new TupleTag[O](), new TupleTag[P](), new TupleTag[Q](), new TupleTag[R](), new TupleTag[S]())
     val keyed = KeyedPCollectionTuple
       .of(tagA, a.toKV.internal)
@@ -2702,8 +2704,8 @@ trait MultiJoin extends Serializable {
   }
 
   def outer[KEY, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)], f: SCollection[(KEY, F)], g: SCollection[(KEY, G)], h: SCollection[(KEY, H)], i: SCollection[(KEY, I)], j: SCollection[(KEY, J)], k: SCollection[(KEY, K)], l: SCollection[(KEY, L)], m: SCollection[(KEY, M)], n: SCollection[(KEY, N)], o: SCollection[(KEY, O)], p: SCollection[(KEY, P)], q: SCollection[(KEY, Q)], r: SCollection[(KEY, R)], s: SCollection[(KEY, S)], t: SCollection[(KEY, T)]): SCollection[(KEY, (Option[A], Option[B], Option[C], Option[D], Option[E], Option[F], Option[G], Option[H], Option[I], Option[J], Option[K], Option[L], Option[M], Option[N], Option[O], Option[P], Option[Q], Option[R], Option[S], Option[T]))] = {
-    implicit val keyCoder = a.keyCoder
-    implicit val (coderA, coderB, coderC, coderD, coderE, coderF, coderG, coderH, coderI, coderJ, coderK, coderL, coderM, coderN, coderO, coderP, coderQ, coderR, coderS, coderT) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder, o.valueCoder, p.valueCoder, q.valueCoder, r.valueCoder, s.valueCoder, t.valueCoder)
+    implicit val keyCoder: Coder[KEY] = a.keyCoder
+    implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E], coderF: Coder[F], coderG: Coder[G], coderH: Coder[H], coderI: Coder[I], coderJ: Coder[J], coderK: Coder[K], coderL: Coder[L], coderM: Coder[M], coderN: Coder[N], coderO: Coder[O], coderP: Coder[P], coderQ: Coder[Q], coderR: Coder[R], coderS: Coder[S], coderT: Coder[T]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder, o.valueCoder, p.valueCoder, q.valueCoder, r.valueCoder, s.valueCoder, t.valueCoder)
     val (tagA, tagB, tagC, tagD, tagE, tagF, tagG, tagH, tagI, tagJ, tagK, tagL, tagM, tagN, tagO, tagP, tagQ, tagR, tagS, tagT) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E](), new TupleTag[F](), new TupleTag[G](), new TupleTag[H](), new TupleTag[I](), new TupleTag[J](), new TupleTag[K](), new TupleTag[L](), new TupleTag[M](), new TupleTag[N](), new TupleTag[O](), new TupleTag[P](), new TupleTag[Q](), new TupleTag[R](), new TupleTag[S](), new TupleTag[T]())
     val keyed = KeyedPCollectionTuple
       .of(tagA, a.toKV.internal)
@@ -2755,8 +2757,8 @@ trait MultiJoin extends Serializable {
   }
 
   def outer[KEY, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)], f: SCollection[(KEY, F)], g: SCollection[(KEY, G)], h: SCollection[(KEY, H)], i: SCollection[(KEY, I)], j: SCollection[(KEY, J)], k: SCollection[(KEY, K)], l: SCollection[(KEY, L)], m: SCollection[(KEY, M)], n: SCollection[(KEY, N)], o: SCollection[(KEY, O)], p: SCollection[(KEY, P)], q: SCollection[(KEY, Q)], r: SCollection[(KEY, R)], s: SCollection[(KEY, S)], t: SCollection[(KEY, T)], u: SCollection[(KEY, U)]): SCollection[(KEY, (Option[A], Option[B], Option[C], Option[D], Option[E], Option[F], Option[G], Option[H], Option[I], Option[J], Option[K], Option[L], Option[M], Option[N], Option[O], Option[P], Option[Q], Option[R], Option[S], Option[T], Option[U]))] = {
-    implicit val keyCoder = a.keyCoder
-    implicit val (coderA, coderB, coderC, coderD, coderE, coderF, coderG, coderH, coderI, coderJ, coderK, coderL, coderM, coderN, coderO, coderP, coderQ, coderR, coderS, coderT, coderU) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder, o.valueCoder, p.valueCoder, q.valueCoder, r.valueCoder, s.valueCoder, t.valueCoder, u.valueCoder)
+    implicit val keyCoder: Coder[KEY] = a.keyCoder
+    implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E], coderF: Coder[F], coderG: Coder[G], coderH: Coder[H], coderI: Coder[I], coderJ: Coder[J], coderK: Coder[K], coderL: Coder[L], coderM: Coder[M], coderN: Coder[N], coderO: Coder[O], coderP: Coder[P], coderQ: Coder[Q], coderR: Coder[R], coderS: Coder[S], coderT: Coder[T], coderU: Coder[U]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder, o.valueCoder, p.valueCoder, q.valueCoder, r.valueCoder, s.valueCoder, t.valueCoder, u.valueCoder)
     val (tagA, tagB, tagC, tagD, tagE, tagF, tagG, tagH, tagI, tagJ, tagK, tagL, tagM, tagN, tagO, tagP, tagQ, tagR, tagS, tagT, tagU) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E](), new TupleTag[F](), new TupleTag[G](), new TupleTag[H](), new TupleTag[I](), new TupleTag[J](), new TupleTag[K](), new TupleTag[L](), new TupleTag[M](), new TupleTag[N](), new TupleTag[O](), new TupleTag[P](), new TupleTag[Q](), new TupleTag[R](), new TupleTag[S](), new TupleTag[T](), new TupleTag[U]())
     val keyed = KeyedPCollectionTuple
       .of(tagA, a.toKV.internal)
@@ -2810,8 +2812,8 @@ trait MultiJoin extends Serializable {
   }
 
   def outer[KEY, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U, V](a: SCollection[(KEY, A)], b: SCollection[(KEY, B)], c: SCollection[(KEY, C)], d: SCollection[(KEY, D)], e: SCollection[(KEY, E)], f: SCollection[(KEY, F)], g: SCollection[(KEY, G)], h: SCollection[(KEY, H)], i: SCollection[(KEY, I)], j: SCollection[(KEY, J)], k: SCollection[(KEY, K)], l: SCollection[(KEY, L)], m: SCollection[(KEY, M)], n: SCollection[(KEY, N)], o: SCollection[(KEY, O)], p: SCollection[(KEY, P)], q: SCollection[(KEY, Q)], r: SCollection[(KEY, R)], s: SCollection[(KEY, S)], t: SCollection[(KEY, T)], u: SCollection[(KEY, U)], v: SCollection[(KEY, V)]): SCollection[(KEY, (Option[A], Option[B], Option[C], Option[D], Option[E], Option[F], Option[G], Option[H], Option[I], Option[J], Option[K], Option[L], Option[M], Option[N], Option[O], Option[P], Option[Q], Option[R], Option[S], Option[T], Option[U], Option[V]))] = {
-    implicit val keyCoder = a.keyCoder
-    implicit val (coderA, coderB, coderC, coderD, coderE, coderF, coderG, coderH, coderI, coderJ, coderK, coderL, coderM, coderN, coderO, coderP, coderQ, coderR, coderS, coderT, coderU, coderV) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder, o.valueCoder, p.valueCoder, q.valueCoder, r.valueCoder, s.valueCoder, t.valueCoder, u.valueCoder, v.valueCoder)
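+    // The ascriptions below (as in the sibling joins above) give each pattern
+    // binding an explicit type; Scala 3 expects implicit definitions to be
+    // explicitly typed rather than inferred from the tuple pattern.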
+    implicit val keyCoder: Coder[KEY] = a.keyCoder
+    implicit val (coderA: Coder[A], coderB: Coder[B], coderC: Coder[C], coderD: Coder[D], coderE: Coder[E], coderF: Coder[F], coderG: Coder[G], coderH: Coder[H], coderI: Coder[I], coderJ: Coder[J], coderK: Coder[K], coderL: Coder[L], coderM: Coder[M], coderN: Coder[N], coderO: Coder[O], coderP: Coder[P], coderQ: Coder[Q], coderR: Coder[R], coderS: Coder[S], coderT: Coder[T], coderU: Coder[U], coderV: Coder[V]) = (a.valueCoder, b.valueCoder, c.valueCoder, d.valueCoder, e.valueCoder, f.valueCoder, g.valueCoder, h.valueCoder, i.valueCoder, j.valueCoder, k.valueCoder, l.valueCoder, m.valueCoder, n.valueCoder, o.valueCoder, p.valueCoder, q.valueCoder, r.valueCoder, s.valueCoder, t.valueCoder, u.valueCoder, v.valueCoder)
     val (tagA, tagB, tagC, tagD, tagE, tagF, tagG, tagH, tagI, tagJ, tagK, tagL, tagM, tagN, tagO, tagP, tagQ, tagR, tagS, tagT, tagU, tagV) = (new TupleTag[A](), new TupleTag[B](), new TupleTag[C](), new TupleTag[D](), new TupleTag[E](), new TupleTag[F](), new TupleTag[G](), new TupleTag[H](), new TupleTag[I](), new TupleTag[J](), new TupleTag[K](), new TupleTag[L](), new TupleTag[M](), new TupleTag[N](), new TupleTag[O](), new TupleTag[P](), new TupleTag[Q](), new TupleTag[R](), new TupleTag[S](), new TupleTag[T](), new TupleTag[U](), new TupleTag[V]())
     val keyed = KeyedPCollectionTuple
       .of(tagA, a.toKV.internal)
diff --git a/scio-core/src/main/scala/com/spotify/scio/values/PairHashSCollectionFunctions.scala b/scio-core/src/main/scala/com/spotify/scio/values/PairHashSCollectionFunctions.scala
index 4c13b91b59..e661fcbc38 100644
--- a/scio-core/src/main/scala/com/spotify/scio/values/PairHashSCollectionFunctions.scala
+++ b/scio-core/src/main/scala/com/spotify/scio/values/PairHashSCollectionFunctions.scala
@@ -27,7 +27,7 @@ import com.spotify.scio.coders.{BeamCoders, Coder}
  */
 class PairHashSCollectionFunctions[K, V](val self: SCollection[(K, V)]) {
 
-  implicit private[this] val (keyCoder, valueCoder): (Coder[K], Coder[V]) =
+  implicit private[this] val (keyCoder: Coder[K], valueCoder: Coder[V]) =
     (self.keyCoder, self.valueCoder)
 
   /**
diff --git a/scio-core/src/main/scala/com/spotify/scio/values/PairSCollectionFunctions.scala b/scio-core/src/main/scala/com/spotify/scio/values/PairSCollectionFunctions.scala
index af367131cb..edbf7c523f 100644
--- a/scio-core/src/main/scala/com/spotify/scio/values/PairSCollectionFunctions.scala
+++ b/scio-core/src/main/scala/com/spotify/scio/values/PairSCollectionFunctions.scala
@@ -28,6 +28,7 @@ import com.spotify.scio.estimators.{
 import com.spotify.scio.hash._
 import com.spotify.scio.util._
 import com.spotify.scio.util.random.{BernoulliValueSampler, PoissonValueSampler}
+import com.spotify.scio.values.SCollection.makePairSCollectionFunctions
 import com.twitter.algebird.{Aggregator, Monoid, MonoidAggregator, Semigroup}
 import org.apache.beam.sdk.transforms._
 import org.apache.beam.sdk.values.{KV, PCollection}
@@ -52,7 +53,7 @@ class PairSCollectionFunctions[K, V](val self: SCollection[(K, V)]) {
 
   private[this] val context: ScioContext = self.context
 
-  implicit val (keyCoder, valueCoder): (Coder[K], Coder[V]) = BeamCoders.getTupleCoders(self)
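+  // BeamCoders.getTupleCoders extracts the key and value coders from the
+  // underlying pair coder; the explicit Coder[K]/Coder[V] ascriptions keep the
+  // destructured bindings usable as implicits when compiled with Scala 3.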
+  implicit val (keyCoder: Coder[K], valueCoder: Coder[V]) = BeamCoders.getTupleCoders(self)
 
   private[scio] def toKV: SCollection[KV[K, V]] =
     self.map(kv => KV.of(kv._1, kv._2))(Coder.raw(CoderMaterializer.kvCoder[K, V](context)))
@@ -238,7 +239,7 @@ class PairSCollectionFunctions[K, V](val self: SCollection[(K, V)]) {
     rhsNumKeys: Long,
     fpProb: Double = 0.01
   )(implicit funnel: Funnel[K]): SCollection[(K, (Option[V], Option[W]))] = self.transform { me =>
-    implicit val wCoder = rhs.valueCoder
+    implicit val wCoder: Coder[W] = rhs.valueCoder
     SCollection.unionAll(
       split(me, rhs, rhsNumKeys, fpProb).map { case (lhsUnique, lhsOverlap, rhs) =>
         val unique = lhsUnique.map(kv => (kv._1, (Option(kv._2), Option.empty[W])))
@@ -272,7 +273,7 @@ class PairSCollectionFunctions[K, V](val self: SCollection[(K, V)]) {
     fpProb: Double = 0.01
   )(implicit funnel: Funnel[K]): SCollection[(K, (V, W))] =
     self.transform { me =>
-      implicit val wCoder = rhs.valueCoder
+      implicit val wCoder: Coder[W] = rhs.valueCoder
       SCollection.unionAll(
         split(me, rhs, rhsNumKeys, fpProb).map { case (_, lhsOverlap, rhs) =>
           lhsOverlap.join(rhs)
@@ -305,7 +306,7 @@ class PairSCollectionFunctions[K, V](val self: SCollection[(K, V)]) {
     fpProb: Double = 0.01
   )(implicit funnel: Funnel[K]): SCollection[(K, (V, Option[W]))] =
     self.transform { me =>
-      implicit val wCoder = rhs.valueCoder
+      implicit val wCoder: Coder[W] = rhs.valueCoder
       SCollection.unionAll(
         split(me, rhs, rhsNumKeys, fpProb).map { case (lhsUnique, lhsOverlap, rhs) =>
           val unique = lhsUnique.map(kv => (kv._1, (kv._2, Option.empty[W])))
@@ -339,7 +340,7 @@ class PairSCollectionFunctions[K, V](val self: SCollection[(K, V)]) {
     fpProb: Double = 0.01
   )(implicit funnel: Funnel[K]): SCollection[(K, (Option[V], W))] =
     self.transform { me =>
-      implicit val wCoder = rhs.valueCoder
+      implicit val wCoder: Coder[W] = rhs.valueCoder
       SCollection.unionAll(
         split(me, rhs, rhsNumKeys, fpProb).map { case (_, lhsOverlap, rhs) =>
           lhsOverlap.rightOuterJoin(rhs)
@@ -403,7 +404,7 @@ class PairSCollectionFunctions[K, V](val self: SCollection[(K, V)]) {
   def sparseLookup[A](rhs: SCollection[(K, A)], thisNumKeys: Long, fpProb: Double)(implicit
     funnel: Funnel[K]
   ): SCollection[(K, (V, Iterable[A]))] = self.transform { sColl =>
-    implicit val aCoder = rhs.valueCoder
+    implicit val aCoder: Coder[A] = rhs.valueCoder
     val selfBfSideInputs = BloomFilter.createPartitionedSideInputs(sColl.keys, thisNumKeys, fpProb)
     val n = selfBfSideInputs.size
 
@@ -465,8 +466,8 @@ class PairSCollectionFunctions[K, V](val self: SCollection[(K, V)]) {
     fpProb: Double
   )(implicit funnel: Funnel[K]): SCollection[(K, (V, Iterable[A], Iterable[B]))] =
     self.transform { sColl =>
-      implicit val aCoder = rhs1.valueCoder
-      implicit val bCoder = rhs2.valueCoder
+      implicit val aCoder: Coder[A] = rhs1.valueCoder
+      implicit val bCoder: Coder[B] = rhs2.valueCoder
       val selfBfSideInputs = BloomFilter.createPartitionedSideInputs(sColl.keys, thisNumKeys, fpProb)
       val n = selfBfSideInputs.size
@@ -1017,7 +1018,7 @@ class PairSCollectionFunctions[K, V](val self: SCollection[(K, V)]) {
             (kv._1, kv._2.head)
           }
           .groupBy(_ => ())
-          .map(_._2.toMap)
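+          // type argument pinned to Map[K, V]: the inferred type (and hence
+          // the Coder materialized for the output) can differ under Scala 3,
+          // so it is spelled out here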
+          .map[Map[K, V]](_._2.toMap)
       )
       .asSingletonSideInput(Map.empty[K, V])
 
@@ -1035,7 +1036,7 @@ class PairSCollectionFunctions[K, V](val self: SCollection[(K, V)]) {
       .transform(
         _.groupByKey
           .groupBy(_ => ())
-          .map(_._2.toMap)
+          .map[Map[K, Iterable[V]]](_._2.toMap)
       )
       .asSingletonSideInput(Map.empty[K, Iterable[V]])
 
diff --git a/scio-core/src/main/scala/com/spotify/scio/values/PairSkewedSCollectionFunctions.scala b/scio-core/src/main/scala/com/spotify/scio/values/PairSkewedSCollectionFunctions.scala
index 4c016db8ad..3f0ef8e412 100644
--- a/scio-core/src/main/scala/com/spotify/scio/values/PairSkewedSCollectionFunctions.scala
+++ b/scio-core/src/main/scala/com/spotify/scio/values/PairSkewedSCollectionFunctions.scala
@@ -33,7 +33,7 @@ final private case class Partitions[K, V](hot: SCollection[(K, V)], chill: SColl
  */
 class PairSkewedSCollectionFunctions[K, V](val self: SCollection[(K, V)]) {
 
-  implicit private[this] val (keyCoder, valueCoder): (Coder[K], Coder[V]) =
+  implicit private[this] val (keyCoder: Coder[K], valueCoder: Coder[V]) =
     (self.keyCoder, self.valueCoder)
 
   /**
@@ -394,7 +394,7 @@ class PairSkewedSCollectionFunctions[K, V](val self: SCollection[(K, V)]) {
     hotKeyThreshold: Long,
     cms: SCollection[CMS[K]]
   ): (Partitions[K, V], Partitions[K, W]) = {
-    implicit val wCoder = rhs.valueCoder
+    implicit val wCoder: Coder[W] = rhs.valueCoder
     val (hotSelf, chillSelf) = (SideOutput[(K, V)](), SideOutput[(K, V)]())
 
     // Use asIterableSideInput as workaround for:
diff --git a/scio-core/src/main/scala/com/spotify/scio/values/SCollection.scala b/scio-core/src/main/scala/com/spotify/scio/values/SCollection.scala
index 1718c2953f..2d615b6f35 100644
--- a/scio-core/src/main/scala/com/spotify/scio/values/SCollection.scala
+++ b/scio-core/src/main/scala/com/spotify/scio/values/SCollection.scala
@@ -643,7 +643,7 @@ sealed trait SCollection[T] extends PCollectionWrapper[T] {
 
       val cf = ClosureCleaner.clean(f)
       val cg = ClosureCleaner.clean(g)
-      _.map(t => KV.of(cf(t), cg(t)))(Coder.raw(CoderMaterializer.kvCoder[K, U](context)))
+      (_: SCollection[T]).map(t => KV.of(cf(t), cg(t)))(Coder.raw(CoderMaterializer.kvCoder[K, U](context)))
         .pApply(GroupByKey.create[K, U]())
         .map(kvIterableToTuple)
     }
@@ -664,7 +664,7 @@ sealed trait SCollection[T] extends PCollectionWrapper[T] {
 
     this.transform {
       val cf = ClosureCleaner.clean(f)
-      _.map(t => KV.of(cf(t), t))(Coder.raw(CoderMaterializer.kvCoder[K, T](context)))
+      (_: SCollection[T]).map(t => KV.of(cf(t), t))(Coder.raw(CoderMaterializer.kvCoder[K, T](context)))
         .pApply(Combine.perKey(Functions.reduceFn(context, g)))
         .map(kvToTuple)
     }
@@ -985,7 +985,7 @@ sealed trait SCollection[T] extends PCollectionWrapper[T] {
       .transform(
         _.distinct
           .groupBy(_ => ())
-          .map(_._2.toSet)
+          .map[Set[T]](_._2.toSet)
       )
       .asSingletonSideInput(Set.empty[T])
 
diff --git a/scio-macros/src/main/scala-2/com/spotify/scio/coders/AvroCoderMacros.scala b/scio-macros/src/main/scala-2/com/spotify/scio/coders/AvroCoderMacros.scala
index 043f1baec6..ca104ff1a6 100644
--- a/scio-macros/src/main/scala-2/com/spotify/scio/coders/AvroCoderMacros.scala
+++ b/scio-macros/src/main/scala-2/com/spotify/scio/coders/AvroCoderMacros.scala
@@ -15,13 +15,13 @@
  * under the License.
  */
 
-package com.spotify.scio.coders
+package com.spotify.scio.coders.instances
 
 import org.apache.avro.specific.SpecificRecordBase
 
 import scala.reflect.macros.blackbox
 
-private[coders] object AvroCoderMacros {
+private[instances] object AvroCoderMacros {
 
   /** Generate a coder which does not serialize the schema and relies exclusively on types. */
   def staticInvokeCoder[T <: SpecificRecordBase: c.WeakTypeTag](c: blackbox.Context): c.Tree = {
diff --git a/scio-macros/src/main/scala-3/com/spotify/scio/DerivationUtils.scala b/scio-macros/src/main/scala-3/com/spotify/scio/DerivationUtils.scala
new file mode 100644
index 0000000000..8562727360
--- /dev/null
+++ b/scio-macros/src/main/scala-3/com/spotify/scio/DerivationUtils.scala
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2020 Spotify AB.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.spotify.scio.macros
+
+import scala.compiletime._
+import scala.deriving._
+
+object DerivationUtils {
+  inline given mirrorFields[Fields <: Tuple] as List[String] =
+    inline erasedValue[Fields] match {
+      case _: (field *: fields) => constValue[field].toString :: mirrorFields[fields]
+      case _ => Nil
+    }
+
+  inline given summonAllF[F[_], T <: Tuple] as Widen[T] = {
+    val res =
+      inline erasedValue[T] match {
+        case _: EmptyTuple => EmptyTuple
+        case _: (t *: ts) => summonInline[F[t]] *: summonAllF[F, ts]
+      }
+    res.asInstanceOf[Widen[T]]
+  }
+}
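+
+// Rough usage sketch (illustrative only; `User` is a hypothetical case class,
+// not part of this change): given a Mirror for a product type, mirrorFields
+// is expected to recover its field names at compile time:
+//
+//   case class User(name: String, age: Int)
+//   val m = summon[Mirror.ProductOf[User]]
+//   mirrorFields[m.MirroredElemLabels] // List("name", "age")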
+ */ + +package com.spotify.scio.macros + +import scala.compiletime._ +import scala.deriving._ + +object DerivationUtils { + inline given mirrorFields[Fields <: Tuple] as List[String] = + inline erasedValue[Fields] match { + case _: (field *: fields) => constValue[field].toString :: mirrorFields[fields] + case _ => Nil + } + + inline given summonAllF[F[_], T <: Tuple] as Widen[T] = { + val res = + inline erasedValue[T] match { + case _: EmptyTuple => EmptyTuple + case _: (t *: ts) => summonInline[F[t]] *: summonAllF[F, ts] + } + res.asInstanceOf[Widen[T]] + } +} diff --git a/scio-macros/src/main/scala-3/com/spotify/scio/IsJava.scala b/scio-macros/src/main/scala-3/com/spotify/scio/IsJava.scala index 7e60555f38..a7d6ba4633 100644 --- a/scio-macros/src/main/scala-3/com/spotify/scio/IsJava.scala +++ b/scio-macros/src/main/scala-3/com/spotify/scio/IsJava.scala @@ -45,7 +45,7 @@ object IsJavaBean { if(getters.isEmpty) { val mess = s"""Class ${sym.name} has not getter""" - report.error(mess) + report.throwError(mess) } getters.foreach { case (name, info) => @@ -75,7 +75,7 @@ object IsJavaBean { private def isJavaBeanImpl[T](using Quotes, Type[T]): Expr[IsJavaBean[T]] = { import quotes.reflect._ val sym = TypeTree.of[T].symbol - // TODO: check if symbol is a Java class ? + // TODO: scala3 - check if symbol is a Java class ? checkGetterAndSetters(sym) '{new IsJavaBean[T]{}} } diff --git a/scio-macros/src/main/scala-3/com/spotify/scio/SysPropsMacros.scala b/scio-macros/src/main/scala-3/com/spotify/scio/SysPropsMacros.scala new file mode 100644 index 0000000000..4c53b58087 --- /dev/null +++ b/scio-macros/src/main/scala-3/com/spotify/scio/SysPropsMacros.scala @@ -0,0 +1,29 @@ +/* + * Copyright 2019 Spotify AB. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package com.spotify.scio + +import scala.annotation.{compileTimeOnly, StaticAnnotation} + +// TODO: scala3 implement +final class registerSysProps extends StaticAnnotation { + def macroTransform(annottees: Any*): Any = ??? // TODO: scala3 +} + + + + diff --git a/scio-macros/src/main/scala-3/com/spotify/scio/coders/CoderMacros.scala b/scio-macros/src/main/scala-3/com/spotify/scio/coders/CoderMacros.scala new file mode 100644 index 0000000000..0a1c595b82 --- /dev/null +++ b/scio-macros/src/main/scala-3/com/spotify/scio/coders/CoderMacros.scala @@ -0,0 +1,30 @@ +/* + * Copyright 2019 Spotify AB. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */
+
+package com.spotify.scio.coders
+
+import scala.compiletime._
+import scala.deriving._
+import scala.quoted._
+
+// TODO: scala3 - implement macros?
+private[coders] object CoderMacros {
+
+  // Add a level of indirection to prevent the macro from capturing
+  // $outer which would make the Coder serialization fail
+  def wrappedCoder[T] = { ??? }
+}
diff --git a/scio-macros/src/main/scala-3/com/spotify/scio/coders/KryoRegistrar.scala b/scio-macros/src/main/scala-3/com/spotify/scio/coders/KryoRegistrar.scala
new file mode 100644
index 0000000000..f7a45b2f60
--- /dev/null
+++ b/scio-macros/src/main/scala-3/com/spotify/scio/coders/KryoRegistrar.scala
@@ -0,0 +1,45 @@
+/*
+ * Copyright 2019 Spotify AB.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.spotify.scio.coders
+
+import scala.annotation.{compileTimeOnly, StaticAnnotation}
+
+/**
+ * Annotation for custom Kryo registrar classes.
+ *
+ * Annotated class must extend `IKryoRegistrar` and have a name that ends with "KryoRegistrar".
+ */
+class KryoRegistrar extends StaticAnnotation {
+  def macroTransform(annottees: Any*): Any = ??? // TODO: scala3 implement
+}
+
+/** Trait to be added to a Kryo registrar class annotated with `@KryoRegistrar`.
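+ *
+ * A minimal sketch of the intended usage, mirroring the Scala 2 annotation
+ * (`MyKryoRegistrar` and `MyRecord` are hypothetical; on Scala 3 the
+ * `macroTransform` above is still a stub):
+ *
+ * {{{
+ * @KryoRegistrar
+ * class MyKryoRegistrar extends IKryoRegistrar {
+ *   def apply(k: com.esotericsoftware.kryo.Kryo): Unit =
+ *     k.register(classOf[MyRecord])
+ * }
+ * }}}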
*/ +trait AnnotatedKryoRegistrar diff --git a/scripts/multijoin.py b/scripts/multijoin.py index c882b49b85..5a8f6050af 100755 --- a/scripts/multijoin.py +++ b/scripts/multijoin.py @@ -48,9 +48,9 @@ def wrap(wrapper, x): def common(out, vals): - print(' implicit val keyCoder = a.keyCoder', file=out) + print(' implicit val keyCoder: Coder[KEY] = a.keyCoder', file=out) print(' implicit val (%s) = (%s)' % ( - ', '.join('coder' + x for x in vals), + ', '.join('coder' + x + ': Coder[' + x + ']' for x in vals), ', '.join('%s.valueCoder' % x.lower() for x in vals)), file=out) @@ -187,10 +187,12 @@ def main(out): package com.spotify.scio.util import com.spotify.scio.values.SCollection + import com.spotify.scio.coders.Coder import org.apache.beam.sdk.transforms.join.{CoGroupByKey, KeyedPCollectionTuple} # NOQA import org.apache.beam.sdk.values.TupleTag import scala.jdk.CollectionConverters._ + import com.spotify.scio.values.SCollection.makePairSCollectionFunctions trait MultiJoin extends Serializable { From b587518e235712a8fd3e74ea715a32cefd508e6c Mon Sep 17 00:00:00 2001 From: Julien Tournay Date: Wed, 9 Dec 2020 11:10:51 +0100 Subject: [PATCH 03/56] Make scio-avro compile --- build.sbt | 43 ++-- .../scio/avro/types/AvroTypeMacros.scala | 206 ++++++++++++++++++ .../scio/avro/types/ConverterProvider.scala | 0 .../spotify/scio/avro/types/MacroUtil.scala | 0 .../scio/avro/types/SchemaProvider.scala | 0 .../scio/avro/types/TypeProvider.scala | 0 .../scio/avro/types/AvroTypeMacros.scala | 196 +++++++++++++++++ .../spotify/scio/avro/types/AvroType.scala | 157 +------------ 8 files changed, 428 insertions(+), 174 deletions(-) create mode 100644 scio-avro/src/main/scala-2/com/spotify/scio/avro/types/AvroTypeMacros.scala rename scio-avro/src/main/{scala => scala-2}/com/spotify/scio/avro/types/ConverterProvider.scala (100%) rename scio-avro/src/main/{scala => scala-2}/com/spotify/scio/avro/types/MacroUtil.scala (100%) rename scio-avro/src/main/{scala => scala-2}/com/spotify/scio/avro/types/SchemaProvider.scala (100%) rename scio-avro/src/main/{scala => scala-2}/com/spotify/scio/avro/types/TypeProvider.scala (100%) create mode 100644 scio-avro/src/main/scala-3/com/spotify/scio/avro/types/AvroTypeMacros.scala diff --git a/build.sbt b/build.sbt index b608eb884d..1157058b8c 100644 --- a/build.sbt +++ b/build.sbt @@ -143,7 +143,7 @@ val commonSettings = Def headerLicense := Some(HeaderLicense.ALv2("2020", "Spotify AB")), headerMappings := headerMappings.value + (HeaderFileType.scala -> keepExistingHeader, HeaderFileType.java -> keepExistingHeader), scalaVersion := "2.13.5", - crossScalaVersions := Seq("2.12.13", scalaVersion.value), + crossScalaVersions := Seq("2.12.12", scalaVersion.value, "3.0.0-M2"), scalacOptions ++= Scalac.commonsOptions.value, Compile / doc / scalacOptions := Scalac.docOptions.value, javacOptions ++= Seq("-source", "1.8", "-target", "1.8", "-Xlint:unchecked"), @@ -399,9 +399,9 @@ lazy val `scio-core`: Project = project libraryDependencies ++= Seq( "com.esotericsoftware" % "kryo-shaded" % kryoVersion, "com.fasterxml.jackson.core" % "jackson-databind" % jacksonVersion, - ("com.fasterxml.jackson.module" %% "jackson-module-scala" % jacksonVersion).withDottyCompat(scalaVersion.value), - ("com.github.alexarchambault" %% "case-app" % caseappVersion).withDottyCompat(scalaVersion.value), - ("com.github.alexarchambault" %% "case-app-annotations" % caseappVersion).withDottyCompat(scalaVersion.value), + "com.fasterxml.jackson.module" %% "jackson-module-scala" % jacksonVersion, + 
"com.github.alexarchambault" %% "case-app" % caseappVersion, + "com.github.alexarchambault" %% "case-app-annotations" % caseappVersion, "com.github.ben-manes.caffeine" % "caffeine" % caffeineVersion % "provided", "com.google.api-client" % "google-api-client" % googleClientsVersion, "com.google.apis" % "google-api-services-dataflow" % googleApiServicesDataflow, @@ -412,9 +412,9 @@ lazy val `scio-core`: Project = project "com.google.protobuf" % "protobuf-java" % protobufVersion, "com.twitter" % "chill-java" % chillVersion, "com.twitter" % "chill-protobuf" % chillVersion, - ("com.twitter" %% "algebird-core" % algebirdVersion).withDottyCompat(scalaVersion.value), - ("com.twitter" %% "chill" % chillVersion).withDottyCompat(scalaVersion.value), - ("com.twitter" %% "chill-algebird" % chillVersion).withDottyCompat(scalaVersion.value), + "com.twitter" %% "algebird-core" % algebirdVersion, + "com.twitter" %% "chill" % chillVersion, + "com.twitter" %% "chill-algebird" % chillVersion, "commons-io" % "commons-io" % commonsIoVersion, "io.grpc" % "grpc-auth" % grpcVersion, "io.grpc" % "grpc-core" % grpcVersion, @@ -423,7 +423,7 @@ lazy val `scio-core`: Project = project "io.grpc" % "grpc-stub" % grpcVersion, "io.netty" % "netty-handler" % nettyVersion, "joda-time" % "joda-time" % jodaTimeVersion, - ("me.lyh" %% "protobuf-generic" % protobufGenericVersion).withDottyCompat(scalaVersion.value), + ("me.lyh" %% "protobuf-generic" % protobufGenericVersion), "org.apache.avro" % "avro" % avroVersion, "org.apache.beam" % "beam-runners-core-construction-java" % beamVersion, "org.apache.beam" % "beam-runners-google-cloud-dataflow-java" % beamVersion % Provided, @@ -443,13 +443,11 @@ lazy val `scio-core`: Project = project "org.apache.commons" % "commons-math3" % commonsMath3Version, "org.scalatest" %% "scalatest" % scalatestVersion % Test, "org.slf4j" % "slf4j-api" % slf4jVersion, - ("org.typelevel" %% "algebra" % algebraVersion).withDottyCompat(scalaVersion.value), - ("org.scala-lang.modules" %% "scala-collection-compat" % scalaCollectionCompatVersion).withDottyCompat(scalaVersion.value) - ), + "org.typelevel" %% "algebra" % algebraVersion, + "org.scala-lang.modules" %% "scala-collection-compat" % scalaCollectionCompatVersion + ).map(_.withDottyCompat(scalaVersion.value)), buildInfoKeys := Seq[BuildInfoKey](scalaVersion, version, "beamVersion" -> beamVersion), buildInfoPackage := "com.spotify.scio", - // Scala3 setting - crossScalaVersions += "3.0.0-M2", libraryDependencies ++= { if (!isDotty.value) Seq( @@ -485,7 +483,10 @@ lazy val `scio-sql`: Project = project "org.apache.beam" % "beam-sdks-java-extensions-sql" % beamVersion, "org.apache.commons" % "commons-lang3" % commonsLang3Version, "org.apache.beam" % "beam-vendor-calcite-1_20_0" % beamVendorVersion - ), + ).map(_.withDottyCompat(scalaVersion.value)), + scalacOptions ++= { + if (isDotty.value) Seq("-source:3.0-migration") else Nil + }, Test / compileOrder := CompileOrder.JavaThenScala ) .dependsOn( @@ -535,13 +536,16 @@ lazy val `scio-test`: Project = project "org.hamcrest" % "hamcrest" % hamcrestVersion, "org.scalactic" %% "scalactic" % "3.2.8", "com.propensive" %% "magnolia" % magnoliaVersion - ), + )map(_.withDottyCompat(scalaVersion.value)), Test / compileOrder := CompileOrder.JavaThenScala, Test / testGrouping := splitTests( (Test / definedTests).value, List("com.spotify.scio.ArgsTest"), (Test / forkOptions).value - ) + ), + scalacOptions ++= { + if (isDotty.value) Seq("-source:3.0-migration") else Nil + }, ) .configs(IntegrationTest) .dependsOn( @@ 
-598,7 +602,10 @@ lazy val `scio-avro`: Project = project "org.scalacheck" %% "scalacheck" % scalacheckVersion % "test,it", "com.spotify" %% "magnolify-cats" % magnolifyVersion % "test", "com.spotify" %% "magnolify-scalacheck" % magnolifyVersion % "test" - ) + ).map(_.withDottyCompat(scalaVersion.value)), + scalacOptions ++= { + if (isDotty.value) Seq("-source:3.0-migration") else Nil + }, ) .dependsOn( `scio-core` % "compile;it->it" @@ -910,7 +917,7 @@ lazy val `scio-schemas`: Project = project libraryDependencies ++= Seq( "org.scala-lang.modules" %% "scala-collection-compat" % scalaCollectionCompatVersion, "org.apache.avro" % "avro" % avroVersion - ), + ).map(_.withDottyCompat(scalaVersion.value)), Compile / sourceDirectories := (Compile / sourceDirectories).value .filterNot(_.getPath.endsWith("/src_managed/main")), Compile / managedSourceDirectories := (Compile / managedSourceDirectories).value diff --git a/scio-avro/src/main/scala-2/com/spotify/scio/avro/types/AvroTypeMacros.scala b/scio-avro/src/main/scala-2/com/spotify/scio/avro/types/AvroTypeMacros.scala new file mode 100644 index 0000000000..e2e6657d9f --- /dev/null +++ b/scio-avro/src/main/scala-2/com/spotify/scio/avro/types/AvroTypeMacros.scala @@ -0,0 +1,206 @@ +/* + * Copyright 2019 Spotify AB. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package com.spotify.scio.avro.types + +import org.apache.avro.Schema +import org.apache.avro.generic.GenericRecord + +import scala.annotation.{compileTimeOnly, StaticAnnotation} +import scala.reflect.runtime.universe._ + +/** + * Macro annotations and converter generators for Avro types. + * + * The following table lists Avro types and their Scala counterparts. + * {{{ + * Avro type Scala type + * BOOLEAN Boolean + * LONG Long + * INT Int + * DOUBLE Double + * FLOAT Float + * STRING, ENUM String + * BYTES com.google.protobuf.ByteString + * ARRAY List[T] + * MAP Map[String, T] + * UNION Option[T] + * RECORD Nested case class + * }}} + * + * @groupname trait Traits for annotated types + * @groupname annotation Type annotations + * @groupname converters Converters + * @groupname Ungrouped Other Members + */ +trait AvroType { + + /** + * Macro annotation for an Avro schema. + * + * Generate case classes for an Avro schema. Note that `schema` must be a single string literal + * of the JSON schema with optional `.stripMargin` at the end. For example: + * + * {{{ + * @AvroType.fromSchema( + * """ + * |{ + * | "type": "record", + * | "namespace": "com.spotify.namespace", + * | "name": "RecordName", + * | "fields": [ + * | { "name": "boolF", "type": "boolean"}, + * | { "name": "intF", "type": "int"}, + * | { "name": "longF", "type": "long"}, + * | { "name": "floatF", "type": "float"}, + * | { "name": "doubleF", "type": "double"}, + * | { "name": "stringF", "type": "string"}, + * | { "name": "byteStringF", "type": "bytes"} + * | ] + * |} + * """.stripMargin) + * class MyRecord + * }}} + * + * Also generate a companion object with convenience methods. 
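+   *
+   * As a sketch of the generated convenience methods (assuming the `MyRecord` class
+   * above; the exact surface is defined by the generated companion):
+   *
+   * {{{
+   * val schema: org.apache.avro.Schema = MyRecord.schema
+   * val to: MyRecord => org.apache.avro.generic.GenericRecord = MyRecord.toGenericRecord
+   * val from: org.apache.avro.generic.GenericRecord => MyRecord = MyRecord.fromGenericRecord
+   * }}}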
+   * @group annotation
+   */
+  @compileTimeOnly(
+    "enable macro paradise (2.12) or -Ymacro-annotations (2.13) to expand macro annotations"
+  )
+  class fromSchema(schema: String) extends StaticAnnotation {
+    def macroTransform(annottees: Any*): Any = macro TypeProvider.schemaImpl
+  }
+
+  /**
+   * Macro annotation for a path containing Avro files.
+   *
+   * Generates case classes from a path which contains Avro files.
+   * The path needs to represent a folder, hence it always needs to end with `/`.
+   * The folder must contain at least one file matching the `*.avro` glob.
+   *
+   * Note that path must be a single string literal with optional `.stripMargin` at the end.
+   * For example:
+   *
+   * {{{
+   * @AvroType.fromPath("gs://myBucket/myFolder/")
+   * class MyRecord
+   * }}}
+   *
+   * or
+   *
+   * {{{
+   * @AvroType.fromPath(
+   *   """
+   *     | gs://myBucket/myFolder/
+   *     | myLooooooooooooooooongPath/
+   *   """.stripMargin)
+   * class MyRecord
+   * }}}
+   *
+   * Globs are supported as a part of the path. For example:
+   *
+   * {{{
+   * @AvroType.fromPath("gs://myBucket{@literal /}*{@literal /}*{@literal /}*{@literal /}")
+   * class MyRecord
+   * }}}
+   *
+   * Also generate a companion object with convenience methods.
+   * @group annotation
+   */
+  @compileTimeOnly(
+    "enable macro paradise (2.12) or -Ymacro-annotations (2.13) to expand macro annotations"
+  )
+  class fromPath(folderGlob: String) extends StaticAnnotation {
+    def macroTransform(annottees: Any*): Any = macro TypeProvider.pathImpl
+  }
+
+  /**
+   * Macro annotation for a file which contains an Avro schema.
+   *
+   * Generate case classes for an Avro schema. The file can be either local or remote.
+   * For example, the file can be located on Google Cloud Storage (GCS):
+   *
+   * {{{
+   * @AvroType.fromSchemaFile("gs://myBucket/myFolder/schema-file.avsc")
+   * class MyRecord
+   * }}}
+   *
+   * For local files, you need to provide either an absolute path,
+   * or a path relative to the project root directory. For example:
+   *
+   * {{{
+   * @AvroType.fromSchemaFile("sub-project/src/main/avro/schema-file.avsc")
+   * class MyRecord
+   * }}}
+   *
+   * Also generate a companion object with convenience methods.
+   * @group annotation
+   */
+  @compileTimeOnly(
+    "enable macro paradise (2.12) or -Ymacro-annotations (2.13) to expand macro annotations"
+  )
+  class fromSchemaFile(schemaFile: String) extends StaticAnnotation {
+    def macroTransform(annottees: Any*): Any = macro TypeProvider.schemaFileImpl
+  }
+
+  /**
+   * Macro annotation for case classes to be saved to Avro files.
+   *
+   * Note that this annotation does not generate case classes, only a companion object with
+   * convenience methods. You need to define a complete case class as the output record. For
+   * example:
+   *
+   * {{{
+   * @AvroType.toSchema
+   * case class Result(name: Option[String] = None, score: Option[Double] = None)
+   * }}}
+   *
+   * It is recommended that you define all of your fields as Option.
+   * This way you can stop populating them in the future if you notice that you don't need them.
+   *
+   * This macro doesn't help you with schema evolution.
+   * It's up to you to follow the best practices on how to do evolution of your Avro schemas.
+   * The rule of thumb is to only add new fields, without removing the old ones.
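+   *
+   * For instance, a hypothetical evolution that follows this rule, adding only a new
+   * optional field:
+   *
+   * {{{
+   * // v1
+   * @AvroType.toSchema
+   * case class Result(name: Option[String] = None, score: Option[Double] = None)
+   *
+   * // v2: adds `rank`, keeps all old fields
+   * @AvroType.toSchema
+   * case class Result(
+   *   name: Option[String] = None,
+   *   score: Option[Double] = None,
+   *   rank: Option[Int] = None
+   * )
+   * }}}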
+ * @group annotation + */ + @compileTimeOnly( + "enable macro paradise (2.12) or -Ymacro-annotations (2.13) to expand macro annotations" + ) + class toSchema extends StaticAnnotation { + def macroTransform(annottees: Any*): Any = macro TypeProvider.toSchemaImpl + } + + /** Generate [[org.apache.avro.Schema Schema]] for a case class. */ + def schemaOf[T: TypeTag]: Schema = SchemaProvider.schemaOf[T] + + /** + * Generate a converter function from [[org.apache.avro.generic.GenericRecord GenericRecord]] + * to the given case class `T`. + * @group converters + */ + def fromGenericRecord[T]: GenericRecord => T = + macro ConverterProvider.fromGenericRecordImpl[T] + + /** + * Generate a converter function from the given case class `T` to + * [[org.apache.avro.generic.GenericRecord GenericRecord]]. + * @group converters + */ + def toGenericRecord[T]: T => GenericRecord = + macro ConverterProvider.toGenericRecordImpl[T] +} diff --git a/scio-avro/src/main/scala/com/spotify/scio/avro/types/ConverterProvider.scala b/scio-avro/src/main/scala-2/com/spotify/scio/avro/types/ConverterProvider.scala similarity index 100% rename from scio-avro/src/main/scala/com/spotify/scio/avro/types/ConverterProvider.scala rename to scio-avro/src/main/scala-2/com/spotify/scio/avro/types/ConverterProvider.scala diff --git a/scio-avro/src/main/scala/com/spotify/scio/avro/types/MacroUtil.scala b/scio-avro/src/main/scala-2/com/spotify/scio/avro/types/MacroUtil.scala similarity index 100% rename from scio-avro/src/main/scala/com/spotify/scio/avro/types/MacroUtil.scala rename to scio-avro/src/main/scala-2/com/spotify/scio/avro/types/MacroUtil.scala diff --git a/scio-avro/src/main/scala/com/spotify/scio/avro/types/SchemaProvider.scala b/scio-avro/src/main/scala-2/com/spotify/scio/avro/types/SchemaProvider.scala similarity index 100% rename from scio-avro/src/main/scala/com/spotify/scio/avro/types/SchemaProvider.scala rename to scio-avro/src/main/scala-2/com/spotify/scio/avro/types/SchemaProvider.scala diff --git a/scio-avro/src/main/scala/com/spotify/scio/avro/types/TypeProvider.scala b/scio-avro/src/main/scala-2/com/spotify/scio/avro/types/TypeProvider.scala similarity index 100% rename from scio-avro/src/main/scala/com/spotify/scio/avro/types/TypeProvider.scala rename to scio-avro/src/main/scala-2/com/spotify/scio/avro/types/TypeProvider.scala diff --git a/scio-avro/src/main/scala-3/com/spotify/scio/avro/types/AvroTypeMacros.scala b/scio-avro/src/main/scala-3/com/spotify/scio/avro/types/AvroTypeMacros.scala new file mode 100644 index 0000000000..682fba6521 --- /dev/null +++ b/scio-avro/src/main/scala-3/com/spotify/scio/avro/types/AvroTypeMacros.scala @@ -0,0 +1,196 @@ +/* + * Copyright 2019 Spotify AB. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package com.spotify.scio.avro.types + +import org.apache.avro.Schema +import org.apache.avro.generic.GenericRecord + +import scala.annotation.{compileTimeOnly, StaticAnnotation} +import scala.reflect.runtime.universe._ + +/** + * Macro annotations and converter generators for Avro types. + * + * The following table lists Avro types and their Scala counterparts. + * {{{ + * Avro type Scala type + * BOOLEAN Boolean + * LONG Long + * INT Int + * DOUBLE Double + * FLOAT Float + * STRING, ENUM String + * BYTES com.google.protobuf.ByteString + * ARRAY List[T] + * MAP Map[String, T] + * UNION Option[T] + * RECORD Nested case class + * }}} + * + * @groupname trait Traits for annotated types + * @groupname annotation Type annotations + * @groupname converters Converters + * @groupname Ungrouped Other Members + */ +trait AvroTypeMacros { + + /** + * Macro annotation for an Avro schema. + * + * Generate case classes for an Avro schema. Note that `schema` must be a single string literal + * of the JSON schema with optional `.stripMargin` at the end. For example: + * + * {{{ + * @AvroType.fromSchema( + * """ + * |{ + * | "type": "record", + * | "namespace": "com.spotify.namespace", + * | "name": "RecordName", + * | "fields": [ + * | { "name": "boolF", "type": "boolean"}, + * | { "name": "intF", "type": "int"}, + * | { "name": "longF", "type": "long"}, + * | { "name": "floatF", "type": "float"}, + * | { "name": "doubleF", "type": "double"}, + * | { "name": "stringF", "type": "string"}, + * | { "name": "byteStringF", "type": "bytes"} + * | ] + * |} + * """.stripMargin) + * class MyRecord + * }}} + * + * Also generate a companion object with convenience methods. + * @group annotation + */ + @compileTimeOnly("") + class fromSchema(schema: String) extends StaticAnnotation { + def macroTransform(annottees: Any*): Any = ??? + } + + /** + * Macro annotation for a path containing Avro files. + * + * Generates case classes from a path which contains Avro files. + * Path needs to represent a folder, hence it always needs to end with `/`. + * Inside of the folder needs to exist at least one file matching `*.avro` glob. + * + * Note that path must be a single string literal with optional `.stripMargin` at the end. + * For example: + * + * {{{ + * @AvroType.fromPath("gs://myBucket/myFolder/") + * class MyRecord + * }}} + * + * or + * + * {{{ + * @AvroType.fromPath( + * """ + * | gs://myBucket/myFolder/ + * | myLooooooooooooooooongPath/ + * """.stripMargin) + * class MyRecord + * }}} + * + * Globs are supported as a part of the path. For example: + * + * {{{ + * @AvroType.fromPath("gs://myBucket{@literal /}*{@literal /}*{@literal /}*{@literal /}") + * class MyRecord + * }}} + * + * Also generate a companion object with convenience methods. + * @group annotation + */ + @compileTimeOnly("") + class fromPath(folderGlob: String) extends StaticAnnotation { + def macroTransform(annottees: Any*): Any = ??? + } + + /** + * Macro annotation for a file which contains Avro schema. + * + * Generate case classes for an Avro schema. File can be either local or remote files. + * For example file can be located on Google Cloud Storage (GCS): + * + * {{{ + * @AvroType.fromSchemaFile("gs://myBucket/myFolder/schema-file.avsc") + * class MyRecord + * }}} + * + * For local files, you need to either provide absolute path, + * or path relative to project root directory. 
For example: + * + * {{{ + * @AvroType.fromSchemaFile("sub-project/src/main/avro/schema-file.avsc") + * class MyRecord + * }}} + * + * Also generate a companion object with convenience methods. + * @group annotation + */ + @compileTimeOnly("") + class fromSchemaFile(schemaFile: String) extends StaticAnnotation { + def macroTransform(annottees: Any*): Any = ??? + } + + /** + * Macro annotation for case classes to be saved to Avro files. + * + * Note that this annotation does not generate case classes, only a companion object with + * convenience methods. You need to define a complete case class for as output record. For + * example: + * + * {{{ + * @AvroType.toSchema + * case class Result(name: Option[String] = None, score: Option[Double] = None) + * }}} + * + * It is recommended that you define all of your fields as Option. + * This way you could stop populating them in the future if you notice that you don't need them. + * + * This macro doesn't help you with schema evolution. + * It's up to you to follow the best practices on how to do evolution of your Avro schemas. + * Rule of thumb is to only add new fields, without removing the old ones. + * @group annotation + */ + @compileTimeOnly("") + class toSchema extends StaticAnnotation { + def macroTransform(annottees: Any*): Any = ??? + } + + /** Generate [[org.apache.avro.Schema Schema]] for a case class. */ + def schemaOf[T: TypeTag]: Schema = ??? + + /** + * Generate a converter function from [[org.apache.avro.generic.GenericRecord GenericRecord]] + * to the given case class `T`. + * @group converters + */ + def fromGenericRecord[T]: GenericRecord => T = ??? + + /** + * Generate a converter function from the given case class `T` to + * [[org.apache.avro.generic.GenericRecord GenericRecord]]. + * @group converters + */ + def toGenericRecord[T]: T => GenericRecord = ??? +} diff --git a/scio-avro/src/main/scala/com/spotify/scio/avro/types/AvroType.scala b/scio-avro/src/main/scala/com/spotify/scio/avro/types/AvroType.scala index 4a21d3b1b5..07cb824757 100644 --- a/scio-avro/src/main/scala/com/spotify/scio/avro/types/AvroType.scala +++ b/scio-avro/src/main/scala/com/spotify/scio/avro/types/AvroType.scala @@ -47,143 +47,7 @@ import scala.reflect.runtime.universe._ * @groupname converters Converters * @groupname Ungrouped Other Members */ -object AvroType { - - /** - * Macro annotation for an Avro schema. - * - * Generate case classes for an Avro schema. Note that `schema` must be a single string literal - * of the JSON schema with optional `.stripMargin` at the end. For example: - * - * {{{ - * @AvroType.fromSchema( - * """ - * |{ - * | "type": "record", - * | "namespace": "com.spotify.namespace", - * | "name": "RecordName", - * | "fields": [ - * | { "name": "boolF", "type": "boolean"}, - * | { "name": "intF", "type": "int"}, - * | { "name": "longF", "type": "long"}, - * | { "name": "floatF", "type": "float"}, - * | { "name": "doubleF", "type": "double"}, - * | { "name": "stringF", "type": "string"}, - * | { "name": "byteStringF", "type": "bytes"} - * | ] - * |} - * """.stripMargin) - * class MyRecord - * }}} - * - * Also generate a companion object with convenience methods. - * @group annotation - */ - @compileTimeOnly( - "enable macro paradise (2.12) or -Ymacro-annotations (2.13) to expand macro annotations" - ) - class fromSchema(schema: String) extends StaticAnnotation { - def macroTransform(annottees: Any*): Any = macro TypeProvider.schemaImpl - } - - /** - * Macro annotation for a path containing Avro files. 
- * - * Generates case classes from a path which contains Avro files. - * Path needs to represent a folder, hence it always needs to end with `/`. - * Inside of the folder needs to exist at least one file matching `*.avro` glob. - * - * Note that path must be a single string literal with optional `.stripMargin` at the end. - * For example: - * - * {{{ - * @AvroType.fromPath("gs://myBucket/myFolder/") - * class MyRecord - * }}} - * - * or - * - * {{{ - * @AvroType.fromPath( - * """ - * | gs://myBucket/myFolder/ - * | myLooooooooooooooooongPath/ - * """.stripMargin) - * class MyRecord - * }}} - * - * Globs are supported as a part of the path. For example: - * - * {{{ - * @AvroType.fromPath("gs://myBucket{@literal /}*{@literal /}*{@literal /}*{@literal /}") - * class MyRecord - * }}} - * - * Also generate a companion object with convenience methods. - * @group annotation - */ - @compileTimeOnly( - "enable macro paradise (2.12) or -Ymacro-annotations (2.13) to expand macro annotations" - ) - class fromPath(folderGlob: String) extends StaticAnnotation { - def macroTransform(annottees: Any*): Any = macro TypeProvider.pathImpl - } - - /** - * Macro annotation for a file which contains Avro schema. - * - * Generate case classes for an Avro schema. File can be either local or remote files. - * For example file can be located on Google Cloud Storage (GCS): - * - * {{{ - * @AvroType.fromSchemaFile("gs://myBucket/myFolder/schema-file.avsc") - * class MyRecord - * }}} - * - * For local files, you need to either provide absolute path, - * or path relative to project root directory. For example: - * - * {{{ - * @AvroType.fromSchemaFile("sub-project/src/main/avro/schema-file.avsc") - * class MyRecord - * }}} - * - * Also generate a companion object with convenience methods. - * @group annotation - */ - @compileTimeOnly( - "enable macro paradise (2.12) or -Ymacro-annotations (2.13) to expand macro annotations" - ) - class fromSchemaFile(schemaFile: String) extends StaticAnnotation { - def macroTransform(annottees: Any*): Any = macro TypeProvider.schemaFileImpl - } - - /** - * Macro annotation for case classes to be saved to Avro files. - * - * Note that this annotation does not generate case classes, only a companion object with - * convenience methods. You need to define a complete case class for as output record. For - * example: - * - * {{{ - * @AvroType.toSchema - * case class Result(name: Option[String] = None, score: Option[Double] = None) - * }}} - * - * It is recommended that you define all of your fields as Option. - * This way you could stop populating them in the future if you notice that you don't need them. - * - * This macro doesn't help you with schema evolution. - * It's up to you to follow the best practices on how to do evolution of your Avro schemas. - * Rule of thumb is to only add new fields, without removing the old ones. - * @group annotation - */ - @compileTimeOnly( - "enable macro paradise (2.12) or -Ymacro-annotations (2.13) to expand macro annotations" - ) - class toSchema extends StaticAnnotation { - def macroTransform(annottees: Any*): Any = macro TypeProvider.toSchemaImpl - } +object AvroType extends AvroTypeMacros { /** * Trait for generated companion objects of case classes. @@ -213,25 +77,6 @@ object AvroType { */ trait HasAvroAnnotation - /** Generate [[org.apache.avro.Schema Schema]] for a case class. 
*/ - def schemaOf[T: TypeTag]: Schema = SchemaProvider.schemaOf[T] - - /** - * Generate a converter function from [[org.apache.avro.generic.GenericRecord GenericRecord]] - * to the given case class `T`. - * @group converters - */ - def fromGenericRecord[T]: GenericRecord => T = - macro ConverterProvider.fromGenericRecordImpl[T] - - /** - * Generate a converter function from the given case class `T` to - * [[org.apache.avro.generic.GenericRecord GenericRecord]]. - * @group converters - */ - def toGenericRecord[T]: T => GenericRecord = - macro ConverterProvider.toGenericRecordImpl[T] - /** Create a new AvroType instance. */ def apply[T: TypeTag]: AvroType[T] = new AvroType[T] } From b4b6f9595139372f3b81cebd5afc0f37fecc6d11 Mon Sep 17 00:00:00 2001 From: Julien Tournay Date: Wed, 9 Dec 2020 12:03:46 +0100 Subject: [PATCH 04/56] Make scio-sql compile --- .../com/spotify/scio/sql/Query1.scala | 0 .../com/spotify/scio/sql/Query10.scala | 0 .../com/spotify/scio/sql/Query2.scala | 0 .../com/spotify/scio/sql/Query3.scala | 0 .../com/spotify/scio/sql/Query4.scala | 0 .../com/spotify/scio/sql/Query5.scala | 0 .../com/spotify/scio/sql/Query6.scala | 0 .../com/spotify/scio/sql/Query7.scala | 0 .../com/spotify/scio/sql/Query8.scala | 0 .../com/spotify/scio/sql/Query9.scala | 0 .../scio/sql/TypedSQLInterpolator.scala | 201 ++++++++++++++++++ .../scala-3/com/spotify/scio/sql/Query1.scala | 77 +++++++ .../com/spotify/scio/sql/Query10.scala | 196 +++++++++++++++++ .../scala-3/com/spotify/scio/sql/Query2.scala | 95 +++++++++ .../scala-3/com/spotify/scio/sql/Query3.scala | 107 ++++++++++ .../scala-3/com/spotify/scio/sql/Query4.scala | 118 ++++++++++ .../scala-3/com/spotify/scio/sql/Query5.scala | 127 +++++++++++ .../scala-3/com/spotify/scio/sql/Query6.scala | 140 ++++++++++++ .../scala-3/com/spotify/scio/sql/Query7.scala | 155 ++++++++++++++ .../scala-3/com/spotify/scio/sql/Query8.scala | 164 ++++++++++++++ .../scala-3/com/spotify/scio/sql/Query9.scala | 184 ++++++++++++++++ .../scio/sql/TypedSQLInterpolator.scala | 8 + .../main/scala/com/spotify/scio/sql/Sql.scala | 43 ---- .../spotify/scio/sql/SqlInterpolator.scala | 190 +---------------- 24 files changed, 1574 insertions(+), 231 deletions(-) rename scio-sql/src/main/{scala => scala-2}/com/spotify/scio/sql/Query1.scala (100%) rename scio-sql/src/main/{scala => scala-2}/com/spotify/scio/sql/Query10.scala (100%) rename scio-sql/src/main/{scala => scala-2}/com/spotify/scio/sql/Query2.scala (100%) rename scio-sql/src/main/{scala => scala-2}/com/spotify/scio/sql/Query3.scala (100%) rename scio-sql/src/main/{scala => scala-2}/com/spotify/scio/sql/Query4.scala (100%) rename scio-sql/src/main/{scala => scala-2}/com/spotify/scio/sql/Query5.scala (100%) rename scio-sql/src/main/{scala => scala-2}/com/spotify/scio/sql/Query6.scala (100%) rename scio-sql/src/main/{scala => scala-2}/com/spotify/scio/sql/Query7.scala (100%) rename scio-sql/src/main/{scala => scala-2}/com/spotify/scio/sql/Query8.scala (100%) rename scio-sql/src/main/{scala => scala-2}/com/spotify/scio/sql/Query9.scala (100%) create mode 100644 scio-sql/src/main/scala-2/com/spotify/scio/sql/TypedSQLInterpolator.scala create mode 100644 scio-sql/src/main/scala-3/com/spotify/scio/sql/Query1.scala create mode 100644 scio-sql/src/main/scala-3/com/spotify/scio/sql/Query10.scala create mode 100644 scio-sql/src/main/scala-3/com/spotify/scio/sql/Query2.scala create mode 100644 scio-sql/src/main/scala-3/com/spotify/scio/sql/Query3.scala create mode 100644 scio-sql/src/main/scala-3/com/spotify/scio/sql/Query4.scala 
create mode 100644 scio-sql/src/main/scala-3/com/spotify/scio/sql/Query5.scala create mode 100644 scio-sql/src/main/scala-3/com/spotify/scio/sql/Query6.scala create mode 100644 scio-sql/src/main/scala-3/com/spotify/scio/sql/Query7.scala create mode 100644 scio-sql/src/main/scala-3/com/spotify/scio/sql/Query8.scala create mode 100644 scio-sql/src/main/scala-3/com/spotify/scio/sql/Query9.scala create mode 100644 scio-sql/src/main/scala-3/com/spotify/scio/sql/TypedSQLInterpolator.scala diff --git a/scio-sql/src/main/scala/com/spotify/scio/sql/Query1.scala b/scio-sql/src/main/scala-2/com/spotify/scio/sql/Query1.scala similarity index 100% rename from scio-sql/src/main/scala/com/spotify/scio/sql/Query1.scala rename to scio-sql/src/main/scala-2/com/spotify/scio/sql/Query1.scala diff --git a/scio-sql/src/main/scala/com/spotify/scio/sql/Query10.scala b/scio-sql/src/main/scala-2/com/spotify/scio/sql/Query10.scala similarity index 100% rename from scio-sql/src/main/scala/com/spotify/scio/sql/Query10.scala rename to scio-sql/src/main/scala-2/com/spotify/scio/sql/Query10.scala diff --git a/scio-sql/src/main/scala/com/spotify/scio/sql/Query2.scala b/scio-sql/src/main/scala-2/com/spotify/scio/sql/Query2.scala similarity index 100% rename from scio-sql/src/main/scala/com/spotify/scio/sql/Query2.scala rename to scio-sql/src/main/scala-2/com/spotify/scio/sql/Query2.scala diff --git a/scio-sql/src/main/scala/com/spotify/scio/sql/Query3.scala b/scio-sql/src/main/scala-2/com/spotify/scio/sql/Query3.scala similarity index 100% rename from scio-sql/src/main/scala/com/spotify/scio/sql/Query3.scala rename to scio-sql/src/main/scala-2/com/spotify/scio/sql/Query3.scala diff --git a/scio-sql/src/main/scala/com/spotify/scio/sql/Query4.scala b/scio-sql/src/main/scala-2/com/spotify/scio/sql/Query4.scala similarity index 100% rename from scio-sql/src/main/scala/com/spotify/scio/sql/Query4.scala rename to scio-sql/src/main/scala-2/com/spotify/scio/sql/Query4.scala diff --git a/scio-sql/src/main/scala/com/spotify/scio/sql/Query5.scala b/scio-sql/src/main/scala-2/com/spotify/scio/sql/Query5.scala similarity index 100% rename from scio-sql/src/main/scala/com/spotify/scio/sql/Query5.scala rename to scio-sql/src/main/scala-2/com/spotify/scio/sql/Query5.scala diff --git a/scio-sql/src/main/scala/com/spotify/scio/sql/Query6.scala b/scio-sql/src/main/scala-2/com/spotify/scio/sql/Query6.scala similarity index 100% rename from scio-sql/src/main/scala/com/spotify/scio/sql/Query6.scala rename to scio-sql/src/main/scala-2/com/spotify/scio/sql/Query6.scala diff --git a/scio-sql/src/main/scala/com/spotify/scio/sql/Query7.scala b/scio-sql/src/main/scala-2/com/spotify/scio/sql/Query7.scala similarity index 100% rename from scio-sql/src/main/scala/com/spotify/scio/sql/Query7.scala rename to scio-sql/src/main/scala-2/com/spotify/scio/sql/Query7.scala diff --git a/scio-sql/src/main/scala/com/spotify/scio/sql/Query8.scala b/scio-sql/src/main/scala-2/com/spotify/scio/sql/Query8.scala similarity index 100% rename from scio-sql/src/main/scala/com/spotify/scio/sql/Query8.scala rename to scio-sql/src/main/scala-2/com/spotify/scio/sql/Query8.scala diff --git a/scio-sql/src/main/scala/com/spotify/scio/sql/Query9.scala b/scio-sql/src/main/scala-2/com/spotify/scio/sql/Query9.scala similarity index 100% rename from scio-sql/src/main/scala/com/spotify/scio/sql/Query9.scala rename to scio-sql/src/main/scala-2/com/spotify/scio/sql/Query9.scala diff --git a/scio-sql/src/main/scala-2/com/spotify/scio/sql/TypedSQLInterpolator.scala 
b/scio-sql/src/main/scala-2/com/spotify/scio/sql/TypedSQLInterpolator.scala
new file mode 100644
index 0000000000..cbaec4bbf2
--- /dev/null
+++ b/scio-sql/src/main/scala-2/com/spotify/scio/sql/TypedSQLInterpolator.scala
@@ -0,0 +1,201 @@
+package com.spotify.scio.sql
+
+import com.spotify.scio.annotations.experimental
+import com.spotify.scio.values.SCollection
+import com.spotify.scio.schemas.Schema
+import org.apache.beam.sdk.values.TupleTag
+
+import scala.reflect.macros.{blackbox, whitebox}
+import com.spotify.scio.schemas.SchemaMacroHelpers
+
+import scala.reflect._
+
+trait TypedSQLInterpolator {
+  @experimental
+  def tsql(ps: Any*): SQLBuilder =
+    macro SqlInterpolatorMacro.builder
+}
+
+private trait SqlInterpolatorMacroHelpers {
+  val ctx: blackbox.Context
+  import ctx.universe._
+
+  def partsFromContext: List[Tree] =
+    ctx.prefix.tree match {
+      case Apply(_, Apply(_, xs: List[_]) :: Nil) => xs
+      case tree =>
+        ctx.abort(
+          ctx.enclosingPosition,
+          s"Implementation error. Expected tsql string interpolation, found $tree"
+        )
+    }
+
+  def buildSQLString(parts: List[Tree], tags: List[String]): String = {
+    val ps2 =
+      parts.map {
+        case Literal(Constant(s: String)) => s
+        case tree =>
+          ctx.abort(
+            ctx.enclosingPosition,
+            s"Implementation error. Expected Literal(Constant(...)), found $tree"
+          )
+      }
+
+    ps2
+      .zipAll(tags, "", "")
+      .foldLeft("") { case (a, (x, y)) => s"$a$x $y" }
+  }
+
+  def tagFor(t: Type, lbl: String): Tree =
+    q"new _root_.org.apache.beam.sdk.values.TupleTag[$t]($lbl)"
+}
+
+object SqlInterpolatorMacro {
+
+  /** This static annotation is used to pass (static) parameters to SqlInterpolatorMacro.expand */
+  final class SqlParts(parts: List[String], ps: Any*) extends scala.annotation.StaticAnnotation
+
+  // For some reason this method needs to be a whitebox macro
+  def builder(c: whitebox.Context)(ps: c.Expr[Any]*): c.Expr[SQLBuilder] = {
+    val h = new { val ctx: c.type = c } with SqlInterpolatorMacroHelpers
+    import h._
+    import c.universe._
+
+    val parts = partsFromContext
+
+    val className = TypeName(c.freshName("SQLBuilder"))
+    val fakeName = TypeName(c.freshName("FakeImpl"))
+
+    // Yo Dawg i herd you like macros...
+    //
+    // The following tree generates an anonymous class to lazily expand tsqlImpl.
+    //
+    // It basically acts as a curried form of the macro,
+    // where the interpolated String and its parameters are partially applied
+    // while the expected output type (and therefore the expected data schema) stays unapplied.
+    // Sadly, macros do not allow explicit parameter passing, so the following code would be illegal
+    // and the macro expansion would fail with: "term macros cannot override abstract methods"
+    //   def as[B: Schema]: SCollection[B] =
+    //     macro _root_.com.spotify.scio.sql.SqlInterpolatorMacro.expand[B](parts, ps)
+    //
+    // We work around the limitation by using a StaticAnnotation to pass static values,
+    // as described in: https://stackoverflow.com/a/25219644/2383092
+    //
+    // It is also illegal for a macro to override abstract methods,
+    // which is why an intermediate class $fakeName is introduced.
+    // Note that we HAVE TO extend SQLBuilder, otherwise `tsqlImpl` fails to see the concrete
+    // type of B, which also makes the macro expansion fail.
+    val tree =
+      q"""
+      {
+        import _root_.com.spotify.scio.values.SCollection
+        import _root_.com.spotify.scio.schemas.Schema
+        import _root_.scala.reflect.ClassTag
+
+        sealed trait $fakeName extends _root_.com.spotify.scio.sql.SQLBuilder {
+          def as[B: Schema: ClassTag]: SCollection[B] = ???
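+          // Never executed at runtime: the concrete subclass generated below
+          // overrides `as` with the actual macro expansion.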
+ } + + final class $className extends $fakeName { + import scala.language.experimental.macros + + @_root_.com.spotify.scio.sql.SqlInterpolatorMacro.SqlParts(List(..$parts),..$ps) + override def as[B: Schema: ClassTag]: SCollection[B] = + macro _root_.com.spotify.scio.sql.SqlInterpolatorMacro.expand[B] + } + new $className + } + """ + + c.Expr[SQLBuilder](tree) + } + + def expand[B: c.WeakTypeTag]( + c: blackbox.Context + )(schB: c.Expr[Schema[B]], classTag: c.Expr[ClassTag[B]]): c.Expr[SCollection[B]] = { + import c.universe._ + + val annotationParams = + c.macroApplication.symbol.annotations + .filter(_.tree.tpe <:< typeOf[SqlParts]) + .flatMap(_.tree.children.tail) + + if (annotationParams.isEmpty) + c.abort(c.enclosingPosition, "Annotation body not provided!") + + val ps: List[c.Expr[Any]] = + annotationParams.tail.map(t => c.Expr[Any](t)) + + val parts = + annotationParams.head match { + case Apply(TypeApply(Select(Select(_, _), TermName("apply")), _), pas) => + pas + case tree => + c.abort( + c.enclosingPosition, + s"Failed to extract SQL parts. Expected List(...), found $tree" + ) + } + + tsqlImpl[B](c)(parts, ps: _*)(classTag) + } + + def tsqlImpl[B: c.WeakTypeTag]( + c: blackbox.Context + )(parts: List[c.Tree], ps: c.Expr[Any]*)( + ct: c.Expr[ClassTag[B]] + ): c.Expr[SCollection[B]] = { + val h = new { val ctx: c.type = c } with SqlInterpolatorMacroHelpers with SchemaMacroHelpers + import h._ + import c.universe._ + + val (ss, other) = + ps.partition(_.actualType.typeSymbol == typeOf[SCollection[Any]].typeSymbol) + + other.headOption.foreach { t => + c.abort( + c.enclosingPosition, + s"tsql interpolation only support arguments of type SCollection. Found $t" + ) + } + + val scs: List[(Tree, Type)] = + ss.map { p => + val a = p.actualType.typeArgs.head + (p.tree, a) + }.toList + + val distinctSCollections = + scs.map { case (tree, t) => + (tree.symbol, (tree, t)) + }.toMap + + def toSCollectionName(s: Tree) = s.symbol.name.encodedName.toString + + distinctSCollections.values.toList match { + case list if list.size <= 10 => + val colls = list.map(_._1) + val types = list.map(_._2) + val tags = list.map(x => tagFor(x._2, toSCollectionName(x._1))) + val sql = buildSQLString(parts, scs.map(x => toSCollectionName(x._1))) + val implOut = inferImplicitSchema[B] + val implIn = types.flatMap(t => Seq(inferImplicitSchema(t), inferClassTag(t))) + + val queryTree = c.parse(s"_root_.com.spotify.scio.sql.Query${types.size}") + val q = q"$queryTree.typed[..${types :+ weakTypeOf[B]}]($sql, ..$tags)" + c.Expr[SCollection[B]](q""" + _root_.com.spotify.scio.sql.Sql + .from(..$colls)(..$implIn) + .queryAs($q)($implOut, $ct)""") + case d => + val ns = d.map(_._1).mkString(", ") + c.abort( + c.enclosingPosition, + s"Joins limited up to 10 SCollections, found ${d.size}: $ns" + ) + } + } +} + diff --git a/scio-sql/src/main/scala-3/com/spotify/scio/sql/Query1.scala b/scio-sql/src/main/scala-3/com/spotify/scio/sql/Query1.scala new file mode 100644 index 0000000000..025c2ecebe --- /dev/null +++ b/scio-sql/src/main/scala-3/com/spotify/scio/sql/Query1.scala @@ -0,0 +1,77 @@ +/* + * Copyright 2019 Spotify AB. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.spotify.scio.sql
+
+import com.spotify.scio.schemas._
+import com.spotify.scio.values.SCollection
+import org.apache.beam.sdk.extensions.sql.SqlTransform
+import org.apache.beam.sdk.extensions.sql.impl.ParseException
+import org.apache.beam.sdk.values._
+
+import scala.reflect.ClassTag
+
+final case class Query1[A, B](
+  query: String,
+  tag: TupleTag[A] = Sql.defaultTag[A],
+  udfs: List[Udf] = Nil
+)
+
+object Query1 {
+  /**
+   * Typecheck [[Query1]] q against the provided schemas.
+   * If the query correctly typechecks, it is simply returned as a [[Right]].
+   * If it fails, an error message is returned in a [[Left]].
+   */
+  def typecheck[A: Schema, B: Schema](q: Query1[A, B]): Either[String, Query1[A, B]] =
+    Queries
+      .typecheck(
+        q.query,
+        List((q.tag.getId, SchemaMaterializer.beamSchema[A])),
+        SchemaMaterializer.beamSchema[B],
+        q.udfs
+      )
+      .right
+      .map(_ => q)
+}
+
+final class SqlSCollection1[A: Schema: ClassTag](sc: SCollection[A]) {
+  def query(q: String, udfs: Udf*): SCollection[Row] =
+    query(Query1[A, Row](q, Sql.defaultTag, udfs = udfs.toList))
+
+  def query(q: Query1[A, Row]): SCollection[Row] =
+    sc.context.wrap {
+      val scWithSchema = Sql.setSchema(sc)
+      val transform =
+        SqlTransform
+          .query(q.query)
+          .withTableProvider(Sql.BeamProviderName, Sql.tableProvider(q.tag, scWithSchema))
+      val sqlTransform = Sql.registerUdf(transform, q.udfs: _*)
+      scWithSchema.applyInternal(sqlTransform)
+    }
+
+  def queryAs[R: Schema: ClassTag](q: String, udfs: Udf*): SCollection[R] =
+    queryAs(Query1[A, R](q, Sql.defaultTag, udfs = udfs.toList))
+
+  def queryAs[R: Schema: ClassTag](q: Query1[A, R]): SCollection[R] =
+    try {
+      query(Query1[A, Row](q.query, q.tag, q.udfs)).to(To.unchecked((_, i) => i))
+    } catch {
+      case e: ParseException =>
+        Query1.typecheck(q).fold(err => throw new RuntimeException(err, e), _ => throw e)
+    }
+}
diff --git a/scio-sql/src/main/scala-3/com/spotify/scio/sql/Query10.scala b/scio-sql/src/main/scala-3/com/spotify/scio/sql/Query10.scala
new file mode 100644
index 0000000000..272f68b312
--- /dev/null
+++ b/scio-sql/src/main/scala-3/com/spotify/scio/sql/Query10.scala
@@ -0,0 +1,202 @@
+/*
+ * Copyright 2019 Spotify AB.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+// !! generated with sql.py
+// !! DO NOT EDIT MANUALLY
+// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
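+//
+// The QueryN / SqlSCollectionN files in this directory follow the pattern of Query1 above,
+// scaled up to ten tagged inputs; sql.py emits one file per arity (1 to 10). A rough usage
+// sketch (hypothetical SCollections a and b of schema-derivable element types):
+//
+//   Sql.from(a, b).queryAs[R]("select ...", aTag, bTag)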
+ +package com.spotify.scio.sql + +import com.spotify.scio.schemas._ +import com.spotify.scio.values.SCollection +import org.apache.beam.sdk.extensions.sql.SqlTransform +import org.apache.beam.sdk.extensions.sql.impl.ParseException +import org.apache.beam.sdk.values._ + +import scala.reflect.ClassTag + +final case class Query10[A, B, C, D, E, F, G, H, I, J, R]( + query: String, + aTag: TupleTag[A], + bTag: TupleTag[B], + cTag: TupleTag[C], + dTag: TupleTag[D], + eTag: TupleTag[E], + fTag: TupleTag[F], + gTag: TupleTag[G], + hTag: TupleTag[H], + iTag: TupleTag[I], + jTag: TupleTag[J], + udfs: List[Udf] = Nil +) + +object Query10 { + + def typecheck[ + A: Schema, + B: Schema, + C: Schema, + D: Schema, + E: Schema, + F: Schema, + G: Schema, + H: Schema, + I: Schema, + J: Schema, + R: Schema + ]( + q: Query10[A, B, C, D, E, F, G, H, I, J, R] + ): Either[String, Query10[A, B, C, D, E, F, G, H, I, J, R]] = + Queries + .typecheck( + q.query, + List( + (q.aTag.getId, SchemaMaterializer.beamSchema[A]), + (q.bTag.getId, SchemaMaterializer.beamSchema[B]), + (q.cTag.getId, SchemaMaterializer.beamSchema[C]), + (q.dTag.getId, SchemaMaterializer.beamSchema[D]), + (q.eTag.getId, SchemaMaterializer.beamSchema[E]), + (q.fTag.getId, SchemaMaterializer.beamSchema[F]), + (q.gTag.getId, SchemaMaterializer.beamSchema[G]), + (q.hTag.getId, SchemaMaterializer.beamSchema[H]), + (q.iTag.getId, SchemaMaterializer.beamSchema[I]), + (q.jTag.getId, SchemaMaterializer.beamSchema[J]) + ), + SchemaMaterializer.beamSchema[R], + q.udfs + ) + .right + .map(_ => q) +} + +final class SqlSCollection10[ + A: Schema: ClassTag, + B: Schema: ClassTag, + C: Schema: ClassTag, + D: Schema: ClassTag, + E: Schema: ClassTag, + F: Schema: ClassTag, + G: Schema: ClassTag, + H: Schema: ClassTag, + I: Schema: ClassTag, + J: Schema: ClassTag +]( + a: SCollection[A], + b: SCollection[B], + c: SCollection[C], + d: SCollection[D], + e: SCollection[E], + f: SCollection[F], + g: SCollection[G], + h: SCollection[H], + i: SCollection[I], + j: SCollection[J] +) { + + def query( + q: String, + aTag: TupleTag[A], + bTag: TupleTag[B], + cTag: TupleTag[C], + dTag: TupleTag[D], + eTag: TupleTag[E], + fTag: TupleTag[F], + gTag: TupleTag[G], + hTag: TupleTag[H], + iTag: TupleTag[I], + jTag: TupleTag[J], + udfs: Udf* + ): SCollection[Row] = + query(Query10(q, aTag, bTag, cTag, dTag, eTag, fTag, gTag, hTag, iTag, jTag, udfs.toList)) + + def query(q: Query10[A, B, C, D, E, F, G, H, I, J, Row]): SCollection[Row] = + a.context.wrap { + val collA = Sql.setSchema(a) + val collB = Sql.setSchema(b) + val collC = Sql.setSchema(c) + val collD = Sql.setSchema(d) + val collE = Sql.setSchema(e) + val collF = Sql.setSchema(f) + val collG = Sql.setSchema(g) + val collH = Sql.setSchema(h) + val collI = Sql.setSchema(i) + val collJ = Sql.setSchema(j) + val sqlTransform = Sql.registerUdf(SqlTransform.query(q.query), q.udfs: _*) + + PCollectionTuple + .of(q.aTag, collA.internal) + .and(q.bTag, collB.internal) + .and(q.cTag, collC.internal) + .and(q.dTag, collD.internal) + .and(q.eTag, collE.internal) + .and(q.fTag, collF.internal) + .and(q.gTag, collG.internal) + .and(q.hTag, collH.internal) + .and(q.iTag, collI.internal) + .and(q.jTag, collJ.internal) + .apply( + s"${collA.tfName} join ${collB.tfName} join ${collC.tfName} join ${collD.tfName} join ${collE.tfName} join ${collF.tfName} join ${collG.tfName} join ${collH.tfName} join ${collI.tfName} join ${collJ.tfName}", + sqlTransform + ) + + } + + def queryAs[R: Schema: ClassTag]( + q: String, + aTag: TupleTag[A], + bTag: 
TupleTag[B], + cTag: TupleTag[C], + dTag: TupleTag[D], + eTag: TupleTag[E], + fTag: TupleTag[F], + gTag: TupleTag[G], + hTag: TupleTag[H], + iTag: TupleTag[I], + jTag: TupleTag[J], + udfs: Udf* + ): SCollection[R] = + queryAs(Query10(q, aTag, bTag, cTag, dTag, eTag, fTag, gTag, hTag, iTag, jTag, udfs.toList)) + + def queryAs[R: Schema: ClassTag](q: Query10[A, B, C, D, E, F, G, H, I, J, R]): SCollection[R] = + try { + query( + q.query, + q.aTag, + q.bTag, + q.cTag, + q.dTag, + q.eTag, + q.fTag, + q.gTag, + q.hTag, + q.iTag, + q.jTag, + q.udfs: _* + ).to(To.unchecked((_, i) => i)) + } catch { + case e: ParseException => + Query10.typecheck(q).fold(err => throw new RuntimeException(err, e), _ => throw e) + } + +} diff --git a/scio-sql/src/main/scala-3/com/spotify/scio/sql/Query2.scala b/scio-sql/src/main/scala-3/com/spotify/scio/sql/Query2.scala new file mode 100644 index 0000000000..8db66befd1 --- /dev/null +++ b/scio-sql/src/main/scala-3/com/spotify/scio/sql/Query2.scala @@ -0,0 +1,95 @@ +/* + * Copyright 2019 Spotify AB. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +// !! generated with sql.py +// !! DO NOT EDIT MANUALLY +// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + +package com.spotify.scio.sql + +import com.spotify.scio.schemas._ +import com.spotify.scio.values.SCollection +import org.apache.beam.sdk.extensions.sql.SqlTransform +import org.apache.beam.sdk.extensions.sql.impl.ParseException +import org.apache.beam.sdk.values._ + +import scala.reflect.ClassTag + +final case class Query2[A, B, R]( + query: String, + aTag: TupleTag[A], + bTag: TupleTag[B], + udfs: List[Udf] = Nil +) + +object Query2 { + def typecheck[A: Schema, B: Schema, R: Schema]( + q: Query2[A, B, R] + ): Either[String, Query2[A, B, R]] = + Queries + .typecheck( + q.query, + List( + (q.aTag.getId, SchemaMaterializer.beamSchema[A]), + (q.bTag.getId, SchemaMaterializer.beamSchema[B]) + ), + SchemaMaterializer.beamSchema[R], + q.udfs + ) + .right + .map(_ => q) +} + +final class SqlSCollection2[A: Schema: ClassTag, B: Schema: ClassTag]( + a: SCollection[A], + b: SCollection[B] +) { + + def query(q: String, aTag: TupleTag[A], bTag: TupleTag[B], udfs: Udf*): SCollection[Row] = + query(Query2(q, aTag, bTag, udfs.toList)) + + def query(q: Query2[A, B, Row]): SCollection[Row] = + a.context.wrap { + val collA = Sql.setSchema(a) + val collB = Sql.setSchema(b) + val sqlTransform = Sql.registerUdf(SqlTransform.query(q.query), q.udfs: _*) + + PCollectionTuple + .of(q.aTag, collA.internal) + .and(q.bTag, collB.internal) + .apply(s"${collA.tfName} join ${collB.tfName}", sqlTransform) + + } + + def queryAs[R: Schema: ClassTag]( + q: String, + aTag: TupleTag[A], + bTag: TupleTag[B], + udfs: Udf* + ): SCollection[R] = + queryAs(Query2(q, aTag, bTag, udfs.toList)) + + def queryAs[R: Schema: ClassTag](q: Query2[A, B, R]): SCollection[R] = + try { + query(q.query, q.aTag, q.bTag, q.udfs: _*).to(To.unchecked((_, i) => i)) + } catch { + case e: ParseException => + 
Query2.typecheck(q).fold(err => throw new RuntimeException(err, e), _ => throw e) + } + +} diff --git a/scio-sql/src/main/scala-3/com/spotify/scio/sql/Query3.scala b/scio-sql/src/main/scala-3/com/spotify/scio/sql/Query3.scala new file mode 100644 index 0000000000..666878b235 --- /dev/null +++ b/scio-sql/src/main/scala-3/com/spotify/scio/sql/Query3.scala @@ -0,0 +1,107 @@ +/* + * Copyright 2019 Spotify AB. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +// !! generated with sql.py +// !! DO NOT EDIT MANUALLY +// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + +package com.spotify.scio.sql + +import com.spotify.scio.schemas._ +import com.spotify.scio.values.SCollection +import org.apache.beam.sdk.extensions.sql.SqlTransform +import org.apache.beam.sdk.extensions.sql.impl.ParseException +import org.apache.beam.sdk.values._ + +import scala.reflect.ClassTag + +final case class Query3[A, B, C, R]( + query: String, + aTag: TupleTag[A], + bTag: TupleTag[B], + cTag: TupleTag[C], + udfs: List[Udf] = Nil +) + +object Query3 { + def typecheck[A: Schema, B: Schema, C: Schema, R: Schema]( + q: Query3[A, B, C, R] + ): Either[String, Query3[A, B, C, R]] = + Queries + .typecheck( + q.query, + List( + (q.aTag.getId, SchemaMaterializer.beamSchema[A]), + (q.bTag.getId, SchemaMaterializer.beamSchema[B]), + (q.cTag.getId, SchemaMaterializer.beamSchema[C]) + ), + SchemaMaterializer.beamSchema[R], + q.udfs + ) + .right + .map(_ => q) +} + +final class SqlSCollection3[A: Schema: ClassTag, B: Schema: ClassTag, C: Schema: ClassTag]( + a: SCollection[A], + b: SCollection[B], + c: SCollection[C] +) { + + def query( + q: String, + aTag: TupleTag[A], + bTag: TupleTag[B], + cTag: TupleTag[C], + udfs: Udf* + ): SCollection[Row] = + query(Query3(q, aTag, bTag, cTag, udfs.toList)) + + def query(q: Query3[A, B, C, Row]): SCollection[Row] = + a.context.wrap { + val collA = Sql.setSchema(a) + val collB = Sql.setSchema(b) + val collC = Sql.setSchema(c) + val sqlTransform = Sql.registerUdf(SqlTransform.query(q.query), q.udfs: _*) + + PCollectionTuple + .of(q.aTag, collA.internal) + .and(q.bTag, collB.internal) + .and(q.cTag, collC.internal) + .apply(s"${collA.tfName} join ${collB.tfName} join ${collC.tfName}", sqlTransform) + + } + + def queryAs[R: Schema: ClassTag]( + q: String, + aTag: TupleTag[A], + bTag: TupleTag[B], + cTag: TupleTag[C], + udfs: Udf* + ): SCollection[R] = + queryAs(Query3(q, aTag, bTag, cTag, udfs.toList)) + + def queryAs[R: Schema: ClassTag](q: Query3[A, B, C, R]): SCollection[R] = + try { + query(q.query, q.aTag, q.bTag, q.cTag, q.udfs: _*).to(To.unchecked((_, i) => i)) + } catch { + case e: ParseException => + Query3.typecheck(q).fold(err => throw new RuntimeException(err, e), _ => throw e) + } + +} diff --git a/scio-sql/src/main/scala-3/com/spotify/scio/sql/Query4.scala b/scio-sql/src/main/scala-3/com/spotify/scio/sql/Query4.scala new file mode 100644 index 0000000000..e717e92f8f --- /dev/null +++ 
b/scio-sql/src/main/scala-3/com/spotify/scio/sql/Query4.scala @@ -0,0 +1,118 @@ +/* + * Copyright 2019 Spotify AB. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +// !! generated with sql.py +// !! DO NOT EDIT MANUALLY +// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + +package com.spotify.scio.sql + +import com.spotify.scio.schemas._ +import com.spotify.scio.values.SCollection +import org.apache.beam.sdk.extensions.sql.SqlTransform +import org.apache.beam.sdk.extensions.sql.impl.ParseException +import org.apache.beam.sdk.values._ + +import scala.reflect.ClassTag + +final case class Query4[A, B, C, D, R]( + query: String, + aTag: TupleTag[A], + bTag: TupleTag[B], + cTag: TupleTag[C], + dTag: TupleTag[D], + udfs: List[Udf] = Nil +) + +object Query4 { + + def typecheck[A: Schema, B: Schema, C: Schema, D: Schema, R: Schema]( + q: Query4[A, B, C, D, R] + ): Either[String, Query4[A, B, C, D, R]] = + Queries + .typecheck( + q.query, + List( + (q.aTag.getId, SchemaMaterializer.beamSchema[A]), + (q.bTag.getId, SchemaMaterializer.beamSchema[B]), + (q.cTag.getId, SchemaMaterializer.beamSchema[C]), + (q.dTag.getId, SchemaMaterializer.beamSchema[D]) + ), + SchemaMaterializer.beamSchema[R], + q.udfs + ) + .right + .map(_ => q) +} + +final class SqlSCollection4[ + A: Schema: ClassTag, + B: Schema: ClassTag, + C: Schema: ClassTag, + D: Schema: ClassTag +](a: SCollection[A], b: SCollection[B], c: SCollection[C], d: SCollection[D]) { + + def query( + q: String, + aTag: TupleTag[A], + bTag: TupleTag[B], + cTag: TupleTag[C], + dTag: TupleTag[D], + udfs: Udf* + ): SCollection[Row] = + query(Query4(q, aTag, bTag, cTag, dTag, udfs.toList)) + + def query(q: Query4[A, B, C, D, Row]): SCollection[Row] = + a.context.wrap { + val collA = Sql.setSchema(a) + val collB = Sql.setSchema(b) + val collC = Sql.setSchema(c) + val collD = Sql.setSchema(d) + val sqlTransform = Sql.registerUdf(SqlTransform.query(q.query), q.udfs: _*) + + PCollectionTuple + .of(q.aTag, collA.internal) + .and(q.bTag, collB.internal) + .and(q.cTag, collC.internal) + .and(q.dTag, collD.internal) + .apply( + s"${collA.tfName} join ${collB.tfName} join ${collC.tfName} join ${collD.tfName}", + sqlTransform + ) + + } + + def queryAs[R: Schema: ClassTag]( + q: String, + aTag: TupleTag[A], + bTag: TupleTag[B], + cTag: TupleTag[C], + dTag: TupleTag[D], + udfs: Udf* + ): SCollection[R] = + queryAs(Query4(q, aTag, bTag, cTag, dTag, udfs.toList)) + + def queryAs[R: Schema: ClassTag](q: Query4[A, B, C, D, R]): SCollection[R] = + try { + query(q.query, q.aTag, q.bTag, q.cTag, q.dTag, q.udfs: _*).to(To.unchecked((_, i) => i)) + } catch { + case e: ParseException => + Query4.typecheck(q).fold(err => throw new RuntimeException(err, e), _ => throw e) + } + +} diff --git a/scio-sql/src/main/scala-3/com/spotify/scio/sql/Query5.scala b/scio-sql/src/main/scala-3/com/spotify/scio/sql/Query5.scala new file mode 100644 index 0000000000..7d17c68fcb --- /dev/null +++ 
b/scio-sql/src/main/scala-3/com/spotify/scio/sql/Query5.scala @@ -0,0 +1,127 @@ +/* + * Copyright 2019 Spotify AB. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +// !! generated with sql.py +// !! DO NOT EDIT MANUALLY +// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + +package com.spotify.scio.sql + +import com.spotify.scio.schemas._ +import com.spotify.scio.values.SCollection +import org.apache.beam.sdk.extensions.sql.SqlTransform +import org.apache.beam.sdk.extensions.sql.impl.ParseException +import org.apache.beam.sdk.values._ + +import scala.reflect.ClassTag + +final case class Query5[A, B, C, D, E, R]( + query: String, + aTag: TupleTag[A], + bTag: TupleTag[B], + cTag: TupleTag[C], + dTag: TupleTag[D], + eTag: TupleTag[E], + udfs: List[Udf] = Nil +) + +object Query5 { + + def typecheck[A: Schema, B: Schema, C: Schema, D: Schema, E: Schema, R: Schema]( + q: Query5[A, B, C, D, E, R] + ): Either[String, Query5[A, B, C, D, E, R]] = + Queries + .typecheck( + q.query, + List( + (q.aTag.getId, SchemaMaterializer.beamSchema[A]), + (q.bTag.getId, SchemaMaterializer.beamSchema[B]), + (q.cTag.getId, SchemaMaterializer.beamSchema[C]), + (q.dTag.getId, SchemaMaterializer.beamSchema[D]), + (q.eTag.getId, SchemaMaterializer.beamSchema[E]) + ), + SchemaMaterializer.beamSchema[R], + q.udfs + ) + .right + .map(_ => q) + +} + +final class SqlSCollection5[ + A: Schema: ClassTag, + B: Schema: ClassTag, + C: Schema: ClassTag, + D: Schema: ClassTag, + E: Schema: ClassTag +](a: SCollection[A], b: SCollection[B], c: SCollection[C], d: SCollection[D], e: SCollection[E]) { + + def query( + q: String, + aTag: TupleTag[A], + bTag: TupleTag[B], + cTag: TupleTag[C], + dTag: TupleTag[D], + eTag: TupleTag[E], + udfs: Udf* + ): SCollection[Row] = + query(Query5(q, aTag, bTag, cTag, dTag, eTag, udfs.toList)) + + def query(q: Query5[A, B, C, D, E, Row]): SCollection[Row] = + a.context.wrap { + val collA = Sql.setSchema(a) + val collB = Sql.setSchema(b) + val collC = Sql.setSchema(c) + val collD = Sql.setSchema(d) + val collE = Sql.setSchema(e) + val sqlTransform = Sql.registerUdf(SqlTransform.query(q.query), q.udfs: _*) + + PCollectionTuple + .of(q.aTag, collA.internal) + .and(q.bTag, collB.internal) + .and(q.cTag, collC.internal) + .and(q.dTag, collD.internal) + .and(q.eTag, collE.internal) + .apply( + s"${collA.tfName} join ${collB.tfName} join ${collC.tfName} join ${collD.tfName} join ${collE.tfName}", + sqlTransform + ) + + } + + def queryAs[R: Schema: ClassTag]( + q: String, + aTag: TupleTag[A], + bTag: TupleTag[B], + cTag: TupleTag[C], + dTag: TupleTag[D], + eTag: TupleTag[E], + udfs: Udf* + ): SCollection[R] = + queryAs(Query5(q, aTag, bTag, cTag, dTag, eTag, udfs.toList)) + + def queryAs[R: Schema: ClassTag](q: Query5[A, B, C, D, E, R]): SCollection[R] = + try { + query(q.query, q.aTag, q.bTag, q.cTag, q.dTag, q.eTag, q.udfs: _*) + .to(To.unchecked((_, i) => i)) + } catch { + case e: ParseException => + Query5.typecheck(q).fold(err => throw new 
RuntimeException(err, e), _ => throw e) + } + +} diff --git a/scio-sql/src/main/scala-3/com/spotify/scio/sql/Query6.scala b/scio-sql/src/main/scala-3/com/spotify/scio/sql/Query6.scala new file mode 100644 index 0000000000..241cf43be2 --- /dev/null +++ b/scio-sql/src/main/scala-3/com/spotify/scio/sql/Query6.scala @@ -0,0 +1,140 @@ +/* + * Copyright 2019 Spotify AB. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +// !! generated with sql.py +// !! DO NOT EDIT MANUALLY +// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + +package com.spotify.scio.sql + +import com.spotify.scio.schemas._ +import com.spotify.scio.values.SCollection +import org.apache.beam.sdk.extensions.sql.SqlTransform +import org.apache.beam.sdk.extensions.sql.impl.ParseException +import org.apache.beam.sdk.values._ + +import scala.reflect.ClassTag + +final case class Query6[A, B, C, D, E, F, R]( + query: String, + aTag: TupleTag[A], + bTag: TupleTag[B], + cTag: TupleTag[C], + dTag: TupleTag[D], + eTag: TupleTag[E], + fTag: TupleTag[F], + udfs: List[Udf] = Nil +) + +object Query6 { + + def typecheck[A: Schema, B: Schema, C: Schema, D: Schema, E: Schema, F: Schema, R: Schema]( + q: Query6[A, B, C, D, E, F, R] + ): Either[String, Query6[A, B, C, D, E, F, R]] = + Queries + .typecheck( + q.query, + List( + (q.aTag.getId, SchemaMaterializer.beamSchema[A]), + (q.bTag.getId, SchemaMaterializer.beamSchema[B]), + (q.cTag.getId, SchemaMaterializer.beamSchema[C]), + (q.dTag.getId, SchemaMaterializer.beamSchema[D]), + (q.eTag.getId, SchemaMaterializer.beamSchema[E]), + (q.fTag.getId, SchemaMaterializer.beamSchema[F]) + ), + SchemaMaterializer.beamSchema[R], + q.udfs + ) + .right + .map(_ => q) +} + +final class SqlSCollection6[ + A: Schema: ClassTag, + B: Schema: ClassTag, + C: Schema: ClassTag, + D: Schema: ClassTag, + E: Schema: ClassTag, + F: Schema: ClassTag +]( + a: SCollection[A], + b: SCollection[B], + c: SCollection[C], + d: SCollection[D], + e: SCollection[E], + f: SCollection[F] +) { + + def query( + q: String, + aTag: TupleTag[A], + bTag: TupleTag[B], + cTag: TupleTag[C], + dTag: TupleTag[D], + eTag: TupleTag[E], + fTag: TupleTag[F], + udfs: Udf* + ): SCollection[Row] = + query(Query6(q, aTag, bTag, cTag, dTag, eTag, fTag, udfs.toList)) + + def query(q: Query6[A, B, C, D, E, F, Row]): SCollection[Row] = + a.context.wrap { + val collA = Sql.setSchema(a) + val collB = Sql.setSchema(b) + val collC = Sql.setSchema(c) + val collD = Sql.setSchema(d) + val collE = Sql.setSchema(e) + val collF = Sql.setSchema(f) + val sqlTransform = Sql.registerUdf(SqlTransform.query(q.query), q.udfs: _*) + + PCollectionTuple + .of(q.aTag, collA.internal) + .and(q.bTag, collB.internal) + .and(q.cTag, collC.internal) + .and(q.dTag, collD.internal) + .and(q.eTag, collE.internal) + .and(q.fTag, collF.internal) + .apply( + s"${collA.tfName} join ${collB.tfName} join ${collC.tfName} join ${collD.tfName} join ${collE.tfName} join ${collF.tfName}", + sqlTransform + ) + + } + + def queryAs[R: Schema: 
ClassTag]( + q: String, + aTag: TupleTag[A], + bTag: TupleTag[B], + cTag: TupleTag[C], + dTag: TupleTag[D], + eTag: TupleTag[E], + fTag: TupleTag[F], + udfs: Udf* + ): SCollection[R] = + queryAs(Query6(q, aTag, bTag, cTag, dTag, eTag, fTag, udfs.toList)) + + def queryAs[R: Schema: ClassTag](q: Query6[A, B, C, D, E, F, R]): SCollection[R] = + try { + query(q.query, q.aTag, q.bTag, q.cTag, q.dTag, q.eTag, q.fTag, q.udfs: _*) + .to(To.unchecked((_, i) => i)) + } catch { + case e: ParseException => + Query6.typecheck(q).fold(err => throw new RuntimeException(err, e), _ => throw e) + } + +} diff --git a/scio-sql/src/main/scala-3/com/spotify/scio/sql/Query7.scala b/scio-sql/src/main/scala-3/com/spotify/scio/sql/Query7.scala new file mode 100644 index 0000000000..97f0b3474f --- /dev/null +++ b/scio-sql/src/main/scala-3/com/spotify/scio/sql/Query7.scala @@ -0,0 +1,155 @@ +/* + * Copyright 2019 Spotify AB. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +// !! generated with sql.py +// !! DO NOT EDIT MANUALLY +// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + +package com.spotify.scio.sql + +import com.spotify.scio.schemas._ +import com.spotify.scio.values.SCollection +import org.apache.beam.sdk.extensions.sql.SqlTransform +import org.apache.beam.sdk.extensions.sql.impl.ParseException +import org.apache.beam.sdk.values._ + +import scala.reflect.ClassTag + +final case class Query7[A, B, C, D, E, F, G, R]( + query: String, + aTag: TupleTag[A], + bTag: TupleTag[B], + cTag: TupleTag[C], + dTag: TupleTag[D], + eTag: TupleTag[E], + fTag: TupleTag[F], + gTag: TupleTag[G], + udfs: List[Udf] = Nil +) + +object Query7 { + + def typecheck[ + A: Schema, + B: Schema, + C: Schema, + D: Schema, + E: Schema, + F: Schema, + G: Schema, + R: Schema + ](q: Query7[A, B, C, D, E, F, G, R]): Either[String, Query7[A, B, C, D, E, F, G, R]] = + Queries + .typecheck( + q.query, + List( + (q.aTag.getId, SchemaMaterializer.beamSchema[A]), + (q.bTag.getId, SchemaMaterializer.beamSchema[B]), + (q.cTag.getId, SchemaMaterializer.beamSchema[C]), + (q.dTag.getId, SchemaMaterializer.beamSchema[D]), + (q.eTag.getId, SchemaMaterializer.beamSchema[E]), + (q.fTag.getId, SchemaMaterializer.beamSchema[F]), + (q.gTag.getId, SchemaMaterializer.beamSchema[G]) + ), + SchemaMaterializer.beamSchema[R], + q.udfs + ) + .right + .map(_ => q) +} + +final class SqlSCollection7[ + A: Schema: ClassTag, + B: Schema: ClassTag, + C: Schema: ClassTag, + D: Schema: ClassTag, + E: Schema: ClassTag, + F: Schema: ClassTag, + G: Schema: ClassTag +]( + a: SCollection[A], + b: SCollection[B], + c: SCollection[C], + d: SCollection[D], + e: SCollection[E], + f: SCollection[F], + g: SCollection[G] +) { + + def query( + q: String, + aTag: TupleTag[A], + bTag: TupleTag[B], + cTag: TupleTag[C], + dTag: TupleTag[D], + eTag: TupleTag[E], + fTag: TupleTag[F], + gTag: TupleTag[G], + udfs: Udf* + ): SCollection[Row] = + query(Query7(q, aTag, bTag, cTag, dTag, eTag, fTag, gTag, udfs.toList)) + + def query(q: 
Query7[A, B, C, D, E, F, G, Row]): SCollection[Row] = + a.context.wrap { + val collA = Sql.setSchema(a) + val collB = Sql.setSchema(b) + val collC = Sql.setSchema(c) + val collD = Sql.setSchema(d) + val collE = Sql.setSchema(e) + val collF = Sql.setSchema(f) + val collG = Sql.setSchema(g) + val sqlTransform = Sql.registerUdf(SqlTransform.query(q.query), q.udfs: _*) + + PCollectionTuple + .of(q.aTag, collA.internal) + .and(q.bTag, collB.internal) + .and(q.cTag, collC.internal) + .and(q.dTag, collD.internal) + .and(q.eTag, collE.internal) + .and(q.fTag, collF.internal) + .and(q.gTag, collG.internal) + .apply( + s"${collA.tfName} join ${collB.tfName} join ${collC.tfName} join ${collD.tfName} join ${collE.tfName} join ${collF.tfName} join ${collG.tfName}", + sqlTransform + ) + + } + + def queryAs[R: Schema: ClassTag]( + q: String, + aTag: TupleTag[A], + bTag: TupleTag[B], + cTag: TupleTag[C], + dTag: TupleTag[D], + eTag: TupleTag[E], + fTag: TupleTag[F], + gTag: TupleTag[G], + udfs: Udf* + ): SCollection[R] = + queryAs(Query7(q, aTag, bTag, cTag, dTag, eTag, fTag, gTag, udfs.toList)) + + def queryAs[R: Schema: ClassTag](q: Query7[A, B, C, D, E, F, G, R]): SCollection[R] = + try { + query(q.query, q.aTag, q.bTag, q.cTag, q.dTag, q.eTag, q.fTag, q.gTag, q.udfs: _*) + .to(To.unchecked((_, i) => i)) + } catch { + case e: ParseException => + Query7.typecheck(q).fold(err => throw new RuntimeException(err, e), _ => throw e) + } + +} diff --git a/scio-sql/src/main/scala-3/com/spotify/scio/sql/Query8.scala b/scio-sql/src/main/scala-3/com/spotify/scio/sql/Query8.scala new file mode 100644 index 0000000000..4d2ab44809 --- /dev/null +++ b/scio-sql/src/main/scala-3/com/spotify/scio/sql/Query8.scala @@ -0,0 +1,164 @@ +/* + * Copyright 2019 Spotify AB. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +// !! generated with sql.py +// !! DO NOT EDIT MANUALLY +// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! 
+ +package com.spotify.scio.sql + +import com.spotify.scio.schemas._ +import com.spotify.scio.values.SCollection +import org.apache.beam.sdk.extensions.sql.SqlTransform +import org.apache.beam.sdk.extensions.sql.impl.ParseException +import org.apache.beam.sdk.values._ + +import scala.reflect.ClassTag + +final case class Query8[A, B, C, D, E, F, G, H, R]( + query: String, + aTag: TupleTag[A], + bTag: TupleTag[B], + cTag: TupleTag[C], + dTag: TupleTag[D], + eTag: TupleTag[E], + fTag: TupleTag[F], + gTag: TupleTag[G], + hTag: TupleTag[H], + udfs: List[Udf] = Nil +) + +object Query8 { + + def typecheck[ + A: Schema, + B: Schema, + C: Schema, + D: Schema, + E: Schema, + F: Schema, + G: Schema, + H: Schema, + R: Schema + ](q: Query8[A, B, C, D, E, F, G, H, R]): Either[String, Query8[A, B, C, D, E, F, G, H, R]] = + Queries + .typecheck( + q.query, + List( + (q.aTag.getId, SchemaMaterializer.beamSchema[A]), + (q.bTag.getId, SchemaMaterializer.beamSchema[B]), + (q.cTag.getId, SchemaMaterializer.beamSchema[C]), + (q.dTag.getId, SchemaMaterializer.beamSchema[D]), + (q.eTag.getId, SchemaMaterializer.beamSchema[E]), + (q.fTag.getId, SchemaMaterializer.beamSchema[F]), + (q.gTag.getId, SchemaMaterializer.beamSchema[G]), + (q.hTag.getId, SchemaMaterializer.beamSchema[H]) + ), + SchemaMaterializer.beamSchema[R], + q.udfs + ) + .right + .map(_ => q) +} + +final class SqlSCollection8[ + A: Schema: ClassTag, + B: Schema: ClassTag, + C: Schema: ClassTag, + D: Schema: ClassTag, + E: Schema: ClassTag, + F: Schema: ClassTag, + G: Schema: ClassTag, + H: Schema: ClassTag +]( + a: SCollection[A], + b: SCollection[B], + c: SCollection[C], + d: SCollection[D], + e: SCollection[E], + f: SCollection[F], + g: SCollection[G], + h: SCollection[H] +) { + + def query( + q: String, + aTag: TupleTag[A], + bTag: TupleTag[B], + cTag: TupleTag[C], + dTag: TupleTag[D], + eTag: TupleTag[E], + fTag: TupleTag[F], + gTag: TupleTag[G], + hTag: TupleTag[H], + udfs: Udf* + ): SCollection[Row] = + query(Query8(q, aTag, bTag, cTag, dTag, eTag, fTag, gTag, hTag, udfs.toList)) + + def query(q: Query8[A, B, C, D, E, F, G, H, Row]): SCollection[Row] = + a.context.wrap { + val collA = Sql.setSchema(a) + val collB = Sql.setSchema(b) + val collC = Sql.setSchema(c) + val collD = Sql.setSchema(d) + val collE = Sql.setSchema(e) + val collF = Sql.setSchema(f) + val collG = Sql.setSchema(g) + val collH = Sql.setSchema(h) + val sqlTransform = Sql.registerUdf(SqlTransform.query(q.query), q.udfs: _*) + + PCollectionTuple + .of(q.aTag, collA.internal) + .and(q.bTag, collB.internal) + .and(q.cTag, collC.internal) + .and(q.dTag, collD.internal) + .and(q.eTag, collE.internal) + .and(q.fTag, collF.internal) + .and(q.gTag, collG.internal) + .and(q.hTag, collH.internal) + .apply( + s"${collA.tfName} join ${collB.tfName} join ${collC.tfName} join ${collD.tfName} join ${collE.tfName} join ${collF.tfName} join ${collG.tfName} join ${collH.tfName}", + sqlTransform + ) + + } + + def queryAs[R: Schema: ClassTag]( + q: String, + aTag: TupleTag[A], + bTag: TupleTag[B], + cTag: TupleTag[C], + dTag: TupleTag[D], + eTag: TupleTag[E], + fTag: TupleTag[F], + gTag: TupleTag[G], + hTag: TupleTag[H], + udfs: Udf* + ): SCollection[R] = + queryAs(Query8(q, aTag, bTag, cTag, dTag, eTag, fTag, gTag, hTag, udfs.toList)) + + def queryAs[R: Schema: ClassTag](q: Query8[A, B, C, D, E, F, G, H, R]): SCollection[R] = + try { + query(q.query, q.aTag, q.bTag, q.cTag, q.dTag, q.eTag, q.fTag, q.gTag, q.hTag, q.udfs: _*) + .to(To.unchecked((_, i) => i)) + } catch { + case e: ParseException => 
+ Query8.typecheck(q).fold(err => throw new RuntimeException(err, e), _ => throw e) + } + +} diff --git a/scio-sql/src/main/scala-3/com/spotify/scio/sql/Query9.scala b/scio-sql/src/main/scala-3/com/spotify/scio/sql/Query9.scala new file mode 100644 index 0000000000..8015f6af34 --- /dev/null +++ b/scio-sql/src/main/scala-3/com/spotify/scio/sql/Query9.scala @@ -0,0 +1,184 @@ +/* + * Copyright 2019 Spotify AB. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +// !! generated with sql.py +// !! DO NOT EDIT MANUALLY +// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + +package com.spotify.scio.sql + +import com.spotify.scio.schemas._ +import com.spotify.scio.values.SCollection +import org.apache.beam.sdk.extensions.sql.SqlTransform +import org.apache.beam.sdk.extensions.sql.impl.ParseException +import org.apache.beam.sdk.values._ + +import scala.reflect.ClassTag + +final case class Query9[A, B, C, D, E, F, G, H, I, R]( + query: String, + aTag: TupleTag[A], + bTag: TupleTag[B], + cTag: TupleTag[C], + dTag: TupleTag[D], + eTag: TupleTag[E], + fTag: TupleTag[F], + gTag: TupleTag[G], + hTag: TupleTag[H], + iTag: TupleTag[I], + udfs: List[Udf] = Nil +) + +object Query9 { + + def typecheck[ + A: Schema, + B: Schema, + C: Schema, + D: Schema, + E: Schema, + F: Schema, + G: Schema, + H: Schema, + I: Schema, + R: Schema + ](q: Query9[A, B, C, D, E, F, G, H, I, R]): Either[String, Query9[A, B, C, D, E, F, G, H, I, R]] = + Queries + .typecheck( + q.query, + List( + (q.aTag.getId, SchemaMaterializer.beamSchema[A]), + (q.bTag.getId, SchemaMaterializer.beamSchema[B]), + (q.cTag.getId, SchemaMaterializer.beamSchema[C]), + (q.dTag.getId, SchemaMaterializer.beamSchema[D]), + (q.eTag.getId, SchemaMaterializer.beamSchema[E]), + (q.fTag.getId, SchemaMaterializer.beamSchema[F]), + (q.gTag.getId, SchemaMaterializer.beamSchema[G]), + (q.hTag.getId, SchemaMaterializer.beamSchema[H]), + (q.iTag.getId, SchemaMaterializer.beamSchema[I]) + ), + SchemaMaterializer.beamSchema[R], + q.udfs + ) + .right + .map(_ => q) +} + +final class SqlSCollection9[ + A: Schema: ClassTag, + B: Schema: ClassTag, + C: Schema: ClassTag, + D: Schema: ClassTag, + E: Schema: ClassTag, + F: Schema: ClassTag, + G: Schema: ClassTag, + H: Schema: ClassTag, + I: Schema: ClassTag +]( + a: SCollection[A], + b: SCollection[B], + c: SCollection[C], + d: SCollection[D], + e: SCollection[E], + f: SCollection[F], + g: SCollection[G], + h: SCollection[H], + i: SCollection[I] +) { + + def query( + q: String, + aTag: TupleTag[A], + bTag: TupleTag[B], + cTag: TupleTag[C], + dTag: TupleTag[D], + eTag: TupleTag[E], + fTag: TupleTag[F], + gTag: TupleTag[G], + hTag: TupleTag[H], + iTag: TupleTag[I], + udfs: Udf* + ): SCollection[Row] = + query(Query9(q, aTag, bTag, cTag, dTag, eTag, fTag, gTag, hTag, iTag, udfs.toList)) + + def query(q: Query9[A, B, C, D, E, F, G, H, I, Row]): SCollection[Row] = + a.context.wrap { + val collA = Sql.setSchema(a) + val collB = Sql.setSchema(b) + val collC = Sql.setSchema(c) + 
val collD = Sql.setSchema(d) + val collE = Sql.setSchema(e) + val collF = Sql.setSchema(f) + val collG = Sql.setSchema(g) + val collH = Sql.setSchema(h) + val collI = Sql.setSchema(i) + val sqlTransform = Sql.registerUdf(SqlTransform.query(q.query), q.udfs: _*) + + PCollectionTuple + .of(q.aTag, collA.internal) + .and(q.bTag, collB.internal) + .and(q.cTag, collC.internal) + .and(q.dTag, collD.internal) + .and(q.eTag, collE.internal) + .and(q.fTag, collF.internal) + .and(q.gTag, collG.internal) + .and(q.hTag, collH.internal) + .and(q.iTag, collI.internal) + .apply( + s"${collA.tfName} join ${collB.tfName} join ${collC.tfName} join ${collD.tfName} join ${collE.tfName} join ${collF.tfName} join ${collG.tfName} join ${collH.tfName} join ${collI.tfName}", + sqlTransform + ) + + } + + def queryAs[R: Schema: ClassTag]( + q: String, + aTag: TupleTag[A], + bTag: TupleTag[B], + cTag: TupleTag[C], + dTag: TupleTag[D], + eTag: TupleTag[E], + fTag: TupleTag[F], + gTag: TupleTag[G], + hTag: TupleTag[H], + iTag: TupleTag[I], + udfs: Udf* + ): SCollection[R] = + queryAs(Query9(q, aTag, bTag, cTag, dTag, eTag, fTag, gTag, hTag, iTag, udfs.toList)) + + def queryAs[R: Schema: ClassTag](q: Query9[A, B, C, D, E, F, G, H, I, R]): SCollection[R] = + try { + query( + q.query, + q.aTag, + q.bTag, + q.cTag, + q.dTag, + q.eTag, + q.fTag, + q.gTag, + q.hTag, + q.iTag, + q.udfs: _* + ).to(To.unchecked((_, i) => i)) + } catch { + case e: ParseException => + Query9.typecheck(q).fold(err => throw new RuntimeException(err, e), _ => throw e) + } + +} diff --git a/scio-sql/src/main/scala-3/com/spotify/scio/sql/TypedSQLInterpolator.scala b/scio-sql/src/main/scala-3/com/spotify/scio/sql/TypedSQLInterpolator.scala new file mode 100644 index 0000000000..cab4f2f697 --- /dev/null +++ b/scio-sql/src/main/scala-3/com/spotify/scio/sql/TypedSQLInterpolator.scala @@ -0,0 +1,8 @@ +package com.spotify.scio.sql + +import com.spotify.scio.annotations.experimental + +trait TypedSQLInterpolator { + @experimental + def tsql(ps: Any*): SQLBuilder = ??? +} diff --git a/scio-sql/src/main/scala/com/spotify/scio/sql/Sql.scala b/scio-sql/src/main/scala/com/spotify/scio/sql/Sql.scala index 6352b44694..afca28be25 100644 --- a/scio-sql/src/main/scala/com/spotify/scio/sql/Sql.scala +++ b/scio-sql/src/main/scala/com/spotify/scio/sql/Sql.scala @@ -180,46 +180,3 @@ private object Queries { Left(message) } } - -private object QueryMacros { - import scala.reflect.macros.blackbox - - /** - * Make sure that A is a concrete type bc. 
SQL macros can only - * materialize Schema[A] is A is concrete - */ - def assertConcrete[A: c.WeakTypeTag](c: blackbox.Context): Unit = { - import c.universe._ - val wtt = weakTypeOf[A].dealias - val isVal = wtt <:< typeOf[AnyVal] - val isSealed = - if (wtt.typeSymbol.isClass) { - wtt.typeSymbol.asClass.isSealed - } else false - val isAbstract = wtt.typeSymbol.asType.isAbstract - if (!isVal && isAbstract && !isSealed) { - c.abort(c.enclosingPosition, s"$wtt is an abstract type, expected a concrete type.") - } else { - () - } - } - - def cons[A](c: blackbox.Context)(e: c.Expr[String]): String = { - import c.universe._ - e.tree match { - case Literal(Constant(q: String)) => q - case _ => - c.abort(c.enclosingPosition, s"Expression ${e.tree} does not evaluate to a constant") - } - } - - def tupleTag[T](c: blackbox.Context)(e: c.Expr[TupleTag[T]]): TupleTag[T] = { - import c.universe._ - - e.tree match { - case Apply(_, List(Literal(Constant(tag: String)))) => new TupleTag[T](tag) - case _ => - c.abort(c.enclosingPosition, s"Expression ${e.tree}") - } - } -} diff --git a/scio-sql/src/main/scala/com/spotify/scio/sql/SqlInterpolator.scala b/scio-sql/src/main/scala/com/spotify/scio/sql/SqlInterpolator.scala index 245a4c87f9..513dc71800 100644 --- a/scio-sql/src/main/scala/com/spotify/scio/sql/SqlInterpolator.scala +++ b/scio-sql/src/main/scala/com/spotify/scio/sql/SqlInterpolator.scala @@ -23,7 +23,6 @@ import com.spotify.scio.schemas.Schema import org.apache.beam.sdk.values.TupleTag import scala.reflect.macros.{blackbox, whitebox} -import com.spotify.scio.schemas.SchemaMacroHelpers import scala.reflect._ @@ -40,7 +39,7 @@ final case class SCollectionRef[A: Schema](coll: SCollection[A]) extends SqlPara final case class UdfRef(udf: Udf) extends SqlParam -final class SqlInterpolator(private val sc: StringContext) extends AnyVal { +final class SqlInterpolator(private val sc: StringContext) extends TypedSQLInterpolator { private def paramToString( tags: Map[String, (SCollectionRef[_], TupleTag[_])] )(p: SqlParam): String = @@ -73,191 +72,6 @@ final class SqlInterpolator(private val sc: StringContext) extends AnyVal { val q = strings.zipAll(expr, "", "").foldLeft("") { case (a, (x, y)) => s"$a$x $y" } - SQLBuilders.from(q, tags.values.toList, udfs) - } - - @experimental - def tsql(ps: Any*): SQLBuilder = - macro SqlInterpolatorMacro.builder -} - -private trait SqlInterpolatorMacroHelpers { - val ctx: blackbox.Context - import ctx.universe._ - - def partsFromContext: List[Tree] = - ctx.prefix.tree match { - case Apply(_, Apply(_, xs: List[_]) :: Nil) => xs - case tree => - ctx.abort( - ctx.enclosingPosition, - s"Implementation error. Expected tsql string interpolation, found $tree" - ) - } - - def buildSQLString(parts: List[Tree], tags: List[String]): String = { - val ps2 = - parts.map { - case Literal(Constant(s: String)) => s - case tree => - ctx.abort( - ctx.enclosingPosition, - s"Implementation error. 
Expected Literal(Constant(...)), found $tree" - ) - } - - ps2 - .zipAll(tags, "", "") - .foldLeft("") { case (a, (x, y)) => s"$a$x $y" } - } - - def tagFor(t: Type, lbl: String): Tree = - q"new _root_.org.apache.beam.sdk.values.TupleTag[$t]($lbl)" -} - -object SqlInterpolatorMacro { - - /** This static annotation is used to pass (static) parameters to SqlInterpolatorMacro.expand */ - final class SqlParts(parts: List[String], ps: Any*) extends scala.annotation.StaticAnnotation - - // For some reason this method needs to be a whitebox macro - def builder(c: whitebox.Context)(ps: c.Expr[Any]*): c.Expr[SQLBuilder] = { - val h = new { val ctx: c.type = c } with SqlInterpolatorMacroHelpers - import h._ - import c.universe._ - - val parts = partsFromContext - - val className = TypeName(c.freshName("SQLBuilder")) - val fakeName = TypeName(c.freshName("FakeImpl")) - - // Yo Dawg i herd you like macros... - // - // The following tree generates an anonymous class to lazily expand tsqlImpl. - // - // It basically acts as curryfication of the macro, - // where the interpolated String and it's parameters are partially applied - // while the expected output type (and therefore the expected data schema) stays unapplied. - // Sadly macro do not allow explicit parameter passing so the following code would be illegal - // and the macro expansion would fail with: "term macros cannot override abstract methods" - // def as[B: Schema]: SCollection[B] = - // macro _root_.com.spotify.scio.sql.SqlInterpolatorMacro.expand[B](parts, ps) - // - // We workaround the limitation by using a StaticAnnotation to pass static values, - // as described in: https://stackoverflow.com/a/25219644/2383092 - // - // It is also illegal for a macro to override abstract methods, - // which is why an intermediate class $fakeName is introduced. - // Note that we HAVE TO extend SQLBuilder, otherwise, `tsqlImpl` fails to see the concrete - // type of B, which also makes the macro expansion fails. - val tree = - q""" - { - import _root_.com.spotify.scio.values.SCollection - import _root_.com.spotify.scio.schemas.Schema - import _root_.scala.reflect.ClassTag - - sealed trait $fakeName extends _root_.com.spotify.scio.sql.SQLBuilder { - def as[B: Schema: ClassTag]: SCollection[B] = ??? - } - - final class $className extends $fakeName { - import scala.language.experimental.macros - - @_root_.com.spotify.scio.sql.SqlInterpolatorMacro.SqlParts(List(..$parts),..$ps) - override def as[B: Schema: ClassTag]: SCollection[B] = - macro _root_.com.spotify.scio.sql.SqlInterpolatorMacro.expand[B] - } - new $className - } - """ - - c.Expr[SQLBuilder](tree) - } - - def expand[B: c.WeakTypeTag]( - c: blackbox.Context - )(schB: c.Expr[Schema[B]], classTag: c.Expr[ClassTag[B]]): c.Expr[SCollection[B]] = { - import c.universe._ - - val annotationParams = - c.macroApplication.symbol.annotations - .filter(_.tree.tpe <:< typeOf[SqlParts]) - .flatMap(_.tree.children.tail) - - if (annotationParams.isEmpty) - c.abort(c.enclosingPosition, "Annotation body not provided!") - - val ps: List[c.Expr[Any]] = - annotationParams.tail.map(t => c.Expr[Any](t)) - - val parts = - annotationParams.head match { - case Apply(TypeApply(Select(Select(_, _), TermName("apply")), _), pas) => - pas - case tree => - c.abort( - c.enclosingPosition, - s"Failed to extract SQL parts. 
Expected List(...), found $tree" - ) - } - - tsqlImpl[B](c)(parts, ps: _*)(classTag) - } - - def tsqlImpl[B: c.WeakTypeTag]( - c: blackbox.Context - )(parts: List[c.Tree], ps: c.Expr[Any]*)( - ct: c.Expr[ClassTag[B]] - ): c.Expr[SCollection[B]] = { - val h = new { val ctx: c.type = c } with SqlInterpolatorMacroHelpers with SchemaMacroHelpers - import h._ - import c.universe._ - - val (ss, other) = - ps.partition(_.actualType.typeSymbol == typeOf[SCollection[Any]].typeSymbol) - - other.headOption.foreach { t => - c.abort( - c.enclosingPosition, - s"tsql interpolation only support arguments of type SCollection. Found $t" - ) - } - - val scs: List[(Tree, Type)] = - ss.map { p => - val a = p.actualType.typeArgs.head - (p.tree, a) - }.toList - - val distinctSCollections = - scs.map { case (tree, t) => - (tree.symbol, (tree, t)) - }.toMap - - def toSCollectionName(s: Tree) = s.symbol.name.encodedName.toString - - distinctSCollections.values.toList match { - case list if list.size <= 10 => - val colls = list.map(_._1) - val types = list.map(_._2) - val tags = list.map(x => tagFor(x._2, toSCollectionName(x._1))) - val sql = buildSQLString(parts, scs.map(x => toSCollectionName(x._1))) - val implOut = inferImplicitSchema[B] - val implIn = types.flatMap(t => Seq(inferImplicitSchema(t), inferClassTag(t))) - - val queryTree = c.parse(s"_root_.com.spotify.scio.sql.Query${types.size}") - val q = q"$queryTree.typed[..${types :+ weakTypeOf[B]}]($sql, ..$tags)" - c.Expr[SCollection[B]](q""" - _root_.com.spotify.scio.sql.Sql - .from(..$colls)(..$implIn) - .queryAs($q)($implOut, $ct)""") - case d => - val ns = d.map(_._1).mkString(", ") - c.abort( - c.enclosingPosition, - s"Joins limited up to 10 SCollections, found ${d.size}: $ns" - ) - } + SQLBuilders.from(q, tags.values.toList.asInstanceOf[List[(SCollectionRef[Any], TupleTag[Any])]], udfs) } } From 455d9740ebdf41c4f370d262a02e70332dcda06a Mon Sep 17 00:00:00 2001 From: Julien Tournay Date: Wed, 9 Dec 2020 12:10:13 +0100 Subject: [PATCH 05/56] Make scio-cassandra3 compile --- build.sbt | 4 ++-- .../scala/com/spotify/scio/cassandra/BulkOperations.scala | 2 +- .../main/scala/com/spotify/scio/cassandra/CassandraIO.scala | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/build.sbt b/build.sbt index 1157058b8c..5adaeca239 100644 --- a/build.sbt +++ b/build.sbt @@ -536,7 +536,7 @@ lazy val `scio-test`: Project = project "org.hamcrest" % "hamcrest" % hamcrestVersion, "org.scalactic" %% "scalactic" % "3.2.8", "com.propensive" %% "magnolia" % magnoliaVersion - )map(_.withDottyCompat(scalaVersion.value)), + ).map(_.withDottyCompat(scalaVersion.value)), Test / compileOrder := CompileOrder.JavaThenScala, Test / testGrouping := splitTests( (Test / definedTests).value, @@ -698,7 +698,7 @@ lazy val `scio-cassandra3`: Project = project "com.esotericsoftware" % "kryo-shaded" % kryoVersion, "com.google.guava" % "guava" % guavaVersion, "com.twitter" % "chill-java" % chillVersion - ) + ).map(_.withDottyCompat(scalaVersion.value)) ) .dependsOn( `scio-core`, diff --git a/scio-cassandra/cassandra3/src/main/scala/com/spotify/scio/cassandra/BulkOperations.scala b/scio-cassandra/cassandra3/src/main/scala/com/spotify/scio/cassandra/BulkOperations.scala index 944dae9a82..8cd5d09396 100644 --- a/scio-cassandra/cassandra3/src/main/scala/com/spotify/scio/cassandra/BulkOperations.scala +++ b/scio-cassandra/cassandra3/src/main/scala/com/spotify/scio/cassandra/BulkOperations.scala @@ -97,7 +97,7 @@ private[cassandra] class BulkOperations(val opts: CassandraOptions, val 
parallel val (q, mod) = (maxToken - minToken + 1) /% numPartitions val rangePerGroup = (if (mod != 0) q + 1 else q).bigInteger - values: Array[ByteString] => { + (values: Array[ByteString]) => { val key = if (config.partitionKeyIndices.length == 1) { values(config.partitionKeyIndices.head).asReadOnlyByteBuffer() } else { diff --git a/scio-cassandra/cassandra3/src/main/scala/com/spotify/scio/cassandra/CassandraIO.scala b/scio-cassandra/cassandra3/src/main/scala/com/spotify/scio/cassandra/CassandraIO.scala index f9d742d302..d3f21532c3 100644 --- a/scio-cassandra/cassandra3/src/main/scala/com/spotify/scio/cassandra/CassandraIO.scala +++ b/scio-cassandra/cassandra3/src/main/scala/com/spotify/scio/cassandra/CassandraIO.scala @@ -57,7 +57,7 @@ object CassandraIO { private[cassandra] val DefaultPar = 0 } - final case class WriteParam[T] private ( + final case class WriteParam[T] private[cassandra] ( outputFn: T => Seq[Any], parallelism: Int = WriteParam.DefaultPar ) From 3f56f373c68f23fd526457985a25d558f6fe095b Mon Sep 17 00:00:00 2001 From: Julien Tournay Date: Wed, 9 Dec 2020 12:19:37 +0100 Subject: [PATCH 06/56] Make scio-elasticsearch{6|7} compile --- build.sbt | 6 ++++-- .../com/spotify/scio/elasticsearch/ElasticsearchIO.scala | 4 ++-- .../com/spotify/scio/elasticsearch/ElasticsearchIO.scala | 4 ++-- 3 files changed, 8 insertions(+), 6 deletions(-) diff --git a/build.sbt b/build.sbt index 5adaeca239..94fac2e843 100644 --- a/build.sbt +++ b/build.sbt @@ -721,7 +721,8 @@ lazy val `scio-elasticsearch6`: Project = project "org.elasticsearch" % "elasticsearch" % elasticsearch6Version, "org.elasticsearch" % "elasticsearch-x-content" % elasticsearch6Version, "org.elasticsearch.client" % "transport" % elasticsearch6Version - ) + ).map(_.withDottyCompat(scalaVersion.value)), + compileOrder := CompileOrder.JavaThenScala, // required for Scala 3 ) .dependsOn( `scio-core`, @@ -746,7 +747,8 @@ lazy val `scio-elasticsearch7`: Project = project "org.elasticsearch.client" % "elasticsearch-rest-high-level-client" % elasticsearch7Version, "org.apache.httpcomponents" % "httpcore" % httpCoreVersion, "org.elasticsearch" % "elasticsearch" % elasticsearch7Version - ) + ).map(_.withDottyCompat(scalaVersion.value)), + compileOrder := CompileOrder.JavaThenScala, // required for Scala 3 ) .dependsOn( `scio-core`, diff --git a/scio-elasticsearch/es6/src/main/scala/com/spotify/scio/elasticsearch/ElasticsearchIO.scala b/scio-elasticsearch/es6/src/main/scala/com/spotify/scio/elasticsearch/ElasticsearchIO.scala index 8e4be32e48..bb016cf022 100644 --- a/scio-elasticsearch/es6/src/main/scala/com/spotify/scio/elasticsearch/ElasticsearchIO.scala +++ b/scio-elasticsearch/es6/src/main/scala/com/spotify/scio/elasticsearch/ElasticsearchIO.scala @@ -41,7 +41,7 @@ final case class ElasticsearchIO[T](esOptions: ElasticsearchOptions) extends Sci /** Save this SCollection into Elasticsearch. 
*/ override protected def write(data: SCollection[T], params: WriteP): Tap[Nothing] = { - val shards = if (params.numOfShards >= 0) { + val shards: Long = if (params.numOfShards >= 0) { params.numOfShards } else { esOptions.servers.size @@ -89,7 +89,7 @@ object ElasticsearchIO { ) } - final case class WriteParam[T] private ( + final case class WriteParam[T] private[elasticsearch] ( f: T => Iterable[DocWriteRequest[_]], errorFn: BulkExecutionException => Unit = WriteParam.DefaultErrorFn, flushInterval: Duration = WriteParam.DefaultFlushInterval, diff --git a/scio-elasticsearch/es7/src/main/scala/com/spotify/scio/elasticsearch/ElasticsearchIO.scala b/scio-elasticsearch/es7/src/main/scala/com/spotify/scio/elasticsearch/ElasticsearchIO.scala index 22c5076dc8..1af615ef3a 100644 --- a/scio-elasticsearch/es7/src/main/scala/com/spotify/scio/elasticsearch/ElasticsearchIO.scala +++ b/scio-elasticsearch/es7/src/main/scala/com/spotify/scio/elasticsearch/ElasticsearchIO.scala @@ -42,7 +42,7 @@ final case class ElasticsearchIO[T](esOptions: ElasticsearchOptions) extends Sci /** Save this SCollection into Elasticsearch. */ override protected def write(data: SCollection[T], params: WriteP): Tap[Nothing] = { - val shards = if (params.numOfShards >= 0) { + val shards: Long = if (params.numOfShards >= 0) { params.numOfShards } else { esOptions.nodes.size @@ -93,7 +93,7 @@ object ElasticsearchIO { ) } - final case class WriteParam[T] private ( + final case class WriteParam[T] private[elasticsearch]( f: T => Iterable[DocWriteRequest[_]], errorFn: BulkExecutionException => Unit = WriteParam.DefaultErrorFn, flushInterval: Duration = WriteParam.DefaultFlushInterval, From 0f1d1ea273d3cd2e7c7a390747876c4af58645ea Mon Sep 17 00:00:00 2001 From: Julien Tournay Date: Wed, 9 Dec 2020 15:32:49 +0100 Subject: [PATCH 07/56] Make scio-redis compile --- build.sbt | 2 +- .../scala/com/spotify/scio/redis/RedisIO.scala | 4 ++-- .../scio/redis/instances/CoderInstances.scala | 17 +++++++++-------- 3 files changed, 12 insertions(+), 11 deletions(-) diff --git a/build.sbt b/build.sbt index 94fac2e843..299500d790 100644 --- a/build.sbt +++ b/build.sbt @@ -1149,7 +1149,7 @@ lazy val `scio-redis`: Project = project "org.apache.beam" % "beam-sdks-java-core" % beamVersion, "org.scalatest" %% "scalatest" % scalatestVersion % Test, "org.apache.beam" % "beam-sdks-java-io-redis" % beamVersion - ) + ).map(_.withDottyCompat(scalaVersion.value)) ) .dependsOn( `scio-core`, diff --git a/scio-redis/src/main/scala/com/spotify/scio/redis/RedisIO.scala b/scio-redis/src/main/scala/com/spotify/scio/redis/RedisIO.scala index 784306e0e0..419a33affb 100644 --- a/scio-redis/src/main/scala/com/spotify/scio/redis/RedisIO.scala +++ b/scio-redis/src/main/scala/com/spotify/scio/redis/RedisIO.scala @@ -98,7 +98,7 @@ object RedisRead { private[redis] val DefaultOutputParallelization: Boolean = true } - final case class ReadParam private ( + final case class ReadParam private[redis] ( batchSize: Int = ReadParam.DefaultBatchSize, outputParallelization: Boolean = ReadParam.DefaultOutputParallelization ) @@ -161,6 +161,6 @@ object RedisWrite { private[redis] val DefaultBatchSize: Int = 1000 } - final case class WriteParam private (batchSize: Int = WriteParam.DefaultBatchSize) + final case class WriteParam private[redis] (batchSize: Int = WriteParam.DefaultBatchSize) } diff --git a/scio-redis/src/main/scala/com/spotify/scio/redis/instances/CoderInstances.scala b/scio-redis/src/main/scala/com/spotify/scio/redis/instances/CoderInstances.scala index 
733dc3706d..9127c58c26 100644 --- a/scio-redis/src/main/scala/com/spotify/scio/redis/instances/CoderInstances.scala +++ b/scio-redis/src/main/scala/com/spotify/scio/redis/instances/CoderInstances.scala @@ -21,14 +21,15 @@ import com.spotify.scio.redis.types._ trait CoderInstances { - implicit def appendCoder[T: Coder: RedisType]: Coder[Append[T]] = Coder.gen[Append[T]] - implicit def setCoder[T: Coder: RedisType]: Coder[Set[T]] = Coder.gen[Set[T]] - implicit def incrByCoder[T: Coder: RedisType]: Coder[IncrBy[T]] = Coder.gen[IncrBy[T]] - implicit def decrByCoder[T: Coder: RedisType]: Coder[DecrBy[T]] = Coder.gen[DecrBy[T]] - implicit def sAddCoder[T: Coder: RedisType]: Coder[SAdd[T]] = Coder.gen[SAdd[T]] - implicit def lPushCoder[T: Coder: RedisType]: Coder[LPush[T]] = Coder.gen[LPush[T]] - implicit def rPushCoder[T: Coder: RedisType]: Coder[RPush[T]] = Coder.gen[RPush[T]] - implicit def pfAddCoder[T: Coder: RedisType]: Coder[PFAdd[T]] = Coder.gen[PFAdd[T]] + // TODO: scala3 - workaround https://github.com/lampepfl/dotty/issues/9985 + implicit def appendCoder[T: Coder: RedisType]: Coder[Append[T]] = ??? // Coder.gen[Append[T]] + implicit def setCoder[T: Coder: RedisType]: Coder[Set[T]] = ??? // Coder.gen[Set[T]] + implicit def incrByCoder[T: Coder: RedisType]: Coder[IncrBy[T]] = ??? // Coder.gen[IncrBy[T]] + implicit def decrByCoder[T: Coder: RedisType]: Coder[DecrBy[T]] = ??? // Coder.gen[DecrBy[T]] + implicit def sAddCoder[T: Coder: RedisType]: Coder[SAdd[T]] = ??? // Coder.gen[SAdd[T]] + implicit def lPushCoder[T: Coder: RedisType]: Coder[LPush[T]] = ??? // Coder.gen[LPush[T]] + implicit def rPushCoder[T: Coder: RedisType]: Coder[RPush[T]] = ??? // Coder.gen[RPush[T]] + implicit def pfAddCoder[T: Coder: RedisType]: Coder[PFAdd[T]] = ??? // Coder.gen[PFAdd[T]] private[this] def coders: Map[Int, Coder[_]] = Map( 1 -> appendCoder[String], From 3bc90444b11e6bb2fac4121922913e29ea209e8a Mon Sep 17 00:00:00 2001 From: Julien Tournay Date: Thu, 10 Dec 2020 11:17:06 +0100 Subject: [PATCH 08/56] More support for scala3 in build --- build.sbt | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/build.sbt b/build.sbt index 299500d790..00cbbf83e1 100644 --- a/build.sbt +++ b/build.sbt @@ -665,7 +665,8 @@ lazy val `scio-google-cloud-platform`: Project = project "org.scalatestplus" %% "scalatestplus-scalacheck" % scalatestplusVersion % "test,it", "org.slf4j" % "slf4j-api" % slf4jVersion, "org.slf4j" % "slf4j-simple" % slf4jVersion % "test,it" - ) + ).map(_.withDottyCompat(scalaVersion.value)), + compileOrder := CompileOrder.JavaThenScala, // required for Scala 3 ) .dependsOn( `scio-core` % "compile;it->it", @@ -865,7 +866,8 @@ lazy val `scio-parquet`: Project = project "org.apache.parquet" % "parquet-common" % parquetVersion, "org.apache.parquet" % "parquet-hadoop" % parquetVersion, "org.slf4j" % "slf4j-api" % slf4jVersion - ) + ).map(_.withDottyCompat(scalaVersion.value)), + compileOrder := CompileOrder.JavaThenScala ) .dependsOn( `scio-core`, @@ -900,7 +902,11 @@ lazy val `scio-tensorflow`: Project = project "com.spotify" %% "magnolify-tensorflow" % magnolifyVersion % Test, "com.spotify" % "zoltar-core" % zoltarVersion, "org.apache.beam" % "beam-vendor-guava-26_0-jre" % beamVendorVersion - ) + ).map(_.withDottyCompat(scalaVersion.value)), + compileOrder := CompileOrder.JavaThenScala, + scalacOptions ++= { + if (isDotty.value) Seq("-source:3.0-migration") else Nil // Easily fixable + }, ) .dependsOn( `scio-avro`, @@ -1066,7 +1072,7 @@ lazy val `scio-jmh`: Project = 
project "org.hamcrest" % "hamcrest-core" % hamcrestVersion % "test", "org.hamcrest" % "hamcrest-library" % hamcrestVersion % "test", "org.slf4j" % "slf4j-nop" % slf4jVersion - ), + ).map(_.withDottyCompat(scalaVersion.value)), publish / skip := true ) .dependsOn( @@ -1118,7 +1124,7 @@ lazy val `scio-smb`: Project = project "org.apache.beam" % "beam-vendor-guava-26_0-jre" % beamVendorVersion, "org.slf4j" % "slf4j-api" % slf4jVersion, "com.github.ben-manes.caffeine" % "caffeine" % caffeineVersion % "provided" - ), + ).map(_.withDottyCompat(scalaVersion.value)), javacOptions ++= { (Compile / sourceManaged).value.mkdirs() Seq("-s", (Compile / sourceManaged).value.getAbsolutePath) From 191bf1576e56c31de9a39c8d0e985c45ca7b4467 Mon Sep 17 00:00:00 2001 From: Julien Tournay Date: Thu, 10 Dec 2020 11:17:33 +0100 Subject: [PATCH 09/56] Support scala3 in scio-parquet --- .../scala/com/spotify/scio/parquet/avro/ParquetAvroIO.scala | 4 ++-- .../spotify/scio/parquet/tensorflow/ParquetExampleIO.scala | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/scio-parquet/src/main/scala/com/spotify/scio/parquet/avro/ParquetAvroIO.scala b/scio-parquet/src/main/scala/com/spotify/scio/parquet/avro/ParquetAvroIO.scala index c4ec56e0aa..acf46e77b5 100644 --- a/scio-parquet/src/main/scala/com/spotify/scio/parquet/avro/ParquetAvroIO.scala +++ b/scio-parquet/src/main/scala/com/spotify/scio/parquet/avro/ParquetAvroIO.scala @@ -169,7 +169,7 @@ object ParquetAvroIO { ReadParam(projectionFn, projection, predicate) } - final case class ReadParam[A: ClassTag, T: ClassTag] private ( + final case class ReadParam[A: ClassTag, T: ClassTag] private[avro] ( projectionFn: A => T, projection: Schema = ReadParam.DefaultProjection, predicate: FilterPredicate = ReadParam.DefaultPredicate, @@ -229,7 +229,7 @@ object ParquetAvroIO { private[avro] val DefaultConfiguration = new Configuration() } - final case class WriteParam private ( + final case class WriteParam private[avro] ( schema: Schema = WriteParam.DefaultSchema, numShards: Int = WriteParam.DefaultNumShards, suffix: String = WriteParam.DefaultSuffix, diff --git a/scio-parquet/src/main/scala/com/spotify/scio/parquet/tensorflow/ParquetExampleIO.scala b/scio-parquet/src/main/scala/com/spotify/scio/parquet/tensorflow/ParquetExampleIO.scala index 33f119c9a1..ba6998506c 100644 --- a/scio-parquet/src/main/scala/com/spotify/scio/parquet/tensorflow/ParquetExampleIO.scala +++ b/scio-parquet/src/main/scala/com/spotify/scio/parquet/tensorflow/ParquetExampleIO.scala @@ -113,7 +113,7 @@ object ParquetExampleIO { private[tensorflow] val DefaultPredicate = null private[tensorflow] val DefaultConfiguration = new Configuration() } - final case class ReadParam private ( + final case class ReadParam private[tensorflow] ( projection: Seq[String] = ReadParam.DefaultProjection, predicate: FilterPredicate = ReadParam.DefaultPredicate, conf: Configuration = ReadParam.DefaultConfiguration @@ -126,7 +126,7 @@ object ParquetExampleIO { private[tensorflow] val DefaultConfiguration = new Configuration() } - final case class WriteParam private ( + final case class WriteParam private[tensorflow] ( schema: Schema, numShards: Int = WriteParam.DefaultNumShards, suffix: String = WriteParam.DefaultSuffix, From e8f2c79719354014d7810ba1f5bd58e91d2d2d86 Mon Sep 17 00:00:00 2001 From: Julien Tournay Date: Thu, 10 Dec 2020 11:17:45 +0100 Subject: [PATCH 10/56] Support scala3 in scio-smb --- scio-smb/src/main/scala/com/spotify/scio/smb/package.scala | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) 
diff --git a/scio-smb/src/main/scala/com/spotify/scio/smb/package.scala b/scio-smb/src/main/scala/com/spotify/scio/smb/package.scala index e403e89b40..effaebd350 100644 --- a/scio-smb/src/main/scala/com/spotify/scio/smb/package.scala +++ b/scio-smb/src/main/scala/com/spotify/scio/smb/package.scala @@ -17,6 +17,4 @@ package com.spotify.scio -import com.spotify.scio.smb.syntax.AllSyntax - -package object smb extends AllSyntax {} +package object smb extends com.spotify.scio.smb.syntax.AllSyntax {} From d683ccc3c12357a82ec4a3a84301c7f4d2703432 Mon Sep 17 00:00:00 2001 From: Julien Tournay Date: Thu, 10 Dec 2020 11:18:05 +0100 Subject: [PATCH 11/56] Support scala3 in scio-google-cloud-platform --- .../scio/bigquery/types/BigQueryType.scala | 0 .../bigquery/types/ConverterProvider.scala | 0 .../scio/bigquery/types/MacroUtil.scala | 0 .../scio/bigquery/types/SchemaProvider.scala | 0 .../scio/bigquery/types/SchemaUtil.scala | 0 .../scio/bigquery/types/TypeProvider.scala | 0 .../SampleOverrideTypeProvider.scala | 0 .../bigquery/validation/SetProperty.scala | 0 .../scio/bigquery/types/BigQueryType.scala | 448 ++++++++++++++++++ .../bigquery/validation/SetProperty.scala | 38 ++ .../spotify/scio/bigquery/BigQueryIO.scala | 6 +- .../com/spotify/scio/bigquery/taps.scala | 20 +- .../spotify/scio/bigtable/BigTableIO.scala | 9 +- .../pubsub/syntax/SCollectionSyntax.scala | 3 +- .../com/spotify/scio/spanner/SpannerIO.scala | 4 +- 15 files changed, 516 insertions(+), 12 deletions(-) rename scio-google-cloud-platform/src/main/{scala => scala-2}/com/spotify/scio/bigquery/types/BigQueryType.scala (100%) rename scio-google-cloud-platform/src/main/{scala => scala-2}/com/spotify/scio/bigquery/types/ConverterProvider.scala (100%) rename scio-google-cloud-platform/src/main/{scala => scala-2}/com/spotify/scio/bigquery/types/MacroUtil.scala (100%) rename scio-google-cloud-platform/src/main/{scala => scala-2}/com/spotify/scio/bigquery/types/SchemaProvider.scala (100%) rename scio-google-cloud-platform/src/main/{scala => scala-2}/com/spotify/scio/bigquery/types/SchemaUtil.scala (100%) rename scio-google-cloud-platform/src/main/{scala => scala-2}/com/spotify/scio/bigquery/types/TypeProvider.scala (100%) rename scio-google-cloud-platform/src/main/{scala => scala-2}/com/spotify/scio/bigquery/validation/SampleOverrideTypeProvider.scala (100%) rename scio-google-cloud-platform/src/main/{scala => scala-2}/com/spotify/scio/bigquery/validation/SetProperty.scala (100%) create mode 100644 scio-google-cloud-platform/src/main/scala-3/com/spotify/scio/bigquery/types/BigQueryType.scala create mode 100644 scio-google-cloud-platform/src/main/scala-3/com/spotify/scio/bigquery/validation/SetProperty.scala diff --git a/scio-google-cloud-platform/src/main/scala/com/spotify/scio/bigquery/types/BigQueryType.scala b/scio-google-cloud-platform/src/main/scala-2/com/spotify/scio/bigquery/types/BigQueryType.scala similarity index 100% rename from scio-google-cloud-platform/src/main/scala/com/spotify/scio/bigquery/types/BigQueryType.scala rename to scio-google-cloud-platform/src/main/scala-2/com/spotify/scio/bigquery/types/BigQueryType.scala diff --git a/scio-google-cloud-platform/src/main/scala/com/spotify/scio/bigquery/types/ConverterProvider.scala b/scio-google-cloud-platform/src/main/scala-2/com/spotify/scio/bigquery/types/ConverterProvider.scala similarity index 100% rename from scio-google-cloud-platform/src/main/scala/com/spotify/scio/bigquery/types/ConverterProvider.scala rename to 
scio-google-cloud-platform/src/main/scala-2/com/spotify/scio/bigquery/types/ConverterProvider.scala diff --git a/scio-google-cloud-platform/src/main/scala/com/spotify/scio/bigquery/types/MacroUtil.scala b/scio-google-cloud-platform/src/main/scala-2/com/spotify/scio/bigquery/types/MacroUtil.scala similarity index 100% rename from scio-google-cloud-platform/src/main/scala/com/spotify/scio/bigquery/types/MacroUtil.scala rename to scio-google-cloud-platform/src/main/scala-2/com/spotify/scio/bigquery/types/MacroUtil.scala diff --git a/scio-google-cloud-platform/src/main/scala/com/spotify/scio/bigquery/types/SchemaProvider.scala b/scio-google-cloud-platform/src/main/scala-2/com/spotify/scio/bigquery/types/SchemaProvider.scala similarity index 100% rename from scio-google-cloud-platform/src/main/scala/com/spotify/scio/bigquery/types/SchemaProvider.scala rename to scio-google-cloud-platform/src/main/scala-2/com/spotify/scio/bigquery/types/SchemaProvider.scala diff --git a/scio-google-cloud-platform/src/main/scala/com/spotify/scio/bigquery/types/SchemaUtil.scala b/scio-google-cloud-platform/src/main/scala-2/com/spotify/scio/bigquery/types/SchemaUtil.scala similarity index 100% rename from scio-google-cloud-platform/src/main/scala/com/spotify/scio/bigquery/types/SchemaUtil.scala rename to scio-google-cloud-platform/src/main/scala-2/com/spotify/scio/bigquery/types/SchemaUtil.scala diff --git a/scio-google-cloud-platform/src/main/scala/com/spotify/scio/bigquery/types/TypeProvider.scala b/scio-google-cloud-platform/src/main/scala-2/com/spotify/scio/bigquery/types/TypeProvider.scala similarity index 100% rename from scio-google-cloud-platform/src/main/scala/com/spotify/scio/bigquery/types/TypeProvider.scala rename to scio-google-cloud-platform/src/main/scala-2/com/spotify/scio/bigquery/types/TypeProvider.scala diff --git a/scio-google-cloud-platform/src/main/scala/com/spotify/scio/bigquery/validation/SampleOverrideTypeProvider.scala b/scio-google-cloud-platform/src/main/scala-2/com/spotify/scio/bigquery/validation/SampleOverrideTypeProvider.scala similarity index 100% rename from scio-google-cloud-platform/src/main/scala/com/spotify/scio/bigquery/validation/SampleOverrideTypeProvider.scala rename to scio-google-cloud-platform/src/main/scala-2/com/spotify/scio/bigquery/validation/SampleOverrideTypeProvider.scala diff --git a/scio-google-cloud-platform/src/main/scala/com/spotify/scio/bigquery/validation/SetProperty.scala b/scio-google-cloud-platform/src/main/scala-2/com/spotify/scio/bigquery/validation/SetProperty.scala similarity index 100% rename from scio-google-cloud-platform/src/main/scala/com/spotify/scio/bigquery/validation/SetProperty.scala rename to scio-google-cloud-platform/src/main/scala-2/com/spotify/scio/bigquery/validation/SetProperty.scala diff --git a/scio-google-cloud-platform/src/main/scala-3/com/spotify/scio/bigquery/types/BigQueryType.scala b/scio-google-cloud-platform/src/main/scala-3/com/spotify/scio/bigquery/types/BigQueryType.scala new file mode 100644 index 0000000000..a5be40a6c3 --- /dev/null +++ b/scio-google-cloud-platform/src/main/scala-3/com/spotify/scio/bigquery/types/BigQueryType.scala @@ -0,0 +1,448 @@ +/* + * Copyright 2019 Spotify AB. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.spotify.scio.bigquery.types
+
+import com.google.api.services.bigquery.model.{TableRow, TableSchema}
+import org.apache.avro.Schema
+import org.apache.avro.generic.GenericRecord
+
+import scala.annotation.{compileTimeOnly, StaticAnnotation}
+import scala.reflect.runtime.universe._
+import scala.util.Try
+
+/**
+ * Macro annotations and converter generators for BigQuery types.
+ *
+ * The following table lists each legacy SQL data type, its standard SQL equivalent and Scala type.
+ * | Legacy SQL | Standard SQL | Scala type                                      |
+ * |:-----------|:-------------|:------------------------------------------------|
+ * | BOOLEAN    | BOOL         | `Boolean`                                       |
+ * | INTEGER    | INT64        | `Long`, `Int`                                   |
+ * | FLOAT      | FLOAT64      | `Double`, `Float`                               |
+ * | STRING     | STRING       | `String`                                        |
+ * | NUMERIC    | NUMERIC      | `BigDecimal`                                    |
+ * | BYTES      | BYTES        | `com.google.protobuf.ByteString`, `Array[Byte]` |
+ * | RECORD     | STRUCT       | Nested case class                               |
+ * | REPEATED   | ARRAY        | `List[T]`                                       |
+ * | TIMESTAMP  | TIMESTAMP    | `org.joda.time.Instant`                         |
+ * | DATE       | DATE         | `org.joda.time.LocalDate`                       |
+ * | TIME       | TIME         | `org.joda.time.LocalTime`                       |
+ * | DATETIME   | DATETIME     | `org.joda.time.LocalDateTime`                   |
+ *
+ * @groupname trait Traits for annotated types
+ * @groupname annotation Type annotations
+ * @groupname converters Converters
+ * @groupname Ungrouped Other Members
+ */
+object BigQueryType {
+
+  /**
+   * Trait for companion objects of case classes generated with table.
+   * @group trait
+   */
+  trait HasTable {
+
+    /** Table for case class schema. */
+    def table: String
+  }
+
+  /**
+   * Trait for companion objects of case classes generated with table.
+   * Instances of this trait are provided as implicits, allowing static discovery.
+   * This trait provides evidence that a BQ table is statically known for a given type T.
+   * @group trait
+   */
+  trait Table[T] extends HasTable
+
+  /**
+   * Trait for companion objects of case classes generated with schema.
+   * @group trait
+   */
+  trait HasSchema[T] {
+
+    /** Case class schema. */
+    def schema: TableSchema
+
+    /** Case class avro schema. */
+    def avroSchema: Schema
+
+    /** Avro [[GenericRecord]] to `T` converter. */
+    def fromAvro: GenericRecord => T
+
+    /** `T` to GenericRecord converter. */
+    def toAvro: T => GenericRecord
+
+    /** TableRow to `T` converter. */
+    def fromTableRow: TableRow => T
+
+    /** `T` to TableRow converter. */
+    def toTableRow: T => TableRow
+
+    /** Get a pretty string representation of the schema. */
+    def toPrettyString(indent: Int = 0): String
+  }
+
+  /**
+   * Trait for companion objects of case classes generated with storage API.
+   * @group trait
+   */
+  trait HasStorageOptions {
+
+    /** Table for case class schema. */
+    def table: String
+
+    /** Selected fields for case class schema. */
+    def selectedFields: List[String]
+
+    /** Row restriction for case class schema. */
+    def rowRestriction: Option[String]
+  }
+
+  /**
+   * Trait for companion objects of case classes generated with storage API.
+   * Instances of this trait are provided as implicits, allowing static discovery.
+   * This trait provides evidence that a BQ table is statically known for a given type T.
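+   * For example, downstream code can statically require the storage options of a type
+   * (an illustrative sketch; `fieldsOf` is a hypothetical helper, not part of this API):
+   * {{{
+   * def fieldsOf[T](implicit opts: BigQueryType.StorageOptions[T]): List[String] =
+   *   opts.selectedFields
+   * }}}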
+   * @group trait
+   */
+  trait StorageOptions[T] extends HasStorageOptions
+
+  /**
+   * Trait for companion objects of case classes generated with SELECT query.
+   * @group trait
+   */
+  trait HasQuery {
+
+    /**
+     * SELECT query for case class schema.
+     * @return unformatted query
+     */
+    @deprecated("use queryRaw instead", "0.9.0")
+    def query: String
+
+    /**
+     * SELECT query for case class schema.
+     * @return unformatted query
+     */
+    def queryRaw: String
+  }
+
+  /**
+   * Trait for companion objects of case classes generated with query.
+   * Instances of this trait are provided as implicits, allowing static discovery.
+   * This trait provides evidence that a BQ query is statically known for a given type T.
+   * @group trait
+   */
+  trait Query[T] extends HasQuery
+
+  /**
+   * Trait for companion objects of case classes generated with table description.
+   * @group trait
+   */
+  trait HasTableDescription {
+
+    /** Case class table description. */
+    def tableDescription: String
+  }
+
+  /**
+   * Trait for case classes with generated companion objects.
+   * @group trait
+   */
+  trait HasAnnotation
+
+  /**
+   * Macro annotation for a BigQuery table.
+   *
+   * Generate case classes for a BigQuery table. Note that `tableSpec` must be a string literal in
+   * the form of `project:dataset.table` with optional `.stripMargin` at the end. For example:
+   *
+   * {{{
+   * @BigQueryType.fromTable("project:dataset.table") class MyRecord
+   * }}}
+   *
+   * String formatting syntax can be used in `tableSpec` when additional `args` are supplied. For
+   * example:
+   *
+   * {{{
+   * @BigQueryType.fromTable("project:dataset.%s", "table")
+   * }}}
+   *
+   * "\$LATEST" can be used as a placeholder for table partitions. The latest partition available
+   * will be used. For example:
+   *
+   * {{{
+   * @BigQueryType.fromTable("project:dataset.table_%s", "\$LATEST")
+   * }}}
+   *
+   * Also generate a companion object with convenience methods.
+   * @group annotation
+   */
+  @compileTimeOnly(
+    "enable macro paradise (2.12) or -Ymacro-annotations (2.13) to expand macro annotations"
+  )
+  class fromTable(tableSpec: String, args: String*) extends StaticAnnotation {
+    def macroTransform(annottees: Any*): Any = ???
+  }
+
+  /**
+   * Macro annotation for a BigQuery schema.
+   *
+   * Generate case classes for a BigQuery schema. Note that `schema` must be a string literal of
+   * the JSON schema with optional `.stripMargin` at the end. For example:
+   *
+   * {{{
+   * @BigQueryType.fromSchema(
+   *   """
+   *     |{
+   *     |  "fields": [
+   *     |    {"mode": "REQUIRED", "name": "f1", "type": "INTEGER"},
+   *     |    {"mode": "REQUIRED", "name": "f2", "type": "FLOAT"},
+   *     |    {"mode": "REQUIRED", "name": "f3", "type": "STRING"},
+   *     |    {"mode": "REQUIRED", "name": "f4", "type": "TIMESTAMP"}
+   *     |  ]
+   *     |}
+   *   """.stripMargin) class MyRecord
+   * }}}
+   *
+   * Also generate a companion object with convenience methods.
+   * @group annotation
+   */
+  @compileTimeOnly(
+    "enable macro paradise (2.12) or -Ymacro-annotations (2.13) to expand macro annotations"
+  )
+  class fromSchema(schema: String) extends StaticAnnotation {
+    def macroTransform(annottees: Any*): Any = ???
+  }
+
+  /**
+   * Macro annotation for a BigQuery table using the storage API.
+   *
+   * Generate case classes for BigQuery storage API, including column projection and filtering.
+   * Note that `tableSpec` must be a string literal in the form of `project:dataset.table` with
+   * optional `.stripMargin` at the end. For example:
+   *
+   * {{{
+   * @BigQueryType.fromStorage("project:dataset.table") class MyRecord
+   * }}}
+   *
+   * @param selectedFields names of the fields in the table that should be read. If empty, all
+   *                       fields will be read. If the specified field is a nested field, all the
+   *                       sub-fields in the field will be selected.
+   * @param rowRestriction SQL text filtering statement, similar to a WHERE clause in a query.
+   *                       Currently, we support combinations of predicates that are a comparison
+   *                       between a column and a constant value in a SQL statement. Aggregates are
+   *                       not supported. For example:
+   *
+   * {{{
+   * "a > DATE '2014-09-27' AND (b > 5 AND c LIKE 'date')"
+   * }}}
+   *
+   * String formatting syntax can be used in `tableSpec` when additional `args` are supplied. For
+   * example:
+   *
+   * {{{
+   * @BigQueryType.fromStorage("project:dataset.%s", "table")
+   * }}}
+   *
+   * "\$LATEST" can be used as a placeholder for table partitions. The latest partition available
+   * will be used. For example:
+   *
+   * {{{
+   * @BigQueryType.fromStorage("project:dataset.table_%s", "\$LATEST")
+   * }}}
+   *
+   * Also generate a companion object with convenience methods.
+   * @group annotation
+   */
+  @compileTimeOnly(
+    "enable macro paradise (2.12) or -Ymacro-annotations (2.13) to expand macro annotations"
+  )
+  class fromStorage(
+    tableSpec: String,
+    args: List[Any] = Nil,
+    selectedFields: List[String] = Nil,
+    rowRestriction: String = ""
+  ) extends StaticAnnotation {
+    def macroTransform(annottees: Any*): Any = ???
+  }
+
+  /**
+   * Macro annotation for a BigQuery SELECT query.
+   *
+   * Generate case classes for a BigQuery SELECT query. Note that `query` must be a string
+   * literal of the SELECT query with optional `.stripMargin` at the end. For example:
+   *
+   * {{{
+   * @BigQueryType.fromQuery("SELECT field1, field2 FROM [project:dataset.table]")
+   * }}}
+   *
+   * String formatting syntax can be used in `query` when additional `args` are supplied. For
+   * example:
+   *
+   * {{{
+   * @BigQueryType.fromQuery("SELECT field1, field2 FROM [%s]", "table")
+   * }}}
+   *
+   * "\$LATEST" can be used as a placeholder for table partitions. The latest common partition
+   * available for all tables with the placeholder will be used. For example:
+   *
+   * {{{
+   * @BigQueryType.fromQuery(
+   *   "SELECT field1, field2 FROM [project:d1.t1_%s] JOIN [project:d2.t2_%s] USING field3",
+   *   "\$LATEST", "\$LATEST")
+   * }}}
+   *
+   * Also generate a companion object with convenience methods.
+   *
+   * Both [[https://cloud.google.com/bigquery/docs/reference/legacy-sql Legacy SQL]] and
+   * [[https://cloud.google.com/bigquery/docs/reference/standard-sql/ Standard SQL]] dialects are
+   * supported. By default the query dialect will be automatically detected. To override this
+   * behavior, start the query string with `#legacysql` or `#standardsql`.
+   * @group annotation
+   */
+  @compileTimeOnly(
+    "enable macro paradise (2.12) or -Ymacro-annotations (2.13) to expand macro annotations"
+  )
+  class fromQuery(query: String, args: Any*) extends StaticAnnotation {
+    def macroTransform(annottees: Any*): Any = ???
+  }
+
+  /**
+   * Macro annotation for case classes to be saved to a BigQuery table.
+   *
+   * Note that this annotation does not generate case classes, only a companion object with
+   * convenience methods. You need to define a complete case class as the output record.
For + * example: + * + * {{{ + * @BigQueryType.toTable + * case class Result(name: String, score: Double) + * }}} + * @group annotation + */ + @compileTimeOnly( + "enable macro paradise (2.12) or -Ymacro-annotations (2.13) to expand macro annotations" + ) + class toTable extends StaticAnnotation { + def macroTransform(annottees: Any*): Any = ??? + } + + /** Generate [[org.apache.avro.Schema Schema]] for a case class. */ + def avroSchemaOf[T: TypeTag]: Schema = ??? + + /** Generate [[com.google.api.services.bigquery.model.TableSchema TableSchema]] for a case class. */ + def schemaOf[T: TypeTag]: TableSchema = ??? + + /** + * Generate a converter function from Avro [[GenericRecord]] to the given case class `T`. + * @group converters + */ + def fromAvro[T]: GenericRecord => T = ??? + + /** + * Generate a converter function from the given case class `T` to [[GenericRecord]]. + * @group converters + */ + def toAvro[T]: T => GenericRecord = ??? + + /** + * Generate a converter function from [[TableRow]] to the given case class `T`. + * @group converters + */ + def fromTableRow[T]: TableRow => T = + ??? + + /** + * Generate a converter function from the given case class `T` to [[TableRow]]. + * @group converters + */ + def toTableRow[T]: T => TableRow = ??? + + /** Create a new BigQueryType instance. */ + @inline final def apply[T: TypeTag]: BigQueryType[T] = new BigQueryType[T] +} + +/** + * Type class for case class `T` annotated for BigQuery IO. + * + * This decouples generated fields and methods from macro expansion to keep core macro free. + */ +class BigQueryType[T] { + private[this] val bases = ??? + + private[this] val instance = ??? + + private def getField(key: String) = + ??? + + /** Whether the case class is annotated for a table. */ + def isTable: Boolean = + ??? + + /** Whether the case class is annotated for storage API. */ + def isStorage: Boolean = + ??? + + /** Whether the case class is annotated for a query. */ + def isQuery: Boolean = + ??? + + /** Table reference from the annotation. */ + def table: Option[String] = + ??? + + /** Storage API `selectedFields` from the annotation. */ + def selectedFields: Option[List[String]] = + ??? + + /** Storage API `restriction` from the annotation. */ + def rowRestriction: Option[String] = + ??? + + /** Query from the annotation. */ + def query: Option[String] = + ??? + + /** Table description from the annotation. */ + def tableDescription: Option[String] = + ??? + + /** Avro [[GenericRecord]] to `T` converter. */ + def fromAvro: GenericRecord => T = + ??? + + /** `T` to [[GenericRecord]] converter. */ + def toAvro: T => GenericRecord = + ??? + + /** TableRow to `T` converter. */ + def fromTableRow: TableRow => T = + ??? + + /** `T` to TableRow converter. */ + def toTableRow: T => TableRow = + ??? + + /** TableSchema of `T`. */ + def schema: TableSchema = + ??? + + /** Avro schema of `T`. */ + def avroSchema: Schema = + ??? +} diff --git a/scio-google-cloud-platform/src/main/scala-3/com/spotify/scio/bigquery/validation/SetProperty.scala b/scio-google-cloud-platform/src/main/scala-3/com/spotify/scio/bigquery/validation/SetProperty.scala new file mode 100644 index 0000000000..af81dae6df --- /dev/null +++ b/scio-google-cloud-platform/src/main/scala-3/com/spotify/scio/bigquery/validation/SetProperty.scala @@ -0,0 +1,38 @@ +/* + * Copyright 2019 Spotify AB. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.spotify.scio.bigquery.validation
+
+import scala.annotation.{compileTimeOnly, StaticAnnotation}
+import scala.reflect.macros.blackbox
+
+// This shouldn't be necessary in most production use cases. However, passing system properties from
+// IntelliJ can cause issues. The ideal place to set this system property is in your build.sbt file.
+private[validation] object SetProperty {
+  @compileTimeOnly(
+    "enable macro paradise (2.12) or -Ymacro-annotations (2.13) to expand macro annotations"
+  )
+  class setProperty extends StaticAnnotation {
+    def macroTransform(annottees: Any*): Any = ???
+  }
+
+  def setSystemProperty(): String =
+    System.setProperty(
+      "override.type.provider",
+      "com.spotify.scio.bigquery.validation.SampleOverrideTypeProvider"
+    )
+}
diff --git a/scio-google-cloud-platform/src/main/scala/com/spotify/scio/bigquery/BigQueryIO.scala b/scio-google-cloud-platform/src/main/scala/com/spotify/scio/bigquery/BigQueryIO.scala
index f56db5bb20..ee2c8125eb 100644
--- a/scio-google-cloud-platform/src/main/scala/com/spotify/scio/bigquery/BigQueryIO.scala
+++ b/scio-google-cloud-platform/src/main/scala/com/spotify/scio/bigquery/BigQueryIO.scala
@@ -148,7 +148,7 @@ object BigQueryTypedSelect {
     private[bigquery] val DefaultFlattenResults = false
   }
 
-  final case class ReadParam private (flattenResults: Boolean = ReadParam.DefaultFlattenResults)
+  final case class ReadParam private[bigquery] (flattenResults: Boolean = ReadParam.DefaultFlattenResults)
 }
 
 final case class BigQueryTypedSelect[T: Coder](
@@ -284,7 +284,7 @@ object BigQueryTypedTable {
    */
   def apply[F: Coder](table: Table, format: Format[F]): BigQueryTypedTable[F] =
     format match {
-      case Format.GenericRecord => genericRecord(table)
+      case Format.GenericRecord => genericRecord(table)(Coder[F]) // Not sure how this compiles since F is unbounded
       case Format.TableRow      => tableRow(table)
     }
 
@@ -465,7 +465,7 @@ object TableRowJsonIO {
     private[bigquery] val DefaultCompression = Compression.UNCOMPRESSED
   }
 
-  final case class WriteParam private (
+  final case class WriteParam private[bigquery] (
     numShards: Int = WriteParam.DefaultNumShards,
     compression: Compression = WriteParam.DefaultCompression
   )
diff --git a/scio-google-cloud-platform/src/main/scala/com/spotify/scio/bigquery/taps.scala b/scio-google-cloud-platform/src/main/scala/com/spotify/scio/bigquery/taps.scala
index a6b14cf3a1..7996717618 100644
--- a/scio-google-cloud-platform/src/main/scala/com/spotify/scio/bigquery/taps.scala
+++ b/scio-google-cloud-platform/src/main/scala/com/spotify/scio/bigquery/taps.scala
@@ -32,6 +32,8 @@ import scala.reflect.ClassTag
 import scala.reflect.runtime.universe._
 import com.spotify.scio.bigquery.BigQueryTypedTable.Format
 import com.twitter.chill.Externalizer
+import com.twitter.chill.ClosureCleaner
+import org.apache.beam.sdk.io.gcp.bigquery.SchemaAndRecord
 
 /** Tap for BigQuery TableRow JSON files.
*/ final case class TableRowJsonTap(path: String) extends Tap[TableRow] { @@ -50,7 +52,19 @@ final case class BigQueryTypedTap[T: Coder](table: Table, fn: (GenericRecord, Ta override def open(sc: ScioContext): SCollection[T] = { val ser = Externalizer(ts) - sc.bigQueryTable(table, Format.GenericRecord).map(gr => fn(gr, ser.get)) + val cFn = ClosureCleaner.clean(fn) + val rFn = ClosureCleaner.clean((sr: SchemaAndRecord) => cFn(sr.getRecord, sr.getTableSchema)) + + // TODO: scala3 - make sure this new implementation actually works + val io = + BigQueryTypedTable( + readerFn = rFn, + writerFn = ???, // will not be used + fn = cFn, + table = table + ) + + sc.read(io) } } @@ -142,7 +156,7 @@ final case class BigQueryTaps(self: Taps) { () => { val selectedFields = readOptions.getSelectedFieldsList.asScala.toList val rowRestriction = Option(readOptions.getRowRestriction) - BigQueryStorage(Table.Ref(table), selectedFields, rowRestriction).tap() + BigQueryStorage(Table.Ref(table), selectedFields, rowRestriction).tap(()) } ) @@ -158,7 +172,7 @@ final case class BigQueryTaps(self: Taps) { val selectedFields = readOptions.getSelectedFieldsList.asScala.toList val rowRestriction = Option(readOptions.getRowRestriction) BigQueryStorage(Table.Ref(table), selectedFields, rowRestriction) - .tap() + .tap(()) .map(fn) } ) diff --git a/scio-google-cloud-platform/src/main/scala/com/spotify/scio/bigtable/BigTableIO.scala b/scio-google-cloud-platform/src/main/scala/com/spotify/scio/bigtable/BigTableIO.scala index c86af401ea..068bb03a9e 100644 --- a/scio-google-cloud-platform/src/main/scala/com/spotify/scio/bigtable/BigTableIO.scala +++ b/scio-google-cloud-platform/src/main/scala/com/spotify/scio/bigtable/BigTableIO.scala @@ -31,6 +31,9 @@ import org.joda.time.Duration import scala.jdk.CollectionConverters._ import com.spotify.scio.io.TapT +import org.apache.beam.sdk.transforms.PTransform +import org.apache.beam.sdk.values.PCollection +import org.apache.beam.sdk.values.PDone sealed trait BigtableIO[T] extends ScioIO[T] { final override val tapT: TapT.Aux[T, Nothing] = EmptyTapOf[T] @@ -94,7 +97,7 @@ object BigtableRead { new ReadParam(Seq(keyRange), rowFilter) } - final case class ReadParam private ( + final case class ReadParam private[bigtable] ( keyRanges: Seq[ByteKeyRange] = ReadParam.DefaultKeyRanges, rowFilter: RowFilter = ReadParam.DefaultRowFilter ) @@ -152,7 +155,7 @@ final case class BigtableWrite[T <: Mutation](bigtableOptions: BigtableOptions, .map { case (key, value) => KV.of(key, value.asJava.asInstanceOf[java.lang.Iterable[Mutation]]) } - .applyInternal(sink) + .applyInternal[PDone](sink.asInstanceOf[PTransform[PCollection[KV[Serializable, java.lang.Iterable[Mutation]]], PDone]]) } EmptyTap } @@ -169,7 +172,7 @@ object BigtableWrite { private[bigtable] val DefaultFlushInterval = Duration.standardSeconds(1) } - final case class Bulk private ( + final case class Bulk private[bigtable] ( numOfShards: Int, flushInterval: Duration = Bulk.DefaultFlushInterval ) extends WriteParam diff --git a/scio-google-cloud-platform/src/main/scala/com/spotify/scio/pubsub/syntax/SCollectionSyntax.scala b/scio-google-cloud-platform/src/main/scala/com/spotify/scio/pubsub/syntax/SCollectionSyntax.scala index 50a1ed588d..7f80d6c0c7 100644 --- a/scio-google-cloud-platform/src/main/scala/com/spotify/scio/pubsub/syntax/SCollectionSyntax.scala +++ b/scio-google-cloud-platform/src/main/scala/com/spotify/scio/pubsub/syntax/SCollectionSyntax.scala @@ -22,6 +22,7 @@ import com.spotify.scio.coders.BeamCoders import 
com.spotify.scio.pubsub.PubsubIO import com.spotify.scio.io.ClosedTap import scala.reflect.ClassTag +import com.spotify.scio.coders.Coder trait SCollectionSyntax { implicit class SCollectionPubsubOps[T](private val coll: SCollection[T]) { @@ -80,7 +81,7 @@ trait SCollectionSyntax { maxBatchSize: Option[Int] = None, maxBatchBytesSize: Option[Int] = None )(implicit ev: T <:< (V, Map[String, String])): ClosedTap[Nothing] = { - implicit val vCoder = BeamCoders.getTupleCoders(coll.covary_[(V, Map[String, String])])._1 + implicit val vCoder: Coder[V] = BeamCoders.getTupleCoders(coll.covary_[(V, Map[String, String])])._1 val io = PubsubIO.withAttributes[V](topic, idAttribute, timestampAttribute) coll .covary_[(V, Map[String, String])] diff --git a/scio-google-cloud-platform/src/main/scala/com/spotify/scio/spanner/SpannerIO.scala b/scio-google-cloud-platform/src/main/scala/com/spotify/scio/spanner/SpannerIO.scala index 718fc9a5f5..301b54c917 100644 --- a/scio-google-cloud-platform/src/main/scala/com/spotify/scio/spanner/SpannerIO.scala +++ b/scio-google-cloud-platform/src/main/scala/com/spotify/scio/spanner/SpannerIO.scala @@ -46,7 +46,7 @@ object SpannerRead { final case class FromTable(tableName: String, columns: Seq[String]) extends ReadMethod final case class FromQuery(query: String) extends ReadMethod - final case class ReadParam private ( + final case class ReadParam private[spanner] ( readMethod: ReadMethod, withTransaction: Boolean = ReadParam.DefaultWithTransaction, withBatching: Boolean = ReadParam.DefaultWithBatching @@ -90,7 +90,7 @@ object SpannerWrite { private[spanner] val DefaultBatchSizeBytes = 1024L * 1024L } - final case class WriteParam private ( + final case class WriteParam private[spanner] ( failureMode: FailureMode = WriteParam.DefaultFailureMode, batchSizeBytes: Long = WriteParam.DefaultBatchSizeBytes ) From a195df4d71ffe859a392b9ed29ab4444d922ad75 Mon Sep 17 00:00:00 2001 From: Julien Tournay Date: Thu, 10 Dec 2020 11:18:22 +0100 Subject: [PATCH 12/56] Support scala3 in scio-tensorflow --- .../main/scala/com/spotify/scio/tensorflow/TFRecordIO.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scio-tensorflow/src/main/scala/com/spotify/scio/tensorflow/TFRecordIO.scala b/scio-tensorflow/src/main/scala/com/spotify/scio/tensorflow/TFRecordIO.scala index 0207e95d9e..0a2d617f28 100644 --- a/scio-tensorflow/src/main/scala/com/spotify/scio/tensorflow/TFRecordIO.scala +++ b/scio-tensorflow/src/main/scala/com/spotify/scio/tensorflow/TFRecordIO.scala @@ -49,7 +49,7 @@ object TFRecordIO { private[tensorflow] val DefaultCompression = Compression.AUTO } - final case class ReadParam private (compression: Compression = ReadParam.DefaultCompression) + final case class ReadParam private[tensorflow] (compression: Compression = ReadParam.DefaultCompression) object WriteParam { private[tensorflow] val DefaultSuffix = ".tfrecords" @@ -57,7 +57,7 @@ object TFRecordIO { private[tensorflow] val DefaultNumShards = 0 } - final case class WriteParam private ( + final case class WriteParam private[tensorflow] ( suffix: String = WriteParam.DefaultSuffix, compression: Compression = WriteParam.DefaultCompression, numShards: Int = WriteParam.DefaultNumShards From 20bf67c2c2f2329665ad05215253e795db588abc Mon Sep 17 00:00:00 2001 From: Julien Tournay Date: Thu, 10 Dec 2020 11:18:37 +0100 Subject: [PATCH 13/56] fallback coder ? 
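
The Scala 2 fallback relies on shapeless.LowPriority to lose the implicit
search, and that type has no Scala 3 counterpart, so the Scala 3 variant is
reduced to a plain inline macro. A minimal sketch of the intended call site
(illustrative only; it assumes the trait is mixed into the Coder companion
as the lowest-priority given):

    case class Opaque(bytes: Array[Byte])
    // With no dedicated Coder[Opaque] in scope, the inline expansion
    // issues the compile-time fallback warning and resolves to the
    // Kryo-backed coder.
    val c: Coder[Opaque] = Coder.fallback[Opaque]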
--- .../main/scala-2/com/spotify/scio/coders/FallbackCoder.scala | 1 + .../main/scala-3/com/spotify/scio/coders/FallbackCoder.scala | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/scio-core/src/main/scala-2/com/spotify/scio/coders/FallbackCoder.scala b/scio-core/src/main/scala-2/com/spotify/scio/coders/FallbackCoder.scala index bd89032a00..1cbb08520c 100644 --- a/scio-core/src/main/scala-2/com/spotify/scio/coders/FallbackCoder.scala +++ b/scio-core/src/main/scala-2/com/spotify/scio/coders/FallbackCoder.scala @@ -1,6 +1,7 @@ package com.spotify.scio.coders trait FallbackCoder { + // TODO: scala3 - Make that the fallback implicit def fallback[T](implicit lp: shapeless.LowPriority): Coder[T] = macro CoderMacros.issueFallbackWarning[T] } diff --git a/scio-core/src/main/scala-3/com/spotify/scio/coders/FallbackCoder.scala b/scio-core/src/main/scala-3/com/spotify/scio/coders/FallbackCoder.scala index 4ba29dbf2e..f3e94a35a8 100644 --- a/scio-core/src/main/scala-3/com/spotify/scio/coders/FallbackCoder.scala +++ b/scio-core/src/main/scala-3/com/spotify/scio/coders/FallbackCoder.scala @@ -4,6 +4,6 @@ import scala.reflect.ClassTag import com.spotify.scio.coders.macros.FallbackCoderMacros trait FallbackCoder { - inline def fallback[T](implicit lp: shapeless.LowPriority): Coder[T] = + inline def fallback[T]: Coder[T] = ${ FallbackCoderMacros.issueFallbackWarning[T] } } From 628bfbeeb948f3b824dd2bc3fb480cbea9a4b28b Mon Sep 17 00:00:00 2001 From: Julien Tournay Date: Thu, 10 Dec 2020 11:34:04 +0100 Subject: [PATCH 14/56] Annotate Scala3 work to be done --- .../scala-3/com/spotify/scio/avro/types/AvroTypeMacros.scala | 1 + .../scala-3/com/spotify/scio/bigquery/types/BigQueryType.scala | 1 + .../com/spotify/scio/bigquery/validation/SetProperty.scala | 2 +- .../scala-3/com/spotify/scio/sql/TypedSQLInterpolator.scala | 1 + 4 files changed, 4 insertions(+), 1 deletion(-) diff --git a/scio-avro/src/main/scala-3/com/spotify/scio/avro/types/AvroTypeMacros.scala b/scio-avro/src/main/scala-3/com/spotify/scio/avro/types/AvroTypeMacros.scala index 682fba6521..26ad0bd999 100644 --- a/scio-avro/src/main/scala-3/com/spotify/scio/avro/types/AvroTypeMacros.scala +++ b/scio-avro/src/main/scala-3/com/spotify/scio/avro/types/AvroTypeMacros.scala @@ -47,6 +47,7 @@ import scala.reflect.runtime.universe._ * @groupname converters Converters * @groupname Ungrouped Other Members */ +// TODO: scala3 trait AvroTypeMacros { /** diff --git a/scio-google-cloud-platform/src/main/scala-3/com/spotify/scio/bigquery/types/BigQueryType.scala b/scio-google-cloud-platform/src/main/scala-3/com/spotify/scio/bigquery/types/BigQueryType.scala index a5be40a6c3..0340413d8c 100644 --- a/scio-google-cloud-platform/src/main/scala-3/com/spotify/scio/bigquery/types/BigQueryType.scala +++ b/scio-google-cloud-platform/src/main/scala-3/com/spotify/scio/bigquery/types/BigQueryType.scala @@ -49,6 +49,7 @@ import scala.util.Try * @groupname converters Converters * @groupname Ungrouped Other Members */ +// TODO: scala3 object BigQueryType { /** diff --git a/scio-google-cloud-platform/src/main/scala-3/com/spotify/scio/bigquery/validation/SetProperty.scala b/scio-google-cloud-platform/src/main/scala-3/com/spotify/scio/bigquery/validation/SetProperty.scala index af81dae6df..c938f8dd39 100644 --- a/scio-google-cloud-platform/src/main/scala-3/com/spotify/scio/bigquery/validation/SetProperty.scala +++ b/scio-google-cloud-platform/src/main/scala-3/com/spotify/scio/bigquery/validation/SetProperty.scala @@ -27,7 +27,7 @@ private[validation] object 
SetProperty { "enable macro paradise (2.12) or -Ymacro-annotations (2.13) to expand macro annotations" ) class setProperty extends StaticAnnotation { - def macroTransform(annottees: Any*): Any = ??? + def macroTransform(annottees: Any*): Any = ??? // TODO: scala3 } def setSystemProperty(): String = diff --git a/scio-sql/src/main/scala-3/com/spotify/scio/sql/TypedSQLInterpolator.scala b/scio-sql/src/main/scala-3/com/spotify/scio/sql/TypedSQLInterpolator.scala index cab4f2f697..e5ab1f81e0 100644 --- a/scio-sql/src/main/scala-3/com/spotify/scio/sql/TypedSQLInterpolator.scala +++ b/scio-sql/src/main/scala-3/com/spotify/scio/sql/TypedSQLInterpolator.scala @@ -3,6 +3,7 @@ package com.spotify.scio.sql import com.spotify.scio.annotations.experimental trait TypedSQLInterpolator { + // TODO: scala3 support @experimental def tsql(ps: Any*): SQLBuilder = ??? } From 835bb5dedd52e00ac169551fc775967ed4c7667b Mon Sep 17 00:00:00 2001 From: Julien Tournay Date: Thu, 10 Dec 2020 12:10:52 +0100 Subject: [PATCH 15/56] bigquery/types/SchemaUtil.scala Scala 3 --- .../scio/bigquery/types/TypeProvider.scala | 21 ---------------- .../scio/bigquery/types/NameProvider.scala | 24 +++++++++++++++++++ .../scio/bigquery/types/SchemaUtil.scala | 0 3 files changed, 24 insertions(+), 21 deletions(-) create mode 100644 scio-google-cloud-platform/src/main/scala/com/spotify/scio/bigquery/types/NameProvider.scala rename scio-google-cloud-platform/src/main/{scala-2 => scala}/com/spotify/scio/bigquery/types/SchemaUtil.scala (100%) diff --git a/scio-google-cloud-platform/src/main/scala-2/com/spotify/scio/bigquery/types/TypeProvider.scala b/scio-google-cloud-platform/src/main/scala-2/com/spotify/scio/bigquery/types/TypeProvider.scala index 643deb2943..c45be4ad4c 100644 --- a/scio-google-cloud-platform/src/main/scala-2/com/spotify/scio/bigquery/types/TypeProvider.scala +++ b/scio-google-cloud-platform/src/main/scala-2/com/spotify/scio/bigquery/types/TypeProvider.scala @@ -579,24 +579,3 @@ private[types] object TypeProvider { Files.asCharSink(genSrcFile, Charsets.UTF_8).write(prettyCode) } } - -private[types] object NameProvider { - private val m = MMap.empty[String, Int].withDefaultValue(0) - - /** - * Generate a unique name for a nested record. - * This is necessary since we create case classes for nested records and name them with their - * field names. - */ - def getUniqueName(name: String): String = m.synchronized { - val cName = toPascalCase(name) + '$' - m(cName) += 1 - cName + m(cName) - } - - private def toPascalCase(s: String): String = - s.split('_') - .filter(_.nonEmpty) - .map(t => s"${t(0).toUpper}${t.drop(1).toLowerCase}") - .mkString("") -} diff --git a/scio-google-cloud-platform/src/main/scala/com/spotify/scio/bigquery/types/NameProvider.scala b/scio-google-cloud-platform/src/main/scala/com/spotify/scio/bigquery/types/NameProvider.scala new file mode 100644 index 0000000000..6f4c8ccba9 --- /dev/null +++ b/scio-google-cloud-platform/src/main/scala/com/spotify/scio/bigquery/types/NameProvider.scala @@ -0,0 +1,24 @@ +package com.spotify.scio.bigquery.types + +import scala.collection.mutable.{Map => MMap} + +private[types] object NameProvider { + private val m = MMap.empty[String, Int].withDefaultValue(0) + + /** + * Generate a unique name for a nested record. + * This is necessary since we create case classes for nested records and name them with their + * field names. 
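+   * For example (an illustrative sketch of two successive calls with the same input):
+   * {{{
+   * NameProvider.getUniqueName("user_name") // "UserName$1"
+   * NameProvider.getUniqueName("user_name") // "UserName$2"
+   * }}}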
+ */ + def getUniqueName(name: String): String = m.synchronized { + val cName = toPascalCase(name) + '$' + m(cName) += 1 + cName + m(cName) + } + + private def toPascalCase(s: String): String = + s.split('_') + .filter(_.nonEmpty) + .map(t => s"${t(0).toUpper}${t.drop(1).toLowerCase}") + .mkString("") +} diff --git a/scio-google-cloud-platform/src/main/scala-2/com/spotify/scio/bigquery/types/SchemaUtil.scala b/scio-google-cloud-platform/src/main/scala/com/spotify/scio/bigquery/types/SchemaUtil.scala similarity index 100% rename from scio-google-cloud-platform/src/main/scala-2/com/spotify/scio/bigquery/types/SchemaUtil.scala rename to scio-google-cloud-platform/src/main/scala/com/spotify/scio/bigquery/types/SchemaUtil.scala From adc7ad3b6a453f3d3fbf0808c343fcf4d3d9405d Mon Sep 17 00:00:00 2001 From: Maxime Kjaer Date: Thu, 11 Mar 2021 16:01:42 +0100 Subject: [PATCH 16/56] Set up CI for Scala 3 migration (#1) * Temporarily silence other jobs * Refactor scala3 version into a variable * Create ci job testing and compiling each subproject * Disable fail-fast * Use RC1 syntax for givens * Fix isJavaBean * Fix shadowing of BitSetCoder by private class * Remove call to showAnsiColored which does not exist anymore * Fix ambiguous resolution of implicit * Disable subprojects that don't build * Disable scio-core Co-authored-by: vincenzobaz --- .github/workflows/ci-it.yml | 2 +- .github/workflows/ci.yml | 2 +- .github/workflows/deploy.yml | 4 +- .github/workflows/migration.yml | 58 +++++++++++++++++++ .github/workflows/release-drafter.yml | 5 +- .github/workflows/release.yml | 5 +- build.sbt | 5 +- project/plugins.sbt | 2 +- .../coders/macros/FallbackCoderMacros.scala | 2 +- .../scio/coders/instances/ScalaCoders.scala | 4 +- .../scio/schemas/SchemaMaterializer.scala | 4 +- .../com/spotify/scio/DerivationUtils.scala | 4 +- .../scala-3/com/spotify/scio/IsJava.scala | 25 ++++---- 13 files changed, 88 insertions(+), 34 deletions(-) create mode 100644 .github/workflows/migration.yml diff --git a/.github/workflows/ci-it.yml b/.github/workflows/ci-it.yml index d288916e32..2feee496c5 100644 --- a/.github/workflows/ci-it.yml +++ b/.github/workflows/ci-it.yml @@ -5,7 +5,7 @@ env: GOOGLE_APPLICATION_CREDENTIALS: scripts/gha-it.json GOOGLE_PROJECT_ID: data-integration-test -on: push +on: release jobs: it-test: diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 0760e5ac97..4b3da5b5b0 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -1,6 +1,6 @@ name: ci -on: [push, pull_request] +on: release jobs: checks: diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml index c1145cdb95..94766adc68 100644 --- a/.github/workflows/deploy.yml +++ b/.github/workflows/deploy.yml @@ -5,9 +5,7 @@ env: GOOGLE_APPLICATION_CREDENTIALS: scripts/gha-it.json GOOGLE_PROJECT_ID: data-integration-test -on: - push: - tags: ["*"] +on: release jobs: publish-repl: diff --git a/.github/workflows/migration.yml b/.github/workflows/migration.yml new file mode 100644 index 0000000000..bbef522eb4 --- /dev/null +++ b/.github/workflows/migration.yml @@ -0,0 +1,58 @@ +name: Migration +on: push + +jobs: + test: + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + task: + # - scio-sql/compile + # - scio-sql/test + # - scio-parquet/compile + # - scio-parquet/test + # - scio-elasticsearch7/compile + # - scio-elasticsearch7/test + # - scio-repl/compile + # - scio-repl/test + # - scio-jmh/compile + # - scio-jmh/test + # - scio-jdbc/compile + # - scio-jdbc/test + # - scio-extra/compile 
+ # - scio-extra/test + # - scio-core/compile + # - scio-core/test + # - scio-examples/compile + # - scio-examples/test + # - scio-redis/compile + # - scio-redis/test + # - scio-schemas/compile + # - scio-schemas/test + # - scio-cassandra3/compile + # - scio-cassandra3/test + # - scio-tensorflow/compile + # - scio-tensorflow/test + - scio-macros/compile + - scio-macros/test + # - scio-test/compile + # - scio-test/test + # - scio-avro/compile + # - scio-avro/test + # - scio-google-cloud-platform/compile + # - scio-google-cloud-platform/test + # - scio-elasticsearch6/compile + # - scio-elasticsearch6/test + # - scio-smb/compile + # - scio-smb/test + # - steps/compile + # - steps/test + steps: + - uses: actions/checkout@v2.3.4 + - name: cache SBT + uses: coursier/cache-action@v5 + - name: java 8 setup + uses: olafurpg/setup-scala@v10 + - name: Compile + run: sbt "++3.0.0-RC1;${{ matrix.task }}" diff --git a/.github/workflows/release-drafter.yml b/.github/workflows/release-drafter.yml index 896edaf4a3..d0134c4419 100644 --- a/.github/workflows/release-drafter.yml +++ b/.github/workflows/release-drafter.yml @@ -1,9 +1,6 @@ name: Release Drafter -on: - push: - branches: - - main +on: release jobs: update_release_draft: diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 2d6f5fbfbf..1b796d956b 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -1,8 +1,5 @@ name: release -on: - push: - branches: [main] - tags: ["*"] +on: release jobs: publish: runs-on: ubuntu-latest diff --git a/build.sbt b/build.sbt index 00cbbf83e1..f0c3f7d5f1 100644 --- a/build.sbt +++ b/build.sbt @@ -25,6 +25,7 @@ import de.heikoseeberger.sbtheader.CommentCreator ThisBuild / turbo := true +val scala3Version = "3.0.0-RC1" val algebirdVersion = "0.13.7" val algebraVersion = "2.2.2" val annoy4sVersion = "0.10.0" @@ -143,7 +144,7 @@ val commonSettings = Def headerLicense := Some(HeaderLicense.ALv2("2020", "Spotify AB")), headerMappings := headerMappings.value + (HeaderFileType.scala -> keepExistingHeader, HeaderFileType.java -> keepExistingHeader), scalaVersion := "2.13.5", - crossScalaVersions := Seq("2.12.12", scalaVersion.value, "3.0.0-M2"), + crossScalaVersions := Seq("2.12.12", scalaVersion.value, scala3Version), scalacOptions ++= Scalac.commonsOptions.value, Compile / doc / scalacOptions := Scalac.docOptions.value, javacOptions ++= Seq("-source", "1.8", "-target", "1.8", "-Xlint:unchecked"), @@ -575,7 +576,7 @@ lazy val `scio-macros`: Project = project else Nil }, // Scala3 setting - crossScalaVersions += "3.0.0-M2" + crossScalaVersions += scala3Version ) lazy val `scio-avro`: Project = project diff --git a/project/plugins.sbt b/project/plugins.sbt index 84aadb0356..cb0a21c56e 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -20,7 +20,7 @@ addSbtPlugin("com.thoughtworks.sbt-api-mappings" % "sbt-api-mappings" % "3.0.0") addSbtPlugin("net.virtual-void" % "sbt-dependency-graph" % "0.10.0-RC1") addSbtPlugin("de.heikoseeberger" % "sbt-header" % "5.6.0") addSbtPlugin("com.geirsson" % "sbt-ci-release" % "1.5.7") -addSbtPlugin("ch.epfl.lamp" % "sbt-dotty" % "0.4.4") +addSbtPlugin("ch.epfl.lamp" % "sbt-dotty" % "0.5.3") libraryDependencies ++= Seq( "com.github.os72" % "protoc-jar" % "3.11.4", diff --git a/scio-core/src/main/scala-3/com/spotify/scio/coders/macros/FallbackCoderMacros.scala b/scio-core/src/main/scala-3/com/spotify/scio/coders/macros/FallbackCoderMacros.scala index 180b4e0cbb..e294fa0815 100644 --- 
a/scio-core/src/main/scala-3/com/spotify/scio/coders/macros/FallbackCoderMacros.scala +++ b/scio-core/src/main/scala-3/com/spotify/scio/coders/macros/FallbackCoderMacros.scala @@ -31,7 +31,7 @@ object FallbackCoderMacros { // val show = MacroSettings.showCoderFallback(c) == FeatureFlag.Enable val show = true - val fullTypeColored = Type.showAnsiColored[T] + val fullTypeColored = Type.show[T] val fullType = Type.show[T] val typeName: String = fullType.split('.').last // TODO: Type.showShort[T] ? diff --git a/scio-core/src/main/scala/com/spotify/scio/coders/instances/ScalaCoders.scala b/scio-core/src/main/scala/com/spotify/scio/coders/instances/ScalaCoders.scala index 9a373ea3d9..04c2ef785c 100644 --- a/scio-core/src/main/scala/com/spotify/scio/coders/instances/ScalaCoders.scala +++ b/scio-core/src/main/scala/com/spotify/scio/coders/instances/ScalaCoders.scala @@ -234,7 +234,7 @@ private class SortedSetCoder[T: Ordering](bc: BCoder[T]) extends SeqLikeCoder[So decode(inStream, SortedSet.newBuilder[T]) } -private class BitSetCoder extends AtomicCoder[BitSet] { +private class BitSetCoderInternal extends AtomicCoder[BitSet] { private[this] val lc = VarIntCoder.of() def decode(in: InputStream): BitSet = { @@ -452,7 +452,7 @@ trait ScalaCoders { implicit def noneCoder: Coder[None.type] = optionCoder[Nothing, Option](nothingCoder).asInstanceOf[Coder[None.type]] - implicit def bitSetCoder: Coder[BitSet] = Coder.beam(new BitSetCoder) + implicit def bitSetCoder: Coder[BitSet] = Coder.beam(new BitSetCoderInternal) implicit def seqCoder[T: Coder]: Coder[Seq[T]] = Coder.transform(Coder[T])(bc => Coder.beam(new SeqCoder[T](bc))) diff --git a/scio-core/src/main/scala/com/spotify/scio/schemas/SchemaMaterializer.scala b/scio-core/src/main/scala/com/spotify/scio/schemas/SchemaMaterializer.scala index 24d9da116a..69183e32da 100644 --- a/scio-core/src/main/scala/com/spotify/scio/schemas/SchemaMaterializer.scala +++ b/scio-core/src/main/scala/com/spotify/scio/schemas/SchemaMaterializer.scala @@ -198,7 +198,7 @@ object SchemaMaterializer { (bschema, toRow, fromRow) case _ => implicit val imp = schema - val (bschema, to, from) = materialize(implicitly[Schema[ScalarWrapper[T]]]) + val (bschema, to, from) = materialize(ScalarWrapper.schemaScalarWrapper[T]) def fromRow = new SerializableFunction[Row, T] { @@ -219,6 +219,6 @@ object SchemaMaterializer { case s @ (_: Record[T] | _: RawRecord[T]) => SchemaMaterializer.fieldType(s).getRowSchema case _ => - SchemaMaterializer.fieldType(Schema[ScalarWrapper[T]]).getRowSchema + SchemaMaterializer.fieldType(Schema[ScalarWrapper[T]](ScalarWrapper.schemaScalarWrapper[T])).getRowSchema } } diff --git a/scio-macros/src/main/scala-3/com/spotify/scio/DerivationUtils.scala b/scio-macros/src/main/scala-3/com/spotify/scio/DerivationUtils.scala index 8562727360..08294ac91d 100644 --- a/scio-macros/src/main/scala-3/com/spotify/scio/DerivationUtils.scala +++ b/scio-macros/src/main/scala-3/com/spotify/scio/DerivationUtils.scala @@ -21,13 +21,13 @@ import scala.compiletime._ import scala.deriving._ object DerivationUtils { - inline given mirrorFields[Fields <: Tuple] as List[String] = + inline given mirrorFields[Fields <: Tuple]: List[String] = inline erasedValue[Fields] match { case _: (field *: fields) => constValue[field].toString :: mirrorFields[fields] case _ => Nil } - inline given summonAllF[F[_], T <: Tuple] as Widen[T] = { + inline given summonAllF[F[_], T <: Tuple]: Widen[T] = { val res = inline erasedValue[T] match { case _: EmptyTuple => EmptyTuple diff --git 
a/scio-macros/src/main/scala-3/com/spotify/scio/IsJava.scala b/scio-macros/src/main/scala-3/com/spotify/scio/IsJava.scala index a7d6ba4633..744a3a678c 100644 --- a/scio-macros/src/main/scala-3/com/spotify/scio/IsJava.scala +++ b/scio-macros/src/main/scala-3/com/spotify/scio/IsJava.scala @@ -28,7 +28,7 @@ object IsJavaBean { private def checkGetterAndSetters(using q: Quotes)(sym: q.reflect.Symbol): Unit = { import q.reflect._ - val methods: List[Symbol] = sym.classMethods + val methods: List[Symbol] = sym.declaredMethods val getters = methods.collect { @@ -59,15 +59,18 @@ object IsJavaBean { report.throwError(mess) } - val resType = info.returnTpt.tpe - val paramType = setter.paramss.head.head.tpt.tpe - - if (resType != paramType) { - val mess = - s"""JavaBean contained setter for field $name that had a mismatching type. - | found: $paramType - | expected: $resType""".stripMargin - report.throwError(mess) + val resType: TypeRepr = info.returnTpt.tpe + setter.paramss.head match { + case TypeParamClause(params: List[TypeDef]) => report.throwError(s"JavaBean setter for field $name has type parameters") + case TermParamClause(head :: _) => + val tpe = head.tpt.tpe + if (resType != tpe) { + val mess = + s"""JavaBean contained setter for field $name that had a mismatching type. + | found: $tpe + | expected: $resType""".stripMargin + report.throwError(mess) + } } } } @@ -80,7 +83,7 @@ object IsJavaBean { '{new IsJavaBean[T]{}} } - inline given isJavaBean[T] as IsJavaBean[T] = { + inline given isJavaBean[T]: IsJavaBean[T] = { ${ isJavaBeanImpl[T] } } From a8f4c3c8f249560e2fb12cf85f22321cf3cb58f2 Mon Sep 17 00:00:00 2001 From: vincenzobaz Date: Mon, 29 Mar 2021 11:25:51 +0200 Subject: [PATCH 17/56] Fix Scala 3 compilation --- .../scala/com/spotify/scio/coders/instances/JavaCoders.scala | 2 +- scio-core/src/main/scala/com/spotify/scio/io/Taps.scala | 2 +- .../spotify/scio/io/dynamic/syntax/SCollecionSyntax.scala | 2 +- scio-macros/src/main/scala-3/com/spotify/scio/IsJava.scala | 5 ++--- 4 files changed, 5 insertions(+), 6 deletions(-) diff --git a/scio-core/src/main/scala/com/spotify/scio/coders/instances/JavaCoders.scala b/scio-core/src/main/scala/com/spotify/scio/coders/instances/JavaCoders.scala index e712c86b60..074be83587 100644 --- a/scio-core/src/main/scala/com/spotify/scio/coders/instances/JavaCoders.scala +++ b/scio-core/src/main/scala/com/spotify/scio/coders/instances/JavaCoders.scala @@ -140,7 +140,7 @@ trait JavaCoders extends JavaBeanCoders { Coder.xmap(jInstantCoder)(java.sql.Timestamp.from, _.toInstant()) implicit def coderJEnum[E <: java.lang.Enum[E]: ClassTag]: Coder[E] = - Coder.xmap(Coder[String])( + Coder.xmap(Coder.stringCoder)( value => java.lang.Enum.valueOf(ScioUtil.classOf[E], value), _.name ) diff --git a/scio-core/src/main/scala/com/spotify/scio/io/Taps.scala b/scio-core/src/main/scala/com/spotify/scio/io/Taps.scala index 451636df80..130a4b217d 100644 --- a/scio-core/src/main/scala/com/spotify/scio/io/Taps.scala +++ b/scio-core/src/main/scala/com/spotify/scio/io/Taps.scala @@ -122,7 +122,7 @@ final private class PollingTaps(private[this] val backOff: BackOff) extends Taps } /** Companion object for [[Taps]]. */ -object Taps extends { +object Taps { import TapsSysProps._ /** Default taps algorithm. 
*/ diff --git a/scio-core/src/main/scala/com/spotify/scio/io/dynamic/syntax/SCollecionSyntax.scala b/scio-core/src/main/scala/com/spotify/scio/io/dynamic/syntax/SCollecionSyntax.scala index 94411dc4fc..34cea8f0e4 100644 --- a/scio-core/src/main/scala/com/spotify/scio/io/dynamic/syntax/SCollecionSyntax.scala +++ b/scio-core/src/main/scala/com/spotify/scio/io/dynamic/syntax/SCollecionSyntax.scala @@ -48,7 +48,7 @@ object DynamicSCollectionOps { .withNumShards(numShards) .by(Functions.serializableFn(destinationFn)) .withDestinationCoder(StringUtf8Coder.of()) - .withNaming(Functions.serializableFn { destination: String => + .withNaming(Functions.serializableFn { (destination: String) => FileIO.Write.defaultNaming(s"$destination/part", suffix) }) } diff --git a/scio-macros/src/main/scala-3/com/spotify/scio/IsJava.scala b/scio-macros/src/main/scala-3/com/spotify/scio/IsJava.scala index 744a3a678c..6b475f7627 100644 --- a/scio-macros/src/main/scala-3/com/spotify/scio/IsJava.scala +++ b/scio-macros/src/main/scala-3/com/spotify/scio/IsJava.scala @@ -77,9 +77,8 @@ object IsJavaBean { private def isJavaBeanImpl[T](using Quotes, Type[T]): Expr[IsJavaBean[T]] = { import quotes.reflect._ - val sym = TypeTree.of[T].symbol - // TODO: scala3 - check if symbol is a Java class ? - checkGetterAndSetters(sym) + val sym = TypeRepr.of[T].typeSymbol + if sym.flags.is(Flags.JavaDefined) then checkGetterAndSetters(sym) '{new IsJavaBean[T]{}} } From b99348cdb0a0226dc382edc9fbc1e50132f25535 Mon Sep 17 00:00:00 2001 From: vincenzobaz Date: Wed, 31 Mar 2021 11:03:24 +0200 Subject: [PATCH 18/56] Enable core compilation --- .github/workflows/migration.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/migration.yml b/.github/workflows/migration.yml index bbef522eb4..cb3ed1ecce 100644 --- a/.github/workflows/migration.yml +++ b/.github/workflows/migration.yml @@ -22,7 +22,7 @@ jobs: # - scio-jdbc/test # - scio-extra/compile # - scio-extra/test - # - scio-core/compile + - scio-core/compile # - scio-core/test # - scio-examples/compile # - scio-examples/test From 1d7a02fd997035d68d975af93b6a9f8a41e891f7 Mon Sep 17 00:00:00 2001 From: vincenzobaz Date: Tue, 6 Apr 2021 17:10:32 +0200 Subject: [PATCH 19/56] Upgrade sbt --- project/plugins.sbt | 1 - 1 file changed, 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index cb0a21c56e..5ffb63776d 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -20,7 +20,6 @@ addSbtPlugin("com.thoughtworks.sbt-api-mappings" % "sbt-api-mappings" % "3.0.0") addSbtPlugin("net.virtual-void" % "sbt-dependency-graph" % "0.10.0-RC1") addSbtPlugin("de.heikoseeberger" % "sbt-header" % "5.6.0") addSbtPlugin("com.geirsson" % "sbt-ci-release" % "1.5.7") -addSbtPlugin("ch.epfl.lamp" % "sbt-dotty" % "0.5.3") libraryDependencies ++= Seq( "com.github.os72" % "protoc-jar" % "3.11.4", From 09dc7e77537c8e5395a72f86799b63fd1049b22d Mon Sep 17 00:00:00 2001 From: vincenzobaz Date: Tue, 6 Apr 2021 17:10:39 +0200 Subject: [PATCH 20/56] Upgrade scala --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index f0c3f7d5f1..96dd66ea1d 100644 --- a/build.sbt +++ b/build.sbt @@ -25,7 +25,7 @@ import de.heikoseeberger.sbtheader.CommentCreator ThisBuild / turbo := true -val scala3Version = "3.0.0-RC1" +val scala3Version = "3.0.0-RC2" val algebirdVersion = "0.13.7" val algebraVersion = "2.2.2" val annoy4sVersion = "0.10.0" From dacbcbc0f7363f8144e704208b93a2c6325346ff Mon Sep 17 00:00:00 2001 From: vincenzobaz Date: 
Wed, 7 Apr 2021 10:55:11 +0200 Subject: [PATCH 21/56] Use another form of type annotation to keep both 2.13 and 3 versions happy --- .../scala/com/spotify/scio/schemas/SchemaMaterializer.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scio-core/src/main/scala/com/spotify/scio/schemas/SchemaMaterializer.scala b/scio-core/src/main/scala/com/spotify/scio/schemas/SchemaMaterializer.scala index 69183e32da..0d460ebd13 100644 --- a/scio-core/src/main/scala/com/spotify/scio/schemas/SchemaMaterializer.scala +++ b/scio-core/src/main/scala/com/spotify/scio/schemas/SchemaMaterializer.scala @@ -150,7 +150,7 @@ object SchemaMaterializer { schema .toList(v) .asScala - .map[schema.schema.Repr](dispatchEncode(schema.schema, fieldType.getCollectionElementType)) + .map(dispatchEncode(schema.schema, fieldType.getCollectionElementType): A => schema.schema.Repr) .asJava private def encode[F[_, _], A, B](schema: MapType[F, A, B], fieldType: BFieldType)( From e3ce7c7ffa54f005f630de08f3ee779b1e1708f6 Mon Sep 17 00:00:00 2001 From: vincenzobaz Date: Wed, 7 Apr 2021 10:55:36 +0200 Subject: [PATCH 22/56] Remove widen https://github.com/lampepfl/dotty/pull/11569 --- .../src/main/scala-3/com/spotify/scio/DerivationUtils.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scio-macros/src/main/scala-3/com/spotify/scio/DerivationUtils.scala b/scio-macros/src/main/scala-3/com/spotify/scio/DerivationUtils.scala index 08294ac91d..f7dc1eb80e 100644 --- a/scio-macros/src/main/scala-3/com/spotify/scio/DerivationUtils.scala +++ b/scio-macros/src/main/scala-3/com/spotify/scio/DerivationUtils.scala @@ -27,12 +27,12 @@ object DerivationUtils { case _ => Nil } - inline given summonAllF[F[_], T <: Tuple]: Widen[T] = { + inline given summonAllF[F[_], T <: Tuple]: T = { val res = inline erasedValue[T] match { case _: EmptyTuple => EmptyTuple case _: (t *: ts) => summonInline[F[t]] *: summonAllF[F, ts] } - res.asInstanceOf[Widen[T]] + res.asInstanceOf[T] } } From 285640b042b1a0a7025fe6019b5faa82206b09e3 Mon Sep 17 00:00:00 2001 From: vincenzobaz Date: Wed, 7 Apr 2021 10:56:33 +0200 Subject: [PATCH 23/56] Adapt slash syntax everywhere and start translating withDottyCompat to new forms --- build.sbt | 312 +++++++++++++++++++++++++++++++++--------------------- 1 file changed, 191 insertions(+), 121 deletions(-) diff --git a/build.sbt b/build.sbt index 96dd66ea1d..6213661bb0 100644 --- a/build.sbt +++ b/build.sbt @@ -276,7 +276,7 @@ lazy val assemblySettings = Seq( lazy val macroSettings = Def.settings( libraryDependencies ++= { - if (!isDotty.value) + if (!scalaVersion.value.startsWith("3")) Seq("org.scala-lang" % "scala-reflect" % scalaVersion.value) else Nil }, @@ -397,12 +397,10 @@ lazy val `scio-core`: Project = project (ThisBuild / baseDirectory).value / "build.sbt", (ThisBuild / baseDirectory).value / "version.sbt" ), + // Java dependencies libraryDependencies ++= Seq( "com.esotericsoftware" % "kryo-shaded" % kryoVersion, "com.fasterxml.jackson.core" % "jackson-databind" % jacksonVersion, - "com.fasterxml.jackson.module" %% "jackson-module-scala" % jacksonVersion, - "com.github.alexarchambault" %% "case-app" % caseappVersion, - "com.github.alexarchambault" %% "case-app-annotations" % caseappVersion, "com.github.ben-manes.caffeine" % "caffeine" % caffeineVersion % "provided", "com.google.api-client" % "google-api-client" % googleClientsVersion, "com.google.apis" % "google-api-services-dataflow" % googleApiServicesDataflow, @@ -413,9 +411,6 @@ lazy val `scio-core`: Project = project 
"com.google.protobuf" % "protobuf-java" % protobufVersion, "com.twitter" % "chill-java" % chillVersion, "com.twitter" % "chill-protobuf" % chillVersion, - "com.twitter" %% "algebird-core" % algebirdVersion, - "com.twitter" %% "chill" % chillVersion, - "com.twitter" %% "chill-algebird" % chillVersion, "commons-io" % "commons-io" % commonsIoVersion, "io.grpc" % "grpc-auth" % grpcVersion, "io.grpc" % "grpc-core" % grpcVersion, @@ -424,7 +419,6 @@ lazy val `scio-core`: Project = project "io.grpc" % "grpc-stub" % grpcVersion, "io.netty" % "netty-handler" % nettyVersion, "joda-time" % "joda-time" % jodaTimeVersion, - ("me.lyh" %% "protobuf-generic" % protobufGenericVersion), "org.apache.avro" % "avro" % avroVersion, "org.apache.beam" % "beam-runners-core-construction-java" % beamVersion, "org.apache.beam" % "beam-runners-google-cloud-dataflow-java" % beamVersion % Provided, @@ -442,23 +436,43 @@ lazy val `scio-core`: Project = project "org.apache.beam" % "beam-vendor-guava-26_0-jre" % beamVendorVersion, "org.apache.commons" % "commons-compress" % commonsCompressVersion, "org.apache.commons" % "commons-math3" % commonsMath3Version, - "org.scalatest" %% "scalatest" % scalatestVersion % Test, - "org.slf4j" % "slf4j-api" % slf4jVersion, - "org.typelevel" %% "algebra" % algebraVersion, - "org.scala-lang.modules" %% "scala-collection-compat" % scalaCollectionCompatVersion - ).map(_.withDottyCompat(scalaVersion.value)), + "org.slf4j" % "slf4j-api" % slf4jVersion + ), + // Scala dependencies not ported to Scala 3 yet + libraryDependencies ++= Seq( + "com.fasterxml.jackson.module" %% "jackson-module-scala" % jacksonVersion, + "com.github.alexarchambault" %% "case-app" % caseappVersion, + "com.github.alexarchambault" %% "case-app-annotations" % caseappVersion, + "com.twitter" %% "algebird-core" % algebirdVersion, + "com.twitter" %% "chill" % chillVersion, + "com.twitter" %% "chill-algebird" % chillVersion, + "me.lyh" %% "protobuf-generic" % protobufGenericVersion, + "org.scala-lang.modules" %% "scala-collection-compat" % scalaCollectionCompatVersion, + "org.typelevel" %% "algebra" % algebraVersion + ).map(_.cross(CrossVersion.for3Use2_13)), + // Scala dependencies available for 2.12, 2.13 and 3 + libraryDependencies ++= Seq( + "org.scalatest" %% "scalatest" % scalatestVersion % Test, + ), buildInfoKeys := Seq[BuildInfoKey](scalaVersion, version, "beamVersion" -> beamVersion), buildInfoPackage := "com.spotify.scio", libraryDependencies ++= { - if (!isDotty.value) + if (!scalaVersion.value.startsWith("3")) Seq( "com.chuusai" %% "shapeless" % shapelessVersion, "com.propensive" %% "magnolia" % magnoliaVersion ) else Nil }, + libraryDependencies ++= { + if (scalaVersion.value.startsWith("2.12")) + Seq( + "org.scala-lang.modules" %% "scala-collection-compat" % scalaCollectionCompatVersion, + ) + else Nil + }, scalacOptions ++= { - if (isDotty.value) Seq("-source:3.0-migration") else Nil + if (scalaVersion.value.startsWith("3")) Seq("-source:3.0-migration") else Nil }, compileOrder := CompileOrder.JavaThenScala, ) @@ -479,14 +493,14 @@ lazy val `scio-sql`: Project = project .settings( description := "Scio - SQL extension", libraryDependencies ++= Seq( - "org.scala-lang.modules" %% "scala-collection-compat" % scalaCollectionCompatVersion, + //"org.scala-lang.modules" %% "scala-collection-compat" % scalaCollectionCompatVersion, "org.apache.beam" % "beam-sdks-java-core" % beamVersion, "org.apache.beam" % "beam-sdks-java-extensions-sql" % beamVersion, "org.apache.commons" % "commons-lang3" % commonsLang3Version, 
"org.apache.beam" % "beam-vendor-calcite-1_20_0" % beamVendorVersion - ).map(_.withDottyCompat(scalaVersion.value)), + ), scalacOptions ++= { - if (isDotty.value) Seq("-source:3.0-migration") else Nil + if (scalaVersion.value.startsWith("3")) Seq("-source:3.0-migration") else Nil }, Test / compileOrder := CompileOrder.JavaThenScala ) @@ -506,38 +520,45 @@ lazy val `scio-test`: Project = project .settings(macroSettings) .settings( description := "Scio helpers for ScalaTest", + // Java dependencies libraryDependencies ++= Seq( - "org.scala-lang.modules" %% "scala-collection-compat" % scalaCollectionCompatVersion, "org.apache.beam" % "beam-runners-direct-java" % beamVersion, "org.apache.beam" % "beam-sdks-java-io-google-cloud-platform" % beamVersion, "org.apache.beam" % "beam-runners-google-cloud-dataflow-java" % beamVersion % "test,it", "org.apache.beam" % "beam-sdks-java-core" % beamVersion % "test", "org.apache.beam" % "beam-sdks-java-core" % beamVersion % "test" classifier "tests", - "org.scalatest" %% "scalatest" % scalatestVersion, - "org.scalatestplus" %% "scalatestplus-scalacheck" % scalatestplusVersion % "test,it", - "org.scalacheck" %% "scalacheck" % scalacheckVersion % "test,it", - "com.spotify" %% "magnolify-datastore" % magnolifyVersion % "it", - "com.spotify" %% "magnolify-guava" % magnolifyVersion, // DataFlow testing requires junit and hamcrest "org.hamcrest" % "hamcrest-core" % hamcrestVersion, "org.hamcrest" % "hamcrest-library" % hamcrestVersion, - // Our BloomFilters are Algebird Monoids and hence uses tests from Algebird Test - "com.twitter" %% "algebird-test" % algebirdVersion % "test", "com.spotify" % "annoy" % annoyVersion % "test", "com.spotify.sparkey" % "sparkey" % sparkeyVersion % "test", "com.novocode" % "junit-interface" % junitInterfaceVersion, "junit" % "junit" % junitVersion % "test", - "com.lihaoyi" %% "pprint" % "0.6.5", - "com.chuusai" %% "shapeless" % shapelessVersion, "com.google.api.grpc" % "proto-google-cloud-bigtable-v2" % generatedGrpcBetaVersion, "com.google.protobuf" % "protobuf-java" % protobufVersion, - "com.twitter" %% "chill" % chillVersion, "commons-io" % "commons-io" % commonsIoVersion, "org.apache.beam" % "beam-sdks-java-core" % beamVersion, "org.hamcrest" % "hamcrest" % hamcrestVersion, + ), + // Scala dependencies available for 2.12, 2.13 and 3 + libraryDependencies ++= Seq( + //"org.scala-lang.modules" %% "scala-collection-compat" % scalaCollectionCompatVersion, + "org.scalatest" %% "scalatest" % scalatestVersion, + "org.scalacheck" %% "scalacheck" % scalacheckVersion % "test,it", + ), + // Scala dependencies not ported to Scala 3 yet + libraryDependencies ++= Seq( + "org.scalatestplus" %% "scalatestplus-scalacheck" % scalatestplusVersion % "test,it", + "com.spotify" %% "magnolify-datastore" % magnolifyVersion % "it", + "com.spotify" %% "magnolify-guava" % magnolifyVersion, + // Our BloomFilters are Algebird Monoids and hence uses tests from Algebird Test + "com.twitter" %% "algebird-test" % algebirdVersion % "test", + "com.lihaoyi" %% "pprint" % "0.6.5", + "com.chuusai" %% "shapeless" % shapelessVersion, + "com.twitter" %% "chill" % chillVersion, "org.scalactic" %% "scalactic" % "3.2.8", "com.propensive" %% "magnolia" % magnoliaVersion - ).map(_.withDottyCompat(scalaVersion.value)), + ).map(_.cross(CrossVersion.for3Use2_13)), Test / compileOrder := CompileOrder.JavaThenScala, Test / testGrouping := splitTests( (Test / definedTests).value, @@ -545,7 +566,7 @@ lazy val `scio-test`: Project = project (Test / forkOptions).value ), 
scalacOptions ++= {
-      if (isDotty.value) Seq("-source:3.0-migration") else Nil
+      if (scalaVersion.value.startsWith("3")) Seq("-source:3.0-migration") else Nil
     },
   )
   .configs(IntegrationTest)
@@ -562,13 +583,15 @@ lazy val `scio-macros`: Project = project
   .settings(macroSettings)
   .settings(
     description := "Scio macros",
+    // Java dependencies
     libraryDependencies ++= Seq(
       "com.esotericsoftware" % "kryo-shaded" % kryoVersion,
       "org.apache.beam" % "beam-sdks-java-extensions-sql" % beamVersion,
       "org.apache.avro" % "avro" % avroVersion
     ),
+    // Scala 2 dependencies
     libraryDependencies ++= {
-      if (!isDotty.value)
+      if (!scalaVersion.value.startsWith("3"))
         Seq(
           "com.chuusai" %% "shapeless" % shapelessVersion,
           "com.propensive" %% "magnolia" % magnoliaVersion
@@ -587,25 +610,29 @@ lazy val `scio-avro`: Project = project
   .settings(itSettings)
   .settings(
     description := "Scio add-on for working with Avro",
+    // Java dependencies
     libraryDependencies ++= Seq(
-      "org.scala-lang.modules" %% "scala-collection-compat" % scalaCollectionCompatVersion,
-      "me.lyh" %% "protobuf-generic" % protobufGenericVersion,
       "org.apache.beam" % "beam-vendor-guava-26_0-jre" % beamVendorVersion,
       "org.apache.beam" % "beam-sdks-java-io-google-cloud-platform" % beamVersion,
       "org.apache.beam" % "beam-sdks-java-core" % beamVersion,
-      "com.twitter" %% "chill" % chillVersion,
       "com.google.protobuf" % "protobuf-java" % protobufVersion,
       "org.apache.avro" % "avro" % avroVersion exclude ("com.thoughtworks.paranamer", "paranamer"),
       "org.slf4j" % "slf4j-api" % slf4jVersion,
       "org.slf4j" % "slf4j-simple" % slf4jVersion % "test,it",
-      "org.scalatest" %% "scalatest" % scalatestVersion % "test,it",
+    ),
+    // Scala dependencies not ported to Scala 3 yet
+    libraryDependencies ++= Seq(
+      //"org.scala-lang.modules" %% "scala-collection-compat" % scalaCollectionCompatVersion,
+      "me.lyh" %% "protobuf-generic" % protobufGenericVersion,
+      "com.twitter" %% "chill" % chillVersion,
+      "org.scalatest" %% "scalatest" % scalatestVersion % "test,it",
       "org.scalatestplus" %% "scalatestplus-scalacheck" % scalatestplusVersion % "test,it",
       "org.scalacheck" %% "scalacheck" % scalacheckVersion % "test,it",
       "com.spotify" %% "magnolify-cats" % magnolifyVersion % "test",
       "com.spotify" %% "magnolify-scalacheck" % magnolifyVersion % "test"
-    ).map(_.withDottyCompat(scalaVersion.value)),
+    ).map(_.cross(CrossVersion.for3Use2_13)),
     scalacOptions ++= {
-      if (isDotty.value) Seq("-source:3.0-migration") else Nil
+      if (scalaVersion.value.startsWith("3")) Seq("-source:3.0-migration") else Nil
    },
  )
  .dependsOn(
@@ -622,6 +649,7 @@ lazy val `scio-google-cloud-platform`: Project = project
  .settings(beamRunnerSettings)
  .settings(
    description := "Scio add-on for Google Cloud Platform",
+    // Java dependencies
    libraryDependencies ++= Seq(
      "com.google.cloud" % "google-cloud-spanner" % googleCloudSpannerVersion excludeAll (
        ExclusionRule(organization = "io.grpc")
      ),
      "com.google.cloud.bigtable" % "bigtable-client-core" % bigtableClientVersion excludeAll (
        ExclusionRule(organization = "io.grpc")
      ),
-      "com.chuusai" %% "shapeless" % shapelessVersion,
      "com.google.api-client" % "google-api-client" % googleClientsVersion,
      "com.google.api.grpc" % "proto-google-cloud-bigquerystorage-v1beta1" % "0.98.0",
      "com.google.api.grpc" % "proto-google-cloud-bigtable-admin-v2" % generatedGrpcBetaVersion,
@@ -648,9 +675,6 @@ lazy val `scio-google-cloud-platform`: Project = project
      "com.google.http-client" % "google-http-client-jackson2" %
googleHttpClientsVersion, "com.google.http-client" % "google-http-client" % googleHttpClientsVersion, "com.google.protobuf" % "protobuf-java" % protobufVersion, - "com.spotify" %% "magnolify-cats" % magnolifyVersion % "test", - "com.spotify" %% "magnolify-scalacheck" % magnolifyVersion % "test", - "com.twitter" %% "chill" % chillVersion, "commons-io" % "commons-io" % commonsIoVersion, "joda-time" % "joda-time" % jodaTimeVersion, "junit" % "junit" % junitVersion % "test", @@ -661,12 +685,19 @@ lazy val `scio-google-cloud-platform`: Project = project "org.apache.beam" % "beam-vendor-guava-26_0-jre" % beamVendorVersion, "org.hamcrest" % "hamcrest-core" % hamcrestVersion % "test,it", "org.hamcrest" % "hamcrest-library" % hamcrestVersion % "test", + "org.slf4j" % "slf4j-api" % slf4jVersion, + "org.slf4j" % "slf4j-simple" % slf4jVersion % "test,it" + ), + // Scala dependencies not ported to Scala 3 yet + libraryDependencies ++= Seq( + "com.chuusai" %% "shapeless" % shapelessVersion, + "com.spotify" %% "magnolify-cats" % magnolifyVersion % "test", + "com.spotify" %% "magnolify-scalacheck" % magnolifyVersion % "test", + "com.twitter" %% "chill" % chillVersion, "org.scalacheck" %% "scalacheck" % scalacheckVersion % "test,it", "org.scalatest" %% "scalatest" % scalatestVersion % "test,it", "org.scalatestplus" %% "scalatestplus-scalacheck" % scalatestplusVersion % "test,it", - "org.slf4j" % "slf4j-api" % slf4jVersion, - "org.slf4j" % "slf4j-simple" % slf4jVersion % "test,it" - ).map(_.withDottyCompat(scalaVersion.value)), + ).map(_.cross(CrossVersion.for3Use2_13)), compileOrder := CompileOrder.JavaThenScala, // required for Scala 3 ) .dependsOn( @@ -684,23 +715,27 @@ lazy val `scio-cassandra3`: Project = project .settings(itSettings) .settings( description := "Scio add-on for Apache Cassandra 3.x", + // Java dependencies libraryDependencies ++= Seq( - "org.scala-lang.modules" %% "scala-collection-compat" % scalaCollectionCompatVersion, "com.google.protobuf" % "protobuf-java" % protobufVersion, "com.google.guava" % "guava" % guavaVersion, - "com.twitter" %% "chill" % chillVersion, "com.datastax.cassandra" % "cassandra-driver-core" % "3.11.0", ("org.apache.cassandra" % "cassandra-all" % "3.11.10") .exclude("ch.qos.logback", "logback-classic") .exclude("org.slf4j", "log4j-over-slf4j"), "org.apache.hadoop" % "hadoop-common" % hadoopVersion, "org.apache.hadoop" % "hadoop-mapreduce-client-core" % hadoopVersion, - "org.scalatest" %% "scalatest" % scalatestVersion % Test, "org.apache.beam" % "beam-sdks-java-core" % beamVersion % Test, "com.esotericsoftware" % "kryo-shaded" % kryoVersion, "com.google.guava" % "guava" % guavaVersion, "com.twitter" % "chill-java" % chillVersion - ).map(_.withDottyCompat(scalaVersion.value)) + ), + // Scala dependencies available for 2.12, 2.13 and 3 + libraryDependencies ++= Seq( + //"org.scala-lang.modules" %% "scala-collection-compat" % scalaCollectionCompatVersion, + ("com.twitter" %% "chill" % chillVersion).cross(CrossVersion.for3Use2_13), + "org.scalatest" %% "scalatest" % scalatestVersion % Test, + ) ) .dependsOn( `scio-core`, @@ -714,8 +749,8 @@ lazy val `scio-elasticsearch6`: Project = project .settings(publishSettings) .settings( description := "Scio add-on for writing to Elasticsearch", + // Java dependencies libraryDependencies ++= Seq( - "org.scala-lang.modules" %% "scala-collection-compat" % scalaCollectionCompatVersion, "org.apache.beam" % "beam-vendor-guava-26_0-jre" % beamVendorVersion, "org.apache.beam" % "beam-sdks-java-core" % beamVersion, "joda-time" % 
"joda-time" % jodaTimeVersion, @@ -723,7 +758,8 @@ lazy val `scio-elasticsearch6`: Project = project "org.elasticsearch" % "elasticsearch" % elasticsearch6Version, "org.elasticsearch" % "elasticsearch-x-content" % elasticsearch6Version, "org.elasticsearch.client" % "transport" % elasticsearch6Version - ).map(_.withDottyCompat(scalaVersion.value)), + ), + //libraryDependencies += "org.scala-lang.modules" %% "scala-collection-compat" % scalaCollectionCompatVersion, compileOrder := CompileOrder.JavaThenScala, // required for Scala 3 ) .dependsOn( @@ -737,8 +773,8 @@ lazy val `scio-elasticsearch7`: Project = project .settings(publishSettings) .settings( description := "Scio add-on for writing to Elasticsearch", + // Java dependencies libraryDependencies ++= Seq( - "org.scala-lang.modules" %% "scala-collection-compat" % scalaCollectionCompatVersion, "org.apache.beam" % "beam-vendor-guava-26_0-jre" % beamVendorVersion, "org.apache.beam" % "beam-sdks-java-core" % beamVersion, "joda-time" % "joda-time" % jodaTimeVersion, @@ -749,7 +785,8 @@ lazy val `scio-elasticsearch7`: Project = project "org.elasticsearch.client" % "elasticsearch-rest-high-level-client" % elasticsearch7Version, "org.apache.httpcomponents" % "httpcore" % httpCoreVersion, "org.elasticsearch" % "elasticsearch" % elasticsearch7Version - ).map(_.withDottyCompat(scalaVersion.value)), + ), + //libraryDependencies += "org.scala-lang.modules" %% "scala-collection-compat" % scalaCollectionCompatVersion, compileOrder := CompileOrder.JavaThenScala, // required for Scala 3 ) .dependsOn( @@ -765,8 +802,8 @@ lazy val `scio-extra`: Project = project .settings(macroSettings) .settings( description := "Scio extra utilities", + // Java dependencies libraryDependencies ++= Seq( - "org.scala-lang.modules" %% "scala-collection-compat" % scalaCollectionCompatVersion, "org.apache.beam" % "beam-sdks-java-core" % beamVersion, "org.apache.beam" % "beam-sdks-java-io-google-cloud-platform" % beamVersion, "org.apache.beam" % "beam-sdks-java-extensions-sorter" % beamVersion, @@ -776,24 +813,28 @@ lazy val `scio-extra`: Project = project "org.apache.avro" % "avro" % avroVersion, "com.spotify" % "annoy" % annoyVersion, "com.spotify.sparkey" % "sparkey" % sparkeyVersion, - "com.twitter" %% "algebird-core" % algebirdVersion, "info.debatty" % "java-lsh" % javaLshVersion, + "com.github.ben-manes.caffeine" % "caffeine" % caffeineVersion % "test", + "org.slf4j" % "slf4j-api" % slf4jVersion, + "joda-time" % "joda-time" % jodaTimeVersion, + "net.java.dev.jna" % "jna" % jnaVersion, + "org.apache.beam" % "beam-vendor-guava-26_0-jre" % beamVendorVersion, + ), + // Scala dependencies not ported to Scala 3 yet + libraryDependencies ++= Seq( + //"org.scala-lang.modules" %% "scala-collection-compat" % scalaCollectionCompatVersion, + "com.twitter" %% "algebird-core" % algebirdVersion, "net.pishen" %% "annoy4s" % annoy4sVersion, "org.scalanlp" %% "breeze" % breezeVersion, - "com.github.ben-manes.caffeine" % "caffeine" % caffeineVersion % "test", "com.nrinaudo" %% "kantan.csv" % kantanCsvVersion, - "org.slf4j" % "slf4j-api" % slf4jVersion, "org.scalatest" %% "scalatest" % scalatestVersion % "test", "org.scalacheck" %% "scalacheck" % scalacheckVersion % "test", "com.chuusai" %% "shapeless" % shapelessVersion, - "joda-time" % "joda-time" % jodaTimeVersion, - "net.java.dev.jna" % "jna" % jnaVersion, - "org.apache.beam" % "beam-vendor-guava-26_0-jre" % beamVendorVersion, "org.typelevel" %% "algebra" % algebraVersion, "io.circe" %% "circe-core" % circeVersion, "io.circe" %% 
"circe-generic" % circeVersion, "io.circe" %% "circe-parser" % circeVersion - ), + ).map(_.cross(CrossVersion.for3Use2_13)), Compile / sourceDirectories := (Compile / sourceDirectories).value .filterNot(_.getPath.endsWith("/src_managed/main")), Compile / managedSourceDirectories := (Compile / managedSourceDirectories).value @@ -816,6 +857,7 @@ lazy val `scio-jdbc`: Project = project .settings(publishSettings) .settings( description := "Scio add-on for JDBC", + // Java dependencies libraryDependencies ++= Seq( "org.apache.beam" % "beam-sdks-java-core" % beamVersion, "org.apache.beam" % "beam-sdks-java-io-jdbc" % beamVersion @@ -841,24 +883,17 @@ lazy val `scio-parquet`: Project = project }.value, javacOptions ++= Seq("-s", (sourceManaged.value / "main").toString), description := "Scio add-on for Parquet", + // Java dependencies libraryDependencies ++= Seq( - "org.scala-lang.modules" %% "scala-collection-compat" % scalaCollectionCompatVersion, - "me.lyh" %% "parquet-avro" % parquetExtraVersion excludeAll ( - // parquet-avro depends on avro 1.10.x - ExclusionRule("org.apache.avro", "avro"), - ExclusionRule("org.apache.avro", "avro-compiler") - ), "org.apache.avro" % "avro" % avroVersion, "org.apache.avro" % "avro-compiler" % avroVersion, "me.lyh" % "parquet-tensorflow" % parquetExtraVersion, "com.google.cloud.bigdataoss" % "gcs-connector" % s"hadoop2-$bigdataossVersion", - "com.spotify" %% "magnolify-parquet" % magnolifyVersion, "org.apache.beam" % "beam-sdks-java-io-hadoop-format" % beamVersion, "org.apache.hadoop" % "hadoop-client" % hadoopVersion, "org.apache.parquet" % "parquet-avro" % parquetVersion exclude ( "org.apache.avro", "avro" ), - "com.twitter" %% "chill" % chillVersion, "org.apache.beam" % "beam-sdks-java-core" % beamVersion, "org.apache.beam" % "beam-sdks-java-io-hadoop-common" % beamVersion, "org.apache.hadoop" % "hadoop-common" % hadoopVersion, @@ -867,7 +902,18 @@ lazy val `scio-parquet`: Project = project "org.apache.parquet" % "parquet-common" % parquetVersion, "org.apache.parquet" % "parquet-hadoop" % parquetVersion, "org.slf4j" % "slf4j-api" % slf4jVersion - ).map(_.withDottyCompat(scalaVersion.value)), + ), + // Scala dependencies not ported to Scala 3 yet + libraryDependencies ++= Seq( + //"org.scala-lang.modules" %% "scala-collection-compat" % scalaCollectionCompatVersion, + "me.lyh" %% "parquet-avro" % parquetExtraVersion excludeAll ( + // parquet-avro depends on avro 1.10.x + ExclusionRule("org.apache.avro", "avro"), + ExclusionRule("org.apache.avro", "avro-compiler") + ), + "com.spotify" %% "magnolify-parquet" % magnolifyVersion, + "com.twitter" %% "chill" % chillVersion, + ).map(_.cross(CrossVersion.for3Use2_13)), compileOrder := CompileOrder.JavaThenScala ) .dependsOn( @@ -889,24 +935,28 @@ lazy val `scio-tensorflow`: Project = project .filterNot(_.getPath.endsWith("/src_managed/main")), Compile / managedSourceDirectories := (Compile / managedSourceDirectories).value .filterNot(_.getPath.endsWith("/src_managed/main")), + // Java dependencies libraryDependencies ++= Seq( - "org.scala-lang.modules" %% "scala-collection-compat" % scalaCollectionCompatVersion, "org.apache.beam" % "beam-sdks-java-core" % beamVersion, "org.tensorflow" % "tensorflow-core-platform" % tensorFlowVersion, "org.apache.commons" % "commons-compress" % commonsCompressVersion, - "com.spotify" %% "featran-core" % featranVersion, - "com.spotify" %% "featran-scio" % featranVersion, - "com.spotify" %% "featran-tensorflow" % featranVersion, "com.spotify" % "zoltar-api" % zoltarVersion, "com.spotify" 
% "zoltar-tensorflow" % zoltarVersion, "org.slf4j" % "slf4j-api" % slf4jVersion, - "com.spotify" %% "magnolify-tensorflow" % magnolifyVersion % Test, "com.spotify" % "zoltar-core" % zoltarVersion, "org.apache.beam" % "beam-vendor-guava-26_0-jre" % beamVendorVersion - ).map(_.withDottyCompat(scalaVersion.value)), + ), + // Scala dependencies not ported to Scala 3 yet + libraryDependencies ++= Seq( + //"org.scala-lang.modules" %% "scala-collection-compat" % scalaCollectionCompatVersion, + "com.spotify" %% "featran-core" % featranVersion, + "com.spotify" %% "featran-scio" % featranVersion, + "com.spotify" %% "featran-tensorflow" % featranVersion, + "com.spotify" %% "magnolify-tensorflow" % magnolifyVersion % Test, + ).map(_.cross(CrossVersion.for3Use2_13)), compileOrder := CompileOrder.JavaThenScala, scalacOptions ++= { - if (isDotty.value) Seq("-source:3.0-migration") else Nil // Easily fixable + if (scalaVersion.value.startsWith("3")) Seq("-source:3.0-migration") else Nil // Easily fixable }, ) .dependsOn( @@ -923,10 +973,8 @@ lazy val `scio-schemas`: Project = project .settings( description := "Avro/Proto schemas for testing", publish / skip := true, - libraryDependencies ++= Seq( - "org.scala-lang.modules" %% "scala-collection-compat" % scalaCollectionCompatVersion, - "org.apache.avro" % "avro" % avroVersion - ).map(_.withDottyCompat(scalaVersion.value)), + //libraryDependencies +="org.scala-lang.modules" %% "scala-collection-compat" % scalaCollectionCompatVersion, + libraryDependencies += "org.apache.avro" % "avro" % avroVersion, Compile / sourceDirectories := (Compile / sourceDirectories).value .filterNot(_.getPath.endsWith("/src_managed/main")), Compile / managedSourceDirectories := (Compile / managedSourceDirectories).value @@ -944,8 +992,8 @@ lazy val `scio-examples`: Project = project .settings(macroSettings) .settings( publish / skip := true, + // Java dependencies libraryDependencies ++= Seq( - "org.scala-lang.modules" %% "scala-collection-compat" % scalaCollectionCompatVersion, "org.apache.beam" % "beam-sdks-java-core" % beamVersion, "org.apache.beam" % "beam-sdks-java-io-google-cloud-platform" % beamVersion, "org.apache.beam" % "beam-sdks-java-extensions-google-cloud-platform-core" % beamVersion, @@ -956,19 +1004,11 @@ lazy val `scio-examples`: Project = project "com.google.api.grpc" % "proto-google-cloud-bigtable-v2" % generatedGrpcBetaVersion, "com.google.cloud.sql" % "mysql-socket-factory" % "1.2.2", "com.google.apis" % "google-api-services-bigquery" % googleApiServicesBigQuery, - "com.spotify" %% "magnolify-avro" % magnolifyVersion, - "com.spotify" %% "magnolify-datastore" % magnolifyVersion, - "com.spotify" %% "magnolify-tensorflow" % magnolifyVersion, - "com.spotify" %% "magnolify-bigtable" % magnolifyVersion, "mysql" % "mysql-connector-java" % "8.0.24", "joda-time" % "joda-time" % jodaTimeVersion, "com.github.alexarchambault" %% "case-app" % caseappVersion, "org.slf4j" % "slf4j-api" % slf4jVersion, "org.slf4j" % "slf4j-simple" % slf4jVersion, - "org.scalacheck" %% "scalacheck" % scalacheckVersion % "test", - "com.chuusai" %% "shapeless" % shapelessVersion, - "com.github.alexarchambault" %% "case-app-annotations" % caseappVersion, - "com.github.alexarchambault" %% "case-app-util" % caseappVersion, "com.google.api-client" % "google-api-client" % googleClientsVersion, "com.google.apis" % "google-api-services-pubsub" % s"v1-rev20200713-$googleClientsVersion", "com.google.auth" % "google-auth-library-credentials" % googleAuthVersion, @@ -977,13 +1017,25 @@ lazy val 
`scio-examples`: Project = project "com.google.guava" % "guava" % guavaVersion, "com.google.oauth-client" % "google-oauth-client" % googleOauthClientVersion, "com.google.protobuf" % "protobuf-java" % protobufVersion, - "com.spotify" %% "magnolify-shared" % magnolifyVersion, - "com.twitter" %% "algebird-core" % algebirdVersion, "org.apache.beam" % "beam-sdks-java-extensions-sql" % beamVersion, "org.apache.httpcomponents" % "httpcore" % httpCoreVersion, - "org.elasticsearch" % "elasticsearch" % elasticsearch7Version, - "com.propensive" %% "magnolia" % magnoliaVersion + "org.elasticsearch" % "elasticsearch" % elasticsearch7Version ), + // Scala dependencies not ported to Scala 3 yet + libraryDependencies ++= Seq( + //"org.scala-lang.modules" %% "scala-collection-compat" % scalaCollectionCompatVersion, + "com.spotify" %% "magnolify-avro" % magnolifyVersion, + "com.spotify" %% "magnolify-datastore" % magnolifyVersion, + "com.spotify" %% "magnolify-tensorflow" % magnolifyVersion, + "com.spotify" %% "magnolify-bigtable" % magnolifyVersion, + "org.scalacheck" %% "scalacheck" % scalacheckVersion % "test", + "com.chuusai" %% "shapeless" % shapelessVersion, + "com.github.alexarchambault" %% "case-app-annotations" % caseappVersion, + "com.github.alexarchambault" %% "case-app-util" % caseappVersion, + "com.spotify" %% "magnolify-shared" % magnolifyVersion, + "com.twitter" %% "algebird-core" % algebirdVersion, + "com.propensive" %% "magnolia" % magnoliaVersion + ).map(_.cross(CrossVersion.for3Use2_13)), // exclude problematic sources if we don't have GCP credentials unmanagedSources / excludeFilter := { if (BuildCredentials.exists) { @@ -1023,8 +1075,9 @@ lazy val `scio-repl`: Project = project .settings(macroSettings) .settings( scalacOptions := Scalac.replOptions.value, + // Java dependencies libraryDependencies ++= Seq( - "org.scala-lang.modules" %% "scala-collection-compat" % scalaCollectionCompatVersion, + //"org.scala-lang.modules" %% "scala-collection-compat" % scalaCollectionCompatVersion, "org.apache.beam" % "beam-runners-direct-java" % beamVersion, "org.apache.beam" % "beam-sdks-java-io-google-cloud-platform" % beamVersion, "org.apache.beam" % "beam-sdks-java-extensions-google-cloud-platform-core" % beamVersion, @@ -1039,9 +1092,9 @@ lazy val `scio-repl`: Project = project "org.apache.commons" % "commons-text" % commonsTextVersion, "org.slf4j" % "slf4j-api" % slf4jVersion, "org.slf4j" % "slf4j-simple" % slf4jVersion, - "org.scala-lang" % "scala-compiler" % scalaVersion.value, - "com.nrinaudo" %% "kantan.csv" % kantanCsvVersion + "org.scala-lang" % "scala-compiler" % scalaVersion.value ), + libraryDependencies += ("com.nrinaudo" %% "kantan.csv" % kantanCsvVersion).cross(CrossVersion.for3Use2_13), libraryDependencies ++= { VersionNumber(scalaVersion.value) match { case v if v.matchesSemVer(SemanticSelector("2.12.x")) => @@ -1067,13 +1120,14 @@ lazy val `scio-jmh`: Project = project Jmh / sourceDirectory := (Test / sourceDirectory).value, Jmh / classDirectory := (Test / classDirectory).value, Jmh / dependencyClasspath := (Test / dependencyClasspath).value, + // Java dependencies libraryDependencies ++= directRunnerDependencies ++ Seq( - "org.scala-lang.modules" %% "scala-collection-compat" % scalaCollectionCompatVersion, + //"org.scala-lang.modules" %% "scala-collection-compat" % scalaCollectionCompatVersion, "junit" % "junit" % junitVersion % "test", "org.hamcrest" % "hamcrest-core" % hamcrestVersion % "test", "org.hamcrest" % "hamcrest-library" % hamcrestVersion % "test", "org.slf4j" % 
"slf4j-nop" % slf4jVersion - ).map(_.withDottyCompat(scalaVersion.value)), + ), publish / skip := true ) .dependsOn( @@ -1090,8 +1144,8 @@ lazy val `scio-smb`: Project = project .settings(beamRunnerSettings) .settings( description := "Sort Merge Bucket source/sink implementations for Apache Beam", + // Java dependencies libraryDependencies ++= Seq( - "org.scala-lang.modules" %% "scala-collection-compat" % scalaCollectionCompatVersion, "org.apache.avro" % "avro" % avroVersion, "org.apache.beam" % "beam-sdks-java-core" % beamVersion, "org.apache.beam" % "beam-sdks-java-core" % beamVersion % "it,test" classifier "tests", @@ -1102,9 +1156,6 @@ lazy val `scio-smb`: Project = project "org.apache.avro", "avro" ), "org.apache.parquet" % "parquet-common" % parquetVersion, - "com.spotify" %% "magnolify-parquet" % magnolifyVersion, - // #3260 work around for sorter memory limit until we patch upstream - // "org.apache.beam" % "beam-sdks-java-extensions-sorter" % beamVersion, "org.apache.beam" % "beam-sdks-java-extensions-protobuf" % beamVersion, "com.google.apis" % "google-api-services-bigquery" % googleApiServicesBigQuery, "org.tensorflow" % "tensorflow-core-platform" % tensorFlowVersion, @@ -1116,7 +1167,6 @@ lazy val `scio-smb`: Project = project "org.hamcrest" % "hamcrest-library" % hamcrestVersion % Test, "com.novocode" % "junit-interface" % junitInterfaceVersion % Test, "junit" % "junit" % junitVersion % Test, - "com.chuusai" %% "shapeless" % shapelessVersion, "com.fasterxml.jackson.core" % "jackson-annotations" % jacksonVersion, "com.fasterxml.jackson.core" % "jackson-core" % jacksonVersion, "com.fasterxml.jackson.core" % "jackson-databind" % jacksonVersion, @@ -1125,7 +1175,15 @@ lazy val `scio-smb`: Project = project "org.apache.beam" % "beam-vendor-guava-26_0-jre" % beamVendorVersion, "org.slf4j" % "slf4j-api" % slf4jVersion, "com.github.ben-manes.caffeine" % "caffeine" % caffeineVersion % "provided" - ).map(_.withDottyCompat(scalaVersion.value)), + ), + // Scala dependencies not ported to Scala 3 yet + libraryDependencies ++= Seq( + //"org.scala-lang.modules" %% "scala-collection-compat" % scalaCollectionCompatVersion, + "com.spotify" %% "magnolify-parquet" % magnolifyVersion, + // #3260 work around for sorter memory limit until we patch upstream + // "org.apache.beam" % "beam-sdks-java-extensions-sorter" % beamVersion, + "com.chuusai" %% "shapeless" % shapelessVersion, + ).map(_.cross(CrossVersion.for3Use2_13)), javacOptions ++= { (Compile / sourceManaged).value.mkdirs() Seq("-s", (Compile / sourceManaged).value.getAbsolutePath) @@ -1149,14 +1207,18 @@ lazy val `scio-redis`: Project = project .settings(itSettings) .settings( description := "Scio integration with Redis", + // Java dependencies libraryDependencies ++= Seq( - "org.scala-lang.modules" %% "scala-collection-compat" % scalaCollectionCompatVersion, "com.google.protobuf" % "protobuf-java" % protobufVersion, "com.google.guava" % "guava" % guavaVersion, "org.apache.beam" % "beam-sdks-java-core" % beamVersion, - "org.scalatest" %% "scalatest" % scalatestVersion % Test, "org.apache.beam" % "beam-sdks-java-io-redis" % beamVersion - ).map(_.withDottyCompat(scalaVersion.value)) + ), + // Scala dependencies not ported to Scala 3 yet + libraryDependencies ++= Seq( + //"org.scala-lang.modules" %% "scala-collection-compat" % scalaCollectionCompatVersion, + "org.scalatest" %% "scalatest" % scalatestVersion % Test, + ).map(_.cross(CrossVersion.for3Use2_13)) ) .dependsOn( `scio-core`, @@ -1297,13 +1359,14 @@ lazy val soccoSettings = if 
(sys.env.contains("SOCCO")) { //strict should only be enabled when updating/adding depedencies // ThisBuild / conflictManager := ConflictManager.strict //To update this list we need to check against the dependencies being evicted + +// Java overrides ThisBuild / dependencyOverrides ++= Seq( "org.threeten" % "threetenbp" % "1.4.1", "org.conscrypt" % "conscrypt-openjdk-uber" % "2.2.1", "com.fasterxml.jackson.core" % "jackson-annotations" % jacksonVersion, "com.fasterxml.jackson.core" % "jackson-core" % jacksonVersion, "com.fasterxml.jackson.core" % "jackson-databind" % jacksonVersion, - "com.fasterxml.jackson.module" %% "jackson-module-scala" % jacksonVersion, "com.google.api-client" % "google-api-client" % googleClientsVersion, "com.google.api.grpc" % "proto-google-cloud-datastore-v1" % generatedDatastoreProtoVersion, "com.google.api.grpc" % "proto-google-common-protos" % "1.17.0", @@ -1332,7 +1395,6 @@ ThisBuild / dependencyOverrides ++= Seq( "com.google.oauth-client" % "google-oauth-client-java6" % googleOauthClientVersion, "com.google.protobuf" % "protobuf-java-util" % protobufVersion, "com.google.protobuf" % "protobuf-java" % protobufVersion, - "com.propensive" %% "magnolia" % magnoliaVersion, "com.squareup.okio" % "okio" % "1.13.0", "com.thoughtworks.paranamer" % "paranamer" % "2.8", "commons-cli" % "commons-cli" % "1.2", @@ -1341,9 +1403,6 @@ ThisBuild / dependencyOverrides ++= Seq( "commons-io" % "commons-io" % commonsIoVersion, "commons-lang" % "commons-lang" % "2.6", "commons-logging" % "commons-logging" % "1.2", - "io.circe" %% "circe-core" % circeVersion, - "io.circe" %% "circe-generic" % circeVersion, - "io.circe" %% "circe-parser" % circeVersion, "io.dropwizard.metrics" % "metrics-core" % metricsVersion, "io.dropwizard.metrics" % "metrics-jvm" % metricsVersion, "io.grpc" % "grpc-auth" % grpcVersion, @@ -1394,17 +1453,28 @@ ThisBuild / dependencyOverrides ++= Seq( "org.hamcrest" % "hamcrest-core" % hamcrestVersion, "org.objenesis" % "objenesis" % "2.5.1", "org.ow2.asm" % "asm" % "5.0.4", - "org.scala-lang.modules" %% "scala-collection-compat" % scalaCollectionCompatVersion, - "org.scala-lang.modules" %% "scala-xml" % "1.2.0", - "org.scalacheck" %% "scalacheck" % scalacheckVersion, - "org.scalactic" %% "scalactic" % scalatestVersion, - "org.scalatest" %% "scalatest" % scalatestVersion, "org.slf4j" % "slf4j-api" % slf4jVersion, "org.slf4j" % "slf4j-log4j12" % slf4jVersion, "org.tukaani" % "xz" % "1.8", - "org.typelevel" %% "algebra" % algebraVersion, - "org.typelevel" %% "cats-core" % catsVersion, "org.xerial.snappy" % "snappy-java" % "1.1.4", "org.yaml" % "snakeyaml" % "1.12", - "com.nrinaudo" %% "kantan.codecs" % kantanCodecsVersion +) + +ThisBuild / dependencyOverrides ++= Seq( + "com.fasterxml.jackson.module" %% "jackson-module-scala" % jacksonVersion, + "com.propensive" %% "magnolia" % magnoliaVersion, + "io.circe" %% "circe-core" % circeVersion, + "io.circe" %% "circe-generic" % circeVersion, + "io.circe" %% "circe-parser" % circeVersion, + "org.scala-lang.modules" %% "scala-xml" % "1.2.0", + "com.nrinaudo" %% "kantan.codecs" % kantanCodecsVersion, + //"org.scala-lang.modules" %% "scala-collection-compat" % scalaCollectionCompatVersion +).map(_.cross(CrossVersion.for3Use2_13)) + +ThisBuild / dependencyOverrides ++= Seq( + "org.typelevel" %% "algebra" % algebraVersion, + "org.typelevel" %% "cats-core" % catsVersion, + "org.scalacheck" %% "scalacheck" % scalacheckVersion, + "org.scalactic" %% "scalactic" % scalatestVersion, + "org.scalatest" %% "scalatest" % scalatestVersion ) 
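The three-way split of `dependencyOverrides` above mirrors the split used in the per-module `libraryDependencies`: plain Java artifacts, Scala artifacts that are only published for 2.13 and are consumed from Scala 3 through `CrossVersion.for3Use2_13`, and Scala artifacts that already ship native Scala 3 binaries. As a rough sketch of the mechanism, using an illustrative coordinate (`com.example %% some-lib` is not a real dependency of this build):

// build.sbt sketch: %% appends the Scala binary suffix to the artifact name,
// so on Scala 3 "some-lib" would resolve to some-lib_3 by default.
// for3Use2_13 swaps the suffix so the _2.13 artifact is resolved instead,
// relying on the 2.13/3 binary interoperability guarantee. It replaces
// sbt-dotty's withDottyCompat(scalaVersion.value), which did the same rewrite.
libraryDependencies += ("com.example" %% "some-lib" % "1.0.0")
  .cross(CrossVersion.for3Use2_13)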
From a6ddf9be524606c7f2e8197a205234ad0fca91fe Mon Sep 17 00:00:00 2001 From: vincenzobaz Date: Wed, 7 Apr 2021 10:59:38 +0200 Subject: [PATCH 24/56] Upgrade ci to rc2 --- .github/workflows/migration.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/migration.yml b/.github/workflows/migration.yml index cb3ed1ecce..0b7decf899 100644 --- a/.github/workflows/migration.yml +++ b/.github/workflows/migration.yml @@ -55,4 +55,4 @@ jobs: - name: java 8 setup uses: olafurpg/setup-scala@v10 - name: Compile - run: sbt "++3.0.0-RC1;${{ matrix.task }}" + run: sbt "++3.0.0-RC2;${{ matrix.task }}" From e54b55d8839b169d26af84ddc0f38b6da97f4cb2 Mon Sep 17 00:00:00 2001 From: Maxime Kjaer Date: Thu, 15 Apr 2021 13:20:50 +0200 Subject: [PATCH 25/56] Fix migration warnings in scio-core (#9) --- build.sbt | 3 --- .../scala/com/spotify/scio/ScioContext.scala | 10 +++++----- .../scala/com/spotify/scio/coders/Coder.scala | 16 +++++++++------- .../src/main/scala/com/spotify/scio/io/Tap.scala | 2 +- .../main/scala/com/spotify/scio/io/Taps.scala | 6 ++++-- .../scala/com/spotify/scio/schemas/Schema.scala | 2 +- .../com/spotify/scio/util/ProtobufUtil.scala | 1 + 7 files changed, 21 insertions(+), 19 deletions(-) diff --git a/build.sbt b/build.sbt index 6213661bb0..ea81fb5a7e 100644 --- a/build.sbt +++ b/build.sbt @@ -471,9 +471,6 @@ lazy val `scio-core`: Project = project ) else Nil }, - scalacOptions ++= { - if (scalaVersion.value.startsWith("3")) Seq("-source:3.0-migration") else Nil - }, compileOrder := CompileOrder.JavaThenScala, ) .dependsOn( diff --git a/scio-core/src/main/scala/com/spotify/scio/ScioContext.scala b/scio-core/src/main/scala/com/spotify/scio/ScioContext.scala index fdb49b8c88..3b177afe7f 100644 --- a/scio-core/src/main/scala/com/spotify/scio/ScioContext.scala +++ b/scio-core/src/main/scala/com/spotify/scio/ScioContext.scala @@ -167,7 +167,7 @@ object ContextAndArgs { def parse(args: Array[String]): F[Result] } - final case class DefaultParser[T <: PipelineOptions: ClassTag] private () + final case class DefaultParser[T <: PipelineOptions: ClassTag] private[scio] () extends ArgsParser[Try] { override type ArgsType = Args @@ -213,7 +213,7 @@ object ContextAndArgs { case _ => true } - CaseApp.detailedParseWithHelp[T](customArgs) match { + CaseApp.detailedParseWithHelp[T](customArgs.toIndexedSeq) match { case Left(error) => Failure(new Exception(error.message)) case Right((_, _, help, _)) if help => @@ -245,7 +245,7 @@ object ContextAndArgs { } } - final case class PipelineOptionsParser[T <: PipelineOptions: ClassTag] private () + final case class PipelineOptionsParser[T <: PipelineOptions: ClassTag] private[scio] () extends ArgsParser[Try] { override type ArgsType = T @@ -412,7 +412,7 @@ object ScioContext { } } yield s"--$str($$|=)".r - val patterns = registeredPatterns + "--help($$|=)".r + val patterns = registeredPatterns.union(Set("--help($$|=)".r)) // Split cmdlineArgs into 2 parts, optArgs for PipelineOptions and appArgs for Args val (optArgs, appArgs) = @@ -690,7 +690,7 @@ class ScioContext private[scio] ( BuildInfo.version, BuildInfo.scalaVersion, sc.optionsAs[ApplicationNameOptions].getAppName, - state.toString, + this.state.toString, getBeamMetrics ) diff --git a/scio-core/src/main/scala/com/spotify/scio/coders/Coder.scala b/scio-core/src/main/scala/com/spotify/scio/coders/Coder.scala index e5997e46b1..d04d98e03c 100644 --- a/scio-core/src/main/scala/com/spotify/scio/coders/Coder.scala +++ b/scio-core/src/main/scala/com/spotify/scio/coders/Coder.scala @@ 
-76,19 +76,20 @@ private[scio] object Ref { def unapply[T](c: Ref[T]): Option[(String, Coder[T])] = Option((c.typeName, c.value)) } -final case class RawBeam[T] private (beam: BCoder[T]) extends Coder[T] { +final case class RawBeam[T] private[coders] (beam: BCoder[T]) extends Coder[T] { override def toString: String = s"RawBeam($beam)" } -final case class Beam[T] private (beam: BCoder[T]) extends Coder[T] { +final case class Beam[T] private[coders] (beam: BCoder[T]) extends Coder[T] { override def toString: String = s"Beam($beam)" } -final case class Fallback[T] private (ct: ClassTag[T]) extends Coder[T] { +final case class Fallback[T] private[coders] (ct: ClassTag[T]) extends Coder[T] { override def toString: String = s"Fallback($ct)" } -final case class Transform[A, B] private (c: Coder[A], f: BCoder[A] => Coder[B]) extends Coder[B] { +final case class Transform[A, B] private[coders] (c: Coder[A], f: BCoder[A] => Coder[B]) + extends Coder[B] { override def toString: String = s"Transform($c, $f)" } -final case class Disjunction[T, Id] private ( +final case class Disjunction[T, Id] private[coders] ( typeName: String, idCoder: Coder[Id], id: T => Id, @@ -97,7 +98,7 @@ final case class Disjunction[T, Id] private ( override def toString: String = s"Disjunction($typeName, $coder)" } -final case class Record[T] private ( +final case class Record[T] private[coders] ( typeName: String, cs: Array[(String, Coder[Any])], construct: Seq[Any] => T, @@ -112,7 +113,8 @@ final case class Record[T] private ( } // KV are special in beam and need to be serialized using an instance of KvCoder. -final case class KVCoder[K, V] private (koder: Coder[K], voder: Coder[V]) extends Coder[KV[K, V]] { +final case class KVCoder[K, V] private[coders] (koder: Coder[K], voder: Coder[V]) + extends Coder[KV[K, V]] { override def toString: String = s"KVCoder($koder, $voder)" } diff --git a/scio-core/src/main/scala/com/spotify/scio/io/Tap.scala b/scio-core/src/main/scala/com/spotify/scio/io/Tap.scala index 5a91c1e16f..a57160383e 100644 --- a/scio-core/src/main/scala/com/spotify/scio/io/Tap.scala +++ b/scio-core/src/main/scala/com/spotify/scio/io/Tap.scala @@ -120,7 +120,7 @@ object MaterializeTap { new MaterializeTap(path, CoderMaterializer.beam(context, Coder[T])) } -final case class ClosedTap[T] private (private[scio] val underlying: Tap[T]) { +final case class ClosedTap[T] private[scio] (private[scio] val underlying: Tap[T]) { /** * Get access to the underlying Tap. The ScioContext has to be ran before. 
diff --git a/scio-core/src/main/scala/com/spotify/scio/io/Taps.scala b/scio-core/src/main/scala/com/spotify/scio/io/Taps.scala index 130a4b217d..49e0d16fa3 100644 --- a/scio-core/src/main/scala/com/spotify/scio/io/Taps.scala +++ b/scio-core/src/main/scala/com/spotify/scio/io/Taps.scala @@ -99,7 +99,9 @@ final private class PollingTaps(private[this] val backOff: BackOff) extends Taps import scala.concurrent.ExecutionContext.Implicits.global Future { val sleeper = Sleeper.DEFAULT - do { + var first = true + while (first || BackOffUtils.next(sleeper, backOff)) { + first = false if (polls.nonEmpty) { val tap = if (polls.size > 1) "taps" else "tap" logger.info(s"Polling for ${polls.size} $tap") @@ -112,7 +114,7 @@ final private class PollingTaps(private[this] val backOff: BackOff) extends Taps } polls = pending } - } while (BackOffUtils.next(sleeper, backOff)) + } polls.foreach(p => p.promise.failure(new TapNotAvailableException(p.name))) } } diff --git a/scio-core/src/main/scala/com/spotify/scio/schemas/Schema.scala b/scio-core/src/main/scala/com/spotify/scio/schemas/Schema.scala index 1a4b938de0..29522a57d5 100644 --- a/scio-core/src/main/scala/com/spotify/scio/schemas/Schema.scala +++ b/scio-core/src/main/scala/com/spotify/scio/schemas/Schema.scala @@ -142,7 +142,7 @@ object LogicalType { Some(logicalType.underlying) } -final case class Record[T] private ( +final case class Record[T] private[schemas] ( schemas: Array[(String, Schema[Any])], construct: Seq[Any] => T, destruct: T => Array[Any] diff --git a/scio-core/src/main/scala/com/spotify/scio/util/ProtobufUtil.scala b/scio-core/src/main/scala/com/spotify/scio/util/ProtobufUtil.scala index df59bf2c55..b21b7fba8b 100644 --- a/scio-core/src/main/scala/com/spotify/scio/util/ProtobufUtil.scala +++ b/scio-core/src/main/scala/com/spotify/scio/util/ProtobufUtil.scala @@ -46,6 +46,7 @@ object ProtobufUtil { */ def schemaMetadataOf[T <: Message: ClassTag]: Map[String, AnyRef] = { import me.lyh.protobuf.generic + import me.lyh.protobuf.generic.JsonSchema val schema = generic.Schema .of[Message](classTag[T].asInstanceOf[ClassTag[Message]]) .toJson From 84cb877619da8a1e6f44fdf2da2413a94060e767 Mon Sep 17 00:00:00 2001 From: vincenzobaz Date: Mon, 29 Mar 2021 08:50:14 +0200 Subject: [PATCH 26/56] Add first implementation of macro interpreter --- .../scala-3/com/spotify/scio/schemas/To.scala | 20 +++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/scio-core/src/main/scala-3/com/spotify/scio/schemas/To.scala b/scio-core/src/main/scala-3/com/spotify/scio/schemas/To.scala index 1809cef330..6818028f4c 100644 --- a/scio-core/src/main/scala-3/com/spotify/scio/schemas/To.scala +++ b/scio-core/src/main/scala-3/com/spotify/scio/schemas/To.scala @@ -17,13 +17,33 @@ package com.spotify.scio.schemas +package com.spotify.scio.schemas.Schema import org.apache.beam.sdk.schemas.{SchemaCoder, Schema => BSchema} import scala.compiletime._ import scala.deriving._ import scala.quoted._ + object ToMacro { + + //given optionSchema[T](using t: Schema[T]): Schema[Option[T]] = + + def interpretSchema[T: Type](schemaExpr: Expr[Schema[T]])(using Quotes): Option[Schema[T]] = + schemaExpr match + case '{ Schema.optionSchema[t](using $tSchemaExpr) } => + for + tSchema <- interpretSchema($tSchemaExpr) + yield + Schema.optionSchema(using tSchema).asInstanceOf[Schema[T]] + case '{ Schema.mapSchema[k, v](using $keySchemaExpr, $valueSchemaExpr) } => + for + keySchema <- interpretSchema($keySchemaExpr) + valueSchema <- interpretSchema($valueSchemaExpr) + yield 
Schema.mapSchema(using keySchema, valueSchema).asInstanceOf[Schema[T]] + case _ => None + + def safeImpl[I, O](si: Expr[Schema[I]])(implicit q: Quotes): Expr[To[I, O]] = { ??? } From a3186d6c3f360c5864d81521319607c4b2311b44 Mon Sep 17 00:00:00 2001 From: vincenzobaz Date: Mon, 29 Mar 2021 12:58:27 +0200 Subject: [PATCH 27/56] Correct syntax --- .../src/main/scala-3/com/spotify/scio/schemas/To.scala | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/scio-core/src/main/scala-3/com/spotify/scio/schemas/To.scala b/scio-core/src/main/scala-3/com/spotify/scio/schemas/To.scala index 6818028f4c..49724caed6 100644 --- a/scio-core/src/main/scala-3/com/spotify/scio/schemas/To.scala +++ b/scio-core/src/main/scala-3/com/spotify/scio/schemas/To.scala @@ -27,21 +27,19 @@ import scala.quoted._ object ToMacro { - //given optionSchema[T](using t: Schema[T]): Schema[Option[T]] = - def interpretSchema[T: Type](schemaExpr: Expr[Schema[T]])(using Quotes): Option[Schema[T]] = schemaExpr match case '{ Schema.optionSchema[t](using $tSchemaExpr) } => for - tSchema <- interpretSchema($tSchemaExpr) + tSchema <- interpretSchema(tSchemaExpr) yield Schema.optionSchema(using tSchema).asInstanceOf[Schema[T]] case '{ Schema.mapSchema[k, v](using $keySchemaExpr, $valueSchemaExpr) } => for - keySchema <- interpretSchema($keySchemaExpr) - valueSchema <- interpretSchema($valueSchemaExpr) + keySchema <- interpretSchema(keySchemaExpr) + valueSchema <- interpretSchema(valueSchemaExpr) yield Schema.mapSchema(using keySchema, valueSchema).asInstanceOf[Schema[T]] - case _ => None + case _ => None def safeImpl[I, O](si: Expr[Schema[I]])(implicit q: Quotes): Expr[To[I, O]] = { From e55b1a1b65261a43e2f2017332d89570dd0879e7 Mon Sep 17 00:00:00 2001 From: Maxime Kjaer Date: Tue, 30 Mar 2021 23:36:21 +0200 Subject: [PATCH 28/56] Continue To.safe macro implementation --- .../scala-3/com/spotify/scio/schemas/To.scala | 54 ++++++++++++++----- 1 file changed, 41 insertions(+), 13 deletions(-) diff --git a/scio-core/src/main/scala-3/com/spotify/scio/schemas/To.scala b/scio-core/src/main/scala-3/com/spotify/scio/schemas/To.scala index 49724caed6..7ac8e663ed 100644 --- a/scio-core/src/main/scala-3/com/spotify/scio/schemas/To.scala +++ b/scio-core/src/main/scala-3/com/spotify/scio/schemas/To.scala @@ -27,23 +27,52 @@ import scala.quoted._ object ToMacro { - def interpretSchema[T: Type](schemaExpr: Expr[Schema[T]])(using Quotes): Option[Schema[T]] = - schemaExpr match + def interpretSchema[T: Type](schemaExpr: Expr[Schema[T]])(using Quotes): Option[Schema[T]] = schemaExpr match { + case '{ Schema.stringSchema } => Some(Schema.stringSchema.asInstanceOf[Schema[T]]) + case '{ Schema.byteSchema } => Some(Schema.byteSchema.asInstanceOf[Schema[T]]) + case '{ Schema.bytesSchema } => Some(Schema.bytesSchema.asInstanceOf[Schema[T]]) + case '{ Schema.sortSchema } => Some(Schema.sortSchema.asInstanceOf[Schema[T]]) + case '{ Schema.intSchema } => Some(Schema.intSchema.asInstanceOf[Schema[T]]) + case '{ Schema.longSchema } => Some(Schema.longSchema.asInstanceOf[Schema[T]]) + case '{ Schema.floatSchema } => Some(Schema.floatSchema.asInstanceOf[Schema[T]]) + case '{ Schema.doubleSchema } => Some(Schema.doubleSchema.asInstanceOf[Schema[T]]) + case '{ Schema.bigDecimalSchema } => Some(Schema.bigDecimalSchema.asInstanceOf[Schema[T]]) + case '{ Schema.booleanSchema } => Some(Schema.booleanSchema.asInstanceOf[Schema[T]]) + case '{ Schema.optionSchema[t](using $tSchemaExpr) } => - for - tSchema <- interpretSchema(tSchemaExpr) - yield - 
Schema.optionSchema(using tSchema).asInstanceOf[Schema[T]]
+      for (tSchema <- interpretSchema(tSchemaExpr))
+        yield Schema.optionSchema(using tSchema).asInstanceOf[Schema[T]]
+
     case '{ Schema.mapSchema[k, v](using $keySchemaExpr, $valueSchemaExpr) } =>
-      for
+      for {
         keySchema <- interpretSchema(keySchemaExpr)
         valueSchema <- interpretSchema(valueSchemaExpr)
-      yield Schema.mapSchema(using keySchema, valueSchema).asInstanceOf[Schema[T]]
+      } yield Schema.mapSchema(using keySchema, valueSchema).asInstanceOf[Schema[T]]
+
     case _ => None
+  }
+
+  def safeImpl[I, O](iSchema: Expr[Schema[I]], oSchema: Expr[Schema[O]])(implicit q: Quotes): Expr[To[I, O]] = {
+    import scala.quoted.quotes.reflect.report

-  def safeImpl[I, O](si: Expr[Schema[I]])(implicit q: Quotes): Expr[To[I, O]] = {
-    ???
+    (interpretSchema(iSchema), interpretSchema(oSchema)) match {
+      case (None, None) => report.throwError(
+        s"""
+           |Could not interpret input schema:
+           |  ${iSchema.show}
+           |Could not interpret output schema:
+           |  ${oSchema.show}
+           |""".stripMargin
+      )
+      case (None, _) => report.throwError("Could not interpret input schema: " + iSchema.show)
+      case (_, None) => report.throwError("Could not interpret output schema: " + oSchema.show)
+      case (Some(sIn), Some(sOut)) =>
+        val schemaOut: BSchema = SchemaMaterializer.fieldType(sOut).getRowSchema()
+        val schemaIn: BSchema = SchemaMaterializer.fieldType(sIn).getRowSchema()
+        To.checkCompatibility(schemaIn, schemaOut)('{ To.unchecked[I, O] })
+          .fold(message => report.throwError(message), identity)
+    }
   }
 }
@@ -54,7 +83,6 @@ trait ToMacro {
    * at compile time.
    * @see To#unsafe
    */
-  // TODO: scala3
-  inline def safe[I, O](inline si: Schema[I], inline so: Schema[O]): To[I, O] =
-    ???
+  inline def safe[I, O](inline iSchema: Schema[I], inline oSchema: Schema[O]): To[I, O] =
+    ${ ToMacro.safeImpl('iSchema, 'oSchema) }
 }

From ccf83a81a9bc95bf00c9c0d791f704372853141b Mon Sep 17 00:00:00 2001
From: Maxime Kjaer
Date: Tue, 30 Mar 2021 23:41:12 +0200
Subject: [PATCH 29/56] Update implicit parameter to Scala 3 syntax

---
 scio-core/src/main/scala-3/com/spotify/scio/schemas/To.scala | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/scio-core/src/main/scala-3/com/spotify/scio/schemas/To.scala b/scio-core/src/main/scala-3/com/spotify/scio/schemas/To.scala
index 7ac8e663ed..f39ba3ddab 100644
--- a/scio-core/src/main/scala-3/com/spotify/scio/schemas/To.scala
+++ b/scio-core/src/main/scala-3/com/spotify/scio/schemas/To.scala
@@ -53,7 +53,7 @@ object ToMacro {
   }

-  def safeImpl[I, O](iSchema: Expr[Schema[I]], oSchema: Expr[Schema[O]])(implicit q: Quotes): Expr[To[I, O]] = {
+  def safeImpl[I, O](iSchema: Expr[Schema[I]], oSchema: Expr[Schema[O]])(using Quotes): Expr[To[I, O]] = {
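The change above is purely syntactic: Scala 3 macro implementations conventionally take the macro context as an anonymous `(using Quotes)` parameter rather than a named `(implicit q: Quotes)` one. For readers new to the API, a minimal self-contained sketch of the inline-entry-point-plus-splice shape that `To.safe` and `safeImpl` follow (hypothetical names, not Scio code):

import scala.quoted.*

object ShowType:
  // Inline entry point: the splice ${ ... } runs showImpl at compile time.
  inline def show[T]: String = ${ showImpl[T] }

  // Implementation: Quotes is the compiler context; the Type[T] context
  // bound carries T's type information across the inline boundary.
  private def showImpl[T: Type](using Quotes): Expr[String] =
    Expr(Type.show[T]) // lift the rendered type name into an Expr[String]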
From 55a9393756335e01be31637e5445205c4e63bcf2 Mon Sep 17 00:00:00 2001
From: vincenzobaz
Date: Fri, 19 Mar 2021 10:45:34 +0100
Subject: [PATCH 30/56] Remove withDottyCompat from scio-macros. Add
 withDottyCompat to overrides

scio-macros depends only on java libs and scalatest. Scalatest is now
available as a 3.0.0 lib as well, so withDottyCompat is not needed anymore.
---
 build.sbt | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/build.sbt b/build.sbt
index ea81fb5a7e..9cea04e204 100644
--- a/build.sbt
+++ b/build.sbt
@@ -584,7 +584,8 @@ lazy val `scio-macros`: Project = project
     libraryDependencies ++= Seq(
       "com.esotericsoftware" % "kryo-shaded" % kryoVersion,
       "org.apache.beam" % "beam-sdks-java-extensions-sql" % beamVersion,
-      "org.apache.avro" % "avro" % avroVersion
+      "org.apache.avro" % "avro" % avroVersion,
+      "org.scalatest" %% "scalatest" % scalatestVersion % Test
     ),
     // Scala 2 dependencies
     libraryDependencies ++= {

From b40e6268fec96fb7478629f5049db0f7d454bcfe Mon Sep 17 00:00:00 2001
From: vincenzobaz
Date: Thu, 1 Apr 2021 15:46:59 +0200
Subject: [PATCH 31/56] Fix compilation issue

---
 scio-core/src/main/scala-3/com/spotify/scio/schemas/To.scala | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/scio-core/src/main/scala-3/com/spotify/scio/schemas/To.scala b/scio-core/src/main/scala-3/com/spotify/scio/schemas/To.scala
index f39ba3ddab..492e899c27 100644
--- a/scio-core/src/main/scala-3/com/spotify/scio/schemas/To.scala
+++ b/scio-core/src/main/scala-3/com/spotify/scio/schemas/To.scala
@@ -17,7 +17,6 @@

 package com.spotify.scio.schemas

-package com.spotify.scio.schemas.Schema
 import org.apache.beam.sdk.schemas.{SchemaCoder, Schema => BSchema}

 import scala.compiletime._
@@ -26,7 +26,7 @@ import scala.quoted._

 object ToMacro {

-  def interpretSchema[T: Type](schemaExpr: Expr[Schema[T]])(using Quotes): Option[Schema[T]] = schemaExpr match {
+  def interpretSchema[T: scala.quoted.Type](schemaExpr: Expr[Schema[T]])(using Quotes): Option[Schema[T]] = schemaExpr match {
     case '{ Schema.stringSchema } => Some(Schema.stringSchema.asInstanceOf[Schema[T]])
     case '{ Schema.byteSchema } => Some(Schema.byteSchema.asInstanceOf[Schema[T]])
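The next patch finishes wiring `safeImpl` by summoning the implicit arguments that `To.unchecked` needs. `Expr.summon[A]` is the quoted counterpart of `summon[A]`: it searches the implicit scope during macro expansion and returns `None` when no instance is found, so the macro can fail with a readable error instead of generating code that would not compile. A small stand-alone illustration of the pattern (hypothetical helper, not from this patch set):

import scala.quoted.*
import scala.reflect.ClassTag

// Summon a ClassTag[T] at the macro call site, aborting expansion with a
// clear message when none is available.
def classTagOf[T: Type](using Quotes): Expr[ClassTag[T]] =
  import quotes.reflect.report
  Expr.summon[ClassTag[T]] match
    case Some(ct) => ct
    case None     => report.throwError(s"No ClassTag available for ${Type.show[T]}")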
From 84cb877619da8a1e6f44fdf2da2413a94060e767 Mon Sep 17 00:00:00 2001
From: Maxime Kjaer
Date: Fri, 2 Apr 2021 00:04:39 +0200
Subject: [PATCH 32/56] Make Scala 3 To.safe macro compile

---
 .../scala-3/com/spotify/scio/schemas/To.scala | 16 ++++++++++++----
 1 file changed, 12 insertions(+), 4 deletions(-)

diff --git a/scio-core/src/main/scala-3/com/spotify/scio/schemas/To.scala b/scio-core/src/main/scala-3/com/spotify/scio/schemas/To.scala
index 492e899c27..2c81e85adf 100644
--- a/scio-core/src/main/scala-3/com/spotify/scio/schemas/To.scala
+++ b/scio-core/src/main/scala-3/com/spotify/scio/schemas/To.scala
@@ -22,7 +22,7 @@ import org.apache.beam.sdk.schemas.{SchemaCoder, Schema => BSchema}
 import scala.compiletime._
 import scala.deriving._
 import scala.quoted._
-
+import scala.reflect.ClassTag

 object ToMacro {

@@ -52,8 +52,11 @@ object ToMacro {
   }

-  def safeImpl[I, O](iSchema: Expr[Schema[I]], oSchema: Expr[Schema[O]])(using Quotes): Expr[To[I, O]] = {
-    import scala.quoted.quotes.reflect.report
+  def safeImpl[I: scala.quoted.Type, O: scala.quoted.Type](
+    iSchema: Expr[Schema[I]],
+    oSchema: Expr[Schema[O]]
+  )(using Quotes): Expr[To[I, O]] = {
+    import scala.quoted.quotes.reflect.{report, TypeRepr}

     (interpretSchema(iSchema), interpretSchema(oSchema)) match {
       case (None, None) => report.throwError(
@@ -69,7 +72,12 @@ object ToMacro {
       case (Some(sIn), Some(sOut)) =>
         val schemaOut: BSchema = SchemaMaterializer.fieldType(sOut).getRowSchema()
         val schemaIn: BSchema = SchemaMaterializer.fieldType(sIn).getRowSchema()
-        To.checkCompatibility(schemaIn, schemaOut)('{ To.unchecked[I, O] })
+        val classTagOpt = Expr.summon[ClassTag[O]]
+        if (classTagOpt.isEmpty) {
+          report.throwError(s"Could not summon Expr[ClassTag[${TypeRepr.of[O].show}]]")
+        }
+        val classTag = classTagOpt.get
+        To.checkCompatibility(schemaIn, schemaOut)('{ To.unchecked[I, O](using $iSchema, $oSchema, $classTag) })
           .fold(message => report.throwError(message), identity)
     }
   }

From d90ed49c8ba0a92aeda4e312e128f1457ad16701 Mon Sep 17 00:00:00 2001
From: Maxime Kjaer
Date: Wed, 7 Apr 2021 15:20:31 +0200
Subject: [PATCH 33/56] Make To.safe take implicit parameters in Scala 3

To match the Scala 2 signature
---
 scio-core/src/main/scala-3/com/spotify/scio/schemas/To.scala | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/scio-core/src/main/scala-3/com/spotify/scio/schemas/To.scala b/scio-core/src/main/scala-3/com/spotify/scio/schemas/To.scala
index 2c81e85adf..3cc341cc07 100644
--- a/scio-core/src/main/scala-3/com/spotify/scio/schemas/To.scala
+++ b/scio-core/src/main/scala-3/com/spotify/scio/schemas/To.scala
@@ -90,6 +90,6 @@ trait ToMacro {
    * at compile time.
    * @see To#unsafe
    */
-  inline def safe[I, O](inline iSchema: Schema[I], inline oSchema: Schema[O]): To[I, O] =
+  inline def safe[I, O](using inline iSchema: Schema[I], inline oSchema: Schema[O]): To[I, O] =
     ${ ToMacro.safeImpl('iSchema, 'oSchema) }
 }

From 3a9c50652f52462077c5d5b627824684eac1ace9 Mon Sep 17 00:00:00 2001
From: Maxime Kjaer
Date: Fri, 9 Apr 2021 10:48:17 +0200
Subject: [PATCH 34/56] Add To.safe test

---
 .../scala/com/spotify/scio/ToSafeSuite.scala | 17 +++++++++++++++++
 1 file changed, 17 insertions(+)
 create mode 100644 scio-core/src/test/scala/com/spotify/scio/ToSafeSuite.scala

diff --git a/scio-core/src/test/scala/com/spotify/scio/ToSafeSuite.scala b/scio-core/src/test/scala/com/spotify/scio/ToSafeSuite.scala
new file mode 100644
index 0000000000..dbf701f228
--- /dev/null
+++ b/scio-core/src/test/scala/com/spotify/scio/ToSafeSuite.scala
@@ -0,0 +1,17 @@
+package com.spotify.scio
+
+import com.spotify.scio.schemas.To
+
+import org.scalatest.flatspec.AnyFlatSpec
+import org.scalatest.matchers.should.Matchers
+
+case class Source(b: Boolean)
+case class Dest(b: Boolean)
+
+class ToSafeTest extends AnyFlatSpec with Matchers {
+  To.safe[Source, Dest].convert(Source(true))
+
+  // "To.safe" should "generate an unchecked conversion on compatible case class schemas" in {
+  //   To.safe[Source, Dest].convert(Source(true)) shouldBe Dest(true)
+  // }
+}

From 7d87a585f62b1c441b60fadc1a671dca35350f24 Mon Sep 17 00:00:00 2001
From: vincenzobaz
Date: Mon, 12 Apr 2021 11:58:00 +0200
Subject: [PATCH 35/56] Replace expr-based schema interpreter with a
 reflection-based one

---
 .../scala-3/com/spotify/scio/schemas/To.scala | 77 ++++++++++++-------
 1 file changed, 50 insertions(+), 27 deletions(-)

diff --git a/scio-core/src/main/scala-3/com/spotify/scio/schemas/To.scala b/scio-core/src/main/scala-3/com/spotify/scio/schemas/To.scala
index 3cc341cc07..76030634de 100644
--- a/scio-core/src/main/scala-3/com/spotify/scio/schemas/To.scala
+++ b/scio-core/src/main/scala-3/com/spotify/scio/schemas/To.scala
@@ -18,6 +18,7 @@
 package com.spotify.scio.schemas

 import org.apache.beam.sdk.schemas.{SchemaCoder, Schema => BSchema}
+import BSchema.{ FieldType => BFieldType }

 import scala.compiletime._
 import scala.deriving._
@@ -26,39 +27,13 @@ import scala.reflect.ClassTag

 object ToMacro {

-  def interpretSchema[T: scala.quoted.Type](schemaExpr: Expr[Schema[T]])(using Quotes): Option[Schema[T]] = schemaExpr match {
-    case '{ Schema.stringSchema } =>
Some(Schema.stringSchema.asInstanceOf[Schema[T]]) - case '{ Schema.byteSchema } => Some(Schema.byteSchema.asInstanceOf[Schema[T]]) - case '{ Schema.bytesSchema } => Some(Schema.bytesSchema.asInstanceOf[Schema[T]]) - case '{ Schema.sortSchema } => Some(Schema.sortSchema.asInstanceOf[Schema[T]]) - case '{ Schema.intSchema } => Some(Schema.intSchema.asInstanceOf[Schema[T]]) - case '{ Schema.longSchema } => Some(Schema.longSchema.asInstanceOf[Schema[T]]) - case '{ Schema.floatSchema } => Some(Schema.floatSchema.asInstanceOf[Schema[T]]) - case '{ Schema.doubleSchema } => Some(Schema.doubleSchema.asInstanceOf[Schema[T]]) - case '{ Schema.bigDecimalSchema } => Some(Schema.bigDecimalSchema.asInstanceOf[Schema[T]]) - case '{ Schema.booleanSchema } => Some(Schema.booleanSchema.asInstanceOf[Schema[T]]) - - case '{ Schema.optionSchema[t](using $tSchemaExpr) } => - for (tSchema <- interpretSchema(tSchemaExpr)) - yield Schema.optionSchema(using tSchema).asInstanceOf[Schema[T]] - - case '{ Schema.mapSchema[k, v](using $keySchemaExpr, $valueSchemaExpr) } => - for { - keySchema <- interpretSchema(keySchemaExpr) - valueSchema <- interpretSchema(valueSchemaExpr) - } yield Schema.mapSchema(using keySchema, valueSchema).asInstanceOf[Schema[T]] - - case _ => None - } - - def safeImpl[I: scala.quoted.Type, O: scala.quoted.Type]( iSchema: Expr[Schema[I]], oSchema: Expr[Schema[O]] )(using Quotes): Expr[To[I, O]] = { import scala.quoted.quotes.reflect.{report, TypeRepr} - (interpretSchema(iSchema), interpretSchema(oSchema)) match { + (interpret[I] , interpret[O]) match { case (None, None) => report.throwError( s""" |Could not interpret input schema: @@ -81,6 +56,54 @@ object ToMacro { .fold(message => report.throwError(message), identity) } } + + private def sequence[T](ls: List[Option[T]]): Option[List[T]] = + if ls.exists(_.isEmpty) then None + else Some(ls.collect { case Some(x) => x }) + + private def interpret[T: scala.quoted.Type](using Quotes): Option[Schema[T]] = + Type.of[T] match { + case '[Boolean] => Some(Schema.jBooleanSchema.asInstanceOf[Schema[T]]) + case '[String] => Some(Schema.stringSchema.asInstanceOf[Schema[T]]) + case '[Byte] => Some(Schema.byteSchema.asInstanceOf[Schema[T]]) + case '[Array[Byte]]=> Some(Schema.bytesSchema.asInstanceOf[Schema[T]]) + case '[Short] => Some(Schema.sortSchema.asInstanceOf[Schema[T]]) + case '[Int] => Some(Schema.intSchema.asInstanceOf[Schema[T]]) + case '[Long] => Some(Schema.longSchema.asInstanceOf[Schema[T]]) + case '[Float] => Some(Schema.floatSchema.asInstanceOf[Schema[T]]) + case '[Double] => Some(Schema.doubleSchema.asInstanceOf[Schema[T]]) + case '[BigDecimal] => Some(Schema.bigDecimalSchema.asInstanceOf[Schema[T]]) + case '[List[u]] => + for (itemSchema <- interpret[u]) + yield Schema.listSchema(itemSchema).asInstanceOf[Schema[T]] + case '[Option[u]] => + for (tSchema <- interpret[u]) + yield Schema.optionSchema(using tSchema).asInstanceOf[Schema[T]] + case '[Map[k, v]] => + for { + keySchema <- interpret[k] + valueSchema <- interpret[v] + } yield Schema.mapSchema(using keySchema, valueSchema).asInstanceOf[Schema[T]] + case _ => + import quotes.reflect._ + val tp = TypeRepr.of[T] + val caseClass: Symbol = tp.typeSymbol + val fields: List[Symbol] = caseClass.caseFields + + // if case class iterate and recurse, else sorry + if tp <:< TypeRepr.of[Product] && fields.nonEmpty then { + val schemasOpt: List[Option[(String, Schema[Any])]] = fields.map { (f: Symbol) => + assert(f.isValDef) + val fieldName = f.name + val fieldType: TypeRepr = tp.memberType(f) + 
fieldType.asType match {
+            // pattern match to create a type binding for the field's type
+            case '[u] => interpret[u].asInstanceOf[Option[Schema[Any]]].map(s => (fieldName, s))
+          }
+        }
+        sequence(schemasOpt).map(schemas => Record(schemas.toArray, null, null))
+      } else None
+    }
 }

From bb323dc0f575928b13ea196f8c61c4a987e0124d Mon Sep 17 00:00:00 2001
From: vincenzobaz
Date: Fri, 16 Apr 2021 16:09:56 +0200
Subject: [PATCH 36/56] Add simple tests

---
 .../scala/com/spotify/scio/ToSafeSuite.scala | 25 ++++++++++++++++---
 1 file changed, 21 insertions(+), 4 deletions(-)

diff --git a/scio-core/src/test/scala/com/spotify/scio/ToSafeSuite.scala b/scio-core/src/test/scala/com/spotify/scio/ToSafeSuite.scala
index dbf701f228..a2166c6a72 100644
--- a/scio-core/src/test/scala/com/spotify/scio/ToSafeSuite.scala
+++ b/scio-core/src/test/scala/com/spotify/scio/ToSafeSuite.scala
@@ -7,11 +7,28 @@ import org.scalatest.matchers.should.Matchers

 case class Source(b: Boolean)
 case class Dest(b: Boolean)
+case class Mistake(b: Int)
+case class Mistake2(c: Boolean)
+
+case class Sources(name: String, links: List[Array[Byte]])
+case class Destinations(name: String, links: List[Array[Byte]])
+case class DestinationsWrong(name: String, links: List[Array[Int]])

 class ToSafeTest extends AnyFlatSpec with Matchers {
-  To.safe[Source, Dest].convert(Source(true))
+  "To.safe" should "generate a conversion on compatible flat case class schemas" in {
+    To.safe[Source, Dest]
+  }
+
+  "To.safe" should "fail on incompatible flat case class schemas" in {
+    "To.safe[Source, Mistake2]" shouldNot compile
+    "To.safe[Source, Mistake]" shouldNot compile
+  }
+
+  "To.safe" should "generate a conversion on compatible nested case class schemas" in {
+    To.safe[Sources, Destinations]
+  }

-  // "To.safe" should "generate an unchecked conversion on compatible case class schemas" in {
-  //   To.safe[Source, Dest].convert(Source(true)) shouldBe Dest(true)
-  // }
+  "To.safe" should "fail on incompatible nested case class schemas" in {
+    "To.safe[Sources, DestinationsWrong]" shouldNot compile
+  }
 }

From 2556de817c13cc7f116380492a0db58c73f77b48 Mon Sep 17 00:00:00 2001
From: vincenzobaz
Date: Fri, 16 Apr 2021 16:11:06 +0200
Subject: [PATCH 37/56] Enable scio-core tests in ci

---
 .github/workflows/migration.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/migration.yml b/.github/workflows/migration.yml
index 0b7decf899..1243f8f389 100644
--- a/.github/workflows/migration.yml
+++ b/.github/workflows/migration.yml
@@ -23,7 +23,7 @@ jobs:
           # - scio-extra/compile
           # - scio-extra/test
           - scio-core/compile
-          # - scio-core/test
+          - scio-core/test
           # - scio-examples/compile
           # - scio-examples/test
           # - scio-redis/compile
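A note on the test style used above: ScalaTest's `shouldNot compile` takes the offending code as a string literal and asserts at build time that it fails to typecheck, which is the natural way to exercise a macro that raises errors through `report.throwError`. A minimal free-standing example of the idiom (illustrative, not part of the suite):

import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

class CompileTimeAssertionSpec extends AnyFlatSpec with Matchers {
  "the typechecker" should "reject ill-typed code at compile time" in {
    // The string is parsed and typechecked by the compiler; the assertion
    // passes only if typechecking fails.
    "val n: Int = \"not an int\"" shouldNot compile
  }
}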
schemas" in { "To.safe[Source, Mistake2]" shouldNot compile "To.safe[Source, Mistake]" shouldNot compile } - "To.safe" should "generate a conversion on compatible nested case class schemas" in { + it should "generate a conversion on compatible nested case class schemas" in { To.safe[Sources, Destinations] } - "To.safe" should "fail on incompatible nested case class schemas" in { + it should "fail on incompatible nested case class schemas" in { "To.safe[Sources, DestinationsWrong]" shouldNot compile } } From 78dfc9d92f40afc54a8d423416e103481a60858d Mon Sep 17 00:00:00 2001 From: Maxime Kjaer Date: Mon, 19 Apr 2021 12:12:06 +0200 Subject: [PATCH 39/56] Add missing cases to To.safe --- .../scala-3/com/spotify/scio/schemas/To.scala | 87 ++++++++++++++++--- 1 file changed, 73 insertions(+), 14 deletions(-) diff --git a/scio-core/src/main/scala-3/com/spotify/scio/schemas/To.scala b/scio-core/src/main/scala-3/com/spotify/scio/schemas/To.scala index 76030634de..0da6f3863e 100644 --- a/scio-core/src/main/scala-3/com/spotify/scio/schemas/To.scala +++ b/scio-core/src/main/scala-3/com/spotify/scio/schemas/To.scala @@ -24,6 +24,7 @@ import scala.compiletime._ import scala.deriving._ import scala.quoted._ import scala.reflect.ClassTag +import scala.collection.mutable object ToMacro { @@ -63,27 +64,85 @@ object ToMacro { private def interpret[T: scala.quoted.Type](using Quotes): Option[Schema[T]] = Type.of[T] match { - case '[Boolean] => Some(Schema.jBooleanSchema.asInstanceOf[Schema[T]]) - case '[String] => Some(Schema.stringSchema.asInstanceOf[Schema[T]]) - case '[Byte] => Some(Schema.byteSchema.asInstanceOf[Schema[T]]) - case '[Array[Byte]]=> Some(Schema.bytesSchema.asInstanceOf[Schema[T]]) + case '[java.lang.Byte] => Some(Schema.jByteSchema.asInstanceOf[Schema[T]]) + case '[Array[java.lang.Byte]] => Some(Schema.jBytesSchema.asInstanceOf[Schema[T]]) + case '[java.lang.Short] => Some(Schema.jShortSchema.asInstanceOf[Schema[T]]) + case '[java.lang.Integer] => Some(Schema.jIntegerSchema.asInstanceOf[Schema[T]]) + case '[java.lang.Long] => Some(Schema.jLongSchema.asInstanceOf[Schema[T]]) + case '[java.lang.Float] => Some(Schema.jFloatSchema.asInstanceOf[Schema[T]]) + case '[java.lang.Double] => Some(Schema.jDoubleSchema.asInstanceOf[Schema[T]]) + case '[java.math.BigDecimal] => Some(Schema.jBigDecimalSchema.asInstanceOf[Schema[T]]) + case '[java.lang.Boolean] => Some(Schema.jBooleanSchema.asInstanceOf[Schema[T]]) + case '[java.util.List[u]] => + for (itemSchema) <- interpret[u] + yield Schema.jListSchema(using itemSchema).asInstanceOf[Schema[T]] + case '[java.util.ArrayList[u]] => + for (itemSchema) <- interpret[u] + yield Schema.jArrayListSchema(using itemSchema).asInstanceOf[Schema[T]] + case '[java.util.Map[k, v]] => + for { + keySchema <- interpret[k] + valueSchema <- interpret[v] + } yield Schema.jMapSchema(using keySchema, valueSchema).asInstanceOf[Schema[T]] + // TODO javaBeanSchema + // TODO javaEnumSchema + case '[java.time.LocalDate] => Some(Schema.jLocalDate.asInstanceOf[Schema[T]]) + + case '[String] => Some(Schema.stringSchema.asInstanceOf[Schema[T]]) + case '[Byte] => Some(Schema.byteSchema.asInstanceOf[Schema[T]]) + case '[Array[Byte]] => Some(Schema.bytesSchema.asInstanceOf[Schema[T]]) case '[Short] => Some(Schema.sortSchema.asInstanceOf[Schema[T]]) - case '[Int] => Some(Schema.intSchema.asInstanceOf[Schema[T]]) - case '[Long] => Some(Schema.longSchema.asInstanceOf[Schema[T]]) - case '[Float] => Some(Schema.floatSchema.asInstanceOf[Schema[T]]) - case '[Double] => 
Some(Schema.doubleSchema.asInstanceOf[Schema[T]]) - case '[BigDecimal] => Some(Schema.bigDecimalSchema.asInstanceOf[Schema[T]]) - case '[List[u]] => - for (itemSchema <- interpret[u]) - yield Schema.listSchema(itemSchema).asInstanceOf[Schema[T]] + case '[Int] => Some(Schema.intSchema.asInstanceOf[Schema[T]]) + case '[Long] => Some(Schema.longSchema.asInstanceOf[Schema[T]]) + case '[Float] => Some(Schema.floatSchema.asInstanceOf[Schema[T]]) + case '[Double] => Some(Schema.doubleSchema.asInstanceOf[Schema[T]]) + case '[BigDecimal] => Some(Schema.bigDecimalSchema.asInstanceOf[Schema[T]]) + case '[Boolean] => Some(Schema.booleanSchema.asInstanceOf[Schema[T]]) case '[Option[u]] => - for (tSchema <- interpret[u]) - yield Schema.optionSchema(using tSchema).asInstanceOf[Schema[T]] + for (itemSchema <- interpret[u]) + yield Schema.optionSchema(using itemSchema).asInstanceOf[Schema[T]] + // TODO Array[T] + case '[List[u]] => + for (itemSchema <- interpret[u]) + yield Schema.listSchema(using itemSchema).asInstanceOf[Schema[T]] + case '[Seq[u]] => + for (itemSchema <- interpret[u]) + yield Schema.seqSchema(using itemSchema).asInstanceOf[Schema[T]] + case '[TraversableOnce[u]] => + for (itemSchema <- interpret[u]) + yield Schema.traversableOnceSchema(using itemSchema).asInstanceOf[Schema[T]] + case '[Iterable[u]] => + for (itemSchema <- interpret[u]) + yield Schema.iterableSchema(using itemSchema).asInstanceOf[Schema[T]] + case '[mutable.ArrayBuffer[u]] => + for (itemSchema <- interpret[u]) + yield Schema.arrayBufferSchema(using itemSchema).asInstanceOf[Schema[T]] + case '[mutable.Buffer[u]] => + for (itemSchema <- interpret[u]) + yield Schema.bufferSchema(using itemSchema).asInstanceOf[Schema[T]] + case '[Set[u]] => + for (itemSchema <- interpret[u]) + yield Schema.setSchema(using itemSchema).asInstanceOf[Schema[T]] + case '[mutable.Set[u]] => + for (itemSchema <- interpret[u]) + yield Schema.mutableSetSchema(using itemSchema).asInstanceOf[Schema[T]] + // TODO SortedSet[T] + case '[mutable.ListBuffer[u]] => + for (itemSchema <- interpret[u]) + yield Schema.listBufferSchema(using itemSchema).asInstanceOf[Schema[T]] + case '[Vector[u]] => + for (itemSchema <- interpret[u]) + yield Schema.vectorSchema(using itemSchema).asInstanceOf[Schema[T]] case '[Map[k, v]] => for { keySchema <- interpret[k] valueSchema <- interpret[v] } yield Schema.mapSchema(using keySchema, valueSchema).asInstanceOf[Schema[T]] + case '[mutable.Map[k, v]] => + for { + keySchema <- interpret[k] + valueSchema <- interpret[v] + } yield Schema.mutableMapSchema(using keySchema, valueSchema).asInstanceOf[Schema[T]] case _ => import quotes.reflect._ val tp = TypeRepr.of[T] From e227e8c193e32c802977679a14d0b77fc8321a01 Mon Sep 17 00:00:00 2001 From: Maxime Kjaer Date: Mon, 19 Apr 2021 12:50:18 +0200 Subject: [PATCH 40/56] Add tests for To.safe conversions of Java types --- .../scala/com/spotify/scio/ToSafeSuite.scala | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/scio-core/src/test/scala/com/spotify/scio/ToSafeSuite.scala b/scio-core/src/test/scala/com/spotify/scio/ToSafeSuite.scala index b62612b610..b575b12d57 100644 --- a/scio-core/src/test/scala/com/spotify/scio/ToSafeSuite.scala +++ b/scio-core/src/test/scala/com/spotify/scio/ToSafeSuite.scala @@ -5,6 +5,10 @@ import com.spotify.scio.schemas.To import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers +case class JavaListInt(l: java.util.List[java.lang.Integer]) +case class JavaListString(l: java.util.List[java.lang.String]) +case 
class ListInt(l: List[Int]) +case class JavaSource(b: java.lang.Boolean) case class Source(b: Boolean) case class Dest(b: Boolean) @@ -19,6 +23,21 @@ class ToSafeTest extends AnyFlatSpec with Matchers { To.safe[Source, Dest] } + it should "generate a conversion between java.lang.Boolean and Boolean" in { + To.safe[JavaSource, Source] + To.safe[Source, JavaSource] + } + + it should "generate a conversion between java.util.List[java.lang.Integer] and List[Int]" in { + To.safe[JavaListInt, ListInt] + To.safe[ListInt, JavaListInt] + } + + it should "fail on incompatible Java types" in { + "To.safe[JavaListString, JavaListInt]" shouldNot compile + "To.safe[JavaListString, ListInt]" shouldNot compile + } + it should "fail on incompatible flat case class schemas" in { "To.safe[Source, Mistake2]" shouldNot compile "To.safe[Source, Mistake]" shouldNot compile From fa5c956a4406f3cb0e2ca92a2294b14314e88677 Mon Sep 17 00:00:00 2001 From: vincenzobaz Date: Tue, 27 Apr 2021 15:34:06 +0200 Subject: [PATCH 41/56] Prototype compile-time schema generation for Java beans --- .../scala-3/com/spotify/scio/schemas/To.scala | 21 +++++++++++++++---- .../scala-3/com/spotify/scio/IsJava.scala | 2 +- 2 files changed, 18 insertions(+), 5 deletions(-) diff --git a/scio-core/src/main/scala-3/com/spotify/scio/schemas/To.scala b/scio-core/src/main/scala-3/com/spotify/scio/schemas/To.scala index 0da6f3863e..a9858c9d60 100644 --- a/scio-core/src/main/scala-3/com/spotify/scio/schemas/To.scala +++ b/scio-core/src/main/scala-3/com/spotify/scio/schemas/To.scala @@ -25,6 +25,7 @@ import scala.deriving._ import scala.quoted._ import scala.reflect.ClassTag import scala.collection.mutable +import com.spotify.scio.IsJavaBean.checkGetterAndSetters object ToMacro { @@ -146,12 +147,11 @@ object ToMacro { case _ => import quotes.reflect._ val tp = TypeRepr.of[T] - val caseClass: Symbol = tp.typeSymbol - val fields: List[Symbol] = caseClass.caseFields + val tpSymbol: Symbol = tp.typeSymbol // if this is a case class, iterate over its fields and recurse; otherwise give up - if tp <:< TypeRepr.of[Product] && fields.nonEmpty then { - val schemasOpt: List[Option[(String, Schema[Any])]] = fields.map { (f: Symbol) => + if tp <:< TypeRepr.of[Product] && tpSymbol.caseFields.nonEmpty then { + val schemasOpt: List[Option[(String, Schema[Any])]] = tpSymbol.caseFields.map { (f: Symbol) => assert(f.isValDef) val fieldName = f.name val fieldType: TypeRepr = tp.memberType(f) @@ -161,6 +161,19 @@ object ToMacro { } } sequence(schemasOpt).map(schemas => Record(schemas.toArray, null, null)) + } else if tpSymbol.flags.is(Flags.JavaDefined) && scala.util.Try(checkGetterAndSetters(tpSymbol)).isSuccess then { + val schemasOpt = tpSymbol.declaredMethods.collect { + case s if s.name.toString.startsWith("get") && s.isDefDef=> + // AVOID .TREE ? 
It is already used in checkGetterAndSetter + val fieldName: String = s.name.toString.drop(3) + val fieldType: TypeRepr = s.tree.asInstanceOf[DefDef].returnTpt.tpe + + fieldType.asType match { + case '[u] => interpret[u].asInstanceOf[Option[Schema[Any]]].map(s => (fieldName, s)) + } + } + // RawRecord is used for JavaBeans, not Record + sequence(schemasOpt).map(schemas => Record(schemas.toArray, null, null)) } else None } } diff --git a/scio-macros/src/main/scala-3/com/spotify/scio/IsJava.scala b/scio-macros/src/main/scala-3/com/spotify/scio/IsJava.scala index 6b475f7627..ff769003c2 100644 --- a/scio-macros/src/main/scala-3/com/spotify/scio/IsJava.scala +++ b/scio-macros/src/main/scala-3/com/spotify/scio/IsJava.scala @@ -26,7 +26,7 @@ sealed trait IsJavaBean[T] object IsJavaBean { - private def checkGetterAndSetters(using q: Quotes)(sym: q.reflect.Symbol): Unit = { + private[scio] def checkGetterAndSetters(using q: Quotes)(sym: q.reflect.Symbol): Unit = { import q.reflect._ val methods: List[Symbol] = sym.declaredMethods From 6658e8b406642e86ca6826e677a37c8d27a1eec9 Mon Sep 17 00:00:00 2001 From: vincenzobaz Date: Tue, 27 Apr 2021 15:33:17 +0200 Subject: [PATCH 42/56] Simplify test --- .../src/test/scala/com/spotify/scio/ToSafeSuite.scala | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/scio-core/src/test/scala/com/spotify/scio/ToSafeSuite.scala b/scio-core/src/test/scala/com/spotify/scio/ToSafeSuite.scala index b575b12d57..10d1d13a55 100644 --- a/scio-core/src/test/scala/com/spotify/scio/ToSafeSuite.scala +++ b/scio-core/src/test/scala/com/spotify/scio/ToSafeSuite.scala @@ -50,4 +50,15 @@ class ToSafeTest extends AnyFlatSpec with Matchers { it should "fail on incompatible nested case class schemas" in { "To.safe[Sources, DestinationsWrong]" shouldNot compile } + + it should "work with java beans" in { + "To.safe[JavaBeanA, JavaBeanB]" shouldNot compile + "To.safe[JavaBeanB, JavaBeanA]" shouldNot compile + + "To.safe[JavaBeanB, JavaBeanC]" shouldNot compile + "To.safe[JavaBeanC, JavaBeanB]" shouldNot compile + + To.safe[JavaBeanA, JavaBeanC] + To.safe[JavaBeanC, JavaBeanA] + } } From afb37aa5167aac84195557465e26a6db3b5e097f Mon Sep 17 00:00:00 2001 From: vincenzobaz Date: Mon, 26 Apr 2021 15:47:57 +0200 Subject: [PATCH 43/56] Specify versions --- .github/workflows/migration.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/migration.yml b/.github/workflows/migration.yml index 1243f8f389..9295b21ab4 100644 --- a/.github/workflows/migration.yml +++ b/.github/workflows/migration.yml @@ -54,5 +54,7 @@ jobs: uses: coursier/cache-action@v5 - name: java 8 setup uses: olafurpg/setup-scala@v10 - - name: Compile + - name: Scala 2 + run: sbt "++2.13.5;${{ matrix.task }}" + - name: Scala 3 run: sbt "++3.0.0-RC2;${{ matrix.task }}" From 4aa8e5742e97a5fb86d3e8417e94c2f7ba9cb1c8 Mon Sep 17 00:00:00 2001 From: vincenzobaz Date: Wed, 28 Apr 2021 07:39:40 +0200 Subject: [PATCH 44/56] Drop .tree usage in javabean schema derivation --- .../main/scala-3/com/spotify/scio/schemas/To.scala | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/scio-core/src/main/scala-3/com/spotify/scio/schemas/To.scala b/scio-core/src/main/scala-3/com/spotify/scio/schemas/To.scala index a9858c9d60..30c0e5c3d4 100644 --- a/scio-core/src/main/scala-3/com/spotify/scio/schemas/To.scala +++ b/scio-core/src/main/scala-3/com/spotify/scio/schemas/To.scala @@ -164,12 +164,13 @@ object ToMacro { } else if tpSymbol.flags.is(Flags.JavaDefined) && 
scala.util.Try(checkGetterAndSetters(tpSymbol)).isSuccess then { val schemasOpt = tpSymbol.declaredMethods.collect { case s if s.name.toString.startsWith("get") && s.isDefDef=> - // AVOID .TREE ? It is already used in checkGetterAndSetter val fieldName: String = s.name.toString.drop(3) - val fieldType: TypeRepr = s.tree.asInstanceOf[DefDef].returnTpt.tpe - - fieldType.asType match { - case '[u] => interpret[u].asInstanceOf[Option[Schema[Any]]].map(s => (fieldName, s)) + val fieldType: TypeRepr = tp.memberType(s) + fieldType match { + case MethodType(_, _, returnTpt) => + returnTpt.asType match { + case '[u] => interpret[u].asInstanceOf[Option[Schema[Any]]].map(s => (fieldName, s)) + } } } // RawRecord is used for JavaBeans, not Record sequence(schemasOpt).map(schemas => Record(schemas.toArray, null, null)) From 232995bf31aa5d5b5fb60a9036902e969af2e69d Mon Sep 17 00:00:00 2001 From: vincenzobaz Date: Wed, 28 Apr 2021 08:10:41 +0200 Subject: [PATCH 45/56] Drop usage of .tree in IsJavaBean macro --- .../scala-3/com/spotify/scio/schemas/To.scala | 2 +- .../scala-3/com/spotify/scio/IsJava.scala | 46 +++++++++---------- 2 files changed, 24 insertions(+), 24 deletions(-) diff --git a/scio-core/src/main/scala-3/com/spotify/scio/schemas/To.scala b/scio-core/src/main/scala-3/com/spotify/scio/schemas/To.scala index 30c0e5c3d4..aee7ac9747 100644 --- a/scio-core/src/main/scala-3/com/spotify/scio/schemas/To.scala +++ b/scio-core/src/main/scala-3/com/spotify/scio/schemas/To.scala @@ -161,7 +161,7 @@ object ToMacro { } } sequence(schemasOpt).map(schemas => Record(schemas.toArray, null, null)) - } else if tpSymbol.flags.is(Flags.JavaDefined) && scala.util.Try(checkGetterAndSetters(tpSymbol)).isSuccess then { + } else if tpSymbol.flags.is(Flags.JavaDefined) && scala.util.Try(checkGetterAndSetters[T]).isSuccess then { val schemasOpt = tpSymbol.declaredMethods.collect { case s if s.name.toString.startsWith("get") && s.isDefDef=> val fieldName: String = s.name.toString.drop(3) diff --git a/scio-macros/src/main/scala-3/com/spotify/scio/IsJava.scala b/scio-macros/src/main/scala-3/com/spotify/scio/IsJava.scala index ff769003c2..5246b43b65 100644 --- a/scio-macros/src/main/scala-3/com/spotify/scio/IsJava.scala +++ b/scio-macros/src/main/scala-3/com/spotify/scio/IsJava.scala @@ -26,30 +26,30 @@ sealed trait IsJavaBean[T] object IsJavaBean { - private[scio] def checkGetterAndSetters(using q: Quotes)(sym: q.reflect.Symbol): Unit = { - import q.reflect._ - val methods: List[Symbol] = sym.declaredMethods + private[scio] def checkGetterAndSetters[T: scala.quoted.Type](using Quotes): Unit = { + import quotes.reflect._ + val methods: List[Symbol] = TypeRepr.of[T].typeSymbol.declaredMethods - val getters = + val getters: List[(String, Symbol)] = methods.collect { - case s if s.name.toString.startsWith("get") => - (s.name.toString.drop(3), s.tree.asInstanceOf[DefDef]) + case s if s.name.toString.startsWith("get") && s.isDefDef => + (s.name.toString.drop(3), s) } - val setters = + val setters: Map[String, Symbol] = methods.collect { - case s if s.name.toString.startsWith("set") => - (s.name.toString.drop(3), s.tree.asInstanceOf[DefDef]) + case s if s.name.toString.startsWith("set") && s.isDefDef => + (s.name.toString.drop(3), s) }.toMap - if(getters.isEmpty) { + if (getters.isEmpty) then { val mess = - s"""Class ${sym.name} has not getter""" + s"""Class ${TypeRepr.of[T].typeSymbol.name} has no getters""" report.throwError(mess) } - getters.foreach { case (name, info) => - val setter: DefDef = + getters.foreach { case (name, getter) => + val setter: Symbol = setters // Map[String, DefDef] 
.get(name) .getOrElse { @@ -59,26 +59,26 @@ object IsJavaBean { report.throwError(mess) } - val resType: TypeRepr = info.returnTpt.tpe - setter.paramss.head match { - case TypeParamClause(params: List[TypeDef]) => report.throwError(s"JavaBean setter for field $name has type parameters") - case TermParamClause(head :: _) => - val tpe = head.tpt.tpe - if (resType != tpe) { + val getterType: TypeRepr = TypeRepr.of[T].memberType(getter) + val setterType: TypeRepr = TypeRepr.of[T].memberType(setter) + (getterType, setterType) match { + // MethodType(paramNames, paramTypes, returnType) + case (MethodType(_, Nil, getReturnType), MethodType(_, setReturnType :: Nil, _)) => + if getReturnType != setReturnType then { val mess = s"""JavaBean contained setter for field $name that had a mismatching type. - | found: $tpe - | expected: $resType""".stripMargin + | found: $setReturnType + | expected: $getReturnType""".stripMargin report.throwError(mess) } } } } + private def isJavaBeanImpl[T](using Quotes, Type[T]): Expr[IsJavaBean[T]] = { import quotes.reflect._ - val sym = TypeRepr.of[T].typeSymbol - if sym.flags.is(Flags.JavaDefined) then checkGetterAndSetters(sym) + if TypeRepr.of[T].typeSymbol.flags.is(Flags.JavaDefined) then checkGetterAndSetters[T] '{new IsJavaBean[T]{}} } From 47044fe188a8e7d510c453bde111e6fe3fe475b6 Mon Sep 17 00:00:00 2001 From: vincenzobaz Date: Wed, 28 Apr 2021 08:18:50 +0200 Subject: [PATCH 46/56] Add JavaBeans --- .../scala/com/spotify/scio/JavaBeanA.java | 28 +++++++++++++++++++ .../scala/com/spotify/scio/JavaBeanB.java | 28 +++++++++++++++++++ .../scala/com/spotify/scio/JavaBeanC.java | 28 +++++++++++++++++++ 3 files changed, 84 insertions(+) create mode 100644 scio-core/src/test/scala/com/spotify/scio/JavaBeanA.java create mode 100644 scio-core/src/test/scala/com/spotify/scio/JavaBeanB.java create mode 100644 scio-core/src/test/scala/com/spotify/scio/JavaBeanC.java diff --git a/scio-core/src/test/scala/com/spotify/scio/JavaBeanA.java b/scio-core/src/test/scala/com/spotify/scio/JavaBeanA.java new file mode 100644 index 0000000000..4976dc920b --- /dev/null +++ b/scio-core/src/test/scala/com/spotify/scio/JavaBeanA.java @@ -0,0 +1,28 @@ +package com.spotify.scio; + +class JavaBeanA implements java.io.Serializable { + private String firstName = null; + private String lastName = null; + private int age = 0; + + public JavaBeanA() { + } + public String getFirstName(){ + return firstName; + } + public String getLastName(){ + return lastName; + } + public int getAge(){ + return age; + } + public void setFirstName(String firstName){ + this.firstName = firstName; + } + public void setLastName(String lastName){ + this.lastName = lastName; + } + public void setAge(int age){ + this.age = age; + } +} \ No newline at end of file diff --git a/scio-core/src/test/scala/com/spotify/scio/JavaBeanB.java b/scio-core/src/test/scala/com/spotify/scio/JavaBeanB.java new file mode 100644 index 0000000000..32da1cae96 --- /dev/null +++ b/scio-core/src/test/scala/com/spotify/scio/JavaBeanB.java @@ -0,0 +1,28 @@ +package com.spotify.scio; + +class JavaBeanB implements java.io.Serializable { + private String name = null; + private String uuid = null; + private int money = 0; + + public JavaBeanB() { + } + public String getName(){ + return name; + } + public String getUuid(){ + return uuid; + } + public int getMoney(){ + return money; + } + public void setName(String name){ + this.name = name; + } + public void setUuid(String uuid){ + this.uuid = uuid; + } + public void setMoney(int money){ + this.money = 
money; + } +} \ No newline at end of file diff --git a/scio-core/src/test/scala/com/spotify/scio/JavaBeanC.java b/scio-core/src/test/scala/com/spotify/scio/JavaBeanC.java new file mode 100644 index 0000000000..3c9949f4d6 --- /dev/null +++ b/scio-core/src/test/scala/com/spotify/scio/JavaBeanC.java @@ -0,0 +1,28 @@ +package com.spotify.scio; + +class JavaBeanC implements java.io.Serializable { + private String firstName = null; + private String lastName = null; + private int age = 0; + + public JavaBeanC() { + } + public String getFirstName(){ + return firstName; + } + public String getLastName(){ + return lastName; + } + public int getAge(){ + return age; + } + public void setFirstName(String firstName){ + this.firstName = firstName; + } + public void setLastName(String lastName){ + this.lastName = lastName; + } + public void setAge(int age){ + this.age = age; + } +} \ No newline at end of file From d4b9f8cd1c5e6d651323dee7cbe6d0c8c3c10c9d Mon Sep 17 00:00:00 2001 From: vincenzobaz Date: Wed, 28 Apr 2021 08:38:55 +0200 Subject: [PATCH 47/56] Disable Scala 2 test --- .github/workflows/migration.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/migration.yml b/.github/workflows/migration.yml index 9295b21ab4..17d28945eb 100644 --- a/.github/workflows/migration.yml +++ b/.github/workflows/migration.yml @@ -54,7 +54,7 @@ jobs: uses: coursier/cache-action@v5 - name: java 8 setup uses: olafurpg/setup-scala@v10 - - name: Scala 2 - run: sbt "++2.13.5;${{ matrix.task }}" +# - name: Scala 2 +# run: sbt "++2.13.5;${{ matrix.task }}" - name: Scala 3 run: sbt "++3.0.0-RC2;${{ matrix.task }}" From 75c4d4ee9915ed414d657704bc7417baa9e146e7 Mon Sep 17 00:00:00 2001 From: Maxime Kjaer Date: Wed, 5 May 2021 16:36:29 +0200 Subject: [PATCH 48/56] Reorder cases in To.safe interpreter More general types must be at the bottom --- .../scala-3/com/spotify/scio/schemas/To.scala | 32 +++++++++---------- 1 file changed, 16 insertions(+), 16 deletions(-) diff --git a/scio-core/src/main/scala-3/com/spotify/scio/schemas/To.scala b/scio-core/src/main/scala-3/com/spotify/scio/schemas/To.scala index aee7ac9747..db5e57b263 100644 --- a/scio-core/src/main/scala-3/com/spotify/scio/schemas/To.scala +++ b/scio-core/src/main/scala-3/com/spotify/scio/schemas/To.scala @@ -106,27 +106,18 @@ object ToMacro { case '[List[u]] => for (itemSchema <- interpret[u]) yield Schema.listSchema(using itemSchema).asInstanceOf[Schema[T]] - case '[Seq[u]] => - for (itemSchema <- interpret[u]) - yield Schema.seqSchema(using itemSchema).asInstanceOf[Schema[T]] - case '[TraversableOnce[u]] => - for (itemSchema <- interpret[u]) - yield Schema.traversableOnceSchema(using itemSchema).asInstanceOf[Schema[T]] - case '[Iterable[u]] => - for (itemSchema <- interpret[u]) - yield Schema.iterableSchema(using itemSchema).asInstanceOf[Schema[T]] case '[mutable.ArrayBuffer[u]] => for (itemSchema <- interpret[u]) yield Schema.arrayBufferSchema(using itemSchema).asInstanceOf[Schema[T]] case '[mutable.Buffer[u]] => for (itemSchema <- interpret[u]) yield Schema.bufferSchema(using itemSchema).asInstanceOf[Schema[T]] - case '[Set[u]] => - for (itemSchema <- interpret[u]) - yield Schema.setSchema(using itemSchema).asInstanceOf[Schema[T]] case '[mutable.Set[u]] => for (itemSchema <- interpret[u]) yield Schema.mutableSetSchema(using itemSchema).asInstanceOf[Schema[T]] + case '[Set[u]] => + for (itemSchema <- interpret[u]) + yield Schema.setSchema(using itemSchema).asInstanceOf[Schema[T]] // TODO SortedSet[T] case 
'[mutable.ListBuffer[u]] => for (itemSchema <- interpret[u]) @@ -134,16 +125,25 @@ object ToMacro { case '[Vector[u]] => for (itemSchema <- interpret[u]) yield Schema.vectorSchema(using itemSchema).asInstanceOf[Schema[T]] - case '[Map[k, v]] => + case '[mutable.Map[k, v]] => for { keySchema <- interpret[k] valueSchema <- interpret[v] - } yield Schema.mapSchema(using keySchema, valueSchema).asInstanceOf[Schema[T]] - case '[mutable.Map[k, v]] => + } yield Schema.mutableMapSchema(using keySchema, valueSchema).asInstanceOf[Schema[T]] + case '[Map[k, v]] => for { keySchema <- interpret[k] valueSchema <- interpret[v] - } yield Schema.mutableMapSchema(using keySchema, valueSchema).asInstanceOf[Schema[T]] + } yield Schema.mapSchema(using keySchema, valueSchema).asInstanceOf[Schema[T]] + case '[Seq[u]] => + for (itemSchema <- interpret[u]) + yield Schema.seqSchema(using itemSchema).asInstanceOf[Schema[T]] + case '[TraversableOnce[u]] => + for (itemSchema <- interpret[u]) + yield Schema.traversableOnceSchema(using itemSchema).asInstanceOf[Schema[T]] + case '[Iterable[u]] => + for (itemSchema <- interpret[u]) + yield Schema.iterableSchema(using itemSchema).asInstanceOf[Schema[T]] case _ => import quotes.reflect._ val tp = TypeRepr.of[T] From c49e5e9bf89a428b57a146bae445073f6f0ab2be Mon Sep 17 00:00:00 2001 From: vincenzobaz Date: Mon, 7 Jun 2021 07:59:07 +0200 Subject: [PATCH 49/56] Update to Scala 3 --- build.sbt | 2 +- .../scala/com/spotify/scio/avro/AvroIO.scala | 2 +- .../avro/types/ConverterProviderSpec.scala | 41 +++++++++---------- .../kryo/JTraversableSerializer.scala | 2 +- .../hash/MutableScalableBloomFilter.scala | 2 +- .../schemas/instances/ScalaInstances.scala | 2 +- .../scio/transforms/ParallelismDoFns.scala | 2 +- 7 files changed, 26 insertions(+), 27 deletions(-) diff --git a/build.sbt b/build.sbt index 9cea04e204..4257112af2 100644 --- a/build.sbt +++ b/build.sbt @@ -25,7 +25,7 @@ import de.heikoseeberger.sbtheader.CommentCreator ThisBuild / turbo := true -val scala3Version = "3.0.0-RC2" +val scala3Version = "3.0.0" val algebirdVersion = "0.13.7" val algebraVersion = "2.2.2" val annoy4sVersion = "0.10.0" diff --git a/scio-avro/src/main/scala/com/spotify/scio/avro/AvroIO.scala b/scio-avro/src/main/scala/com/spotify/scio/avro/AvroIO.scala index 617b9da018..3459d98a1b 100644 --- a/scio-avro/src/main/scala/com/spotify/scio/avro/AvroIO.scala +++ b/scio-avro/src/main/scala/com/spotify/scio/avro/AvroIO.scala @@ -250,7 +250,7 @@ object AvroIO { private[avro] val DefaultMetadata: Map[String, AnyRef] = Map.empty } - final case class WriteParam private ( + final case class WriteParam private[avro] ( numShards: Int = WriteParam.DefaultNumShards, private val _suffix: String = WriteParam.DefaultSuffix, codec: CodecFactory = WriteParam.DefaultCodec, diff --git a/scio-avro/src/test/scala/com/spotify/scio/avro/types/ConverterProviderSpec.scala b/scio-avro/src/test/scala/com/spotify/scio/avro/types/ConverterProviderSpec.scala index 7295d15a22..7e00879e9b 100644 --- a/scio-avro/src/test/scala/com/spotify/scio/avro/types/ConverterProviderSpec.scala +++ b/scio-avro/src/test/scala/com/spotify/scio/avro/types/ConverterProviderSpec.scala @@ -20,7 +20,6 @@ package com.spotify.scio.avro.types import cats.Eq import cats.instances.all._ import com.google.protobuf.ByteString -import magnolify.cats.semiauto.EqDerivation import magnolify.scalacheck.auto._ import org.scalacheck._ import org.scalatest.propspec.AnyPropSpec @@ -41,22 +40,22 @@ class ConverterProviderSpec extends AnyPropSpec with 
ScalaCheckDrivenPropertyChe implicit val eqByteString: Eq[ByteString] = Eq.instance[ByteString](_ == _) property("round trip basic primitive types") { - forAll { r1: BasicFields => + forAll { (r1: BasicFields) => val r2 = AvroType.fromGenericRecord[BasicFields](AvroType.toGenericRecord[BasicFields](r1)) - EqDerivation[BasicFields].eqv(r1, r2) shouldBe true + EqGen.of[BasicFields].eqv(r1, r2) shouldBe true } } property("round trip optional primitive types") { - forAll { r1: OptionalFields => + forAll { (r1: OptionalFields) => val r2 = AvroType.fromGenericRecord[OptionalFields](AvroType.toGenericRecord[OptionalFields](r1)) - EqDerivation[OptionalFields].eqv(r1, r2) shouldBe true + EqGen.of[OptionalFields].eqv(r1, r2) shouldBe true } } property("skip null optional primitive types") { - forAll { o: OptionalFields => + forAll { (o: OptionalFields) => val r = AvroType.toGenericRecord[OptionalFields](o) // GenericRecord object should only contain a key if the corresponding Option[T] is defined o.boolF.isDefined shouldBe (r.get("boolF") != null) @@ -70,37 +69,37 @@ class ConverterProviderSpec extends AnyPropSpec with ScalaCheckDrivenPropertyChe } property("round trip primitive type arrays") { - forAll { r1: ArrayFields => + forAll { (r1: ArrayFields) => val r2 = AvroType.fromGenericRecord[ArrayFields](AvroType.toGenericRecord[ArrayFields](r1)) - EqDerivation[ArrayFields].eqv(r1, r2) shouldBe true + EqGen.of[ArrayFields].eqv(r1, r2) shouldBe true } } property("round trip primitive type maps") { - forAll { r1: MapFields => + forAll { (r1: MapFields) => val r2 = AvroType.fromGenericRecord[MapFields](AvroType.toGenericRecord[MapFields](r1)) - EqDerivation[MapFields].eqv(r1, r2) shouldBe true + EqGen.of[MapFields].eqv(r1, r2) shouldBe true } } property("round trip required nested types") { - forAll { r1: NestedFields => + forAll { (r1: NestedFields) => val r2 = AvroType.fromGenericRecord[NestedFields](AvroType.toGenericRecord[NestedFields](r1)) - EqDerivation[NestedFields].eqv(r1, r2) shouldBe true + EqGen.of[NestedFields].eqv(r1, r2) shouldBe true } } property("round trip optional nested types") { - forAll { r1: OptionalNestedFields => + forAll { (r1: OptionalNestedFields) => val r2 = AvroType.fromGenericRecord[OptionalNestedFields]( AvroType.toGenericRecord[OptionalNestedFields](r1) ) - EqDerivation[OptionalNestedFields].eqv(r1, r2) shouldBe true + EqGen.of[OptionalNestedFields].eqv(r1, r2) shouldBe true } } property("skip null optional nested types") { - forAll { o: OptionalNestedFields => + forAll { (o: OptionalNestedFields) => val r = AvroType.toGenericRecord[OptionalNestedFields](o) // TableRow object should only contain a key if the corresponding Option[T] is defined o.basic.isDefined shouldBe (r.get("basic") != null) @@ -111,28 +110,28 @@ class ConverterProviderSpec extends AnyPropSpec with ScalaCheckDrivenPropertyChe } property("round trip nested type arrays") { - forAll { r1: ArrayNestedFields => + forAll { (r1: ArrayNestedFields) => val r2 = AvroType.fromGenericRecord[ArrayNestedFields]( AvroType.toGenericRecord[ArrayNestedFields](r1) ) - EqDerivation[ArrayNestedFields].eqv(r1, r2) shouldBe true + EqGen.of[ArrayNestedFields].eqv(r1, r2) shouldBe true } } // FIXME: can't derive Eq for this // property("round trip nested type maps") { -// forAll { r1: MapNestedFields => +// forAll { (r1: MapNestedFields) => // val r2 = // AvroType.fromGenericRecord[MapNestedFields](AvroType.toGenericRecord[MapNestedFields](r1)) -// EqDerivation[MapNestedFields].eqv(r1, r2) shouldBe true +// 
EqGen.of[MapNestedFields].eqv(r1, r2) shouldBe true // } // } property("round trip byte array types") { - forAll { r1: ByteArrayFields => + forAll { (r1: ByteArrayFields) => val r2 = AvroType.fromGenericRecord[ByteArrayFields](AvroType.toGenericRecord[ByteArrayFields](r1)) - EqDerivation[ByteArrayFields].eqv(r1, r2) shouldBe true + EqGen.of[ByteArrayFields].eqv(r1, r2) shouldBe true } } } diff --git a/scio-core/src/main/scala-3/com/spotify/scio/coders/instances/kryo/JTraversableSerializer.scala b/scio-core/src/main/scala-3/com/spotify/scio/coders/instances/kryo/JTraversableSerializer.scala index e7fc5c1ef3..d89bbc7281 100644 --- a/scio-core/src/main/scala-3/com/spotify/scio/coders/instances/kryo/JTraversableSerializer.scala +++ b/scio-core/src/main/scala-3/com/spotify/scio/coders/instances/kryo/JTraversableSerializer.scala @@ -73,7 +73,7 @@ abstract private[coders] class JWrapperCBF[T] extends Factory[T, Iterable[T]] { override def fromSpecific(it: IterableOnce[T]): Iterable[T] = { val b = new JIterableWrapperBuilder - it.foreach(b += _) + it.iterator.foreach(b += _) b.result() } diff --git a/scio-core/src/main/scala/com/spotify/scio/hash/MutableScalableBloomFilter.scala b/scio-core/src/main/scala/com/spotify/scio/hash/MutableScalableBloomFilter.scala index 38bcc67617..622f78447f 100644 --- a/scio-core/src/main/scala/com/spotify/scio/hash/MutableScalableBloomFilter.scala +++ b/scio-core/src/main/scala/com/spotify/scio/hash/MutableScalableBloomFilter.scala @@ -225,7 +225,7 @@ case class MutableScalableBloomFilter[T]( } def ++=(items: TraversableOnce[T]): MutableScalableBloomFilter[T] = { - items.foreach(i => this += i) // no bulk insert for guava BFs + items.iterator.foreach(i => this += i) // no bulk insert for guava BFs this } } diff --git a/scio-core/src/main/scala/com/spotify/scio/schemas/instances/ScalaInstances.scala b/scio-core/src/main/scala/com/spotify/scio/schemas/instances/ScalaInstances.scala index b86bc18e28..1be46954f6 100644 --- a/scio-core/src/main/scala/com/spotify/scio/schemas/instances/ScalaInstances.scala +++ b/scio-core/src/main/scala/com/spotify/scio/schemas/instances/ScalaInstances.scala @@ -69,7 +69,7 @@ trait ScalaInstances { ArrayType(s, _.asJava, _.asScala.toList) implicit def traversableOnceSchema[T](implicit s: Schema[T]): Schema[TraversableOnce[T]] = - ArrayType(s, _.toList.asJava, _.asScala.toList) + ArrayType(s, _.iterator.to(List).asJava, _.asScala.toList) implicit def iterableSchema[T](implicit s: Schema[T]): Schema[Iterable[T]] = ArrayType(s, _.toList.asJava, _.asScala.toList) diff --git a/scio-core/src/main/scala/com/spotify/scio/transforms/ParallelismDoFns.scala b/scio-core/src/main/scala/com/spotify/scio/transforms/ParallelismDoFns.scala index eb212ed164..13c0507845 100644 --- a/scio-core/src/main/scala/com/spotify/scio/transforms/ParallelismDoFns.scala +++ b/scio-core/src/main/scala/com/spotify/scio/transforms/ParallelismDoFns.scala @@ -50,7 +50,7 @@ class ParallelFlatMapFn[T, U](parallelism: Int)(f: T => TraversableOnce[U]) extends ParallelLimitedFn[T, U](parallelism: Int) { val g: T => TraversableOnce[U] = ClosureCleaner.clean(f) // defeat closure def parallelProcessElement(c: DoFn[T, U]#ProcessContext): Unit = { - val i = g(c.element()).toIterator + val i = g(c.element()).iterator while (i.hasNext) c.output(i.next()) } } From 93d192f46173aa4d564d38f3f394f7f4bc53c10f Mon Sep 17 00:00:00 2001 From: vincenzobaz Date: Mon, 7 Jun 2021 08:28:21 +0200 Subject: [PATCH 50/56] Fix IsJavaBean --- .../test/scala/com/spotify/scio/IsJavaTest.scala | 15 
+++++++++++++++ .../main/scala-3/com/spotify/scio/IsJava.scala | 5 +++-- 2 files changed, 18 insertions(+), 2 deletions(-) create mode 100644 scio-core/src/test/scala/com/spotify/scio/IsJavaTest.scala diff --git a/scio-core/src/test/scala/com/spotify/scio/IsJavaTest.scala b/scio-core/src/test/scala/com/spotify/scio/IsJavaTest.scala new file mode 100644 index 0000000000..8b0f53ac24 --- /dev/null +++ b/scio-core/src/test/scala/com/spotify/scio/IsJavaTest.scala @@ -0,0 +1,15 @@ +package com.spotify.scio + +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers + +class IsJavaTest extends AnyFlatSpec with Matchers { + "IsJavaBean" should "succeed for a java bean" in { + println(IsJavaBean[JavaBeanA]) + } + + it should "not compile for a case class" in { + case class Foo(s: String, i: Int) + "IsJavaBean[Foo]" shouldNot compile + } +} diff --git a/scio-macros/src/main/scala-3/com/spotify/scio/IsJava.scala b/scio-macros/src/main/scala-3/com/spotify/scio/IsJava.scala index 5246b43b65..c90a3c08f3 100644 --- a/scio-macros/src/main/scala-3/com/spotify/scio/IsJava.scala +++ b/scio-macros/src/main/scala-3/com/spotify/scio/IsJava.scala @@ -78,8 +78,9 @@ object IsJavaBean { private def isJavaBeanImpl[T](using Quotes, Type[T]): Expr[IsJavaBean[T]] = { import quotes.reflect._ - if TypeRepr.of[T].typeSymbol.flags.is(Flags.JavaDefined) then checkGetterAndSetters[T] - '{new IsJavaBean[T]{}} + if TypeRepr.of[T].typeSymbol.flags.is(Flags.JavaDefined) && checkGetterAndSetters[T] then + '{new IsJavaBean[T]{}} + esle report.error(s"${summon[Type[T]].show} is not a Java Bean") } inline given isJavaBean[T]: IsJavaBean[T] = { From e08214480aa85dd5b61ac2fb729fe11350bc319e Mon Sep 17 00:00:00 2001 From: vincenzobaz Date: Mon, 7 Jun 2021 08:28:27 +0200 Subject: [PATCH 51/56] Update scalatest --- build.sbt | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/build.sbt b/build.sbt index 4257112af2..2cb6b8c86a 100644 --- a/build.sbt +++ b/build.sbt @@ -97,7 +97,7 @@ val protobufVersion = "3.15.8" val scalacheckVersion = "1.15.4" val scalaMacrosVersion = "2.1.1" val scalatestplusVersion = "3.1.0.0-RC2" -val scalatestVersion = "3.2.8" +val scalatestVersion = "3.2.9" val shapelessVersion = "2.3.4" val slf4jVersion = "1.7.30" val sparkeyVersion = "3.2.1" @@ -162,12 +162,12 @@ val commonSettings = Def Seq(Tests.Argument(TestFrameworks.ScalaTest, "-l", "org.scalatest.tags.Slow")) } }, - coverageExcludedPackages := (Seq( - "com\\.spotify\\.scio\\.examples\\..*", - "com\\.spotify\\.scio\\.repl\\..*", - "com\\.spotify\\.scio\\.util\\.MultiJoin" - ) ++ (2 to 10).map(x => s"com\\.spotify\\.scio\\.sql\\.Query${x}")).mkString(";"), - coverageHighlighting := true, + //coverageExcludedPackages := (Seq( + // "com\\.spotify\\.scio\\.examples\\..*", + // "com\\.spotify\\.scio\\.repl\\..*", + // "com\\.spotify\\.scio\\.util\\.MultiJoin" + //) ++ (2 to 10).map(x => s"com\\.spotify\\.scio\\.sql\\.Query${x}")).mkString(";"), + //coverageHighlighting := true, licenses := Seq("Apache 2" -> url("http://www.apache.org/licenses/LICENSE-2.0.txt")), homepage := Some(url("https://github.com/spotify/scio")), scmInfo := Some( From 997be919f81262a29c477dc064cd0aec2e0e176d Mon Sep 17 00:00:00 2001 From: vincenzobaz Date: Mon, 7 Jun 2021 08:28:34 +0200 Subject: [PATCH 52/56] Update sbt --- project/build.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/build.properties b/project/build.properties index f0be67b9f7..67d27a1dfe 100644 --- a/project/build.properties 
+++ b/project/build.properties @@ -1 +1 @@ -sbt.version=1.5.1 +sbt.version=1.5.3 From 26f585721e4ae55080baafdd34232c686bba7387 Mon Sep 17 00:00:00 2001 From: vincenzobaz Date: Mon, 7 Jun 2021 08:29:58 +0200 Subject: [PATCH 53/56] Disable scoverage plugin to compile in Scala 3 --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 5ffb63776d..e86d18f395 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -3,7 +3,7 @@ addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.14.10") addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.4.3") addSbtPlugin("com.github.sbt" % "sbt-protobuf" % "0.7.0") addSbtPlugin("com.typesafe.sbt" % "sbt-ghpages" % "0.6.3") -addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.7.2") +//addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.7.2") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.4.0") addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.8.1") addSbtPlugin("com.eed3si9n" % "sbt-buildinfo" % "0.10.0") From 63f0dcc57312396463b9f914c7f3296af84c988d Mon Sep 17 00:00:00 2001 From: vincenzobaz Date: Mon, 7 Jun 2021 08:32:34 +0200 Subject: [PATCH 54/56] Migrate ci to 3.0.0 --- .github/workflows/migration.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/migration.yml b/.github/workflows/migration.yml index 17d28945eb..9e404d47fc 100644 --- a/.github/workflows/migration.yml +++ b/.github/workflows/migration.yml @@ -57,4 +57,4 @@ jobs: # - name: Scala 2 # run: sbt "++2.13.5;${{ matrix.task }}" - name: Scala 3 - run: sbt "++3.0.0-RC2;${{ matrix.task }}" + run: sbt "++3.0.0;${{ matrix.task }}" From 1004f2d43d4947a291b0ddfd7916640c775ffc65 Mon Sep 17 00:00:00 2001 From: vincenzobaz Date: Mon, 7 Jun 2021 10:55:15 +0200 Subject: [PATCH 55/56] Fix typo --- scio-macros/src/main/scala-3/com/spotify/scio/IsJava.scala | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/scio-macros/src/main/scala-3/com/spotify/scio/IsJava.scala b/scio-macros/src/main/scala-3/com/spotify/scio/IsJava.scala index c90a3c08f3..f12a032618 100644 --- a/scio-macros/src/main/scala-3/com/spotify/scio/IsJava.scala +++ b/scio-macros/src/main/scala-3/com/spotify/scio/IsJava.scala @@ -17,6 +17,7 @@ package com.spotify.scio +import scala.util.{Try => STry} import scala.compiletime._ import scala.deriving._ import scala.quoted._ @@ -78,9 +79,9 @@ object IsJavaBean { private def isJavaBeanImpl[T](using Quotes, Type[T]): Expr[IsJavaBean[T]] = { import quotes.reflect._ - if TypeRepr.of[T].typeSymbol.flags.is(Flags.JavaDefined) && checkGetterAndSetters[T] then + if TypeRepr.of[T].typeSymbol.flags.is(Flags.JavaDefined) && STry(checkGetterAndSetters[T]).isSuccess then '{new IsJavaBean[T]{}} - esle report.error(s"${summon[Type[T]].show} is not a Java Bean") + else report.throwError("Not a Java Bean") } inline given isJavaBean[T]: IsJavaBean[T] = { From 1ec8f254d80c57b354643c6e5a436a3ff74640ea Mon Sep 17 00:00:00 2001 From: vincenzobaz Date: Mon, 7 Jun 2021 11:28:15 +0200 Subject: [PATCH 56/56] Use transparent to whitebox isjavabean --- scio-macros/src/main/scala-3/com/spotify/scio/IsJava.scala | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/scio-macros/src/main/scala-3/com/spotify/scio/IsJava.scala b/scio-macros/src/main/scala-3/com/spotify/scio/IsJava.scala index f12a032618..bc86b1efa4 100644 --- a/scio-macros/src/main/scala-3/com/spotify/scio/IsJava.scala +++ b/scio-macros/src/main/scala-3/com/spotify/scio/IsJava.scala @@ -81,10 +81,11 @@ object 
IsJavaBean { import quotes.reflect._ if TypeRepr.of[T].typeSymbol.flags.is(Flags.JavaDefined) && STry(checkGetterAndSetters[T]).isSuccess then '{new IsJavaBean[T]{}} - else report.throwError("Not a Java Bean") + else + report.throwError("Not a Java Bean") } - inline given isJavaBean[T]: IsJavaBean[T] = { + transparent inline given [T]: IsJavaBean[T] = { ${ isJavaBeanImpl[T] } }
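-- 
Usage sketch (an illustrative note, not taken from the patches themselves): after
PATCH 56, the IsJavaBean evidence is produced by the transparent inline given, so
summoning it runs checkGetterAndSetters while the macro expands, and To.safe
interprets both schemas at compile time. A minimal sketch against the JavaBeanA,
JavaBeanB and JavaBeanC fixtures added in PATCH 46; the value names are arbitrary:

  import com.spotify.scio.IsJavaBean
  import com.spotify.scio.schemas.To

  // Resolves only if every getX on JavaBeanA has a matching setX of the same
  // type; a malformed bean is reported as a compile-time error by the macro.
  val evidence: IsJavaBean[JavaBeanA] = summon[IsJavaBean[JavaBeanA]]

  // Compiles because JavaBeanA and JavaBeanC expose the same fields
  // (firstName, lastName, age); To.safe[JavaBeanA, JavaBeanB] is rejected
  // at compile time, as exercised by ToSafeSuite.
  val conversion: To[JavaBeanA, JavaBeanC] = To.safe[JavaBeanA, JavaBeanC]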