
Commit 0f7f916

felipepessoto authored and scottsand-db committed
[Spark] Change Scala version to match Spark 3.4
- [X] Spark
- [ ] Standalone
- [ ] Flink
- [ ] Kernel
- [ ] Other (fill in here)

Matches the Scala version of Spark 3.4:
https://github.com/apache/spark/blob/59fcecb5a59df54ecb3c675d4f3722fc72c1466e/pom.xml#L171
https://github.com/scala/scala/releases/tag/v2.12.16
https://github.com/scala/scala/releases/tag/v2.12.17
https://github.com/scala/scala/releases/tag/v2.13.6
https://github.com/scala/scala/releases/tag/v2.13.7
https://github.com/scala/scala/releases/tag/v2.13.8

Fixes delta-io#1909

Bumps the Scala patch versions, which should be binary-compatible:
"As usual for our minor releases, Scala 2.12.17 is binary-compatible with the whole Scala 2.12 series."
"As usual for our minor releases, Scala 2.13.8 is binary-compatible with the whole Scala 2.13 series."

Closes delta-io#1936

Signed-off-by: Allison Portis <allison.portis@databricks.com>
GitOrigin-RevId: 232abd3a2f7f8d7395e1cdeb21baecea096f15a6
1 parent bd345f7 commit 0f7f916
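For a downstream sbt build that wants to stay aligned with Spark 3.4 and this commit, a minimal sketch of a matching build definition could look like the following. The project name and the spark-sql dependency line are illustrative assumptions, not something this commit adds:

// build.sbt -- illustrative sketch, not part of this commit
val scala212 = "2.12.17"   // matches Spark 3.4 and this commit
val scala213 = "2.13.8"

lazy val myDeltaApp = (project in file("."))          // hypothetical project
  .settings(
    name := "my-delta-app",
    scalaVersion := scala212,                         // default Scala for local builds
    crossScalaVersions := Seq(scala212, scala213),    // cross-build both series
    libraryDependencies +=
      "org.apache.spark" %% "spark-sql" % "3.4.0" % "provided"
  )

Because 2.12.17 and 2.13.8 are binary-compatible with the rest of their series, artifacts cross-built this way keep working against libraries compiled with earlier 2.12.x / 2.13.x patch releases.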

File tree

11 files changed, +29 −18 lines


.github/workflows/connectors_test.yaml

Lines changed: 1 addition & 1 deletion
@@ -7,7 +7,7 @@ jobs:
     strategy:
       matrix:
         # These Scala versions must match those in the build.sbt
-        scala: [2.13.5, 2.12.15]
+        scala: [2.13.8, 2.12.17]
     steps:
       - uses: actions/checkout@v2
       - name: install java

.github/workflows/kernel_test.yaml

Lines changed: 1 addition & 1 deletion
@@ -4,7 +4,7 @@ jobs:
   test:
     runs-on: ubuntu-20.04
     env:
-      SCALA_VERSION: 2.12.15
+      SCALA_VERSION: 2.12.17
     steps:
       - uses: actions/checkout@v3
       - name: install java

.github/workflows/spark_test.yaml

Lines changed: 1 addition & 1 deletion
@@ -6,7 +6,7 @@ jobs:
     strategy:
       matrix:
         # These Scala versions must match those in the build.sbt
-        scala: [2.12.15, 2.13.5]
+        scala: [2.12.17, 2.13.8]
     env:
       SCALA_VERSION: ${{ matrix.scala }}
     steps:

benchmarks/build.sbt

Lines changed: 1 addition & 1 deletion
@@ -15,7 +15,7 @@
  */

 name := "benchmarks"
-scalaVersion := "2.12.15"
+scalaVersion := "2.12.17"

 lazy val root = (project in file("."))
   .settings(

build.sbt

Lines changed: 3 additions & 3 deletions
@@ -21,15 +21,15 @@ import Mima._
 import Unidoc._

 // Scala versions
-val scala212 = "2.12.15"
-val scala213 = "2.13.5"
+val scala212 = "2.12.17"
+val scala213 = "2.13.8"
 val all_scala_versions = Seq(scala212, scala213)

 // Due to how publishArtifact is determined for javaOnlyReleaseSettings, incl. storage
 // It was necessary to change default_scala_version to scala213 in build.sbt
 // to build the project with Scala 2.13 only
 // As a setting, it's possible to set it on command line easily
-// sbt 'set default_scala_version := 2.13.5' [commands]
+// sbt 'set default_scala_version := 2.13.8' [commands]
 // FIXME Why not use scalaVersion?
 val default_scala_version = settingKey[String]("Default Scala version")
 Global / default_scala_version := scala212
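The default_scala_version setting at the end of this hunk is what the command-line comment relies on. A minimal sketch of how such a setting can drive a subproject's Scala version (the example project and the exact wiring are assumptions for illustration; the real build.sbt wires the setting into its own project definitions):

// Sketch only -- assumes this wiring, which is not shown in the hunk above
val default_scala_version = settingKey[String]("Default Scala version")
Global / default_scala_version := scala212

lazy val example = (project in file("example"))       // hypothetical subproject
  .settings(
    scalaVersion := default_scala_version.value,      // picks up the global default
    crossScalaVersions := all_scala_versions           // Seq(scala212, scala213)
  )

// Overriding it on the command line, as the comment above describes:
//   build/sbt 'set Global / default_scala_version := "2.13.8"' compile test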

connectors/.github/workflows/test.yaml

Lines changed: 1 addition & 1 deletion
@@ -6,7 +6,7 @@ jobs:
     runs-on: ubuntu-20.04
     strategy:
       matrix:
-        scala: [2.13.8, 2.12.8, 2.11.12]
+        scala: [2.13.8, 2.12.17, 2.11.12]
     steps:
       - uses: actions/checkout@v2
       - name: install java

connectors/examples/build.sbt

Lines changed: 5 additions & 5 deletions
@@ -18,11 +18,11 @@ name := "examples"
 organization := "com.examples"
 organizationName := "examples"

-scalaVersion := "2.12.8"
+scalaVersion := "2.12.17"
 version := "0.1.0"

 lazy val commonSettings = Seq(
-  crossScalaVersions := Seq("2.13.8", "2.12.8", "2.11.12"),
+  crossScalaVersions := Seq("2.13.8", "2.12.17", "2.11.12"),
   resolvers += Resolver.mavenLocal,
   libraryDependencies ++= Seq(
     "io.delta" %% "delta-standalone" % getStandaloneVersion(),
@@ -47,14 +47,14 @@ lazy val extraMavenRepo = sys.env.get("EXTRA_MAVEN_REPO").toSeq.map { repo =>

 lazy val convertToDelta = (project in file("convert-to-delta")) settings (
   name := "convert",
-  scalaVersion := "2.12.8",
+  scalaVersion := "2.12.17",
   commonSettings,
   extraMavenRepo
 )

 lazy val helloWorld = (project in file("hello-world")) settings (
   name := "hello",
-  scalaVersion := "2.12.8",
+  scalaVersion := "2.12.17",
   commonSettings,
   extraMavenRepo
 )
@@ -63,7 +63,7 @@ val flinkVersion = "1.16.1"
 val flinkHadoopVersion = "3.1.0"
 lazy val flinkExample = (project in file("flink-example")) settings (
   name := "flink",
-  scalaVersion := "2.12.8",
+  scalaVersion := "2.12.17",
   commonSettings,
   extraMavenRepo,
   resolvers += Resolver.mavenLocal,

connectors/examples/run_examples.py

Lines changed: 2 additions & 2 deletions
@@ -108,7 +108,7 @@ def __exit__(self, tpe, value, traceback):
     by running the following commands in the root connectors folder.

     build/sbt '++2.11.12 publishM2'
-    build/sbt '++2.12.8 publishM2'
+    build/sbt '++2.12.17 publishM2'
     build/sbt '++2.13.8 publishM2'
     """

@@ -146,5 +146,5 @@ def __exit__(self, tpe, value, traceback):
     run_maven_proj(path.join(root_dir, dir), className, args.version, args.maven_repo, "2.13")

     run_sbt_proj(root_dir, proj, className, args.version, args.maven_repo, "2.11.12")
-    run_sbt_proj(root_dir, proj, className, args.version, args.maven_repo, "2.12.8")
+    run_sbt_proj(root_dir, proj, className, args.version, args.maven_repo, "2.12.17")
     run_sbt_proj(root_dir, proj, className, args.version, args.maven_repo, "2.13.8")

examples/scala/build.sbt

Lines changed: 1 addition & 1 deletion
@@ -18,7 +18,7 @@ name := "example"
 organization := "com.example"
 organizationName := "example"

-val scala212 = "2.12.15"
+val scala212 = "2.12.17"
 val scala213 = "2.13.8"
 val deltaVersion = "2.1.0"

project/plugins.sbt

Lines changed: 12 additions & 1 deletion
@@ -30,7 +30,18 @@ addSbtPlugin("com.simplytyped" % "sbt-antlr4" % "0.8.3")

 addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "3.9.15")

-addSbtPlugin("org.scoverage" % "sbt-scoverage" % "2.0.0")
+addSbtPlugin("org.scoverage" % "sbt-scoverage" % "2.0.6")
+//Upgrade sbt-scoverage to 2.0.3+ because 2.0.0 is not compatible to Scala 2.12.17:
+//sbt.librarymanagement.ResolveException: Error downloading org.scoverage:scalac-scoverage-plugin_2.12.17:2.0.0
+
+//It caused a conflict issue:
+//[error] java.lang.RuntimeException: found version conflict(s) in library dependencies; some are suspected to be binary incompatible:
+//[error]
+//[error] * org.scala-lang.modules:scala-xml_2.12:2.1.0 (early-semver) is selected over 1.0.6
+//[error] +- org.scoverage:scalac-scoverage-reporter_2.12:2.0.7 (depends on 2.1.0)
+//[error] +- org.scalariform:scalariform_2.12:0.2.0 (depends on 1.0.6)
+//The following fix the conflict:
+libraryDependencySchemes += "org.scala-lang.modules" %% "scala-xml" % VersionScheme.Always % "test"

 addSbtPlugin("net.aichler" % "sbt-jupiter-interface" % "0.9.1")

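The added libraryDependencySchemes line works because recent sbt versions enforce the early-semver scheme that scala-xml declares and treat the 1.0.6 → 2.1.0 eviction as a likely binary incompatibility, failing the build; declaring VersionScheme.Always for that module tells the resolver to accept whichever version wins eviction. Two illustrative variants of the same knob (assumptions, not part of this commit):

// Accept the evicted scala-xml version in every configuration, not only Test:
libraryDependencySchemes += "org.scala-lang.modules" %% "scala-xml" % VersionScheme.Always

// Or downgrade all eviction errors to warnings for the whole build (a much blunter tool):
ThisBuild / evictionErrorLevel := Level.Warn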
run-tests.py

Lines changed: 1 addition & 1 deletion
@@ -67,7 +67,7 @@ def run_sbt_tests(root_dir, test_group, coverage, scala_version=None):
         cmd += ["+ %s" % test_cmd] # build/sbt ... "+ project/test" ...
     else:
         # when no scala version is specified, run test with only the specified scala version
-        cmd += ["++ %s" % scala_version, test_cmd] # build/sbt ... "++ 2.13.5" "project/test" ...
+        cmd += ["++ %s" % scala_version, test_cmd] # build/sbt ... "++ 2.13.8" "project/test" ...

     if is_running_spark_tests:
         cmd += ["unidoc"]
