Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
35 commits
Select commit Hold shift + click to select a range
d1712fc
first commit
LuciferYang Jan 29, 2023
22d6c58
add connect-client-integration-tests to modules.py
LuciferYang Jan 29, 2023
309347b
remove assembly
LuciferYang Jan 29, 2023
839c4d3
more test
LuciferYang Jan 30, 2023
762dbe0
add test
LuciferYang Jan 30, 2023
aea7bb1
exlcue repl
LuciferYang Jan 30, 2023
615f0af
remove deps
LuciferYang Jan 30, 2023
abc4b85
with profile
LuciferYang Jan 30, 2023
f3ec10c
add empty line
LuciferYang Jan 30, 2023
5572dfb
use package
LuciferYang Jan 30, 2023
8fca8e7
add 213 teset
LuciferYang Jan 30, 2023
a427f4d
revert to use install
LuciferYang Jan 30, 2023
3f772ac
Merge branch 'apache:master' into jvm-it-2
LuciferYang Jan 30, 2023
e498d00
remove condition
LuciferYang Jan 30, 2023
589b1b3
rename
LuciferYang Jan 30, 2023
51f8cee
rename
LuciferYang Jan 30, 2023
e3b41ce
add scala-compiler test deps
LuciferYang Jan 30, 2023
878f057
fix sbt compile
LuciferYang Jan 30, 2023
0c9fee3
test scala -2.13
LuciferYang Jan 30, 2023
70b1e21
fix maven found jar
LuciferYang Jan 30, 2023
a331c72
find jar
LuciferYang Jan 30, 2023
e0f9ad5
revert to scala 2.12
LuciferYang Jan 30, 2023
0c512d5
use install
LuciferYang Jan 30, 2023
155e589
Merge branch 'upmaster' into jvm-it-2
LuciferYang Jan 31, 2023
f485ce4
fix location
LuciferYang Feb 1, 2023
a249f06
add ServicesResourceTransformer for maven
LuciferYang Feb 1, 2023
47cda99
move to connect.client
LuciferYang Feb 1, 2023
a41a806
Merge branch 'upmaster' into jvm-it-2
LuciferYang Feb 2, 2023
b1fc936
remove profile
LuciferYang Feb 2, 2023
50900c8
mima
LuciferYang Feb 2, 2023
406382e
Merge branch 'upmaster' into jvm-it-2
LuciferYang Feb 3, 2023
b342042
move mima test
LuciferYang Feb 3, 2023
7143c4e
ignore simple udf test
LuciferYang Feb 3, 2023
408c15a
mima version
LuciferYang Feb 3, 2023
790f31f
udpate sparkbuild
LuciferYang Feb 3, 2023
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
66 changes: 66 additions & 0 deletions .github/workflows/build_and_test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -101,6 +101,7 @@ jobs:
\"java-11-17\": \"true\",
\"lint\" : \"true\",
\"k8s-integration-tests\" : \"true\",
\"connect-jvm-e2e-tests\" : \"true\",
}"
echo $precondition # For debugging
# Remove `\n` to avoid "Invalid format" error
Expand Down Expand Up @@ -689,6 +690,71 @@ jobs:
./build/mvn $MAVEN_CLI_OPTS -DskipTests -Pyarn -Pmesos -Pkubernetes -Pvolcano -Phive -Phive-thriftserver -Phadoop-cloud -Djava.version=${JAVA_VERSION/-ea} install
rm -rf ~/.m2/repository/org/apache/spark

connect-jvm-e2e-tests:
needs: precondition
if: fromJson(needs.precondition.outputs.required).connect-jvm-e2e-tests == 'true' &&
(inputs.branch != 'branch-3.2' && inputs.branch != 'branch-3.3')
name: Connect JVM Client E2E Tests with Maven
strategy:
fail-fast: false
matrix:
java:
- ${{ inputs.java }}
runs-on: ubuntu-22.04
steps:
- name: Checkout Spark repository
uses: actions/checkout@v3
with:
fetch-depth: 0
repository: apache/spark
ref: ${{ inputs.branch }}
- name: Sync the current branch with the latest in Apache Spark
if: github.repository != 'apache/spark'
run: |
git fetch https://github.com/$GITHUB_REPOSITORY.git ${GITHUB_REF#refs/heads/}
git -c user.name='Apache Spark Test Account' -c user.email='sparktestacc@gmail.com' merge --no-commit --progress --squash FETCH_HEAD
git -c user.name='Apache Spark Test Account' -c user.email='sparktestacc@gmail.com' commit -m "Merged commit" --allow-empty
- name: Cache Scala, SBT and Maven
uses: actions/cache@v3
with:
path: |
build/apache-maven-*
build/scala-*
build/*.jar
~/.sbt
key: build-${{ hashFiles('**/pom.xml', 'project/build.properties', 'build/mvn', 'build/sbt', 'build/sbt-launch-lib.bash', 'build/spark-build-info') }}
restore-keys: |
build-
- name: Cache Maven local repository
uses: actions/cache@v3
with:
path: ~/.m2/repository
key: java${{ matrix.java }}-maven-${{ hashFiles('**/pom.xml') }}
restore-keys: |
java${{ matrix.java }}-maven-
- name: Install Java ${{ matrix.java }}
uses: actions/setup-java@v3
with:
distribution: temurin
java-version: ${{ matrix.java }}
- name: Build and Test with Maven
Copy link
Contributor Author

@LuciferYang LuciferYang Jan 30, 2023

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The reason for using Maven for build and test is that, with the current dependency configuration, I don't know how SBT would fetch the assembled Connect client jar

run: |
export SCALA_VERSION=2.12
if [[ "SCALA_PROFILE" == "scala2.13" ]] ; then
export SCALA_VERSION=2.13;
./dev/change-scala-version.sh 2.13
fi
export MAVEN_OPTS="-Xss64m -Xmx2g -XX:ReservedCodeCacheSize=1g -Dorg.slf4j.simpleLogger.defaultLogLevel=WARN"
export MAVEN_CLI_OPTS="--no-transfer-progress"
export JAVA_VERSION=${{ matrix.java }}
# It uses Maven's 'install' intentionally, see https://github.com/apache/spark/pull/26414.
./build/mvn $MAVEN_CLI_OPTS -DskipTests -Djava.version=${JAVA_VERSION/-ea} install -pl repl -am -Pscala-$SCALA_VERSION
./build/mvn $MAVEN_CLI_OPTS -DskipTests -Djava.version=${JAVA_VERSION/-ea} install -pl connector/connect/common -Pscala-$SCALA_VERSION
./build/mvn $MAVEN_CLI_OPTS -DskipTests -Djava.version=${JAVA_VERSION/-ea} install -pl connector/connect/server -Pscala-$SCALA_VERSION
./build/mvn $MAVEN_CLI_OPTS -DskipTests -Djava.version=${JAVA_VERSION/-ea} install -pl connector/connect/client/jvm -Pscala-$SCALA_VERSION
./build/mvn $MAVEN_CLI_OPTS -Djava.version=${JAVA_VERSION/-ea} clean test -pl connector/connect/client/jvm-e2e-tests -Pscala-$SCALA_VERSION
rm -rf ~/.m2/repository/org/apache/spark

scala-213:
needs: precondition
if: fromJson(needs.precondition.outputs.required).scala-213 == 'true'
Expand Down
69 changes: 69 additions & 0 deletions connector/connect/client/jvm-e2e-tests/pom.xml
Original file line number Diff line number Diff line change
@@ -0,0 +1,69 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
~ Licensed to the Apache Software Foundation (ASF) under one or more
~ contributor license agreements. See the NOTICE file distributed with
~ this work for additional information regarding copyright ownership.
~ The ASF licenses this file to You under the Apache License, Version 2.0
~ (the "License"); you may not use this file except in compliance with
~ the License. You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->

<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.apache.spark</groupId>
<artifactId>spark-parent_2.12</artifactId>
<version>3.5.0-SNAPSHOT</version>
<relativePath>../../../../pom.xml</relativePath>
</parent>

<artifactId>spark-connect-client-jvm-e2e-tests_2.12</artifactId>
<packaging>jar</packaging>
<name>Spark Project Connect Client E2E Tests</name>
<url>https://spark.apache.org/</url>
<properties>
<sbt.project.name>connect-client-jvm-e2e-tests</sbt.project.name>
<guava.version>31.0.1-jre</guava.version>
<mima.version>1.1.0</mima.version>
</properties>

<dependencies>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-connect-client-jvm_${scala.binary.version}</artifactId>
<version>${project.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-connect-common_${scala.binary.version}</artifactId>
<scope>test</scope>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scala-compiler</artifactId>
<version>${scala.version}</version>
<scope>test</scope>
</dependency>
<!-- Use mima to perform the compatibility check -->
<dependency>
<groupId>com.typesafe</groupId>
<artifactId>mima-core_${scala.binary.version}</artifactId>
<version>${mima.version}</version>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<testOutputDirectory>target/scala-${scala.binary.version}/test-classes</testOutputDirectory>
</build>
</project>
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Set everything to be logged to the file target/unit-tests.log
rootLogger.level = info
rootLogger.appenderRef.file.ref = ${sys:test.appender:-File}

appender.file.type = File
appender.file.name = File
appender.file.fileName = target/unit-tests.log
appender.file.layout.type = PatternLayout
appender.file.layout.pattern = %d{yy/MM/dd HH:mm:ss.SSS} %t %p %c{1}: %m%n%ex

# Tests that launch java subprocesses can set the "test.appender" system property to
# "console" to avoid having the child process's logs overwrite the unit test's
# log file.
appender.console.type = Console
appender.console.name = console
appender.console.target = SYSTEM_ERR
appender.console.layout.type = PatternLayout
appender.console.layout.pattern = %t: %m%n%ex

# Ignore messages below warning level from Jetty, because it's a bit verbose
logger.jetty.name = org.sparkproject.jetty
logger.jetty.level = warn
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,7 @@ class ClientE2ETestSuite extends RemoteSparkSession {
assert(array(2).getLong(0) == 2)
}

test("simple udf test") {
ignore("simple udf test") {

def dummyUdf(x: Int): Int = x + 5
val myUdf = udf(dummyUdf _)
Expand Down
8 changes: 0 additions & 8 deletions connector/connect/client/jvm/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,6 @@
<properties>
<sbt.project.name>connect-client-jvm</sbt.project.name>
<guava.version>31.0.1-jre</guava.version>
<mima.version>1.1.0</mima.version>
</properties>

<dependencies>
Expand Down Expand Up @@ -93,13 +92,6 @@
<artifactId>mockito-core</artifactId>
<scope>test</scope>
</dependency>
<!-- Use mima to perform the compatibility check -->
<dependency>
<groupId>com.typesafe</groupId>
<artifactId>mima-core_${scala.binary.version}</artifactId>
<version>${mima.version}</version>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<testOutputDirectory>target/scala-${scala.binary.version}/test-classes</testOutputDirectory>
Expand Down
1 change: 1 addition & 0 deletions dev/sparktestsupport/modules.py
Original file line number Diff line number Diff line change
Expand Up @@ -280,6 +280,7 @@ def __hash__(self):
sbt_test_goals=[
"connect/test",
"connect-client-jvm/test",
"connect-client-jvm-e2e-tests/test",
],
)

Expand Down
1 change: 1 addition & 0 deletions pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -103,6 +103,7 @@
<module>connector/connect/server</module>
<module>connector/connect/common</module>
<module>connector/connect/client/jvm</module>
<module>connector/connect/client/jvm-e2e-tests</module>
<module>connector/protobuf</module>
<!-- See additional modules enabled by profiles below -->
</modules>
Expand Down
36 changes: 31 additions & 5 deletions project/SparkBuild.scala
Original file line number Diff line number Diff line change
Expand Up @@ -55,13 +55,14 @@ object BuildCommons {
val connectCommon = ProjectRef(buildLocation, "connect-common")
val connect = ProjectRef(buildLocation, "connect")
val connectClient = ProjectRef(buildLocation, "connect-client-jvm")
val connectClientE2ETests = ProjectRef(buildLocation, "connect-client-jvm-e2e-tests")

val allProjects@Seq(
core, graphx, mllib, mllibLocal, repl, networkCommon, networkShuffle, launcher, unsafe, tags, sketch, kvstore, _*
) = Seq(
"core", "graphx", "mllib", "mllib-local", "repl", "network-common", "network-shuffle", "launcher", "unsafe",
"tags", "sketch", "kvstore"
).map(ProjectRef(buildLocation, _)) ++ sqlProjects ++ streamingProjects ++ Seq(connectCommon, connect, connectClient)
).map(ProjectRef(buildLocation, _)) ++ sqlProjects ++ streamingProjects ++ Seq(connectCommon, connect, connectClient, connectClientE2ETests)

val optionallyEnabledProjects@Seq(kubernetes, mesos, yarn,
sparkGangliaLgpl, streamingKinesisAsl,
Expand Down Expand Up @@ -403,7 +404,8 @@ object SparkBuild extends PomBuild {
val mimaProjects = allProjects.filterNot { x =>
Seq(
spark, hive, hiveThriftServer, repl, networkCommon, networkShuffle, networkYarn,
unsafe, tags, tokenProviderKafka010, sqlKafka010, connectCommon, connect, connectClient, protobuf
unsafe, tags, tokenProviderKafka010, sqlKafka010, connectCommon, connect, connectClient,
connectClientE2ETests, protobuf
).contains(x)
}

Expand Down Expand Up @@ -447,6 +449,7 @@ object SparkBuild extends PomBuild {
enable(SparkConnectCommon.settings)(connectCommon)
enable(SparkConnect.settings)(connect)
enable(SparkConnectClient.settings)(connectClient)
enable(SparkConnectClientE2ETests.settings)(connectClientE2ETests)

/* Protobuf settings */
enable(SparkProtobuf.settings)(protobuf)
Expand Down Expand Up @@ -851,9 +854,6 @@ object SparkConnectClient {
)
},

// Make sure the connect server assembly jar is available for testing.
test := ((Test / test) dependsOn (LocalProject("connect") / assembly)).value,

(assembly / test) := { },

(assembly / logLevel) := Level.Info,
Expand Down Expand Up @@ -895,6 +895,32 @@ object SparkConnectClient {
}
}

object SparkConnectClientE2ETests {
val buildTestDeps = TaskKey[Unit]("buildTestDeps", "Build needed dependencies for test.")
lazy val settings = Seq(
buildTestDeps := {
(LocalProject("sql") / Compile / Keys.`package`).value
(LocalProject("connect") / assembly).value
(LocalProject("connect-client-jvm") / assembly).value
},
test := ((Test / test) dependsOn (buildTestDeps)).value,
// Make sure the connect server assembly jar is available for testing.
test := ((Test / test) dependsOn (LocalProject("connect") / assembly)).value,
libraryDependencies ++= {
val guavaVersion =
SbtPomKeys.effectivePom.value.getProperties.get("guava.version").asInstanceOf[String]
Seq("com.google.guava" % "guava" % guavaVersion)
},
dependencyOverrides ++= {
val guavaVersion =
SbtPomKeys.effectivePom.value.getProperties.get("guava.version").asInstanceOf[String]
Seq(
"com.google.guava" % "guava" % guavaVersion
)
}
)
}

object SparkProtobuf {
import BuildCommons.protoVersion

Expand Down