From af2bdeedfdcf2bdfab64c2b9f231ddafe6c40dbd Mon Sep 17 00:00:00 2001
From: Dongjoon Hyun
Date: Sun, 26 Feb 2023 17:57:09 -0800
Subject: [PATCH 1/2] [SPARK-42587][CONNECT][TESTS] Use wrapper versions for SBT and Maven in module tests

---
 .../spark/sql/connect/client/CompatibilitySuite.scala      | 6 +++---
 .../sql/connect/client/util/IntegrationTestUtils.scala     | 7 ++++---
 .../spark/sql/connect/client/util/RemoteSparkSession.scala | 2 +-
 3 files changed, 8 insertions(+), 7 deletions(-)

diff --git a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/CompatibilitySuite.scala b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/CompatibilitySuite.scala
index ad91accd5da7f..59655c45996f5 100644
--- a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/CompatibilitySuite.scala
+++ b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/CompatibilitySuite.scala
@@ -39,14 +39,14 @@ import org.apache.spark.sql.connect.client.util.IntegrationTestUtils._
  *     spark-sql
  *     spark-connect-client-jvm
  * }}}
- * To build the above artifact, use e.g. `sbt package` or `mvn clean install -DskipTests`.
+ * To build the above artifact, use e.g. `build/sbt package` or `build/mvn clean install -DskipTests`.
  *
  * When debugging this test, if any changes to the client API, the client jar need to be built
  * before running the test. An example workflow with SBT for this test:
  *   1. Compatibility test has reported an unexpected client API change.
  *   1. Fix the wrong client API.
- *   1. Build the client jar: `sbt package`
- *   1. Run the test again: `sbt "testOnly
+ *   1. Build the client jar: `build/sbt package`
+ *   1. Run the test again: `build/sbt "testOnly
  *      org.apache.spark.sql.connect.client.CompatibilitySuite"`
  */
 class CompatibilitySuite extends ConnectFunSuite {
diff --git a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/util/IntegrationTestUtils.scala b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/util/IntegrationTestUtils.scala
index 2725422c29924..6c465c83b080d 100644
--- a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/util/IntegrationTestUtils.scala
+++ b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/util/IntegrationTestUtils.scala
@@ -40,8 +40,9 @@ object IntegrationTestUtils {
   private[connect] def debug(error: Throwable): Unit = if (isDebug) error.printStackTrace()
 
   /**
-   * Find a jar in the Spark project artifacts. It requires a build first (e.g. sbt package, mvn
-   * clean install -DskipTests) so that this method can find the jar in the target folders.
+   * Find a jar in the Spark project artifacts. It requires a build first (e.g. build/sbt package,
+   * build/mvn clean install -DskipTests) so that this method can find the jar in the target
+   * folders.
    *
    * @return
    *   the jar
@@ -52,7 +53,7 @@
       targetDir.exists(),
       s"Fail to locate the target folder: '${targetDir.getCanonicalPath}'. " +
         s"SPARK_HOME='${new File(sparkHome).getCanonicalPath}'. " +
-        "Make sure the spark project jars has been built (e.g. using sbt package)" +
+        "Make sure the spark project jars have been built (e.g. using build/sbt package) " +
        "and the env variable `SPARK_HOME` is set correctly.")
     val jars = recursiveListFiles(targetDir).filter { f =>
       // SBT jar
diff --git a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/util/RemoteSparkSession.scala b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/util/RemoteSparkSession.scala
index 2d8d9b02d4fed..9fe53c1e3ec97 100644
--- a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/util/RemoteSparkSession.scala
+++ b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/util/RemoteSparkSession.scala
@@ -32,7 +32,7 @@ import org.apache.spark.util.Utils
 
 /**
  * An util class to start a local spark connect server in a different process for local E2E tests.
- * Pre-running the tests, the spark connect artifact needs to be built using e.g. `sbt package`.
+ * Pre-running the tests, the spark connect artifact needs to be built using e.g. `build/sbt package`.
  * It is designed to start the server once but shared by all tests. It is equivalent to use the
  * following command to start the connect server via command line:
  *

From 958cd47f6c4d60403790a2f53e6658044e427d2d Mon Sep 17 00:00:00 2001
From: Dongjoon Hyun
Date: Sun, 26 Feb 2023 18:06:18 -0800
Subject: [PATCH 2/2] Fix scalastyle

---
 .../apache/spark/sql/connect/client/CompatibilitySuite.scala | 3 ++-
 .../spark/sql/connect/client/util/RemoteSparkSession.scala   | 3 ++-
 2 files changed, 4 insertions(+), 2 deletions(-)

diff --git a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/CompatibilitySuite.scala b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/CompatibilitySuite.scala
index 59655c45996f5..35cecaa20d75d 100644
--- a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/CompatibilitySuite.scala
+++ b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/CompatibilitySuite.scala
@@ -39,7 +39,8 @@ import org.apache.spark.sql.connect.client.util.IntegrationTestUtils._
  *     spark-sql
  *     spark-connect-client-jvm
  * }}}
- * To build the above artifact, use e.g. `build/sbt package` or `build/mvn clean install -DskipTests`.
+ * To build the above artifact, use e.g. `build/sbt package` or
+ * `build/mvn clean install -DskipTests`.
  *
  * When debugging this test, if any changes to the client API, the client jar need to be built
  * before running the test. An example workflow with SBT for this test:
diff --git a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/util/RemoteSparkSession.scala b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/util/RemoteSparkSession.scala
index 9fe53c1e3ec97..8f91ad31764bd 100644
--- a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/util/RemoteSparkSession.scala
+++ b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/util/RemoteSparkSession.scala
@@ -32,7 +32,8 @@ import org.apache.spark.util.Utils
 
 /**
  * An util class to start a local spark connect server in a different process for local E2E tests.
- * Pre-running the tests, the spark connect artifact needs to be built using e.g. `build/sbt package`.
+ * Pre-running the tests, the spark connect artifact needs to be built using e.g.
+ * `build/sbt package`.
  * It is designed to start the server once but shared by all tests. It is equivalent to use the
  * following command to start the connect server via command line:
  *
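
For illustration, the build-and-test workflow that the updated comments describe might look like the following shell session. This is a minimal sketch: it assumes a Spark source checkout with the `build/sbt` and `build/mvn` wrapper scripts at the repository root, and uses only the commands quoted in the patches above.

    # Build the spark-sql and spark-connect-client-jvm artifacts with the bundled SBT wrapper
    build/sbt package

    # ...or build them with the bundled Maven wrapper, skipping tests
    build/mvn clean install -DskipTests

    # Re-run the compatibility suite against the freshly built client jar
    build/sbt "testOnly org.apache.spark.sql.connect.client.CompatibilitySuite"

Using the wrappers rather than a locally installed `sbt`/`mvn` pins the build to the project's expected tool versions, which is the point of the change.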