diff --git a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/CompatibilitySuite.scala b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/CompatibilitySuite.scala
index ad91accd5da7f..35cecaa20d75d 100644
--- a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/CompatibilitySuite.scala
+++ b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/CompatibilitySuite.scala
@@ -39,14 +39,15 @@ import org.apache.spark.sql.connect.client.util.IntegrationTestUtils._
  *   spark-sql
  *   spark-connect-client-jvm
  * }}}
- * To build the above artifact, use e.g. `sbt package` or `mvn clean install -DskipTests`.
+ * To build the above artifacts, use e.g. `build/sbt package` or
+ * `build/mvn clean install -DskipTests`.
  *
  * When debugging this test, if any changes to the client API, the client jar need to be built
  * before running the test. An example workflow with SBT for this test:
  *   1. Compatibility test has reported an unexpected client API change.
  *   1. Fix the wrong client API.
- *   1. Build the client jar: `sbt package`
- *   1. Run the test again: `sbt "testOnly
+ *   1. Build the client jar: `build/sbt package`
+ *   1. Run the test again: `build/sbt "testOnly
  *      org.apache.spark.sql.connect.client.CompatibilitySuite"`
  */
 class CompatibilitySuite extends ConnectFunSuite {
diff --git a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/util/IntegrationTestUtils.scala b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/util/IntegrationTestUtils.scala
index 2725422c29924..6c465c83b080d 100644
--- a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/util/IntegrationTestUtils.scala
+++ b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/util/IntegrationTestUtils.scala
@@ -40,8 +40,9 @@ object IntegrationTestUtils {
   private[connect] def debug(error: Throwable): Unit = if (isDebug) error.printStackTrace()
 
   /**
-   * Find a jar in the Spark project artifacts. It requires a build first (e.g. sbt package, mvn
-   * clean install -DskipTests) so that this method can find the jar in the target folders.
+   * Find a jar in the Spark project artifacts. It requires a build first (e.g. build/sbt package,
+   * build/mvn clean install -DskipTests) so that this method can find the jar in the target
+   * folders.
    *
    * @return
    *   the jar
@@ -52,7 +53,7 @@ object IntegrationTestUtils {
       targetDir.exists(),
       s"Fail to locate the target folder: '${targetDir.getCanonicalPath}'. " +
         s"SPARK_HOME='${new File(sparkHome).getCanonicalPath}'. " +
-        "Make sure the spark project jars has been built (e.g. using sbt package)" +
+        "Make sure the spark project jars have been built (e.g. using build/sbt package) " +
         "and the env variable `SPARK_HOME` is set correctly.")
     val jars = recursiveListFiles(targetDir).filter { f =>
       // SBT jar
diff --git a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/util/RemoteSparkSession.scala b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/util/RemoteSparkSession.scala
index 2d8d9b02d4fed..8f91ad31764bd 100644
--- a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/util/RemoteSparkSession.scala
+++ b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/util/RemoteSparkSession.scala
@@ -32,7 +32,8 @@ import org.apache.spark.util.Utils
 
 /**
  * An util class to start a local spark connect server in a different process for local E2E tests.
- * Pre-running the tests, the spark connect artifact needs to be built using e.g. `sbt package`.
+ * Before running the tests, the spark connect artifact needs to be built using e.g.
+ * `build/sbt package`.
  * It is designed to start the server once but shared by all tests. It is equivalent to use the
  * following command to start the connect server via command line:
  *
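
For reference, the jar lookup that the `IntegrationTestUtils` hunks document boils down to: resolve the module's `target` folder under `SPARK_HOME`, assert that a prior build (e.g. `build/sbt package` or `build/mvn clean install -DskipTests`) has produced it, then scan it recursively for a matching jar. Below is a minimal self-contained sketch of that flow, not the actual Spark implementation; the object name `FindJarSketch` and the parameters `sparkHome`, `module`, and `namePrefix` are illustrative assumptions:

```scala
import java.io.File

object FindJarSketch {
  // Recursively collect all files under `dir`, mirroring the recursiveListFiles
  // helper referenced in the hunk's context lines.
  private def recursiveListFiles(dir: File): Array[File] = {
    val these = Option(dir.listFiles()).getOrElse(Array.empty[File])
    these ++ these.filter(_.isDirectory).flatMap(recursiveListFiles)
  }

  // Locate a built jar under <sparkHome>/<module>/target. Requires a prior
  // build so that the target folder and the jar exist.
  def findJar(sparkHome: String, module: String, namePrefix: String): File = {
    val targetDir = new File(new File(sparkHome, module), "target")
    assert(
      targetDir.exists(),
      s"Fail to locate the target folder: '${targetDir.getCanonicalPath}'. " +
        "Make sure the spark project jars have been built (e.g. using build/sbt package) " +
        "and the env variable `SPARK_HOME` is set correctly.")
    recursiveListFiles(targetDir)
      .find(f => f.getName.startsWith(namePrefix) && f.getName.endsWith(".jar"))
      .getOrElse(throw new IllegalStateException(
        s"No jar matching '$namePrefix*.jar' found under ${targetDir.getCanonicalPath}"))
  }
}
```

A test would then call something like `FindJarSketch.findJar(sys.env("SPARK_HOME"), "connector/connect/client/jvm", "spark-connect-client-jvm")` after running `build/sbt package` (the module path and prefix here are examples, not values taken from the patch).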