diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveExternalCatalogVersionsSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveExternalCatalogVersionsSuite.scala
index 5103aa8a207d..25df3339e62f 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveExternalCatalogVersionsSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveExternalCatalogVersionsSuite.scala
@@ -181,7 +181,7 @@ class HiveExternalCatalogVersionsSuite extends SparkSubmitTestUtils {
         "--conf", s"spark.sql.test.version.index=$index",
         "--driver-java-options", s"-Dderby.system.home=${wareHousePath.getCanonicalPath}",
         tempPyFile.getCanonicalPath)
-      runSparkSubmit(args, Some(sparkHome.getCanonicalPath))
+      runSparkSubmit(args, Some(sparkHome.getCanonicalPath), isSparkTesting = false)
     }
 
     tempPyFile.delete()
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/SparkSubmitTestUtils.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/SparkSubmitTestUtils.scala
index 68ed97d6d1f5..889f81b05639 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/SparkSubmitTestUtils.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/SparkSubmitTestUtils.scala
@@ -38,7 +38,10 @@ trait SparkSubmitTestUtils extends SparkFunSuite with TimeLimits {
 
   // NOTE: This is an expensive operation in terms of time (10 seconds+). Use sparingly.
   // This is copied from org.apache.spark.deploy.SparkSubmitSuite
-  protected def runSparkSubmit(args: Seq[String], sparkHomeOpt: Option[String] = None): Unit = {
+  protected def runSparkSubmit(
+      args: Seq[String],
+      sparkHomeOpt: Option[String] = None,
+      isSparkTesting: Boolean = true): Unit = {
     val sparkHome = sparkHomeOpt.getOrElse(
       sys.props.getOrElse("spark.test.home", fail("spark.test.home is not set!")))
     val history = ArrayBuffer.empty[String]
@@ -53,7 +56,14 @@ trait SparkSubmitTestUtils extends SparkFunSuite with TimeLimits {
 
     val builder = new ProcessBuilder(commands: _*).directory(new File(sparkHome))
     val env = builder.environment()
-    env.put("SPARK_TESTING", "1")
+    if (isSparkTesting) {
+      env.put("SPARK_TESTING", "1")
+    } else {
+      env.remove("SPARK_TESTING")
+      env.remove("SPARK_SQL_TESTING")
+      env.remove("SPARK_PREPEND_CLASSES")
+      env.remove("SPARK_DIST_CLASSPATH")
+    }
     env.put("SPARK_HOME", sparkHome)
 
     def captureOutput(source: String)(line: String): Unit = {