diff --git a/docs/core-migration-guide.md b/docs/core-migration-guide.md index cde6e070c5e3..33406d010bb9 100644 --- a/docs/core-migration-guide.md +++ b/docs/core-migration-guide.md @@ -25,7 +25,7 @@ license: | ## Upgrading from Core 2.4 to 3.0 - The `org.apache.spark.ExecutorPlugin` interface and related configuration has been replaced with - `org.apache.spark.plugin.SparkPlugin`, which adds new functionality. Plugins using the old + `org.apache.spark.api.plugin.SparkPlugin`, which adds new functionality. Plugins using the old interface must be modified to extend the new interfaces. Check the [Monitoring](monitoring.html) guide for more details. diff --git a/docs/sql-migration-guide.md b/docs/sql-migration-guide.md index db24299ad25a..e0dd9b472894 100644 --- a/docs/sql-migration-guide.md +++ b/docs/sql-migration-guide.md @@ -216,6 +216,10 @@ license: | * The decimal string representation can be different between Hive 1.2 and Hive 2.3 when using `TRANSFORM` operator in SQL for script transformation, which depends on hive's behavior. In Hive 1.2, the string representation omits trailing zeroes. But in Hive 2.3, it is always padded to 18 digits with trailing zeroes if necessary. +## Upgrading from Spark SQL 2.4.5 to 2.4.6 + + - In Spark 2.4.6, the `RESET` command does not reset the static SQL configuration values to the default. It only clears the runtime SQL configuration values. + ## Upgrading from Spark SQL 2.4.4 to 2.4.5 - Since Spark 2.4.5, `TRUNCATE TABLE` command tries to set back original permission and ACLs during re-creating the table/partition paths. To restore the behaviour of earlier versions, set `spark.sql.truncateTable.ignorePermissionAcl.enabled` to `true`. 
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/StaticSQLConf.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/StaticSQLConf.scala index d20252825c55..9618ff606263 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/StaticSQLConf.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/StaticSQLConf.scala @@ -47,6 +47,9 @@ object StaticSQLConf { .internal() .version("2.1.0") .stringConf + // System preserved database should not exist in metastore. However it's hard to guarantee it + // for every session, because case-sensitivity differs. Here we always lowercase it to make our + // life easier. .transform(_.toLowerCase(Locale.ROOT)) .createWithDefault("global_temp") diff --git a/sql/core/src/main/scala/org/apache/spark/sql/internal/SharedState.scala b/sql/core/src/main/scala/org/apache/spark/sql/internal/SharedState.scala index 14b8ea66c807..47119ab903da 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/internal/SharedState.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/internal/SharedState.scala @@ -153,9 +153,6 @@ private[sql] class SharedState( * A manager for global temporary views. */ lazy val globalTempViewManager: GlobalTempViewManager = { - // System preserved database should not exists in metastore. However it's hard to guarantee it - // for every session, because case-sensitivity differs. Here we always lowercase it to make our - // life easier.
val globalTempDB = conf.get(GLOBAL_TEMP_DATABASE) if (externalCatalog.databaseExists(globalTempDB)) { throw new SparkException( diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionBuilderSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionBuilderSuite.scala index 10b17571d2aa..f2386413bea1 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionBuilderSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionBuilderSuite.scala @@ -22,6 +22,7 @@ import org.scalatest.BeforeAndAfterEach import org.apache.spark.{SparkConf, SparkContext, SparkFunSuite} import org.apache.spark.internal.config.UI.UI_ENABLED import org.apache.spark.sql.internal.SQLConf +import org.apache.spark.sql.internal.StaticSQLConf.GLOBAL_TEMP_DATABASE /** * Test cases for the builder pattern of [[SparkSession]]. @@ -152,4 +153,19 @@ class SparkSessionBuilderSuite extends SparkFunSuite with BeforeAndAfterEach { session.sparkContext.hadoopConfiguration.unset(mySpecialKey) } } + + test("SPARK-31234: RESET command will not change static sql configs and " + + "spark context conf values in SessionState") { + val session = SparkSession.builder() + .master("local") + .config(GLOBAL_TEMP_DATABASE.key, value = "globalTempDB-SPARK-31234") + .config("spark.app.name", "test-app-SPARK-31234") + .getOrCreate() + + assert(session.sessionState.conf.getConfString("spark.app.name") === "test-app-SPARK-31234") + assert(session.sessionState.conf.getConf(GLOBAL_TEMP_DATABASE) === "globaltempdb-spark-31234") + session.sql("RESET") + assert(session.sessionState.conf.getConfString("spark.app.name") === "test-app-SPARK-31234") + assert(session.sessionState.conf.getConf(GLOBAL_TEMP_DATABASE) === "globaltempdb-spark-31234") + } } diff --git a/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala index f389465d4b48..feccf52d0d3b 100644 --- 
a/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala @@ -116,7 +116,7 @@ class SQLConfSuite extends QueryTest with SharedSparkSession { } } - test("reset will not change static sql configs and spark core configs") { + test("SPARK-31234: reset will not change static sql configs and spark core configs") { val conf = spark.sparkContext.getConf.getAll.toMap val appName = conf.get("spark.app.name") val driverHost = conf.get("spark.driver.host")