diff --git a/core/src/main/resources/error/error-classes.json b/core/src/main/resources/error/error-classes.json index 8cc87a68d45d1..8519ad5967cba 100644 --- a/core/src/main/resources/error/error-classes.json +++ b/core/src/main/resources/error/error-classes.json @@ -76,6 +76,13 @@ "Could not load Protobuf class with name <protobufClassName>. <explanation>." ] }, + "CANNOT_MODIFY_CONFIG" : { + "message" : [ + "Cannot modify the value of the Spark config: <key>.", + "See also '<docroot>/sql-migration-guide.html#ddl-statements'." + ], + "sqlState" : "46110" + }, "CANNOT_PARSE_DECIMAL" : { "message" : [ "Cannot parse decimal." ] }, @@ -3503,12 +3510,6 @@ "Cannot modify the value of a static config: <k>." ] }, - "_LEGACY_ERROR_TEMP_1326" : { - "message" : [ - "Cannot modify the value of a Spark config: <key>.", - "See also '<docroot>/sql-migration-guide.html#ddl-statements'." - ] - }, "_LEGACY_ERROR_TEMP_1327" : { "message" : [ "Command execution is not supported in runner <runner>." diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala index 634e4ac094d93..2f40592361be4 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala @@ -2989,8 +2989,8 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase { def cannotModifyValueOfSparkConfigError(key: String, docroot: String): Throwable = { new AnalysisException( - errorClass = "_LEGACY_ERROR_TEMP_1326", - messageParameters = Map("key" -> key, "docroot" -> docroot)) + errorClass = "CANNOT_MODIFY_CONFIG", + messageParameters = Map("key" -> toSQLConf(key), "docroot" -> docroot)) } def commandExecutionInRunnerUnsupportedError(runner: String): Throwable = { diff --git a/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala index 
b3b2912cd6c0c..30f4fdfbbcff3 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala @@ -22,6 +22,7 @@ import java.util.TimeZone import org.apache.hadoop.fs.Path import org.apache.logging.log4j.Level +import org.apache.spark.SPARK_DOC_ROOT import org.apache.spark.sql._ import org.apache.spark.sql.catalyst.parser.ParseException import org.apache.spark.sql.catalyst.util.DateTimeTestUtils.MIT @@ -204,9 +205,11 @@ class SQLConfSuite extends QueryTest with SharedSparkSession { sql("RESET spark.app.id") assert(spark.conf.get("spark.app.id") === appId, "Should not change spark core ones") // spark core conf w/ entry registered - val e1 = intercept[AnalysisException](sql("RESET spark.executor.cores")) - val str_match = "Cannot modify the value of a Spark config: spark.executor.cores" - assert(e1.getMessage.contains(str_match)) + checkError( + exception = intercept[AnalysisException](sql("RESET spark.executor.cores")), + errorClass = "CANNOT_MODIFY_CONFIG", + parameters = Map("key" -> "\"spark.executor.cores\"", "docroot" -> SPARK_DOC_ROOT) + ) // user defined settings sql("SET spark.abc=xyz")