Skip to content
13 changes: 7 additions & 6 deletions core/src/main/resources/error/error-classes.json
Original file line number Diff line number Diff line change
Expand Up @@ -76,6 +76,13 @@
"Could not load Protobuf class with name <protobufClassName>. <explanation>."
]
},
"CANNOT_MODIFY_CONFIG" : {
"message" : [
"Cannot modify the value of the Spark config: <key>.",
"See also '<docroot>/sql-migration-guide.html#ddl-statements'."
],
"sqlState" : "46110"
},
"CANNOT_PARSE_DECIMAL" : {
"message" : [
"Cannot parse decimal."
Expand Down Expand Up @@ -3503,12 +3510,6 @@
"Cannot modify the value of a static config: <key>."
]
},
"_LEGACY_ERROR_TEMP_1326" : {
"message" : [
"Cannot modify the value of a Spark config: <key>.",
"See also '<docroot>/sql-migration-guide.html#ddl-statements'."
]
},
"_LEGACY_ERROR_TEMP_1327" : {
"message" : [
"Command execution is not supported in runner <runner>."
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2989,8 +2989,8 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase {

/**
 * Builds the error raised when a user attempts to SET/RESET a Spark config that
 * cannot be modified at runtime (e.g. core configs like `spark.executor.cores`).
 *
 * Note: the pasted diff left both the removed (`_LEGACY_ERROR_TEMP_1326`) and the
 * added (`CANNOT_MODIFY_CONFIG`) argument lines in place, producing duplicate named
 * arguments; this is the resolved, post-change version.
 *
 * @param key     the config key the user tried to modify
 * @param docroot base URL of the Spark documentation, interpolated into the
 *                `<docroot>` placeholder of the error message
 */
def cannotModifyValueOfSparkConfigError(key: String, docroot: String): Throwable = {
  new AnalysisException(
    // CANNOT_MODIFY_CONFIG is the named error class that replaced
    // _LEGACY_ERROR_TEMP_1326 in error-classes.json.
    errorClass = "CANNOT_MODIFY_CONFIG",
    // toSQLConf quotes the key for consistent rendering in error messages
    // (e.g. "spark.executor.cores").
    messageParameters = Map("key" -> toSQLConf(key), "docroot" -> docroot))
}

def commandExecutionInRunnerUnsupportedError(runner: String): Throwable = {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@ import java.util.TimeZone
import org.apache.hadoop.fs.Path
import org.apache.logging.log4j.Level

import org.apache.spark.SPARK_DOC_ROOT
import org.apache.spark.sql._
import org.apache.spark.sql.catalyst.parser.ParseException
import org.apache.spark.sql.catalyst.util.DateTimeTestUtils.MIT
Expand Down Expand Up @@ -204,9 +205,11 @@ class SQLConfSuite extends QueryTest with SharedSparkSession {
sql("RESET spark.app.id")
assert(spark.conf.get("spark.app.id") === appId, "Should not change spark core ones")
// spark core conf w/ entry registered
val e1 = intercept[AnalysisException](sql("RESET spark.executor.cores"))
val str_match = "Cannot modify the value of a Spark config: spark.executor.cores"
assert(e1.getMessage.contains(str_match))
checkError(
exception = intercept[AnalysisException](sql("RESET spark.executor.cores")),
errorClass = "CANNOT_MODIFY_CONFIG",
parameters = Map("key" -> "\"spark.executor.cores\"", "docroot" -> SPARK_DOC_ROOT)
)

// user defined settings
sql("SET spark.abc=xyz")
Expand Down