diff --git a/core/src/main/resources/error/error-classes.json b/core/src/main/resources/error/error-classes.json
index c58f9b9fb3857..9ddeb2a9283d2 100644
--- a/core/src/main/resources/error/error-classes.json
+++ b/core/src/main/resources/error/error-classes.json
@@ -730,6 +730,23 @@
       "The JOIN with LATERAL correlation is not allowed because an OUTER subquery cannot correlate to its join partner. Remove the LATERAL correlation or use an INNER JOIN, or LEFT OUTER JOIN instead."
     ]
   },
+  "INVALID_OPTIONS" : {
+    "message" : [
+      "Invalid options:"
+    ],
+    "subClass" : {
+      "NON_MAP_FUNCTION" : {
+        "message" : [
+          "Must use the `map()` function for options."
+        ]
+      },
+      "NON_STRING_TYPE" : {
+        "message" : [
+          "A type of keys and values in `map()` must be string, but got <mapType>."
+        ]
+      }
+    }
+  },
   "INVALID_PANDAS_UDF_PLACEMENT" : {
     "message" : [
       "The group aggregate pandas UDF <functionList> cannot be invoked together with as other, non-pandas aggregate functions."
     ]
   },
@@ -2185,16 +2202,6 @@
       "Schema should be struct type but got <dataType>."
     ]
   },
-  "_LEGACY_ERROR_TEMP_1095" : {
-    "message" : [
-      "A type of keys and values in map() must be string, but got <map>."
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_1096" : {
-    "message" : [
-      "Must use a map() function for options."
-    ]
-  },
   "_LEGACY_ERROR_TEMP_1097" : {
     "message" : [
       "The field for corrupt records must be string type and nullable."
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
index 7772dd5e9a3ec..b366d0606c97b 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
@@ -1013,13 +1013,13 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase {
 
   def keyValueInMapNotStringError(m: CreateMap): Throwable = {
     new AnalysisException(
-      errorClass = "_LEGACY_ERROR_TEMP_1095",
-      messageParameters = Map("map" -> m.dataType.catalogString))
+      errorClass = "INVALID_OPTIONS.NON_STRING_TYPE",
+      messageParameters = Map("mapType" -> toSQLType(m.dataType)))
   }
 
   def nonMapFunctionNotAllowedError(): Throwable = {
     new AnalysisException(
-      errorClass = "_LEGACY_ERROR_TEMP_1096",
+      errorClass = "INVALID_OPTIONS.NON_MAP_FUNCTION",
       messageParameters = Map.empty)
   }
 
diff --git a/sql/core/src/test/resources/sql-tests/results/csv-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/csv-functions.sql.out
index 0b5a63c28e446..200ddd837e1fc 100644
--- a/sql/core/src/test/resources/sql-tests/results/csv-functions.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/csv-functions.sql.out
@@ -66,7 +66,7 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1096",
+  "errorClass" : "INVALID_OPTIONS.NON_MAP_FUNCTION",
   "queryContext" : [ {
     "objectType" : "",
     "objectName" : "",
@@ -84,9 +84,9 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1095",
+  "errorClass" : "INVALID_OPTIONS.NON_STRING_TYPE",
   "messageParameters" : {
-    "map" : "map<string,int>"
+    "mapType" : "\"MAP<STRING, INT>\""
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -222,7 +222,7 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1096",
+  "errorClass" : "INVALID_OPTIONS.NON_MAP_FUNCTION",
   "queryContext" : [ {
     "objectType" : "",
     "objectName" : "",
@@ -233,6 +233,7 @@ org.apache.spark.sql.AnalysisException
 }
 
 
+
 -- !query
 select to_csv(named_struct('a', 1, 'b', 2), map('mode', 1))
 -- !query schema
@@ -240,9 +241,9 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1095",
+  "errorClass" : "INVALID_OPTIONS.NON_STRING_TYPE",
   "messageParameters" : {
-    "map" : "map<string,int>"
+    "mapType" : "\"MAP<STRING, INT>\""
   },
   "queryContext" : [ {
     "objectType" : "",
diff --git a/sql/core/src/test/resources/sql-tests/results/json-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/json-functions.sql.out
index ab1465350d814..a9c4dd0b9fd0a 100644
--- a/sql/core/src/test/resources/sql-tests/results/json-functions.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/json-functions.sql.out
@@ -70,7 +70,7 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1096",
+  "errorClass" : "INVALID_OPTIONS.NON_MAP_FUNCTION",
   "queryContext" : [ {
     "objectType" : "",
     "objectName" : "",
@@ -88,9 +88,9 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1095",
+  "errorClass" : "INVALID_OPTIONS.NON_STRING_TYPE",
   "messageParameters" : {
-    "map" : "map<string,int>"
+    "mapType" : "\"MAP<STRING, INT>\""
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -192,7 +192,7 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1096",
+  "errorClass" : "INVALID_OPTIONS.NON_MAP_FUNCTION",
   "queryContext" : [ {
     "objectType" : "",
     "objectName" : "",
@@ -210,9 +210,9 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_1095",
+  "errorClass" : "INVALID_OPTIONS.NON_STRING_TYPE",
   "messageParameters" : {
-    "map" : "map<string,int>"
+    "mapType" : "\"MAP<STRING, INT>\""
   },
   "queryContext" : [ {
     "objectType" : "",
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/JsonFunctionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/JsonFunctionsSuite.scala
index d2c6055fe3630..56bdefc98badb 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/JsonFunctionsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/JsonFunctionsSuite.scala
@@ -395,16 +395,31 @@ class JsonFunctionsSuite extends QueryTest with SharedSparkSession {
       df2.selectExpr("to_json(a, map('timestampFormat', 'dd/MM/yyyy HH:mm'))"),
       Row("""{"_1":"26/08/2015 18:00"}""") :: Nil)
 
-    val errMsg1 = intercept[AnalysisException] {
-      df2.selectExpr("to_json(a, named_struct('a', 1))")
-    }
-    assert(errMsg1.getMessage.startsWith("Must use a map() function for options"))
+    checkError(
+      exception = intercept[AnalysisException] {
+        df2.selectExpr("to_json(a, named_struct('a', 1))")
+      },
+      errorClass = "INVALID_OPTIONS.NON_MAP_FUNCTION",
+      parameters = Map.empty,
+      context = ExpectedContext(
+        fragment = "to_json(a, named_struct('a', 1))",
+        start = 0,
+        stop = 31
+      )
+    )
 
-    val errMsg2 = intercept[AnalysisException] {
-      df2.selectExpr("to_json(a, map('a', 1))")
-    }
-    assert(errMsg2.getMessage.startsWith(
-      "A type of keys and values in map() must be string, but got"))
+    checkError(
+      exception = intercept[AnalysisException] {
+        df2.selectExpr("to_json(a, map('a', 1))")
+      },
+      errorClass = "INVALID_OPTIONS.NON_STRING_TYPE",
+      parameters = Map("mapType" -> "\"MAP<STRING, INT>\""),
+      context = ExpectedContext(
+        fragment = "to_json(a, map('a', 1))",
+        start = 0,
+        stop = 22
+      )
+    )
   }
 
   test("SPARK-19967 Support from_json in SQL") {
@@ -441,15 +456,30 @@ class JsonFunctionsSuite extends QueryTest with SharedSparkSession {
       df3.selectExpr("""from_json(value, 'time InvalidType')""")
     }
     assert(errMsg2.getMessage.contains("DataType invalidtype is not supported"))
-    val errMsg3 = intercept[AnalysisException] {
-      df3.selectExpr("from_json(value, 'time Timestamp', named_struct('a', 1))")
-    }
-    assert(errMsg3.getMessage.startsWith("Must use a map() function for options"))
-    val errMsg4 = intercept[AnalysisException] {
-      df3.selectExpr("from_json(value, 'time Timestamp', map('a', 1))")
-    }
-    assert(errMsg4.getMessage.startsWith(
-      "A type of keys and values in map() must be string, but got"))
+    checkError(
+      exception = intercept[AnalysisException] {
+        df3.selectExpr("from_json(value, 'time Timestamp', named_struct('a', 1))")
+      },
+      errorClass = "INVALID_OPTIONS.NON_MAP_FUNCTION",
+      parameters = Map.empty,
+      context = ExpectedContext(
+        fragment = "from_json(value, 'time Timestamp', named_struct('a', 1))",
+        start = 0,
+        stop = 55
+      )
+    )
+    checkError(
+      exception = intercept[AnalysisException] {
+        df3.selectExpr("from_json(value, 'time Timestamp', map('a', 1))")
+      },
+      errorClass = "INVALID_OPTIONS.NON_STRING_TYPE",
+      parameters = Map("mapType" -> "\"MAP<STRING, INT>\""),
+      context = ExpectedContext(
+        fragment = "from_json(value, 'time Timestamp', map('a', 1))",
+        start = 0,
+        stop = 46
+      )
+    )
   }
 
   test("SPARK-24027: from_json - map") {
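For reviewers who want to see the renamed error classes end to end, below is a minimal, hypothetical sketch (not part of this patch): the `InvalidOptionsCheck` object name, the local session setup, and the sample queries are assumptions for illustration, while the error-class identifiers come from the change above.

```scala
import org.apache.spark.sql.{AnalysisException, SparkSession}

// Sketch: trigger both INVALID_OPTIONS subclasses from plain SQL.
object InvalidOptionsCheck {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[1]")
      .appName("invalid-options-check")
      .getOrCreate()

    // Runs a query and returns the error class of the AnalysisException, if any.
    // getErrorClass comes from SparkThrowable and includes the subclass suffix.
    def errorClassOf(sql: String): String =
      try {
        spark.sql(sql).collect()
        "<no error>"
      } catch {
        case e: AnalysisException => e.getErrorClass
      }

    // Options built with named_struct() instead of map() now raise
    // INVALID_OPTIONS.NON_MAP_FUNCTION (previously _LEGACY_ERROR_TEMP_1096).
    println(errorClassOf(
      "select from_json('{\"a\":1}', 'a INT', named_struct('mode', 'PERMISSIVE'))"))

    // A map() with non-string values now raises INVALID_OPTIONS.NON_STRING_TYPE
    // (previously _LEGACY_ERROR_TEMP_1095), quoting the offending map type.
    println(errorClassOf("select from_json('{\"a\":1}', 'a INT', map('mode', 1))"))

    spark.stop()
  }
}
```

Run against a build that includes this patch, the sketch should print `INVALID_OPTIONS.NON_MAP_FUNCTION` and `INVALID_OPTIONS.NON_STRING_TYPE` rather than the old `_LEGACY_ERROR_TEMP_1096`/`_LEGACY_ERROR_TEMP_1095` identifiers.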