Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
27 changes: 17 additions & 10 deletions core/src/main/resources/error/error-classes.json
Original file line number Diff line number Diff line change
Expand Up @@ -730,6 +730,23 @@
"The <joinType> JOIN with LATERAL correlation is not allowed because an OUTER subquery cannot correlate to its join partner. Remove the LATERAL correlation or use an INNER JOIN, or LEFT OUTER JOIN instead."
]
},
"INVALID_OPTIONS" : {
"message" : [
"Invalid options:"
],
"subClass" : {
"NON_MAP_FUNCTION" : {
"message" : [
"Must use the `map()` function for options."
]
},
"NON_STRING_TYPE" : {
"message" : [
"The type of keys and values in `map()` must be string, but got <mapType>."
]
}
}
},
"INVALID_PANDAS_UDF_PLACEMENT" : {
"message" : [
"The group aggregate pandas UDF <functionList> cannot be invoked together with other, non-pandas aggregate functions."
Expand Down Expand Up @@ -2185,16 +2202,6 @@
"Schema should be struct type but got <dataType>."
]
},
"_LEGACY_ERROR_TEMP_1095" : {
"message" : [
"A type of keys and values in map() must be string, but got <map>."
]
},
"_LEGACY_ERROR_TEMP_1096" : {
"message" : [
"Must use a map() function for options."
]
},
"_LEGACY_ERROR_TEMP_1097" : {
"message" : [
"The field for corrupt records must be string type and nullable."
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1013,13 +1013,13 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase {

def keyValueInMapNotStringError(m: CreateMap): Throwable = {
new AnalysisException(
errorClass = "_LEGACY_ERROR_TEMP_1095",
messageParameters = Map("map" -> m.dataType.catalogString))
errorClass = "INVALID_OPTIONS.NON_STRING_TYPE",
messageParameters = Map("mapType" -> toSQLType(m.dataType)))
}

def nonMapFunctionNotAllowedError(): Throwable = {
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Be sure the names are consistent. not or non

new AnalysisException(
errorClass = "_LEGACY_ERROR_TEMP_1096",
errorClass = "INVALID_OPTIONS.NON_MAP_FUNCTION",
messageParameters = Map.empty)
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -66,7 +66,7 @@ struct<>
-- !query output
org.apache.spark.sql.AnalysisException
{
"errorClass" : "_LEGACY_ERROR_TEMP_1096",
"errorClass" : "INVALID_OPTIONS.NON_MAP_FUNCTION",
"queryContext" : [ {
"objectType" : "",
"objectName" : "",
Expand All @@ -84,9 +84,9 @@ struct<>
-- !query output
org.apache.spark.sql.AnalysisException
{
"errorClass" : "_LEGACY_ERROR_TEMP_1095",
"errorClass" : "INVALID_OPTIONS.NON_STRING_TYPE",
"messageParameters" : {
"map" : "map<string,int>"
"mapType" : "\"MAP<STRING, INT>\""
},
"queryContext" : [ {
"objectType" : "",
Expand Down Expand Up @@ -222,7 +222,7 @@ struct<>
-- !query output
org.apache.spark.sql.AnalysisException
{
"errorClass" : "_LEGACY_ERROR_TEMP_1096",
"errorClass" : "INVALID_OPTIONS.NON_MAP_FUNCTION",
"queryContext" : [ {
"objectType" : "",
"objectName" : "",
Expand All @@ -233,16 +233,17 @@ org.apache.spark.sql.AnalysisException
}



-- !query
select to_csv(named_struct('a', 1, 'b', 2), map('mode', 1))
-- !query schema
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
{
"errorClass" : "_LEGACY_ERROR_TEMP_1095",
"errorClass" : "INVALID_OPTIONS.NON_STRING_TYPE",
"messageParameters" : {
"map" : "map<string,int>"
"mapType" : "\"MAP<STRING, INT>\""
},
"queryContext" : [ {
"objectType" : "",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -70,7 +70,7 @@ struct<>
-- !query output
org.apache.spark.sql.AnalysisException
{
"errorClass" : "_LEGACY_ERROR_TEMP_1096",
"errorClass" : "INVALID_OPTIONS.NON_MAP_FUNCTION",
"queryContext" : [ {
"objectType" : "",
"objectName" : "",
Expand All @@ -88,9 +88,9 @@ struct<>
-- !query output
org.apache.spark.sql.AnalysisException
{
"errorClass" : "_LEGACY_ERROR_TEMP_1095",
"errorClass" : "INVALID_OPTIONS.NON_STRING_TYPE",
"messageParameters" : {
"map" : "map<string,int>"
"mapType" : "\"MAP<STRING, INT>\""
},
"queryContext" : [ {
"objectType" : "",
Expand Down Expand Up @@ -192,7 +192,7 @@ struct<>
-- !query output
org.apache.spark.sql.AnalysisException
{
"errorClass" : "_LEGACY_ERROR_TEMP_1096",
"errorClass" : "INVALID_OPTIONS.NON_MAP_FUNCTION",
"queryContext" : [ {
"objectType" : "",
"objectName" : "",
Expand All @@ -210,9 +210,9 @@ struct<>
-- !query output
org.apache.spark.sql.AnalysisException
{
"errorClass" : "_LEGACY_ERROR_TEMP_1095",
"errorClass" : "INVALID_OPTIONS.NON_STRING_TYPE",
"messageParameters" : {
"map" : "map<string,int>"
"mapType" : "\"MAP<STRING, INT>\""
},
"queryContext" : [ {
"objectType" : "",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -395,16 +395,31 @@ class JsonFunctionsSuite extends QueryTest with SharedSparkSession {
df2.selectExpr("to_json(a, map('timestampFormat', 'dd/MM/yyyy HH:mm'))"),
Row("""{"_1":"26/08/2015 18:00"}""") :: Nil)

val errMsg1 = intercept[AnalysisException] {
df2.selectExpr("to_json(a, named_struct('a', 1))")
}
assert(errMsg1.getMessage.startsWith("Must use a map() function for options"))
checkError(
exception = intercept[AnalysisException] {
df2.selectExpr("to_json(a, named_struct('a', 1))")
},
errorClass = "INVALID_OPTIONS.NON_MAP_FUNCTION",
parameters = Map.empty,
context = ExpectedContext(
fragment = "to_json(a, named_struct('a', 1))",
start = 0,
stop = 31
)
)

val errMsg2 = intercept[AnalysisException] {
df2.selectExpr("to_json(a, map('a', 1))")
}
assert(errMsg2.getMessage.startsWith(
"A type of keys and values in map() must be string, but got"))
checkError(
exception = intercept[AnalysisException] {
df2.selectExpr("to_json(a, map('a', 1))")
},
errorClass = "INVALID_OPTIONS.NON_STRING_TYPE",
parameters = Map("mapType" -> "\"MAP<STRING, INT>\""),
context = ExpectedContext(
fragment = "to_json(a, map('a', 1))",
start = 0,
stop = 22
)
)
}

test("SPARK-19967 Support from_json in SQL") {
Expand Down Expand Up @@ -441,15 +456,30 @@ class JsonFunctionsSuite extends QueryTest with SharedSparkSession {
df3.selectExpr("""from_json(value, 'time InvalidType')""")
}
assert(errMsg2.getMessage.contains("DataType invalidtype is not supported"))
val errMsg3 = intercept[AnalysisException] {
df3.selectExpr("from_json(value, 'time Timestamp', named_struct('a', 1))")
}
assert(errMsg3.getMessage.startsWith("Must use a map() function for options"))
val errMsg4 = intercept[AnalysisException] {
df3.selectExpr("from_json(value, 'time Timestamp', map('a', 1))")
}
assert(errMsg4.getMessage.startsWith(
"A type of keys and values in map() must be string, but got"))
checkError(
exception = intercept[AnalysisException] {
df3.selectExpr("from_json(value, 'time Timestamp', named_struct('a', 1))")
},
errorClass = "INVALID_OPTIONS.NON_MAP_FUNCTION",
parameters = Map.empty,
context = ExpectedContext(
fragment = "from_json(value, 'time Timestamp', named_struct('a', 1))",
start = 0,
stop = 55
)
)
checkError(
exception = intercept[AnalysisException] {
df3.selectExpr("from_json(value, 'time Timestamp', map('a', 1))")
},
errorClass = "INVALID_OPTIONS.NON_STRING_TYPE",
parameters = Map("mapType" -> "\"MAP<STRING, INT>\""),
context = ExpectedContext(
fragment = "from_json(value, 'time Timestamp', map('a', 1))",
start = 0,
stop = 46
)
)
}

test("SPARK-24027: from_json - map<string, int>") {
Expand Down