diff --git a/core/src/main/resources/error/error-classes.json b/core/src/main/resources/error/error-classes.json index 000eb92f1cb6c..2696c53c665fc 100644 --- a/core/src/main/resources/error/error-classes.json +++ b/core/src/main/resources/error/error-classes.json @@ -1140,6 +1140,11 @@ } } }, + "UNSUPPORTED_TYPED_LITERAL" : { + "message" : [ + "Literals of the type <unsupportedType> are not supported. Supported types are <supportedTypes>." + ] + }, "UNTYPED_SCALA_UDF" : { "message" : [ "You're using untyped Scala UDF, which does not have the input type information. Spark may blindly pass null to the Scala closure with primitive-type argument, and the closure will see the default value of the Java type for the null argument, e.g. `udf((x: Int) => x, IntegerType)`, the result is 0 for null input. To get rid of this error, you could:", @@ -1248,11 +1253,6 @@ "Cannot parse the INTERVAL value: <intervalString>." ] }, - "_LEGACY_ERROR_TEMP_0021" : { - "message" : [ - "Literals of type '<valueType>' are currently not supported." - ] - }, "_LEGACY_ERROR_TEMP_0022" : { "message" : [ "<message>." 
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala index eca7bbe3e2936..fb6d9bb126829 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala @@ -2397,7 +2397,11 @@ class AstBuilder extends SqlBaseParserBaseVisitor[AnyRef] with SQLConfHelper wit s"contains illegal character for hexBinary: $padding$value"); } case other => - throw QueryParsingErrors.literalValueTypeUnsupportedError(other, ctx) + throw QueryParsingErrors.literalValueTypeUnsupportedError( + unsupportedType = other, + supportedTypes = + Seq("DATE", "TIMESTAMP_NTZ", "TIMESTAMP_LTZ", "TIMESTAMP", "INTERVAL", "X"), + ctx) } } catch { case e: IllegalArgumentException => diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala index 83313968082c0..e5f8f34d47a78 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala @@ -234,10 +234,14 @@ private[sql] object QueryParsingErrors extends QueryErrorsBase { } def literalValueTypeUnsupportedError( - valueType: String, ctx: TypeConstructorContext): Throwable = { + unsupportedType: String, + supportedTypes: Seq[String], + ctx: TypeConstructorContext): Throwable = { new ParseException( - errorClass = "_LEGACY_ERROR_TEMP_0021", - messageParameters = Map("valueType" -> valueType), + errorClass = "UNSUPPORTED_TYPED_LITERAL", + messageParameters = Map( + "unsupportedType" -> toSQLType(unsupportedType), + "supportedTypes" -> supportedTypes.map(toSQLType).mkString(", ")), ctx) } diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala index e1316c5b132e6..884e782736cde 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala @@ -658,8 +658,11 @@ class ExpressionParserSuite extends AnalysisTest { checkError( exception = parseException("GEO '(10,-6)'"), - errorClass = "_LEGACY_ERROR_TEMP_0021", - parameters = Map("valueType" -> "GEO"), + errorClass = "UNSUPPORTED_TYPED_LITERAL", + parameters = Map( + "unsupportedType" -> "\"GEO\"", + "supportedTypes" -> + """"DATE", "TIMESTAMP_NTZ", "TIMESTAMP_LTZ", "TIMESTAMP", "INTERVAL", "X""""), context = ExpectedContext( fragment = "GEO '(10,-6)'", start = 0, diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/literals.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/literals.sql.out index 0f205430cf1b0..108cfd19debc1 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/literals.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ansi/literals.sql.out @@ -442,9 +442,10 @@ struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException { - "errorClass" : "_LEGACY_ERROR_TEMP_0021", + "errorClass" : "UNSUPPORTED_TYPED_LITERAL", "messageParameters" : { - "valueType" : "GEO" + "supportedTypes" : "\"DATE\", \"TIMESTAMP_NTZ\", \"TIMESTAMP_LTZ\", \"TIMESTAMP\", \"INTERVAL\", \"X\"", + "unsupportedType" : "\"GEO\"" }, "queryContext" : [ { "objectType" : "", diff --git a/sql/core/src/test/resources/sql-tests/results/literals.sql.out b/sql/core/src/test/resources/sql-tests/results/literals.sql.out index 0f205430cf1b0..108cfd19debc1 100644 --- a/sql/core/src/test/resources/sql-tests/results/literals.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/literals.sql.out @@ -442,9 +442,10 @@ struct<> -- !query output org.apache.spark.sql.catalyst.parser.ParseException { - 
"errorClass" : "_LEGACY_ERROR_TEMP_0021", + "errorClass" : "UNSUPPORTED_TYPED_LITERAL", "messageParameters" : { - "valueType" : "GEO" + "supportedTypes" : "\"DATE\", \"TIMESTAMP_NTZ\", \"TIMESTAMP_LTZ\", \"TIMESTAMP\", \"INTERVAL\", \"X\"", + "unsupportedType" : "\"GEO\"" }, "queryContext" : [ { "objectType" : "",