From e394d1ad4415fce47ce49977d2cd78b25bdb2bcb Mon Sep 17 00:00:00 2001 From: Max Gekk Date: Tue, 24 May 2022 14:15:38 +0300 Subject: [PATCH 1/2] [SPARK-39255][SQL] Improve error messages In the PR, I propose to improve errors of the following error classes: 1. NON_PARTITION_COLUMN - `a non-partition column name` -> `the non-partition column` 2. UNSUPPORTED_SAVE_MODE - `a not existent path` -> `a non existent path`. 3. INVALID_FIELD_NAME. Quote ids to follow the rules https://github.com/apache/spark/pull/36621. 4. FAILED_SET_ORIGINAL_PERMISSION_BACK. It is renamed to RESET_PERMISSION_TO_ORIGINAL. 5. NON_LITERAL_PIVOT_VALUES - Wrap the error's expression in double quotes. The PR adds a new helper method `toSQLExpr()` for that. 6. CAST_INVALID_INPUT - Add the recommendation: `... Correct the value as per the syntax, or change its target type.` To improve user experience with Spark SQL by making error messages clearer. Yes, it changes user-facing error messages. By running the affected test suites: ``` $ build/sbt "sql/testOnly org.apache.spark.sql.SQLQueryTestSuite" $ build/sbt "sql/testOnly *QueryCompilationErrorsDSv2Suite" $ build/sbt "sql/testOnly *QueryCompilationErrorsSuite" $ build/sbt "sql/testOnly *QueryExecutionAnsiErrorsSuite" $ build/sbt "sql/testOnly *QueryExecutionErrorsSuite" $ build/sbt "sql/testOnly *QueryParsingErrorsSuite*" ``` Closes #36635 from MaxGekk/error-class-improve-msg-3. 
Lead-authored-by: Max Gekk Co-authored-by: Maxim Gekk Signed-off-by: Max Gekk (cherry picked from commit 625afb4e1aefda59191d79b31f8c94941aedde1e) Signed-off-by: Max Gekk --- .../main/resources/error/error-classes.json | 12 ++-- .../sql/errors/QueryCompilationErrors.scala | 4 +- .../spark/sql/errors/QueryErrorsBase.scala | 10 ++- .../sql/errors/QueryExecutionErrors.scala | 2 +- .../spark/sql/types/StructTypeSuite.scala | 22 +++--- .../sql-tests/results/ansi/cast.sql.out | 68 +++++++++---------- .../sql-tests/results/ansi/date.sql.out | 6 +- .../ansi/datetime-parsing-invalid.sql.out | 4 +- .../sql-tests/results/ansi/interval.sql.out | 20 +++--- .../results/ansi/string-functions.sql.out | 8 +-- .../resources/sql-tests/results/pivot.sql.out | 2 +- .../results/postgreSQL/boolean.sql.out | 32 ++++----- .../results/postgreSQL/float4.sql.out | 8 +-- .../results/postgreSQL/float8.sql.out | 8 +-- .../sql-tests/results/postgreSQL/text.sql.out | 4 +- .../results/postgreSQL/window_part2.sql.out | 2 +- .../results/postgreSQL/window_part3.sql.out | 2 +- .../results/postgreSQL/window_part4.sql.out | 2 +- .../timestampNTZ/timestamp-ansi.sql.out | 2 +- .../sql-tests/results/udf/udf-pivot.sql.out | 2 +- 20 files changed, 115 insertions(+), 105 deletions(-) diff --git a/core/src/main/resources/error/error-classes.json b/core/src/main/resources/error/error-classes.json index 60a432163b5a9..463bf798e49c3 100644 --- a/core/src/main/resources/error/error-classes.json +++ b/core/src/main/resources/error/error-classes.json @@ -26,7 +26,7 @@ "message" : [ "Cannot use a mixture of aggregate function and group aggregate pandas UDF" ] }, "CAST_INVALID_INPUT" : { - "message" : [ "The value of the type cannot be cast to because it is malformed. To return NULL instead, use `try_cast`. If necessary set to \"false\" to bypass this error." ], + "message" : [ "The value of the type cannot be cast to because it is malformed. Correct the value as per the syntax, or change its target type. 
To return NULL instead, use `try_cast`. If necessary set to \"false\" to bypass this error." ], "sqlState" : "42000" }, "CAST_OVERFLOW" : { @@ -55,9 +55,6 @@ "message" : [ "Failed to rename to as destination already exists" ], "sqlState" : "22023" }, - "FAILED_SET_ORIGINAL_PERMISSION_BACK" : { - "message" : [ "Failed to set original permission back to the created path: . Exception: " ] - }, "GRAPHITE_SINK_INVALID_PROTOCOL" : { "message" : [ "Invalid Graphite protocol: " ] }, @@ -129,11 +126,11 @@ "sqlState" : "42000" }, "NON_LITERAL_PIVOT_VALUES" : { - "message" : [ "Literal expressions required for pivot values, found ''" ], + "message" : [ "Literal expressions required for pivot values, found ." ], "sqlState" : "42000" }, "NON_PARTITION_COLUMN" : { - "message" : [ "PARTITION clause cannot contain a non-partition column name: " ], + "message" : [ "PARTITION clause cannot contain the non-partition column: ." ], "sqlState" : "42000" }, "PARSE_CHAR_MISSING_LENGTH" : { @@ -156,6 +153,9 @@ "message" : [ "Failed to rename as was not found" ], "sqlState" : "22023" }, + "RESET_PERMISSION_TO_ORIGINAL" : { + "message" : [ "Failed to set original permission back to the created path: . Exception: " ] + }, "SECOND_FUNCTION_ARGUMENT_NOT_INTEGER" : { "message" : [ "The second argument of '' function needs to be an integer." 
], "sqlState" : "22023" diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala index 2d2dba63e3a59..70ef344fda59b 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala @@ -82,7 +82,7 @@ object QueryCompilationErrors extends QueryErrorsBase { def nonLiteralPivotValError(pivotVal: Expression): Throwable = { new AnalysisException( errorClass = "NON_LITERAL_PIVOT_VALUES", - messageParameters = Array(pivotVal.toString)) + messageParameters = Array(toSQLExpr(pivotVal))) } def pivotValDataTypeMismatchError(pivotVal: Expression, pivotCol: Expression): Throwable = { @@ -2371,7 +2371,7 @@ object QueryCompilationErrors extends QueryErrorsBase { def invalidFieldName(fieldName: Seq[String], path: Seq[String], context: Origin): Throwable = { new AnalysisException( errorClass = "INVALID_FIELD_NAME", - messageParameters = Array(fieldName.quoted, path.quoted), + messageParameters = Array(toSQLId(fieldName), toSQLId(path)), origin = context) } diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryErrorsBase.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryErrorsBase.scala index 52ffa6d32fd9b..758a0d34b2689 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryErrorsBase.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryErrorsBase.scala @@ -19,8 +19,8 @@ package org.apache.spark.sql.errors import java.util.Locale -import org.apache.spark.sql.catalyst.expressions.Literal -import org.apache.spark.sql.catalyst.util.quoteIdentifier +import org.apache.spark.sql.catalyst.expressions.{Expression, Literal} +import org.apache.spark.sql.catalyst.util.{quoteIdentifier, toPrettySQL} import org.apache.spark.sql.types.{DataType, DoubleType, FloatType} /** 
@@ -39,6 +39,8 @@ import org.apache.spark.sql.types.{DataType, DoubleType, FloatType} * For example: "spark.sql.ansi.enabled". * 6. Any values of datasource options or SQL configs shall be double quoted. * For example: "true", "CORRECTED". + * 7. SQL expressions shall be wrapped by double quotes. + * For example: "earnings + 1". */ trait QueryErrorsBase { // Converts an error class parameter to its SQL representation @@ -84,4 +86,8 @@ trait QueryErrorsBase { def toDSOption(option: String): String = { quoteByDefault(option) } + + def toSQLExpr(e: Expression): String = { + quoteByDefault(toPrettySQL(e)) + } } diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala index 487be632f62db..22dc100a43476 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala @@ -1720,7 +1720,7 @@ object QueryExecutionErrors extends QueryErrorsBase { permission: FsPermission, path: Path, e: Throwable): Throwable = { - new SparkSecurityException(errorClass = "FAILED_SET_ORIGINAL_PERMISSION_BACK", + new SparkSecurityException(errorClass = "RESET_PERMISSION_TO_ORIGINAL", Array(permission.toString, path.toString, e.getMessage)) } diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/types/StructTypeSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/types/StructTypeSuite.scala index 16f122334f370..0352943086d93 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/types/StructTypeSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/types/StructTypeSuite.scala @@ -319,7 +319,8 @@ class StructTypeSuite extends SparkFunSuite with SQLHelper { var e = intercept[AnalysisException] { check(Seq("S1", "S12", "S123"), None) } - assert(e.getMessage.contains("Field name S1.S12.S123 is invalid: s1.s12 is not a 
struct")) + assert(e.getMessage.contains( + "Field name `S1`.`S12`.`S123` is invalid: `s1`.`s12` is not a struct")) // ambiguous name e = intercept[AnalysisException] { @@ -333,17 +334,19 @@ class StructTypeSuite extends SparkFunSuite with SQLHelper { e = intercept[AnalysisException] { check(Seq("m1", "key"), None) } - assert(e.getMessage.contains("Field name m1.key is invalid: m1 is not a struct")) + assert(e.getMessage.contains("Field name `m1`.`key` is invalid: `m1` is not a struct")) checkCollection(Seq("m1", "key"), Some(Seq("m1") -> StructField("key", IntegerType, false))) checkCollection(Seq("M1", "value"), Some(Seq("m1") -> StructField("value", IntegerType))) e = intercept[AnalysisException] { checkCollection(Seq("M1", "key", "name"), None) } - assert(e.getMessage.contains("Field name M1.key.name is invalid: m1.key is not a struct")) + assert(e.getMessage.contains( + "Field name `M1`.`key`.`name` is invalid: `m1`.`key` is not a struct")) e = intercept[AnalysisException] { checkCollection(Seq("M1", "value", "name"), None) } - assert(e.getMessage.contains("Field name M1.value.name is invalid: m1.value is not a struct")) + assert(e.getMessage.contains( + "Field name `M1`.`value`.`name` is invalid: `m1`.`value` is not a struct")) // map of struct checkCollection(Seq("M2", "key", "A"), @@ -355,24 +358,25 @@ class StructTypeSuite extends SparkFunSuite with SQLHelper { e = intercept[AnalysisException] { checkCollection(Seq("m2", "key", "A", "name"), None) } - assert(e.getMessage.contains("Field name m2.key.A.name is invalid: m2.key.a is not a struct")) + assert(e.getMessage.contains( + "Field name `m2`.`key`.`A`.`name` is invalid: `m2`.`key`.`a` is not a struct")) e = intercept[AnalysisException] { checkCollection(Seq("M2", "value", "b", "name"), None) } assert(e.getMessage.contains( - "Field name M2.value.b.name is invalid: m2.value.b is not a struct")) + "Field name `M2`.`value`.`b`.`name` is invalid: `m2`.`value`.`b` is not a struct")) // simple array type e = 
intercept[AnalysisException] { check(Seq("A1", "element"), None) } - assert(e.getMessage.contains("Field name A1.element is invalid: a1 is not a struct")) + assert(e.getMessage.contains("Field name `A1`.`element` is invalid: `a1` is not a struct")) checkCollection(Seq("A1", "element"), Some(Seq("a1") -> StructField("element", IntegerType))) e = intercept[AnalysisException] { checkCollection(Seq("A1", "element", "name"), None) } assert(e.getMessage.contains( - "Field name A1.element.name is invalid: a1.element is not a struct")) + "Field name `A1`.`element`.`name` is invalid: `a1`.`element` is not a struct")) // array of struct checkCollection(Seq("A2", "element", "C"), @@ -382,7 +386,7 @@ class StructTypeSuite extends SparkFunSuite with SQLHelper { checkCollection(Seq("a2", "element", "C", "name"), None) } assert(e.getMessage.contains( - "Field name a2.element.C.name is invalid: a2.element.c is not a struct")) + "Field name `a2`.`element`.`C`.`name` is invalid: `a2`.`element`.`c` is not a struct")) } test("SPARK-36807: Merge ANSI interval types to a tightest common type") { diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out index 654433c0ca561..6286afecbef80 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out @@ -8,7 +8,7 @@ SELECT CAST('1.23' AS int) struct<> -- !query output org.apache.spark.SparkNumberFormatException -The value '1.23' of the type "STRING" cannot be cast to "INT" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value '1.23' of the type "STRING" cannot be cast to "INT" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. 
== SQL(line 1, position 7) == SELECT CAST('1.23' AS int) ^^^^^^^^^^^^^^^^^^^ @@ -20,7 +20,7 @@ SELECT CAST('1.23' AS long) struct<> -- !query output org.apache.spark.SparkNumberFormatException -The value '1.23' of the type "STRING" cannot be cast to "BIGINT" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value '1.23' of the type "STRING" cannot be cast to "BIGINT" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 1, position 7) == SELECT CAST('1.23' AS long) ^^^^^^^^^^^^^^^^^^^^ @@ -32,7 +32,7 @@ SELECT CAST('-4.56' AS int) struct<> -- !query output org.apache.spark.SparkNumberFormatException -The value '-4.56' of the type "STRING" cannot be cast to "INT" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value '-4.56' of the type "STRING" cannot be cast to "INT" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 1, position 7) == SELECT CAST('-4.56' AS int) ^^^^^^^^^^^^^^^^^^^^ @@ -44,7 +44,7 @@ SELECT CAST('-4.56' AS long) struct<> -- !query output org.apache.spark.SparkNumberFormatException -The value '-4.56' of the type "STRING" cannot be cast to "BIGINT" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value '-4.56' of the type "STRING" cannot be cast to "BIGINT" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. 
If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 1, position 7) == SELECT CAST('-4.56' AS long) ^^^^^^^^^^^^^^^^^^^^^ @@ -56,7 +56,7 @@ SELECT CAST('abc' AS int) struct<> -- !query output org.apache.spark.SparkNumberFormatException -The value 'abc' of the type "STRING" cannot be cast to "INT" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value 'abc' of the type "STRING" cannot be cast to "INT" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 1, position 7) == SELECT CAST('abc' AS int) ^^^^^^^^^^^^^^^^^^ @@ -68,7 +68,7 @@ SELECT CAST('abc' AS long) struct<> -- !query output org.apache.spark.SparkNumberFormatException -The value 'abc' of the type "STRING" cannot be cast to "BIGINT" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value 'abc' of the type "STRING" cannot be cast to "BIGINT" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 1, position 7) == SELECT CAST('abc' AS long) ^^^^^^^^^^^^^^^^^^^ @@ -80,7 +80,7 @@ SELECT CAST('abc' AS float) struct<> -- !query output org.apache.spark.SparkNumberFormatException -The value 'abc' of the type "STRING" cannot be cast to "FLOAT" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value 'abc' of the type "STRING" cannot be cast to "FLOAT" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. 
If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 1, position 7) == SELECT CAST('abc' AS float) ^^^^^^^^^^^^^^^^^^^^ @@ -92,7 +92,7 @@ SELECT CAST('abc' AS double) struct<> -- !query output org.apache.spark.SparkNumberFormatException -The value 'abc' of the type "STRING" cannot be cast to "DOUBLE" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value 'abc' of the type "STRING" cannot be cast to "DOUBLE" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 1, position 7) == SELECT CAST('abc' AS double) ^^^^^^^^^^^^^^^^^^^^^ @@ -104,7 +104,7 @@ SELECT CAST('1234567890123' AS int) struct<> -- !query output org.apache.spark.SparkNumberFormatException -The value '1234567890123' of the type "STRING" cannot be cast to "INT" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value '1234567890123' of the type "STRING" cannot be cast to "INT" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 1, position 7) == SELECT CAST('1234567890123' AS int) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -116,7 +116,7 @@ SELECT CAST('12345678901234567890123' AS long) struct<> -- !query output org.apache.spark.SparkNumberFormatException -The value '12345678901234567890123' of the type "STRING" cannot be cast to "BIGINT" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. 
+The value '12345678901234567890123' of the type "STRING" cannot be cast to "BIGINT" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 1, position 7) == SELECT CAST('12345678901234567890123' AS long) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -128,7 +128,7 @@ SELECT CAST('' AS int) struct<> -- !query output org.apache.spark.SparkNumberFormatException -The value '' of the type "STRING" cannot be cast to "INT" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value '' of the type "STRING" cannot be cast to "INT" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 1, position 7) == SELECT CAST('' AS int) ^^^^^^^^^^^^^^^ @@ -140,7 +140,7 @@ SELECT CAST('' AS long) struct<> -- !query output org.apache.spark.SparkNumberFormatException -The value '' of the type "STRING" cannot be cast to "BIGINT" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value '' of the type "STRING" cannot be cast to "BIGINT" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 1, position 7) == SELECT CAST('' AS long) ^^^^^^^^^^^^^^^^ @@ -152,7 +152,7 @@ SELECT CAST('' AS float) struct<> -- !query output org.apache.spark.SparkNumberFormatException -The value '' of the type "STRING" cannot be cast to "FLOAT" because it is malformed. To return NULL instead, use `try_cast`. 
If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value '' of the type "STRING" cannot be cast to "FLOAT" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 1, position 7) == SELECT CAST('' AS float) ^^^^^^^^^^^^^^^^^ @@ -164,7 +164,7 @@ SELECT CAST('' AS double) struct<> -- !query output org.apache.spark.SparkNumberFormatException -The value '' of the type "STRING" cannot be cast to "DOUBLE" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value '' of the type "STRING" cannot be cast to "DOUBLE" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 1, position 7) == SELECT CAST('' AS double) ^^^^^^^^^^^^^^^^^^ @@ -192,7 +192,7 @@ SELECT CAST('123.a' AS int) struct<> -- !query output org.apache.spark.SparkNumberFormatException -The value '123.a' of the type "STRING" cannot be cast to "INT" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value '123.a' of the type "STRING" cannot be cast to "INT" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 1, position 7) == SELECT CAST('123.a' AS int) ^^^^^^^^^^^^^^^^^^^^ @@ -204,7 +204,7 @@ SELECT CAST('123.a' AS long) struct<> -- !query output org.apache.spark.SparkNumberFormatException -The value '123.a' of the type "STRING" cannot be cast to "BIGINT" because it is malformed. To return NULL instead, use `try_cast`. 
If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value '123.a' of the type "STRING" cannot be cast to "BIGINT" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 1, position 7) == SELECT CAST('123.a' AS long) ^^^^^^^^^^^^^^^^^^^^^ @@ -216,7 +216,7 @@ SELECT CAST('123.a' AS float) struct<> -- !query output org.apache.spark.SparkNumberFormatException -The value '123.a' of the type "STRING" cannot be cast to "FLOAT" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value '123.a' of the type "STRING" cannot be cast to "FLOAT" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 1, position 7) == SELECT CAST('123.a' AS float) ^^^^^^^^^^^^^^^^^^^^^^ @@ -228,7 +228,7 @@ SELECT CAST('123.a' AS double) struct<> -- !query output org.apache.spark.SparkNumberFormatException -The value '123.a' of the type "STRING" cannot be cast to "DOUBLE" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value '123.a' of the type "STRING" cannot be cast to "DOUBLE" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. 
== SQL(line 1, position 7) == SELECT CAST('123.a' AS double) ^^^^^^^^^^^^^^^^^^^^^^^ @@ -248,7 +248,7 @@ SELECT CAST('-2147483649' AS int) struct<> -- !query output org.apache.spark.SparkNumberFormatException -The value '-2147483649' of the type "STRING" cannot be cast to "INT" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value '-2147483649' of the type "STRING" cannot be cast to "INT" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 1, position 7) == SELECT CAST('-2147483649' AS int) ^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -268,7 +268,7 @@ SELECT CAST('2147483648' AS int) struct<> -- !query output org.apache.spark.SparkNumberFormatException -The value '2147483648' of the type "STRING" cannot be cast to "INT" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value '2147483648' of the type "STRING" cannot be cast to "INT" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 1, position 7) == SELECT CAST('2147483648' AS int) ^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -288,7 +288,7 @@ SELECT CAST('-9223372036854775809' AS long) struct<> -- !query output org.apache.spark.SparkNumberFormatException -The value '-9223372036854775809' of the type "STRING" cannot be cast to "BIGINT" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value '-9223372036854775809' of the type "STRING" cannot be cast to "BIGINT" because it is malformed. 
Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 1, position 7) == SELECT CAST('-9223372036854775809' AS long) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -308,7 +308,7 @@ SELECT CAST('9223372036854775808' AS long) struct<> -- !query output org.apache.spark.SparkNumberFormatException -The value '9223372036854775808' of the type "STRING" cannot be cast to "BIGINT" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value '9223372036854775808' of the type "STRING" cannot be cast to "BIGINT" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 1, position 7) == SELECT CAST('9223372036854775808' AS long) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -567,7 +567,7 @@ select cast('1中文' as tinyint) struct<> -- !query output org.apache.spark.SparkNumberFormatException -The value '1中文' of the type "STRING" cannot be cast to "TINYINT" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value '1中文' of the type "STRING" cannot be cast to "TINYINT" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 1, position 7) == select cast('1中文' as tinyint) ^^^^^^^^^^^^^^^^^^^^^^ @@ -579,7 +579,7 @@ select cast('1中文' as smallint) struct<> -- !query output org.apache.spark.SparkNumberFormatException -The value '1中文' of the type "STRING" cannot be cast to "SMALLINT" because it is malformed. To return NULL instead, use `try_cast`. 
If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value '1中文' of the type "STRING" cannot be cast to "SMALLINT" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 1, position 7) == select cast('1中文' as smallint) ^^^^^^^^^^^^^^^^^^^^^^^ @@ -591,7 +591,7 @@ select cast('1中文' as INT) struct<> -- !query output org.apache.spark.SparkNumberFormatException -The value '1中文' of the type "STRING" cannot be cast to "INT" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value '1中文' of the type "STRING" cannot be cast to "INT" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 1, position 7) == select cast('1中文' as INT) ^^^^^^^^^^^^^^^^^^ @@ -603,7 +603,7 @@ select cast('中文1' as bigint) struct<> -- !query output org.apache.spark.SparkNumberFormatException -The value '中文1' of the type "STRING" cannot be cast to "BIGINT" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value '中文1' of the type "STRING" cannot be cast to "BIGINT" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 1, position 7) == select cast('中文1' as bigint) ^^^^^^^^^^^^^^^^^^^^^ @@ -615,7 +615,7 @@ select cast('1中文' as bigint) struct<> -- !query output org.apache.spark.SparkNumberFormatException -The value '1中文' of the type "STRING" cannot be cast to "BIGINT" because it is malformed. 
To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value '1中文' of the type "STRING" cannot be cast to "BIGINT" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 1, position 7) == select cast('1中文' as bigint) ^^^^^^^^^^^^^^^^^^^^^ @@ -646,7 +646,7 @@ struct<> -- !query output org.apache.spark.SparkRuntimeException The value ' - xyz ' of the type "STRING" cannot be cast to "BOOLEAN" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. + xyz ' of the type "STRING" cannot be cast to "BOOLEAN" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 1, position 7) == select cast('\t\n xyz \t\r' as boolean) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -678,7 +678,7 @@ select cast('xyz' as decimal(4, 2)) struct<> -- !query output org.apache.spark.SparkNumberFormatException -The value 'xyz' of the type "STRING" cannot be cast to "DECIMAL(4,2)" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value 'xyz' of the type "STRING" cannot be cast to "DECIMAL(4,2)" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. 
== SQL(line 1, position 7) == select cast('xyz' as decimal(4, 2)) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -698,7 +698,7 @@ select cast('a' as date) struct<> -- !query output org.apache.spark.SparkDateTimeException -The value 'a' of the type "STRING" cannot be cast to "DATE" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value 'a' of the type "STRING" cannot be cast to "DATE" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 1, position 7) == select cast('a' as date) ^^^^^^^^^^^^^^^^^ @@ -718,7 +718,7 @@ select cast('a' as timestamp) struct<> -- !query output org.apache.spark.SparkDateTimeException -The value 'a' of the type "STRING" cannot be cast to "TIMESTAMP" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value 'a' of the type "STRING" cannot be cast to "TIMESTAMP" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 1, position 7) == select cast('a' as timestamp) ^^^^^^^^^^^^^^^^^^^^^^ @@ -738,7 +738,7 @@ select cast('a' as timestamp_ntz) struct<> -- !query output org.apache.spark.SparkDateTimeException -The value 'a' of the type "STRING" cannot be cast to "TIMESTAMP_NTZ" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value 'a' of the type "STRING" cannot be cast to "TIMESTAMP_NTZ" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. 
If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 1, position 7) == select cast('a' as timestamp_ntz) ^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -750,7 +750,7 @@ select cast(cast('inf' as double) as timestamp) struct<> -- !query output org.apache.spark.SparkDateTimeException -The value Infinity of the type "DOUBLE" cannot be cast to "TIMESTAMP" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value Infinity of the type "DOUBLE" cannot be cast to "TIMESTAMP" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 1, position 7) == select cast(cast('inf' as double) as timestamp) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -762,7 +762,7 @@ select cast(cast('inf' as float) as timestamp) struct<> -- !query output org.apache.spark.SparkDateTimeException -The value Infinity of the type "DOUBLE" cannot be cast to "TIMESTAMP" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value Infinity of the type "DOUBLE" cannot be cast to "TIMESTAMP" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. 
== SQL(line 1, position 7) == select cast(cast('inf' as float) as timestamp) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out index 2cf50284d6639..0bb5de24831fc 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out @@ -232,7 +232,7 @@ select next_day("xx", "Mon") struct<> -- !query output org.apache.spark.SparkDateTimeException -The value 'xx' of the type "STRING" cannot be cast to "DATE" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value 'xx' of the type "STRING" cannot be cast to "DATE" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 1, position 7) == select next_day("xx", "Mon") ^^^^^^^^^^^^^^^^^^^^^ @@ -327,7 +327,7 @@ select date_add('2011-11-11', '1.2') struct<> -- !query output org.apache.spark.SparkNumberFormatException -The value '1.2' of the type "STRING" cannot be cast to "INT" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value '1.2' of the type "STRING" cannot be cast to "INT" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. 
== SQL(line 1, position 7) == select date_add('2011-11-11', '1.2') ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -438,7 +438,7 @@ select date_sub(date'2011-11-11', '1.2') struct<> -- !query output org.apache.spark.SparkNumberFormatException -The value '1.2' of the type "STRING" cannot be cast to "INT" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value '1.2' of the type "STRING" cannot be cast to "INT" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 1, position 7) == select date_sub(date'2011-11-11', '1.2') ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/datetime-parsing-invalid.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/datetime-parsing-invalid.sql.out index d1eb604d4fcd2..c823ca55f3b0d 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/datetime-parsing-invalid.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ansi/datetime-parsing-invalid.sql.out @@ -242,7 +242,7 @@ select cast("Unparseable" as timestamp) struct<> -- !query output org.apache.spark.SparkDateTimeException -The value 'Unparseable' of the type "STRING" cannot be cast to "TIMESTAMP" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value 'Unparseable' of the type "STRING" cannot be cast to "TIMESTAMP" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. 
== SQL(line 1, position 7) == select cast("Unparseable" as timestamp) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -254,7 +254,7 @@ select cast("Unparseable" as date) struct<> -- !query output org.apache.spark.SparkDateTimeException -The value 'Unparseable' of the type "STRING" cannot be cast to "DATE" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value 'Unparseable' of the type "STRING" cannot be cast to "DATE" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 1, position 7) == select cast("Unparseable" as date) ^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out index 5d2ead16511f1..cefa7cf20ac87 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out @@ -122,7 +122,7 @@ select interval 2 second * 'a' struct<> -- !query output org.apache.spark.SparkNumberFormatException -The value 'a' of the type "STRING" cannot be cast to "DOUBLE" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value 'a' of the type "STRING" cannot be cast to "DOUBLE" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. 
== SQL(line 1, position 7) == select interval 2 second * 'a' ^^^^^^^^^^^^^^^^^^^^^^^ @@ -134,7 +134,7 @@ select interval 2 second / 'a' struct<> -- !query output org.apache.spark.SparkNumberFormatException -The value 'a' of the type "STRING" cannot be cast to "DOUBLE" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value 'a' of the type "STRING" cannot be cast to "DOUBLE" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 1, position 7) == select interval 2 second / 'a' ^^^^^^^^^^^^^^^^^^^^^^^ @@ -146,7 +146,7 @@ select interval 2 year * 'a' struct<> -- !query output org.apache.spark.SparkNumberFormatException -The value 'a' of the type "STRING" cannot be cast to "DOUBLE" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value 'a' of the type "STRING" cannot be cast to "DOUBLE" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 1, position 7) == select interval 2 year * 'a' ^^^^^^^^^^^^^^^^^^^^^ @@ -158,7 +158,7 @@ select interval 2 year / 'a' struct<> -- !query output org.apache.spark.SparkNumberFormatException -The value 'a' of the type "STRING" cannot be cast to "DOUBLE" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value 'a' of the type "STRING" cannot be cast to "DOUBLE" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. 
If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 1, position 7) == select interval 2 year / 'a' ^^^^^^^^^^^^^^^^^^^^^ @@ -186,7 +186,7 @@ select 'a' * interval 2 second struct<> -- !query output org.apache.spark.SparkNumberFormatException -The value 'a' of the type "STRING" cannot be cast to "DOUBLE" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value 'a' of the type "STRING" cannot be cast to "DOUBLE" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 1, position 7) == select 'a' * interval 2 second ^^^^^^^^^^^^^^^^^^^^^^^ @@ -198,7 +198,7 @@ select 'a' * interval 2 year struct<> -- !query output org.apache.spark.SparkNumberFormatException -The value 'a' of the type "STRING" cannot be cast to "DOUBLE" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value 'a' of the type "STRING" cannot be cast to "DOUBLE" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 1, position 7) == select 'a' * interval 2 year ^^^^^^^^^^^^^^^^^^^^^ @@ -1516,7 +1516,7 @@ select '4 11:11' - interval '4 22:12' day to minute struct<> -- !query output org.apache.spark.SparkDateTimeException -The value '4 11:11' of the type "STRING" cannot be cast to "TIMESTAMP" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value '4 11:11' of the type "STRING" cannot be cast to "TIMESTAMP" because it is malformed. 
Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 1, position 7) == select '4 11:11' - interval '4 22:12' day to minute ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -1528,7 +1528,7 @@ select '4 12:12:12' + interval '4 22:12' day to minute struct<> -- !query output org.apache.spark.SparkDateTimeException -The value '4 12:12:12' of the type "STRING" cannot be cast to "TIMESTAMP" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value '4 12:12:12' of the type "STRING" cannot be cast to "TIMESTAMP" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 1, position 7) == select '4 12:12:12' + interval '4 22:12' day to minute ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -1566,7 +1566,7 @@ select str - interval '4 22:12' day to minute from interval_view struct<> -- !query output org.apache.spark.SparkDateTimeException -The value '1' of the type "STRING" cannot be cast to "TIMESTAMP" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value '1' of the type "STRING" cannot be cast to "TIMESTAMP" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. 
== SQL(line 1, position 7) == select str - interval '4 22:12' day to minute from interval_view ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -1578,7 +1578,7 @@ select str + interval '4 22:12' day to minute from interval_view struct<> -- !query output org.apache.spark.SparkDateTimeException -The value '1' of the type "STRING" cannot be cast to "TIMESTAMP" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value '1' of the type "STRING" cannot be cast to "TIMESTAMP" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 1, position 7) == select str + interval '4 22:12' day to minute from interval_view ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/string-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/string-functions.sql.out index ad388e211f588..5621759421019 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/string-functions.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ansi/string-functions.sql.out @@ -82,7 +82,7 @@ select left("abcd", -2), left("abcd", 0), left("abcd", 'a') struct<> -- !query output org.apache.spark.SparkNumberFormatException -The value 'a' of the type "STRING" cannot be cast to "INT" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value 'a' of the type "STRING" cannot be cast to "INT" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. 
== SQL(line 1, position 42) == ...t("abcd", -2), left("abcd", 0), left("abcd", 'a') ^^^^^^^^^^^^^^^^^ @@ -110,7 +110,7 @@ select right("abcd", -2), right("abcd", 0), right("abcd", 'a') struct<> -- !query output org.apache.spark.SparkNumberFormatException -The value 'a' of the type "STRING" cannot be cast to "INT" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value 'a' of the type "STRING" cannot be cast to "INT" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 1, position 44) == ...("abcd", -2), right("abcd", 0), right("abcd", 'a') ^^^^^^^^^^^^^^^^^^ @@ -419,7 +419,7 @@ SELECT lpad('hi', 'invalid_length') struct<> -- !query output org.apache.spark.SparkNumberFormatException -The value 'invalid_length' of the type "STRING" cannot be cast to "INT" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value 'invalid_length' of the type "STRING" cannot be cast to "INT" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 1, position 7) == SELECT lpad('hi', 'invalid_length') ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -431,7 +431,7 @@ SELECT rpad('hi', 'invalid_length') struct<> -- !query output org.apache.spark.SparkNumberFormatException -The value 'invalid_length' of the type "STRING" cannot be cast to "INT" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value 'invalid_length' of the type "STRING" cannot be cast to "INT" because it is malformed. 
Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 1, position 7) == SELECT rpad('hi', 'invalid_length') ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/sql/core/src/test/resources/sql-tests/results/pivot.sql.out b/sql/core/src/test/resources/sql-tests/results/pivot.sql.out index 54086bcc54e6c..0a42750d24571 100644 --- a/sql/core/src/test/resources/sql-tests/results/pivot.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/pivot.sql.out @@ -339,7 +339,7 @@ PIVOT ( struct<> -- !query output org.apache.spark.sql.AnalysisException -Literal expressions required for pivot values, found 'course#x' +Literal expressions required for pivot values, found "course". -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/boolean.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/boolean.sql.out index fe23273c4d9a9..a2d0ba73e5d30 100644 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/boolean.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/boolean.sql.out @@ -56,7 +56,7 @@ SELECT boolean('test') AS error struct<> -- !query output org.apache.spark.SparkRuntimeException -The value 'test' of the type "STRING" cannot be cast to "BOOLEAN" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value 'test' of the type "STRING" cannot be cast to "BOOLEAN" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. 
== SQL(line 1, position 7) == SELECT boolean('test') AS error ^^^^^^^^^^^^^^^ @@ -76,7 +76,7 @@ SELECT boolean('foo') AS error struct<> -- !query output org.apache.spark.SparkRuntimeException -The value 'foo' of the type "STRING" cannot be cast to "BOOLEAN" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value 'foo' of the type "STRING" cannot be cast to "BOOLEAN" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 1, position 7) == SELECT boolean('foo') AS error ^^^^^^^^^^^^^^ @@ -104,7 +104,7 @@ SELECT boolean('yeah') AS error struct<> -- !query output org.apache.spark.SparkRuntimeException -The value 'yeah' of the type "STRING" cannot be cast to "BOOLEAN" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value 'yeah' of the type "STRING" cannot be cast to "BOOLEAN" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 1, position 7) == SELECT boolean('yeah') AS error ^^^^^^^^^^^^^^^ @@ -132,7 +132,7 @@ SELECT boolean('nay') AS error struct<> -- !query output org.apache.spark.SparkRuntimeException -The value 'nay' of the type "STRING" cannot be cast to "BOOLEAN" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value 'nay' of the type "STRING" cannot be cast to "BOOLEAN" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. 
If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 1, position 7) == SELECT boolean('nay') AS error ^^^^^^^^^^^^^^ @@ -144,7 +144,7 @@ SELECT boolean('on') AS true struct<> -- !query output org.apache.spark.SparkRuntimeException -The value 'on' of the type "STRING" cannot be cast to "BOOLEAN" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value 'on' of the type "STRING" cannot be cast to "BOOLEAN" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 1, position 7) == SELECT boolean('on') AS true ^^^^^^^^^^^^^ @@ -156,7 +156,7 @@ SELECT boolean('off') AS `false` struct<> -- !query output org.apache.spark.SparkRuntimeException -The value 'off' of the type "STRING" cannot be cast to "BOOLEAN" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value 'off' of the type "STRING" cannot be cast to "BOOLEAN" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 1, position 7) == SELECT boolean('off') AS `false` ^^^^^^^^^^^^^^ @@ -168,7 +168,7 @@ SELECT boolean('of') AS `false` struct<> -- !query output org.apache.spark.SparkRuntimeException -The value 'of' of the type "STRING" cannot be cast to "BOOLEAN" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value 'of' of the type "STRING" cannot be cast to "BOOLEAN" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. 
If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 1, position 7) == SELECT boolean('of') AS `false` ^^^^^^^^^^^^^ @@ -180,7 +180,7 @@ SELECT boolean('o') AS error struct<> -- !query output org.apache.spark.SparkRuntimeException -The value 'o' of the type "STRING" cannot be cast to "BOOLEAN" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value 'o' of the type "STRING" cannot be cast to "BOOLEAN" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 1, position 7) == SELECT boolean('o') AS error ^^^^^^^^^^^^ @@ -192,7 +192,7 @@ SELECT boolean('on_') AS error struct<> -- !query output org.apache.spark.SparkRuntimeException -The value 'on_' of the type "STRING" cannot be cast to "BOOLEAN" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value 'on_' of the type "STRING" cannot be cast to "BOOLEAN" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 1, position 7) == SELECT boolean('on_') AS error ^^^^^^^^^^^^^^ @@ -204,7 +204,7 @@ SELECT boolean('off_') AS error struct<> -- !query output org.apache.spark.SparkRuntimeException -The value 'off_' of the type "STRING" cannot be cast to "BOOLEAN" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value 'off_' of the type "STRING" cannot be cast to "BOOLEAN" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. 
If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 1, position 7) == SELECT boolean('off_') AS error ^^^^^^^^^^^^^^^ @@ -224,7 +224,7 @@ SELECT boolean('11') AS error struct<> -- !query output org.apache.spark.SparkRuntimeException -The value '11' of the type "STRING" cannot be cast to "BOOLEAN" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value '11' of the type "STRING" cannot be cast to "BOOLEAN" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 1, position 7) == SELECT boolean('11') AS error ^^^^^^^^^^^^^ @@ -244,7 +244,7 @@ SELECT boolean('000') AS error struct<> -- !query output org.apache.spark.SparkRuntimeException -The value '000' of the type "STRING" cannot be cast to "BOOLEAN" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value '000' of the type "STRING" cannot be cast to "BOOLEAN" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 1, position 7) == SELECT boolean('000') AS error ^^^^^^^^^^^^^^ @@ -256,7 +256,7 @@ SELECT boolean('') AS error struct<> -- !query output org.apache.spark.SparkRuntimeException -The value '' of the type "STRING" cannot be cast to "BOOLEAN" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value '' of the type "STRING" cannot be cast to "BOOLEAN" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. 
If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 1, position 7) == SELECT boolean('') AS error ^^^^^^^^^^^ @@ -365,7 +365,7 @@ SELECT boolean(string(' tru e ')) AS invalid struct<> -- !query output org.apache.spark.SparkRuntimeException -The value ' tru e ' of the type "STRING" cannot be cast to "BOOLEAN" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value ' tru e ' of the type "STRING" cannot be cast to "BOOLEAN" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 1, position 7) == SELECT boolean(string(' tru e ')) AS invalid ^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -377,7 +377,7 @@ SELECT boolean(string('')) AS invalid struct<> -- !query output org.apache.spark.SparkRuntimeException -The value '' of the type "STRING" cannot be cast to "BOOLEAN" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value '' of the type "STRING" cannot be cast to "BOOLEAN" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 1, position 7) == SELECT boolean(string('')) AS invalid ^^^^^^^^^^^^^^^^^^^ @@ -524,7 +524,7 @@ INSERT INTO BOOLTBL2 struct<> -- !query output org.apache.spark.sql.AnalysisException -failed to evaluate expression CAST('XXX' AS BOOLEAN): The value 'XXX' of the type "STRING" cannot be cast to "BOOLEAN" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. 
+failed to evaluate expression CAST('XXX' AS BOOLEAN): The value 'XXX' of the type "STRING" cannot be cast to "BOOLEAN" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 2, position 11) == VALUES (boolean('XXX')) ^^^^^^^^^^^^^^ diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/float4.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/float4.sql.out index a1399062419c9..34ab90a26f1a4 100644 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/float4.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/float4.sql.out @@ -96,7 +96,7 @@ SELECT float('N A N') struct<> -- !query output org.apache.spark.SparkNumberFormatException -The value 'N A N' of the type "STRING" cannot be cast to "FLOAT" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value 'N A N' of the type "STRING" cannot be cast to "FLOAT" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 1, position 7) == SELECT float('N A N') ^^^^^^^^^^^^^^ @@ -108,7 +108,7 @@ SELECT float('NaN x') struct<> -- !query output org.apache.spark.SparkNumberFormatException -The value 'NaN x' of the type "STRING" cannot be cast to "FLOAT" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value 'NaN x' of the type "STRING" cannot be cast to "FLOAT" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. 
== SQL(line 1, position 7) == SELECT float('NaN x') ^^^^^^^^^^^^^^ @@ -120,7 +120,7 @@ SELECT float(' INFINITY x') struct<> -- !query output org.apache.spark.SparkNumberFormatException -The value ' INFINITY x' of the type "STRING" cannot be cast to "FLOAT" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value ' INFINITY x' of the type "STRING" cannot be cast to "FLOAT" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 1, position 7) == SELECT float(' INFINITY x') ^^^^^^^^^^^^^^^^^^^^^^^ @@ -156,7 +156,7 @@ SELECT float(decimal('nan')) struct<> -- !query output org.apache.spark.SparkNumberFormatException -The value 'nan' of the type "STRING" cannot be cast to "DECIMAL(10,0)" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value 'nan' of the type "STRING" cannot be cast to "DECIMAL(10,0)" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 1, position 13) == SELECT float(decimal('nan')) ^^^^^^^^^^^^^^ diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/float8.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/float8.sql.out index 270332cd19664..33aec5bfaf100 100644 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/float8.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/float8.sql.out @@ -128,7 +128,7 @@ SELECT double('N A N') struct<> -- !query output org.apache.spark.SparkNumberFormatException -The value 'N A N' of the type "STRING" cannot be cast to "DOUBLE" because it is malformed. 
To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value 'N A N' of the type "STRING" cannot be cast to "DOUBLE" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 1, position 7) == SELECT double('N A N') ^^^^^^^^^^^^^^^ @@ -140,7 +140,7 @@ SELECT double('NaN x') struct<> -- !query output org.apache.spark.SparkNumberFormatException -The value 'NaN x' of the type "STRING" cannot be cast to "DOUBLE" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value 'NaN x' of the type "STRING" cannot be cast to "DOUBLE" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 1, position 7) == SELECT double('NaN x') ^^^^^^^^^^^^^^^ @@ -152,7 +152,7 @@ SELECT double(' INFINITY x') struct<> -- !query output org.apache.spark.SparkNumberFormatException -The value ' INFINITY x' of the type "STRING" cannot be cast to "DOUBLE" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value ' INFINITY x' of the type "STRING" cannot be cast to "DOUBLE" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. 
== SQL(line 1, position 7) == SELECT double(' INFINITY x') ^^^^^^^^^^^^^^^^^^^^^^^^ @@ -188,7 +188,7 @@ SELECT double(decimal('nan')) struct<> -- !query output org.apache.spark.SparkNumberFormatException -The value 'nan' of the type "STRING" cannot be cast to "DECIMAL(10,0)" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value 'nan' of the type "STRING" cannot be cast to "DECIMAL(10,0)" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 1, position 14) == SELECT double(decimal('nan')) ^^^^^^^^^^^^^^ diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/text.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/text.sql.out index ed218c1a52c3c..a3f149211966a 100755 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/text.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/text.sql.out @@ -65,7 +65,7 @@ select string('four: ') || 2+2 struct<> -- !query output org.apache.spark.SparkNumberFormatException -The value 'four: 2' of the type "STRING" cannot be cast to "BIGINT" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value 'four: 2' of the type "STRING" cannot be cast to "BIGINT" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. 
== SQL(line 1, position 7) == select string('four: ') || 2+2 ^^^^^^^^^^^^^^^^^^^^^^^ @@ -77,7 +77,7 @@ select 'four: ' || 2+2 struct<> -- !query output org.apache.spark.SparkNumberFormatException -The value 'four: 2' of the type "STRING" cannot be cast to "BIGINT" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value 'four: 2' of the type "STRING" cannot be cast to "BIGINT" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 1, position 7) == select 'four: ' || 2+2 ^^^^^^^^^^^^^^^ diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part2.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part2.sql.out index 58633790cf793..4da230c2e5a55 100644 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part2.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part2.sql.out @@ -462,7 +462,7 @@ window w as (order by f_numeric range between struct<> -- !query output org.apache.spark.SparkNumberFormatException -The value 'NaN' of the type "STRING" cannot be cast to "INT" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +The value 'NaN' of the type "STRING" cannot be cast to "INT" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. 
== SQL(line 3, position 12) == window w as (order by f_numeric range between ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part3.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part3.sql.out index 68f9d532a1cd5..25125281a74c8 100644 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part3.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part3.sql.out @@ -72,7 +72,7 @@ insert into datetimes values struct<> -- !query output org.apache.spark.sql.AnalysisException -failed to evaluate expression CAST('11:00 BST' AS TIMESTAMP): The value '11:00 BST' of the type "STRING" cannot be cast to "TIMESTAMP" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +failed to evaluate expression CAST('11:00 BST' AS TIMESTAMP): The value '11:00 BST' of the type "STRING" cannot be cast to "TIMESTAMP" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 2, position 23) == (1, timestamp '11:00', cast ('11:00 BST' as timestamp), cast ('1 year' as timestamp), ... 
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part4.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part4.sql.out index f3f4a448df69c..f341f475fcdf0 100644 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part4.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part4.sql.out @@ -501,7 +501,7 @@ FROM (VALUES(1,1),(2,2),(3,(cast('nan' as int))),(4,3),(5,4)) t(a,b) struct<> -- !query output org.apache.spark.sql.AnalysisException -failed to evaluate expression CAST('nan' AS INT): The value 'nan' of the type "STRING" cannot be cast to "INT" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. +failed to evaluate expression CAST('nan' AS INT): The value 'nan' of the type "STRING" cannot be cast to "INT" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 3, position 28) == FROM (VALUES(1,1),(2,2),(3,(cast('nan' as int))),(4,3),(5,4)) t(a,b) ^^^^^^^^^^^^^^^^^^ diff --git a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out index e374f92c74e93..531f89003bdc1 100644 --- a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out @@ -332,7 +332,7 @@ select to_timestamp(1) struct<> -- !query output org.apache.spark.SparkDateTimeException -The value '1' of the type "STRING" cannot be cast to "TIMESTAMP_NTZ" because it is malformed. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. 
+The value '1' of the type "STRING" cannot be cast to "TIMESTAMP_NTZ" because it is malformed. Correct the value as per the syntax, or change its target type. To return NULL instead, use `try_cast`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/udf/udf-pivot.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/udf-pivot.sql.out index 7b986a25be089..0dccf39d435f9 100644 --- a/sql/core/src/test/resources/sql-tests/results/udf/udf-pivot.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/udf/udf-pivot.sql.out @@ -339,7 +339,7 @@ PIVOT ( struct<> -- !query output org.apache.spark.sql.AnalysisException -Literal expressions required for pivot values, found 'course#x' +Literal expressions required for pivot values, found "course". -- !query From 5d946ec6d53e28f379b0d8d0d1a62b712241a961 Mon Sep 17 00:00:00 2001 From: Max Gekk Date: Wed, 25 May 2022 08:18:07 +0300 Subject: [PATCH 2/2] Fix InsertIntoTests --- .../org/apache/spark/sql/connector/InsertIntoTests.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sql/core/src/test/scala/org/apache/spark/sql/connector/InsertIntoTests.scala b/sql/core/src/test/scala/org/apache/spark/sql/connector/InsertIntoTests.scala index fc98cfd5138e1..85904bbf12373 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/connector/InsertIntoTests.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/connector/InsertIntoTests.scala @@ -259,7 +259,7 @@ trait InsertIntoSQLOnlyTests verifyTable(t1, spark.emptyDataFrame) assert(exc.getMessage.contains( - "PARTITION clause cannot contain a non-partition column name")) + "PARTITION clause cannot contain the non-partition column")) assert(exc.getMessage.contains("id")) assert(exc.getErrorClass == "NON_PARTITION_COLUMN") } @@ -276,7 +276,7 @@ trait InsertIntoSQLOnlyTests verifyTable(t1, spark.emptyDataFrame) assert(exc.getMessage.contains( - "PARTITION clause 
cannot contain a non-partition column name")) + "PARTITION clause cannot contain the non-partition column")) assert(exc.getMessage.contains("data")) assert(exc.getErrorClass == "NON_PARTITION_COLUMN") }