18 changes: 9 additions & 9 deletions core/src/main/resources/error/error-classes.json
@@ -4,15 +4,15 @@
"sqlState" : "42000"
},
"ARITHMETIC_OVERFLOW" : {
"message" : [ "<message>.<alternative> If necessary set <config> to false (except for ANSI interval type) to bypass this error.<context>" ],
"message" : [ "<message>.<alternative> If necessary set <config> to \"false\" (except for ANSI interval type) to bypass this error.<context>" ],
"sqlState" : "22003"
},
"CANNOT_CAST_DATATYPE" : {
"message" : [ "Cannot cast <sourceType> to <targetType>." ],
"sqlState" : "22005"
},
"CANNOT_CHANGE_DECIMAL_PRECISION" : {
"message" : [ "<value> cannot be represented as Decimal(<precision>, <scale>). If necessary set <config> to false to bypass this error.<details>" ],
"message" : [ "<value> cannot be represented as Decimal(<precision>, <scale>). If necessary set <config> to \"false\" to bypass this error.<details>" ],
"sqlState" : "22005"
},
"CANNOT_PARSE_DECIMAL" : {
@@ -26,11 +26,11 @@
"message" : [ "Cannot use a mixture of aggregate function and group aggregate pandas UDF" ]
},
"CAST_INVALID_INPUT" : {
"message" : [ "The value <value> of the type <sourceType> cannot be cast to <targetType> because it is malformed. To return NULL instead, use `try_cast`. If necessary set <config> to false to bypass this error.<details>" ],
"message" : [ "The value <value> of the type <sourceType> cannot be cast to <targetType> because it is malformed. To return NULL instead, use `try_cast`. If necessary set <config> to \"false\" to bypass this error.<details>" ],
"sqlState" : "42000"
},
"CAST_OVERFLOW" : {
"message" : [ "The value <value> of the type <sourceType> cannot be cast to <targetType> due to an overflow. To return NULL instead, use `try_cast`. If necessary set <config> to false to bypass this error." ],
"message" : [ "The value <value> of the type <sourceType> cannot be cast to <targetType> due to an overflow. To return NULL instead, use `try_cast`. If necessary set <config> to \"false\" to bypass this error." ],
"sqlState" : "22005"
},
"CONCURRENT_QUERY" : {
@@ -41,7 +41,7 @@
"sqlState" : "22008"
},
"DIVIDE_BY_ZERO" : {
"message" : [ "Division by zero. To return NULL instead, use `try_divide`. If necessary set <config> to false (except for ANSI interval type) to bypass this error.<details>" ],
"message" : [ "Division by zero. To return NULL instead, use `try_divide`. If necessary set <config> to \"false\" (except for ANSI interval type) to bypass this error.<details>" ],
"sqlState" : "22012"
},
"DUPLICATE_KEY" : {
@@ -93,17 +93,17 @@
"message" : [ "<message>" ]
},
"INVALID_ARRAY_INDEX" : {
"message" : [ "The index <indexValue> is out of bounds. The array has <arraySize> elements. If necessary set <config> to false to bypass this error." ]
"message" : [ "The index <indexValue> is out of bounds. The array has <arraySize> elements. If necessary set <config> to \"false\" to bypass this error." ]
},
"INVALID_ARRAY_INDEX_IN_ELEMENT_AT" : {
"message" : [ "The index <indexValue> is out of bounds. The array has <arraySize> elements. To return NULL instead, use `try_element_at`. If necessary set <config> to false to bypass this error." ]
"message" : [ "The index <indexValue> is out of bounds. The array has <arraySize> elements. To return NULL instead, use `try_element_at`. If necessary set <config> to \"false\" to bypass this error." ]
},
"INVALID_FIELD_NAME" : {
"message" : [ "Field name <fieldName> is invalid: <path> is not a struct." ],
"sqlState" : "42000"
},
"INVALID_FRACTION_OF_SECOND" : {
"message" : [ "The fraction of sec must be zero. Valid range is [0, 60]. If necessary set <config> to false to bypass this error. " ],
"message" : [ "The fraction of sec must be zero. Valid range is [0, 60]. If necessary set <config> to \"false\" to bypass this error. " ],
"sqlState" : "22023"
},
"INVALID_JSON_SCHEMA_MAP_TYPE" : {
Expand All @@ -118,7 +118,7 @@
"sqlState" : "42000"
},
"MAP_KEY_DOES_NOT_EXIST" : {
"message" : [ "Key <keyValue> does not exist. To return NULL instead, use 'try_element_at'. If necessary set <config> to false to bypass this error.<details>" ]
"message" : [ "Key <keyValue> does not exist. To return NULL instead, use `try_element_at`. If necessary set <config> to \"false\" to bypass this error.<details>" ]
},
"MISSING_COLUMN" : {
"message" : [ "Column '<columnName>' does not exist. Did you mean one of the following? [<proposal>]" ],
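The templates above are rendered by filling each <placeholder> positionally from a messageParameters array at throw time, which is why escaping the quotes around "false" in the JSON changes every rendered message in the test output below. As a rough illustration of that substitution (a minimal sketch consistent with the SparkThrowableSuite assertion in the next hunk, not Spark's actual SparkThrowableHelper):

import scala.util.matching.Regex

// Illustrative sketch only: fill each <placeholder> in an error-class template
// with the next positional argument; surplus arguments are silently ignored,
// matching the "too many args" behavior asserted in the test below.
def formatMessage(template: String, args: Array[String]): String = {
  val placeholder: Regex = "<[a-zA-Z]+>".r
  val it = args.iterator
  placeholder.replaceAllIn(template,
    _ => Regex.quoteReplacement(if (it.hasNext) it.next() else ""))
}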
@@ -125,8 +125,8 @@ class SparkThrowableSuite extends SparkFunSuite {

// Does not fail with too many args (expects 0 args)
assert(getMessage("DIVIDE_BY_ZERO", Array("foo", "bar", "baz")) ==
"Division by zero. To return NULL instead, use `try_divide`. If necessary set foo to false " +
"(except for ANSI interval type) to bypass this error.bar")
"Division by zero. To return NULL instead, use `try_divide`. If necessary set foo " +
"to \"false\" (except for ANSI interval type) to bypass this error.bar")
}

test("Error message is formatted") {
@@ -63,4 +63,8 @@ trait QueryErrorsBase {
def toSQLConf(conf: String): String = {
quoteByDefault(conf)
}
+
+  def toDSOption(option: String): String = {
+    quoteByDefault(option)
+  }
}
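The new toDSOption mirrors the existing toSQLConf so that datasource option names get the same double-quote treatment as SQL config keys. The shared helper quoteByDefault is not shown in this hunk; judging from the rendered messages in the .sql.out results below, its effect is plain double-quoting, so a plausible sketch (an assumption, not the actual source) is:

// Assumed shape of the private helper both toSQLConf and toDSOption delegate to.
private def quoteByDefault(elem: String): String = "\"" + elem + "\""
// e.g. toSQLConf("spark.sql.ansi.enabled") yields the quoted
// "spark.sql.ansi.enabled" seen in the error messages below.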
@@ -214,8 +214,12 @@ object QueryExecutionErrors extends QueryErrorsBase {
}

def mapKeyNotExistError(key: Any, dataType: DataType, context: String): NoSuchElementException = {
-    new SparkNoSuchElementException(errorClass = "MAP_KEY_DOES_NOT_EXIST",
-      messageParameters = Array(toSQLValue(key, dataType), SQLConf.ANSI_ENABLED.key, context))
+    new SparkNoSuchElementException(
+      errorClass = "MAP_KEY_DOES_NOT_EXIST",
+      messageParameters = Array(
+        toSQLValue(key, dataType),
+        toSQLConf(SQLConf.ANSI_ENABLED.key),
+        context))
}

def inputTypeUnsupportedError(dataType: DataType): Throwable = {
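For context, this is the kind of lookup that flows through the refactored mapKeyNotExistError (a hypothetical spark-shell session, assuming ANSI mode is enabled):

// With ANSI mode on, reading a missing map key raises MAP_KEY_DOES_NOT_EXIST,
// now rendered with the config name quoted via toSQLConf.
spark.conf.set("spark.sql.ansi.enabled", "true")
spark.sql("SELECT map(1, 'a', 2, 'b')[5]").show()
// org.apache.spark.SparkNoSuchElementException: Key 5 does not exist. To return
// NULL instead, use `try_element_at`. If necessary set "spark.sql.ansi.enabled"
// to "false" to bypass this error.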
@@ -578,6 +582,7 @@ object QueryExecutionErrors extends QueryErrorsBase {
new IllegalStateException(s"unrecognized format $format")
}

+// scalastyle:off line.size.limit
def sparkUpgradeInReadingDatesError(
format: String, config: String, option: String): SparkUpgradeException = {
new SparkUpgradeException(
@@ -590,14 +595,15 @@
|Spark 2.x or legacy versions of Hive, which uses a legacy hybrid calendar
|that is different from Spark 3.0+'s Proleptic Gregorian calendar.
|See more details in SPARK-31404. You can set the SQL config ${toSQLConf(config)} or
-          |the datasource option '$option' to 'LEGACY' to rebase the datetime values
+          |the datasource option ${toDSOption(option)} to "LEGACY" to rebase the datetime values
|w.r.t. the calendar difference during reading. To read the datetime values
-          |as it is, set the SQL config ${toSQLConf(config)} or the datasource option '$option'
-          |to 'CORRECTED'.
+          |as it is, set the SQL config ${toSQLConf(config)} or the datasource option ${toDSOption(option)}
+          |to "CORRECTED".
|""".stripMargin),
cause = null
)
}
+// scalastyle:on line.size.limit

def sparkUpgradeInWritingDatesError(format: String, config: String): SparkUpgradeException = {
new SparkUpgradeException(
@@ -609,9 +615,9 @@
|into $format files can be dangerous, as the files may be read by Spark 2.x
|or legacy versions of Hive later, which uses a legacy hybrid calendar that
|is different from Spark 3.0+'s Proleptic Gregorian calendar. See more
-          |details in SPARK-31404. You can set ${toSQLConf(config)} to 'LEGACY' to rebase the
+          |details in SPARK-31404. You can set ${toSQLConf(config)} to "LEGACY" to rebase the
|datetime values w.r.t. the calendar difference during writing, to get maximum
-          |interoperability. Or set ${toSQLConf(config)} to 'CORRECTED' to write the datetime
+          |interoperability. Or set ${toSQLConf(config)} to "CORRECTED" to write the datetime
|values as it is, if you are 100% sure that the written files will only be read by
|Spark 3.0+ or other systems that use Proleptic Gregorian calendar.
|""".stripMargin),
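With these changes the upgrade messages quote the SQL config via toSQLConf, the datasource option via the new toDSOption, and the mode values "LEGACY"/"CORRECTED" in double quotes. In user terms, the two remediation paths look roughly like this (the config and option names here are assumptions for illustration; in the diff above they are interpolated, not hard-coded):

// Session-wide SQL config (assumed name, Parquet read path):
spark.conf.set("spark.sql.parquet.datetimeRebaseModeInRead", "CORRECTED")
// Or per read, via the datasource option (assumed option name):
val df = spark.read.option("datetimeRebaseMode", "CORRECTED").parquet("/path/to/data")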
24 changes: 12 additions & 12 deletions sql/core/src/test/resources/sql-tests/results/ansi/array.sql.out
@@ -168,7 +168,7 @@ select element_at(array(1, 2, 3), 5)
struct<>
-- !query output
org.apache.spark.SparkArrayIndexOutOfBoundsException
-The index 5 is out of bounds. The array has 3 elements. To return NULL instead, use `try_element_at`. If necessary set "spark.sql.ansi.enabled" to false to bypass this error.
+The index 5 is out of bounds. The array has 3 elements. To return NULL instead, use `try_element_at`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.


-- !query
@@ -177,7 +177,7 @@ select element_at(array(1, 2, 3), -5)
struct<>
-- !query output
org.apache.spark.SparkArrayIndexOutOfBoundsException
-The index -5 is out of bounds. The array has 3 elements. To return NULL instead, use `try_element_at`. If necessary set "spark.sql.ansi.enabled" to false to bypass this error.
+The index -5 is out of bounds. The array has 3 elements. To return NULL instead, use `try_element_at`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.


-- !query
@@ -195,7 +195,7 @@ select elt(4, '123', '456')
struct<>
-- !query output
org.apache.spark.SparkArrayIndexOutOfBoundsException
-The index 4 is out of bounds. The array has 2 elements. If necessary set "spark.sql.ansi.enabled" to false to bypass this error.
+The index 4 is out of bounds. The array has 2 elements. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.


-- !query
@@ -204,7 +204,7 @@ select elt(0, '123', '456')
struct<>
-- !query output
org.apache.spark.SparkArrayIndexOutOfBoundsException
-The index 0 is out of bounds. The array has 2 elements. If necessary set "spark.sql.ansi.enabled" to false to bypass this error.
+The index 0 is out of bounds. The array has 2 elements. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.


-- !query
@@ -213,7 +213,7 @@ select elt(-1, '123', '456')
struct<>
-- !query output
org.apache.spark.SparkArrayIndexOutOfBoundsException
-The index -1 is out of bounds. The array has 2 elements. If necessary set "spark.sql.ansi.enabled" to false to bypass this error.
+The index -1 is out of bounds. The array has 2 elements. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.


-- !query
@@ -254,7 +254,7 @@ select array(1, 2, 3)[5]
struct<>
-- !query output
org.apache.spark.SparkArrayIndexOutOfBoundsException
-The index 5 is out of bounds. The array has 3 elements. If necessary set "spark.sql.ansi.enabled" to false to bypass this error.
+The index 5 is out of bounds. The array has 3 elements. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.


-- !query
@@ -263,7 +263,7 @@ select array(1, 2, 3)[-1]
struct<>
-- !query output
org.apache.spark.SparkArrayIndexOutOfBoundsException
-The index -1 is out of bounds. The array has 3 elements. If necessary set "spark.sql.ansi.enabled" to false to bypass this error.
+The index -1 is out of bounds. The array has 3 elements. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.


-- !query
@@ -337,7 +337,7 @@ select element_at(array(1, 2, 3), 5)
struct<>
-- !query output
org.apache.spark.SparkArrayIndexOutOfBoundsException
-The index 5 is out of bounds. The array has 3 elements. To return NULL instead, use `try_element_at`. If necessary set "spark.sql.ansi.enabled" to false to bypass this error.
+The index 5 is out of bounds. The array has 3 elements. To return NULL instead, use `try_element_at`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.


-- !query
@@ -346,7 +346,7 @@ select element_at(array(1, 2, 3), -5)
struct<>
-- !query output
org.apache.spark.SparkArrayIndexOutOfBoundsException
-The index -5 is out of bounds. The array has 3 elements. To return NULL instead, use `try_element_at`. If necessary set "spark.sql.ansi.enabled" to false to bypass this error.
+The index -5 is out of bounds. The array has 3 elements. To return NULL instead, use `try_element_at`. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.


-- !query
@@ -364,7 +364,7 @@ select elt(4, '123', '456')
struct<>
-- !query output
org.apache.spark.SparkArrayIndexOutOfBoundsException
-The index 4 is out of bounds. The array has 2 elements. If necessary set "spark.sql.ansi.enabled" to false to bypass this error.
+The index 4 is out of bounds. The array has 2 elements. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.


-- !query
@@ -373,7 +373,7 @@ select elt(0, '123', '456')
struct<>
-- !query output
org.apache.spark.SparkArrayIndexOutOfBoundsException
-The index 0 is out of bounds. The array has 2 elements. If necessary set "spark.sql.ansi.enabled" to false to bypass this error.
+The index 0 is out of bounds. The array has 2 elements. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.


-- !query
@@ -382,4 +382,4 @@ select elt(-1, '123', '456')
struct<>
-- !query output
org.apache.spark.SparkArrayIndexOutOfBoundsException
-The index -1 is out of bounds. The array has 2 elements. If necessary set "spark.sql.ansi.enabled" to false to bypass this error.
+The index -1 is out of bounds. The array has 2 elements. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
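The .sql.out files above are golden files for SQLQueryTestSuite; after a message template changes they are normally regenerated (by re-running the suite with SPARK_GENERATE_GOLDEN_FILES=1 set in the environment, per the standard Spark dev workflow) rather than edited by hand, which is why every affected query output changes in lockstep with the JSON template.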