Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
11 changes: 6 additions & 5 deletions common/utils/src/main/resources/error/error-classes.json
Original file line number Diff line number Diff line change
Expand Up @@ -2210,6 +2210,12 @@
],
"sqlState" : "42607"
},
"NON_FOLDABLE_ARGUMENT" : {
"message" : [
"The function <funcName> requires the parameter <paramName> to be a foldable expression of the type <paramType>, but the actual argument is a non-foldable."
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I'm a bit concerned, does the user know what is foldable and what is non-foldable?

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

How would you improve the error message?

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

After thinking about it, I really didn't find a better way to express it.😂

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Ya, in this case, new message looks reasonable to me. :)

],
"sqlState" : "22024"
},
"NON_LAST_MATCHED_CLAUSE_OMIT_CONDITION" : {
"message" : [
"When there are more than one MATCHED clauses in a MERGE statement, only the last MATCHED clause can omit the condition."
Expand Down Expand Up @@ -4024,11 +4030,6 @@
"<funcName>() doesn't support the <mode> mode. Acceptable modes are <permissiveMode> and <failFastMode>."
]
},
"_LEGACY_ERROR_TEMP_1100" : {
"message" : [
"The '<argName>' parameter of function '<funcName>' needs to be a <requiredType> literal."
]
},
"_LEGACY_ERROR_TEMP_1103" : {
"message" : [
"Unsupported component type <clz> in arrays."
Expand Down
8 changes: 8 additions & 0 deletions docs/sql-error-conditions.md
Original file line number Diff line number Diff line change
Expand Up @@ -1299,6 +1299,12 @@ Cannot call function `<functionName>` because named argument references are not

It is not allowed to use an aggregate function in the argument of another aggregate function. Please use the inner aggregate function in a sub-query.

### NON_FOLDABLE_ARGUMENT

[SQLSTATE: 22024](sql-error-conditions-sqlstates.html#class-22-data-exception)
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Data exception category is also reasonable to me if there is no other proper one.


The function `<funcName>` requires the parameter `<paramName>` to be a foldable expression of the type `<paramType>`, but the actual argument is non-foldable.

### NON_LAST_MATCHED_CLAUSE_OMIT_CONDITION

[SQLSTATE: 42613](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation)
Expand Down Expand Up @@ -2174,3 +2180,5 @@ The operation `<operation>` requires a `<requiredType>`. But `<objectName>` is a
The `<functionName>` requires `<expectedNum>` parameters but the actual number is `<actualNum>`.

For more details see [WRONG_NUM_ARGS](sql-error-conditions-wrong-num-args-error-class.html)


Original file line number Diff line number Diff line change
Expand Up @@ -2934,7 +2934,7 @@ object Extract {
}
}
} else {
throw QueryCompilationErrors.requireLiteralParameter(funcName, "field", "string")
throw QueryCompilationErrors.nonFoldableArgumentError(funcName, "field", StringType)
}
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -283,10 +283,10 @@ trait CeilFloorExpressionBuilderBase extends ExpressionBuilder {
} else if (numArgs == 2) {
val scale = expressions(1)
if (!(scale.foldable && scale.dataType == IntegerType)) {
throw QueryCompilationErrors.requireLiteralParameter(funcName, "scale", "int")
throw QueryCompilationErrors.nonFoldableArgumentError(funcName, "scale", IntegerType)
}
if (scale.eval() == null) {
throw QueryCompilationErrors.requireLiteralParameter(funcName, "scale", "int")
throw QueryCompilationErrors.nonFoldableArgumentError(funcName, "scale", IntegerType)
}
buildWithTwoParams(expressions(0), scale)
} else {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -247,7 +247,7 @@ object ToCharacterBuilder extends ExpressionBuilder {
case _: DatetimeType => DateFormatClass(inputExpr, format)
case _: BinaryType =>
if (!(format.dataType == StringType && format.foldable)) {
throw QueryCompilationErrors.requireLiteralParameter(funcName, "format", "string")
throw QueryCompilationErrors.nonFoldableArgumentError(funcName, "format", StringType)
}
format.eval().asInstanceOf[UTF8String].toString.toLowerCase(Locale.ROOT).trim match {
case "base64" => Base64(inputExpr)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1207,14 +1207,16 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase with Compilat
"failFastMode" -> FailFastMode.name))
}

def requireLiteralParameter(
funcName: String, argName: String, requiredType: String): Throwable = {
def nonFoldableArgumentError(
funcName: String,
paramName: String,
paramType: DataType): Throwable = {
new AnalysisException(
errorClass = "_LEGACY_ERROR_TEMP_1100",
errorClass = "NON_FOLDABLE_ARGUMENT",
messageParameters = Map(
"argName" -> argName,
"funcName" -> funcName,
"requiredType" -> requiredType))
"funcName" -> toSQLId(funcName),
"paramName" -> toSQLId(paramName),
"paramType" -> toSQLType(paramType)))
}

def literalTypeUnsupportedForSourceTypeError(field: String, source: Expression): Throwable = {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -81,11 +81,12 @@ SELECT CEIL(2.5, null)
-- !query analysis
org.apache.spark.sql.AnalysisException
{
"errorClass" : "_LEGACY_ERROR_TEMP_1100",
"errorClass" : "NON_FOLDABLE_ARGUMENT",
"sqlState" : "22024",
"messageParameters" : {
"argName" : "scale",
"funcName" : "ceil",
"requiredType" : "int"
"funcName" : "`ceil`",
"paramName" : "`scale`",
"paramType" : "\"INT\""
},
"queryContext" : [ {
"objectType" : "",
Expand All @@ -102,11 +103,12 @@ SELECT CEIL(2.5, 'a')
-- !query analysis
org.apache.spark.sql.AnalysisException
{
"errorClass" : "_LEGACY_ERROR_TEMP_1100",
"errorClass" : "NON_FOLDABLE_ARGUMENT",
"sqlState" : "22024",
"messageParameters" : {
"argName" : "scale",
"funcName" : "ceil",
"requiredType" : "int"
"funcName" : "`ceil`",
"paramName" : "`scale`",
"paramType" : "\"INT\""
},
"queryContext" : [ {
"objectType" : "",
Expand Down Expand Up @@ -223,11 +225,12 @@ SELECT FLOOR(2.5, null)
-- !query analysis
org.apache.spark.sql.AnalysisException
{
"errorClass" : "_LEGACY_ERROR_TEMP_1100",
"errorClass" : "NON_FOLDABLE_ARGUMENT",
"sqlState" : "22024",
"messageParameters" : {
"argName" : "scale",
"funcName" : "floor",
"requiredType" : "int"
"funcName" : "`floor`",
"paramName" : "`scale`",
"paramType" : "\"INT\""
},
"queryContext" : [ {
"objectType" : "",
Expand All @@ -244,11 +247,12 @@ SELECT FLOOR(2.5, 'a')
-- !query analysis
org.apache.spark.sql.AnalysisException
{
"errorClass" : "_LEGACY_ERROR_TEMP_1100",
"errorClass" : "NON_FOLDABLE_ARGUMENT",
"sqlState" : "22024",
"messageParameters" : {
"argName" : "scale",
"funcName" : "floor",
"requiredType" : "int"
"funcName" : "`floor`",
"paramName" : "`scale`",
"paramType" : "\"INT\""
},
"queryContext" : [ {
"objectType" : "",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -932,11 +932,12 @@ select date_part(c, c) from t
-- !query analysis
org.apache.spark.sql.AnalysisException
{
"errorClass" : "_LEGACY_ERROR_TEMP_1100",
"errorClass" : "NON_FOLDABLE_ARGUMENT",
"sqlState" : "22024",
"messageParameters" : {
"argName" : "field",
"funcName" : "date_part",
"requiredType" : "string"
"funcName" : "`date_part`",
"paramName" : "`field`",
"paramType" : "\"STRING\""
},
"queryContext" : [ {
"objectType" : "",
Expand Down Expand Up @@ -964,11 +965,12 @@ select date_part(i, i) from t
-- !query analysis
org.apache.spark.sql.AnalysisException
{
"errorClass" : "_LEGACY_ERROR_TEMP_1100",
"errorClass" : "NON_FOLDABLE_ARGUMENT",
"sqlState" : "22024",
"messageParameters" : {
"argName" : "field",
"funcName" : "date_part",
"requiredType" : "string"
"funcName" : "`date_part`",
"paramName" : "`field`",
"paramType" : "\"STRING\""
},
"queryContext" : [ {
"objectType" : "",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -94,11 +94,12 @@ struct<>
-- !query output
org.apache.spark.sql.AnalysisException
{
"errorClass" : "_LEGACY_ERROR_TEMP_1100",
"errorClass" : "NON_FOLDABLE_ARGUMENT",
"sqlState" : "22024",
"messageParameters" : {
"argName" : "scale",
"funcName" : "ceil",
"requiredType" : "int"
"funcName" : "`ceil`",
"paramName" : "`scale`",
"paramType" : "\"INT\""
},
"queryContext" : [ {
"objectType" : "",
Expand All @@ -117,11 +118,12 @@ struct<>
-- !query output
org.apache.spark.sql.AnalysisException
{
"errorClass" : "_LEGACY_ERROR_TEMP_1100",
"errorClass" : "NON_FOLDABLE_ARGUMENT",
"sqlState" : "22024",
"messageParameters" : {
"argName" : "scale",
"funcName" : "ceil",
"requiredType" : "int"
"funcName" : "`ceil`",
"paramName" : "`scale`",
"paramType" : "\"INT\""
},
"queryContext" : [ {
"objectType" : "",
Expand Down Expand Up @@ -253,11 +255,12 @@ struct<>
-- !query output
org.apache.spark.sql.AnalysisException
{
"errorClass" : "_LEGACY_ERROR_TEMP_1100",
"errorClass" : "NON_FOLDABLE_ARGUMENT",
"sqlState" : "22024",
"messageParameters" : {
"argName" : "scale",
"funcName" : "floor",
"requiredType" : "int"
"funcName" : "`floor`",
"paramName" : "`scale`",
"paramType" : "\"INT\""
},
"queryContext" : [ {
"objectType" : "",
Expand All @@ -276,11 +279,12 @@ struct<>
-- !query output
org.apache.spark.sql.AnalysisException
{
"errorClass" : "_LEGACY_ERROR_TEMP_1100",
"errorClass" : "NON_FOLDABLE_ARGUMENT",
"sqlState" : "22024",
"messageParameters" : {
"argName" : "scale",
"funcName" : "floor",
"requiredType" : "int"
"funcName" : "`floor`",
"paramName" : "`scale`",
"paramType" : "\"INT\""
},
"queryContext" : [ {
"objectType" : "",
Expand Down
18 changes: 10 additions & 8 deletions sql/core/src/test/resources/sql-tests/results/extract.sql.out
Original file line number Diff line number Diff line change
Expand Up @@ -714,11 +714,12 @@ struct<>
-- !query output
org.apache.spark.sql.AnalysisException
{
"errorClass" : "_LEGACY_ERROR_TEMP_1100",
"errorClass" : "NON_FOLDABLE_ARGUMENT",
"sqlState" : "22024",
"messageParameters" : {
"argName" : "field",
"funcName" : "date_part",
"requiredType" : "string"
"funcName" : "`date_part`",
"paramName" : "`field`",
"paramType" : "\"STRING\""
},
"queryContext" : [ {
"objectType" : "",
Expand All @@ -745,11 +746,12 @@ struct<>
-- !query output
org.apache.spark.sql.AnalysisException
{
"errorClass" : "_LEGACY_ERROR_TEMP_1100",
"errorClass" : "NON_FOLDABLE_ARGUMENT",
"sqlState" : "22024",
"messageParameters" : {
"argName" : "field",
"funcName" : "date_part",
"requiredType" : "string"
"funcName" : "`date_part`",
"paramName" : "`field`",
"paramType" : "\"STRING\""
},
"queryContext" : [ {
"objectType" : "",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -875,11 +875,11 @@ class StringFunctionsSuite extends QueryTest with SharedSparkSession {
exception = intercept[AnalysisException] {
df2.select(func(col("input"), col("format"))).collect()
},
errorClass = "_LEGACY_ERROR_TEMP_1100",
errorClass = "NON_FOLDABLE_ARGUMENT",
parameters = Map(
"argName" -> "format",
"funcName" -> funcName,
"requiredType" -> "string"))
"funcName" -> s"`$funcName`",
"paramName" -> "`format`",
"paramType" -> "\"STRING\""))
checkError(
exception = intercept[AnalysisException] {
df2.select(func(col("input"), lit("invalid_format"))).collect()
Expand Down