diff --git a/core/src/main/resources/error/error-classes.json b/core/src/main/resources/error/error-classes.json
index 3f6c1ca036269..07b30c0b49211 100644
--- a/core/src/main/resources/error/error-classes.json
+++ b/core/src/main/resources/error/error-classes.json
@@ -255,6 +255,64 @@
     ],
     "sqlState" : "22023"
   },
+  "INVALID_FUNCTION_ARGUMENTS" : {
+    "message" : [
+      "Arguments of the <funcName> function are invalid:"
+    ],
+    "subClass" : {
+      "APPROX_COUNT_DISTINCT" : {
+        "message" : [
+          "The second argument should be a double literal"
+        ]
+      },
+      "CAST_ALIAS" : {
+        "message" : [
+          "Function accepts only one argument"
+        ]
+      },
+      "EMPTY_NUMBER_OF_ARGUMENTS" : {
+        "message" : [
+          "Empty number of arguments"
+        ]
+      },
+      "FIRST_LAST" : {
+        "message" : [
+          "The second argument should be a boolean literal"
+        ]
+      },
+      "INVALID_ARGUMENT_INDEX" : {
+        "message" : [
+          "The value of parameter(s) '<parameter>' has invalid index, expects <expected>, but got <found>."
+        ]
+      },
+      "INVALID_ARGUMENT_LENGTH" : {
+        "message" : [
+          "The value of parameter(s) '<parameter>' has invalid length, expects <expected>, but got <found>."
+        ]
+      },
+      "INVALID_ARGUMENT_VALUE" : {
+        "message" : [
+          "The value of parameter(s) '<parameter>' is invalid: <message>"
+        ]
+      },
+      "INVALID_NUMBER_OF_ARGUMENTS" : {
+        "message" : [
+          "Invalid number of arguments. Expected: <expected>; Found: <found>"
+        ]
+      },
+      "INVALID_NUMBER_OF_ARGUMENTS_FOR_V2FUNCTION" : {
+        "message" : [
+          "There are <expected> arguments in V2Function, but <found> parameters returned from 'inputTypes()'"
+        ]
+      },
+      "INVALID_OPERATION_FOR_V2FUNCTION" : {
+        "message" : [
+          "V2Function cannot process input: (<type>): <message>"
+        ]
+      }
+    },
+    "sqlState" : "22023"
+  },
   "INVALID_JSON_SCHEMA_MAP_TYPE" : {
     "message" : [
       "Input schema <jsonSchema> can only contain STRING as a key type for a MAP."
@@ -265,12 +323,6 @@
       "The group aggregate pandas UDF <functionList> cannot be invoked together with as other, non-pandas aggregate functions."
     ]
   },
-  "INVALID_PARAMETER_VALUE" : {
-    "message" : [
-      "The value of parameter(s) '<parameter>' in <functionName> is invalid: <expected>"
-    ],
-    "sqlState" : "22023"
-  },
   "INVALID_PROPERTY_KEY" : {
     "message" : [
       "<key> is an invalid property key, please use quotes, e.g. SET <key>=<value>"
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/AnalysisException.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/AnalysisException.scala
index 9ab0b223e1172..df3b5a93eaf0e 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/AnalysisException.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/AnalysisException.scala
@@ -90,6 +90,18 @@ class AnalysisException protected[sql] (
       errorSubClass = Some(errorSubClass),
       messageParameters = messageParameters)
 
+  def this(
+      errorClass: String,
+      errorSubClass: String,
+      messageParameters: Array[String],
+      cause: Option[Throwable]) =
+    this(
+      SparkThrowableHelper.getMessage(errorClass, errorSubClass, messageParameters),
+      errorClass = Some(errorClass),
+      errorSubClass = Some(errorSubClass),
+      messageParameters = messageParameters,
+      cause = cause)
+
   def this(
       errorClass: String,
       errorSubClass: String,
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
index 42f3ca041b885..34a9cf9e81e21 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
@@ -153,7 +153,11 @@ object FunctionRegistryBase {
     } catch {
       // the exception is an invocation exception. To get a meaningful message, we need the
       // cause.
-      case e: Exception => throw new AnalysisException(e.getCause.getMessage)
+      case e: Exception =>
+        e.getCause match {
+          case ae: AnalysisException if ae.errorClass.isDefined => throw ae
+          case other => throw new AnalysisException(other.getMessage)
+        }
     }
   }
 }
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/ApproxCountDistinctForIntervals.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/ApproxCountDistinctForIntervals.scala
index f3bf251ba0b5a..e110a2795369d 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/ApproxCountDistinctForIntervals.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/ApproxCountDistinctForIntervals.scala
@@ -55,7 +55,8 @@ case class ApproxCountDistinctForIntervals(
     this(
       child = child,
       endpointsExpression = endpointsExpression,
-      relativeSD = HyperLogLogPlusPlus.validateDoubleLiteral(relativeSD),
+      relativeSD = HyperLogLogPlusPlus
+        .validateDoubleLiteral("ApproxCountDistinctForIntervals", relativeSD),
       mutableAggBufferOffset = 0,
       inputAggBufferOffset = 0)
   }
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/HyperLogLogPlusPlus.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/HyperLogLogPlusPlus.scala
index 9b0493f3e68a4..e5ddba40c66ec 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/HyperLogLogPlusPlus.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/HyperLogLogPlusPlus.scala
@@ -70,7 +70,7 @@ case class HyperLogLogPlusPlus(
   def this(child: Expression, relativeSD: Expression) = {
     this(
       child = child,
-      relativeSD = HyperLogLogPlusPlus.validateDoubleLiteral(relativeSD),
+      relativeSD = HyperLogLogPlusPlus.validateDoubleLiteral("approx_count_distinct", relativeSD),
       mutableAggBufferOffset = 0,
       inputAggBufferOffset = 0)
   }
@@ -144,10 +144,10 @@ case class HyperLogLogPlusPlus(
 }
 
 object HyperLogLogPlusPlus {
-  def validateDoubleLiteral(exp: Expression): Double = exp match {
+  def validateDoubleLiteral(name: String, exp: Expression): Double = exp match {
     case Literal(d: Double, DoubleType) => d
     case Literal(dec: Decimal, _) => dec.toDouble
     case _ =>
-      throw QueryCompilationErrors.secondArgumentNotDoubleLiteralError
+      throw QueryCompilationErrors.secondArgumentNotDoubleLiteralError(name)
   }
 }
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
index 8b9663f17347f..aeb07327ea1af 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
@@ -69,9 +69,10 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase {
 
   def zeroArgumentIndexError(): Throwable = {
     new AnalysisException(
-      errorClass = "INVALID_PARAMETER_VALUE",
+      errorClass = "INVALID_FUNCTION_ARGUMENTS",
+      errorSubClass = "INVALID_ARGUMENT_INDEX",
       messageParameters = Array(
-        "strfmt", toSQLId("format_string"), "expects %1$, %2$ and so on, but got %0$."))
+        toSQLId("format_string"), "strfmt", "%1$, %2$ and so on", "%0$"))
   }
 
   def unorderablePivotColError(pivotCol: Expression): Throwable = {
@@ -496,14 +497,19 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase {
   def invalidFunctionArgumentsError(
      name: String, expectedInfo: String, actualNumber: Int): Throwable = {
-    new AnalysisException(s"Invalid number of arguments for function $name. " +
-      s"Expected: $expectedInfo; Found: $actualNumber")
+    new AnalysisException(
+      errorClass = "INVALID_FUNCTION_ARGUMENTS",
+      errorSubClass = "INVALID_NUMBER_OF_ARGUMENTS",
+      messageParameters = Array(name, expectedInfo, s"$actualNumber"))
   }
 
   def invalidFunctionArgumentNumberError(
       validParametersCount: Seq[Int], name: String, actualNumber: Int): Throwable = {
-    if (validParametersCount.length == 0) {
-      new AnalysisException(s"Invalid arguments for function $name")
+    if (validParametersCount.isEmpty) {
+      new AnalysisException(
+        errorClass = "INVALID_FUNCTION_ARGUMENTS",
+        errorSubClass = "EMPTY_NUMBER_OF_ARGUMENTS",
+        messageParameters = Array(name))
     } else {
       val expectedNumberOfParameters = if (validParametersCount.length == 1) {
         validParametersCount.head.toString
@@ -516,7 +522,10 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase {
   }
 
   def functionAcceptsOnlyOneArgumentError(name: String): Throwable = {
-    new AnalysisException(s"Function $name accepts only one argument")
+    new AnalysisException(
+      errorClass = "INVALID_FUNCTION_ARGUMENTS",
+      errorSubClass = "CAST_ALIAS",
+      messageParameters = Array(name))
   }
 
   def alterV2TableSetLocationWithPartitionNotSupportedError(): Throwable = {
@@ -819,8 +828,12 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase {
     new AnalysisException(s"Unsupported component type $clz in arrays")
   }
 
-  def secondArgumentNotDoubleLiteralError(): Throwable = {
-    new AnalysisException("The second argument should be a double literal.")
+  def secondArgumentNotDoubleLiteralError(name: String): Throwable = {
+    new AnalysisException(
+      errorClass = "INVALID_FUNCTION_ARGUMENTS",
+      errorSubClass = "APPROX_COUNT_DISTINCT",
+      messageParameters = Array(name)
+    )
   }
 
   def dataTypeUnsupportedByExtractValueError(
@@ -1540,16 +1553,26 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase {
       unbound: UnboundFunction,
       arguments: Seq[Expression],
       unsupported: UnsupportedOperationException): Throwable = {
-    new AnalysisException(s"Function '${unbound.name}' cannot process " +
-      s"input: (${arguments.map(_.dataType.simpleString).mkString(", ")}): " +
-      unsupported.getMessage, cause = Some(unsupported))
+    new AnalysisException(
+      errorClass = "INVALID_FUNCTION_ARGUMENTS",
+      errorSubClass = "INVALID_OPERATION_FOR_V2FUNCTION",
+      messageParameters = Array(
+        unbound.name,
+        arguments.map(x => toSQLType(x.dataType)).mkString(", "),
+        unsupported.getMessage),
+      cause = Some(unsupported))
   }
 
   def v2FunctionInvalidInputTypeLengthError(
       bound: BoundFunction,
       args: Seq[Expression]): Throwable = {
-    new AnalysisException(s"Invalid bound function '${bound.name()}: there are ${args.length} " +
-      s"arguments but ${bound.inputTypes().length} parameters returned from 'inputTypes()'")
+    new AnalysisException(
+      errorClass = "INVALID_FUNCTION_ARGUMENTS",
+      errorSubClass = "INVALID_NUMBER_OF_ARGUMENTS_FOR_V2FUNCTION",
+      messageParameters = Array(
+        bound.name(), s"${args.length}",
+        s"${bound.inputTypes().length}")
+    )
   }
 
   def ambiguousRelationAliasNameInNestedCTEError(name: String): Throwable = {
@@ -1629,7 +1652,11 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase {
   }
 
   def secondArgumentInFunctionIsNotBooleanLiteralError(funcName: String): Throwable = {
-    new AnalysisException(s"The second argument in $funcName should be a boolean literal.")
+    new AnalysisException(
+      errorClass = "INVALID_FUNCTION_ARGUMENTS",
+      errorSubClass = "FIRST_LAST",
+      messageParameters = Array(funcName)
+    )
   }
 
   def joinConditionMissingOrTrivialError(
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
index e4481a4c7835d..f1aa9f4a8f3ee 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -1981,11 +1981,13 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase {
 
   def invalidAesKeyLengthError(actualLength: Int): RuntimeException = {
     new SparkRuntimeException(
-      errorClass = "INVALID_PARAMETER_VALUE",
+      errorClass = "INVALID_FUNCTION_ARGUMENTS",
+      errorSubClass = "INVALID_ARGUMENT_LENGTH",
       messageParameters = Array(
-        "key",
         aesFuncName,
-        s"expects a binary value with 16, 24 or 32 bytes, but got ${actualLength.toString} bytes."))
+        "key",
+        "a binary value with 16, 24 or 32 bytes",
+        s"${actualLength.toString} bytes"))
   }
 
   def aesModeUnsupportedError(mode: String, padding: String): RuntimeException = {
@@ -1997,10 +1999,11 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase {
 
   def aesCryptoError(detailMessage: String): RuntimeException = {
     new SparkRuntimeException(
-      errorClass = "INVALID_PARAMETER_VALUE",
+      errorClass = "INVALID_FUNCTION_ARGUMENTS",
+      errorSubClass = "INVALID_ARGUMENT_VALUE",
       messageParameters = Array(
-        "expr, key",
         aesFuncName,
+        "expr, key",
         s"Detail message: $detailMessage"))
   }
 
@@ -2083,10 +2086,11 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase {
 
   def invalidPatternError(funcName: String, pattern: String): RuntimeException = {
     new SparkRuntimeException(
-      errorClass = "INVALID_PARAMETER_VALUE",
+      errorClass = "INVALID_FUNCTION_ARGUMENTS",
+      errorSubClass = "INVALID_ARGUMENT_VALUE",
       messageParameters = Array(
-        "regexp",
         toSQLId(funcName),
+        "regexp",
         pattern))
   }
 }
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/RegexpExpressionsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/RegexpExpressionsSuite.scala
index 91b0f0e1039f6..a40674f6a6bf3 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/RegexpExpressionsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/RegexpExpressionsSuite.scala
@@ -502,18 +502,20 @@ class RegexpExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
     val s = $"s".string.at(0)
     val p = $"p".string.at(1)
     val r = $"r".int.at(2)
-    val prefix = "[INVALID_PARAMETER_VALUE] The value of parameter(s) 'regexp' in"
+    val prefix = "[INVALID_FUNCTION_ARGUMENTS.INVALID_ARGUMENT_VALUE] " +
+      "Arguments of the %s function are invalid: " +
+      "The value of parameter(s) 'regexp'"
     checkExceptionInExpression[SparkRuntimeException](
       RegExpExtract(s, p, r),
       create_row("1a 2b 14m", "(?l)", 0),
-      s"$prefix `regexp_extract` is invalid: (?l)")
+      s"${prefix.format("`regexp_extract`")} is invalid: (?l)")
     checkExceptionInExpression[SparkRuntimeException](
       RegExpExtractAll(s, p, r),
       create_row("abc", "] [", 0),
-      s"$prefix `regexp_extract_all` is invalid: ] [")
+      s"${prefix.format("`regexp_extract_all`")} is invalid: ] [")
     checkExceptionInExpression[SparkRuntimeException](
       RegExpInStr(s, p, r),
       create_row("abc", ", (", 0),
-      s"$prefix `regexp_instr` is invalid: , (")
+      s"${prefix.format("`regexp_instr`")} is invalid: , (")
   }
 }
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/aggregate/FirstLastTestSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/aggregate/FirstLastTestSuite.scala
index 292edc715538b..793406db1e693 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/aggregate/FirstLastTestSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/aggregate/FirstLastTestSuite.scala
@@ -112,14 +112,14 @@ class FirstLastTestSuite extends SparkFunSuite {
     val msg1 = intercept[AnalysisException] {
       new First(input, Literal(1, IntegerType))
     }.getMessage
-    assert(msg1.contains("The second argument in first should be a boolean literal"))
+    assert(msg1.contains("The second argument should be a boolean literal"))
     val msg2 = intercept[AnalysisException] {
       new Last(input, Literal(1, IntegerType))
     }.getMessage
-    assert(msg2.contains("The second argument in last should be a boolean literal"))
+    assert(msg2.contains("The second argument should be a boolean literal"))
     val msg3 = intercept[AnalysisException] {
       new AnyValue(input, Literal(1, IntegerType))
     }.getMessage
-    assert(msg3.contains("The second argument in any_value should be a boolean literal"))
+    assert(msg3.contains("The second argument should be a boolean literal"))
   }
 }
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/string-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/string-functions.sql.out
index 08dcc011f2475..5bec56af82b67 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/string-functions.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/string-functions.sql.out
@@ -719,8 +719,16 @@ select decode()
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-Invalid number of arguments for function decode. Expected: 2; Found: 0; line 1 pos 7
-
+{
+  "errorClass" : "INVALID_FUNCTION_ARGUMENTS",
+  "errorSubClass" : "INVALID_NUMBER_OF_ARGUMENTS",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "funcName" : "decode",
+    "expected" : "2",
+    "found" : "0"
+  }
+}
 
 -- !query
 select decode(encode('abc', 'utf-8'))
@@ -728,8 +736,16 @@ select decode(encode('abc', 'utf-8'))
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-Invalid number of arguments for function decode. Expected: 2; Found: 1; line 1 pos 7
-
+{
+  "errorClass" : "INVALID_FUNCTION_ARGUMENTS",
+  "errorSubClass" : "INVALID_NUMBER_OF_ARGUMENTS",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "funcName" : "decode",
+    "expected" : "2",
+    "found" : "1"
+  }
+}
 
 -- !query
 select decode(encode('abc', 'utf-8'), 'utf-8')
diff --git a/sql/core/src/test/resources/sql-tests/results/ceil-floor-with-scale-param.sql.out b/sql/core/src/test/resources/sql-tests/results/ceil-floor-with-scale-param.sql.out
index 38efdac409231..e5b2d62cbd2ff 100644
--- a/sql/core/src/test/resources/sql-tests/results/ceil-floor-with-scale-param.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ceil-floor-with-scale-param.sql.out
@@ -111,7 +111,16 @@ SELECT CEIL(2.5, 0, 0)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-Invalid number of arguments for function ceil. Expected: 2; Found: 3; line 1 pos 7
+{
+  "errorClass" : "INVALID_FUNCTION_ARGUMENTS",
+  "errorSubClass" : "INVALID_NUMBER_OF_ARGUMENTS",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "funcName" : "ceil",
+    "expected" : "2",
+    "found" : "3"
+  }
+}
 
 
 -- !query
@@ -226,4 +235,13 @@ SELECT FLOOR(2.5, 0, 0)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-Invalid number of arguments for function floor. Expected: 2; Found: 3; line 1 pos 7
+{
+  "errorClass" : "INVALID_FUNCTION_ARGUMENTS",
+  "errorSubClass" : "INVALID_NUMBER_OF_ARGUMENTS",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "funcName" : "floor",
+    "expected" : "2",
+    "found" : "3"
+  }
+}
diff --git a/sql/core/src/test/resources/sql-tests/results/csv-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/csv-functions.sql.out
index 301e5cc78df4b..030405a08f472 100644
--- a/sql/core/src/test/resources/sql-tests/results/csv-functions.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/csv-functions.sql.out
@@ -70,8 +70,16 @@ select from_csv()
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-Invalid number of arguments for function from_csv. Expected: one of 2 and 3; Found: 0; line 1 pos 7
-
+{
+  "errorClass" : "INVALID_FUNCTION_ARGUMENTS",
+  "errorSubClass" : "INVALID_NUMBER_OF_ARGUMENTS",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "funcName" : "from_csv",
+    "expected" : "one of 2 and 3",
+    "found" : "0"
+  }
+}
 
 -- !query
 select from_csv('1,abc', schema_of_csv('1,abc'))
diff --git a/sql/core/src/test/resources/sql-tests/results/json-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/json-functions.sql.out
index ca079e4add048..966e6e9c97420 100644
--- a/sql/core/src/test/resources/sql-tests/results/json-functions.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/json-functions.sql.out
@@ -87,8 +87,16 @@ select to_json()
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-Invalid number of arguments for function to_json. Expected: one of 1 and 2; Found: 0; line 1 pos 7
-
+{
+  "errorClass" : "INVALID_FUNCTION_ARGUMENTS",
+  "errorSubClass" : "INVALID_NUMBER_OF_ARGUMENTS",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "funcName" : "to_json",
+    "expected" : "one of 1 and 2",
+    "found" : "0"
+  }
+}
 
 -- !query
 select from_json('{"a":1}', 'a INT')
@@ -161,8 +169,16 @@ select from_json()
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-Invalid number of arguments for function from_json. Expected: one of 2 and 3; Found: 0; line 1 pos 7
-
+{
+  "errorClass" : "INVALID_FUNCTION_ARGUMENTS",
+  "errorSubClass" : "INVALID_NUMBER_OF_ARGUMENTS",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "funcName" : "from_json",
+    "expected" : "one of 2 and 3",
+    "found" : "0"
+  }
+}
 
 -- !query
 SELECT json_tuple('{"a" : 1, "b" : 2}', CAST(NULL AS STRING), 'b', CAST(NULL AS STRING), 'a')
@@ -446,8 +462,16 @@ select json_array_length()
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-Invalid number of arguments for function json_array_length. Expected: 1; Found: 0; line 1 pos 7
-
+{
+  "errorClass" : "INVALID_FUNCTION_ARGUMENTS",
+  "errorSubClass" : "INVALID_NUMBER_OF_ARGUMENTS",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "funcName" : "json_array_length",
+    "expected" : "1",
+    "found" : "0"
+  }
+}
 
 -- !query
 select json_array_length('')
@@ -519,8 +543,16 @@ select json_object_keys()
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-Invalid number of arguments for function json_object_keys. Expected: 1; Found: 0; line 1 pos 7
-
+{
+  "errorClass" : "INVALID_FUNCTION_ARGUMENTS",
+  "errorSubClass" : "INVALID_NUMBER_OF_ARGUMENTS",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "funcName" : "json_object_keys",
+    "expected" : "1",
+    "found" : "0"
+  }
+}
 
 -- !query
 select json_object_keys(null)
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/text.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/text.sql.out
index 906b47d62a023..b9a354bc4ddc6 100755
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/text.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/text.sql.out
@@ -309,12 +309,14 @@
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "INVALID_PARAMETER_VALUE",
+  "errorClass" : "INVALID_FUNCTION_ARGUMENTS",
+  "errorSubClass" : "INVALID_ARGUMENT_INDEX",
   "sqlState" : "22023",
   "messageParameters" : {
+    "funcName" : "`format_string`",
     "parameter" : "strfmt",
-    "functionName" : "`format_string`",
-    "expected" : "expects %1$, %2$ and so on, but got %0$."
+    "expected" : "%1$, %2$ and so on",
+    "found" : "%0$"
   }
 }
diff --git a/sql/core/src/test/resources/sql-tests/results/regexp-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/regexp-functions.sql.out
index 65e1e31ae7cf3..2617b98a09468 100644
--- a/sql/core/src/test/resources/sql-tests/results/regexp-functions.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/regexp-functions.sql.out
@@ -132,12 +132,13 @@
 struct<>
 -- !query output
 org.apache.spark.SparkRuntimeException
 {
-  "errorClass" : "INVALID_PARAMETER_VALUE",
+  "errorClass" : "INVALID_FUNCTION_ARGUMENTS",
+  "errorSubClass" : "INVALID_ARGUMENT_VALUE",
   "sqlState" : "22023",
   "messageParameters" : {
+    "funcName" : "`regexp_extract`",
     "parameter" : "regexp",
-    "functionName" : "`regexp_extract`",
-    "expected" : "(?l)"
+    "message" : "(?l)"
   }
 }
@@ -275,16 +276,16 @@
 struct<>
 -- !query output
 org.apache.spark.SparkRuntimeException
 {
-  "errorClass" : "INVALID_PARAMETER_VALUE",
+  "errorClass" : "INVALID_FUNCTION_ARGUMENTS",
+  "errorSubClass" : "INVALID_ARGUMENT_VALUE",
   "sqlState" : "22023",
   "messageParameters" : {
+    "funcName" : "`regexp_extract_all`",
     "parameter" : "regexp",
-    "functionName" : "`regexp_extract_all`",
-    "expected" : "], ["
+    "message" : "], ["
   }
 }
-
 -- !query
 SELECT regexp_replace('healthy, wealthy, and wise', '\\w+thy', 'something')
@@ -582,11 +583,12 @@
 struct<>
 -- !query output
 org.apache.spark.SparkRuntimeException
 {
-  "errorClass" : "INVALID_PARAMETER_VALUE",
+  "errorClass" : "INVALID_FUNCTION_ARGUMENTS",
+  "errorSubClass" : "INVALID_ARGUMENT_VALUE",
   "sqlState" : "22023",
   "messageParameters" : {
+    "funcName" : "`regexp_instr`",
     "parameter" : "regexp",
-    "functionName" : "`regexp_instr`",
-    "expected" : ") ?"
+    "message" : ") ?"
   }
 }
diff --git a/sql/core/src/test/resources/sql-tests/results/sql-compatibility-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/sql-compatibility-functions.sql.out
index 9521cbf0bf2b8..051515eb86091 100644
--- a/sql/core/src/test/resources/sql-tests/results/sql-compatibility-functions.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/sql-compatibility-functions.sql.out
@@ -93,7 +93,14 @@ SELECT string(1, 2)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-Function string accepts only one argument; line 1 pos 7
+{
+  "errorClass" : "INVALID_FUNCTION_ARGUMENTS",
+  "errorSubClass" : "CAST_ALIAS",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "funcName" : "string"
+  }
+}
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/string-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/string-functions.sql.out
index a02b27142ff21..5c14729451414 100644
--- a/sql/core/src/test/resources/sql-tests/results/string-functions.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/string-functions.sql.out
@@ -651,7 +651,16 @@ select decode()
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-Invalid number of arguments for function decode. Expected: 2; Found: 0; line 1 pos 7
+{
+  "errorClass" : "INVALID_FUNCTION_ARGUMENTS",
+  "errorSubClass" : "INVALID_NUMBER_OF_ARGUMENTS",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "funcName" : "decode",
+    "expected" : "2",
+    "found" : "0"
+  }
+}
 
 
 -- !query
@@ -660,8 +669,16 @@ select decode(encode('abc', 'utf-8'))
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-Invalid number of arguments for function decode. Expected: 2; Found: 1; line 1 pos 7
-
+{
+  "errorClass" : "INVALID_FUNCTION_ARGUMENTS",
+  "errorSubClass" : "INVALID_NUMBER_OF_ARGUMENTS",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "funcName" : "decode",
+    "expected" : "2",
+    "found" : "1"
+  }
+}
 
 -- !query
 select decode(encode('abc', 'utf-8'), 'utf-8')
diff --git a/sql/core/src/test/resources/sql-tests/results/table-valued-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/table-valued-functions.sql.out
index 1ce6fbbdbc84b..cbc0536697cef 100644
--- a/sql/core/src/test/resources/sql-tests/results/table-valued-functions.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/table-valued-functions.sql.out
@@ -72,7 +72,7 @@ Table-valued function range with alternatives:
 range(start: long, end: long, step: long)
 range(start: long, end: long)
 range(end: long)
-cannot be applied to (integer, integer, integer, integer, integer): Invalid number of arguments for function range. Expected: one of 1, 2, 3 and 4; Found: 5; line 1 pos 14
+cannot be applied to (integer, integer, integer, integer, integer): [INVALID_FUNCTION_ARGUMENTS.INVALID_NUMBER_OF_ARGUMENTS] Arguments of the range function are invalid: Invalid number of arguments. Expected: one of 1, 2, 3 and 4; Found: 5; line 1 pos 14
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/timestamp-ntz.sql.out b/sql/core/src/test/resources/sql-tests/results/timestamp-ntz.sql.out
index acdc65a23f4b9..d4e9d07ac5e7d 100644
--- a/sql/core/src/test/resources/sql-tests/results/timestamp-ntz.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/timestamp-ntz.sql.out
@@ -45,8 +45,16 @@ SELECT make_timestamp_ntz(2021, 07, 11, 6, 30, 45.678, 'CET')
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-Invalid number of arguments for function make_timestamp_ntz. Expected: 6; Found: 7; line 1 pos 7
-
+{
+  "errorClass" : "INVALID_FUNCTION_ARGUMENTS",
+  "errorSubClass" : "INVALID_NUMBER_OF_ARGUMENTS",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "funcName" : "make_timestamp_ntz",
+    "expected" : "6",
+    "found" : "7"
+  }
+}
 
 -- !query
 SELECT make_timestamp_ntz(2021, 07, 11, 6, 30, 60.007)
diff --git a/sql/core/src/test/resources/sql-tests/results/udaf.sql.out b/sql/core/src/test/resources/sql-tests/results/udaf.sql.out
index 0605af1c808db..24d8e1db697d5 100644
--- a/sql/core/src/test/resources/sql-tests/results/udaf.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/udaf.sql.out
@@ -31,8 +31,16 @@ SELECT default.myDoubleAvg(int_col1, 3) as my_avg from t1
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-Invalid number of arguments for function spark_catalog.default.mydoubleavg. Expected: 1; Found: 2; line 1 pos 7
-
+{
+  "errorClass" : "INVALID_FUNCTION_ARGUMENTS",
+  "errorSubClass" : "INVALID_NUMBER_OF_ARGUMENTS",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "funcName" : "spark_catalog.default.mydoubleavg",
+    "expected" : "1",
+    "found" : "2"
+  }
+}
 
 -- !query
 CREATE FUNCTION udaf1 AS 'test.non.existent.udaf'
diff --git a/sql/core/src/test/resources/sql-tests/results/udf/udf-udaf.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/udf-udaf.sql.out
index 80a3d9af94269..873b7530ecd43 100644
--- a/sql/core/src/test/resources/sql-tests/results/udf/udf-udaf.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/udf/udf-udaf.sql.out
@@ -31,8 +31,16 @@ SELECT default.myDoubleAvg(udf(int_col1), udf(3)) as my_avg from t1
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-Invalid number of arguments for function spark_catalog.default.mydoubleavg. Expected: 1; Found: 2; line 1 pos 7
-
+{
+  "errorClass" : "INVALID_FUNCTION_ARGUMENTS",
+  "errorSubClass" : "INVALID_NUMBER_OF_ARGUMENTS",
+  "sqlState" : "22023",
+  "messageParameters" : {
+    "funcName" : "spark_catalog.default.mydoubleavg",
+    "expected" : "1",
+    "found" : "2"
+  }
+}
 
 -- !query
 CREATE FUNCTION udaf1 AS 'test.non.existent.udaf'
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameFunctionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameFunctionsSuite.scala
index 9924fbfbf626c..c08ce7fb32377 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameFunctionsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameFunctionsSuite.scala
@@ -3675,7 +3675,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession {
     val ex2 = intercept[AnalysisException] {
       df.selectExpr("zip_with(a1, a2, (acc, x) -> x, (acc, x) -> x)")
     }
-    assert(ex2.getMessage.contains("Invalid number of arguments for function zip_with"))
+    assert(ex2.getMessage.contains("Invalid number of arguments"))
     val ex3 = intercept[AnalysisException] {
       df.selectExpr("zip_with(i, a2, (acc, x) -> x)")
     }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/StringFunctionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/StringFunctionsSuite.scala
index d07be9c19714e..d0638db04a6c4 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/StringFunctionsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/StringFunctionsSuite.scala
@@ -582,7 +582,7 @@ class StringFunctionsSuite extends QueryTest with SharedSparkSession {
     val m = intercept[AnalysisException] {
       df.selectExpr("sentences()")
     }.getMessage
-    assert(m.contains("Invalid number of arguments for function sentences"))
+    assert(m.contains("Invalid number of arguments"))
   }
 
   test("str_to_map function") {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/UDFSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/UDFSuite.scala
index ba8af6238938c..5be8884d712a0 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/UDFSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/UDFSuite.scala
@@ -103,7 +103,7 @@ class UDFSuite extends QueryTest with SharedSparkSession {
     val e = intercept[AnalysisException] {
      df.selectExpr("substr('abcd', 2, 3, 4)")
     }
-    assert(e.getMessage.contains("Invalid number of arguments for function substr. Expected:"))
+    assert(e.getMessage.contains("Invalid number of arguments. Expected:"))
   }
 
   test("error reporting for incorrect number of arguments - udf") {
@@ -112,7 +112,7 @@ class UDFSuite extends QueryTest with SharedSparkSession {
       spark.udf.register("foo", (_: String).length)
       df.selectExpr("foo(2, 3, 4)")
     }
-    assert(e.getMessage.contains("Invalid number of arguments for function foo. Expected:"))
+    assert(e.getMessage.contains("Invalid number of arguments. Expected:"))
   }
 
   test("error reporting for undefined functions") {
@@ -618,7 +618,7 @@ class UDFSuite extends QueryTest with SharedSparkSession {
     val e = intercept[AnalysisException] {
       spark.sql("SELECT CAST(1)")
     }
-    assert(e.getMessage.contains("Invalid arguments for function cast"))
+    assert(e.getMessage.contains("Empty number of arguments"))
   }
 
   test("only one case class parameter") {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala
index 85aa7221b0ee6..48b52067276f8 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala
@@ -19,10 +19,12 @@ package org.apache.spark.sql.errors
 
 import org.apache.spark.sql.{AnalysisException, ClassData, IntegratedUDFTestUtils, QueryTest, Row}
 import org.apache.spark.sql.api.java.{UDF1, UDF2, UDF23Test}
+import org.apache.spark.sql.connector.catalog.{Identifier, InMemoryCatalog}
+import org.apache.spark.sql.connector.catalog.functions.{BoundFunction, ScalarFunction, UnboundFunction}
 import org.apache.spark.sql.expressions.SparkUserDefinedFunction
 import org.apache.spark.sql.functions.{grouping, grouping_id, lit, struct, sum, udf}
 import org.apache.spark.sql.internal.SQLConf
-import org.apache.spark.sql.types.{IntegerType, MapType, StringType, StructField, StructType}
+import org.apache.spark.sql.types.{DataType, DataTypes, IntegerType, MapType, StringType, StructField, StructType}
 
 case class StringLongClass(a: String, b: Long)
 
@@ -109,17 +111,19 @@ class QueryCompilationErrorsSuite
     }
   }
 
-  test("INVALID_PARAMETER_VALUE: the argument_index of string format is invalid") {
+  test("INVALID_FUNCTION_ARGUMENTS: the argument_index of string format is invalid") {
     withSQLConf(SQLConf.ALLOW_ZERO_INDEX_IN_FORMAT_STRING.key -> "false") {
       checkError(
        exception = intercept[AnalysisException] {
          sql("select format_string('%0$s', 'Hello')")
        },
-        errorClass = "INVALID_PARAMETER_VALUE",
+        errorClass = "INVALID_FUNCTION_ARGUMENTS",
+        errorSubClass = Some("INVALID_ARGUMENT_INDEX"),
        parameters = Map(
+          "funcName" -> "`format_string`",
          "parameter" -> "strfmt",
-          "functionName" -> "`format_string`",
-          "expected" -> "expects %1$, %2$ and so on, but got %0$."))
+          "expected" -> "%1$, %2$ and so on",
+          "found" -> "%0$"))
     }
   }
 
@@ -616,6 +620,141 @@ class QueryCompilationErrorsSuite
       "functionName" -> "`array_contains`",
       "classCanonicalName" -> "org.apache.spark.sql.catalyst.expressions.ArrayContains"))
   }
+
+  test("INVALID_FUNCTION_ARGUMENTS: invalid number of UDF arguments") {
+    spark.udf.register("testFunc", (n: Int) => n.toString)
+    checkError(
+      exception = intercept[AnalysisException](
+        sql("SELECT testFunc(123, 123) as value")
+      ),
+      errorClass = "INVALID_FUNCTION_ARGUMENTS",
+      errorSubClass = Some("INVALID_NUMBER_OF_ARGUMENTS"),
+      parameters = Map(
+        "funcName" -> "testFunc",
+        "expected" -> "1",
+        "found" -> "2"
+      ),
+      sqlState = Some("22023")
+    )
+  }
+
+  test("INVALID_FUNCTION_ARGUMENTS: empty number of function arguments") {
+    checkError(
+      exception = intercept[AnalysisException](
+        sql("SELECT CAST()")
+      ),
+      errorClass = "INVALID_FUNCTION_ARGUMENTS",
+      errorSubClass = Some("EMPTY_NUMBER_OF_ARGUMENTS"),
+      parameters = Map(
+        "funcName" -> "cast"
+      ),
+      sqlState = Some("22023")
+    )
+  }
+
+  test("INVALID_FUNCTION_ARGUMENTS: invalid number of function arguments") {
+    checkError(
+      exception = intercept[AnalysisException](
+        sql("SELECT to_timestamp_ntz()")
+      ),
+      errorClass = "INVALID_FUNCTION_ARGUMENTS",
+      errorSubClass = Some("INVALID_NUMBER_OF_ARGUMENTS"),
+      parameters = Map(
+        "funcName" -> "to_timestamp_ntz",
+        "expected" -> "one of 1 and 2",
+        "found" -> "0"
+      ),
+      sqlState = Some("22023")
+    )
+  }
+
+  test("INVALID_FUNCTION_ARGUMENTS: cast alias function called with two arguments") {
+    checkError(
+      exception = intercept[AnalysisException](
+        sql("SELECT int('1', '2')")
+      ),
+      errorClass = "INVALID_FUNCTION_ARGUMENTS",
+      errorSubClass = Some("CAST_ALIAS"),
+      parameters = Map(
+        "funcName" -> "int"
+      ),
+      sqlState = Some("22023")
+    )
+  }
+
+  test("INVALID_FUNCTION_ARGUMENTS: the second argument is not a double literal") {
+    checkError(
+      exception = intercept[AnalysisException](
+        sql("SELECT approx_count_distinct(1, 1)")
+      ),
+      errorClass = "INVALID_FUNCTION_ARGUMENTS",
+      errorSubClass = Some("APPROX_COUNT_DISTINCT"),
+      parameters = Map(
+        "funcName" -> "approx_count_distinct"
+      ),
+      sqlState = Some("22023")
+    )
+  }
+
+  test("INVALID_FUNCTION_ARGUMENTS: V2Function cannot process input of the map type") {
+    withSQLConf(SQLConf.DEFAULT_CATALOG.key -> "testcat",
+        "spark.sql.catalog.testcat" -> classOf[InMemoryCatalog].getName) {
+      spark.sessionState.catalogManager.catalog("testcat")
+        .asInstanceOf[InMemoryCatalog]
+        .createFunction(Identifier.of(Array("ns"), "strlen"), new StrLen)
+      checkError(
+        exception = intercept[AnalysisException](
+          sql("SELECT testcat.ns.strlen(map('abc', 'abc'))")
+        ),
+        errorClass = "INVALID_FUNCTION_ARGUMENTS",
+        errorSubClass = Some("INVALID_OPERATION_FOR_V2FUNCTION"),
+        parameters = Map(
+          "funcName" -> "strlen",
+          "type" -> "\"MAP<STRING, STRING>\"",
+          "message" -> "Expect StringType, but found MapType(StringType,StringType,false)"
+        ),
+        sqlState = Some("22023")
+      )
+      spark.sessionState.conf.unsetConf("spark.sql.catalog.testcat")
+    }
+  }
+
+  test("INVALID_FUNCTION_ARGUMENTS: invalid input type length of the V2Function") {
+    withSQLConf(SQLConf.DEFAULT_CATALOG.key -> "testcat",
+        "spark.sql.catalog.testcat" -> classOf[InMemoryCatalog].getName) {
+      spark.sessionState.catalogManager.catalog("testcat")
+        .asInstanceOf[InMemoryCatalog]
+        .createFunction(Identifier.of(Array("ns"), "strlen"), new StrLen)
+      checkError(
+        exception = intercept[AnalysisException](
+          sql("SELECT testcat.ns.strlen('abc', 'abc')")
+        ),
+        errorClass = "INVALID_FUNCTION_ARGUMENTS",
+        errorSubClass = Some("INVALID_NUMBER_OF_ARGUMENTS_FOR_V2FUNCTION"),
+        parameters = Map(
+          "funcName" -> "strlen",
+          "expected" -> "2",
+          "found" -> "1"
+        ),
+        sqlState = Some("22023")
+      )
+      spark.sessionState.conf.unsetConf("spark.sql.catalog.testcat")
+    }
+  }
+
+  test("INVALID_FUNCTION_ARGUMENTS: the second argument is not a boolean literal") {
+    checkError(
+      exception = intercept[AnalysisException](
+        sql("SELECT first(1, '1')")
+      ),
+      errorClass = "INVALID_FUNCTION_ARGUMENTS",
+      errorSubClass = Some("FIRST_LAST"),
+      parameters = Map(
+        "funcName" -> "first"
+      ),
+      sqlState = Some("22023")
+    )
+  }
 }
 
 class MyCastToString extends SparkUserDefinedFunction(
@@ -646,3 +785,23 @@ class MultiIntSum extends
   }
   // scalastyle:on argcount
 }
+
+class StrLen extends UnboundFunction {
+  override def bind(inputType: StructType): BoundFunction = {
+    for (field <- inputType.fields) {
+      if (!field.dataType.isInstanceOf[StringType]) {
+        throw new UnsupportedOperationException("Expect StringType, but found " + field.dataType)
+      }
+    }
+    new ScalarFunction[Int] {
+      override def inputTypes(): Array[DataType] = Array[DataType](StringType)
+      override def resultType(): DataType = DataTypes.IntegerType
+      override def name(): String = "strlen"
+      def invoke(str: String): Int = str.length
+    }
+  }
+
+  override def description(): String = "strlen(string) -> int"
+
+  override def name(): String = "strlen"
+}
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
index c2723ba4c1a54..f65245022effa 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
@@ -67,18 +67,19 @@ class QueryExecutionErrorsSuite
     (df1, df2)
   }
 
-  test("INVALID_PARAMETER_VALUE: invalid key lengths in AES functions") {
+  test("INVALID_FUNCTION_ARGUMENTS: invalid key lengths in AES functions") {
     val (df1, df2) = getAesInputs()
     def checkInvalidKeyLength(df: => DataFrame, inputBytes: Int): Unit = {
       checkError(
         exception = intercept[SparkException] {
           df.collect
         }.getCause.asInstanceOf[SparkRuntimeException],
-        errorClass = "INVALID_PARAMETER_VALUE",
-        parameters = Map("parameter" -> "key",
-          "functionName" -> "`aes_encrypt`/`aes_decrypt`",
-          "expected" -> ("expects a binary value with 16, 24 or 32 bytes, but got " +
-            inputBytes.toString + " bytes.")),
+        errorClass = "INVALID_FUNCTION_ARGUMENTS",
+        errorSubClass = "INVALID_ARGUMENT_LENGTH",
+        parameters = Map("funcName" -> "`aes_encrypt`/`aes_decrypt`",
+          "parameter" -> "key",
+          "expected" -> "a binary value with 16, 24 or 32 bytes",
+          "found" -> s"${inputBytes.toString} bytes"),
         sqlState = "22023")
     }
 
@@ -101,7 +102,7 @@ class QueryExecutionErrorsSuite
     }
   }
 
-  test("INVALID_PARAMETER_VALUE: AES decrypt failure - key mismatch") {
+  test("INVALID_FUNCTION_ARGUMENTS: AES decrypt failure - key mismatch") {
     val (_, df2) = getAesInputs()
     Seq(
       ("value16", "1234567812345678"),
@@ -111,10 +112,12 @@ class QueryExecutionErrorsSuite
         exception = intercept[SparkException] {
           df2.selectExpr(s"aes_decrypt(unbase64($colName), binary('$key'), 'ECB')").collect
         }.getCause.asInstanceOf[SparkRuntimeException],
-        errorClass = "INVALID_PARAMETER_VALUE",
-        parameters = Map("parameter" -> "expr, key",
-          "functionName" -> "`aes_encrypt`/`aes_decrypt`",
-          "expected" -> ("Detail message: " +
+        errorClass = "INVALID_FUNCTION_ARGUMENTS",
+        errorSubClass = "INVALID_ARGUMENT_VALUE",
+        parameters = Map(
+          "funcName" -> "`aes_encrypt`/`aes_decrypt`",
+          "parameter" -> "expr, key",
+          "message" -> ("Detail message: " +
             "Given final block not properly padded. " +
             "Such issues can arise if a bad key is used during decryption.")),
         sqlState = "22023")
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveUDAFSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveUDAFSuite.scala
index 3fcc8612b89ee..08e61d5122261 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveUDAFSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveUDAFSuite.scala
@@ -174,7 +174,7 @@ class HiveUDAFSuite extends QueryTest
         sql(s"SELECT $functionName(100)")
       }.getMessage
       assert(e.contains(
-        s"Invalid number of arguments for function $functionName. Expected: 2; Found: 1;"))
+        "Invalid number of arguments. Expected: 2; Found: 1;"))
     }
   }
 }
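
A minimal sketch (not part of the diff) of how the restructured error class surfaces to callers. It assumes only what the change above introduces: the `INVALID_FUNCTION_ARGUMENTS` templates in error-classes.json and the new four-argument `AnalysisException` constructor; `getErrorClass` and `getSqlState` come from Spark's `SparkThrowable` interface.

```scala
import org.apache.spark.sql.AnalysisException

// Raise the new class/subclass pair through the constructor added in
// AnalysisException.scala; the parameters fill <funcName>, <expected>
// and <found> in the templates added to error-classes.json.
val e = new AnalysisException(
  errorClass = "INVALID_FUNCTION_ARGUMENTS",
  errorSubClass = "INVALID_NUMBER_OF_ARGUMENTS",
  messageParameters = Array("decode", "2", "0"),
  cause = None)

assert(e.getErrorClass == "INVALID_FUNCTION_ARGUMENTS")
assert(e.getSqlState == "22023")
// Rendered as in the golden files above:
// [INVALID_FUNCTION_ARGUMENTS.INVALID_NUMBER_OF_ARGUMENTS] Arguments of the
// decode function are invalid: Invalid number of arguments. Expected: 2; Found: 0
println(e.getMessage)
```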