diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
index 1aa5ec71d1ba8..7404a27e1753f 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
@@ -590,14 +590,19 @@ object FunctionRegistry {
       val validParametersCount = constructors
         .filter(_.getParameterTypes.forall(_ == classOf[Expression]))
         .map(_.getParameterCount).distinct.sorted
-      val expectedNumberOfParameters = if (validParametersCount.length == 1) {
-        validParametersCount.head.toString
+      val invalidArgumentsMsg = if (validParametersCount.length == 0) {
+        s"Invalid arguments for function $name"
       } else {
-        validParametersCount.init.mkString("one of ", ", ", " and ") +
-          validParametersCount.last
+        val expectedNumberOfParameters = if (validParametersCount.length == 1) {
+          validParametersCount.head.toString
+        } else {
+          validParametersCount.init.mkString("one of ", ", ", " and ") +
+            validParametersCount.last
+        }
+        s"Invalid number of arguments for function $name. " +
+          s"Expected: $expectedNumberOfParameters; Found: ${params.length}"
       }
-      throw new AnalysisException(s"Invalid number of arguments for function $name. " +
-        s"Expected: $expectedNumberOfParameters; Found: ${params.length}")
+      throw new AnalysisException(invalidArgumentsMsg)
     }
     Try(f.newInstance(expressions : _*).asInstanceOf[Expression]) match {
       case Success(e) => e
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/UDFSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/UDFSuite.scala
index 984e1b993b910..08a98e29d2ad0 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/UDFSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/UDFSuite.scala
@@ -535,4 +535,11 @@ class UDFSuite extends QueryTest with SharedSQLContext {
     assert(info.getNote === "")
     assert(info.getExtended.contains("> SELECT upper('SparkSql');"))
   }
+
+  test("SPARK-28521 error message for CAST(parameter types contains DataType)") {
+    val e = intercept[AnalysisException] {
+      spark.sql("SELECT CAST(1)")
+    }
+    assert(e.getMessage.contains("Invalid arguments for function cast"))
+  }
 }