From f441282973b744875db2e3bede9da211993891f7 Mon Sep 17 00:00:00 2001
From: Max Gekk
Date: Mon, 24 Oct 2022 12:30:02 +0500
Subject: [PATCH 1/6] Quote function names in datatype mismatch errors

---
 .../expressions/CallMethodViaReflection.scala |  8 +++--
 .../sql/catalyst/expressions/arithmetic.scala |  8 ++---
 .../expressions/collectionOperations.scala    | 36 +++++++++++--------
 .../expressions/jsonExpressions.scala         |  8 +++--
 .../expressions/stringExpressions.scala       |  4 +--
 .../spark/sql/catalyst/util/TypeUtils.scala   |  4 +--
 .../sql-tests/results/ansi/interval.sql.out   |  4 +--
 .../sql-tests/results/ansi/map.sql.out        |  4 +--
 .../sql-tests/results/interval.sql.out        |  4 +--
 .../resources/sql-tests/results/map.sql.out   |  4 +--
 .../typeCoercion/native/mapconcat.sql.out     | 10 +++---
 11 files changed, 53 insertions(+), 41 deletions(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/CallMethodViaReflection.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/CallMethodViaReflection.scala
index 1b1d5514b3f22..fa52e6cd8517f 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/CallMethodViaReflection.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/CallMethodViaReflection.scala
@@ -22,8 +22,8 @@ import java.lang.reflect.{Method, Modifier}
 import org.apache.spark.sql.catalyst.InternalRow
 import org.apache.spark.sql.catalyst.analysis.{FunctionRegistry, TypeCheckResult}
 import org.apache.spark.sql.catalyst.analysis.TypeCheckResult.{DataTypeMismatch, TypeCheckSuccess}
-import org.apache.spark.sql.catalyst.expressions.Cast.{toSQLExpr, toSQLType}
 import org.apache.spark.sql.catalyst.expressions.codegen.CodegenFallback
+import org.apache.spark.sql.errors.QueryErrorsBase
 import org.apache.spark.sql.types._
 import org.apache.spark.unsafe.types.UTF8String
 import org.apache.spark.util.Utils
@@ -56,7 +56,9 @@ import org.apache.spark.util.Utils
   since = "2.0.0",
   group = "misc_funcs")
 case class CallMethodViaReflection(children: Seq[Expression])
-  extends Nondeterministic with CodegenFallback {
+  extends Nondeterministic
+  with CodegenFallback
+  with QueryErrorsBase {
 
   override def prettyName: String = getTagValue(FunctionRegistry.FUNC_ALIAS).getOrElse("reflect")
 
@@ -65,7 +67,7 @@ case class CallMethodViaReflection(children: Seq[Expression])
       DataTypeMismatch(
         errorSubClass = "WRONG_NUM_PARAMS",
         messageParameters = Map(
-          "functionName" -> prettyName,
+          "functionName" -> toSQLId(prettyName),
           "expectedNum" -> "> 1",
           "actualNum" -> children.length.toString))
     } else {
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala
index d82108aa3c9f8..3e8ec94c33ce8 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala
@@ -1203,7 +1203,7 @@ case class Least(children: Seq[Expression]) extends ComplexTypeMergingExpression
       DataTypeMismatch(
         errorSubClass = "WRONG_NUM_PARAMS",
         messageParameters = Map(
-          "functionName" -> prettyName,
+          "functionName" -> toSQLId(prettyName),
           "expectedNum" -> "> 1",
           "actualNum" -> children.length.toString))
     } else if (!TypeCoercion.haveSameType(inputTypesForMerging)) {
@@ -1215,7 +1215,7 @@ case class Least(children: Seq[Expression]) extends ComplexTypeMergingExpression
         )
       )
     } else {
-      TypeUtils.checkForOrderingExpr(dataType, s"function $prettyName")
+      TypeUtils.checkForOrderingExpr(dataType, prettyName)
     }
   }
 
@@ -1294,7 +1294,7 @@ case class Greatest(children: Seq[Expression]) extends ComplexTypeMergingExpress
       DataTypeMismatch(
         errorSubClass = "WRONG_NUM_PARAMS",
         messageParameters = Map(
-          "functionName" -> prettyName,
+          "functionName" -> toSQLId(prettyName),
           "expectedNum" -> "> 1",
           "actualNum" -> children.length.toString))
     } else if (!TypeCoercion.haveSameType(inputTypesForMerging)) {
@@ -1306,7 +1306,7 @@ case class Greatest(children: Seq[Expression]) extends ComplexTypeMergingExpress
         )
       )
     } else {
-      TypeUtils.checkForOrderingExpr(dataType, s"function $prettyName")
+      TypeUtils.checkForOrderingExpr(dataType, prettyName)
     }
   }
 
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala
index efaadac6ed1c8..52e2e6233bce8 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala
@@ -26,7 +26,6 @@ import org.apache.spark.sql.catalyst.InternalRow
 import org.apache.spark.sql.catalyst.analysis.{TypeCheckResult, TypeCoercion, UnresolvedAttribute, UnresolvedSeed}
 import org.apache.spark.sql.catalyst.analysis.TypeCheckResult.DataTypeMismatch
 import org.apache.spark.sql.catalyst.expressions.ArraySortLike.NullOrder
-import org.apache.spark.sql.catalyst.expressions.Cast.{toSQLExpr, toSQLType}
 import org.apache.spark.sql.catalyst.expressions.codegen._
 import org.apache.spark.sql.catalyst.expressions.codegen.Block._
 import org.apache.spark.sql.catalyst.trees.{BinaryLike, SQLQueryContext, UnaryLike}
@@ -34,7 +33,7 @@ import org.apache.spark.sql.catalyst.trees.TreePattern.{ARRAYS_ZIP, CONCAT, Tree
 import org.apache.spark.sql.catalyst.util._
 import org.apache.spark.sql.catalyst.util.DateTimeConstants._
 import org.apache.spark.sql.catalyst.util.DateTimeUtils._
-import org.apache.spark.sql.errors.QueryExecutionErrors
+import org.apache.spark.sql.errors.{QueryErrorsBase, QueryExecutionErrors}
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.types._
 import org.apache.spark.sql.util.SQLOpenHashSet
@@ -47,8 +46,10 @@ import org.apache.spark.unsafe.types.{ByteArray, CalendarInterval, UTF8String}
  * Base trait for [[BinaryExpression]]s with two arrays of the same element type and implicit
  * casting.
  */
-trait BinaryArrayExpressionWithImplicitCast extends BinaryExpression
-  with ImplicitCastInputTypes {
+trait BinaryArrayExpressionWithImplicitCast
+  extends BinaryExpression
+  with ImplicitCastInputTypes
+  with QueryErrorsBase {
 
   @transient protected lazy val elementType: DataType =
     inputTypes.head.asInstanceOf[ArrayType].elementType
@@ -72,7 +73,7 @@ trait BinaryArrayExpressionWithImplicitCast extends BinaryExpression
       DataTypeMismatch(
         errorSubClass = "BINARY_ARRAY_DIFF_TYPES",
         messageParameters = Map(
-          "functionName" -> prettyName,
+          "functionName" -> toSQLId(prettyName),
           "arrayType" -> toSQLType(ArrayType),
           "leftType" -> toSQLType(left.dataType),
           "rightType" -> toSQLType(right.dataType)
@@ -219,7 +220,10 @@ case class MapKeys(child: Expression)
   group = "map_funcs",
   since = "3.3.0")
 case class MapContainsKey(left: Expression, right: Expression)
-  extends RuntimeReplaceable with BinaryLike[Expression] with ImplicitCastInputTypes {
+  extends RuntimeReplaceable
+  with BinaryLike[Expression]
+  with ImplicitCastInputTypes
+  with QueryErrorsBase {
 
   override lazy val replacement: Expression = ArrayContains(MapKeys(left), right)
 
@@ -240,14 +244,14 @@ case class MapContainsKey(left: Expression, right: Expression)
       case (_, NullType) =>
         DataTypeMismatch(
           errorSubClass = "NULL_TYPE",
-          Map("functionName" -> prettyName))
+          Map("functionName" -> toSQLId(prettyName)))
       case (MapType(kt, _, _), dt) if kt.sameType(dt) =>
         TypeUtils.checkForOrderingExpr(kt, s"function $prettyName")
       case _ =>
         DataTypeMismatch(
           errorSubClass = "MAP_CONTAINS_KEY_DIFF_TYPES",
           messageParameters = Map(
-            "functionName" -> prettyName,
+            "functionName" -> toSQLId(prettyName),
             "dataType" -> toSQLType(MapType),
             "leftType" -> toSQLType(left.dataType),
             "rightType" -> toSQLType(right.dataType)
@@ -676,20 +680,21 @@ case class MapEntries(child: Expression)
   """,
   group = "map_funcs",
   since = "2.4.0")
-case class MapConcat(children: Seq[Expression]) extends ComplexTypeMergingExpression {
+case class MapConcat(children: Seq[Expression])
+  extends ComplexTypeMergingExpression
+  with QueryErrorsBase {
 
   override def checkInputDataTypes(): TypeCheckResult = {
-    val funcName = s"function $prettyName"
     if (children.exists(!_.dataType.isInstanceOf[MapType])) {
       DataTypeMismatch(
         errorSubClass = "MAP_CONCAT_DIFF_TYPES",
         messageParameters = Map(
-          "functionName" -> funcName,
+          "functionName" -> toSQLId(prettyName),
           "dataType" -> children.map(_.dataType).map(toSQLType).mkString("[", ", ", "]")
         )
       )
     } else {
-      val sameTypeCheck = TypeUtils.checkForSameTypeInputExpr(children.map(_.dataType), funcName)
+      val sameTypeCheck = TypeUtils.checkForSameTypeInputExpr(children.map(_.dataType), prettyName)
       if (sameTypeCheck.isFailure) {
         sameTypeCheck
       } else {
@@ -802,7 +807,10 @@ case class MapConcat(children: Seq[Expression]) extends ComplexTypeMergingExpres
   """,
   group = "map_funcs",
   since = "2.4.0")
-case class MapFromEntries(child: Expression) extends UnaryExpression with NullIntolerant {
+case class MapFromEntries(child: Expression)
+  extends UnaryExpression
+  with NullIntolerant
+  with QueryErrorsBase {
 
   @transient private lazy val dataTypeDetails: Option[(MapType, Boolean, Boolean)] =
     child.dataType match {
@@ -827,7 +835,7 @@ case class MapFromEntries(child: Expression) extends UnaryExpression with NullIn
       DataTypeMismatch(
         errorSubClass = "MAP_FROM_ENTRIES_WRONG_TYPE",
         messageParameters = Map(
-          "functionName" -> prettyName,
+          "functionName" -> toSQLId(prettyName),
           "childExpr" -> toSQLExpr(child),
           "childType" -> toSQLType(child.dataType)
         )
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/jsonExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/jsonExpressions.scala
index 959edbd1c5ae6..3529644aeeac6 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/jsonExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/jsonExpressions.scala
@@ -355,7 +355,9 @@ case class GetJsonObject(json: Expression, path: Expression)
   since = "1.6.0")
 // scalastyle:on line.size.limit line.contains.tab
 case class JsonTuple(children: Seq[Expression])
-  extends Generator with CodegenFallback {
+  extends Generator
+  with CodegenFallback
+  with QueryErrorsBase {
 
   import SharedFactory._
 
@@ -396,7 +398,7 @@ case class JsonTuple(children: Seq[Expression])
       DataTypeMismatch(
         errorSubClass = "WRONG_NUM_PARAMS",
         messageParameters = Map(
-          "functionName" -> prettyName,
+          "functionName" -> toSQLId(prettyName),
           "expectedNum" -> "> 1",
           "actualNum" -> children.length.toString))
     } else if (children.forall(child => StringType.acceptsType(child.dataType))) {
@@ -404,7 +406,7 @@ case class JsonTuple(children: Seq[Expression])
     } else {
       DataTypeMismatch(
         errorSubClass = "NON_STRING_TYPE",
-        messageParameters = Map("funcName" -> prettyName))
+        messageParameters = Map("funcName" -> toSQLId(prettyName)))
     }
   }
 
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringExpressions.scala
index 6927c4cfa3c9a..2c8cef311c373 100755
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringExpressions.scala
@@ -278,7 +278,7 @@ case class Elt(
       DataTypeMismatch(
         errorSubClass = "WRONG_NUM_PARAMS",
         messageParameters = Map(
-          "functionName" -> "elt",
+          "functionName" -> toSQLId(prettyName),
           "expectedNum" -> "> 1",
           "actualNum" -> children.length.toString
         )
@@ -305,7 +305,7 @@ case class Elt(
           )
         )
       }
-      TypeUtils.checkForSameTypeInputExpr(inputTypes, s"function $prettyName")
+      TypeUtils.checkForSameTypeInputExpr(inputTypes, prettyName)
     }
   }
 
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/TypeUtils.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/TypeUtils.scala
index 0bb5d29c5c47e..de1460eb2ea31 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/TypeUtils.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/TypeUtils.scala
@@ -35,7 +35,7 @@ object TypeUtils extends QueryErrorsBase {
       DataTypeMismatch(
         errorSubClass = "INVALID_ORDERING_TYPE",
         Map(
-          "functionName" -> caller,
+          "functionName" -> toSQLId(caller),
           "dataType" -> toSQLType(dt)
         )
       )
@@ -49,7 +49,7 @@ object TypeUtils extends QueryErrorsBase {
       DataTypeMismatch(
         errorSubClass = "DATA_DIFF_TYPES",
         messageParameters = Map(
-          "functionName" -> caller,
+          "functionName" -> toSQLId(caller),
           "dataType" -> types.map(toSQLType).mkString("(", " or ", ")")
         )
       )
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
index 2078d3d8eb686..65d32adb122ef 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
@@ -3609,7 +3609,7 @@ org.apache.spark.sql.AnalysisException
   "errorClass" : "DATATYPE_MISMATCH.DATA_DIFF_TYPES",
   "messageParameters" : {
     "dataType" : "(\"INTERVAL MONTH\" or \"INTERVAL DAY\")",
-    "functionName" : "function array",
+    "functionName" : "`function array`",
    "sqlExpr" : "\"array(INTERVAL '1' MONTH, INTERVAL '20' DAY)\""
   },
   "queryContext" : [ {
@@ -3648,7 +3648,7 @@ org.apache.spark.sql.AnalysisException
   "errorClass" : "DATATYPE_MISMATCH.DATA_DIFF_TYPES",
   "messageParameters" : {
     "dataType" : "(\"INTERVAL MONTH\" or \"INTERVAL DAY\")",
-    "functionName" : "function coalesce",
+    "functionName" : "`function coalesce`",
     "sqlExpr" : "\"coalesce(INTERVAL '1' MONTH, INTERVAL '20' DAY)\""
   },
   "queryContext" : [ {
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/map.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/map.sql.out
index a550dbbec8820..a9b577dd4c37c 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/map.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/map.sql.out
@@ -73,7 +73,7 @@ org.apache.spark.sql.AnalysisException
   "errorClass" : "DATATYPE_MISMATCH.MAP_CONTAINS_KEY_DIFF_TYPES",
   "messageParameters" : {
     "dataType" : "\"MAP\"",
-    "functionName" : "map_contains_key",
+    "functionName" : "`map_contains_key`",
     "leftType" : "\"MAP\"",
     "rightType" : "\"INT\"",
     "sqlExpr" : "\"map_contains_key(map(1, a, 2, b), 1)\""
@@ -98,7 +98,7 @@ org.apache.spark.sql.AnalysisException
   "errorClass" : "DATATYPE_MISMATCH.MAP_CONTAINS_KEY_DIFF_TYPES",
   "messageParameters" : {
     "dataType" : "\"MAP\"",
-    "functionName" : "map_contains_key",
+    "functionName" : "`map_contains_key`",
     "leftType" : "\"MAP\"",
     "rightType" : "\"STRING\"",
     "sqlExpr" : "\"map_contains_key(map(1, a, 2, b), 1)\""
diff --git a/sql/core/src/test/resources/sql-tests/results/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/interval.sql.out
index 6eb5fb4ce8447..fd47b65007b5c 100644
--- a/sql/core/src/test/resources/sql-tests/results/interval.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/interval.sql.out
@@ -3422,7 +3422,7 @@ org.apache.spark.sql.AnalysisException
   "errorClass" : "DATATYPE_MISMATCH.DATA_DIFF_TYPES",
   "messageParameters" : {
     "dataType" : "(\"INTERVAL MONTH\" or \"INTERVAL DAY\")",
-    "functionName" : "function array",
+    "functionName" : "`function array`",
     "sqlExpr" : "\"array(INTERVAL '1' MONTH, INTERVAL '20' DAY)\""
   },
   "queryContext" : [ {
@@ -3461,7 +3461,7 @@ org.apache.spark.sql.AnalysisException
   "errorClass" : "DATATYPE_MISMATCH.DATA_DIFF_TYPES",
   "messageParameters" : {
     "dataType" : "(\"INTERVAL MONTH\" or \"INTERVAL DAY\")",
-    "functionName" : "function coalesce",
+    "functionName" : "`function coalesce`",
     "sqlExpr" : "\"coalesce(INTERVAL '1' MONTH, INTERVAL '20' DAY)\""
   },
   "queryContext" : [ {
diff --git a/sql/core/src/test/resources/sql-tests/results/map.sql.out b/sql/core/src/test/resources/sql-tests/results/map.sql.out
index a550dbbec8820..a9b577dd4c37c 100644
--- a/sql/core/src/test/resources/sql-tests/results/map.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/map.sql.out
@@ -73,7 +73,7 @@ org.apache.spark.sql.AnalysisException
   "errorClass" : "DATATYPE_MISMATCH.MAP_CONTAINS_KEY_DIFF_TYPES",
   "messageParameters" : {
     "dataType" : "\"MAP\"",
-    "functionName" : "map_contains_key",
+    "functionName" : "`map_contains_key`",
     "leftType" : "\"MAP\"",
     "rightType" : "\"INT\"",
     "sqlExpr" : "\"map_contains_key(map(1, a, 2, b), 1)\""
@@ -98,7 +98,7 @@ org.apache.spark.sql.AnalysisException
   "errorClass" : "DATATYPE_MISMATCH.MAP_CONTAINS_KEY_DIFF_TYPES",
   "messageParameters" : {
     "dataType" : "\"MAP\"",
-    "functionName" : "map_contains_key",
+    "functionName" : "`map_contains_key`",
     "leftType" : "\"MAP\"",
     "rightType" : "\"STRING\"",
     "sqlExpr" : "\"map_contains_key(map(1, a, 2, b), 1)\""
diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/mapconcat.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/mapconcat.sql.out
index 0e2b0cf2789ed..726356b7896db 100644
--- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/mapconcat.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/mapconcat.sql.out
@@ -95,7 +95,7 @@ org.apache.spark.sql.AnalysisException
   "errorClass" : "DATATYPE_MISMATCH.DATA_DIFF_TYPES",
   "messageParameters" : {
     "dataType" : "(\"MAP\" or \"MAP, ARRAY>\")",
-    "functionName" : "function map_concat",
+    "functionName" : "`map_concat`",
     "sqlExpr" : "\"map_concat(tinyint_map1, array_map1)\""
   },
   "queryContext" : [ {
@@ -120,7 +120,7 @@ org.apache.spark.sql.AnalysisException
   "errorClass" : "DATATYPE_MISMATCH.DATA_DIFF_TYPES",
   "messageParameters" : {
     "dataType" : "(\"MAP\" or \"MAP\")",
-    "functionName" : "function map_concat",
+    "functionName" : "`map_concat`",
     "sqlExpr" : "\"map_concat(boolean_map1, int_map2)\""
   },
   "queryContext" : [ {
@@ -145,7 +145,7 @@ org.apache.spark.sql.AnalysisException
   "errorClass" : "DATATYPE_MISMATCH.DATA_DIFF_TYPES",
   "messageParameters" : {
     "dataType" : "(\"MAP\" or \"MAP, STRUCT>\")",
-    "functionName" : "function map_concat",
+    "functionName" : "`map_concat`",
     "sqlExpr" : "\"map_concat(int_map1, struct_map2)\""
   },
   "queryContext" : [ {
@@ -170,7 +170,7 @@ org.apache.spark.sql.AnalysisException
   "errorClass" : "DATATYPE_MISMATCH.DATA_DIFF_TYPES",
   "messageParameters" : {
     "dataType" : "(\"MAP, STRUCT>\" or \"MAP, ARRAY>\")",
-    "functionName" : "function map_concat",
+    "functionName" : "`map_concat`",
     "sqlExpr" : "\"map_concat(struct_map1, array_map2)\""
   },
   "queryContext" : [ {
@@ -195,7 +195,7 @@ org.apache.spark.sql.AnalysisException
   "errorClass" : "DATATYPE_MISMATCH.DATA_DIFF_TYPES",
   "messageParameters" : {
     "dataType" : "(\"MAP\" or \"MAP, ARRAY>\")",
-    "functionName" : "function map_concat",
+    "functionName" : "`map_concat`",
     "sqlExpr" : "\"map_concat(int_map1, array_map2)\""
   },
   "queryContext" : [ {
From 38e685d9717388cf24e2478d100e8eadb162e420 Mon Sep 17 00:00:00 2001
From: Max Gekk
Date: Mon, 24 Oct 2022 19:22:42 +0300
Subject: [PATCH 2/6] Don't pass the function word

---
 .../spark/sql/catalyst/expressions/collectionOperations.scala   | 2 +-
 .../spark/sql/catalyst/expressions/complexTypeCreator.scala     | 2 +-
 .../apache/spark/sql/catalyst/expressions/mathExpressions.scala | 2 +-
 .../apache/spark/sql/catalyst/expressions/nullExpressions.scala | 2 +-
 .../spark/sql/catalyst/expressions/stringExpressions.scala      | 2 +-
 5 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala
index 52e2e6233bce8..d631bb65c17b5 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala
@@ -2427,7 +2427,7 @@ case class Concat(children: Seq[Expression]) extends ComplexTypeMergingExpressio
           s" ${BinaryType.simpleString} or ${ArrayType.simpleString}, but it's " +
           childTypes.map(_.catalogString).mkString("[", ", ", "]"))
     }
-    TypeUtils.checkForSameTypeInputExpr(childTypes, s"function $prettyName")
+    TypeUtils.checkForSameTypeInputExpr(childTypes, prettyName)
   }
 }
 
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeCreator.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeCreator.scala
index c6ae14e5e3c9a..27d4f506ac864 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeCreator.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeCreator.scala
@@ -68,7 +68,7 @@ case class CreateArray(children: Seq[Expression], useStringTypeWhenEmpty: Boolea
   override def stringArgs: Iterator[Any] = super.stringArgs.take(1)
 
   override def checkInputDataTypes(): TypeCheckResult = {
-    TypeUtils.checkForSameTypeInputExpr(children.map(_.dataType), s"function $prettyName")
+    TypeUtils.checkForSameTypeInputExpr(children.map(_.dataType), prettyName)
   }
 
   private val defaultElementType: DataType = {
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/mathExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/mathExpressions.scala
index 5643598b4bd56..999791e96e5cd 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/mathExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/mathExpressions.scala
@@ -1788,7 +1788,7 @@ case class WidthBucket(
           TypeCheckSuccess
         case _ =>
           val types = Seq(value.dataType, minValue.dataType, maxValue.dataType)
-          TypeUtils.checkForSameTypeInputExpr(types, s"function $prettyName")
+          TypeUtils.checkForSameTypeInputExpr(types, prettyName)
       }
     case f => f
   }
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/nullExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/nullExpressions.scala
index 8d171c2c6631d..1e6cc356173eb 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/nullExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/nullExpressions.scala
@@ -60,7 +60,7 @@ case class Coalesce(children: Seq[Expression])
       TypeCheckResult.TypeCheckFailure(
         s"input to function $prettyName requires at least one argument")
     } else {
-      TypeUtils.checkForSameTypeInputExpr(children.map(_.dataType), s"function $prettyName")
+      TypeUtils.checkForSameTypeInputExpr(children.map(_.dataType), prettyName)
     }
   }
 
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringExpressions.scala
index 2c8cef311c373..8ae4bb9c29c0c 100755
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringExpressions.scala
@@ -782,7 +782,7 @@ case class Overlay(input: Expression, replace: Expression, pos: Expression, len:
     val inputTypeCheck = super.checkInputDataTypes()
     if (inputTypeCheck.isSuccess) {
       TypeUtils.checkForSameTypeInputExpr(
-        input.dataType :: replace.dataType :: Nil, s"function $prettyName")
+        input.dataType :: replace.dataType :: Nil, prettyName)
     } else {
       inputTypeCheck
     }
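
With this patch, callers hand TypeUtils the bare prettyName and the quoting happens exactly once, inside TypeUtils. A hedged sketch of the resulting division of labour (simplified signatures, not the real ones):

    // Sketch only: simplified model of TypeUtils after patches 1 and 2.
    object TypeUtilsSketch {
      def toSQLId(name: String): String = "`" + name + "`"

      // Callers now pass "array", "coalesce", ... instead of "function array".
      def dataDiffTypesParams(caller: String, types: Seq[String]): Map[String, String] =
        Map(
          "functionName" -> toSQLId(caller),
          "dataType" -> types.mkString("(", " or ", ")"))

      def main(args: Array[String]): Unit = {
        // Before this patch, functionName would have rendered as `function array`.
        println(dataDiffTypesParams("array", Seq("\"INTERVAL MONTH\"", "\"INTERVAL DAY\"")))
      }
    }
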
From 8af39c42373ba7fb45cfa0aa85e4cd55eb1536fa Mon Sep 17 00:00:00 2001
From: Max Gekk
Date: Mon, 24 Oct 2022 19:39:33 +0300
Subject: [PATCH 3/6] Re-gen sql.out

---
 .../test/resources/sql-tests/results/ansi/interval.sql.out | 4 ++--
 .../src/test/resources/sql-tests/results/interval.sql.out  | 4 ++--
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
index 65d32adb122ef..18ba4fb0ab7d5 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
@@ -3609,7 +3609,7 @@ org.apache.spark.sql.AnalysisException
   "errorClass" : "DATATYPE_MISMATCH.DATA_DIFF_TYPES",
   "messageParameters" : {
     "dataType" : "(\"INTERVAL MONTH\" or \"INTERVAL DAY\")",
-    "functionName" : "`function array`",
+    "functionName" : "`array`",
     "sqlExpr" : "\"array(INTERVAL '1' MONTH, INTERVAL '20' DAY)\""
   },
   "queryContext" : [ {
@@ -3648,7 +3648,7 @@ org.apache.spark.sql.AnalysisException
   "errorClass" : "DATATYPE_MISMATCH.DATA_DIFF_TYPES",
   "messageParameters" : {
     "dataType" : "(\"INTERVAL MONTH\" or \"INTERVAL DAY\")",
-    "functionName" : "`function coalesce`",
+    "functionName" : "`coalesce`",
     "sqlExpr" : "\"coalesce(INTERVAL '1' MONTH, INTERVAL '20' DAY)\""
   },
   "queryContext" : [ {
diff --git a/sql/core/src/test/resources/sql-tests/results/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/interval.sql.out
index fd47b65007b5c..bdb9ba81ff31f 100644
--- a/sql/core/src/test/resources/sql-tests/results/interval.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/interval.sql.out
@@ -3422,7 +3422,7 @@ org.apache.spark.sql.AnalysisException
   "errorClass" : "DATATYPE_MISMATCH.DATA_DIFF_TYPES",
   "messageParameters" : {
     "dataType" : "(\"INTERVAL MONTH\" or \"INTERVAL DAY\")",
-    "functionName" : "`function array`",
+    "functionName" : "`array`",
     "sqlExpr" : "\"array(INTERVAL '1' MONTH, INTERVAL '20' DAY)\""
   },
   "queryContext" : [ {
@@ -3461,7 +3461,7 @@ org.apache.spark.sql.AnalysisException
   "errorClass" : "DATATYPE_MISMATCH.DATA_DIFF_TYPES",
   "messageParameters" : {
     "dataType" : "(\"INTERVAL MONTH\" or \"INTERVAL DAY\")",
-    "functionName" : "`function coalesce`",
+    "functionName" : "`coalesce`",
     "sqlExpr" : "\"coalesce(INTERVAL '1' MONTH, INTERVAL '20' DAY)\""
  },
   "queryContext" : [ {
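
The two .sql.out files above are generated golden files; after the source change in patch 2 they are regenerated rather than edited by hand (conventionally by rerunning the SQL query test suites with SPARK_GENERATE_GOLDEN_FILES=1 set in the environment), which is why this commit touches only expected outputs.
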
From 799867bc54300593323e8a8f9345b7b554989a5c Mon Sep 17 00:00:00 2001
From: Max Gekk
Date: Mon, 24 Oct 2022 20:14:50 +0300
Subject: [PATCH 4/6] Remove the function word

---
 .../catalyst/expressions/aggregate/Max.scala  |  2 +-
 .../expressions/aggregate/MaxByAndMinBy.scala |  2 +-
 .../catalyst/expressions/aggregate/Min.scala  |  2 +-
 .../expressions/collectionOperations.scala    | 19 +++++++++----------
 .../expressions/complexTypeExtractors.scala   |  2 +-
 .../expressions/higherOrderFunctions.scala    |  2 +-
 .../sql/catalyst/expressions/predicates.scala |  6 +++---
 .../expressions/StringExpressionsSuite.scala  |  2 +-
 8 files changed, 18 insertions(+), 19 deletions(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/Max.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/Max.scala
index b802678ec0468..902f53309de41 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/Max.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/Max.scala
@@ -41,7 +41,7 @@ case class Max(child: Expression) extends DeclarativeAggregate with UnaryLike[Ex
   override def dataType: DataType = child.dataType
 
   override def checkInputDataTypes(): TypeCheckResult =
-    TypeUtils.checkForOrderingExpr(child.dataType, "function max")
+    TypeUtils.checkForOrderingExpr(child.dataType, prettyName)
 
   private lazy val max = AttributeReference("max", child.dataType)()
 
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/MaxByAndMinBy.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/MaxByAndMinBy.scala
index 664bc32ccc464..096a42686a366 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/MaxByAndMinBy.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/MaxByAndMinBy.scala
@@ -47,7 +47,7 @@ abstract class MaxMinBy extends DeclarativeAggregate with BinaryLike[Expression]
   override def dataType: DataType = valueExpr.dataType
 
   override def checkInputDataTypes(): TypeCheckResult =
-    TypeUtils.checkForOrderingExpr(orderingExpr.dataType, s"function $prettyName")
+    TypeUtils.checkForOrderingExpr(orderingExpr.dataType, prettyName)
 
   // The attributes used to keep extremum (max or min) and associated aggregated values.
   private lazy val extremumOrdering =
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/Min.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/Min.scala
index 9c5c7bbda4dc7..7a9588808dbdb 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/Min.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/Min.scala
@@ -41,7 +41,7 @@ case class Min(child: Expression) extends DeclarativeAggregate with UnaryLike[Ex
   override def dataType: DataType = child.dataType
 
   override def checkInputDataTypes(): TypeCheckResult =
-    TypeUtils.checkForOrderingExpr(child.dataType, "function min")
+    TypeUtils.checkForOrderingExpr(child.dataType, prettyName)
 
   private lazy val min = AttributeReference("min", child.dataType)()
 
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala
index d631bb65c17b5..256139aca0144 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala
@@ -246,7 +246,7 @@ case class MapContainsKey(left: Expression, right: Expression)
           errorSubClass = "NULL_TYPE",
           Map("functionName" -> toSQLId(prettyName)))
       case (MapType(kt, _, _), dt) if kt.sameType(dt) =>
-        TypeUtils.checkForOrderingExpr(kt, s"function $prettyName")
+        TypeUtils.checkForOrderingExpr(kt, prettyName)
       case _ =>
         DataTypeMismatch(
           errorSubClass = "MAP_CONTAINS_KEY_DIFF_TYPES",
@@ -1298,7 +1298,7 @@ case class ArrayContains(left: Expression, right: Expression)
       case (_, NullType) =>
         TypeCheckResult.TypeCheckFailure("Null typed values cannot be used as arguments")
       case (ArrayType(e1, _), e2) if e1.sameType(e2) =>
-        TypeUtils.checkForOrderingExpr(e2, s"function $prettyName")
+        TypeUtils.checkForOrderingExpr(e2, prettyName)
       case _ => TypeCheckResult.TypeCheckFailure(s"Input to function $prettyName should have " +
         s"been ${ArrayType.simpleString} followed by a value with same element type, but it's " +
         s"[${left.dataType.catalogString}, ${right.dataType.catalogString}].")
@@ -1381,7 +1381,7 @@ case class ArraysOverlap(left: Expression, right: Expression)
 
   override def checkInputDataTypes(): TypeCheckResult = super.checkInputDataTypes() match {
     case TypeCheckResult.TypeCheckSuccess =>
-      TypeUtils.checkForOrderingExpr(elementType, s"function $prettyName")
+      TypeUtils.checkForOrderingExpr(elementType, prettyName)
     case failure => failure
   }
 
@@ -1909,7 +1909,7 @@ case class ArrayMin(child: Expression)
   override def checkInputDataTypes(): TypeCheckResult = {
     val typeCheckResult = super.checkInputDataTypes()
     if (typeCheckResult.isSuccess) {
-      TypeUtils.checkForOrderingExpr(dataType, s"function $prettyName")
+      TypeUtils.checkForOrderingExpr(dataType, prettyName)
     } else {
       typeCheckResult
     }
@@ -1982,7 +1982,7 @@ case class ArrayMax(child: Expression)
   override def checkInputDataTypes(): TypeCheckResult = {
     val typeCheckResult = super.checkInputDataTypes()
     if (typeCheckResult.isSuccess) {
-      TypeUtils.checkForOrderingExpr(dataType, s"function $prettyName")
+      TypeUtils.checkForOrderingExpr(dataType, prettyName)
     } else {
       typeCheckResult
     }
@@ -2071,7 +2071,7 @@ case class ArrayPosition(left: Expression, right: Expression)
   override def checkInputDataTypes(): TypeCheckResult = {
     (left.dataType, right.dataType) match {
       case (ArrayType(e1, _), e2) if e1.sameType(e2) =>
-        TypeUtils.checkForOrderingExpr(e2, s"function $prettyName")
+        TypeUtils.checkForOrderingExpr(e2, prettyName)
       case _ => TypeCheckResult.TypeCheckFailure(s"Input to function $prettyName should have " +
         s"been ${ArrayType.simpleString} followed by a value with same element type, but it's " +
         s"[${left.dataType.catalogString}, ${right.dataType.catalogString}].")
@@ -3481,7 +3481,7 @@ case class ArrayRemove(left: Expression, right: Expression)
   override def checkInputDataTypes(): TypeCheckResult = {
     (left.dataType, right.dataType) match {
       case (ArrayType(e1, _), e2) if e1.sameType(e2) =>
-        TypeUtils.checkForOrderingExpr(e2, s"function $prettyName")
+        TypeUtils.checkForOrderingExpr(e2, prettyName)
       case _ => TypeCheckResult.TypeCheckFailure(s"Input to function $prettyName should have " +
         s"been ${ArrayType.simpleString} followed by a value with same element type, but it's " +
         s"[${left.dataType.catalogString}, ${right.dataType.catalogString}].")
@@ -3681,7 +3681,7 @@ case class ArrayDistinct(child: Expression)
     super.checkInputDataTypes() match {
       case f if f.isFailure => f
      case TypeCheckResult.TypeCheckSuccess =>
-        TypeUtils.checkForOrderingExpr(elementType, s"function $prettyName")
+        TypeUtils.checkForOrderingExpr(elementType, prettyName)
     }
   }
 
@@ -3836,8 +3836,7 @@ trait ArrayBinaryLike
   override def checkInputDataTypes(): TypeCheckResult = {
     val typeCheckResult = super.checkInputDataTypes()
     if (typeCheckResult.isSuccess) {
-      TypeUtils.checkForOrderingExpr(dataType.asInstanceOf[ArrayType].elementType,
-        s"function $prettyName")
+      TypeUtils.checkForOrderingExpr(dataType.asInstanceOf[ArrayType].elementType, prettyName)
     } else {
       typeCheckResult
     }
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeExtractors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeExtractors.scala
index 274de47ee7525..d0ef5365bc945 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeExtractors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/complexTypeExtractors.scala
@@ -444,7 +444,7 @@ case class GetMapValue(child: Expression, key: Expression)
     super.checkInputDataTypes() match {
       case f if f.isFailure => f
       case TypeCheckResult.TypeCheckSuccess =>
-        TypeUtils.checkForOrderingExpr(keyType, s"function $prettyName")
+        TypeUtils.checkForOrderingExpr(keyType, prettyName)
     }
   }
 
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/higherOrderFunctions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/higherOrderFunctions.scala
index 5b8b4b3f621ee..98513fb5dddff 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/higherOrderFunctions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/higherOrderFunctions.scala
@@ -1023,7 +1023,7 @@ case class MapZipWith(left: Expression, right: Expression, function: Expression)
     super.checkArgumentDataTypes() match {
       case TypeCheckResult.TypeCheckSuccess =>
         if (leftKeyType.sameType(rightKeyType)) {
-          TypeUtils.checkForOrderingExpr(leftKeyType, s"function $prettyName")
+          TypeUtils.checkForOrderingExpr(leftKeyType, prettyName)
         } else {
           TypeCheckResult.TypeCheckFailure(s"The input to function $prettyName should have " +
             s"been two ${MapType.simpleString}s with compatible key types, but the key types are " +
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala
index 21f65cb3402ee..8795df0cab88e 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala
@@ -404,7 +404,7 @@ case class InSubquery(values: Seq[Expression], query: ListQuery)
            |Right side:
            |[${query.childOutputs.map(_.dataType.catalogString).mkString(", ")}].""".stripMargin)
     } else {
-      TypeUtils.checkForOrderingExpr(value.dataType, s"function $prettyName")
+      TypeUtils.checkForOrderingExpr(value.dataType, prettyName)
     }
   }
 
@@ -453,7 +453,7 @@ case class In(value: Expression, list: Seq[Expression]) extends Predicate {
       TypeCheckResult.TypeCheckFailure(s"Arguments must be same type but were: " +
         s"${value.dataType.catalogString} != ${mismatchOpt.get.dataType.catalogString}")
     } else {
-      TypeUtils.checkForOrderingExpr(value.dataType, s"function $prettyName")
+      TypeUtils.checkForOrderingExpr(value.dataType, prettyName)
     }
   }
 
@@ -934,7 +934,7 @@ abstract class BinaryComparison extends BinaryOperator with Predicate {
 
   override def checkInputDataTypes(): TypeCheckResult = super.checkInputDataTypes() match {
     case TypeCheckResult.TypeCheckSuccess =>
-      TypeUtils.checkForOrderingExpr(left.dataType, this.getClass.getSimpleName)
+      TypeUtils.checkForOrderingExpr(left.dataType, prettyName)
     case failure => failure
   }
 
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/StringExpressionsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/StringExpressionsSuite.scala
index fce94bf02a0b0..94ae774070c8a 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/StringExpressionsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/StringExpressionsSuite.scala
@@ -1594,7 +1594,7 @@ class StringExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
       DataTypeMismatch(
         errorSubClass = "WRONG_NUM_PARAMS",
         messageParameters = Map(
-          "functionName" -> "elt",
+          "functionName" -> "`elt`",
          "expectedNum" -> "> 1",
           "actualNum" -> "1"
         )
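
Patch 4 also passes prettyName for binary comparisons, which the next patch refines to symbol: for an operator such as EqualTo the user-facing name is the SQL operator, and the updated tests below expect the quoted operator "`=`" rather than a class name. A sketch of the distinction between the two name sources (hypothetical simplified classes, not Spark's Expression hierarchy):

    // Sketch only: why symbol reads better than prettyName for operators.
    abstract class ComparisonSketch {
      def symbol: String // SQL operator, e.g. "="
      // Assumed default: the lower-cased class name, e.g. "equalto".
      def prettyName: String =
        getClass.getSimpleName.stripSuffix("$").toLowerCase
    }

    object EqualToSketch extends ComparisonSketch {
      override def symbol: String = "="
    }

    object NameDemo {
      def main(args: Array[String]): Unit = {
        println(EqualToSketch.prettyName) // equaltosketch -- opaque in an error message
        println(EqualToSketch.symbol)     // =             -- what the fixed tests expect
      }
    }
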
From 083f8501ee96e348eeb6ffc774e88d6d4659c153 Mon Sep 17 00:00:00 2001
From: Max Gekk
Date: Mon, 24 Oct 2022 22:52:03 +0300
Subject: [PATCH 5/6] Fix tests

---
 .../sql/catalyst/expressions/predicates.scala |  2 +-
 .../analysis/AnalysisErrorSuite.scala         |  2 +-
 .../ExpressionTypeCheckingSuite.scala         | 26 +++++++++----------
 .../catalyst/expressions/PredicateSuite.scala |  2 +-
 .../spark/sql/DataFrameAggregateSuite.scala   |  4 +--
 5 files changed, 18 insertions(+), 18 deletions(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala
index 8795df0cab88e..899ece6f5297d 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/predicates.scala
@@ -934,7 +934,7 @@ abstract class BinaryComparison extends BinaryOperator with Predicate {
 
   override def checkInputDataTypes(): TypeCheckResult = super.checkInputDataTypes() match {
     case TypeCheckResult.TypeCheckSuccess =>
-      TypeUtils.checkForOrderingExpr(left.dataType, prettyName)
+      TypeUtils.checkForOrderingExpr(left.dataType, symbol)
     case failure => failure
   }
 
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisErrorSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisErrorSuite.scala
index c44a0852b85c3..ecd5b9e22fb24 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisErrorSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisErrorSuite.scala
@@ -725,7 +725,7 @@ class AnalysisErrorSuite extends AnalysisTest {
       inputPlan = plan2,
       expectedErrorClass = "DATATYPE_MISMATCH.INVALID_ORDERING_TYPE",
       expectedMessageParameters = Map(
-        "functionName" -> "EqualTo",
+        "functionName" -> "`=`",
         "dataType" -> "\"MAP\"",
         "sqlExpr" -> "\"(b = d)\""
       ),
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/ExpressionTypeCheckingSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/ExpressionTypeCheckingSuite.scala
index 0d66ad4b06848..e3829311e2dc5 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/ExpressionTypeCheckingSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/ExpressionTypeCheckingSuite.scala
@@ -298,7 +298,7 @@ class ExpressionTypeCheckingSuite extends SparkFunSuite with SQLHelper with Quer
       expr = EqualTo($"mapField", $"mapField"),
       messageParameters = Map(
         "sqlExpr" -> "\"(mapField = mapField)\"",
-        "functionName" -> "EqualTo",
+        "functionName" -> "`=`",
         "dataType" -> "\"MAP\""
       )
     )
@@ -306,7 +306,7 @@ class ExpressionTypeCheckingSuite extends SparkFunSuite with SQLHelper with Quer
       expr = EqualTo($"mapField", $"mapField"),
       messageParameters = Map(
         "sqlExpr" -> "\"(mapField = mapField)\"",
-        "functionName" -> "EqualTo",
+        "functionName" -> "`=`",
         "dataType" -> "\"MAP\""
       )
     )
@@ -314,7 +314,7 @@ class ExpressionTypeCheckingSuite extends SparkFunSuite with SQLHelper with Quer
       expr = EqualNullSafe($"mapField", $"mapField"),
       messageParameters = Map(
         "sqlExpr" -> "\"(mapField <=> mapField)\"",
-        "functionName" -> "EqualNullSafe",
+        "functionName" -> "`<=>`",
         "dataType" -> "\"MAP\""
       )
     )
@@ -322,7 +322,7 @@ class ExpressionTypeCheckingSuite extends SparkFunSuite with SQLHelper with Quer
       expr = LessThan($"mapField", $"mapField"),
       messageParameters = Map(
         "sqlExpr" -> "\"(mapField < mapField)\"",
-        "functionName" -> "LessThan",
+        "functionName" -> "`<`",
         "dataType" -> "\"MAP\""
       )
     )
@@ -330,7 +330,7 @@ class ExpressionTypeCheckingSuite extends SparkFunSuite with SQLHelper with Quer
       expr = LessThanOrEqual($"mapField", $"mapField"),
       messageParameters = Map(
         "sqlExpr" -> "\"(mapField <= mapField)\"",
-        "functionName" -> "LessThanOrEqual",
+        "functionName" -> "`<=`",
         "dataType" -> "\"MAP\""
       )
     )
@@ -338,7 +338,7 @@ class ExpressionTypeCheckingSuite extends SparkFunSuite with SQLHelper with Quer
       expr = GreaterThan($"mapField", $"mapField"),
       messageParameters = Map(
         "sqlExpr" -> "\"(mapField > mapField)\"",
-        "functionName" -> "GreaterThan",
+        "functionName" -> "`>`",
         "dataType" -> "\"MAP\""
       )
     )
@@ -346,7 +346,7 @@ class ExpressionTypeCheckingSuite extends SparkFunSuite with SQLHelper with Quer
       expr = GreaterThanOrEqual($"mapField", $"mapField"),
       messageParameters = Map(
         "sqlExpr" -> "\"(mapField >= mapField)\"",
-        "functionName" -> "GreaterThanOrEqual",
+        "functionName" -> "`>=`",
         "dataType" -> "\"MAP\""
       )
     )
@@ -385,7 +385,7 @@ class ExpressionTypeCheckingSuite extends SparkFunSuite with SQLHelper with Quer
       expr = Min($"mapField"),
       messageParameters = Map(
         "sqlExpr" -> "\"min(mapField)\"",
-        "functionName" -> "function min",
+        "functionName" -> "`min`",
         "dataType" -> "\"MAP\""
       )
     )
@@ -393,7 +393,7 @@ class ExpressionTypeCheckingSuite extends SparkFunSuite with SQLHelper with Quer
       expr = Max($"mapField"),
       messageParameters = Map(
         "sqlExpr" -> "\"max(mapField)\"",
-        "functionName" -> "function max",
+        "functionName" -> "`max`",
         "dataType" -> "\"MAP\""
       )
     )
@@ -427,7 +427,7 @@ class ExpressionTypeCheckingSuite extends SparkFunSuite with SQLHelper with Quer
       expr = CreateArray(Seq($"intField", $"booleanField")),
       messageParameters = Map(
         "sqlExpr" -> "\"array(intField, booleanField)\"",
-        "functionName" -> "function array",
+        "functionName" -> "`array`",
         "dataType" -> "(\"INT\" or \"BOOLEAN\")"
       )
     )
@@ -435,7 +435,7 @@ class ExpressionTypeCheckingSuite extends SparkFunSuite with SQLHelper with Quer
       expr = Coalesce(Seq($"intField", $"booleanField")),
       messageParameters = Map(
         "sqlExpr" -> "\"coalesce(intField, booleanField)\"",
-        "functionName" -> "function coalesce",
+        "functionName" -> "`coalesce`",
         "dataType" -> "(\"INT\" or \"BOOLEAN\")"
       )
     )
@@ -601,7 +601,7 @@ class ExpressionTypeCheckingSuite extends SparkFunSuite with SQLHelper with Quer
       expr = expr1,
       messageParameters = Map(
         "sqlExpr" -> toSQLExpr(expr1),
-        "functionName" -> expr1.prettyName,
+        "functionName" -> toSQLId(expr1.prettyName),
         "expectedNum" -> "> 1",
         "actualNum" -> "1")
     )
@@ -621,7 +621,7 @@ class ExpressionTypeCheckingSuite extends SparkFunSuite with SQLHelper with Quer
       expr = expr3,
       messageParameters = Map(
         "sqlExpr" -> toSQLExpr(expr3),
-        "functionName" -> s"function ${expr3.prettyName}",
+        "functionName" -> s"`${expr3.prettyName}`",
         "dataType" -> "\"MAP\""
       )
     )
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/PredicateSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/PredicateSuite.scala
index 5e5d0f7445e37..73cc9aca56828 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/PredicateSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/PredicateSuite.scala
@@ -242,7 +242,7 @@ class PredicateSuite extends SparkFunSuite with ExpressionEvalHelper {
       case TypeCheckResult.DataTypeMismatch(errorSubClass, messageParameters) =>
         assert(errorSubClass == "INVALID_ORDERING_TYPE")
         assert(messageParameters === Map(
-          "functionName" -> "function in", "dataType" -> "\"MAP\""))
+          "functionName" -> "`in`", "dataType" -> "\"MAP\""))
     }
   }
 
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameAggregateSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameAggregateSuite.scala
index 54911d2a6fb61..ff8dd596ebe16 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameAggregateSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameAggregateSuite.scala
@@ -918,7 +918,7 @@ class DataFrameAggregateSuite extends QueryTest
       errorClass = "DATATYPE_MISMATCH.INVALID_ORDERING_TYPE",
       sqlState = None,
       parameters = Map(
-        "functionName" -> "function max_by",
+        "functionName" -> "`max_by`",
         "dataType" -> "\"MAP\"",
         "sqlExpr" -> "\"max_by(x, y)\""
       ),
@@ -988,7 +988,7 @@ class DataFrameAggregateSuite extends QueryTest
       errorClass = "DATATYPE_MISMATCH.INVALID_ORDERING_TYPE",
       sqlState = None,
       parameters = Map(
-        "functionName" -> "function min_by",
+        "functionName" -> "`min_by`",
         "dataType" -> "\"MAP\"",
         "sqlExpr" -> "\"min_by(x, y)\""
       ),
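
The final patch updates the remaining expectations in DataFrameFunctionsSuite. For reference, a hedged sketch of how one of these assertions looks as a standalone test (the suite and test names are invented; the parameters mirror the greatest() hunks below):

    import org.apache.spark.sql.{AnalysisException, QueryTest}
    import org.apache.spark.sql.functions.greatest
    import org.apache.spark.sql.test.SharedSparkSession

    // Sketch only: standalone suite mirroring the updated expectations.
    class QuotedFunctionNameSuite extends QueryTest with SharedSparkSession {
      test("greatest() reports its name quoted with backticks") {
        checkError(
          exception = intercept[AnalysisException] {
            spark.range(1).select(greatest())
          },
          errorClass = "DATATYPE_MISMATCH.WRONG_NUM_PARAMS",
          sqlState = None,
          parameters = Map(
            "sqlExpr" -> "\"greatest()\"",
            "functionName" -> "`greatest`",
            "expectedNum" -> "> 1",
            "actualNum" -> "0"))
      }
    }
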
From 2d2d78856d4205a5fc3572e4bc7421cdd334e03b Mon Sep 17 00:00:00 2001
From: Max Gekk
Date: Tue, 25 Oct 2022 08:24:00 +0300
Subject: [PATCH 6/6] Fix DataFrameFunctionsSuite

---
 .../spark/sql/DataFrameFunctionsSuite.scala   | 18 +++++++++---------
 1 file changed, 9 insertions(+), 9 deletions(-)

diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameFunctionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameFunctionsSuite.scala
index c52cb85e119d6..85877c97ed596 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameFunctionsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameFunctionsSuite.scala
@@ -1012,7 +1012,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession {
       parameters = Map(
         "sqlExpr" -> "\"map_concat(map1, map2)\"",
         "dataType" -> "(\"MAP, INT>\" or \"MAP\")",
-        "functionName" -> "function map_concat"),
+        "functionName" -> "`map_concat`"),
       context = ExpectedContext(
         fragment = "map_concat(map1, map2)",
         start = 0,
@@ -1028,7 +1028,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession {
       parameters = Map(
         "sqlExpr" -> "\"map_concat(map1, map2)\"",
         "dataType" -> "(\"MAP, INT>\" or \"MAP\")",
-        "functionName" -> "function map_concat")
+        "functionName" -> "`map_concat`")
     )
 
     checkError(
@@ -1040,7 +1040,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession {
       parameters = Map(
         "sqlExpr" -> "\"map_concat(map1, 12)\"",
         "dataType" -> "[\"MAP, INT>\", \"INT\"]",
-        "functionName" -> "function map_concat"),
+        "functionName" -> "`map_concat`"),
       context = ExpectedContext(
         fragment = "map_concat(map1, 12)",
         start = 0,
@@ -1056,7 +1056,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession {
       parameters = Map(
         "sqlExpr" -> "\"map_concat(map1, 12)\"",
         "dataType" -> "[\"MAP, INT>\", \"INT\"]",
-        "functionName" -> "function map_concat")
+        "functionName" -> "`map_concat`")
     )
   }
 
@@ -3651,7 +3651,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession {
       parameters = Map(
         "sqlExpr" -> "\"map_zip_with(mmi, mmi, lambdafunction(x, x, y, z))\"",
         "dataType" -> "\"MAP\"",
-        "functionName" -> "function map_zip_with"),
+        "functionName" -> "`map_zip_with`"),
       context = ExpectedContext(
         fragment = "map_zip_with(mmi, mmi, (x, y, z) -> x)",
         start = 0,
@@ -4289,7 +4289,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession {
       sqlState = None,
       parameters = Map(
         "sqlExpr" -> "\"greatest()\"",
-        "functionName" -> "greatest",
+        "functionName" -> "`greatest`",
         "expectedNum" -> "> 1",
         "actualNum" -> "0")
     )
@@ -4302,7 +4302,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession {
       sqlState = None,
       parameters = Map(
         "sqlExpr" -> "\"greatest()\"",
-        "functionName" -> "greatest",
+        "functionName" -> "`greatest`",
         "expectedNum" -> "> 1",
         "actualNum" -> "0"),
       context = ExpectedContext(
@@ -4319,7 +4319,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession {
       sqlState = None,
       parameters = Map(
         "sqlExpr" -> "\"least()\"",
-        "functionName" -> "least",
+        "functionName" -> "`least`",
         "expectedNum" -> "> 1",
         "actualNum" -> "0")
     )
@@ -4332,7 +4332,7 @@ class DataFrameFunctionsSuite extends QueryTest with SharedSparkSession {
       sqlState = None,
       parameters = Map(
         "sqlExpr" -> "\"least()\"",
-        "functionName" -> "least",
+        "functionName" -> "`least`",
         "expectedNum" -> "> 1",
         "actualNum" -> "0"),
       context = ExpectedContext(