From 7b0dca8be45262c213ea1c9a52981ff866db2b76 Mon Sep 17 00:00:00 2001
From: Richard Yu
Date: Sat, 15 Jul 2023 16:31:17 -0700
Subject: [PATCH 1/5] SPARK-44059

---
 .../utils/src/main/resources/error/README.md | 1 +
 .../main/resources/error/error-classes.json | 44 +-
 ...outine-parameter-assignment-error-class.md | 36 +
 docs/sql-error-conditions.md | 34 +-
 .../catalyst/analysis/FunctionRegistry.scala | 113 +-
 .../aggregate/CountMinSketchAgg.scala | 48 +-
 .../sql/catalyst/expressions/generators.scala | 68 +-
 .../expressions/maskExpressions.scala | 129 +-
 .../plans/logical/FunctionBuilderBase.scala | 179 +
 .../sql/errors/QueryCompilationErrors.scala | 76 +-
 .../analysis/NamedArgumentFunctionSuite.scala | 151 +
 .../named-function-arguments.sql.out | 337 +-
 .../inputs/named-function-arguments.sql | 55 +
 .../results/named-function-arguments.sql.out | 324 +-
 .../sql/errors/QueryParsingErrorsSuite.scala | 9 +-
 15 files changed, 1425 insertions(+), 179 deletions(-)
 create mode 100644 docs/sql-error-conditions-duplicate-routine-parameter-assignment-error-class.md
 create mode 100644 sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/FunctionBuilderBase.scala
 create mode 100644 sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/NamedArgumentFunctionSuite.scala

diff --git a/common/utils/src/main/resources/error/README.md b/common/utils/src/main/resources/error/README.md
index dfcb42d49e79a..aed2c0becd311 100644
--- a/common/utils/src/main/resources/error/README.md
+++ b/common/utils/src/main/resources/error/README.md
@@ -666,6 +666,7 @@ The following SQLSTATEs are collated from:
 |4274C |42 |Syntax Error or Access Rule Violation |74C |The specified attribute was not found in the trusted context.|DB2 |N |DB2 |
 |4274D |42 |Syntax Error or Access Rule Violation |74D |The specified attribute already exists in the trusted context.|DB2 |N |DB2 |
 |4274E |42 |Syntax Error or Access Rule Violation |74E |The specified attribute is not supported in the trusted context.|DB2 |N |DB2 |
+|4274K |42 |Syntax Error or Access Rule Violation |74K |Invalid use of a named argument when invoking a routine.|DB2 |N |DB2 |
 |4274M |42 |Syntax Error or Access Rule Violation |74M |An undefined period name was detected. |DB2 |N |DB2 |
 |42801 |42 |Syntax Error or Access Rule Violation |801 |Isolation level UR is invalid, because the result table is not read-only.|DB2 |N |DB2 |
 |42802 |42 |Syntax Error or Access Rule Violation |802 |The number of target values is not the same as the number of source values.|DB2 |N |DB2 |
diff --git a/common/utils/src/main/resources/error/error-classes.json b/common/utils/src/main/resources/error/error-classes.json
index e8cdaa6c63b3f..b136878e6d2c0 100644
--- a/common/utils/src/main/resources/error/error-classes.json
+++ b/common/utils/src/main/resources/error/error-classes.json
@@ -738,6 +738,24 @@
     ],
     "sqlState" : "23505"
   },
+  "DUPLICATE_ROUTINE_PARAMETER_ASSIGNMENT" : {
+    "message" : [
+      "Call to function <functionName> is invalid because it includes multiple argument assignments to the same parameter name <parameterName>."
+    ],
+    "subClass" : {
+      "BOTH_POSITIONAL_AND_NAMED" : {
+        "message" : [
+          "A positional argument and named argument both referred to the same parameter."
+ ] + }, + "DOUBLE_NAMED_ARGUMENT_REFERENCE" : { + "message" : [ + "More than one named argument referred to the same parameter." + ] + } + }, + "sqlState" : "4274K" + }, "EMPTY_JSON_FIELD_VALUE" : { "message" : [ "Failed to parse an empty string for data type ." @@ -1956,7 +1974,13 @@ "Not allowed to implement multiple UDF interfaces, UDF class ." ] }, - "NAMED_ARGUMENTS_SUPPORT_DISABLED" : { + "NAMED_PARAMETERS_NOT_SUPPORTED" : { + "message" : [ + "Named parameters are not supported for function ; please retry the query with positional arguments to the function call instead." + ], + "sqlState" : "4274K" + }, + "NAMED_PARAMETER_SUPPORT_DISABLED" : { "message" : [ "Cannot call function because named argument references are not enabled here. In this case, the named argument reference was . Set \"spark.sql.allowNamedFunctionArguments\" to \"true\" to turn on feature." ] @@ -2295,6 +2319,12 @@ ], "sqlState" : "42614" }, + "REQUIRED_PARAMETER_NOT_FOUND" : { + "message" : [ + "Cannot invoke function because the parameter named is required, but the function call did not supply a value. Please update the function call to supply an argument value (either positionally or by name) and retry the query again." + ], + "sqlState" : "4274K" + }, "REQUIRES_SINGLE_PART_NAMESPACE" : { "message" : [ " requires a single-part namespace, but got ." @@ -2485,6 +2515,12 @@ ], "sqlState" : "42K09" }, + "UNEXPECTED_POSITIONAL_ARGUMENT" : { + "message" : [ + "Cannot invoke function because it contains positional argument(s) following named argument(s); please rearrange them so the positional arguments come first and then retry the query again." + ], + "sqlState" : "4274K" + }, "UNKNOWN_PROTOBUF_MESSAGE_TYPE" : { "message" : [ "Attempting to treat as a Message, but it was ." @@ -2514,6 +2550,12 @@ ], "sqlState" : "428C4" }, + "UNRECOGNIZED_PARAMETER_NAME" : { + "message" : [ + "Cannot invoke function because the function call included a named argument reference for the argument named , but this function does not include any signature containing an argument with this name. Did you mean one of the following? []." + ], + "sqlState" : "4274K" + }, "UNRECOGNIZED_SQL_TYPE" : { "message" : [ "Unrecognized SQL type - name: , id: ." diff --git a/common/utils/src/main/resources/error/error-classes.json.orig b/common/utils/src/main/resources/error/error-classes.json.orig new file mode 100644 index 0000000000000..e8cdaa6c63b3f --- /dev/null +++ b/common/utils/src/main/resources/error/error-classes.json.orig @@ -0,0 +1,6036 @@ +{ + "AGGREGATE_FUNCTION_WITH_NONDETERMINISTIC_EXPRESSION" : { + "message" : [ + "Non-deterministic expression should not appear in the arguments of an aggregate function." + ] + }, + "ALL_PARTITION_COLUMNS_NOT_ALLOWED" : { + "message" : [ + "Cannot use all columns for partition columns." + ] + }, + "ALTER_TABLE_COLUMN_DESCRIPTOR_DUPLICATE" : { + "message" : [ + "ALTER TABLE column specifies descriptor \"\" more than once, which is invalid." + ], + "sqlState" : "42710" + }, + "AMBIGUOUS_ALIAS_IN_NESTED_CTE" : { + "message" : [ + "Name is ambiguous in nested CTE.", + "Please set to \"CORRECTED\" so that name defined in inner CTE takes precedence. If set it to \"LEGACY\", outer CTE definitions will take precedence.", + "See '/sql-migration-guide.html#query-engine'." + ] + }, + "AMBIGUOUS_COLUMN_OR_FIELD" : { + "message" : [ + "Column or field is ambiguous and has matches." 
+ ] + }, + "VIEW_ALREADY_EXISTS" : { + "message" : [ + "Cannot create view because it already exists.", + "Choose a different name, drop or replace the existing object, or add the IF NOT EXISTS clause to tolerate pre-existing objects." + ], + "sqlState" : "42P07" + }, + "VIEW_NOT_FOUND" : { + "message" : [ + "The view cannot be found. Verify the spelling and correctness of the schema and catalog.", + "If you did not qualify the name with a schema, verify the current_schema() output, or qualify the name with the correct schema and catalog.", + "To tolerate the error on drop use DROP VIEW IF EXISTS." + ], + "sqlState" : "42P01" + }, + "WINDOW_FUNCTION_AND_FRAME_MISMATCH" : { + "message" : [ + " function can only be evaluated in an ordered row-based window frame with a single offset: ." + ] + }, + "WINDOW_FUNCTION_WITHOUT_OVER_CLAUSE" : { + "message" : [ + "Window function requires an OVER clause." + ] + }, + "WRITE_STREAM_NOT_ALLOWED" : { + "message" : [ + "`writeStream` can be called only on streaming Dataset/DataFrame." + ] + }, + "WRONG_COMMAND_FOR_OBJECT_TYPE" : { + "message" : [ + "The operation requires a . But is a . Use instead." + ] + }, + "WRONG_NUM_ARGS" : { + "message" : [ + "The requires parameters but the actual number is ." + ], + "subClass" : { + "WITHOUT_SUGGESTION" : { + "message" : [ + "Please, refer to '/sql-ref-functions.html' for a fix." + ] + }, + "WITH_SUGGESTION" : { + "message" : [ + "If you have to call this function with parameters, set the legacy configuration to ." + ] + } + }, + "sqlState" : "42605" + }, + "_LEGACY_ERROR_TEMP_0001" : { + "message" : [ + "Invalid InsertIntoContext." + ] + }, + "_LEGACY_ERROR_TEMP_0004" : { + "message" : [ + "Empty source for merge: you should specify a source table/subquery in merge." + ] + }, + "_LEGACY_ERROR_TEMP_0006" : { + "message" : [ + "The number of inserted values cannot match the fields." + ] + }, + "_LEGACY_ERROR_TEMP_0008" : { + "message" : [ + "There must be at least one WHEN clause in a MERGE statement." + ] + }, + "_LEGACY_ERROR_TEMP_0012" : { + "message" : [ + "DISTRIBUTE BY is not supported." + ] + }, + "_LEGACY_ERROR_TEMP_0014" : { + "message" : [ + "TABLESAMPLE does not accept empty inputs." + ] + }, + "_LEGACY_ERROR_TEMP_0015" : { + "message" : [ + "TABLESAMPLE() is not supported." + ] + }, + "_LEGACY_ERROR_TEMP_0016" : { + "message" : [ + " is not a valid byte length literal, expected syntax: DIGIT+ ('B' | 'K' | 'M' | 'G')." + ] + }, + "_LEGACY_ERROR_TEMP_0018" : { + "message" : [ + "Function trim doesn't support with type . Please use BOTH, LEADING or TRAILING as trim type." + ] + }, + "_LEGACY_ERROR_TEMP_0024" : { + "message" : [ + "Can only have a single from-to unit in the interval literal syntax." + ] + }, + "_LEGACY_ERROR_TEMP_0026" : { + "message" : [ + "Can only use numbers in the interval value part for multiple unit value pairs interval form, but got invalid value: ." + ] + }, + "_LEGACY_ERROR_TEMP_0027" : { + "message" : [ + "The value of from-to unit must be a string." + ] + }, + "_LEGACY_ERROR_TEMP_0028" : { + "message" : [ + "Intervals FROM TO are not supported." + ] + }, + "_LEGACY_ERROR_TEMP_0029" : { + "message" : [ + "Cannot mix year-month and day-time fields: ." + ] + }, + "_LEGACY_ERROR_TEMP_0031" : { + "message" : [ + "Invalid number of buckets: ." + ] + }, + "_LEGACY_ERROR_TEMP_0032" : { + "message" : [ + "Duplicated table paths found: '' and ''. LOCATION and the case insensitive key 'path' in OPTIONS are all used to indicate the custom table path, you can only specify one of them." 
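As a worked companion to the UNTYPED_SCALA_UDF template above: a minimal sketch of the typed-UDF alternative (option 1 in that message) that avoids the silent default-for-null behavior. The session setup, data, and names here are illustrative, not taken from the patch.

```scala
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.udf

object TypedUdfSketch extends App {
  val spark = SparkSession.builder().master("local[*]").appName("typed-udf").getOrCreate()
  import spark.implicits._

  // Nullable int column: None becomes a SQL NULL.
  val df = Seq(Some(1), None).toDF("x")

  // Typed API: the input type is known, so Spark null-checks before calling
  // the closure instead of silently substituting the Java default (0).
  val inc = udf((x: Int) => x + 1)
  df.select(inc($"x").as("x_plus_one")).show()
}
```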
+ ] + }, + "_LEGACY_ERROR_TEMP_0033" : { + "message" : [ + "Expected either STORED AS or STORED BY, not both." + ] + }, + "_LEGACY_ERROR_TEMP_0034" : { + "message" : [ + " is not supported in Hive-style ." + ] + }, + "_LEGACY_ERROR_TEMP_0035" : { + "message" : [ + "Operation not allowed: ." + ] + }, + "_LEGACY_ERROR_TEMP_0037" : { + "message" : [ + "It is not allowed to add catalog/namespace prefix to the table name in CACHE TABLE AS SELECT." + ] + }, + "_LEGACY_ERROR_TEMP_0038" : { + "message" : [ + "CTE definition can't have duplicate names: ." + ] + }, + "_LEGACY_ERROR_TEMP_0039" : { + "message" : [ + "Unsupported SQL statement." + ] + }, + "_LEGACY_ERROR_TEMP_0043" : { + "message" : [ + "Expected format is 'RESET' or 'RESET key'. If you want to include special characters in key, please use quotes, e.g., RESET `key`." + ] + }, + "_LEGACY_ERROR_TEMP_0044" : { + "message" : [ + "The interval value must be in the range of [-18, +18] hours with second precision." + ] + }, + "_LEGACY_ERROR_TEMP_0045" : { + "message" : [ + "Invalid time zone displacement value." + ] + }, + "_LEGACY_ERROR_TEMP_0046" : { + "message" : [ + "CREATE TEMPORARY TABLE without a provider is not allowed." + ] + }, + "_LEGACY_ERROR_TEMP_0047" : { + "message" : [ + "'ROW FORMAT' must be used with 'STORED AS'." + ] + }, + "_LEGACY_ERROR_TEMP_0048" : { + "message" : [ + "Unsupported operation: Used defined record reader/writer classes." + ] + }, + "_LEGACY_ERROR_TEMP_0049" : { + "message" : [ + "Directory path and 'path' in OPTIONS should be specified one, but not both." + ] + }, + "_LEGACY_ERROR_TEMP_0051" : { + "message" : [ + "Empty set in grouping sets is not supported." + ] + }, + "_LEGACY_ERROR_TEMP_0052" : { + "message" : [ + "CREATE VIEW with both IF NOT EXISTS and REPLACE is not allowed." + ] + }, + "_LEGACY_ERROR_TEMP_0053" : { + "message" : [ + "It is not allowed to define a TEMPORARY view with IF NOT EXISTS." + ] + }, + "_LEGACY_ERROR_TEMP_0056" : { + "message" : [ + "Invalid time travel spec: ." + ] + }, + "_LEGACY_ERROR_TEMP_0060" : { + "message" : [ + "." + ] + }, + "_LEGACY_ERROR_TEMP_0061" : { + "message" : [ + "." + ] + }, + "_LEGACY_ERROR_TEMP_0062" : { + "message" : [ + "." + ] + }, + "_LEGACY_ERROR_TEMP_0063" : { + "message" : [ + "." + ] + }, + "_LEGACY_ERROR_TEMP_0064" : { + "message" : [ + "." + ] + }, + "_LEGACY_ERROR_TEMP_1000" : { + "message" : [ + "LEGACY store assignment policy is disallowed in Spark data source V2. Please set the configuration to other values." + ] + }, + "_LEGACY_ERROR_TEMP_1002" : { + "message" : [ + "Unable to generate an encoder for inner class `` without access to the scope that this class was defined in.", + "Try moving this class out of its parent class." + ] + }, + "_LEGACY_ERROR_TEMP_1004" : { + "message" : [ + "Window specification is not defined in the WINDOW clause." + ] + }, + "_LEGACY_ERROR_TEMP_1005" : { + "message" : [ + " doesn't show up in the GROUP BY list ." + ] + }, + "_LEGACY_ERROR_TEMP_1006" : { + "message" : [ + "Aggregate expression required for pivot, but '' did not appear in any aggregate function." + ] + }, + "_LEGACY_ERROR_TEMP_1007" : { + "message" : [ + "Cannot write into temp view as it's not a data source v2 relation." + ] + }, + "_LEGACY_ERROR_TEMP_1008" : { + "message" : [ + " is not a temp view of streaming logical plan, please use batch API such as `DataFrameReader.table` to read it." + ] + }, + "_LEGACY_ERROR_TEMP_1009" : { + "message" : [ + "The depth of view exceeds the maximum view resolution depth (). 
Analysis is aborted to avoid errors. Increase the value of to work around this." + ] + }, + "_LEGACY_ERROR_TEMP_1010" : { + "message" : [ + "Inserting into a view is not allowed. View: ." + ] + }, + "_LEGACY_ERROR_TEMP_1011" : { + "message" : [ + "Writing into a view is not allowed. View: ." + ] + }, + "_LEGACY_ERROR_TEMP_1012" : { + "message" : [ + "Cannot write into v1 table: ." + ] + }, + "_LEGACY_ERROR_TEMP_1013" : { + "message" : [ + " is a . '' expects a table.." + ] + }, + "_LEGACY_ERROR_TEMP_1014" : { + "message" : [ + " is a temp view. '' expects a permanent view." + ] + }, + "_LEGACY_ERROR_TEMP_1015" : { + "message" : [ + " is a table. '' expects a view.." + ] + }, + "_LEGACY_ERROR_TEMP_1016" : { + "message" : [ + " is a temp view. '' expects a table or permanent view." + ] + }, + "_LEGACY_ERROR_TEMP_1017" : { + "message" : [ + " is a built-in/temporary function. '' expects a persistent function.." + ] + }, + "_LEGACY_ERROR_TEMP_1018" : { + "message" : [ + " is a permanent view, which is not supported by streaming reading API such as `DataStreamReader.table` yet." + ] + }, + "_LEGACY_ERROR_TEMP_1021" : { + "message" : [ + "count(.*) is not allowed. Please use count(*) or expand the columns manually, e.g. count(col1, col2)." + ] + }, + "_LEGACY_ERROR_TEMP_1023" : { + "message" : [ + "Function does not support ." + ] + }, + "_LEGACY_ERROR_TEMP_1024" : { + "message" : [ + "FILTER expression is non-deterministic, it cannot be used in aggregate functions." + ] + }, + "_LEGACY_ERROR_TEMP_1025" : { + "message" : [ + "FILTER expression is not of type boolean. It cannot be used in an aggregate function." + ] + }, + "_LEGACY_ERROR_TEMP_1026" : { + "message" : [ + "FILTER expression contains aggregate. It cannot be used in an aggregate function." + ] + }, + "_LEGACY_ERROR_TEMP_1027" : { + "message" : [ + "FILTER expression contains window function. It cannot be used in an aggregate function." + ] + }, + "_LEGACY_ERROR_TEMP_1028" : { + "message" : [ + "Number of column aliases does not match number of columns. Number of column aliases: ; number of columns: ." + ] + }, + "_LEGACY_ERROR_TEMP_1030" : { + "message" : [ + "Window aggregate function with filter predicate is not supported yet." + ] + }, + "_LEGACY_ERROR_TEMP_1031" : { + "message" : [ + "It is not allowed to use a window function inside an aggregate function. Please use the inner window function in a sub-query." + ] + }, + "_LEGACY_ERROR_TEMP_1032" : { + "message" : [ + " does not have any WindowExpression." + ] + }, + "_LEGACY_ERROR_TEMP_1033" : { + "message" : [ + " has multiple Window Specifications ().", + "Please file a bug report with this error message, stack trace, and the query." + ] + }, + "_LEGACY_ERROR_TEMP_1034" : { + "message" : [ + "It is not allowed to use window functions inside clause." + ] + }, + "_LEGACY_ERROR_TEMP_1035" : { + "message" : [ + "Cannot specify window frame for function." + ] + }, + "_LEGACY_ERROR_TEMP_1036" : { + "message" : [ + "Window Frame must match the required frame ." + ] + }, + "_LEGACY_ERROR_TEMP_1037" : { + "message" : [ + "Window function requires window to be ordered, please add ORDER BY clause. For example SELECT (value_expr) OVER (PARTITION BY window_partition ORDER BY window_ordering) from table." + ] + }, + "_LEGACY_ERROR_TEMP_1039" : { + "message" : [ + "Multiple time/session window expressions would result in a cartesian product of rows, therefore they are currently not supported." 
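The window-ordering requirement quoted a few entries above (_LEGACY_ERROR_TEMP_1037) gives its example in SQL; a minimal sketch of the same fix in the DataFrame API, with illustrative column names:

```scala
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.expressions.Window
import org.apache.spark.sql.functions.rank

object OrderedWindowSketch extends App {
  val spark = SparkSession.builder().master("local[*]").appName("ordered-window").getOrCreate()
  import spark.implicits._

  val sales = Seq(("a", 1), ("a", 2), ("b", 3)).toDF("dept", "amount")

  // rank() needs an ordered frame: dropping orderBy here triggers the
  // "requires window to be ordered" condition quoted above.
  val w = Window.partitionBy($"dept").orderBy($"amount")
  sales.withColumn("rnk", rank().over(w)).show()
}
```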
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_1040" : {
+    "message" : [
+      "Gap duration expression used in session window must be CalendarIntervalType, but got
." + ] + }, + "_LEGACY_ERROR_TEMP_1045" : { + "message" : [ + "ALTER TABLE SET LOCATION does not support partition for v2 tables." + ] + }, + "_LEGACY_ERROR_TEMP_1046" : { + "message" : [ + "Join strategy hint parameter should be an identifier or string but was ()." + ] + }, + "_LEGACY_ERROR_TEMP_1047" : { + "message" : [ + " Hint parameter should include columns, but found." + ] + }, + "_LEGACY_ERROR_TEMP_1048" : { + "message" : [ + " Hint expects a partition number as a parameter." + ] + }, + "_LEGACY_ERROR_TEMP_1049" : { + "message" : [ + "Syntax error in attribute name: ." + ] + }, + "_LEGACY_ERROR_TEMP_1050" : { + "message" : [ + "Can only star expand struct data types. Attribute: ``." + ] + }, + "_LEGACY_ERROR_TEMP_1052" : { + "message" : [ + "ADD COLUMN with v1 tables cannot specify NOT NULL." + ] + }, + "_LEGACY_ERROR_TEMP_1053" : { + "message" : [ + "ALTER COLUMN with v1 tables cannot specify NOT NULL." + ] + }, + "_LEGACY_ERROR_TEMP_1054" : { + "message" : [ + "ALTER COLUMN cannot find column in v1 table. Available: ." + ] + }, + "_LEGACY_ERROR_TEMP_1055" : { + "message" : [ + "The database name is not valid: ." + ] + }, + "_LEGACY_ERROR_TEMP_1057" : { + "message" : [ + "SHOW COLUMNS with conflicting databases: '' != ''." + ] + }, + "_LEGACY_ERROR_TEMP_1058" : { + "message" : [ + "Cannot create table with both USING and ." + ] + }, + "_LEGACY_ERROR_TEMP_1059" : { + "message" : [ + "STORED AS with file format '' is invalid." + ] + }, + "_LEGACY_ERROR_TEMP_1060" : { + "message" : [ + " does not support nested column: ." + ] + }, + "_LEGACY_ERROR_TEMP_1065" : { + "message" : [ + "`` is not a valid name for tables/databases. Valid names only contain alphabet characters, numbers and _." + ] + }, + "_LEGACY_ERROR_TEMP_1066" : { + "message" : [ + " is a system preserved database, you cannot create a database with this name." + ] + }, + "_LEGACY_ERROR_TEMP_1068" : { + "message" : [ + " is a system preserved database, you cannot use it as current database. To access global temporary views, you should use qualified name with the GLOBAL_TEMP_DATABASE, e.g. SELECT * FROM .viewName." + ] + }, + "_LEGACY_ERROR_TEMP_1069" : { + "message" : [ + "CREATE EXTERNAL TABLE must be accompanied by LOCATION." + ] + }, + "_LEGACY_ERROR_TEMP_1071" : { + "message" : [ + "Some existing schema fields () are not present in the new schema. We don't support dropping columns yet." + ] + }, + "_LEGACY_ERROR_TEMP_1072" : { + "message" : [ + "Only the tables/views belong to the same database can be retrieved. Querying tables/views are ." + ] + }, + "_LEGACY_ERROR_TEMP_1073" : { + "message" : [ + "RENAME TABLE source and destination databases do not match: '' != ''." + ] + }, + "_LEGACY_ERROR_TEMP_1074" : { + "message" : [ + "RENAME TEMPORARY VIEW from '' to '': cannot specify database name '' in the destination table." + ] + }, + "_LEGACY_ERROR_TEMP_1076" : { + "message" : [ + "Partition spec is invalid.
." + ] + }, + "_LEGACY_ERROR_TEMP_1079" : { + "message" : [ + "Resource Type '' is not supported." + ] + }, + "_LEGACY_ERROR_TEMP_1080" : { + "message" : [ + "Table did not specify database." + ] + }, + "_LEGACY_ERROR_TEMP_1081" : { + "message" : [ + "Table did not specify locationUri." + ] + }, + "_LEGACY_ERROR_TEMP_1082" : { + "message" : [ + "Partition [] did not specify locationUri." + ] + }, + "_LEGACY_ERROR_TEMP_1083" : { + "message" : [ + "Number of buckets should be greater than 0 but less than or equal to bucketing.maxBuckets (``). Got ``." + ] + }, + "_LEGACY_ERROR_TEMP_1084" : { + "message" : [ + "Corrupted table name context in catalog: parts expected, but part is missing." + ] + }, + "_LEGACY_ERROR_TEMP_1085" : { + "message" : [ + "Corrupted view SQL configs in catalog." + ] + }, + "_LEGACY_ERROR_TEMP_1086" : { + "message" : [ + "Corrupted view query output column names in catalog: parts expected, but part is missing." + ] + }, + "_LEGACY_ERROR_TEMP_1087" : { + "message" : [ + "Corrupted view referred temp view names in catalog." + ] + }, + "_LEGACY_ERROR_TEMP_1088" : { + "message" : [ + "Corrupted view referred temp functions names in catalog." + ] + }, + "_LEGACY_ERROR_TEMP_1089" : { + "message" : [ + "Column statistics deserialization is not supported for column of data type: ." + ] + }, + "_LEGACY_ERROR_TEMP_1090" : { + "message" : [ + "Column statistics serialization is not supported for column of data type: ." + ] + }, + "_LEGACY_ERROR_TEMP_1097" : { + "message" : [ + "The field for corrupt records must be string type and nullable." + ] + }, + "_LEGACY_ERROR_TEMP_1098" : { + "message" : [ + "DataType '' is not supported by ." + ] + }, + "_LEGACY_ERROR_TEMP_1099" : { + "message" : [ + "() doesn't support the mode. Acceptable modes are and ." + ] + }, + "_LEGACY_ERROR_TEMP_1100" : { + "message" : [ + "The '' parameter of function '' needs to be a literal." + ] + }, + "_LEGACY_ERROR_TEMP_1103" : { + "message" : [ + "Unsupported component type in arrays." + ] + }, + "_LEGACY_ERROR_TEMP_1104" : { + "message" : [ + "The second argument should be a double literal." + ] + }, + "_LEGACY_ERROR_TEMP_1107" : { + "message" : [ + "Table
declares capability  but is not an instance of ."
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_1108" : {
+    "message" : [
+      "Delete by condition with subquery is not supported: ."
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_1109" : {
+    "message" : [
+      "Exec update failed: cannot translate expression to source filter: ."
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_1110" : {
+    "message" : [
+      "Cannot delete from table where ."
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_1111" : {
+    "message" : [
+      "DESCRIBE does not support partition for v2 tables."
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_1113" : {
+    "message" : [
+      "Table does not support ."
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_1114" : {
+    "message" : [
+      "The streaming sources in a query do not have a common supported execution mode.",
+      "Sources support micro-batch: .",
+      "Sources support continuous: ."
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_1120" : {
+    "message" : [
+      "Unsupported NamespaceChange  in JDBC catalog."
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_1121" : {
+    "message" : [
+      "Table  does not support : ."
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_1122" : {
+    "message" : [
+      "Table
is not a row-level operation table." + ] + }, + "_LEGACY_ERROR_TEMP_1123" : { + "message" : [ + "Cannot rename a table with ALTER VIEW. Please use ALTER TABLE instead." + ] + }, + "_LEGACY_ERROR_TEMP_1125" : { + "message" : [ + "Database from v1 session catalog is not specified." + ] + }, + "_LEGACY_ERROR_TEMP_1126" : { + "message" : [ + "Nested databases are not supported by v1 session catalog: ." + ] + }, + "_LEGACY_ERROR_TEMP_1127" : { + "message" : [ + "Invalid partitionExprs specified: For range partitioning use REPARTITION_BY_RANGE instead." + ] + }, + "_LEGACY_ERROR_TEMP_1128" : { + "message" : [ + "Failed to resolve the schema for for the partition column: . It must be specified manually." + ] + }, + "_LEGACY_ERROR_TEMP_1131" : { + "message" : [ + "Data source does not support output mode." + ] + }, + "_LEGACY_ERROR_TEMP_1132" : { + "message" : [ + "A schema needs to be specified when using ." + ] + }, + "_LEGACY_ERROR_TEMP_1133" : { + "message" : [ + "The user-specified schema doesn't match the actual schema:", + "user-specified: , actual: . If you're using", + "DataFrameReader.schema API or creating a table, please do not specify the schema.", + "Or if you're scanning an existed table, please drop it and re-create it." + ] + }, + "_LEGACY_ERROR_TEMP_1134" : { + "message" : [ + "Unable to infer schema for at . It must be specified manually." + ] + }, + "_LEGACY_ERROR_TEMP_1135" : { + "message" : [ + " is not a valid Spark SQL Data Source." + ] + }, + "_LEGACY_ERROR_TEMP_1136" : { + "message" : [ + "Cannot save interval data type into external storage." + ] + }, + "_LEGACY_ERROR_TEMP_1137" : { + "message" : [ + "Unable to resolve given []." + ] + }, + "_LEGACY_ERROR_TEMP_1138" : { + "message" : [ + "Hive built-in ORC data source must be used with Hive support enabled. Please use the native ORC data source by setting 'spark.sql.orc.impl' to 'native'." + ] + }, + "_LEGACY_ERROR_TEMP_1139" : { + "message" : [ + "Failed to find data source: . Avro is built-in but external data source module since Spark 2.4. Please deploy the application as per the deployment section of Apache Avro Data Source Guide." + ] + }, + "_LEGACY_ERROR_TEMP_1140" : { + "message" : [ + "Failed to find data source: . Please deploy the application as per the deployment section of Structured Streaming + Kafka Integration Guide." + ] + }, + "_LEGACY_ERROR_TEMP_1141" : { + "message" : [ + "Multiple sources found for (), please specify the fully qualified class name." + ] + }, + "_LEGACY_ERROR_TEMP_1142" : { + "message" : [ + "Datasource does not support writing empty or nested empty schemas. Please make sure the data schema has at least one or more column(s)." + ] + }, + "_LEGACY_ERROR_TEMP_1143" : { + "message" : [ + "The data to be inserted needs to have the same number of columns as the target table: target table has column(s) but the inserted data has column(s), which contain partition column(s) having assigned constant values." + ] + }, + "_LEGACY_ERROR_TEMP_1144" : { + "message" : [ + "The data to be inserted needs to have the same number of partition columns as the target table: target table has partition column(s) but the inserted data has partition columns specified." + ] + }, + "_LEGACY_ERROR_TEMP_1145" : { + "message" : [ + " is not a partition column. Partition columns are ." + ] + }, + "_LEGACY_ERROR_TEMP_1146" : { + "message" : [ + "Partition column have multiple values specified, . Please only specify a single value." 
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_1147" : {
+    "message" : [
+      "The ordering of partition columns is . All partition columns having constant values need to appear before other partition columns that do not have an assigned constant value."
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_1148" : {
+    "message" : [
+      "Can only write data to relations with a single path."
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_1149" : {
+    "message" : [
+      "Fail to rebuild expression: missing key  in `translatedFilterToExpr`."
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_1151" : {
+    "message" : [
+      "Fail to resolve data source for the table
since the table serde property has the duplicated key with extra options specified for this scan operation. To fix this, you can rollback to the legacy behavior of ignoring the extra options by setting the config to `false`, or address the conflicts of the same config." + ] + }, + "_LEGACY_ERROR_TEMP_1153" : { + "message" : [ + "Cannot use for partition column." + ] + }, + "_LEGACY_ERROR_TEMP_1155" : { + "message" : [ + "Partition column `` not found in schema ." + ] + }, + "_LEGACY_ERROR_TEMP_1156" : { + "message" : [ + "Column not found in schema ." + ] + }, + "_LEGACY_ERROR_TEMP_1158" : { + "message" : [ + "Saving data into a view is not allowed." + ] + }, + "_LEGACY_ERROR_TEMP_1159" : { + "message" : [ + "The format of the existing table is ``. It doesn't match the specified format ``." + ] + }, + "_LEGACY_ERROR_TEMP_1160" : { + "message" : [ + "The location of the existing table is ``. It doesn't match the specified location ``." + ] + }, + "_LEGACY_ERROR_TEMP_1161" : { + "message" : [ + "The column number of the existing table () doesn't match the data schema ()." + ] + }, + "_LEGACY_ERROR_TEMP_1162" : { + "message" : [ + "Cannot resolve '' given input columns: []." + ] + }, + "_LEGACY_ERROR_TEMP_1163" : { + "message" : [ + "Specified partitioning does not match that of the existing table .", + "Specified partition columns: [].", + "Existing partition columns: []." + ] + }, + "_LEGACY_ERROR_TEMP_1164" : { + "message" : [ + "Specified bucketing does not match that of the existing table .", + "Specified bucketing: .", + "Existing bucketing: ." + ] + }, + "_LEGACY_ERROR_TEMP_1165" : { + "message" : [ + "It is not allowed to specify partitioning when the table schema is not defined." + ] + }, + "_LEGACY_ERROR_TEMP_1166" : { + "message" : [ + "Bucketing column '' should not be part of partition columns ''." + ] + }, + "_LEGACY_ERROR_TEMP_1167" : { + "message" : [ + "Bucket sorting column '' should not be part of partition columns ''." + ] + }, + "_LEGACY_ERROR_TEMP_1169" : { + "message" : [ + "Requested partitioning does not match the table :", + "Requested partitions: .", + "Table partitions: ." + ] + }, + "_LEGACY_ERROR_TEMP_1171" : { + "message" : [ + "createTableColumnTypes option column not found in schema ." + ] + }, + "_LEGACY_ERROR_TEMP_1172" : { + "message" : [ + "Parquet type not yet supported: ." + ] + }, + "_LEGACY_ERROR_TEMP_1173" : { + "message" : [ + "Illegal Parquet type: ." + ] + }, + "_LEGACY_ERROR_TEMP_1174" : { + "message" : [ + "Unrecognized Parquet type: ." + ] + }, + "_LEGACY_ERROR_TEMP_1175" : { + "message" : [ + "Unsupported data type ." + ] + }, + "_LEGACY_ERROR_TEMP_1181" : { + "message" : [ + "Stream-stream join without equality predicate is not supported." + ] + }, + "_LEGACY_ERROR_TEMP_1182" : { + "message" : [ + "Column are ambiguous. It's probably because you joined several Datasets together, and some of these Datasets are the same. This column points to one of the Datasets but Spark is unable to figure out which one. Please alias the Datasets with different names via `Dataset.as` before joining them, and specify the column using qualified name, e.g. `df.as(\"a\").join(df.as(\"b\"), $\"a.id\" > $\"b.id\")`. You can also set to false to disable this check." + ] + }, + "_LEGACY_ERROR_TEMP_1183" : { + "message" : [ + "Cannot use interval type in the table schema." + ] + }, + "_LEGACY_ERROR_TEMP_1184" : { + "message" : [ + "Catalog does not support ." + ] + }, + "_LEGACY_ERROR_TEMP_1186" : { + "message" : [ + "Multi-part identifier cannot be empty." 
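The self-join guidance in _LEGACY_ERROR_TEMP_1182 above embeds its fix as an inline snippet; a minimal runnable sketch of that same aliasing pattern, with illustrative data:

```scala
import org.apache.spark.sql.SparkSession

object SelfJoinAliasSketch extends App {
  val spark = SparkSession.builder().master("local[*]").appName("self-join-alias").getOrCreate()
  import spark.implicits._

  val df = Seq((1, "x"), (2, "y")).toDF("id", "v")

  // Alias both sides so every column reference is qualified and unambiguous,
  // exactly as the message recommends for Datasets joined with themselves.
  val joined = df.as("a").join(df.as("b"), $"a.id" > $"b.id")
  joined.select($"a.id", $"b.id").show()
}
```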
+ ] + }, + "_LEGACY_ERROR_TEMP_1187" : { + "message" : [ + "Hive data source can only be used with tables, you can not files of Hive data source directly." + ] + }, + "_LEGACY_ERROR_TEMP_1188" : { + "message" : [ + "There is a 'path' option set and () is called with a path parameter. Either remove the path option, or call () without the parameter. To ignore this check, set '' to 'true'." + ] + }, + "_LEGACY_ERROR_TEMP_1189" : { + "message" : [ + "User specified schema not supported with ``." + ] + }, + "_LEGACY_ERROR_TEMP_1190" : { + "message" : [ + "Temporary view doesn't support streaming write." + ] + }, + "_LEGACY_ERROR_TEMP_1191" : { + "message" : [ + "Streaming into views is not supported." + ] + }, + "_LEGACY_ERROR_TEMP_1192" : { + "message" : [ + "The input source() is different from the table 's data source provider()." + ] + }, + "_LEGACY_ERROR_TEMP_1193" : { + "message" : [ + "Table doesn't support streaming write - ." + ] + }, + "_LEGACY_ERROR_TEMP_1194" : { + "message" : [ + "queryName must be specified for memory sink." + ] + }, + "_LEGACY_ERROR_TEMP_1195" : { + "message" : [ + "'' is not supported with continuous trigger." + ] + }, + "_LEGACY_ERROR_TEMP_1196" : { + "message" : [ + " column not found in existing columns ()." + ] + }, + "_LEGACY_ERROR_TEMP_1197" : { + "message" : [ + "'' does not support partitioning." + ] + }, + "_LEGACY_ERROR_TEMP_1198" : { + "message" : [ + "Function '' cannot process input: (): ." + ] + }, + "_LEGACY_ERROR_TEMP_1199" : { + "message" : [ + "Invalid bound function ': there are arguments but parameters returned from 'inputTypes()'." + ] + }, + "_LEGACY_ERROR_TEMP_1200" : { + "message" : [ + " is not supported for v2 tables." + ] + }, + "_LEGACY_ERROR_TEMP_1201" : { + "message" : [ + "Cannot resolve column name \"\" among ()." + ] + }, + "_LEGACY_ERROR_TEMP_1205" : { + "message" : [ + "Expected only partition pruning predicates: ." + ] + }, + "_LEGACY_ERROR_TEMP_1207" : { + "message" : [ + "The duration and time inputs to window must be an integer, long or string literal." + ] + }, + "_LEGACY_ERROR_TEMP_1210" : { + "message" : [ + "The second argument in should be a boolean literal." + ] + }, + "_LEGACY_ERROR_TEMP_1211" : { + "message" : [ + "Detected implicit cartesian product for join between logical plans", + "", + "and", + "rightPlan", + "Join condition is missing or trivial.", + "Either: use the CROSS JOIN syntax to allow cartesian products between these relations, or: enable implicit cartesian products by setting the configuration variable spark.sql.crossJoin.enabled=true." + ] + }, + "_LEGACY_ERROR_TEMP_1212" : { + "message" : [ + "Found conflicting attributes in the condition joining outer plan:", + "", + "and subplan:", + "." + ] + }, + "_LEGACY_ERROR_TEMP_1213" : { + "message" : [ + "Window expression is empty in ." + ] + }, + "_LEGACY_ERROR_TEMP_1214" : { + "message" : [ + "Found different window function type in ." + ] + }, + "_LEGACY_ERROR_TEMP_1218" : { + "message" : [ + " should be converted to HadoopFsRelation." + ] + }, + "_LEGACY_ERROR_TEMP_1219" : { + "message" : [ + "Hive metastore does not support altering database location." + ] + }, + "_LEGACY_ERROR_TEMP_1221" : { + "message" : [ + "Hive 0.12 doesn't support creating permanent functions. Please use Hive 0.13 or higher." + ] + }, + "_LEGACY_ERROR_TEMP_1222" : { + "message" : [ + "Unknown resource type: ." + ] + }, + "_LEGACY_ERROR_TEMP_1223" : { + "message" : [ + "Invalid field id '' in day-time interval. Supported interval fields: ." 
+ ] + }, + "_LEGACY_ERROR_TEMP_1224" : { + "message" : [ + "'interval to ' is invalid." + ] + }, + "_LEGACY_ERROR_TEMP_1225" : { + "message" : [ + "Invalid field id '' in year-month interval. Supported interval fields: ." + ] + }, + "_LEGACY_ERROR_TEMP_1226" : { + "message" : [ + "The SQL config '' was removed in the version . " + ] + }, + "_LEGACY_ERROR_TEMP_1228" : { + "message" : [ + "Decimal scale () cannot be greater than precision ()." + ] + }, + "_LEGACY_ERROR_TEMP_1231" : { + "message" : [ + " is not a valid partition column in table ." + ] + }, + "_LEGACY_ERROR_TEMP_1232" : { + "message" : [ + "Partition spec is invalid. The spec () must match the partition spec () defined in table ''." + ] + }, + "_LEGACY_ERROR_TEMP_1237" : { + "message" : [ + "The list of partition columns with values in partition specification for table '
' in database '' is not a prefix of the list of partition columns defined in the table schema. Expected a prefix of [], but got []." + ] + }, + "_LEGACY_ERROR_TEMP_1239" : { + "message" : [ + "Analyzing column statistics is not supported for column of data type: ." + ] + }, + "_LEGACY_ERROR_TEMP_1241" : { + "message" : [ + "CREATE-TABLE-AS-SELECT cannot create table with location to a non-empty directory . To allow overwriting the existing non-empty directory, set '' to true." + ] + }, + "_LEGACY_ERROR_TEMP_1246" : { + "message" : [ + "Can't find column `` given table data columns ." + ] + }, + "_LEGACY_ERROR_TEMP_1247" : { + "message" : [ + "Operation not allowed: ALTER TABLE SET [SERDE | SERDEPROPERTIES] for a specific partition is not supported for tables created with the datasource API." + ] + }, + "_LEGACY_ERROR_TEMP_1248" : { + "message" : [ + "Operation not allowed: ALTER TABLE SET SERDE is not supported for tables created with the datasource API." + ] + }, + "_LEGACY_ERROR_TEMP_1250" : { + "message" : [ + " is not allowed on since filesource partition management is disabled (spark.sql.hive.manageFilesourcePartitions = false)." + ] + }, + "_LEGACY_ERROR_TEMP_1251" : { + "message" : [ + " is not allowed on since its partition metadata is not stored in the Hive metastore. To import this information into the metastore, run `msck repair table `." + ] + }, + "_LEGACY_ERROR_TEMP_1252" : { + "message" : [ + "Cannot alter a view with ALTER TABLE. Please use ALTER VIEW instead." + ] + }, + "_LEGACY_ERROR_TEMP_1253" : { + "message" : [ + "Cannot alter a table with ALTER VIEW. Please use ALTER TABLE instead." + ] + }, + "_LEGACY_ERROR_TEMP_1255" : { + "message" : [ + "Cannot drop built-in function ''." + ] + }, + "_LEGACY_ERROR_TEMP_1256" : { + "message" : [ + "Cannot refresh built-in function ." + ] + }, + "_LEGACY_ERROR_TEMP_1257" : { + "message" : [ + "Cannot refresh temporary function ." + ] + }, + "_LEGACY_ERROR_TEMP_1259" : { + "message" : [ + "ALTER ADD COLUMNS does not support views. You must drop and re-create the views for adding the new columns. Views:
."
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_1260" : {
+    "message" : [
+      "ALTER ADD COLUMNS does not support datasource table with type . You must drop and re-create the table for adding the new columns. Tables:
." + ] + }, + "_LEGACY_ERROR_TEMP_1261" : { + "message" : [ + "LOAD DATA is not supported for datasource tables: ." + ] + }, + "_LEGACY_ERROR_TEMP_1262" : { + "message" : [ + "LOAD DATA target table is partitioned, but no partition spec is provided." + ] + }, + "_LEGACY_ERROR_TEMP_1263" : { + "message" : [ + "LOAD DATA target table is partitioned, but number of columns in provided partition spec () do not match number of partitioned columns in table ()." + ] + }, + "_LEGACY_ERROR_TEMP_1264" : { + "message" : [ + "LOAD DATA target table is not partitioned, but a partition spec was provided." + ] + }, + "_LEGACY_ERROR_TEMP_1266" : { + "message" : [ + "Operation not allowed: TRUNCATE TABLE on external tables: ." + ] + }, + "_LEGACY_ERROR_TEMP_1267" : { + "message" : [ + "Operation not allowed: TRUNCATE TABLE ... PARTITION is not supported for tables that are not partitioned: ." + ] + }, + "_LEGACY_ERROR_TEMP_1268" : { + "message" : [ + "Failed to truncate table when removing data of the path: ." + ] + }, + "_LEGACY_ERROR_TEMP_1270" : { + "message" : [ + "SHOW CREATE TABLE is not supported on a temporary view:
."
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_1271" : {
+    "message" : [
+      "Failed to execute SHOW CREATE TABLE against table , which is created by Hive and uses the following unsupported feature(s)",
+      "",
+      "Please use `SHOW CREATE TABLE  AS SERDE` to show Hive DDL instead."
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_1272" : {
+    "message" : [
+      "SHOW CREATE TABLE doesn't support transactional Hive table. Please use `SHOW CREATE TABLE  AS SERDE` to show Hive DDL instead."
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_1273" : {
+    "message" : [
+      "Failed to execute SHOW CREATE TABLE against table , which is created by Hive and uses the following unsupported serde configuration",
+      "",
+      "Please use `SHOW CREATE TABLE  AS SERDE` to show Hive DDL instead."
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_1274" : {
+    "message" : [
+      " is a Spark data source table. Use `SHOW CREATE TABLE` without `AS SERDE` instead."
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_1275" : {
+    "message" : [
+      "Failed to execute SHOW CREATE TABLE against table/view
, which is created by Hive and uses the following unsupported feature(s)", + "." + ] + }, + "_LEGACY_ERROR_TEMP_1276" : { + "message" : [ + "The logical plan that represents the view is not analyzed." + ] + }, + "_LEGACY_ERROR_TEMP_1278" : { + "message" : [ + " is not a view." + ] + }, + "_LEGACY_ERROR_TEMP_1280" : { + "message" : [ + "It is not allowed to create a persisted view from the Dataset API." + ] + }, + "_LEGACY_ERROR_TEMP_1285" : { + "message" : [ + "Since Spark 2.3, the queries from raw JSON/CSV files are disallowed when the", + "referenced columns only include the internal corrupt record column", + "(named _corrupt_record by default). For example:", + "spark.read.schema(schema).csv(file).filter($\"_corrupt_record\".isNotNull).count()", + "and spark.read.schema(schema).csv(file).select(\"_corrupt_record\").show().", + "Instead, you can cache or save the parsed results and then send the same query.", + "For example, val df = spark.read.schema(schema).csv(file).cache() and then", + "df.filter($\"_corrupt_record\".isNotNull).count()." + ] + }, + "_LEGACY_ERROR_TEMP_1286" : { + "message" : [ + "User-defined partition column not found in the JDBC relation: ." + ] + }, + "_LEGACY_ERROR_TEMP_1287" : { + "message" : [ + "Partition column type should be , , or , but found." + ] + }, + "_LEGACY_ERROR_TEMP_1288" : { + "message" : [ + "Table or view '' already exists. SaveMode: ErrorIfExists." + ] + }, + "_LEGACY_ERROR_TEMP_1290" : { + "message" : [ + "Text data source supports only a single column, and you have columns." + ] + }, + "_LEGACY_ERROR_TEMP_1291" : { + "message" : [ + "Can't find required partition column in partition schema ." + ] + }, + "_LEGACY_ERROR_TEMP_1292" : { + "message" : [ + "Temporary view '' should not have specified a database." + ] + }, + "_LEGACY_ERROR_TEMP_1293" : { + "message" : [ + "Hive data source can only be used with tables, you can't use it with CREATE TEMP VIEW USING." + ] + }, + "_LEGACY_ERROR_TEMP_1294" : { + "message" : [ + "The timestamp provided for the '' option is invalid. The expected format is 'YYYY-MM-DDTHH:mm:ss', but the provided timestamp: ." + ] + }, + "_LEGACY_ERROR_TEMP_1295" : { + "message" : [ + "Set a host to read from with option(\"host\", ...)." + ] + }, + "_LEGACY_ERROR_TEMP_1296" : { + "message" : [ + "Set a port to read from with option(\"port\", ...)." + ] + }, + "_LEGACY_ERROR_TEMP_1297" : { + "message" : [ + "IncludeTimestamp must be set to either \"true\" or \"false\"." + ] + }, + "_LEGACY_ERROR_TEMP_1298" : { + "message" : [ + "checkpointLocation must be specified either through option(\"checkpointLocation\", ...) or SparkSession.conf.set(\"\", ...)." + ] + }, + "_LEGACY_ERROR_TEMP_1299" : { + "message" : [ + "This query does not support recovering from checkpoint location. Delete to start over." + ] + }, + "_LEGACY_ERROR_TEMP_1300" : { + "message" : [ + "Unable to find the column `` given []." + ] + }, + "_LEGACY_ERROR_TEMP_1305" : { + "message" : [ + "Unsupported TableChange in JDBC catalog." + ] + }, + "_LEGACY_ERROR_TEMP_1306" : { + "message" : [ + "There is a 'path' or 'paths' option set and load() is called with path parameters. Either remove the path option if it's the same as the path parameter, or add it to the load() parameter if you do want to read multiple paths. To ignore this check, set '' to 'true'." + ] + }, + "_LEGACY_ERROR_TEMP_1307" : { + "message" : [ + "There is a 'path' option set and save() is called with a path parameter. Either remove the path option, or call save() without the parameter. 
To ignore this check, set '' to 'true'." + ] + }, + "_LEGACY_ERROR_TEMP_1308" : { + "message" : [ + "TableProvider implementation cannot be written with mode, please use Append or Overwrite modes instead." + ] + }, + "_LEGACY_ERROR_TEMP_1309" : { + "message" : [ + "insertInto() can't be used together with partitionBy(). Partition columns have already been defined for the table. It is not necessary to use partitionBy()." + ] + }, + "_LEGACY_ERROR_TEMP_1310" : { + "message" : [ + "Couldn't find a catalog to handle the identifier ." + ] + }, + "_LEGACY_ERROR_TEMP_1312" : { + "message" : [ + "'' does not support bucketBy right now." + ] + }, + "_LEGACY_ERROR_TEMP_1313" : { + "message" : [ + "'' does not support bucketBy and sortBy right now." + ] + }, + "_LEGACY_ERROR_TEMP_1316" : { + "message" : [ + "Invalid partition transformation: ." + ] + }, + "_LEGACY_ERROR_TEMP_1319" : { + "message" : [ + "Invalid join type in joinWith: ." + ] + }, + "_LEGACY_ERROR_TEMP_1320" : { + "message" : [ + "Typed column that needs input type and schema cannot be passed in untyped `select` API. Use the typed `Dataset.select` API instead." + ] + }, + "_LEGACY_ERROR_TEMP_1321" : { + "message" : [ + "Invalid view name: ." + ] + }, + "_LEGACY_ERROR_TEMP_1322" : { + "message" : [ + "Invalid number of buckets: bucket(, )." + ] + }, + "_LEGACY_ERROR_TEMP_1323" : { + "message" : [ + "\"\" is not a numeric column. Aggregation function can only be applied on a numeric column." + ] + }, + "_LEGACY_ERROR_TEMP_1324" : { + "message" : [ + "The pivot column has more than distinct values, this could indicate an error. If this was intended, set to at least the number of distinct values of the pivot column." + ] + }, + "_LEGACY_ERROR_TEMP_1325" : { + "message" : [ + "Cannot modify the value of a static config: ." + ] + }, + "_LEGACY_ERROR_TEMP_1327" : { + "message" : [ + "Command execution is not supported in runner ." + ] + }, + "_LEGACY_ERROR_TEMP_1328" : { + "message" : [ + "Can not instantiate class , please make sure it has public non argument constructor." + ] + }, + "_LEGACY_ERROR_TEMP_1329" : { + "message" : [ + "Can not load class , please make sure it is on the classpath." + ] + }, + "_LEGACY_ERROR_TEMP_1330" : { + "message" : [ + "Class doesn't implement interface UserDefinedAggregateFunction." + ] + }, + "_LEGACY_ERROR_TEMP_1331" : { + "message" : [ + "Missing field in table
with schema:", + "." + ] + }, + "_LEGACY_ERROR_TEMP_1332" : { + "message" : [ + "" + ] + }, + "_LEGACY_ERROR_TEMP_1334" : { + "message" : [ + "Cannot specify both version and timestamp when time travelling the table." + ] + }, + "_LEGACY_ERROR_TEMP_1338" : { + "message" : [ + "Sinks cannot request distribution and ordering in continuous execution mode." + ] + }, + "_LEGACY_ERROR_TEMP_1339" : { + "message" : [ + "Failed to execute INSERT INTO command because the VALUES list contains a DEFAULT column reference as part of another expression; this is not allowed." + ] + }, + "_LEGACY_ERROR_TEMP_1340" : { + "message" : [ + "Failed to execute UPDATE command because the SET list contains a DEFAULT column reference as part of another expression; this is not allowed." + ] + }, + "_LEGACY_ERROR_TEMP_1343" : { + "message" : [ + "Failed to execute MERGE INTO command because one of its INSERT or UPDATE assignments contains a DEFAULT column reference as part of another expression; this is not allowed." + ] + }, + "_LEGACY_ERROR_TEMP_1344" : { + "message" : [ + "Invalid DEFAULT value for column : fails to parse as a valid literal value." + ] + }, + "_LEGACY_ERROR_TEMP_1345" : { + "message" : [ + "Failed to execute command because DEFAULT values are not supported for target data source with table provider: \"\"." + ] + }, + "_LEGACY_ERROR_TEMP_1346" : { + "message" : [ + "Failed to execute command because DEFAULT values are not supported when adding new columns to previously existing target data source with table provider: \"\"." + ] + }, + "_LEGACY_ERROR_TEMP_2000" : { + "message" : [ + ". If necessary set to false to bypass this error." + ] + }, + "_LEGACY_ERROR_TEMP_2003" : { + "message" : [ + "Unsuccessful try to zip maps with unique keys due to exceeding the array size limit ." + ] + }, + "_LEGACY_ERROR_TEMP_2005" : { + "message" : [ + "Type does not support ordered operations." + ] + }, + "_LEGACY_ERROR_TEMP_2011" : { + "message" : [ + "Unexpected data type ." + ] + }, + "_LEGACY_ERROR_TEMP_2013" : { + "message" : [ + "Negative values found in " + ] + }, + "_LEGACY_ERROR_TEMP_2015" : { + "message" : [ + "Cannot generate code for incomparable type: ." + ] + }, + "_LEGACY_ERROR_TEMP_2016" : { + "message" : [ + "Can not interpolate into code block." + ] + }, + "_LEGACY_ERROR_TEMP_2017" : { + "message" : [ + "not resolved." + ] + }, + "_LEGACY_ERROR_TEMP_2018" : { + "message" : [ + "class `` is not supported by `MapObjects` as resulting collection." + ] + }, + "_LEGACY_ERROR_TEMP_2020" : { + "message" : [ + "Couldn't find a valid constructor on ." + ] + }, + "_LEGACY_ERROR_TEMP_2021" : { + "message" : [ + "Couldn't find a primary constructor on ." + ] + }, + "_LEGACY_ERROR_TEMP_2023" : { + "message" : [ + "Unresolved encoder expected, but was found." + ] + }, + "_LEGACY_ERROR_TEMP_2024" : { + "message" : [ + "Only expression encoders are supported for now." + ] + }, + "_LEGACY_ERROR_TEMP_2025" : { + "message" : [ + " must override either or ." + ] + }, + "_LEGACY_ERROR_TEMP_2026" : { + "message" : [ + "Failed to convert value (class of ) with the type of to JSON." + ] + }, + "_LEGACY_ERROR_TEMP_2027" : { + "message" : [ + "Unexpected operator in correlated subquery." + ] + }, + "_LEGACY_ERROR_TEMP_2028" : { + "message" : [ + "This line should be unreachable." + ] + }, + "_LEGACY_ERROR_TEMP_2030" : { + "message" : [ + "Can not handle nested schema yet... plan ." + ] + }, + "_LEGACY_ERROR_TEMP_2031" : { + "message" : [ + "The input external row cannot be null." 
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_2032" : {
+    "message" : [
+      ""
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_2033" : {
+    "message" : [
+      "Unable to create database  as failed to create its directory ."
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_2034" : {
+    "message" : [
+      "Unable to drop database  as failed to delete its directory ."
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_2035" : {
+    "message" : [
+      "Unable to create table as failed to create its directory ."
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_2036" : {
+    "message" : [
+      "Unable to delete partition path ."
+    ]
+  },
+  "_LEGACY_ERROR_TEMP_2037" : {
+    "message" : [
+      "Unable to drop table
as failed to delete its directory ." + ] + }, + "_LEGACY_ERROR_TEMP_2038" : { + "message" : [ + "Unable to rename table to as failed to rename its directory ." + ] + }, + "_LEGACY_ERROR_TEMP_2039" : { + "message" : [ + "Unable to create partition path ." + ] + }, + "_LEGACY_ERROR_TEMP_2040" : { + "message" : [ + "Unable to rename partition path ." + ] + }, + "_LEGACY_ERROR_TEMP_2041" : { + "message" : [ + " is not implemented." + ] + }, + "_LEGACY_ERROR_TEMP_2042" : { + "message" : [ + ". If necessary set to false to bypass this error." + ] + }, + "_LEGACY_ERROR_TEMP_2043" : { + "message" : [ + "- caused overflow." + ] + }, + "_LEGACY_ERROR_TEMP_2045" : { + "message" : [ + "Unsupported table change: " + ] + }, + "_LEGACY_ERROR_TEMP_2046" : { + "message" : [ + "[BUG] Not a DataSourceRDDPartition: ." + ] + }, + "_LEGACY_ERROR_TEMP_2047" : { + "message" : [ + "'path' is not specified." + ] + }, + "_LEGACY_ERROR_TEMP_2048" : { + "message" : [ + "Schema must be specified when creating a streaming source DataFrame. If some files already exist in the directory, then depending on the file format you may be able to create a static DataFrame on that directory with 'spark.read.load(directory)' and infer schema from it." + ] + }, + "_LEGACY_ERROR_TEMP_2049" : { + "message" : [ + "Data source does not support streamed ." + ] + }, + "_LEGACY_ERROR_TEMP_2050" : { + "message" : [ + "Expected exactly one path to be specified, but got: ." + ] + }, + "_LEGACY_ERROR_TEMP_2052" : { + "message" : [ + " was removed in Spark 2.0. Please check if your library is compatible with Spark 2.0." + ] + }, + "_LEGACY_ERROR_TEMP_2053" : { + "message" : [ + "buildReader is not supported for ." + ] + }, + "_LEGACY_ERROR_TEMP_2055" : { + "message" : [ + "", + "It is possible the underlying files have been updated. You can explicitly invalidate the cache in Spark by running 'REFRESH TABLE tableName' command in SQL or by recreating the Dataset/DataFrame involved." + ] + }, + "_LEGACY_ERROR_TEMP_2056" : { + "message" : [ + "Unable to clear output directory prior to writing to it." + ] + }, + "_LEGACY_ERROR_TEMP_2057" : { + "message" : [ + "Unable to clear partition directory prior to writing to it." + ] + }, + "_LEGACY_ERROR_TEMP_2058" : { + "message" : [ + "Failed to cast value `` to `` for partition column ``." + ] + }, + "_LEGACY_ERROR_TEMP_2059" : { + "message" : [ + "End of stream." + ] + }, + "_LEGACY_ERROR_TEMP_2060" : { + "message" : [ + "The fallback v1 relation reports inconsistent schema:", + "Schema of v2 scan: .", + "Schema of v1 relation: ." + ] + }, + "_LEGACY_ERROR_TEMP_2061" : { + "message" : [ + "No records should be returned from EmptyDataReader." + ] + }, + "_LEGACY_ERROR_TEMP_2062" : { + "message" : [ + "", + "It is possible the underlying files have been updated. You can explicitly invalidate the cache in Spark by recreating the Dataset/DataFrame involved." + ] + }, + "_LEGACY_ERROR_TEMP_2063" : { + "message" : [ + "Parquet column cannot be converted in file . Column: , Expected: , Found: ." + ] + }, + "_LEGACY_ERROR_TEMP_2064" : { + "message" : [ + "Encountered error while reading file . Details:" + ] + }, + "_LEGACY_ERROR_TEMP_2065" : { + "message" : [ + "Cannot create columnar reader." + ] + }, + "_LEGACY_ERROR_TEMP_2066" : { + "message" : [ + "Invalid namespace name: ." + ] + }, + "_LEGACY_ERROR_TEMP_2067" : { + "message" : [ + "Unsupported partition transform: ." + ] + }, + "_LEGACY_ERROR_TEMP_2068" : { + "message" : [ + "Missing database location." 
+ ] + }, + "_LEGACY_ERROR_TEMP_2069" : { + "message" : [ + "Cannot remove reserved property: ." + ] + }, + "_LEGACY_ERROR_TEMP_2070" : { + "message" : [ + "Writing job failed." + ] + }, + "_LEGACY_ERROR_TEMP_2071" : { + "message" : [ + "Commit denied for partition (task , attempt , stage .)." + ] + }, + "_LEGACY_ERROR_TEMP_2073" : { + "message" : [ + "Cannot create JDBC table with partition." + ] + }, + "_LEGACY_ERROR_TEMP_2074" : { + "message" : [ + "user-specified schema." + ] + }, + "_LEGACY_ERROR_TEMP_2075" : { + "message" : [ + "Write is not supported for binary file data source." + ] + }, + "_LEGACY_ERROR_TEMP_2076" : { + "message" : [ + "The length of is , which exceeds the max length allowed: ." + ] + }, + "_LEGACY_ERROR_TEMP_2077" : { + "message" : [ + "Unsupported field name: ." + ] + }, + "_LEGACY_ERROR_TEMP_2078" : { + "message" : [ + "Both '' and '' can not be specified at the same time." + ] + }, + "_LEGACY_ERROR_TEMP_2079" : { + "message" : [ + "Option '' or '' is required." + ] + }, + "_LEGACY_ERROR_TEMP_2080" : { + "message" : [ + "Option `` can not be empty." + ] + }, + "_LEGACY_ERROR_TEMP_2081" : { + "message" : [ + "Invalid value `` for parameter ``. This can be `NONE`, `READ_UNCOMMITTED`, `READ_COMMITTED`, `REPEATABLE_READ` or `SERIALIZABLE`." + ] + }, + "_LEGACY_ERROR_TEMP_2082" : { + "message" : [ + "Can't get JDBC type for ." + ] + }, + "_LEGACY_ERROR_TEMP_2083" : { + "message" : [ + "Unsupported type ." + ] + }, + "_LEGACY_ERROR_TEMP_2084" : { + "message" : [ + "Unsupported array element type based on binary." + ] + }, + "_LEGACY_ERROR_TEMP_2085" : { + "message" : [ + "Nested arrays unsupported." + ] + }, + "_LEGACY_ERROR_TEMP_2086" : { + "message" : [ + "Can't translate non-null value for field ." + ] + }, + "_LEGACY_ERROR_TEMP_2087" : { + "message" : [ + "Invalid value `` for parameter `` in table writing via JDBC. The minimum value is 1." + ] + }, + "_LEGACY_ERROR_TEMP_2088" : { + "message" : [ + " is not supported yet." + ] + }, + "_LEGACY_ERROR_TEMP_2089" : { + "message" : [ + "DataType: ." + ] + }, + "_LEGACY_ERROR_TEMP_2090" : { + "message" : [ + "The input filter of should be fully convertible." + ] + }, + "_LEGACY_ERROR_TEMP_2093" : { + "message" : [ + "Found duplicate field(s) \"\": in case-insensitive mode." + ] + }, + "_LEGACY_ERROR_TEMP_2094" : { + "message" : [ + "Found duplicate field(s) \"\": in id mapping mode." + ] + }, + "_LEGACY_ERROR_TEMP_2095" : { + "message" : [ + "Failed to merge incompatible schemas and ." + ] + }, + "_LEGACY_ERROR_TEMP_2096" : { + "message" : [ + " is not supported temporarily." + ] + }, + "_LEGACY_ERROR_TEMP_2097" : { + "message" : [ + "Could not execute broadcast in secs. You can increase the timeout for broadcasts via or disable broadcast join by setting to -1." + ] + }, + "_LEGACY_ERROR_TEMP_2098" : { + "message" : [ + "Could not compare cost with ." + ] + }, + "_LEGACY_ERROR_TEMP_2100" : { + "message" : [ + "not support type: ." + ] + }, + "_LEGACY_ERROR_TEMP_2101" : { + "message" : [ + "Not support non-primitive type now." + ] + }, + "_LEGACY_ERROR_TEMP_2102" : { + "message" : [ + "Unsupported type: ." + ] + }, + "_LEGACY_ERROR_TEMP_2103" : { + "message" : [ + "Dictionary encoding should not be used because of dictionary overflow." + ] + }, + "_LEGACY_ERROR_TEMP_2104" : { + "message" : [ + "End of the iterator." + ] + }, + "_LEGACY_ERROR_TEMP_2105" : { + "message" : [ + "Could not allocate memory to grow BytesToBytesMap." 
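The broadcast-timeout template above (_LEGACY_ERROR_TEMP_2097) has both of its config placeholders stripped; assuming they refer to the standard `spark.sql.broadcastTimeout` and `spark.sql.autoBroadcastJoinThreshold` keys, a minimal sketch of the two mitigations it describes:

```scala
import org.apache.spark.sql.SparkSession

object BroadcastTimeoutSketch extends App {
  val spark = SparkSession.builder().master("local[*]").appName("broadcast-timeout").getOrCreate()

  // Raise the broadcast timeout (in seconds)...
  spark.conf.set("spark.sql.broadcastTimeout", "600")

  // ...or disable automatic broadcast joins altogether.
  spark.conf.set("spark.sql.autoBroadcastJoinThreshold", "-1")
}
```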
+ ] + }, + "_LEGACY_ERROR_TEMP_2106" : { + "message" : [ + "Can't acquire bytes memory to build hash relation, got bytes." + ] + }, + "_LEGACY_ERROR_TEMP_2107" : { + "message" : [ + "There is not enough memory to build hash map." + ] + }, + "_LEGACY_ERROR_TEMP_2108" : { + "message" : [ + "Does not support row that is larger than 256M." + ] + }, + "_LEGACY_ERROR_TEMP_2109" : { + "message" : [ + "Cannot build HashedRelation with more than 1/3 billions unique keys." + ] + }, + "_LEGACY_ERROR_TEMP_2110" : { + "message" : [ + "Can not build a HashedRelation that is larger than 8G." + ] + }, + "_LEGACY_ERROR_TEMP_2111" : { + "message" : [ + "failed to push a row into ." + ] + }, + "_LEGACY_ERROR_TEMP_2112" : { + "message" : [ + "Unexpected window function frame ." + ] + }, + "_LEGACY_ERROR_TEMP_2113" : { + "message" : [ + "Unable to parse as a percentile." + ] + }, + "_LEGACY_ERROR_TEMP_2114" : { + "message" : [ + " is not a recognised statistic." + ] + }, + "_LEGACY_ERROR_TEMP_2115" : { + "message" : [ + "Unknown column: ." + ] + }, + "_LEGACY_ERROR_TEMP_2116" : { + "message" : [ + "Unexpected: ." + ] + }, + "_LEGACY_ERROR_TEMP_2120" : { + "message" : [ + "Do not support array of type ." + ] + }, + "_LEGACY_ERROR_TEMP_2121" : { + "message" : [ + "Do not support type ." + ] + }, + "_LEGACY_ERROR_TEMP_2124" : { + "message" : [ + "Failed to merge decimal types with incompatible scale and ." + ] + }, + "_LEGACY_ERROR_TEMP_2126" : { + "message" : [ + "Unsuccessful attempt to build maps with elements due to exceeding the map size limit ." + ] + }, + "_LEGACY_ERROR_TEMP_2128" : { + "message" : [ + "The key array and value array of MapData must have the same length." + ] + }, + "_LEGACY_ERROR_TEMP_2129" : { + "message" : [ + "Conflict found: Field differs from derived from ." + ] + }, + "_LEGACY_ERROR_TEMP_2130" : { + "message" : [ + "Fail to recognize '' pattern in the DateTimeFormatter. You can form a valid datetime pattern with the guide from '/sql-ref-datetime-pattern.html'." + ] + }, + "_LEGACY_ERROR_TEMP_2131" : { + "message" : [ + "Exception when registering StreamingQueryListener." + ] + }, + "_LEGACY_ERROR_TEMP_2133" : { + "message" : [ + "Cannot parse field name , field value , [] as target spark data type []." + ] + }, + "_LEGACY_ERROR_TEMP_2134" : { + "message" : [ + "Cannot parse field value for pattern as target spark data type []." + ] + }, + "_LEGACY_ERROR_TEMP_2138" : { + "message" : [ + "Cannot have circular references in bean class, but got the circular reference of class ." + ] + }, + "_LEGACY_ERROR_TEMP_2139" : { + "message" : [ + "cannot have circular references in class, but got the circular reference of class ." + ] + }, + "_LEGACY_ERROR_TEMP_2140" : { + "message" : [ + "`` is not a valid identifier of Java and cannot be used as field name", + "." + ] + }, + "_LEGACY_ERROR_TEMP_2142" : { + "message" : [ + "Attributes for type is not supported." + ] + }, + "_LEGACY_ERROR_TEMP_2144" : { + "message" : [ + "Unable to find constructor for . This could happen if is an interface, or a trait without companion object constructor." + ] + }, + "_LEGACY_ERROR_TEMP_2145" : { + "message" : [ + " cannot be more than one character." + ] + }, + "_LEGACY_ERROR_TEMP_2146" : { + "message" : [ + " should be an integer. Found ." + ] + }, + "_LEGACY_ERROR_TEMP_2147" : { + "message" : [ + " flag can be true or false." + ] + }, + "_LEGACY_ERROR_TEMP_2148" : { + "message" : [ + "null value found but field is not nullable." 
+ ] + }, + "_LEGACY_ERROR_TEMP_2150" : { + "message" : [ + "Due to Scala's limited support of tuple, tuple with more than 22 elements are not supported." + ] + }, + "_LEGACY_ERROR_TEMP_2151" : { + "message" : [ + "Error while decoding: ", + "." + ] + }, + "_LEGACY_ERROR_TEMP_2152" : { + "message" : [ + "Error while encoding: ", + "." + ] + }, + "_LEGACY_ERROR_TEMP_2153" : { + "message" : [ + "class has unexpected serializer: ." + ] + }, + "_LEGACY_ERROR_TEMP_2154" : { + "message" : [ + "Failed to get outer pointer for ." + ] + }, + "_LEGACY_ERROR_TEMP_2155" : { + "message" : [ + " is not annotated with SQLUserDefinedType nor registered with UDTRegistration.}" + ] + }, + "_LEGACY_ERROR_TEMP_2156" : { + "message" : [ + "The size function doesn't support the operand type ." + ] + }, + "_LEGACY_ERROR_TEMP_2157" : { + "message" : [ + "Unexpected value for start in function : SQL array indices start at 1." + ] + }, + "_LEGACY_ERROR_TEMP_2158" : { + "message" : [ + "Unexpected value for length in function : length must be greater than or equal to 0." + ] + }, + "_LEGACY_ERROR_TEMP_2159" : { + "message" : [ + "Unsuccessful try to concat arrays with elements due to exceeding the array size limit ." + ] + }, + "_LEGACY_ERROR_TEMP_2160" : { + "message" : [ + "Unsuccessful try to flatten an array of arrays with elements due to exceeding the array size limit ." + ] + }, + "_LEGACY_ERROR_TEMP_2161" : { + "message" : [ + "Unsuccessful try to create array with elements due to exceeding the array size limit ." + ] + }, + "_LEGACY_ERROR_TEMP_2162" : { + "message" : [ + "Unsuccessful try to union arrays with elements due to exceeding the array size limit ." + ] + }, + "_LEGACY_ERROR_TEMP_2163" : { + "message" : [ + "Initial type must be a ." + ] + }, + "_LEGACY_ERROR_TEMP_2164" : { + "message" : [ + "Initial type must be an , a or a ." + ] + }, + "_LEGACY_ERROR_TEMP_2165" : { + "message" : [ + "Malformed records are detected in schema inference. Parse Mode: ." + ] + }, + "_LEGACY_ERROR_TEMP_2166" : { + "message" : [ + "Malformed JSON." + ] + }, + "_LEGACY_ERROR_TEMP_2167" : { + "message" : [ + "Malformed records are detected in schema inference. Parse Mode: . Reasons: Failed to infer a common schema. Struct types are expected, but `` was found." + ] + }, + "_LEGACY_ERROR_TEMP_2168" : { + "message" : [ + "Decorrelate inner query through is not supported." + ] + }, + "_LEGACY_ERROR_TEMP_2169" : { + "message" : [ + "This method should not be called in the analyzer." + ] + }, + "_LEGACY_ERROR_TEMP_2170" : { + "message" : [ + "Cannot safely merge SERDEPROPERTIES:", + "", + "", + "The conflict keys: ." + ] + }, + "_LEGACY_ERROR_TEMP_2171" : { + "message" : [ + "Not supported pair: , at ()." + ] + }, + "_LEGACY_ERROR_TEMP_2172" : { + "message" : [ + "Once strategy's idempotence is broken for batch ", + "." + ] + }, + "_LEGACY_ERROR_TEMP_2175" : { + "message" : [ + "Rule id not found for . Please modify RuleIdCollection.scala if you are adding a new rule." + ] + }, + "_LEGACY_ERROR_TEMP_2176" : { + "message" : [ + "Cannot create array with elements of data due to exceeding the limit elements for ArrayData. " + ] + }, + "_LEGACY_ERROR_TEMP_2178" : { + "message" : [ + "Remote operations not supported." + ] + }, + "_LEGACY_ERROR_TEMP_2179" : { + "message" : [ + "HiveServer2 Kerberos principal or keytab is not correctly configured." + ] + }, + "_LEGACY_ERROR_TEMP_2180" : { + "message" : [ + "Parent SparkUI to attach this tab to not found." 
+ ] + }, + "_LEGACY_ERROR_TEMP_2181" : { + "message" : [ + "inferSchema is not supported for hive data source." + ] + }, + "_LEGACY_ERROR_TEMP_2182" : { + "message" : [ + "Requested partitioning does not match the table:", + "Requested partitions: .", + "Table partitions: ." + ] + }, + "_LEGACY_ERROR_TEMP_2183" : { + "message" : [ + "Dynamic partition key is not among written partition paths." + ] + }, + "_LEGACY_ERROR_TEMP_2184" : { + "message" : [ + "Cannot remove partition directory ''." + ] + }, + "_LEGACY_ERROR_TEMP_2185" : { + "message" : [ + "Cannot create staging directory: " + ] + }, + "_LEGACY_ERROR_TEMP_2186" : { + "message" : [ + "The SerDe interface removed since Hive 2.3(HIVE-15167). Please migrate your custom SerDes to Hive 2.3. See HIVE-15167 for more details." + ] + }, + "_LEGACY_ERROR_TEMP_2187" : { + "message" : [ + ", db: , table: ." + ] + }, + "_LEGACY_ERROR_TEMP_2189" : { + "message" : [ + "Hive 2.2 and lower versions don't support getTablesByType. Please use Hive 2.3 or higher version." + ] + }, + "_LEGACY_ERROR_TEMP_2190" : { + "message" : [ + "DROP TABLE ... PURGE." + ] + }, + "_LEGACY_ERROR_TEMP_2191" : { + "message" : [ + "ALTER TABLE ... DROP PARTITION ... PURGE." + ] + }, + "_LEGACY_ERROR_TEMP_2192" : { + "message" : [ + "Partition filter cannot have both `\"` and `'` characters." + ] + }, + "_LEGACY_ERROR_TEMP_2193" : { + "message" : [ + "Caught Hive MetaException attempting to get partition metadata by filter from Hive. You can set the Spark configuration setting to true to work around this problem, however this will result in degraded performance. Please report a bug: https://issues.apache.org/jira/browse/SPARK." + ] + }, + "_LEGACY_ERROR_TEMP_2194" : { + "message" : [ + "Unsupported Hive Metastore version . Please set with a valid version." + ] + }, + "_LEGACY_ERROR_TEMP_2195" : { + "message" : [ + " when creating Hive client using classpath: Please make sure that jars for your version of hive and hadoop are included in the paths passed to ." + ] + }, + "_LEGACY_ERROR_TEMP_2196" : { + "message" : [ + "Unable to fetch tables of db ." + ] + }, + "_LEGACY_ERROR_TEMP_2197" : { + "message" : [ + "LOCATION clause illegal for view partition." + ] + }, + "_LEGACY_ERROR_TEMP_2198" : { + "message" : [ + "Failed to rename as already exists." + ] + }, + "_LEGACY_ERROR_TEMP_2200" : { + "message" : [ + "Error: we detected a possible problem with the location of your \"_spark_metadata\"", + "directory and you likely need to move it before restarting this query.", + "", + "Earlier version of Spark incorrectly escaped paths when writing out the", + "\"_spark_metadata\" directory for structured streaming. While this was corrected in", + "Spark 3.0, it appears that your query was started using an earlier version that", + "", + "Correct \"_spark_metadata\" Directory: ", + "Incorrect \"_spark_metadata\" Directory: ", + "", + "Please move the data from the incorrect directory to the correct one, delete the", + "incorrect directory, and then restart this query. If you believe you are receiving", + "this message in error, you can disable it with the SQL conf", + "." + ] + }, + "_LEGACY_ERROR_TEMP_2201" : { + "message" : [ + "Partition column not found in schema ." + ] + }, + "_LEGACY_ERROR_TEMP_2203" : { + "message" : [ + "Cannot set timeout duration without enabling processing time timeout in [map|flatMap]GroupsWithState." 
+ ]
+ },
+ "_LEGACY_ERROR_TEMP_2204" : {
+ "message" : [
+ "Cannot get event time watermark timestamp without setting watermark before [map|flatMap]GroupsWithState."
+ ]
+ },
+ "_LEGACY_ERROR_TEMP_2205" : {
+ "message" : [
+ "Cannot set timeout timestamp without enabling event time timeout in [map|flatMap]GroupsWithState."
+ ]
+ },
+ "_LEGACY_ERROR_TEMP_2207" : {
+ "message" : [
+ "Multiple streaming queries are concurrently using ."
+ ]
+ },
+ "_LEGACY_ERROR_TEMP_2208" : {
+ "message" : [
+ " does not support adding files with an absolute path."
+ ]
+ },
+ "_LEGACY_ERROR_TEMP_2209" : {
+ "message" : [
+ "Data source does not support microbatch processing.",
+ "",
+ "Either the data source is disabled at",
+ "SQLConf.get.DISABLED_V2_STREAMING_MICROBATCH_READERS.key (The disabled sources",
+ "are []) or the table
does not have MICRO_BATCH_READ", + "capability. Meanwhile, the fallback, data source v1, is not available.\"" + ] + }, + "_LEGACY_ERROR_TEMP_2210" : { + "message" : [ + "StreamingRelationExec cannot be executed." + ] + }, + "_LEGACY_ERROR_TEMP_2211" : { + "message" : [ + "Invalid output mode: ." + ] + }, + "_LEGACY_ERROR_TEMP_2212" : { + "message" : [ + "Invalid catalog name: ." + ] + }, + "_LEGACY_ERROR_TEMP_2214" : { + "message" : [ + "Plugin class for catalog '' does not implement CatalogPlugin: ." + ] + }, + "_LEGACY_ERROR_TEMP_2215" : { + "message" : [ + "Cannot find catalog plugin class for catalog '': ." + ] + }, + "_LEGACY_ERROR_TEMP_2216" : { + "message" : [ + "Failed to find public no-arg constructor for catalog '': )." + ] + }, + "_LEGACY_ERROR_TEMP_2217" : { + "message" : [ + "Failed to call public no-arg constructor for catalog '': )." + ] + }, + "_LEGACY_ERROR_TEMP_2218" : { + "message" : [ + "Cannot instantiate abstract catalog plugin class for catalog '': ." + ] + }, + "_LEGACY_ERROR_TEMP_2219" : { + "message" : [ + "Failed during instantiating constructor for catalog '': ." + ] + }, + "_LEGACY_ERROR_TEMP_2220" : { + "message" : [ + "" + ] + }, + "_LEGACY_ERROR_TEMP_2222" : { + "message" : [ + "Cannot mutate ReadOnlySQLConf." + ] + }, + "_LEGACY_ERROR_TEMP_2223" : { + "message" : [ + "Cannot clone/copy ReadOnlySQLConf." + ] + }, + "_LEGACY_ERROR_TEMP_2224" : { + "message" : [ + "Cannot get SQLConf inside scheduler event loop thread." + ] + }, + "_LEGACY_ERROR_TEMP_2225" : { + "message" : [ + "" + ] + }, + "_LEGACY_ERROR_TEMP_2226" : { + "message" : [ + "null literals can't be casted to ." + ] + }, + "_LEGACY_ERROR_TEMP_2227" : { + "message" : [ + " is not an UserDefinedType. Please make sure registering an UserDefinedType for ." + ] + }, + "_LEGACY_ERROR_TEMP_2228" : { + "message" : [ + "Can not load in UserDefinedType for user class ." + ] + }, + "_LEGACY_ERROR_TEMP_2229" : { + "message" : [ + " is not a public class. Only public classes are supported." + ] + }, + "_LEGACY_ERROR_TEMP_2230" : { + "message" : [ + "Primitive types are not supported." + ] + }, + "_LEGACY_ERROR_TEMP_2231" : { + "message" : [ + "fieldIndex on a Row without schema is undefined." + ] + }, + "_LEGACY_ERROR_TEMP_2232" : { + "message" : [ + "Value at index is null." + ] + }, + "_LEGACY_ERROR_TEMP_2233" : { + "message" : [ + "Only Data Sources providing FileFormat are supported: ." + ] + }, + "_LEGACY_ERROR_TEMP_2234" : { + "message" : [ + "Failed to set original ACL back to the created path: . Exception: " + ] + }, + "_LEGACY_ERROR_TEMP_2235" : { + "message" : [ + "Multiple failures in stage materialization." + ] + }, + "_LEGACY_ERROR_TEMP_2236" : { + "message" : [ + "Unrecognized compression scheme type ID: ." + ] + }, + "_LEGACY_ERROR_TEMP_2237" : { + "message" : [ + ".getParentLogger is not yet implemented." + ] + }, + "_LEGACY_ERROR_TEMP_2238" : { + "message" : [ + "Unable to create Parquet converter for whose Parquet type is without decimal metadata. Please read this column/field as Spark BINARY type." + ] + }, + "_LEGACY_ERROR_TEMP_2239" : { + "message" : [ + "Unable to create Parquet converter for decimal type whose Parquet type is . Parquet DECIMAL type can only be backed by INT32, INT64, FIXED_LEN_BYTE_ARRAY, or BINARY." + ] + }, + "_LEGACY_ERROR_TEMP_2240" : { + "message" : [ + "Unable to create Parquet converter for data type whose Parquet type is ." + ] + }, + "_LEGACY_ERROR_TEMP_2241" : { + "message" : [ + "Nonatomic partition table can not add multiple partitions." 
+ ]
+ },
+ "_LEGACY_ERROR_TEMP_2242" : {
+ "message" : [
+ " source does not support user-specified schema."
+ ]
+ },
+ "_LEGACY_ERROR_TEMP_2243" : {
+ "message" : [
+ "Nonatomic partition table can not drop multiple partitions."
+ ]
+ },
+ "_LEGACY_ERROR_TEMP_2244" : {
+ "message" : [
+ "The table does not support truncation of multiple partitions."
+ ]
+ },
+ "_LEGACY_ERROR_TEMP_2245" : {
+ "message" : [
+ "Table does not support overwrite by expression:",
+ "."
+ ]
+ },
+ "_LEGACY_ERROR_TEMP_2246" : {
+ "message" : [
+ "Table does not support dynamic partition overwrite:
." + ] + }, + "_LEGACY_ERROR_TEMP_2248" : { + "message" : [ + "Cannot broadcast the table over rows: rows." + ] + }, + "_LEGACY_ERROR_TEMP_2249" : { + "message" : [ + "Cannot broadcast the table that is larger than : ." + ] + }, + "_LEGACY_ERROR_TEMP_2250" : { + "message" : [ + "Not enough memory to build and broadcast the table to all worker nodes. As a workaround, you can either disable broadcast by setting to -1 or increase the spark driver memory by setting to a higher value" + ] + }, + "_LEGACY_ERROR_TEMP_2251" : { + "message" : [ + " does not support the execute() code path." + ] + }, + "_LEGACY_ERROR_TEMP_2252" : { + "message" : [ + "Cannot merge with ." + ] + }, + "_LEGACY_ERROR_TEMP_2253" : { + "message" : [ + "Data source does not support continuous processing." + ] + }, + "_LEGACY_ERROR_TEMP_2254" : { + "message" : [ + "Data read failed." + ] + }, + "_LEGACY_ERROR_TEMP_2255" : { + "message" : [ + "Epoch marker generation failed." + ] + }, + "_LEGACY_ERROR_TEMP_2256" : { + "message" : [ + "Foreach writer has been aborted due to a task failure." + ] + }, + "_LEGACY_ERROR_TEMP_2258" : { + "message" : [ + "Error reading delta file of : key size cannot be ." + ] + }, + "_LEGACY_ERROR_TEMP_2259" : { + "message" : [ + "Error reading snapshot file of : " + ] + }, + "_LEGACY_ERROR_TEMP_2260" : { + "message" : [ + "Cannot purge as it might break internal state." + ] + }, + "_LEGACY_ERROR_TEMP_2261" : { + "message" : [ + "Clean up source files is not supported when reading from the output directory of FileStreamSink." + ] + }, + "_LEGACY_ERROR_TEMP_2262" : { + "message" : [ + "latestOffset(Offset, ReadLimit) should be called instead of this method." + ] + }, + "_LEGACY_ERROR_TEMP_2263" : { + "message" : [ + "Error: we detected a possible problem with the location of your checkpoint and you", + "likely need to move it before restarting this query.", + "", + "Earlier version of Spark incorrectly escaped paths when writing out checkpoints for", + "structured streaming. While this was corrected in Spark 3.0, it appears that your", + "query was started using an earlier version that incorrectly handled the checkpoint", + "path.", + "", + "Correct Checkpoint Directory: ", + "Incorrect Checkpoint Directory: ", + "", + "Please move the data from the incorrect directory to the correct one, delete the", + "incorrect directory, and then restart this query. If you believe you are receiving", + "this message in error, you can disable it with the SQL conf", + "." + ] + }, + "_LEGACY_ERROR_TEMP_2264" : { + "message" : [ + "Subprocess exited with status . Error: ." + ] + }, + "_LEGACY_ERROR_TEMP_2265" : { + "message" : [ + " without serde does not support
as output data type." + ] + }, + "_LEGACY_ERROR_TEMP_2266" : { + "message" : [ + "Invalid `startIndex` provided for generating iterator over the array. Total elements: , requested `startIndex`: ." + ] + }, + "_LEGACY_ERROR_TEMP_2267" : { + "message" : [ + "The backing has been modified since the creation of this Iterator." + ] + }, + "_LEGACY_ERROR_TEMP_2268" : { + "message" : [ + " does not implement doExecuteBroadcast." + ] + }, + "_LEGACY_ERROR_TEMP_2269" : { + "message" : [ + " is a system preserved database, please rename your existing database to resolve the name conflict, or set a different value for , and launch your Spark application again." + ] + }, + "_LEGACY_ERROR_TEMP_2270" : { + "message" : [ + "comment on table is not supported." + ] + }, + "_LEGACY_ERROR_TEMP_2271" : { + "message" : [ + "UpdateColumnNullability is not supported." + ] + }, + "_LEGACY_ERROR_TEMP_2272" : { + "message" : [ + "Rename column is only supported for MySQL version 8.0 and above." + ] + }, + "_LEGACY_ERROR_TEMP_2273" : { + "message" : [ + "" + ] + }, + "_LEGACY_ERROR_TEMP_2277" : { + "message" : [ + "Number of dynamic partitions created is , which is more than . To solve this try to set to at least ." + ] + }, + "_LEGACY_ERROR_TEMP_2330" : { + "message" : [ + "Cannot change nullable column to non-nullable: ." + ] + }, + "_LEGACY_ERROR_TEMP_2446" : { + "message" : [ + "Operation not allowed: only works on table with location provided: " + ] + }, + "_LEGACY_ERROR_TEMP_3000" : { + "message" : [ + "Unexpected Py4J server ." + ] + }, + "_LEGACY_ERROR_TEMP_3001" : { + "message" : [ + "EOFException occurred while reading the port number from 's stdout." + ] + }, + "_LEGACY_ERROR_TEMP_3002" : { + "message" : [ + "Data of type is not supported" + ] + }, + "_LEGACY_ERROR_TEMP_3003" : { + "message" : [ + "Could not compute split, block of RDD not found" + ] + }, + "_LEGACY_ERROR_TEMP_3004" : { + "message" : [ + "Attempted to use after its blocks have been removed!" + ] + }, + "_LEGACY_ERROR_TEMP_3005" : { + "message" : [ + "Histogram on either an empty RDD or RDD containing +/-infinity or NaN" + ] + }, + "_LEGACY_ERROR_TEMP_3006" : { + "message" : [ + "empty RDD" + ] + }, + "_LEGACY_ERROR_TEMP_3007" : { + "message" : [ + "Checkpoint block not found! Either the executor", + "that originally checkpointed this partition is no longer alive, or the original RDD is", + "unpersisted. If this problem persists, you may consider using `rdd.checkpoint()`", + "instead, which is slower than local checkpointing but more fault-tolerant." + ] + }, + "_LEGACY_ERROR_TEMP_3008" : { + "message" : [ + "Cannot use map-side combining with array keys." + ] + }, + "_LEGACY_ERROR_TEMP_3009" : { + "message" : [ + "HashPartitioner cannot partition array keys." + ] + }, + "_LEGACY_ERROR_TEMP_3010" : { + "message" : [ + "reduceByKeyLocally() does not support array keys" + ] + }, + "_LEGACY_ERROR_TEMP_3011" : { + "message" : [ + "This RDD lacks a SparkContext. It could happen in the following cases:", + "(1) RDD transformations and actions are NOT invoked by the driver, but inside of other transformations; for example, rdd1.map(x => rdd2.values.count() * x) is invalid because the values transformation and count action cannot be performed inside of the rdd1.map transformation. For more information, see SPARK-5063.", + "(2) When a Spark Streaming job recovers from checkpoint, this exception will be hit if a reference to an RDD not defined by the streaming job is used in DStream operations. For more information, See SPARK-13758." 
+ ] + }, + "_LEGACY_ERROR_TEMP_3012" : { + "message" : [ + "Cannot change storage level of an RDD after it was already assigned a level" + ] + }, + "_LEGACY_ERROR_TEMP_3013" : { + "message" : [ + "Can only zip RDDs with same number of elements in each partition" + ] + }, + "_LEGACY_ERROR_TEMP_3014" : { + "message" : [ + "empty collection" + ] + }, + "_LEGACY_ERROR_TEMP_3015" : { + "message" : [ + "countByValueApprox() does not support arrays" + ] + }, + "_LEGACY_ERROR_TEMP_3016" : { + "message" : [ + "Checkpoint directory has not been set in the SparkContext" + ] + }, + "_LEGACY_ERROR_TEMP_3017" : { + "message" : [ + "Invalid checkpoint file: " + ] + }, + "_LEGACY_ERROR_TEMP_3018" : { + "message" : [ + "Failed to create checkpoint path " + ] + }, + "_LEGACY_ERROR_TEMP_3019" : { + "message" : [ + "Checkpoint RDD has a different number of partitions from original RDD. Original", + "RDD [ID: , num of partitions: ];", + "Checkpoint RDD [ID: , num of partitions: ]." + ] + }, + "_LEGACY_ERROR_TEMP_3020" : { + "message" : [ + "Checkpoint dir must be specified." + ] + }, + "_LEGACY_ERROR_TEMP_3021" : { + "message" : [ + "Error asking standalone scheduler to shut down executors" + ] + }, + "_LEGACY_ERROR_TEMP_3022" : { + "message" : [ + "Error stopping standalone scheduler's driver endpoint" + ] + }, + "_LEGACY_ERROR_TEMP_3023" : { + "message" : [ + "Can't run submitMapStage on RDD with 0 partitions" + ] + }, + "_LEGACY_ERROR_TEMP_3024" : { + "message" : [ + "attempted to access non-existent accumulator " + ] + }, + "_LEGACY_ERROR_TEMP_3025" : { + "message" : [ + "TaskSetManagers should only send Resubmitted task statuses for tasks in ShuffleMapStages." + ] + }, + "_LEGACY_ERROR_TEMP_3026" : { + "message" : [ + "duration() called on unfinished task" + ] + }, + "_LEGACY_ERROR_TEMP_3027" : { + "message" : [ + "Unrecognized : " + ] + }, + "_LEGACY_ERROR_TEMP_3028" : { + "message" : [ + "" + ] + }, + "_LEGACY_ERROR_TEMP_3029" : { + "message" : [ + "Exiting due to error from cluster scheduler: " + ] + }, + "_LEGACY_ERROR_TEMP_3030" : { + "message" : [ + "Task has not locked block for writing" + ] + }, + "_LEGACY_ERROR_TEMP_3031" : { + "message" : [ + "Block does not exist" + ] + }, + "_LEGACY_ERROR_TEMP_3032" : { + "message" : [ + "Error occurred while waiting for replication to finish" + ] + }, + "_LEGACY_ERROR_TEMP_3033" : { + "message" : [ + "Unable to register with external shuffle server due to : " + ] + }, + "_LEGACY_ERROR_TEMP_3034" : { + "message" : [ + "Error occurred while waiting for async. reregistration" + ] + }, + "_LEGACY_ERROR_TEMP_3035" : { + "message" : [ + "Unexpected shuffle block with unsupported shuffle resolver " + ] + }, + "_LEGACY_ERROR_TEMP_3036" : { + "message" : [ + "Failure while trying to store block on ." + ] + }, + "_LEGACY_ERROR_TEMP_3037" : { + "message" : [ + "Block was not found even though it's read-locked" + ] + }, + "_LEGACY_ERROR_TEMP_3038" : { + "message" : [ + "get() failed for block even though we held a lock" + ] + }, + "_LEGACY_ERROR_TEMP_3039" : { + "message" : [ + "BlockManager returned null for BlockStatus query: " + ] + }, + "_LEGACY_ERROR_TEMP_3040" : { + "message" : [ + "BlockManagerMasterEndpoint returned false, expected true." 
+ ] + }, + "_LEGACY_ERROR_TEMP_3041" : { + "message" : [ + "" + ] + }, + "_LEGACY_ERROR_TEMP_3042" : { + "message" : [ + "Failed to get block , which is not a shuffle block" + ] + } +} diff --git a/docs/sql-error-conditions-duplicate-routine-parameter-assignment-error-class.md b/docs/sql-error-conditions-duplicate-routine-parameter-assignment-error-class.md new file mode 100644 index 0000000000000..d9f14b5a55ef8 --- /dev/null +++ b/docs/sql-error-conditions-duplicate-routine-parameter-assignment-error-class.md @@ -0,0 +1,36 @@ +--- +layout: global +title: DUPLICATE_ROUTINE_PARAMETER_ASSIGNMENT error class +displayTitle: DUPLICATE_ROUTINE_PARAMETER_ASSIGNMENT error class +license: | + Licensed to the Apache Software Foundation (ASF) under one or more + contributor license agreements. See the NOTICE file distributed with + this work for additional information regarding copyright ownership. + The ASF licenses this file to You under the Apache License, Version 2.0 + (the "License"); you may not use this file except in compliance with + the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +--- + +[SQLSTATE: 4274K](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +Call to function `` is invalid because it includes multiple argument assignments to the same parameter name ``. + +This error class has the following derived error classes: + +## BOTH_POSITIONAL_AND_NAMED + +A positional argument and named argument both referred to the same parameter. + +## DOUBLE_NAMED_ARGUMENT_REFERENCE + +More than one named argument referred to the same parameter. + + diff --git a/docs/sql-error-conditions.md b/docs/sql-error-conditions.md index 91b77a6452bc5..5686324a0558b 100644 --- a/docs/sql-error-conditions.md +++ b/docs/sql-error-conditions.md @@ -456,6 +456,14 @@ Found duplicate clauses: ``. Please, remove one of them. Found duplicate keys ``. +### [DUPLICATE_ROUTINE_PARAMETER_ASSIGNMENT](sql-error-conditions-duplicate-routine-parameter-assignment-error-class.html) + +[SQLSTATE: 4274K](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +Call to function `` is invalid because it includes multiple argument assignments to the same parameter name ``. + +For more details see [DUPLICATE_ROUTINE_PARAMETER_ASSIGNMENT](sql-error-conditions-duplicate-routine-parameter-assignment-error-class.html) + ### EMPTY_JSON_FIELD_VALUE [SQLSTATE: 42604](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) @@ -1210,7 +1218,13 @@ SQLSTATE: none assigned Not allowed to implement multiple UDF interfaces, UDF class ``. -### NAMED_ARGUMENTS_SUPPORT_DISABLED +### NAMED_PARAMETERS_NOT_SUPPORTED + +[SQLSTATE: 4274K](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +Named parameters are not supported for function ``; please retry the query with positional arguments to the function call instead. + +### NAMED_PARAMETER_SUPPORT_DISABLED SQLSTATE: none assigned @@ -1521,6 +1535,12 @@ Failed to rename as `` was not found. The `` clause may be used at most once per `` operation. 
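To make the named-argument error conditions in this patch concrete (the DUPLICATE_ROUTINE_PARAMETER_ASSIGNMENT subclasses and NAMED_PARAMETERS_NOT_SUPPORTED documented above, plus REQUIRED_PARAMETER_NOT_FOUND, UNEXPECTED_POSITIONAL_ARGUMENT, and UNRECOGNIZED_PARAMETER_NAME just below), here is an illustrative sketch of calls that should raise them. It uses the built-in `mask` function, whose named-argument support this patch wires up in `maskExpressions.scala`; the parameter names `upperChar`, `lowerChar`, and `digitChar` are taken from that function's documented signature, and the queries are a sketch rather than excerpts from the test suite (`maskChar` in particular is a deliberately nonexistent name).

```sql
-- Two named arguments bind the same parameter:
-- DUPLICATE_ROUTINE_PARAMETER_ASSIGNMENT.DOUBLE_NAMED_ARGUMENT_REFERENCE.
SELECT mask('AbCD123-@$#', lowerChar => 'q', lowerChar => 'w');

-- The second positional argument already binds upperChar, so the named
-- reference collides with it:
-- DUPLICATE_ROUTINE_PARAMETER_ASSIGNMENT.BOTH_POSITIONAL_AND_NAMED.
SELECT mask('AbCD123-@$#', 'Q', upperChar => 'Q');

-- A positional argument follows a named one:
-- UNEXPECTED_POSITIONAL_ARGUMENT.
SELECT mask(lowerChar => 'q', 'AbCD123-@$#');

-- maskChar is a hypothetical parameter name that no signature of mask
-- declares: UNRECOGNIZED_PARAMETER_NAME, with a list of close matches.
SELECT mask('AbCD123-@$#', maskChar => 'q');

-- A valid call for contrast: positional arguments first, then named
-- arguments in any order, each parameter assigned exactly once.
SELECT mask('AbCD123-@$#', lowerChar => 'q', upperChar => 'Q', digitChar => 'd');
```

Note that when `spark.sql.allowNamedFunctionArguments` is disabled, the `=>` syntax itself fails with NAMED_PARAMETER_SUPPORT_DISABLED before any of the checks above apply.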
+### REQUIRED_PARAMETER_NOT_FOUND + +[SQLSTATE: 4274K](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +Cannot invoke function `` because the parameter named `` is required, but the function call did not supply a value. Please update the function call to supply an argument value (either positionally or by name) and retry the query again. + ### REQUIRES_SINGLE_PART_NAMESPACE [SQLSTATE: 42K05](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) @@ -1724,6 +1744,12 @@ Found an unclosed bracketed comment. Please, append */ at the end of the comment Parameter `` of function `` requires the `` type, however `` has the type ``. +### UNEXPECTED_POSITIONAL_ARGUMENT + +[SQLSTATE: 4274K](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +Cannot invoke function `` because it contains positional argument(s) following named argument(s); please rearrange them so the positional arguments come first and then retry the query again. + ### UNKNOWN_PROTOBUF_MESSAGE_TYPE SQLSTATE: none assigned @@ -1754,6 +1780,12 @@ Unpivot value columns must share a least common type, some types do not: [``). +### UNRECOGNIZED_PARAMETER_NAME + +[SQLSTATE: 4274K](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +Cannot invoke function `` because the function call included a named argument reference for the argument named ``, but this function does not include any signature containing an argument with this name. Did you mean one of the following? [``]. + ### UNRECOGNIZED_SQL_TYPE [SQLSTATE: 42704](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) diff --git a/docs/sql-error-conditions.md.orig b/docs/sql-error-conditions.md.orig new file mode 100644 index 0000000000000..91b77a6452bc5 --- /dev/null +++ b/docs/sql-error-conditions.md.orig @@ -0,0 +1,2020 @@ +--- +layout: global +title: Error Conditions +displayTitle: Error Conditions +license: | + Licensed to the Apache Software Foundation (ASF) under one or more + contributor license agreements. See the NOTICE file distributed with + this work for additional information regarding copyright ownership. + The ASF licenses this file to You under the Apache License, Version 2.0 + (the "License"); you may not use this file except in compliance with + the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +--- + +This is a list of common, named error conditions returned by Spark SQL. + +Also see [SQLSTATE Codes](sql-error-conditions-sqlstates.html). + +### AGGREGATE_FUNCTION_WITH_NONDETERMINISTIC_EXPRESSION + +SQLSTATE: none assigned + +Non-deterministic expression `` should not appear in the arguments of an aggregate function. + +### ALL_PARTITION_COLUMNS_NOT_ALLOWED + +SQLSTATE: none assigned + +Cannot use all columns for partition columns. + +### ALTER_TABLE_COLUMN_DESCRIPTOR_DUPLICATE + +[SQLSTATE: 42710](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +ALTER TABLE `` column `` specifies descriptor "``" more than once, which is invalid. 
+ +### AMBIGUOUS_ALIAS_IN_NESTED_CTE + +SQLSTATE: none assigned + +Name `` is ambiguous in nested CTE. +Please set `` to "CORRECTED" so that name defined in inner CTE takes precedence. If set it to "LEGACY", outer CTE definitions will take precedence. +See '``/sql-migration-guide.html#query-engine'. + +### AMBIGUOUS_COLUMN_OR_FIELD + +[SQLSTATE: 42702](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +Column or field `` is ambiguous and has `` matches. + +### AMBIGUOUS_LATERAL_COLUMN_ALIAS + +[SQLSTATE: 42702](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +Lateral column alias `` is ambiguous and has `` matches. + +### AMBIGUOUS_REFERENCE + +[SQLSTATE: 42704](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +Reference `` is ambiguous, could be: ``. + +### AMBIGUOUS_REFERENCE_TO_FIELDS + +[SQLSTATE: 42000](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +Ambiguous reference to the field ``. It appears `` times in the schema. + +### ARITHMETIC_OVERFLOW + +[SQLSTATE: 22003](sql-error-conditions-sqlstates.html#class-22-data-exception) + +``.`` If necessary set `` to "false" to bypass this error. + +### [AS_OF_JOIN](sql-error-conditions-as-of-join-error-class.html) + +SQLSTATE: none assigned + +Invalid as-of join. + +For more details see [AS_OF_JOIN](sql-error-conditions-as-of-join-error-class.html) + +### AVRO_INCORRECT_TYPE + +SQLSTATE: none assigned + +Cannot convert Avro `` to SQL `` because the original encoded data type is ``, however you're trying to read the field as ``, which would lead to an incorrect answer. To allow reading this field, enable the SQL configuration: ``. + +### AVRO_LOWER_PRECISION + +SQLSTATE: none assigned + +Cannot convert Avro `` to SQL `` because the original encoded data type is ``, however you're trying to read the field as ``, which leads to data being read as null. Please provide a wider decimal type to get the correct result. To allow reading null to this field, enable the SQL configuration: ``. + +### BATCH_METADATA_NOT_FOUND + +[SQLSTATE: 42K03](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +Unable to find batch ``. + +### BINARY_ARITHMETIC_OVERFLOW + +[SQLSTATE: 22003](sql-error-conditions-sqlstates.html#class-22-data-exception) + +`` `` `` caused overflow. + +### CALL_ON_STREAMING_DATASET_UNSUPPORTED + +SQLSTATE: none assigned + +The method `` can not be called on streaming Dataset/DataFrame. + +### CANNOT_CAST_DATATYPE + +[SQLSTATE: 42846](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +Cannot cast `` to ``. + +### CANNOT_CONVERT_PROTOBUF_FIELD_TYPE_TO_SQL_TYPE + +SQLSTATE: none assigned + +Cannot convert Protobuf `` to SQL `` because schema is incompatible (protobufType = ``, sqlType = ``). + +### CANNOT_CONVERT_PROTOBUF_MESSAGE_TYPE_TO_SQL_TYPE + +SQLSTATE: none assigned + +Unable to convert `` of Protobuf to SQL type ``. + +### CANNOT_CONVERT_SQL_TYPE_TO_PROTOBUF_FIELD_TYPE + +SQLSTATE: none assigned + +Cannot convert SQL `` to Protobuf `` because schema is incompatible (protobufType = ``, sqlType = ``). + +### CANNOT_CONVERT_SQL_VALUE_TO_PROTOBUF_ENUM_TYPE + +SQLSTATE: none assigned + +Cannot convert SQL `` to Protobuf `` because `` is not in defined values for enum: ``. + +### CANNOT_DECODE_URL + +[SQLSTATE: 22546](sql-error-conditions-sqlstates.html#class-22-data-exception) + +The provided URL cannot be decoded: ``. 
Please ensure that the URL is properly formatted and try again. + +### CANNOT_INVOKE_IN_TRANSFORMATIONS + +SQLSTATE: none assigned + +Dataset transformations and actions can only be invoked by the driver, not inside of other Dataset transformations; for example, dataset1.map(x => dataset2.values.count() * x) is invalid because the values transformation and count action cannot be performed inside of the dataset1.map transformation. For more information, see SPARK-28702. + +### CANNOT_LOAD_FUNCTION_CLASS + +SQLSTATE: none assigned + +Cannot load class `` when registering the function ``, please make sure it is on the classpath. + +### CANNOT_LOAD_PROTOBUF_CLASS + +SQLSTATE: none assigned + +Could not load Protobuf class with name ``. ``. + +### CANNOT_MERGE_INCOMPATIBLE_DATA_TYPE + +[SQLSTATE: 42825](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +Failed to merge incompatible data types `` and ``. Please check the data types of the columns being merged and ensure that they are compatible. If necessary, consider casting the columns to compatible data types before attempting the merge. + +### CANNOT_MERGE_SCHEMAS + +[SQLSTATE: 42KD9](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +Failed merging schemas: +Initial schema: +`` +Schema that cannot be merged with the initial schema: +``. + +### CANNOT_MODIFY_CONFIG + +[SQLSTATE: 46110](sql-error-conditions-sqlstates.html#class-46-java-ddl-1) + +Cannot modify the value of the Spark config: ``. +See also '``/sql-migration-guide.html#ddl-statements'. + +### CANNOT_PARSE_DECIMAL + +[SQLSTATE: 22018](sql-error-conditions-sqlstates.html#class-22-data-exception) + +Cannot parse decimal. Please ensure that the input is a valid number with optional decimal point or comma separators. + +### CANNOT_PARSE_INTERVAL + +SQLSTATE: none assigned + +Unable to parse ``. Please ensure that the value provided is in a valid format for defining an interval. You can reference the documentation for the correct format. If the issue persists, please double check that the input value is not null or empty and try again. + +### CANNOT_PARSE_JSON_FIELD + +[SQLSTATE: 2203G](sql-error-conditions-sqlstates.html#class-22-data-exception) + +Cannot parse the field name `` and the value `` of the JSON token type `` to target Spark data type ``. + +### CANNOT_PARSE_PROTOBUF_DESCRIPTOR + +SQLSTATE: none assigned + +Error parsing descriptor bytes into Protobuf FileDescriptorSet. + +### CANNOT_PARSE_TIMESTAMP + +[SQLSTATE: 22007](sql-error-conditions-sqlstates.html#class-22-data-exception) + +``. If necessary set `` to "false" to bypass this error. + +### CANNOT_READ_FILE_FOOTER + +SQLSTATE: none assigned + +Could not read footer for file: ``. Please ensure that the file is in either ORC or Parquet format. If not, please convert it to a valid format. If the file is in the valid format, please check if it is corrupt. If it is, you can choose to either ignore it or fix the corruption. + +### CANNOT_RECOGNIZE_HIVE_TYPE + +[SQLSTATE: 429BB](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +Cannot recognize hive type string: ``, column: ``. The specified data type for the field cannot be recognized by Spark SQL. Please check the data type of the specified field and ensure that it is a valid Spark SQL data type. Refer to the Spark SQL documentation for a list of valid data types and their format. 
If the data type is correct, please ensure that you are using a supported version of Spark SQL.
+
+### CANNOT_RENAME_ACROSS_SCHEMA
+
+[SQLSTATE: 0AKD0](sql-error-conditions-sqlstates.html#class-0A-feature-not-supported)
+
+Renaming a `` across schemas is not allowed.
+
+### CANNOT_RESOLVE_STAR_EXPAND
+
+SQLSTATE: none assigned
+
+Cannot resolve ``.* given input columns ``. Please check that the specified table or struct exists and is accessible in the input columns.
+
+### CANNOT_RESTORE_PERMISSIONS_FOR_PATH
+
+SQLSTATE: none assigned
+
+Failed to set permissions on created path `` back to ``.
+
+### [CANNOT_UPDATE_FIELD](sql-error-conditions-cannot-update-field-error-class.html)
+
+SQLSTATE: none assigned
+
+Cannot update `` field `` type:
+
+For more details see [CANNOT_UPDATE_FIELD](sql-error-conditions-cannot-update-field-error-class.html)
+
+### CANNOT_UP_CAST_DATATYPE
+
+SQLSTATE: none assigned
+
+Cannot up cast `` from `` to ``.
+``
+
+### CAST_INVALID_INPUT
+
+[SQLSTATE: 22018](sql-error-conditions-sqlstates.html#class-22-data-exception)
+
+The value `` of the type `` cannot be cast to `` because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set `` to "false" to bypass this error.
+
+### CAST_OVERFLOW
+
+[SQLSTATE: 22003](sql-error-conditions-sqlstates.html#class-22-data-exception)
+
+The value `` of the type `` cannot be cast to `` due to an overflow. Use `try_cast` to tolerate overflow and return NULL instead. If necessary set `` to "false" to bypass this error.
+
+### CAST_OVERFLOW_IN_TABLE_INSERT
+
+[SQLSTATE: 22003](sql-error-conditions-sqlstates.html#class-22-data-exception)
+
+Fail to insert a value of `` type into the `` type column `` due to an overflow. Use `try_cast` on the input value to tolerate overflow and return NULL instead.
+
+### CODEC_NOT_AVAILABLE
+
+SQLSTATE: none assigned
+
+The codec `` is not available. Consider setting the config `` to ``.
+
+### CODEC_SHORT_NAME_NOT_FOUND
+
+SQLSTATE: none assigned
+
+Cannot find a short name for the codec ``.
+
+### COLUMN_ALIASES_IS_NOT_ALLOWED
+
+SQLSTATE: none assigned
+
+Column aliases are not allowed in ``.
+
+### COLUMN_ALREADY_EXISTS
+
+[SQLSTATE: 42711](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation)
+
+The column `` already exists. Consider choosing another name or renaming the existing column.
+
+### COLUMN_NOT_DEFINED_IN_TABLE
+
+SQLSTATE: none assigned
+
+`` column `` is not defined in table ``, defined table columns are: ``.
+
+### COLUMN_NOT_FOUND
+
+[SQLSTATE: 42703](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation)
+
+The column `` cannot be found. Verify the spelling and correctness of the column name according to the SQL config ``.
+
+### COMPARATOR_RETURNS_NULL
+
+SQLSTATE: none assigned
+
+The comparator has returned a NULL for a comparison between `` and ``. It should return a positive integer for "greater than", 0 for "equal" and a negative integer for "less than". To revert to deprecated behavior where NULL is treated as 0 (equal), you must set "spark.sql.legacy.allowNullComparisonResultInArraySort" to "true".
+
+### CONCURRENT_QUERY
+
+SQLSTATE: none assigned
+
+Another instance of this query was just started by a concurrent session.
+
+### CONCURRENT_STREAM_LOG_UPDATE
+
+SQLSTATE: 40000
+
+Concurrent update to the log. Multiple streaming jobs detected for ``.
+Please make sure only one streaming job runs on a specific checkpoint location at a time.
+
+### [CONNECT](sql-error-conditions-connect-error-class.html)
+
+SQLSTATE: none assigned
+
+Generic Spark Connect error.
+
+For more details see [CONNECT](sql-error-conditions-connect-error-class.html)
+
+### CONVERSION_INVALID_INPUT
+
+[SQLSTATE: 22018](sql-error-conditions-sqlstates.html#class-22-data-exception)
+
+The value `` (``) cannot be converted to `` because it is malformed. Correct the value as per the syntax, or change its format. Use `` to tolerate malformed input and return NULL instead.
+
+### CREATE_PERMANENT_VIEW_WITHOUT_ALIAS
+
+SQLSTATE: none assigned
+
+Not allowed to create the permanent view `` without explicitly assigning an alias for the expression ``.
+ +### CREATE_TABLE_COLUMN_DESCRIPTOR_DUPLICATE + +[SQLSTATE: 42710](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +CREATE TABLE column `` specifies descriptor "``" more than once, which is invalid. + +### [CREATE_VIEW_COLUMN_ARITY_MISMATCH](sql-error-conditions-create-view-column-arity-mismatch-error-class.html) + +[SQLSTATE: 21S01](sql-error-conditions-sqlstates.html#class-21-cardinality-violation) + +Cannot create view ``, the reason is + +For more details see [CREATE_VIEW_COLUMN_ARITY_MISMATCH](sql-error-conditions-create-view-column-arity-mismatch-error-class.html) + +### [DATATYPE_MISMATCH](sql-error-conditions-datatype-mismatch-error-class.html) + +[SQLSTATE: 42K09](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +Cannot resolve `` due to data type mismatch: + +For more details see [DATATYPE_MISMATCH](sql-error-conditions-datatype-mismatch-error-class.html) + +### DATATYPE_MISSING_SIZE + +[SQLSTATE: 42K01](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +DataType `` requires a length parameter, for example ``(10). Please specify the length. + +### DATA_SOURCE_NOT_FOUND + +[SQLSTATE: 42K02](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +Failed to find the data source: ``. Please find packages at `https://spark.apache.org/third-party-projects.html`. + +### DATETIME_OVERFLOW + +[SQLSTATE: 22008](sql-error-conditions-sqlstates.html#class-22-data-exception) + +Datetime operation overflow: ``. + +### DECIMAL_PRECISION_EXCEEDS_MAX_PRECISION + +[SQLSTATE: 22003](sql-error-conditions-sqlstates.html#class-22-data-exception) + +Decimal precision `` exceeds max precision ``. + +### DEFAULT_DATABASE_NOT_EXISTS + +[SQLSTATE: 42704](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +Default database `` does not exist, please create it first or change default database to ````. + +### DISTINCT_WINDOW_FUNCTION_UNSUPPORTED + +SQLSTATE: none assigned + +Distinct window functions are not supported: ``. + +### DIVIDE_BY_ZERO + +[SQLSTATE: 22012](sql-error-conditions-sqlstates.html#class-22-data-exception) + +Division by zero. Use `try_divide` to tolerate divisor being 0 and return NULL instead. If necessary set `` to "false" to bypass this error. + +### DUPLICATED_FIELD_NAME_IN_ARROW_STRUCT + +SQLSTATE: none assigned + +Duplicated field names in Arrow Struct are not allowed, got ``. + +### DUPLICATED_MAP_KEY + +[SQLSTATE: 23505](sql-error-conditions-sqlstates.html#class-23-integrity-constraint-violation) + +Duplicate map key `` was found, please check the input data. If you want to remove the duplicated keys, you can set `` to "LAST_WIN" so that the key inserted at last takes precedence. + +### DUPLICATED_METRICS_NAME + +SQLSTATE: none assigned + +The metric name is not unique: ``. The same name cannot be used for metrics with different results. However multiple instances of metrics with with same result and name are allowed (e.g. self-joins). + +### DUPLICATE_CLAUSES + +SQLSTATE: none assigned + +Found duplicate clauses: ``. Please, remove one of them. + +### DUPLICATE_KEY + +[SQLSTATE: 23505](sql-error-conditions-sqlstates.html#class-23-integrity-constraint-violation) + +Found duplicate keys ``. + +### EMPTY_JSON_FIELD_VALUE + +[SQLSTATE: 42604](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +Failed to parse an empty string for data type ``. 
+ +### ENCODER_NOT_FOUND + +SQLSTATE: none assigned + +Not found an encoder of the type `` to Spark SQL internal representation. Consider to change the input type to one of supported at '``/sql-ref-datatypes.html'. + +### EVENT_TIME_IS_NOT_ON_TIMESTAMP_TYPE + +SQLSTATE: none assigned + +The event time `` has the invalid type ``, but expected "TIMESTAMP". + +### EXCEED_LIMIT_LENGTH + +SQLSTATE: none assigned + +Exceeds char/varchar type length limitation: ``. + +### EXPRESSION_TYPE_IS_NOT_ORDERABLE + +SQLSTATE: none assigned + +Column expression `` cannot be sorted because its type `` is not orderable. + +### FAILED_EXECUTE_UDF + +[SQLSTATE: 39000](sql-error-conditions-sqlstates.html#class-39-external-routine-invocation-exception) + +Failed to execute user defined function (``: (``) => ``). + +### FAILED_FUNCTION_CALL + +[SQLSTATE: 38000](sql-error-conditions-sqlstates.html#class-38-external-routine-exception) + +Failed preparing of the function `` for call. Please, double check function's arguments. + +### FAILED_PARSE_STRUCT_TYPE + +[SQLSTATE: 22018](sql-error-conditions-sqlstates.html#class-22-data-exception) + +Failed parsing struct: ``. + +### FAILED_RENAME_PATH + +[SQLSTATE: 42K04](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +Failed to rename `` to `` as destination already exists. + +### FAILED_RENAME_TEMP_FILE + +SQLSTATE: none assigned + +Failed to rename temp file `` to `` as FileSystem.rename returned false. + +### FIELDS_ALREADY_EXISTS + +SQLSTATE: none assigned + +Cannot `` column, because `` already exists in ``. + +### FIELD_NOT_FOUND + +[SQLSTATE: 42704](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +No such struct field `` in ``. + +### FORBIDDEN_OPERATION + +[SQLSTATE: 42809](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +The operation `` is not allowed on the ``: ``. + +### GENERATED_COLUMN_WITH_DEFAULT_VALUE + +SQLSTATE: none assigned + +A column cannot have both a default value and a generation expression but column `` has default value: (``) and generation expression: (``). + +### GRAPHITE_SINK_INVALID_PROTOCOL + +SQLSTATE: none assigned + +Invalid Graphite protocol: ``. + +### GRAPHITE_SINK_PROPERTY_MISSING + +SQLSTATE: none assigned + +Graphite sink requires '``' property. + +### GROUPING_COLUMN_MISMATCH + +[SQLSTATE: 42803](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +Column of grouping (``) can't be found in grouping columns ``. + +### GROUPING_ID_COLUMN_MISMATCH + +[SQLSTATE: 42803](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +Columns of grouping_id (``) does not match grouping columns (``). + +### GROUPING_SIZE_LIMIT_EXCEEDED + +[SQLSTATE: 54000](sql-error-conditions-sqlstates.html#class-54-program-limit-exceeded) + +Grouping sets size cannot be greater than ``. + +### GROUP_BY_AGGREGATE + +[SQLSTATE: 42903](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +Aggregate functions are not allowed in GROUP BY, but found ``. + +### GROUP_BY_POS_AGGREGATE + +[SQLSTATE: 42903](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +GROUP BY `` refers to an expression `` that contains an aggregate function. Aggregate functions are not allowed in GROUP BY. 
+ +### GROUP_BY_POS_OUT_OF_RANGE + +[SQLSTATE: 42805](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +GROUP BY position `` is not in select list (valid range is [1, ``]). + +### GROUP_EXPRESSION_TYPE_IS_NOT_ORDERABLE + +SQLSTATE: none assigned + +The expression `` cannot be used as a grouping expression because its data type `` is not an orderable data type. + +### HLL_INVALID_INPUT_SKETCH_BUFFER + +SQLSTATE: none assigned + +Invalid call to ``; only valid HLL sketch buffers are supported as inputs (such as those produced by the `hll_sketch_agg` function). + +### HLL_INVALID_LG_K + +SQLSTATE: none assigned + +Invalid call to ``; the `lgConfigK` value must be between `` and ``, inclusive: ``. + +### HLL_UNION_DIFFERENT_LG_K + +SQLSTATE: none assigned + +Sketches have different `lgConfigK` values: `` and ``. Set the `allowDifferentLgConfigK` parameter to true to call `` with different `lgConfigK` values. + +### IDENTIFIER_TOO_MANY_NAME_PARTS + +[SQLSTATE: 42601](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +`` is not a valid identifier as it has more than 2 name parts. + +### INCOMPARABLE_PIVOT_COLUMN + +[SQLSTATE: 42818](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +Invalid pivot column ``. Pivot columns must be comparable. + +### INCOMPATIBLE_COLUMN_TYPE + +[SQLSTATE: 42825](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +`` can only be performed on tables with compatible column types. The `` column of the `` table is `` type which is not compatible with `` at the same column of the first table.``. + +### INCOMPATIBLE_DATASOURCE_REGISTER + +SQLSTATE: none assigned + +Detected an incompatible DataSourceRegister. Please remove the incompatible library from classpath or upgrade it. Error: `` + +### [INCOMPATIBLE_DATA_FOR_TABLE](sql-error-conditions-incompatible-data-for-table-error-class.html) + +SQLSTATE: KD000 + +Cannot write incompatible data for the table ``: + +For more details see [INCOMPATIBLE_DATA_FOR_TABLE](sql-error-conditions-incompatible-data-for-table-error-class.html) + +### INCOMPATIBLE_JOIN_TYPES + +[SQLSTATE: 42613](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +The join types `` and `` are incompatible. + +### INCOMPATIBLE_VIEW_SCHEMA_CHANGE + +SQLSTATE: none assigned + +The SQL query of view `` has an incompatible schema change and column `` cannot be resolved. Expected `` columns named `` but got ``. +Please try to re-create the view by running: ``. + +### [INCOMPLETE_TYPE_DEFINITION](sql-error-conditions-incomplete-type-definition-error-class.html) + +[SQLSTATE: 42K01](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +Incomplete complex type: + +For more details see [INCOMPLETE_TYPE_DEFINITION](sql-error-conditions-incomplete-type-definition-error-class.html) + +### [INCONSISTENT_BEHAVIOR_CROSS_VERSION](sql-error-conditions-inconsistent-behavior-cross-version-error-class.html) + +[SQLSTATE: 42K0B](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +You may get a different result due to the upgrading to + +For more details see [INCONSISTENT_BEHAVIOR_CROSS_VERSION](sql-error-conditions-inconsistent-behavior-cross-version-error-class.html) + +### INCORRECT_END_OFFSET + +[SQLSTATE: 22003](sql-error-conditions-sqlstates.html#class-22-data-exception) + +Max offset with `` rowsPerSecond is ``, but it's `` now. 
+ +### INCORRECT_RAMP_UP_RATE + +[SQLSTATE: 22003](sql-error-conditions-sqlstates.html#class-22-data-exception) + +Max offset with `` rowsPerSecond is ``, but 'rampUpTimeSeconds' is ``. + +### INDEX_ALREADY_EXISTS + +[SQLSTATE: 42710](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +Cannot create the index `` on table `` because it already exists. + +### INDEX_NOT_FOUND + +[SQLSTATE: 42704](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +Cannot find the index `` on table ``. + +### [INSERT_COLUMN_ARITY_MISMATCH](sql-error-conditions-insert-column-arity-mismatch-error-class.html) + +[SQLSTATE: 21S01](sql-error-conditions-sqlstates.html#class-21-cardinality-violation) + +Cannot write to ``, the reason is + +For more details see [INSERT_COLUMN_ARITY_MISMATCH](sql-error-conditions-insert-column-arity-mismatch-error-class.html) + +### INSERT_PARTITION_COLUMN_ARITY_MISMATCH + +[SQLSTATE: 21S01](sql-error-conditions-sqlstates.html#class-21-cardinality-violation) + +Cannot write to '``', ``: +Table columns: ``. +Partition columns with static values: ``. +Data columns: ``. + +### [INSUFFICIENT_TABLE_PROPERTY](sql-error-conditions-insufficient-table-property-error-class.html) + +SQLSTATE: none assigned + +Can't find table property: + +For more details see [INSUFFICIENT_TABLE_PROPERTY](sql-error-conditions-insufficient-table-property-error-class.html) + +### INTERNAL_ERROR + +[SQLSTATE: XX000](sql-error-conditions-sqlstates.html#class-XX-internal-error) + +`` + +### INTERNAL_ERROR_BROADCAST + +[SQLSTATE: XX000](sql-error-conditions-sqlstates.html#class-XX-internal-error) + +`` + +### INTERNAL_ERROR_EXECUTOR + +[SQLSTATE: XX000](sql-error-conditions-sqlstates.html#class-XX-internal-error) + +`` + +### INTERNAL_ERROR_MEMORY + +[SQLSTATE: XX000](sql-error-conditions-sqlstates.html#class-XX-internal-error) + +`` + +### INTERNAL_ERROR_NETWORK + +[SQLSTATE: XX000](sql-error-conditions-sqlstates.html#class-XX-internal-error) + +`` + +### INTERNAL_ERROR_SHUFFLE + +[SQLSTATE: XX000](sql-error-conditions-sqlstates.html#class-XX-internal-error) + +`` + +### INTERNAL_ERROR_STORAGE + +[SQLSTATE: XX000](sql-error-conditions-sqlstates.html#class-XX-internal-error) + +`` + +### INTERVAL_ARITHMETIC_OVERFLOW + +[SQLSTATE: 22015](sql-error-conditions-sqlstates.html#class-22-data-exception) + +``.`` + +### INTERVAL_DIVIDED_BY_ZERO + +[SQLSTATE: 22012](sql-error-conditions-sqlstates.html#class-22-data-exception) + +Division by zero. Use `try_divide` to tolerate divisor being 0 and return NULL instead. + +### INVALID_ARRAY_INDEX + +[SQLSTATE: 22003](sql-error-conditions-sqlstates.html#class-22-data-exception) + +The index `` is out of bounds. The array has `` elements. Use the SQL function `get()` to tolerate accessing element at invalid index and return NULL instead. If necessary set `` to "false" to bypass this error. + +### INVALID_ARRAY_INDEX_IN_ELEMENT_AT + +[SQLSTATE: 22003](sql-error-conditions-sqlstates.html#class-22-data-exception) + +The index `` is out of bounds. The array has `` elements. Use `try_element_at` to tolerate accessing element at invalid index and return NULL instead. If necessary set `` to "false" to bypass this error. + +### INVALID_BITMAP_POSITION + +[SQLSTATE: 22003](sql-error-conditions-sqlstates.html#class-22-data-exception) + +The 0-indexed bitmap position `` is out of bounds. The bitmap has `` bits (`` bytes). 
+ +### [INVALID_BOUNDARY](sql-error-conditions-invalid-boundary-error-class.html) + +SQLSTATE: none assigned + +The boundary `` is invalid: ``. + +For more details see [INVALID_BOUNDARY](sql-error-conditions-invalid-boundary-error-class.html) + +### INVALID_BUCKET_FILE + +SQLSTATE: none assigned + +Invalid bucket file: ``. + +### INVALID_BYTE_STRING + +SQLSTATE: none assigned + +The expected format is ByteString, but was `` (``). + +### INVALID_COLUMN_NAME_AS_PATH + +[SQLSTATE: 46121](sql-error-conditions-sqlstates.html#class-46-java-ddl-1) + +The datasource `` cannot save the column `` because its name contains some characters that are not allowed in file paths. Please, use an alias to rename it. + +### INVALID_COLUMN_OR_FIELD_DATA_TYPE + +[SQLSTATE: 42000](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +Column or field `` is of type `` while it's required to be ``. + +### [INVALID_DEFAULT_VALUE](sql-error-conditions-invalid-default-value-error-class.html) + +SQLSTATE: none assigned + +Failed to execute `` command because the destination table column `` has a DEFAULT value ``, + +For more details see [INVALID_DEFAULT_VALUE](sql-error-conditions-invalid-default-value-error-class.html) + +### INVALID_DRIVER_MEMORY + +SQLSTATE: F0000 + +System memory `` must be at least ``. Please increase heap size using the --driver-memory option or "``" in Spark configuration. + +### INVALID_EMPTY_LOCATION + +[SQLSTATE: 42K05](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +The location name cannot be empty string, but ```` was given. + +### INVALID_ESC + +SQLSTATE: none assigned + +Found an invalid escape string: ``. The escape string must contain only one character. + +### INVALID_ESCAPE_CHAR + +SQLSTATE: none assigned + +`EscapeChar` should be a string literal of length one, but got ``. + +### INVALID_EXECUTOR_MEMORY + +SQLSTATE: F0000 + +Executor memory `` must be at least ``. Please increase executor memory using the --executor-memory option or "``" in Spark configuration. + +### INVALID_EXTRACT_BASE_FIELD_TYPE + +[SQLSTATE: 42000](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +Can't extract a value from ``. Need a complex type [STRUCT, ARRAY, MAP] but got ``. + +### INVALID_EXTRACT_FIELD + +[SQLSTATE: 42601](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +Cannot extract `` from ``. + +### INVALID_EXTRACT_FIELD_TYPE + +[SQLSTATE: 42000](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +Field name should be a non-null string literal, but it's ``. + +### INVALID_FIELD_NAME + +[SQLSTATE: 42000](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +Field name `` is invalid: `` is not a struct. + +### [INVALID_FORMAT](sql-error-conditions-invalid-format-error-class.html) + +[SQLSTATE: 42601](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +The format is invalid: ``. + +For more details see [INVALID_FORMAT](sql-error-conditions-invalid-format-error-class.html) + +### INVALID_FRACTION_OF_SECOND + +[SQLSTATE: 22023](sql-error-conditions-sqlstates.html#class-22-data-exception) + +The fraction of sec must be zero. Valid range is [0, 60]. If necessary set `` to "false" to bypass this error. 
+
+### INVALID_HIVE_COLUMN_NAME
+
+SQLSTATE: none assigned
+
+Cannot create the table `` having the nested column `` whose name contains invalid characters `` in Hive metastore.
+
+### INVALID_IDENTIFIER
+
+[SQLSTATE: 42602](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation)
+
+The identifier `` is invalid. Please, consider quoting it with back-quotes as ````.
+
+### INVALID_INDEX_OF_ZERO
+
+[SQLSTATE: 22003](sql-error-conditions-sqlstates.html#class-22-data-exception)
+
+The index 0 is invalid. An index shall be either `< 0 or >` 0 (the first element has index 1).
+
+### [INVALID_INLINE_TABLE](sql-error-conditions-invalid-inline-table-error-class.html)
+
+SQLSTATE: none assigned
+
+Invalid inline table.
+
+For more details see [INVALID_INLINE_TABLE](sql-error-conditions-invalid-inline-table-error-class.html)
+
+### INVALID_JSON_ROOT_FIELD
+
+[SQLSTATE: 22032](sql-error-conditions-sqlstates.html#class-22-data-exception)
+
+Cannot convert JSON root field to target Spark type.
+
+### INVALID_JSON_SCHEMA_MAP_TYPE
+
+[SQLSTATE: 22032](sql-error-conditions-sqlstates.html#class-22-data-exception)
+
+Input schema `` can only contain STRING as a key type for a MAP.
+
+### [INVALID_LAMBDA_FUNCTION_CALL](sql-error-conditions-invalid-lambda-function-call-error-class.html)
+
+SQLSTATE: none assigned
+
+Invalid lambda function call.
+
+For more details see [INVALID_LAMBDA_FUNCTION_CALL](sql-error-conditions-invalid-lambda-function-call-error-class.html)
+
+### INVALID_LATERAL_JOIN_TYPE
+
+[SQLSTATE: 42613](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation)
+
+The `` JOIN with LATERAL correlation is not allowed because an OUTER subquery cannot correlate to its join partner. Remove the LATERAL correlation or use an INNER JOIN, or LEFT OUTER JOIN instead.
+
+### [INVALID_LIMIT_LIKE_EXPRESSION](sql-error-conditions-invalid-limit-like-expression-error-class.html)
+
+SQLSTATE: none assigned
+
+The limit like expression `` is invalid.
+
+For more details see [INVALID_LIMIT_LIKE_EXPRESSION](sql-error-conditions-invalid-limit-like-expression-error-class.html)
+
+### INVALID_NON_DETERMINISTIC_EXPRESSIONS
+
+SQLSTATE: none assigned
+
+The operator expects a deterministic expression, but the actual expression is ``.
+
+### INVALID_NUMERIC_LITERAL_RANGE
+
+SQLSTATE: none assigned
+
+Numeric literal `` is outside the valid range for `` with minimum value of `` and maximum value of ``. Please adjust the value accordingly.
+
+### [INVALID_OBSERVED_METRICS](sql-error-conditions-invalid-observed-metrics-error-class.html)
+
+SQLSTATE: none assigned
+
+Invalid observed metrics.
+
+For more details see [INVALID_OBSERVED_METRICS](sql-error-conditions-invalid-observed-metrics-error-class.html)
+
+### [INVALID_OPTIONS](sql-error-conditions-invalid-options-error-class.html)
+
+[SQLSTATE: 42K06](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation)
+
+Invalid options:
+
+For more details see [INVALID_OPTIONS](sql-error-conditions-invalid-options-error-class.html)
+
+### INVALID_PANDAS_UDF_PLACEMENT
+
+[SQLSTATE: 0A000](sql-error-conditions-sqlstates.html#class-0A-feature-not-supported)
+
+The group aggregate pandas UDF `` cannot be invoked together with other, non-pandas aggregate functions.
+ +### [INVALID_PARAMETER_VALUE](sql-error-conditions-invalid-parameter-value-error-class.html) + +[SQLSTATE: 22023](sql-error-conditions-sqlstates.html#class-22-data-exception) + +The value of parameter(s) `` in `` is invalid: + +For more details see [INVALID_PARAMETER_VALUE](sql-error-conditions-invalid-parameter-value-error-class.html) + +### [INVALID_PARTITION_OPERATION](sql-error-conditions-invalid-partition-operation-error-class.html) + +SQLSTATE: none assigned + +The partition command is invalid. + +For more details see [INVALID_PARTITION_OPERATION](sql-error-conditions-invalid-partition-operation-error-class.html) + +### INVALID_PROPERTY_KEY + +[SQLSTATE: 42602](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +`` is an invalid property key, please use quotes, e.g. SET ``=``. + +### INVALID_PROPERTY_VALUE + +[SQLSTATE: 42602](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +`` is an invalid property value, please use quotes, e.g. SET ``=`` + +### [INVALID_SCHEMA](sql-error-conditions-invalid-schema-error-class.html) + +[SQLSTATE: 42K07](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +The input schema `` is not a valid schema string. + +For more details see [INVALID_SCHEMA](sql-error-conditions-invalid-schema-error-class.html) + +### INVALID_SET_SYNTAX + +[SQLSTATE: 42000](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +Expected format is 'SET', 'SET key', or 'SET key=value'. If you want to include special characters in key, or include semicolon in value, please use backquotes, e.g., SET `key`=`value`. + +### INVALID_SQL_ARG + +SQLSTATE: none assigned + +The argument `` of `sql()` is invalid. Consider to replace it by a SQL literal. + +### [INVALID_SQL_SYNTAX](sql-error-conditions-invalid-sql-syntax-error-class.html) + +[SQLSTATE: 42000](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +Invalid SQL syntax: + +For more details see [INVALID_SQL_SYNTAX](sql-error-conditions-invalid-sql-syntax-error-class.html) + +### [INVALID_SUBQUERY_EXPRESSION](sql-error-conditions-invalid-subquery-expression-error-class.html) + +[SQLSTATE: 42823](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +Invalid subquery: + +For more details see [INVALID_SUBQUERY_EXPRESSION](sql-error-conditions-invalid-subquery-expression-error-class.html) + +### INVALID_TEMP_OBJ_REFERENCE + +SQLSTATE: none assigned + +Cannot create the persistent object `` of the type `` because it references to the temporary object `` of the type ``. Please make the temporary object `` persistent, or make the persistent object `` temporary. + +### [INVALID_TIME_TRAVEL_TIMESTAMP_EXPR](sql-error-conditions-invalid-time-travel-timestamp-expr-error-class.html) + +SQLSTATE: none assigned + +The time travel timestamp expression `` is invalid. + +For more details see [INVALID_TIME_TRAVEL_TIMESTAMP_EXPR](sql-error-conditions-invalid-time-travel-timestamp-expr-error-class.html) + +### INVALID_TYPED_LITERAL + +[SQLSTATE: 42604](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +The value of the typed literal `` is invalid: ``. + +### INVALID_UDF_IMPLEMENTATION + +SQLSTATE: none assigned + +Function `` does not implement ScalarFunction or AggregateFunction. + +### INVALID_URL + +SQLSTATE: none assigned + +The url is invalid: ``. If necessary set `` to "false" to bypass this error. 
+ +### INVALID_USAGE_OF_STAR_OR_REGEX + +[SQLSTATE: 42000](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +Invalid usage of `` in ``. + +### INVALID_VIEW_TEXT + +SQLSTATE: none assigned + +The view `` cannot be displayed due to invalid view text: ``. This may be caused by an unauthorized modification of the view or an incorrect query syntax. Please check your query syntax and verify that the view has not been tampered with. + +### INVALID_WHERE_CONDITION + +[SQLSTATE: 42903](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +The WHERE condition `` contains invalid expressions: ``. +Rewrite the query to avoid window functions, aggregate functions, and generator functions in the WHERE clause. + +### INVALID_WINDOW_SPEC_FOR_AGGREGATION_FUNC + +SQLSTATE: none assigned + +Cannot specify ORDER BY or a window frame for ``. + +### [INVALID_WRITE_DISTRIBUTION](sql-error-conditions-invalid-write-distribution-error-class.html) + +SQLSTATE: none assigned + +The requested write distribution is invalid. + +For more details see [INVALID_WRITE_DISTRIBUTION](sql-error-conditions-invalid-write-distribution-error-class.html) + +### JOIN_CONDITION_IS_NOT_BOOLEAN_TYPE + +SQLSTATE: none assigned + +The join condition `` has the invalid type ``, expected "BOOLEAN". + +### LOAD_DATA_PATH_NOT_EXISTS + +SQLSTATE: none assigned + +LOAD DATA input path does not exist: ``. + +### LOCAL_MUST_WITH_SCHEMA_FILE + +SQLSTATE: none assigned + +LOCAL must be used together with the schema of `file`, but got: ````. + +### LOCATION_ALREADY_EXISTS + +[SQLSTATE: 42710](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +Cannot name the managed table as ``, as its associated location `` already exists. Please pick a different table name, or remove the existing location first. + +### MALFORMED_CSV_RECORD + +SQLSTATE: none assigned + +Malformed CSV record: `` + +### MALFORMED_PROTOBUF_MESSAGE + +SQLSTATE: none assigned + +Malformed Protobuf messages are detected in message deserialization. Parse Mode: ``. To process malformed protobuf message as null result, try setting the option 'mode' as 'PERMISSIVE'. + +### [MALFORMED_RECORD_IN_PARSING](sql-error-conditions-malformed-record-in-parsing-error-class.html) + +[SQLSTATE: 22023](sql-error-conditions-sqlstates.html#class-22-data-exception) + +Malformed records are detected in record parsing: ``. +Parse Mode: ``. To process malformed records as null result, try setting the option 'mode' as 'PERMISSIVE'. + +For more details see [MALFORMED_RECORD_IN_PARSING](sql-error-conditions-malformed-record-in-parsing-error-class.html) + +### MERGE_CARDINALITY_VIOLATION + +[SQLSTATE: 23K01](sql-error-conditions-sqlstates.html#class-23-integrity-constraint-violation) + +The ON search condition of the MERGE statement matched a single row from the target table with multiple rows of the source table. +This could result in the target row being operated on more than once with an update or delete operation and is not allowed. + +### MISSING_AGGREGATION + +[SQLSTATE: 42803](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +The non-aggregating expression `` is based on columns which are not participating in the GROUP BY clause. +Add the columns or the expression to the GROUP BY, aggregate the expression, or use `` if you do not care which of the values within a group is returned. 
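
For instance, in the following sketch (illustrative only; `spark` is assumed to be an active `SparkSession`), `b` is neither grouped nor aggregated and triggers this condition, while aggregating it resolves the error:

```scala
// Fails: b is based on columns not participating in the GROUP BY clause.
spark.sql("SELECT a, b FROM VALUES (1, 2) AS t(a, b) GROUP BY a")
// Succeeds: b is wrapped in an aggregate function.
spark.sql("SELECT a, first(b) FROM VALUES (1, 2) AS t(a, b) GROUP BY a")
```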
+ +### [MISSING_ATTRIBUTES](sql-error-conditions-missing-attributes-error-class.html) + +SQLSTATE: none assigned + +Resolved attribute(s) `` missing from `` in operator ``. + +For more details see [MISSING_ATTRIBUTES](sql-error-conditions-missing-attributes-error-class.html) + +### MISSING_GROUP_BY + +[SQLSTATE: 42803](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +The query does not include a GROUP BY clause. Add GROUP BY or turn it into the window functions using OVER clauses. + +### MULTI_SOURCES_UNSUPPORTED_FOR_EXPRESSION + +SQLSTATE: none assigned + +The expression `` does not support more than one source. + +### MULTI_UDF_INTERFACE_ERROR + +SQLSTATE: none assigned + +Not allowed to implement multiple UDF interfaces, UDF class ``. + +### NAMED_ARGUMENTS_SUPPORT_DISABLED + +SQLSTATE: none assigned + +Cannot call function `` because named argument references are not enabled here. In this case, the named argument reference was ``. Set "spark.sql.allowNamedFunctionArguments" to "true" to turn on feature. + +### NESTED_AGGREGATE_FUNCTION + +[SQLSTATE: 42607](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +It is not allowed to use an aggregate function in the argument of another aggregate function. Please use the inner aggregate function in a sub-query. + +### NON_LAST_MATCHED_CLAUSE_OMIT_CONDITION + +[SQLSTATE: 42613](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +When there are more than one MATCHED clauses in a MERGE statement, only the last MATCHED clause can omit the condition. + +### NON_LAST_NOT_MATCHED_BY_SOURCE_CLAUSE_OMIT_CONDITION + +[SQLSTATE: 42613](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +When there are more than one NOT MATCHED BY SOURCE clauses in a MERGE statement, only the last NOT MATCHED BY SOURCE clause can omit the condition. + +### NON_LAST_NOT_MATCHED_BY_TARGET_CLAUSE_OMIT_CONDITION + +[SQLSTATE: 42613](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +When there are more than one NOT MATCHED [BY TARGET] clauses in a MERGE statement, only the last NOT MATCHED [BY TARGET] clause can omit the condition. + +### NON_LITERAL_PIVOT_VALUES + +[SQLSTATE: 42K08](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +Literal expressions required for pivot values, found ``. + +### NON_PARTITION_COLUMN + +[SQLSTATE: 42000](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +PARTITION clause cannot contain the non-partition column: ``. + +### NON_TIME_WINDOW_NOT_SUPPORTED_IN_STREAMING + +SQLSTATE: none assigned + +Window function is not supported in `` (as column ``) on streaming DataFrames/Datasets. Structured Streaming only supports time-window aggregation using the WINDOW function. (window specification: ``) + +### [NOT_ALLOWED_IN_FROM](sql-error-conditions-not-allowed-in-from-error-class.html) + +SQLSTATE: none assigned + +Not allowed in the FROM clause: + +For more details see [NOT_ALLOWED_IN_FROM](sql-error-conditions-not-allowed-in-from-error-class.html) + +### [NOT_A_CONSTANT_STRING](sql-error-conditions-not-a-constant-string-error-class.html) + +[SQLSTATE: 42601](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +The expression `` used for the routine or clause `` must be a constant STRING which is NOT NULL. 
+ +For more details see [NOT_A_CONSTANT_STRING](sql-error-conditions-not-a-constant-string-error-class.html) + +### NOT_A_PARTITIONED_TABLE + +SQLSTATE: none assigned + +Operation `` is not allowed for `` because it is not a partitioned table. + +### [NOT_NULL_CONSTRAINT_VIOLATION](sql-error-conditions-not-null-constraint-violation-error-class.html) + +[SQLSTATE: 42000](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +Assigning a NULL is not allowed here. + +For more details see [NOT_NULL_CONSTRAINT_VIOLATION](sql-error-conditions-not-null-constraint-violation-error-class.html) + +### NOT_SUPPORTED_CHANGE_COLUMN + +SQLSTATE: none assigned + +ALTER TABLE ALTER/CHANGE COLUMN is not supported for changing `
`'s column `` with type `` to `` with type ``. + +### NOT_SUPPORTED_COMMAND_FOR_V2_TABLE + +[SQLSTATE: 46110](sql-error-conditions-sqlstates.html#class-46-java-ddl-1) + +`` is not supported for v2 tables. + +### NOT_SUPPORTED_COMMAND_WITHOUT_HIVE_SUPPORT + +SQLSTATE: none assigned + +`` is not supported, if you want to enable it, please set "spark.sql.catalogImplementation" to "hive". + +### [NOT_SUPPORTED_IN_JDBC_CATALOG](sql-error-conditions-not-supported-in-jdbc-catalog-error-class.html) + +[SQLSTATE: 46110](sql-error-conditions-sqlstates.html#class-46-java-ddl-1) + +Not supported command in JDBC catalog: + +For more details see [NOT_SUPPORTED_IN_JDBC_CATALOG](sql-error-conditions-not-supported-in-jdbc-catalog-error-class.html) + +### NO_DEFAULT_COLUMN_VALUE_AVAILABLE + +[SQLSTATE: 42608](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +Can't determine the default value for `` since it is not nullable and it has no default value. + +### NO_HANDLER_FOR_UDAF + +SQLSTATE: none assigned + +No handler for UDAF '``'. Use sparkSession.udf.register(...) instead. + +### NO_SQL_TYPE_IN_PROTOBUF_SCHEMA + +SQLSTATE: none assigned + +Cannot find `` in Protobuf schema. + +### NO_UDF_INTERFACE + +SQLSTATE: none assigned + +UDF class `` doesn't implement any UDF interface. + +### NULLABLE_COLUMN_OR_FIELD + +[SQLSTATE: 42000](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +Column or field `` is nullable while it's required to be non-nullable. + +### NULLABLE_ROW_ID_ATTRIBUTES + +[SQLSTATE: 42000](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +Row ID attributes cannot be nullable: ``. + +### NULL_MAP_KEY + +[SQLSTATE: 2200E](sql-error-conditions-sqlstates.html#class-22-data-exception) + +Cannot use null as map key. + +### NUMERIC_OUT_OF_SUPPORTED_RANGE + +[SQLSTATE: 22003](sql-error-conditions-sqlstates.html#class-22-data-exception) + +The value `` cannot be interpreted as a numeric since it has more than 38 digits. + +### NUMERIC_VALUE_OUT_OF_RANGE + +[SQLSTATE: 22003](sql-error-conditions-sqlstates.html#class-22-data-exception) + +`` cannot be represented as Decimal(``, ``). If necessary set `` to "false" to bypass this error, and return NULL instead. + +### NUM_COLUMNS_MISMATCH + +[SQLSTATE: 42826](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +`` can only be performed on inputs with the same number of columns, but the first input has `` columns and the `` input has `` columns. + +### NUM_TABLE_VALUE_ALIASES_MISMATCH + +SQLSTATE: none assigned + +Number of given aliases does not match number of output columns. Function name: ``; number of aliases: ``; number of output columns: ``. + +### OPERATION_CANCELED + +[SQLSTATE: HY008](sql-error-conditions-sqlstates.html#class-HY-cli-specific-condition) + +Operation has been canceled. + +### ORDER_BY_POS_OUT_OF_RANGE + +[SQLSTATE: 42805](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +ORDER BY position `` is not in select list (valid range is [1, ``]). + +### PARSE_EMPTY_STATEMENT + +[SQLSTATE: 42617](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +Syntax error, unexpected empty statement. + +### PARSE_SYNTAX_ERROR + +[SQLSTATE: 42601](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +Syntax error at or near ````. 
+ +### PARTITIONS_ALREADY_EXIST + +[SQLSTATE: 428FT](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +Cannot ADD or RENAME TO partition(s) `` in table `` because they already exist. +Choose a different name, drop the existing partition, or add the IF NOT EXISTS clause to tolerate a pre-existing partition. + +### PARTITIONS_NOT_FOUND + +[SQLSTATE: 428FT](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +The partition(s) `` cannot be found in table ``. +Verify the partition specification and table name. +To tolerate the error on drop use ALTER TABLE … DROP IF EXISTS PARTITION. + +### PATH_ALREADY_EXISTS + +[SQLSTATE: 42K04](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +Path `` already exists. Set mode as "overwrite" to overwrite the existing path. + +### PATH_NOT_FOUND + +[SQLSTATE: 42K03](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +Path does not exist: ``. + +### PIVOT_VALUE_DATA_TYPE_MISMATCH + +[SQLSTATE: 42K09](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +Invalid pivot value '``': value data type `` does not match pivot column data type ``. + +### PLAN_VALIDATION_FAILED_RULE_EXECUTOR + +SQLSTATE: none assigned + +The input plan of `` is invalid: `` + +### PLAN_VALIDATION_FAILED_RULE_IN_BATCH + +SQLSTATE: none assigned + +Rule `` in batch `` generated an invalid plan: `` + +### PROTOBUF_DEPENDENCY_NOT_FOUND + +SQLSTATE: none assigned + +Could not find dependency: ``. + +### PROTOBUF_DESCRIPTOR_FILE_NOT_FOUND + +SQLSTATE: none assigned + +Error reading Protobuf descriptor file at path: ``. + +### PROTOBUF_FIELD_MISSING + +SQLSTATE: none assigned + +Searching for `` in Protobuf schema at `` gave `` matches. Candidates: ``. + +### PROTOBUF_FIELD_MISSING_IN_SQL_SCHEMA + +SQLSTATE: none assigned + +Found `` in Protobuf schema but there is no match in the SQL schema. + +### PROTOBUF_FIELD_TYPE_MISMATCH + +SQLSTATE: none assigned + +Type mismatch encountered for field: ``. + +### PROTOBUF_MESSAGE_NOT_FOUND + +SQLSTATE: none assigned + +Unable to locate Message `` in Descriptor. + +### PROTOBUF_TYPE_NOT_SUPPORT + +SQLSTATE: none assigned + +Protobuf type not yet supported: ``. + +### RECURSIVE_PROTOBUF_SCHEMA + +SQLSTATE: none assigned + +Found recursive reference in Protobuf schema, which can not be processed by Spark by default: ``. try setting the option `recursive.fields.max.depth` 0 to 10. Going beyond 10 levels of recursion is not allowed. + +### RECURSIVE_VIEW + +SQLSTATE: none assigned + +Recursive view `` detected (cycle: ``). + +### REF_DEFAULT_VALUE_IS_NOT_ALLOWED_IN_PARTITION + +SQLSTATE: none assigned + +References to DEFAULT column values are not allowed within the PARTITION clause. + +### RENAME_SRC_PATH_NOT_FOUND + +[SQLSTATE: 42K03](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +Failed to rename as `` was not found. + +### REPEATED_CLAUSE + +[SQLSTATE: 42614](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +The `` clause may be used at most once per `` operation. + +### REQUIRES_SINGLE_PART_NAMESPACE + +[SQLSTATE: 42K05](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +`` requires a single-part namespace, but got ``. 
+ +### ROUTINE_ALREADY_EXISTS + +[SQLSTATE: 42723](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +Cannot create the function `` because it already exists. +Choose a different name, drop or replace the existing function, or add the IF NOT EXISTS clause to tolerate a pre-existing function. + +### ROUTINE_NOT_FOUND + +[SQLSTATE: 42883](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +The function `` cannot be found. Verify the spelling and correctness of the schema and catalog. +If you did not qualify the name with a schema and catalog, verify the current_schema() output, or qualify the name with the correct schema and catalog. +To tolerate the error on drop use DROP FUNCTION IF EXISTS. + +### SCALAR_SUBQUERY_IS_IN_GROUP_BY_OR_AGGREGATE_FUNCTION + +SQLSTATE: none assigned + +The correlated scalar subquery '``' is neither present in GROUP BY, nor in an aggregate function. Add it to GROUP BY using ordinal position or wrap it in `first()` (or `first_value`) if you don't care which value you get. + +### SCALAR_SUBQUERY_TOO_MANY_ROWS + +[SQLSTATE: 21000](sql-error-conditions-sqlstates.html#class-21-cardinality-violation) + +More than one row returned by a subquery used as an expression. + +### SCHEMA_ALREADY_EXISTS + +[SQLSTATE: 42P06](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +Cannot create schema `` because it already exists. +Choose a different name, drop the existing schema, or add the IF NOT EXISTS clause to tolerate pre-existing schema. + +### SCHEMA_NOT_EMPTY + +[SQLSTATE: 2BP01](sql-error-conditions-sqlstates.html#class-2B-dependent-privilege-descriptors-still-exist) + +Cannot drop a schema `` because it contains objects. +Use DROP SCHEMA ... CASCADE to drop the schema and all its objects. + +### SCHEMA_NOT_FOUND + +[SQLSTATE: 42704](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +The schema `` cannot be found. Verify the spelling and correctness of the schema and catalog. +If you did not qualify the name with a catalog, verify the current_schema() output, or qualify the name with the correct catalog. +To tolerate the error on drop use DROP SCHEMA IF EXISTS. + +### SECOND_FUNCTION_ARGUMENT_NOT_INTEGER + +[SQLSTATE: 22023](sql-error-conditions-sqlstates.html#class-22-data-exception) + +The second argument of `` function needs to be an integer. + +### SEED_EXPRESSION_IS_UNFOLDABLE + +SQLSTATE: none assigned + +The seed expression `` of the expression `` must be foldable. + +### SORT_BY_WITHOUT_BUCKETING + +SQLSTATE: none assigned + +sortBy must be used together with bucketBy. + +### SPECIFY_BUCKETING_IS_NOT_ALLOWED + +SQLSTATE: none assigned + +Cannot specify bucketing information if the table schema is not specified when creating and will be inferred at runtime. + +### SPECIFY_PARTITION_IS_NOT_ALLOWED + +SQLSTATE: none assigned + +It is not allowed to specify partition columns when the table schema is not defined. When the table schema is not provided, schema and partition columns will be inferred. + +### SQL_CONF_NOT_FOUND + +SQLSTATE: none assigned + +The SQL config `` cannot be found. Please verify that the config exists. + +### STAR_GROUP_BY_POS + +[SQLSTATE: 0A000](sql-error-conditions-sqlstates.html#class-0A-feature-not-supported) + +Star (*) is not allowed in a select list when GROUP BY an ordinal position is used. 
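
For instance, with `spark.sql.groupByOrdinal` enabled (the default), a query such as the following sketch would typically raise this condition (illustrative only; `spark` is assumed to be an active `SparkSession`):

```scala
// GROUP BY 1 is an ordinal reference, so the star in the select list is rejected.
spark.sql("SELECT * FROM VALUES (1, 2) AS t(a, b) GROUP BY 1")
```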
+ +### STATIC_PARTITION_COLUMN_IN_INSERT_COLUMN_LIST + +SQLSTATE: none assigned + +Static partition column `` is also specified in the column list. + +### STREAM_FAILED + +SQLSTATE: none assigned + +Query [id = ``, runId = ``] terminated with exception: `` + +### SUM_OF_LIMIT_AND_OFFSET_EXCEEDS_MAX_INT + +SQLSTATE: none assigned + +The sum of the LIMIT clause and the OFFSET clause must not be greater than the maximum 32-bit integer value (2,147,483,647) but found limit = ``, offset = ``. + +### TABLE_OR_VIEW_ALREADY_EXISTS + +[SQLSTATE: 42P07](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +Cannot create table or view `` because it already exists. +Choose a different name, drop or replace the existing object, or add the IF NOT EXISTS clause to tolerate pre-existing objects. + +### TABLE_OR_VIEW_NOT_FOUND + +[SQLSTATE: 42P01](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +The table or view `` cannot be found. Verify the spelling and correctness of the schema and catalog. +If you did not qualify the name with a schema, verify the current_schema() output, or qualify the name with the correct schema and catalog. +To tolerate the error on drop use DROP VIEW IF EXISTS or DROP TABLE IF EXISTS. + +### TABLE_VALUED_FUNCTION_TOO_MANY_TABLE_ARGUMENTS + +SQLSTATE: none assigned + +There are too many table arguments for table-valued function. It allows one table argument, but got: ``. If you want to allow it, please set "spark.sql.allowMultipleTableArguments.enabled" to "true" + +### TASK_WRITE_FAILED + +SQLSTATE: none assigned + +Task failed while writing rows to ``. + +### TEMP_TABLE_OR_VIEW_ALREADY_EXISTS + +[SQLSTATE: 42P07](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +Cannot create the temporary view `` because it already exists. +Choose a different name, drop or replace the existing view, or add the IF NOT EXISTS clause to tolerate pre-existing views. + +### TEMP_VIEW_NAME_TOO_MANY_NAME_PARTS + +[SQLSTATE: 428EK](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +CREATE TEMPORARY VIEW or the corresponding Dataset APIs only accept single-part view names, but got: ``. + +### TOO_MANY_ARRAY_ELEMENTS + +[SQLSTATE: 54000](sql-error-conditions-sqlstates.html#class-54-program-limit-exceeded) + +Cannot initialize array with `` elements of size ``. + +### UDTF_ALIAS_NUMBER_MISMATCH + +SQLSTATE: none assigned + +The number of aliases supplied in the AS clause does not match the number of columns output by the UDTF. Expected `` aliases, but got ``. Please ensure that the number of aliases provided matches the number of columns output by the UDTF. + +### UNABLE_TO_ACQUIRE_MEMORY + +[SQLSTATE: 53200](sql-error-conditions-sqlstates.html#class-53-insufficient-resources) + +Unable to acquire `` bytes of memory, got ``. + +### UNABLE_TO_CONVERT_TO_PROTOBUF_MESSAGE_TYPE + +SQLSTATE: none assigned + +Unable to convert SQL type `` to Protobuf type ``. + +### UNABLE_TO_INFER_SCHEMA + +[SQLSTATE: 42KD9](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +Unable to infer schema for ``. It must be specified manually. + +### UNBOUND_SQL_PARAMETER + +[SQLSTATE: 42P02](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +Found the unbound parameter: ``. Please, fix `args` and provide a mapping of the parameter to a SQL literal. 
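
For instance, with the parameterized `sql` API (available since Spark 3.4), a marker left without a binding would typically raise this condition (illustrative sketch; `spark` is assumed to be an active `SparkSession`):

```scala
// Fails: the :limit marker has no binding in the args map.
spark.sql("SELECT :limit")
// Succeeds: the marker is bound to a SQL literal.
spark.sql("SELECT :limit", Map("limit" -> 10))
```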
+ +### UNCLOSED_BRACKETED_COMMENT + +[SQLSTATE: 42601](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +Found an unclosed bracketed comment. Please, append */ at the end of the comment. + +### UNEXPECTED_INPUT_TYPE + +[SQLSTATE: 42K09](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +Parameter `` of function `` requires the `` type, however `` has the type ``. + +### UNKNOWN_PROTOBUF_MESSAGE_TYPE + +SQLSTATE: none assigned + +Attempting to treat `` as a Message, but it was ``. + +### UNPIVOT_REQUIRES_ATTRIBUTES + +[SQLSTATE: 42K0A](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +UNPIVOT requires all given `` expressions to be columns when no `` expressions are given. These are not columns: [``]. + +### UNPIVOT_REQUIRES_VALUE_COLUMNS + +[SQLSTATE: 42K0A](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +At least one value column needs to be specified for UNPIVOT, all columns specified as ids. + +### UNPIVOT_VALUE_DATA_TYPE_MISMATCH + +[SQLSTATE: 42K09](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +Unpivot value columns must share a least common type, some types do not: [``]. + +### UNPIVOT_VALUE_SIZE_MISMATCH + +[SQLSTATE: 428C4](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +All unpivot value columns must have the same size as there are value column names (``). + +### UNRECOGNIZED_SQL_TYPE + +[SQLSTATE: 42704](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +Unrecognized SQL type - name: ``, id: ``. + +### UNRESOLVABLE_TABLE_VALUED_FUNCTION + +SQLSTATE: none assigned + +Could not resolve `` to a table-valued function. Please make sure that `` is defined as a table-valued function and that all required parameters are provided correctly. If `` is not defined, please create the table-valued function before using it. For more information about defining table-valued functions, please refer to the Apache Spark documentation. + +### UNRESOLVED_ALL_IN_GROUP_BY + +[SQLSTATE: 42803](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +Cannot infer grouping columns for GROUP BY ALL based on the select clause. Please explicitly specify the grouping columns. + +### [UNRESOLVED_COLUMN](sql-error-conditions-unresolved-column-error-class.html) + +[SQLSTATE: 42703](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +A column or function parameter with name `` cannot be resolved. + +For more details see [UNRESOLVED_COLUMN](sql-error-conditions-unresolved-column-error-class.html) + +### [UNRESOLVED_FIELD](sql-error-conditions-unresolved-field-error-class.html) + +[SQLSTATE: 42703](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +A field with name `` cannot be resolved with the struct-type column ``. + +For more details see [UNRESOLVED_FIELD](sql-error-conditions-unresolved-field-error-class.html) + +### [UNRESOLVED_MAP_KEY](sql-error-conditions-unresolved-map-key-error-class.html) + +[SQLSTATE: 42703](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +Cannot resolve column `` as a map key. If the key is a string literal, add the single quotes '' around it. 
+ +For more details see [UNRESOLVED_MAP_KEY](sql-error-conditions-unresolved-map-key-error-class.html) + +### UNRESOLVED_ROUTINE + +[SQLSTATE: 42883](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +Cannot resolve function `` on search path ``. + +### UNRESOLVED_USING_COLUMN_FOR_JOIN + +[SQLSTATE: 42703](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +USING column `` cannot be resolved on the `` side of the join. The ``-side columns: [``]. + +### UNSET_NONEXISTENT_PROPERTIES + +SQLSTATE: none assigned + +Attempted to unset non-existent properties [``] in table `
`. + +### [UNSUPPORTED_ADD_FILE](sql-error-conditions-unsupported-add-file-error-class.html) + +SQLSTATE: none assigned + +Don't support add file. + +For more details see [UNSUPPORTED_ADD_FILE](sql-error-conditions-unsupported-add-file-error-class.html) + +### UNSUPPORTED_ARROWTYPE + +[SQLSTATE: 0A000](sql-error-conditions-sqlstates.html#class-0A-feature-not-supported) + +Unsupported arrow type ``. + +### UNSUPPORTED_CHAR_OR_VARCHAR_AS_STRING + +SQLSTATE: none assigned + +The char/varchar type can't be used in the table schema. If you want Spark treat them as string type as same as Spark 3.0 and earlier, please set "spark.sql.legacy.charVarcharAsString" to "true". + +### UNSUPPORTED_DATASOURCE_FOR_DIRECT_QUERY + +SQLSTATE: none assigned + +Unsupported data source type for direct query on files: `` + +### UNSUPPORTED_DATATYPE + +[SQLSTATE: 0A000](sql-error-conditions-sqlstates.html#class-0A-feature-not-supported) + +Unsupported data type ``. + +### UNSUPPORTED_DATA_SOURCE_FOR_DIRECT_QUERY + +SQLSTATE: none assigned + +The direct query on files does not support the data source type: ``. Please try a different data source type or consider using a different query method. + +### UNSUPPORTED_DATA_TYPE_FOR_DATASOURCE + +SQLSTATE: none assigned + +The `` datasource doesn't support the column `` of the type ``. + +### [UNSUPPORTED_DEFAULT_VALUE](sql-error-conditions-unsupported-default-value-error-class.html) + +SQLSTATE: none assigned + +DEFAULT column values is not supported. + +For more details see [UNSUPPORTED_DEFAULT_VALUE](sql-error-conditions-unsupported-default-value-error-class.html) + +### [UNSUPPORTED_DESERIALIZER](sql-error-conditions-unsupported-deserializer-error-class.html) + +[SQLSTATE: 0A000](sql-error-conditions-sqlstates.html#class-0A-feature-not-supported) + +The deserializer is not supported: + +For more details see [UNSUPPORTED_DESERIALIZER](sql-error-conditions-unsupported-deserializer-error-class.html) + +### UNSUPPORTED_EXPRESSION_GENERATED_COLUMN + +SQLSTATE: none assigned + +Cannot create generated column `` with generation expression `` because ``. + +### UNSUPPORTED_EXPR_FOR_OPERATOR + +SQLSTATE: none assigned + +A query operator contains one or more unsupported expressions. Consider to rewrite it to avoid window functions, aggregate functions, and generator functions in the WHERE clause. +Invalid expressions: [``] + +### UNSUPPORTED_EXPR_FOR_WINDOW + +[SQLSTATE: 42P20](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +Expression `` not supported within a window function. + +### [UNSUPPORTED_FEATURE](sql-error-conditions-unsupported-feature-error-class.html) + +[SQLSTATE: 0A000](sql-error-conditions-sqlstates.html#class-0A-feature-not-supported) + +The feature is not supported: + +For more details see [UNSUPPORTED_FEATURE](sql-error-conditions-unsupported-feature-error-class.html) + +### [UNSUPPORTED_GENERATOR](sql-error-conditions-unsupported-generator-error-class.html) + +[SQLSTATE: 0A000](sql-error-conditions-sqlstates.html#class-0A-feature-not-supported) + +The generator is not supported: + +For more details see [UNSUPPORTED_GENERATOR](sql-error-conditions-unsupported-generator-error-class.html) + +### UNSUPPORTED_GROUPING_EXPRESSION + +SQLSTATE: none assigned + +grouping()/grouping_id() can only be used with GroupingSets/Cube/Rollup. + +### [UNSUPPORTED_INSERT](sql-error-conditions-unsupported-insert-error-class.html) + +SQLSTATE: none assigned + +Can't insert into the target. 
+ +For more details see [UNSUPPORTED_INSERT](sql-error-conditions-unsupported-insert-error-class.html) + +### [UNSUPPORTED_MERGE_CONDITION](sql-error-conditions-unsupported-merge-condition-error-class.html) + +SQLSTATE: none assigned + +MERGE operation contains unsupported `` condition. + +For more details see [UNSUPPORTED_MERGE_CONDITION](sql-error-conditions-unsupported-merge-condition-error-class.html) + +### [UNSUPPORTED_OVERWRITE](sql-error-conditions-unsupported-overwrite-error-class.html) + +SQLSTATE: none assigned + +Can't overwrite the target that is also being read from. + +For more details see [UNSUPPORTED_OVERWRITE](sql-error-conditions-unsupported-overwrite-error-class.html) + +### [UNSUPPORTED_SAVE_MODE](sql-error-conditions-unsupported-save-mode-error-class.html) + +SQLSTATE: none assigned + +The save mode `` is not supported for: + +For more details see [UNSUPPORTED_SAVE_MODE](sql-error-conditions-unsupported-save-mode-error-class.html) + +### [UNSUPPORTED_SUBQUERY_EXPRESSION_CATEGORY](sql-error-conditions-unsupported-subquery-expression-category-error-class.html) + +[SQLSTATE: 0A000](sql-error-conditions-sqlstates.html#class-0A-feature-not-supported) + +Unsupported subquery expression: + +For more details see [UNSUPPORTED_SUBQUERY_EXPRESSION_CATEGORY](sql-error-conditions-unsupported-subquery-expression-category-error-class.html) + +### UNSUPPORTED_TYPED_LITERAL + +[SQLSTATE: 0A000](sql-error-conditions-sqlstates.html#class-0A-feature-not-supported) + +Literals of the type `` are not supported. Supported types are ``. + +### UNTYPED_SCALA_UDF + +SQLSTATE: none assigned + +You're using untyped Scala UDF, which does not have the input type information. Spark may blindly pass null to the Scala closure with primitive-type argument, and the closure will see the default value of the Java type for the null argument, e.g. `udf((x: Int) => x, IntegerType)`, the result is 0 for null input. To get rid of this error, you could: +1. use typed Scala UDF APIs(without return type parameter), e.g. `udf((x: Int) => x)`. +2. use Java UDF APIs, e.g. `udf(new UDF1[String, Integer] { override def call(s: String): Integer = s.length() }, IntegerType)`, if input types are all non primitive. +3. set "spark.sql.legacy.allowUntypedScalaUDF" to "true" and use this API with caution. + +### VIEW_ALREADY_EXISTS + +[SQLSTATE: 42P07](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +Cannot create view `` because it already exists. +Choose a different name, drop or replace the existing object, or add the IF NOT EXISTS clause to tolerate pre-existing objects. + +### VIEW_NOT_FOUND + +[SQLSTATE: 42P01](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +The view `` cannot be found. Verify the spelling and correctness of the schema and catalog. +If you did not qualify the name with a schema, verify the current_schema() output, or qualify the name with the correct schema and catalog. +To tolerate the error on drop use DROP VIEW IF EXISTS. + +### WINDOW_FUNCTION_AND_FRAME_MISMATCH + +SQLSTATE: none assigned + +`` function can only be evaluated in an ordered row-based window frame with a single offset: ``. + +### WINDOW_FUNCTION_WITHOUT_OVER_CLAUSE + +SQLSTATE: none assigned + +Window function `` requires an OVER clause. + +### WRITE_STREAM_NOT_ALLOWED + +SQLSTATE: none assigned + +`writeStream` can be called only on streaming Dataset/DataFrame. 
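
For instance (illustrative sketch; `spark` is assumed to be an active `SparkSession`):

```scala
// Fails: writeStream on a batch DataFrame raises WRITE_STREAM_NOT_ALLOWED.
spark.range(10).toDF().writeStream
// Succeeds: writeStream on a DataFrame backed by a streaming source.
spark.readStream.format("rate").load().writeStream
```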
+ +### WRONG_COMMAND_FOR_OBJECT_TYPE + +SQLSTATE: none assigned + +The operation `` requires a ``. But `` is a ``. Use `` instead. + +### [WRONG_NUM_ARGS](sql-error-conditions-wrong-num-args-error-class.html) + +[SQLSTATE: 42605](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) + +The `` requires `` parameters but the actual number is ``. + +For more details see [WRONG_NUM_ARGS](sql-error-conditions-wrong-num-args-error-class.html) + + diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala index a9bda2e0b7c99..558579cdb80ac 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala @@ -29,7 +29,7 @@ import org.apache.spark.sql.catalyst.FunctionIdentifier import org.apache.spark.sql.catalyst.expressions._ import org.apache.spark.sql.catalyst.expressions.aggregate._ import org.apache.spark.sql.catalyst.expressions.xml._ -import org.apache.spark.sql.catalyst.plans.logical.{Generate, LogicalPlan, OneRowRelation, Range} +import org.apache.spark.sql.catalyst.plans.logical.{FunctionBuilderBase, Generate, LogicalPlan, OneRowRelation, Range} import org.apache.spark.sql.catalyst.trees.TreeNodeTag import org.apache.spark.sql.errors.QueryCompilationErrors import org.apache.spark.sql.types._ @@ -358,8 +358,8 @@ object FunctionRegistry { // misc non-aggregate functions expression[Abs]("abs"), expression[Coalesce]("coalesce"), - expression[Explode]("explode"), - expressionGeneratorOuter[Explode]("explode_outer"), + expressionBuilder("explode", ExplodeExpressionBuilder), + expressionGeneratorBuilderOuter("explode_outer", ExplodeExpressionBuilder), expression[Greatest]("greatest"), expression[If]("if"), expression[Inline]("inline"), @@ -491,7 +491,7 @@ object FunctionRegistry { expression[CollectList]("collect_list"), expression[CollectList]("array_agg", true, Some("3.3.0")), expression[CollectSet]("collect_set"), - expression[CountMinSketchAgg]("count_min_sketch"), + expressionBuilder("count_min_sketch", CountMinSketchAggExpressionBuilder), expression[BoolAnd]("every", true), expression[BoolAnd]("bool_and"), expression[BoolOr]("any", true), @@ -823,7 +823,7 @@ object FunctionRegistry { castAlias("string", StringType), // mask functions - expression[Mask]("mask"), + expressionBuilder("mask", MaskExpressionBuilder), // csv expression[CsvToStructs]("from_csv"), @@ -887,6 +887,9 @@ object FunctionRegistry { since: Option[String] = None): (String, (ExpressionInfo, FunctionBuilder)) = { val (expressionInfo, builder) = FunctionRegistryBase.build[T](name, since) val newBuilder = (expressions: Seq[Expression]) => { + if (expressions.exists(_.isInstanceOf[NamedArgumentExpression])) { + throw QueryCompilationErrors.namedArgumentsNotSupported(name) + } val expr = builder(expressions) if (setAlias) expr.setTagValue(FUNC_ALIAS, name) expr @@ -894,6 +897,32 @@ object FunctionRegistry { (name, (expressionInfo, newBuilder)) } + /** + * This method will be used to rearrange the arguments provided in function invocation + * in the order defined by the function signature given in the builder instance. 
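+   *
+   * As an illustrative sketch (the parameter names here are hypothetical): given a
+   * signature with parameters (`input`, `scale`) where `scale` defaults to `Literal(1)`,
+   * a call `f(0, scale => 2)` arrives here as
+   * `Seq(Literal(0), NamedArgumentExpression("scale", Literal(2)))` and is returned as
+   * `Seq(Literal(0), Literal(2))`; if `scale` were omitted, the default `Literal(1)`
+   * would be filled in instead.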
+   *
+   * @param name The name of the function
+   * @param builder The builder of the function expression
+   * @param expressions The argument list passed at function invocation
+   * @tparam T The class of the builder
+   * @return An argument list in positional order as defined by the builder's signature
+   */
+  def rearrangeExpressions[T <: FunctionBuilderBase[_]](
+      name: String,
+      builder: T,
+      expressions: Seq[Expression]) : Seq[Expression] = {
+    val rearrangedExpressions = if (builder.functionSignature.isDefined) {
+      val functionSignature = builder.functionSignature.get
+      builder.rearrange(functionSignature, expressions, name)
+    } else {
+      expressions
+    }
+    if (rearrangedExpressions.exists(_.isInstanceOf[NamedArgumentExpression])) {
+      throw QueryCompilationErrors.namedArgumentsNotSupported(name)
+    }
+    rearrangedExpressions
+  }
+
   private def expressionBuilder[T <: ExpressionBuilder : ClassTag](
       name: String,
       builder: T,
@@ -902,7 +931,8 @@
     val info = FunctionRegistryBase.expressionInfo[T](name, since)
     val funcBuilder = (expressions: Seq[Expression]) => {
       assert(expressions.forall(_.resolved), "function arguments must be resolved.")
-      val expr = builder.build(name, expressions)
+      val rearrangedExpressions = rearrangeExpressions(name, builder, expressions)
+      val expr = builder.build(name, rearrangedExpressions)
       if (setAlias) expr.setTagValue(FUNC_ALIAS, name)
       expr
     }
@@ -935,9 +965,22 @@
   private def expressionGeneratorOuter[T <: Generator : ClassTag](name: String)
       : (String, (ExpressionInfo, FunctionBuilder)) = {
-    val (_, (info, generatorBuilder)) = expression[T](name)
+    val (_, (info, builder)) = expression[T](name)
     val outerBuilder = (args: Seq[Expression]) => {
-      GeneratorOuter(generatorBuilder(args).asInstanceOf[Generator])
+      GeneratorOuter(builder(args).asInstanceOf[Generator])
+    }
+    (name, (info, outerBuilder))
+  }
+
+  private def expressionGeneratorBuilderOuter[T <: ExpressionBuilder : ClassTag]
+    (name: String, builder: T) : (String, (ExpressionInfo, FunctionBuilder)) = {
+    val info = FunctionRegistryBase.expressionInfo[T](name, since = None)
+    val outerBuilder = (args: Seq[Expression]) => {
+      val rearrangedArgs =
+        FunctionRegistry.rearrangeExpressions(name, builder, args)
+      val generator = builder.build(name, rearrangedArgs)
+      assert(generator.isInstanceOf[Generator])
+      GeneratorOuter(generator.asInstanceOf[Generator])
     }
     (name, (info, outerBuilder))
   }
@@ -980,6 +1023,30 @@ object TableFunctionRegistry {
     (name, (info, (expressions: Seq[Expression]) => builder(expressions)))
   }
 
+  /**
+   * Registers a table-valued function: returns a builder that, given the input
+   * arguments, produces a logical plan representing the table-valued function.
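+   * For example, the named-argument-aware `explode` table function is registered
+   * below via `generatorBuilder("explode", ExplodeGeneratorBuilder)`.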
+   *
+   * @param name Name of the function
+   * @param builder Object which will build the expression given input arguments
+   * @param since The version in which the function was first added
+   * @tparam T Type of the builder
+   * @return A tuple of the function name, expression info, and function builder
+   */
+  def generatorBuilder[T <: GeneratorBuilder : ClassTag](
+      name: String,
+      builder: T,
+      since: Option[String] = None): (String, (ExpressionInfo, TableFunctionBuilder)) = {
+    val info = FunctionRegistryBase.expressionInfo[T](name, since)
+    val funcBuilder = (expressions: Seq[Expression]) => {
+      assert(expressions.forall(_.resolved), "function arguments must be resolved.")
+      val rearrangedExpressions = FunctionRegistry.rearrangeExpressions(name, builder, expressions)
+      builder.build(name, rearrangedExpressions)
+    }
+    (name, (info, funcBuilder))
+  }
+
   def generator[T <: Generator : ClassTag](name: String, outer: Boolean = false)
       : (String, (ExpressionInfo, TableFunctionBuilder)) = {
     val (info, builder) = FunctionRegistryBase.build[T](name, since = None)
@@ -999,8 +1066,8 @@
   val logicalPlans: Map[String, (ExpressionInfo, TableFunctionBuilder)] = Map(
     logicalPlan[Range]("range"),
-    generator[Explode]("explode"),
-    generator[Explode]("explode_outer", outer = true),
+    generatorBuilder("explode", ExplodeGeneratorBuilder),
+    generatorBuilder("explode_outer", ExplodeOuterGeneratorBuilder),
     generator[Inline]("inline"),
     generator[Inline]("inline_outer", outer = true),
     generator[JsonTuple]("json_tuple"),
@@ -1022,6 +1089,28 @@
   val functionSet: Set[FunctionIdentifier] = builtin.listFunction().toSet
 }
 
-trait ExpressionBuilder {
-  def build(funcName: String, expressions: Seq[Expression]): Expression
+/**
+ * This is a trait used for scalar-valued functions that defines how their expression
+ * representations are constructed in [[FunctionRegistry]].
+ */
+trait ExpressionBuilder extends FunctionBuilderBase[Expression]
+
+/**
+ * This is a trait used for table-valued functions that defines how their expression
+ * representations are constructed in [[TableFunctionRegistry]].
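+ * Concrete implementations only need to supply [[GeneratorBuilder.isOuter]] and
+ * [[GeneratorBuilder.buildGenerator]]; the final `build` method below wraps the
+ * resulting [[Generator]] in a [[Generate]] node over a [[OneRowRelation]] child.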
+ */
+trait GeneratorBuilder extends FunctionBuilderBase[LogicalPlan] {
+  override final def build(funcName: String, expressions: Seq[Expression]) : LogicalPlan = {
+    Generate(
+      buildGenerator(funcName, expressions),
+      unrequiredChildIndex = Nil,
+      outer = isOuter,
+      qualifier = None,
+      generatorOutput = Nil,
+      child = OneRowRelation())
+  }
+
+  def isOuter: Boolean
+
+  def buildGenerator(funcName: String, expressions: Seq[Expression]) : Generator
 }
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/CountMinSketchAgg.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/CountMinSketchAgg.scala
index 6cefca418cea0..f450d0781f499 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/CountMinSketchAgg.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/CountMinSketchAgg.scala
@@ -18,9 +18,10 @@ package org.apache.spark.sql.catalyst.expressions.aggregate
 
 import org.apache.spark.sql.catalyst.InternalRow
-import org.apache.spark.sql.catalyst.analysis.TypeCheckResult
+import org.apache.spark.sql.catalyst.analysis.{ExpressionBuilder, TypeCheckResult}
 import org.apache.spark.sql.catalyst.analysis.TypeCheckResult.{DataTypeMismatch, TypeCheckSuccess}
 import org.apache.spark.sql.catalyst.expressions.{ExpectsInputTypes, Expression, ExpressionDescription, Literal}
+import org.apache.spark.sql.catalyst.plans.logical.{FunctionSignature, NamedArgument}
 import org.apache.spark.sql.catalyst.trees.QuaternaryLike
 import org.apache.spark.sql.errors.QueryErrorsBase
 import org.apache.spark.sql.types._
@@ -39,22 +40,6 @@ import org.apache.spark.util.sketch.CountMinSketch
  * @param confidenceExpression confidence, must be positive and less than 1.0
  * @param seedExpression random seed
  */
-// scalastyle:off line.size.limit
-@ExpressionDescription(
-  usage = """
-    _FUNC_(col, eps, confidence, seed) - Returns a count-min sketch of a column with the given esp,
-    confidence and seed. The result is an array of bytes, which can be deserialized to a
-    `CountMinSketch` before usage. Count-min sketch is a probabilistic data structure used for
-    cardinality estimation using sub-linear space.
-  """,
-  examples = """
-    Examples:
-      > SELECT hex(_FUNC_(col, 0.5d, 0.5d, 1)) FROM VALUES (1), (2), (1) AS tab(col);
-       0000000100000000000000030000000100000004000000005D8D6AB90000000000000000000000000000000200000000000000010000000000000000
-  """,
-  group = "agg_funcs",
-  since = "2.2.0")
-// scalastyle:on line.size.limit
 case class CountMinSketchAgg(
     child: Expression,
     epsExpression: Expression,
@@ -208,3 +193,32 @@ case class CountMinSketchAgg(
     confidenceExpression = third,
     seedExpression = fourth)
 }
+
+// scalastyle:off line.size.limit
+@ExpressionDescription(
+  usage = """
+    _FUNC_(col, eps, confidence, seed) - Returns a count-min sketch of a column with the given eps,
+ """, + examples = """ + Examples: + > SELECT hex(_FUNC_(col, 0.5d, 0.5d, 1)) FROM VALUES (1), (2), (1) AS tab(col); + 0000000100000000000000030000000100000004000000005D8D6AB90000000000000000000000000000000200000000000000010000000000000000 + """, + group = "agg_funcs", + since = "2.2.0") +// scalastyle:on line.size.limit +object CountMinSketchAggExpressionBuilder extends ExpressionBuilder { + final val defaultFunctionSignature = FunctionSignature(Seq( + NamedArgument("column"), + NamedArgument("epsilon"), + NamedArgument("confidence"), + NamedArgument("seed") + )) + override def functionSignature: Option[FunctionSignature] = Some(defaultFunctionSignature) + override def build(funcName: String, expressions: Seq[Expression]): Expression = { + new CountMinSketchAgg(expressions(0), expressions(1), expressions(2), expressions(3)) + } +} diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/generators.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/generators.scala index 6ae7ea206c833..9a6c2ced1ea3c 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/generators.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/generators.scala @@ -21,11 +21,12 @@ import scala.collection.mutable import org.apache.spark.sql.Row import org.apache.spark.sql.catalyst.{CatalystTypeConverters, InternalRow} -import org.apache.spark.sql.catalyst.analysis.TypeCheckResult +import org.apache.spark.sql.catalyst.analysis.{ExpressionBuilder, GeneratorBuilder, TypeCheckResult} import org.apache.spark.sql.catalyst.analysis.TypeCheckResult.DataTypeMismatch import org.apache.spark.sql.catalyst.expressions.Cast._ import org.apache.spark.sql.catalyst.expressions.codegen._ import org.apache.spark.sql.catalyst.expressions.codegen.Block._ +import org.apache.spark.sql.catalyst.plans.logical.{FunctionSignature, NamedArgument} import org.apache.spark.sql.catalyst.trees.TreePattern.{GENERATOR, TreePattern} import org.apache.spark.sql.catalyst.util.{ArrayData, MapData} import org.apache.spark.sql.catalyst.util.SQLKeywordUtils._ @@ -413,6 +414,19 @@ abstract class ExplodeBase extends UnaryExpression with CollectionGenerator with * 20 * }}} */ +case class Explode(child: Expression) extends ExplodeBase { + override val position: Boolean = false + override protected def withNewChildInternal(newChild: Expression): Explode = + copy(child = newChild) +} + +trait ExplodeGeneratorBuilderBase extends GeneratorBuilder { + override def functionSignature: Option[FunctionSignature] = + Some(FunctionSignature(Seq(NamedArgument("collection")))) + override def buildGenerator(funcName: String, expressions: Seq[Expression]): Generator = + Explode(expressions(0)) +} + // scalastyle:off line.size.limit @ExpressionDescription( usage = "_FUNC_(expr) - Separates the elements of array `expr` into multiple rows, or the elements of map `expr` into multiple rows and columns. 
Unless specified otherwise, uses the default column name `col` for elements of the array or `key` and `value` for the elements of the map.", @@ -421,16 +435,60 @@ abstract class ExplodeBase extends UnaryExpression with CollectionGenerator with > SELECT _FUNC_(array(10, 20)); 10 20 + > SELECT _FUNC_(collection => array(10, 20)); + 10 + 20 """, since = "1.0.0", group = "generator_funcs") // scalastyle:on line.size.limit -case class Explode(child: Expression) extends ExplodeBase { - override val position: Boolean = false - override protected def withNewChildInternal(newChild: Expression): Explode = - copy(child = newChild) +object ExplodeExpressionBuilder extends ExpressionBuilder { + override def functionSignature: Option[FunctionSignature] = + Some(FunctionSignature(Seq(NamedArgument("collection")))) + + override def build(funcName: String, expressions: Seq[Expression]) : Expression = + Explode(expressions(0)) +} + +// scalastyle:off line.size.limit +@ExpressionDescription( + usage = "_FUNC_(expr) - Separates the elements of array `expr` into multiple rows, or the elements of map `expr` into multiple rows and columns. Unless specified otherwise, uses the default column name `col` for elements of the array or `key` and `value` for the elements of the map.", + examples = """ + Examples: + > SELECT _FUNC_(array(10, 20)); + 10 + 20 + > SELECT _FUNC_(collection => array(10, 20)); + 10 + 20 + """, + since = "1.0.0", + group = "generator_funcs") +// scalastyle:on line.size.limit +object ExplodeGeneratorBuilder extends ExplodeGeneratorBuilderBase { + override def isOuter: Boolean = false +} + +// scalastyle:off line.size.limit +@ExpressionDescription( + usage = "_FUNC_(expr) - Separates the elements of array `expr` into multiple rows, or the elements of map `expr` into multiple rows and columns. Unless specified otherwise, uses the default column name `col` for elements of the array or `key` and `value` for the elements of the map.", + examples = """ + Examples: + > SELECT _FUNC_(array(10, 20)); + 10 + 20 + > SELECT _FUNC_(collection => array(10, 20)); + 10 + 20 + """, + since = "1.0.0", + group = "generator_funcs") +// scalastyle:on line.size.limit +object ExplodeOuterGeneratorBuilder extends ExplodeGeneratorBuilderBase { + override def isOuter: Boolean = true } + /** * Given an input array produces a sequence of rows for each position and value in the array. 
* diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/maskExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/maskExpressions.scala index af74e7c0f7b24..64fa5c194db25 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/maskExpressions.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/maskExpressions.scala @@ -18,63 +18,15 @@ package org.apache.spark.sql.catalyst.expressions import org.apache.spark.sql.catalyst.InternalRow -import org.apache.spark.sql.catalyst.analysis.TypeCheckResult +import org.apache.spark.sql.catalyst.analysis.{ExpressionBuilder, TypeCheckResult} import org.apache.spark.sql.catalyst.analysis.TypeCheckResult.DataTypeMismatch import org.apache.spark.sql.catalyst.expressions.codegen._ import org.apache.spark.sql.catalyst.expressions.codegen.Block._ +import org.apache.spark.sql.catalyst.plans.logical.{FunctionSignature, NamedArgument} import org.apache.spark.sql.errors.QueryErrorsBase import org.apache.spark.sql.types.{AbstractDataType, DataType, StringType} import org.apache.spark.unsafe.types.UTF8String -// scalastyle:off line.size.limit -@ExpressionDescription( - usage = - """_FUNC_(input[, upperChar, lowerChar, digitChar, otherChar]) - masks the given string value. - The function replaces characters with 'X' or 'x', and numbers with 'n'. - This can be useful for creating copies of tables with sensitive information removed. - """, - arguments = """ - Arguments: - * input - string value to mask. Supported types: STRING, VARCHAR, CHAR - * upperChar - character to replace upper-case characters with. Specify NULL to retain original character. Default value: 'X' - * lowerChar - character to replace lower-case characters with. Specify NULL to retain original character. Default value: 'x' - * digitChar - character to replace digit characters with. Specify NULL to retain original character. Default value: 'n' - * otherChar - character to replace all other characters with. Specify NULL to retain original character. 
Default value: NULL - """, - examples = """ - Examples: - > SELECT _FUNC_('abcd-EFGH-8765-4321'); - xxxx-XXXX-nnnn-nnnn - > SELECT _FUNC_('abcd-EFGH-8765-4321', 'Q'); - xxxx-QQQQ-nnnn-nnnn - > SELECT _FUNC_('AbCD123-@$#', 'Q', 'q'); - QqQQnnn-@$# - > SELECT _FUNC_('AbCD123-@$#'); - XxXXnnn-@$# - > SELECT _FUNC_('AbCD123-@$#', 'Q'); - QxQQnnn-@$# - > SELECT _FUNC_('AbCD123-@$#', 'Q', 'q'); - QqQQnnn-@$# - > SELECT _FUNC_('AbCD123-@$#', 'Q', 'q', 'd'); - QqQQddd-@$# - > SELECT _FUNC_('AbCD123-@$#', 'Q', 'q', 'd', 'o'); - QqQQdddoooo - > SELECT _FUNC_('AbCD123-@$#', NULL, 'q', 'd', 'o'); - AqCDdddoooo - > SELECT _FUNC_('AbCD123-@$#', NULL, NULL, 'd', 'o'); - AbCDdddoooo - > SELECT _FUNC_('AbCD123-@$#', NULL, NULL, NULL, 'o'); - AbCD123oooo - > SELECT _FUNC_(NULL, NULL, NULL, NULL, 'o'); - NULL - > SELECT _FUNC_(NULL); - NULL - > SELECT _FUNC_('AbCD123-@$#', NULL, NULL, NULL, NULL); - AbCD123-@$# - """, - since = "3.4.0", - group = "string_funcs") -// scalastyle:on line.size.limit case class Mask( input: Expression, upperChar: Expression, @@ -277,13 +229,13 @@ case class MaskArgument(maskChar: Char, ignore: Boolean) object Mask { // Default character to replace upper-case characters - private val MASKED_UPPERCASE = 'X' + val MASKED_UPPERCASE = 'X' // Default character to replace lower-case characters - private val MASKED_LOWERCASE = 'x' + val MASKED_LOWERCASE = 'x' // Default character to replace digits - private val MASKED_DIGIT = 'n' + val MASKED_DIGIT = 'n' // This value helps to retain original value in the input by ignoring the replacement rules - private val MASKED_IGNORE = null + val MASKED_IGNORE = null def transformInput( input: Any, @@ -321,3 +273,72 @@ object Mask { } } } + +// scalastyle:off line.size.limit +@ExpressionDescription( + usage = + """_FUNC_(input[, upperChar, lowerChar, digitChar, otherChar]) - masks the given string value. + The function replaces characters with 'X' or 'x', and numbers with 'n'. + This can be useful for creating copies of tables with sensitive information removed. + """, + arguments = """ + Arguments: + * input - string value to mask. Supported types: STRING, VARCHAR, CHAR + * upperChar - character to replace upper-case characters with. Specify NULL to retain original character. Default value: 'X' + * lowerChar - character to replace lower-case characters with. Specify NULL to retain original character. Default value: 'x' + * digitChar - character to replace digit characters with. Specify NULL to retain original character. Default value: 'n' + * otherChar - character to replace all other characters with. Specify NULL to retain original character. 
Default value: NULL + """, + examples = """ + Examples: + > SELECT _FUNC_('abcd-EFGH-8765-4321'); + xxxx-XXXX-nnnn-nnnn + > SELECT _FUNC_('abcd-EFGH-8765-4321', 'Q'); + xxxx-QQQQ-nnnn-nnnn + > SELECT _FUNC_('AbCD123-@$#', 'Q', 'q'); + QqQQnnn-@$# + > SELECT _FUNC_('AbCD123-@$#'); + XxXXnnn-@$# + > SELECT _FUNC_('AbCD123-@$#', 'Q'); + QxQQnnn-@$# + > SELECT _FUNC_('AbCD123-@$#', 'Q', 'q'); + QqQQnnn-@$# + > SELECT _FUNC_('AbCD123-@$#', 'Q', 'q', 'd'); + QqQQddd-@$# + > SELECT _FUNC_('AbCD123-@$#', 'Q', 'q', 'd', 'o'); + QqQQdddoooo + > SELECT _FUNC_('AbCD123-@$#', NULL, 'q', 'd', 'o'); + AqCDdddoooo + > SELECT _FUNC_('AbCD123-@$#', NULL, NULL, 'd', 'o'); + AbCDdddoooo + > SELECT _FUNC_('AbCD123-@$#', NULL, NULL, NULL, 'o'); + AbCD123oooo + > SELECT _FUNC_(NULL, NULL, NULL, NULL, 'o'); + NULL + > SELECT _FUNC_(NULL); + NULL + > SELECT _FUNC_('AbCD123-@$#', NULL, NULL, NULL, NULL); + AbCD123-@$# + """, + since = "3.4.0", + group = "string_funcs") +// scalastyle:on line.size.limit +object MaskExpressionBuilder extends ExpressionBuilder { + override def functionSignature: Option[FunctionSignature] = { + val strArg = NamedArgument("str") + val upperCharArg = NamedArgument("upperChar", Some(Literal(Mask.MASKED_UPPERCASE))) + val lowerCharArg = NamedArgument("lowerChar", Some(Literal(Mask.MASKED_LOWERCASE))) + val digitCharArg = NamedArgument("digitChar", Some(Literal(Mask.MASKED_DIGIT))) + val otherCharArg = NamedArgument( + "otherChar", + Some(Literal(Mask.MASKED_IGNORE, StringType))) + val functionSignature: FunctionSignature = FunctionSignature(Seq( + strArg, upperCharArg, lowerCharArg, digitCharArg, otherCharArg)) + Some(functionSignature) + } + + override def build(funcName: String, expressions: Seq[Expression]): Expression = { + new Mask(expressions(0), expressions(1), expressions(2), expressions(3), expressions(4)) + } +} + diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/FunctionBuilderBase.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/FunctionBuilderBase.scala new file mode 100644 index 0000000000000..04de88acc09e4 --- /dev/null +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/FunctionBuilderBase.scala @@ -0,0 +1,179 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.spark.sql.catalyst.plans.logical + +import org.apache.spark.sql.catalyst.expressions.{Expression, NamedArgumentExpression} +import org.apache.spark.sql.errors.QueryCompilationErrors + +/** + * This is a base trait that is used for implementing builder classes that can be used to construct + * expressions or logical plans depending on if it is a table-valued or scalar-valued function. 
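For context on what the `MaskExpressionBuilder` defined above enables, here is an editorial sketch (not part of the patch): once `mask` is resolved through this builder, named and positional invocations rearrange to the same call. The `spark` SparkSession is assumed, as is an enabled `spark.sql.allowNamedFunctionArguments` flag; the expected output comes from the golden files later in this patch.

```scala
// Sketch: both invocations rearrange to mask('AbCD123-@$#', 'Q', 'q', 'd', 'o')
// and both return "QqQQdddoooo".
spark.sql("SELECT mask('AbCD123-@$#', lowerChar => 'q', upperChar => 'Q', otherChar => 'o', digitChar => 'd')").show()
spark.sql("SELECT mask(lowerChar => 'q', upperChar => 'Q', otherChar => 'o', digitChar => 'd', str => 'AbCD123-@$#')").show()
```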
+ *
+ * Two classes of builders currently exist for this trait: [[GeneratorBuilder]] and
+ * [[ExpressionBuilder]]. If a new class of functions is to be added, a new trait should also be
+ * created that extends this trait.
+ *
+ * @tparam T The type that is expected to be returned by the [[FunctionBuilderBase.build]] function
+ */
+trait FunctionBuilderBase[T] {
+  /**
+   * A method that returns the function signature for this function.
+   * Each function signature includes a list of parameters against which the analyzer can
+   * compare the arguments provided in a function call to determine whether the call
+   * matches the function signature.
+   *
+   * IMPORTANT: For now, each function expression builder should have only one function signature.
+   * Also, for any function signature, required arguments must always come before optional ones.
+   */
+  def functionSignature: Option[FunctionSignature] = None
+
+  /**
+   * This function rearranges the arguments provided during function invocation into positional
+   * order according to the function signature. It fills in the default values if optional
+   * parameters do not have their values specified. Any function which supports named arguments
+   * will have this routine invoked, even if no named arguments are present in the argument list.
+   * This is done to eliminate the constructor overloads that some expressions used to supply
+   * default values before the named argument framework was implemented. This function also
+   * checks that the number of arguments is correct. If that is not the case, an error will be
+   * thrown.
+   *
+   * IMPORTANT: This method will be called before the [[FunctionBuilderBase.build]] method is
+   * invoked. It is guaranteed that the expressions provided to the [[FunctionBuilderBase.build]]
+   * function form a valid set of argument expressions that can be used in the construction of
+   * the function expression.
+   *
+   * @param expectedSignature The function signature according to which the arguments are
+   *                          rearranged
+   * @param providedArguments The list of arguments passed from function invocation
+   * @param functionName The name of the function
+   * @return The rearranged argument list with arguments in positional order
+   */
+  def rearrange(
+      expectedSignature: FunctionSignature,
+      providedArguments: Seq[Expression],
+      functionName: String): Seq[Expression] = {
+    NamedArgumentsSupport.defaultRearrange(expectedSignature, providedArguments, functionName)
+  }
+
+  def build(funcName: String, expressions: Seq[Expression]): T
+}
+
+object NamedArgumentsSupport {
+  /**
+   * This method is the default routine which rearranges the arguments in positional order
+   * according to the function signature provided. It also fills in any default values that
+   * exist for optional arguments. This method is invoked even if there are no named arguments
+   * in the argument list, and it keeps all positional arguments in their original order.
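To make the rearrangement contract concrete, here is a minimal editorial sketch (not part of the patch), styled after the test suite added later in this change; the parameter names and values are illustrative:

```scala
import org.apache.spark.sql.catalyst.expressions.{Literal, NamedArgumentExpression}
import org.apache.spark.sql.catalyst.plans.logical.{FunctionSignature, NamedArgument, NamedArgumentsSupport}

// One required parameter (no default) followed by one optional parameter.
val signature = FunctionSignature(Seq(
  NamedArgument("k1"),
  NamedArgument("k2", Some(Literal("v2")))))

// "k1" is bound by name; "k2" is absent, so its default fills the slot.
val rearranged = NamedArgumentsSupport.defaultRearrange(
  signature, Seq(NamedArgumentExpression("k1", Literal("v1"))), "func")
// rearranged == Seq(Literal("v1"), Literal("v2"))
```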
+   *
+   * @param functionSignature The function signature that defines the positional ordering
+   * @param args The argument list provided in function invocation
+   * @param functionName The name of the function
+   * @return A list of arguments rearranged in the positional order defined by the provided
+   *         signature
+   */
+  final def defaultRearrange(
+      functionSignature: FunctionSignature,
+      args: Seq[Expression],
+      functionName: String): Seq[Expression] = {
+    val parameters: Seq[NamedArgument] = functionSignature.parameters
+    if (parameters.dropWhile(_.default.isEmpty).exists(_.default.isEmpty)) {
+      throw QueryCompilationErrors.unexpectedRequiredParameterInFunctionSignature(
+        functionName, functionSignature)
+    }
+
+    val (positionalArgs, namedArgs) = args.span(!_.isInstanceOf[NamedArgumentExpression])
+    val namedParameters: Seq[NamedArgument] = parameters.drop(positionalArgs.size)
+
+    // The following loop checks for:
+    // 1. Unrecognized parameter names
+    // 2. Duplicate routine parameter assignments
+    val allParameterNames: Seq[String] = parameters.map(_.name)
+    val parameterNamesSet: Set[String] = allParameterNames.toSet
+    val positionalParametersSet = allParameterNames.take(positionalArgs.size).toSet
+    val namedParametersSet = collection.mutable.Set[String]()
+
+    for (arg <- namedArgs) {
+      arg match {
+        case namedArg: NamedArgumentExpression =>
+          val parameterName = namedArg.key
+          if (!parameterNamesSet.contains(parameterName)) {
+            throw QueryCompilationErrors.unrecognizedParameterName(functionName, namedArg.key,
+              parameterNamesSet.toSeq)
+          }
+          if (positionalParametersSet.contains(parameterName)) {
+            throw QueryCompilationErrors.positionalAndNamedArgumentDoubleReference(
+              functionName, namedArg.key)
+          }
+          if (namedParametersSet.contains(parameterName)) {
+            throw QueryCompilationErrors.doubleNamedArgumentReference(
+              functionName, namedArg.key)
+          }
+          namedParametersSet.add(namedArg.key)
+        case _ =>
+          throw QueryCompilationErrors.unexpectedPositionalArgument(functionName)
+      }
+    }
+
+    // Check the argument list size against the provided parameter list length.
+    if (parameters.size < args.length) {
+      val validParameterSizes =
+        Array.range(parameters.count(_.default.isEmpty), parameters.size + 1).toSeq
+      throw QueryCompilationErrors.wrongNumArgsError(
+        functionName, validParameterSizes, args.length)
+    }
+
+    // This constructs a map from argument name to value for argument rearrangement.
+    val namedArgMap = namedArgs.map { arg =>
+      val namedArg = arg.asInstanceOf[NamedArgumentExpression]
+      namedArg.key -> namedArg.value
+    }.toMap
+
+    // We rearrange named arguments to match their positional order.
+    val rearrangedNamedArgs: Seq[Expression] = namedParameters.map { param =>
+      namedArgMap.getOrElse(
+        param.name,
+        if (param.default.isEmpty) {
+          throw QueryCompilationErrors.requiredParameterNotFound(functionName, param.name)
+        } else {
+          param.default.get
+        }
+      )
+    }
+    val rearrangedArgs = positionalArgs ++ rearrangedNamedArgs
+    assert(rearrangedArgs.size == parameters.size)
+    rearrangedArgs
+  }
+}
+
+/**
+ * Represents a parameter of a function expression. Function expression builders should use this
+ * class to construct the parameter lists returned by [[FunctionBuilderBase.functionSignature]].
+ *
+ * @param name The name of the parameter.
+ * @param default The default value of the argument. If the default is None, the argument is
+ *                required. If no argument is provided, an exception is thrown.
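As an editorial illustration of the required/optional distinction just described (assumed names, not part of the patch): a parameter with `default = None` is required, and `defaultRearrange` rejects any signature that places a required parameter after an optional one.

```scala
import org.apache.spark.sql.catalyst.expressions.Literal
import org.apache.spark.sql.catalyst.plans.logical.{FunctionSignature, NamedArgument}

// Valid: the required parameter precedes the optional one.
val valid = FunctionSignature(Seq(
  NamedArgument("input"),                        // required (no default)
  NamedArgument("flag", Some(Literal(false)))))  // optional

// Invalid: a required parameter follows an optional one; defaultRearrange
// throws an INTERNAL_ERROR via unexpectedRequiredParameterInFunctionSignature.
val invalid = FunctionSignature(Seq(
  NamedArgument("flag", Some(Literal(false))),
  NamedArgument("input")))
```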
+ */
+case class NamedArgument(
+    name: String,
+    default: Option[Expression] = None)
+
+/**
+ * Represents a function signature and the list of parameters it takes as input.
+ * Currently, overloads are not supported and only one FunctionSignature is allowed
+ * per function expression.
+ *
+ * @param parameters The list of parameters which the function takes
+ */
+case class FunctionSignature(parameters: Seq[NamedArgument])
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
index 955046e74e1ed..346f25580aaeb 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
@@ -30,7 +30,7 @@ import org.apache.spark.sql.catalyst.catalog.CatalogTypes.TablePartitionSpec
 import org.apache.spark.sql.catalyst.expressions.{Alias, Attribute, AttributeReference, AttributeSet, CreateMap, CreateStruct, Expression, GroupingID, NamedExpression, SpecifiedWindowFrame, WindowFrame, WindowFunction, WindowSpecDefinition}
 import org.apache.spark.sql.catalyst.expressions.aggregate.AnyValue
 import org.apache.spark.sql.catalyst.plans.JoinType
-import org.apache.spark.sql.catalyst.plans.logical.{Assignment, Join, LogicalPlan, SerdeInfo, Window}
+import org.apache.spark.sql.catalyst.plans.logical.{Assignment, FunctionSignature, Join, LogicalPlan, SerdeInfo, Window}
 import org.apache.spark.sql.catalyst.trees.{Origin, TreeNode}
 import org.apache.spark.sql.catalyst.util.{quoteIdentifier, FailFastMode, ParseMode, PermissiveMode}
 import org.apache.spark.sql.connector.catalog._
@@ -50,6 +50,78 @@ import org.apache.spark.sql.types._
  */
 private[sql] object QueryCompilationErrors extends QueryErrorsBase {
 
+  def unexpectedRequiredParameterInFunctionSignature(
+      functionName: String, functionSignature: FunctionSignature): Throwable = {
+    val errorMessage = s"Function $functionName has an unexpected required argument for" +
+      s" the provided function signature $functionSignature. All required arguments should" +
+      " come before optional arguments."
+ SparkException.internalError(errorMessage) + } + + def namedArgumentsNotSupported(functionName: String) : Throwable = { + new AnalysisException( + errorClass = "NAMED_PARAMETERS_NOT_SUPPORTED", + messageParameters = Map("functionName" -> toSQLId(functionName)) + ) + } + + def positionalAndNamedArgumentDoubleReference( + functionName: String, parameterName: String) : Throwable = { + val errorClass = + "DUPLICATE_ROUTINE_PARAMETER_ASSIGNMENT.BOTH_POSITIONAL_AND_NAMED" + new AnalysisException( + errorClass = errorClass, + messageParameters = Map( + "functionName" -> toSQLId(functionName), + "parameterName" -> toSQLId(parameterName)) + ) + } + + def doubleNamedArgumentReference( + functionName: String, parameterName: String): Throwable = { + val errorClass = + "DUPLICATE_ROUTINE_PARAMETER_ASSIGNMENT.DOUBLE_NAMED_ARGUMENT_REFERENCE" + new AnalysisException( + errorClass = errorClass, + messageParameters = Map( + "functionName" -> toSQLId(functionName), + "parameterName" -> toSQLId(parameterName)) + ) + } + + def requiredParameterNotFound( + functionName: String, parameterName: String) : Throwable = { + new AnalysisException( + errorClass = "REQUIRED_PARAMETER_NOT_FOUND", + messageParameters = Map( + "functionName" -> toSQLId(functionName), + "parameterName" -> toSQLId(parameterName)) + ) + } + + def unrecognizedParameterName( + functionName: String, argumentName: String, candidates: Seq[String]): Throwable = { + import org.apache.spark.sql.catalyst.util.StringUtils.orderSuggestedIdentifiersBySimilarity + + val inputs = candidates.map(candidate => Seq(candidate)).toSeq + val recommendations = orderSuggestedIdentifiersBySimilarity(argumentName, inputs) + .take(3) + new AnalysisException( + errorClass = "UNRECOGNIZED_PARAMETER_NAME", + messageParameters = Map( + "functionName" -> toSQLId(functionName), + "argumentName" -> toSQLId(argumentName), + "proposal" -> recommendations.mkString(" ")) + ) + } + + def unexpectedPositionalArgument(functionName: String): Throwable = { + new AnalysisException( + errorClass = "UNEXPECTED_POSITIONAL_ARGUMENT", + messageParameters = Map("functionName" -> toSQLId(functionName)) + ) + } + def groupingIDMismatchError(groupingID: GroupingID, groupByExprs: Seq[Expression]): Throwable = { new AnalysisException( errorClass = "GROUPING_ID_COLUMN_MISMATCH", @@ -195,7 +267,7 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase { def namedArgumentsNotEnabledError(functionName: String, argumentName: String): Throwable = { new AnalysisException( - errorClass = "NAMED_ARGUMENTS_SUPPORT_DISABLED", + errorClass = "NAMED_PARAMETER_SUPPORT_DISABLED", messageParameters = Map( "functionName" -> toSQLId(functionName), "argument" -> toSQLId(argumentName)) diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/NamedArgumentFunctionSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/NamedArgumentFunctionSuite.scala new file mode 100644 index 0000000000000..fd27512a2eda6 --- /dev/null +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/NamedArgumentFunctionSuite.scala @@ -0,0 +1,151 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.spark.sql.catalyst.analysis + +import org.apache.spark.SparkThrowable +import org.apache.spark.sql.catalyst.InternalRow +import org.apache.spark.sql.catalyst.expressions.{Expression, Literal, NamedArgumentExpression} +import org.apache.spark.sql.catalyst.expressions.codegen.{CodegenContext, ExprCode} +import org.apache.spark.sql.catalyst.plans.logical.{FunctionBuilderBase, FunctionSignature, NamedArgument, NamedArgumentsSupport} +import org.apache.spark.sql.catalyst.util.TypeUtils.toSQLId +import org.apache.spark.sql.types.DataType + + +case class DummyExpression( + k1: Expression, + k2: Expression, + k3: Expression, + k4: Expression) extends Expression { + override def nullable: Boolean = false + override def eval(input: InternalRow): Any = None + override protected def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = null + override def dataType: DataType = null + override def children: Seq[Expression] = Nil + override protected def withNewChildrenInternal( + newChildren: IndexedSeq[Expression]): Expression = null +} + +object DummyExpressionBuilder extends ExpressionBuilder { + def defaultFunctionSignature: FunctionSignature = { + FunctionSignature(Seq(NamedArgument("k1"), + NamedArgument("k2"), + NamedArgument("k3"), + NamedArgument("k4"))) + } + + override def functionSignature: Option[FunctionSignature] = + Some(defaultFunctionSignature) + override def build(funcName: String, expressions: Seq[Expression]): Expression = + DummyExpression(expressions(0), expressions(1), expressions(2), expressions(3)) +} + +class NamedArgumentFunctionSuite extends AnalysisTest { + + final val k1Arg = Literal("v1") + final val k2Arg = NamedArgumentExpression("k2", Literal("v2")) + final val k3Arg = NamedArgumentExpression("k3", Literal("v3")) + final val k4Arg = NamedArgumentExpression("k4", Literal("v4")) + final val namedK1Arg = NamedArgumentExpression("k1", Literal("v1-2")) + final val args = Seq(k1Arg, k4Arg, k2Arg, k3Arg) + final val expectedSeq = Seq(Literal("v1"), Literal("v2"), Literal("v3"), Literal("v4")) + final val signature = DummyExpressionBuilder.defaultFunctionSignature + final val illegalSignature = FunctionSignature(Seq( + NamedArgument("k1"), NamedArgument("k2", Option(Literal("v2"))), NamedArgument("k3"))) + + test("Check rearrangement of expressions") { + val rearrangedArgs = NamedArgumentsSupport.defaultRearrange( + signature, args, "function") + for ((returnedArg, expectedArg) <- rearrangedArgs.zip(expectedSeq)) { + assert(returnedArg == expectedArg) + } + val rearrangedArgsWithBuilder = + FunctionRegistry.rearrangeExpressions("function", DummyExpressionBuilder, args) + for ((returnedArg, expectedArg) <- rearrangedArgsWithBuilder.zip(expectedSeq)) { + assert(returnedArg == expectedArg) + } + } + + private def parseRearrangeException(functionSignature: FunctionSignature, + expressions: Seq[Expression], + functionName: String = "function"): SparkThrowable = { + intercept[SparkThrowable]( + NamedArgumentsSupport.defaultRearrange(functionSignature, expressions, functionName)) + } + + private def parseExternalException[T <: 
FunctionBuilderBase[_]]( + functionName: String, + builder: T, + expressions: Seq[Expression]) : SparkThrowable = { + intercept[SparkThrowable]( + FunctionRegistry.rearrangeExpressions[T](functionName, builder, expressions)) + } + + test("DUPLICATE_ROUTINE_PARAMETER_ASSIGNMENT") { + val errorClass = + "DUPLICATE_ROUTINE_PARAMETER_ASSIGNMENT.BOTH_POSITIONAL_AND_NAMED" + checkError( + exception = parseRearrangeException( + signature, Seq(k1Arg, k2Arg, k3Arg, k4Arg, namedK1Arg), "foo"), + errorClass = errorClass, + parameters = Map("functionName" -> toSQLId("foo"), "parameterName" -> toSQLId("k1")) + ) + checkError( + exception = parseRearrangeException( + signature, Seq(k1Arg, k2Arg, k3Arg, k4Arg, k4Arg), "foo"), + errorClass = "DUPLICATE_ROUTINE_PARAMETER_ASSIGNMENT.DOUBLE_NAMED_ARGUMENT_REFERENCE", + parameters = Map("functionName" -> toSQLId("foo"), "parameterName" -> toSQLId("k4")) + ) + } + + test("REQUIRED_PARAMETER_NOT_FOUND") { + checkError( + exception = parseRearrangeException(signature, Seq(k1Arg, k2Arg, k3Arg), "foo"), + errorClass = "REQUIRED_PARAMETER_NOT_FOUND", + parameters = Map("functionName" -> toSQLId("foo"), "parameterName" -> toSQLId("k4")) + ) + } + + test("UNRECOGNIZED_PARAMETER_NAME") { + checkError( + exception = parseRearrangeException(signature, + Seq(k1Arg, k2Arg, k3Arg, k4Arg, NamedArgumentExpression("k5", Literal("k5"))), "foo"), + errorClass = "UNRECOGNIZED_PARAMETER_NAME", + parameters = Map("functionName" -> toSQLId("foo"), "argumentName" -> toSQLId("k5"), + "proposal" -> (toSQLId("k1") + " " + toSQLId("k2") + " " + toSQLId("k3"))) + ) + } + + test("UNEXPECTED_POSITIONAL_ARGUMENT") { + checkError( + exception = parseRearrangeException(signature, + Seq(k2Arg, k3Arg, k1Arg, k4Arg), "foo"), + errorClass = "UNEXPECTED_POSITIONAL_ARGUMENT", + parameters = Map("functionName" -> toSQLId("foo")) + ) + } + + test("INTERNAL_ERROR: Enforce optional arguments after required arguments") { + val errorMessage = s"Function foo has an unexpected required argument for the provided" + + s" function signature ${illegalSignature}. All required arguments should come before" + + s" optional arguments." 
+ checkError( + exception = parseRearrangeException(illegalSignature, args, "foo"), + errorClass = "INTERNAL_ERROR", + parameters = Map("message" -> errorMessage) + ) + } +} diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/named-function-arguments.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/named-function-arguments.sql.out index faa05535cb322..e01e0ca5ee011 100644 --- a/sql/core/src/test/resources/sql-tests/analyzer-results/named-function-arguments.sql.out +++ b/sql/core/src/test/resources/sql-tests/analyzer-results/named-function-arguments.sql.out @@ -2,111 +2,368 @@ -- !query SELECT mask('AbCD123-@$#', lowerChar => 'q', upperChar => 'Q', otherChar => 'o', digitChar => 'd') -- !query analysis +Project [mask(AbCD123-@$#, Q, q, d, o) AS mask(AbCD123-@$#, Q, q, d, o)#x] ++- OneRowRelation + + +-- !query +SELECT mask(lowerChar => 'q', upperChar => 'Q', otherChar => 'o', digitChar => 'd', str => 'AbCD123-@$#') +-- !query analysis +Project [mask(AbCD123-@$#, Q, q, d, o) AS mask(AbCD123-@$#, Q, q, d, o)#x] ++- OneRowRelation + + +-- !query +SELECT mask('AbCD123-@$#', lowerChar => 'q', upperChar => 'Q', digitChar => 'd') +-- !query analysis +Project [mask(AbCD123-@$#, Q, q, d, null) AS mask(AbCD123-@$#, Q, q, d, NULL)#x] ++- OneRowRelation + + +-- !query +SELECT mask(lowerChar => 'q', upperChar => 'Q', digitChar => 'd', str => 'AbCD123-@$#') +-- !query analysis +Project [mask(AbCD123-@$#, Q, q, d, null) AS mask(AbCD123-@$#, Q, q, d, NULL)#x] ++- OneRowRelation + + +-- !query +create temporary view t2 as select * from values + ('val2a', 6S, 12, 14L, float(15), 20D, 20E2, timestamp '2014-04-04 01:01:00.000', date '2014-04-04'), + ('val1b', 10S, 12, 19L, float(17), 25D, 26E2, timestamp '2014-05-04 01:01:00.000', date '2014-05-04'), + ('val1b', 8S, 16, 119L, float(17), 25D, 26E2, timestamp '2015-05-04 01:01:00.000', date '2015-05-04'), + ('val1c', 12S, 16, 219L, float(17), 25D, 26E2, timestamp '2016-05-04 01:01:00.000', date '2016-05-04'), + ('val1b', null, 16, 319L, float(17), 25D, 26E2, timestamp '2017-05-04 01:01:00.000', null), + ('val2e', 8S, null, 419L, float(17), 25D, 26E2, timestamp '2014-06-04 01:01:00.000', date '2014-06-04'), + ('val1f', 19S, null, 519L, float(17), 25D, 26E2, timestamp '2014-05-04 01:01:00.000', date '2014-05-04'), + ('val1b', 10S, 12, 19L, float(17), 25D, 26E2, timestamp '2014-06-04 01:01:00.000', date '2014-06-04'), + ('val1b', 8S, 16, 19L, float(17), 25D, 26E2, timestamp '2014-07-04 01:01:00.000', date '2014-07-04'), + ('val1c', 12S, 16, 19L, float(17), 25D, 26E2, timestamp '2014-08-04 01:01:00.000', date '2014-08-05'), + ('val1e', 8S, null, 19L, float(17), 25D, 26E2, timestamp '2014-09-04 01:01:00.000', date '2014-09-04'), + ('val1f', 19S, null, 19L, float(17), 25D, 26E2, timestamp '2014-10-04 01:01:00.000', date '2014-10-04'), + ('val1b', null, 16, 19L, float(17), 25D, 26E2, timestamp '2014-05-04 01:01:00.000', null) + as t2(t2a, t2b, t2c, t2d, t2e, t2f, t2g, t2h, t2i) +-- !query analysis +CreateViewCommand `t2`, select * from values + ('val2a', 6S, 12, 14L, float(15), 20D, 20E2, timestamp '2014-04-04 01:01:00.000', date '2014-04-04'), + ('val1b', 10S, 12, 19L, float(17), 25D, 26E2, timestamp '2014-05-04 01:01:00.000', date '2014-05-04'), + ('val1b', 8S, 16, 119L, float(17), 25D, 26E2, timestamp '2015-05-04 01:01:00.000', date '2015-05-04'), + ('val1c', 12S, 16, 219L, float(17), 25D, 26E2, timestamp '2016-05-04 01:01:00.000', date '2016-05-04'), + ('val1b', null, 16, 319L, float(17), 25D, 26E2, timestamp '2017-05-04 
01:01:00.000', null), + ('val2e', 8S, null, 419L, float(17), 25D, 26E2, timestamp '2014-06-04 01:01:00.000', date '2014-06-04'), + ('val1f', 19S, null, 519L, float(17), 25D, 26E2, timestamp '2014-05-04 01:01:00.000', date '2014-05-04'), + ('val1b', 10S, 12, 19L, float(17), 25D, 26E2, timestamp '2014-06-04 01:01:00.000', date '2014-06-04'), + ('val1b', 8S, 16, 19L, float(17), 25D, 26E2, timestamp '2014-07-04 01:01:00.000', date '2014-07-04'), + ('val1c', 12S, 16, 19L, float(17), 25D, 26E2, timestamp '2014-08-04 01:01:00.000', date '2014-08-05'), + ('val1e', 8S, null, 19L, float(17), 25D, 26E2, timestamp '2014-09-04 01:01:00.000', date '2014-09-04'), + ('val1f', 19S, null, 19L, float(17), 25D, 26E2, timestamp '2014-10-04 01:01:00.000', date '2014-10-04'), + ('val1b', null, 16, 19L, float(17), 25D, 26E2, timestamp '2014-05-04 01:01:00.000', null) + as t2(t2a, t2b, t2c, t2d, t2e, t2f, t2g, t2h, t2i), false, false, LocalTempView, true + +- Project [t2a#x, t2b#x, t2c#x, t2d#xL, t2e#x, t2f#x, t2g#x, t2h#x, t2i#x] + +- SubqueryAlias t2 + +- LocalRelation [t2a#x, t2b#x, t2c#x, t2d#xL, t2e#x, t2f#x, t2g#x, t2h#x, t2i#x] + + +-- !query +SELECT hex(count_min_sketch(t2d, seed => 1, epsilon => 0.5d, confidence => 0.5d)) FROM t2 +-- !query analysis +Aggregate [hex(count_min_sketch(t2d#xL, 0.5, 0.5, 1, 0, 0)) AS hex(count_min_sketch(t2d, 0.5, 0.5, 1))#x] ++- SubqueryAlias t2 + +- View (`t2`, [t2a#x,t2b#x,t2c#x,t2d#xL,t2e#x,t2f#x,t2g#x,t2h#x,t2i#x]) + +- Project [cast(t2a#x as string) AS t2a#x, cast(t2b#x as smallint) AS t2b#x, cast(t2c#x as int) AS t2c#x, cast(t2d#xL as bigint) AS t2d#xL, cast(t2e#x as float) AS t2e#x, cast(t2f#x as double) AS t2f#x, cast(t2g#x as double) AS t2g#x, cast(t2h#x as timestamp) AS t2h#x, cast(t2i#x as date) AS t2i#x] + +- Project [t2a#x, t2b#x, t2c#x, t2d#xL, t2e#x, t2f#x, t2g#x, t2h#x, t2i#x] + +- SubqueryAlias t2 + +- LocalRelation [t2a#x, t2b#x, t2c#x, t2d#xL, t2e#x, t2f#x, t2g#x, t2h#x, t2i#x] + + +-- !query +SELECT hex(count_min_sketch(seed => 1, epsilon => 0.5d, confidence => 0.5d, column => t2d)) FROM t2 +-- !query analysis +Aggregate [hex(count_min_sketch(t2d#xL, 0.5, 0.5, 1, 0, 0)) AS hex(count_min_sketch(t2d, 0.5, 0.5, 1))#x] ++- SubqueryAlias t2 + +- View (`t2`, [t2a#x,t2b#x,t2c#x,t2d#xL,t2e#x,t2f#x,t2g#x,t2h#x,t2i#x]) + +- Project [cast(t2a#x as string) AS t2a#x, cast(t2b#x as smallint) AS t2b#x, cast(t2c#x as int) AS t2c#x, cast(t2d#xL as bigint) AS t2d#xL, cast(t2e#x as float) AS t2e#x, cast(t2f#x as double) AS t2f#x, cast(t2g#x as double) AS t2g#x, cast(t2h#x as timestamp) AS t2h#x, cast(t2i#x as date) AS t2i#x] + +- Project [t2a#x, t2b#x, t2c#x, t2d#xL, t2e#x, t2f#x, t2g#x, t2h#x, t2i#x] + +- SubqueryAlias t2 + +- LocalRelation [t2a#x, t2b#x, t2c#x, t2d#xL, t2e#x, t2f#x, t2g#x, t2h#x, t2i#x] + + +-- !query +SELECT hex(count_min_sketch(t2d, 0.5d, seed => 1, confidence => 0.5d)) FROM t2 +-- !query analysis +Aggregate [hex(count_min_sketch(t2d#xL, 0.5, 0.5, 1, 0, 0)) AS hex(count_min_sketch(t2d, 0.5, 0.5, 1))#x] ++- SubqueryAlias t2 + +- View (`t2`, [t2a#x,t2b#x,t2c#x,t2d#xL,t2e#x,t2f#x,t2g#x,t2h#x,t2i#x]) + +- Project [cast(t2a#x as string) AS t2a#x, cast(t2b#x as smallint) AS t2b#x, cast(t2c#x as int) AS t2c#x, cast(t2d#xL as bigint) AS t2d#xL, cast(t2e#x as float) AS t2e#x, cast(t2f#x as double) AS t2f#x, cast(t2g#x as double) AS t2g#x, cast(t2h#x as timestamp) AS t2h#x, cast(t2i#x as date) AS t2i#x] + +- Project [t2a#x, t2b#x, t2c#x, t2d#xL, t2e#x, t2f#x, t2g#x, t2h#x, t2i#x] + +- SubqueryAlias t2 + +- LocalRelation [t2a#x, t2b#x, t2c#x, t2d#xL, t2e#x, t2f#x, 
t2g#x, t2h#x, t2i#x] + + +-- !query +SELECT * FROM explode(collection => array(1, 2)) +-- !query analysis +Project [col#x] ++- Generate explode(array(1, 2)), false, [col#x] + +- OneRowRelation + + +-- !query +SELECT * FROM explode_outer(collection => map('a', 1, 'b', 2)) +-- !query analysis +Project [key#x, value#x] ++- Generate explode(map(a, 1, b, 2)), true, [key#x, value#x] + +- OneRowRelation + + +-- !query +SELECT * FROM explode(array(1, 2)), explode(array(3, 4)) +-- !query analysis +Project [col#x, col#x] ++- Join Inner + :- Generate explode(array(1, 2)), false, [col#x] + : +- OneRowRelation + +- Generate explode(array(3, 4)), false, [col#x] + +- OneRowRelation + + +-- !query +SELECT * FROM explode(array(1, 2)) AS t, LATERAL explode(array(3 * t.col, 4 * t.col)) +-- !query analysis +Project [col#x, col#x] ++- LateralJoin lateral-subquery#x [col#x && col#x], Inner + : +- Generate explode(array((3 * outer(col#x)), (4 * outer(col#x)))), false, [col#x] + : +- OneRowRelation + +- SubqueryAlias t + +- Generate explode(array(1, 2)), false, [col#x] + +- OneRowRelation + + +-- !query +SELECT num, val, 'Spark' FROM explode(map(1, 'a', 2, 'b')) AS t(num, val) +-- !query analysis +Project [num#x, val#x, Spark AS Spark#x] ++- SubqueryAlias t + +- Project [key#x AS num#x, value#x AS val#x] + +- Generate explode(map(1, a, 2, b)), false, [key#x, value#x] + +- OneRowRelation + + +-- !query +SELECT * FROM explode(collection => explode(array(1))) +-- !query analysis +org.apache.spark.sql.AnalysisException +{ + "errorClass" : "UNSUPPORTED_GENERATOR.NESTED_IN_EXPRESSIONS", + "sqlState" : "0A000", + "messageParameters" : { + "expression" : "\"explode(explode(array(1)))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 15, + "stopIndex" : 54, + "fragment" : "explode(collection => explode(array(1)))" + } ] +} + + +-- !query +SELECT * FROM explode(collection => explode(collection => array(1))) +-- !query analysis +org.apache.spark.sql.AnalysisException +{ + "errorClass" : "UNSUPPORTED_GENERATOR.NESTED_IN_EXPRESSIONS", + "sqlState" : "0A000", + "messageParameters" : { + "expression" : "\"explode(explode(array(1)))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 15, + "stopIndex" : 68, + "fragment" : "explode(collection => explode(collection => array(1)))" + } ] +} + + +-- !query +CREATE OR REPLACE TEMPORARY VIEW v AS SELECT id FROM range(0, 8) +-- !query analysis +CreateViewCommand `v`, SELECT id FROM range(0, 8), false, true, LocalTempView, true + +- Project [id#xL] + +- Range (0, 8, step=1, splits=None) + + +-- !query +SELECT * FROM explode(collection => TABLE(v)) +-- !query analysis org.apache.spark.sql.AnalysisException { - "errorClass" : "DATATYPE_MISMATCH.NON_FOLDABLE_INPUT", + "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", "sqlState" : "42K09", "messageParameters" : { - "inputExpr" : "\"namedargumentexpression(q)\"", - "inputName" : "upperChar", - "inputType" : "\"STRING\"", - "sqlExpr" : "\"mask(AbCD123-@$#, namedargumentexpression(q), namedargumentexpression(Q), namedargumentexpression(o), namedargumentexpression(d))\"" + "inputSql" : "\"outer(__auto_generated_subquery_name_0.c)\"", + "inputType" : "\"STRUCT\"", + "paramIndex" : "1", + "requiredType" : "(\"ARRAY\" or \"MAP\")", + "sqlExpr" : "\"explode(outer(__auto_generated_subquery_name_0.c))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 15, + "stopIndex" : 45, + "fragment" : "explode(collection => TABLE(v))" + } ] 
+} + + +-- !query +SELECT mask(lowerChar => 'q', 'AbCD123-@$#', upperChar => 'Q', otherChar => 'o', digitChar => 'd') +-- !query analysis +org.apache.spark.sql.AnalysisException +{ + "errorClass" : "UNEXPECTED_POSITIONAL_ARGUMENT", + "sqlState" : "4274K", + "messageParameters" : { + "functionName" : "`mask`" }, "queryContext" : [ { "objectType" : "", "objectName" : "", "startIndex" : 8, "stopIndex" : 98, - "fragment" : "mask('AbCD123-@$#', lowerChar => 'q', upperChar => 'Q', otherChar => 'o', digitChar => 'd')" + "fragment" : "mask(lowerChar => 'q', 'AbCD123-@$#', upperChar => 'Q', otherChar => 'o', digitChar => 'd')" } ] } -- !query -SELECT mask(lowerChar => 'q', upperChar => 'Q', otherChar => 'o', digitChar => 'd', str => 'AbCD123-@$#') +SELECT mask('AbCD123-@$#', lowerChar => 'q', upperChar => 'Q', otherChar => 'o', digitChar => 'd', digitChar => 'e') -- !query analysis org.apache.spark.sql.AnalysisException { - "errorClass" : "DATATYPE_MISMATCH.NON_FOLDABLE_INPUT", - "sqlState" : "42K09", + "errorClass" : "DUPLICATE_ROUTINE_PARAMETER_ASSIGNMENT.DOUBLE_NAMED_ARGUMENT_REFERENCE", + "sqlState" : "4274K", "messageParameters" : { - "inputExpr" : "\"namedargumentexpression(Q)\"", - "inputName" : "upperChar", - "inputType" : "\"STRING\"", - "sqlExpr" : "\"mask(namedargumentexpression(q), namedargumentexpression(Q), namedargumentexpression(o), namedargumentexpression(d), namedargumentexpression(AbCD123-@$#))\"" + "functionName" : "`mask`", + "parameterName" : "`digitChar`" }, "queryContext" : [ { "objectType" : "", "objectName" : "", "startIndex" : 8, - "stopIndex" : 105, - "fragment" : "mask(lowerChar => 'q', upperChar => 'Q', otherChar => 'o', digitChar => 'd', str => 'AbCD123-@$#')" + "stopIndex" : 116, + "fragment" : "mask('AbCD123-@$#', lowerChar => 'q', upperChar => 'Q', otherChar => 'o', digitChar => 'd', digitChar => 'e')" } ] } -- !query -SELECT mask('AbCD123-@$#', lowerChar => 'q', upperChar => 'Q', digitChar => 'd') +SELECT mask('AbCD123-@$#', lowerChar => 'q', upperChar => 'Q', otherChar => 'o', digitChar => 'd', str => 'AbC') -- !query analysis org.apache.spark.sql.AnalysisException { - "errorClass" : "DATATYPE_MISMATCH.NON_FOLDABLE_INPUT", - "sqlState" : "42K09", + "errorClass" : "DUPLICATE_ROUTINE_PARAMETER_ASSIGNMENT.BOTH_POSITIONAL_AND_NAMED", + "sqlState" : "4274K", "messageParameters" : { - "inputExpr" : "\"namedargumentexpression(q)\"", - "inputName" : "upperChar", - "inputType" : "\"STRING\"", - "sqlExpr" : "\"mask(AbCD123-@$#, namedargumentexpression(q), namedargumentexpression(Q), namedargumentexpression(d), NULL)\"" + "functionName" : "`mask`", + "parameterName" : "`str`" }, "queryContext" : [ { "objectType" : "", "objectName" : "", "startIndex" : 8, - "stopIndex" : 80, - "fragment" : "mask('AbCD123-@$#', lowerChar => 'q', upperChar => 'Q', digitChar => 'd')" + "stopIndex" : 112, + "fragment" : "mask('AbCD123-@$#', lowerChar => 'q', upperChar => 'Q', otherChar => 'o', digitChar => 'd', str => 'AbC')" } ] } -- !query -SELECT mask(lowerChar => 'q', upperChar => 'Q', digitChar => 'd', str => 'AbCD123-@$#') +SELECT mask(lowerChar => 'q', upperChar => 'Q', otherChar => 'o', digitChar => 'd') -- !query analysis org.apache.spark.sql.AnalysisException { - "errorClass" : "DATATYPE_MISMATCH.NON_FOLDABLE_INPUT", - "sqlState" : "42K09", + "errorClass" : "REQUIRED_PARAMETER_NOT_FOUND", + "sqlState" : "4274K", "messageParameters" : { - "inputExpr" : "\"namedargumentexpression(Q)\"", - "inputName" : "upperChar", - "inputType" : "\"STRING\"", - "sqlExpr" : 
"\"mask(namedargumentexpression(q), namedargumentexpression(Q), namedargumentexpression(d), namedargumentexpression(AbCD123-@$#), NULL)\"" + "functionName" : "`mask`", + "parameterName" : "`str`" }, "queryContext" : [ { "objectType" : "", "objectName" : "", "startIndex" : 8, - "stopIndex" : 87, - "fragment" : "mask(lowerChar => 'q', upperChar => 'Q', digitChar => 'd', str => 'AbCD123-@$#')" + "stopIndex" : 83, + "fragment" : "mask(lowerChar => 'q', upperChar => 'Q', otherChar => 'o', digitChar => 'd')" } ] } -- !query -SELECT mask(lowerChar => 'q', 'AbCD123-@$#', upperChar => 'Q', otherChar => 'o', digitChar => 'd') +SELECT mask('AbCD123-@$#', lowerChar => 'q', upperChar => 'Q', otherChar => 'o', digitChar => 'd', cellular => 'automata') -- !query analysis org.apache.spark.sql.AnalysisException { - "errorClass" : "DATATYPE_MISMATCH.INPUT_SIZE_NOT_ONE", - "sqlState" : "42K09", + "errorClass" : "UNRECOGNIZED_PARAMETER_NAME", + "sqlState" : "4274K", "messageParameters" : { - "exprName" : "upperChar", - "sqlExpr" : "\"mask(namedargumentexpression(q), AbCD123-@$#, namedargumentexpression(Q), namedargumentexpression(o), namedargumentexpression(d))\"" + "argumentName" : "`cellular`", + "functionName" : "`mask`", + "proposal" : "`str` `upperChar` `otherChar`" }, "queryContext" : [ { "objectType" : "", "objectName" : "", "startIndex" : 8, - "stopIndex" : 98, - "fragment" : "mask(lowerChar => 'q', 'AbCD123-@$#', upperChar => 'Q', otherChar => 'o', digitChar => 'd')" + "stopIndex" : 122, + "fragment" : "mask('AbCD123-@$#', lowerChar => 'q', upperChar => 'Q', otherChar => 'o', digitChar => 'd', cellular => 'automata')" + } ] +} + + +-- !query +SELECT encode(str => 'a', charset => 'utf-8') +-- !query analysis +org.apache.spark.sql.AnalysisException +{ + "errorClass" : "NAMED_PARAMETERS_NOT_SUPPORTED", + "sqlState" : "4274K", + "messageParameters" : { + "functionName" : "`encode`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 45, + "fragment" : "encode(str => 'a', charset => 'utf-8')" + } ] +} + + +-- !query +SELECT mask('AbCD123-@$#', 'Q', 'q', 'd', 'o', 'k') +-- !query analysis +org.apache.spark.sql.AnalysisException +{ + "errorClass" : "WRONG_NUM_ARGS.WITHOUT_SUGGESTION", + "sqlState" : "42605", + "messageParameters" : { + "actualNum" : "6", + "docroot" : "https://spark.apache.org/docs/latest", + "expectedNum" : "[1, 2, 3, 4, 5]", + "functionName" : "`mask`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 51, + "fragment" : "mask('AbCD123-@$#', 'Q', 'q', 'd', 'o', 'k')" } ] } diff --git a/sql/core/src/test/resources/sql-tests/inputs/named-function-arguments.sql b/sql/core/src/test/resources/sql-tests/inputs/named-function-arguments.sql index aeb7b1e85cd8c..99f33d7815255 100644 --- a/sql/core/src/test/resources/sql-tests/inputs/named-function-arguments.sql +++ b/sql/core/src/test/resources/sql-tests/inputs/named-function-arguments.sql @@ -1,5 +1,60 @@ +-- Test for named arguments for Mask SELECT mask('AbCD123-@$#', lowerChar => 'q', upperChar => 'Q', otherChar => 'o', digitChar => 'd'); SELECT mask(lowerChar => 'q', upperChar => 'Q', otherChar => 'o', digitChar => 'd', str => 'AbCD123-@$#'); SELECT mask('AbCD123-@$#', lowerChar => 'q', upperChar => 'Q', digitChar => 'd'); SELECT mask(lowerChar => 'q', upperChar => 'Q', digitChar => 'd', str => 'AbCD123-@$#'); + +-- Test for named arguments for CountMinSketchAgg +create temporary view t2 as select * from values + ('val2a', 6S, 12, 14L, 
float(15), 20D, 20E2, timestamp '2014-04-04 01:01:00.000', date '2014-04-04'), + ('val1b', 10S, 12, 19L, float(17), 25D, 26E2, timestamp '2014-05-04 01:01:00.000', date '2014-05-04'), + ('val1b', 8S, 16, 119L, float(17), 25D, 26E2, timestamp '2015-05-04 01:01:00.000', date '2015-05-04'), + ('val1c', 12S, 16, 219L, float(17), 25D, 26E2, timestamp '2016-05-04 01:01:00.000', date '2016-05-04'), + ('val1b', null, 16, 319L, float(17), 25D, 26E2, timestamp '2017-05-04 01:01:00.000', null), + ('val2e', 8S, null, 419L, float(17), 25D, 26E2, timestamp '2014-06-04 01:01:00.000', date '2014-06-04'), + ('val1f', 19S, null, 519L, float(17), 25D, 26E2, timestamp '2014-05-04 01:01:00.000', date '2014-05-04'), + ('val1b', 10S, 12, 19L, float(17), 25D, 26E2, timestamp '2014-06-04 01:01:00.000', date '2014-06-04'), + ('val1b', 8S, 16, 19L, float(17), 25D, 26E2, timestamp '2014-07-04 01:01:00.000', date '2014-07-04'), + ('val1c', 12S, 16, 19L, float(17), 25D, 26E2, timestamp '2014-08-04 01:01:00.000', date '2014-08-05'), + ('val1e', 8S, null, 19L, float(17), 25D, 26E2, timestamp '2014-09-04 01:01:00.000', date '2014-09-04'), + ('val1f', 19S, null, 19L, float(17), 25D, 26E2, timestamp '2014-10-04 01:01:00.000', date '2014-10-04'), + ('val1b', null, 16, 19L, float(17), 25D, 26E2, timestamp '2014-05-04 01:01:00.000', null) + as t2(t2a, t2b, t2c, t2d, t2e, t2f, t2g, t2h, t2i); + +SELECT hex(count_min_sketch(t2d, seed => 1, epsilon => 0.5d, confidence => 0.5d)) FROM t2; +SELECT hex(count_min_sketch(seed => 1, epsilon => 0.5d, confidence => 0.5d, column => t2d)) FROM t2; +SELECT hex(count_min_sketch(t2d, 0.5d, seed => 1, confidence => 0.5d)) FROM t2; + +-- Test for tabled value functions explode and explode_outer +SELECT * FROM explode(collection => array(1, 2)); +SELECT * FROM explode_outer(collection => map('a', 1, 'b', 2)); +SELECT * FROM explode(array(1, 2)), explode(array(3, 4)); +SELECT * FROM explode(array(1, 2)) AS t, LATERAL explode(array(3 * t.col, 4 * t.col)); +SELECT num, val, 'Spark' FROM explode(map(1, 'a', 2, 'b')) AS t(num, val); + +-- Test for wrapped EXPLODE call to check error preservation +SELECT * FROM explode(collection => explode(array(1))); +SELECT * FROM explode(collection => explode(collection => array(1))); + +-- Test with TABLE parser rule +CREATE OR REPLACE TEMPORARY VIEW v AS SELECT id FROM range(0, 8); +SELECT * FROM explode(collection => TABLE(v)); + +-- Unexpected positional argument SELECT mask(lowerChar => 'q', 'AbCD123-@$#', upperChar => 'Q', otherChar => 'o', digitChar => 'd'); + +-- Duplicate parameter assignment +SELECT mask('AbCD123-@$#', lowerChar => 'q', upperChar => 'Q', otherChar => 'o', digitChar => 'd', digitChar => 'e'); +SELECT mask('AbCD123-@$#', lowerChar => 'q', upperChar => 'Q', otherChar => 'o', digitChar => 'd', str => 'AbC'); + +-- Required parameter not found +SELECT mask(lowerChar => 'q', upperChar => 'Q', otherChar => 'o', digitChar => 'd'); + +-- Unrecognized parameter name +SELECT mask('AbCD123-@$#', lowerChar => 'q', upperChar => 'Q', otherChar => 'o', digitChar => 'd', cellular => 'automata'); + +-- Named arguments not supported +SELECT encode(str => 'a', charset => 'utf-8'); + +-- Wrong number of arguments +SELECT mask('AbCD123-@$#', 'Q', 'q', 'd', 'o', 'k'); diff --git a/sql/core/src/test/resources/sql-tests/results/named-function-arguments.sql.out b/sql/core/src/test/resources/sql-tests/results/named-function-arguments.sql.out index 842374542ec6e..3b223cc0e1529 100644 --- 
a/sql/core/src/test/resources/sql-tests/results/named-function-arguments.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/named-function-arguments.sql.out @@ -2,121 +2,365 @@ -- !query SELECT mask('AbCD123-@$#', lowerChar => 'q', upperChar => 'Q', otherChar => 'o', digitChar => 'd') -- !query schema +struct +-- !query output +QqQQdddoooo + + +-- !query +SELECT mask(lowerChar => 'q', upperChar => 'Q', otherChar => 'o', digitChar => 'd', str => 'AbCD123-@$#') +-- !query schema +struct +-- !query output +QqQQdddoooo + + +-- !query +SELECT mask('AbCD123-@$#', lowerChar => 'q', upperChar => 'Q', digitChar => 'd') +-- !query schema +struct +-- !query output +QqQQddd-@$# + + +-- !query +SELECT mask(lowerChar => 'q', upperChar => 'Q', digitChar => 'd', str => 'AbCD123-@$#') +-- !query schema +struct +-- !query output +QqQQddd-@$# + + +-- !query +create temporary view t2 as select * from values + ('val2a', 6S, 12, 14L, float(15), 20D, 20E2, timestamp '2014-04-04 01:01:00.000', date '2014-04-04'), + ('val1b', 10S, 12, 19L, float(17), 25D, 26E2, timestamp '2014-05-04 01:01:00.000', date '2014-05-04'), + ('val1b', 8S, 16, 119L, float(17), 25D, 26E2, timestamp '2015-05-04 01:01:00.000', date '2015-05-04'), + ('val1c', 12S, 16, 219L, float(17), 25D, 26E2, timestamp '2016-05-04 01:01:00.000', date '2016-05-04'), + ('val1b', null, 16, 319L, float(17), 25D, 26E2, timestamp '2017-05-04 01:01:00.000', null), + ('val2e', 8S, null, 419L, float(17), 25D, 26E2, timestamp '2014-06-04 01:01:00.000', date '2014-06-04'), + ('val1f', 19S, null, 519L, float(17), 25D, 26E2, timestamp '2014-05-04 01:01:00.000', date '2014-05-04'), + ('val1b', 10S, 12, 19L, float(17), 25D, 26E2, timestamp '2014-06-04 01:01:00.000', date '2014-06-04'), + ('val1b', 8S, 16, 19L, float(17), 25D, 26E2, timestamp '2014-07-04 01:01:00.000', date '2014-07-04'), + ('val1c', 12S, 16, 19L, float(17), 25D, 26E2, timestamp '2014-08-04 01:01:00.000', date '2014-08-05'), + ('val1e', 8S, null, 19L, float(17), 25D, 26E2, timestamp '2014-09-04 01:01:00.000', date '2014-09-04'), + ('val1f', 19S, null, 19L, float(17), 25D, 26E2, timestamp '2014-10-04 01:01:00.000', date '2014-10-04'), + ('val1b', null, 16, 19L, float(17), 25D, 26E2, timestamp '2014-05-04 01:01:00.000', null) + as t2(t2a, t2b, t2c, t2d, t2e, t2f, t2g, t2h, t2i) +-- !query schema +struct<> +-- !query output + + + +-- !query +SELECT hex(count_min_sketch(t2d, seed => 1, epsilon => 0.5d, confidence => 0.5d)) FROM t2 +-- !query schema +struct +-- !query output +00000001000000000000000D0000000100000004000000005D8D6AB90000000000000002000000000000000700000000000000010000000000000003 + + +-- !query +SELECT hex(count_min_sketch(seed => 1, epsilon => 0.5d, confidence => 0.5d, column => t2d)) FROM t2 +-- !query schema +struct +-- !query output +00000001000000000000000D0000000100000004000000005D8D6AB90000000000000002000000000000000700000000000000010000000000000003 + + +-- !query +SELECT hex(count_min_sketch(t2d, 0.5d, seed => 1, confidence => 0.5d)) FROM t2 +-- !query schema +struct +-- !query output +00000001000000000000000D0000000100000004000000005D8D6AB90000000000000002000000000000000700000000000000010000000000000003 + + +-- !query +SELECT * FROM explode(collection => array(1, 2)) +-- !query schema +struct +-- !query output +1 +2 + + +-- !query +SELECT * FROM explode_outer(collection => map('a', 1, 'b', 2)) +-- !query schema +struct +-- !query output +a 1 +b 2 + + +-- !query +SELECT * FROM explode(array(1, 2)), explode(array(3, 4)) +-- !query schema +struct +-- !query output +1 3 +1 4 +2 3 
+2 4 + + +-- !query +SELECT * FROM explode(array(1, 2)) AS t, LATERAL explode(array(3 * t.col, 4 * t.col)) +-- !query schema +struct +-- !query output +1 3 +1 4 +2 6 +2 8 + + +-- !query +SELECT num, val, 'Spark' FROM explode(map(1, 'a', 2, 'b')) AS t(num, val) +-- !query schema +struct +-- !query output +1 a Spark +2 b Spark + + +-- !query +SELECT * FROM explode(collection => explode(array(1))) +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.AnalysisException +{ + "errorClass" : "UNSUPPORTED_GENERATOR.NESTED_IN_EXPRESSIONS", + "sqlState" : "0A000", + "messageParameters" : { + "expression" : "\"explode(explode(array(1)))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 15, + "stopIndex" : 54, + "fragment" : "explode(collection => explode(array(1)))" + } ] +} + + +-- !query +SELECT * FROM explode(collection => explode(collection => array(1))) +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.AnalysisException +{ + "errorClass" : "UNSUPPORTED_GENERATOR.NESTED_IN_EXPRESSIONS", + "sqlState" : "0A000", + "messageParameters" : { + "expression" : "\"explode(explode(array(1)))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 15, + "stopIndex" : 68, + "fragment" : "explode(collection => explode(collection => array(1)))" + } ] +} + + +-- !query +CREATE OR REPLACE TEMPORARY VIEW v AS SELECT id FROM range(0, 8) +-- !query schema +struct<> +-- !query output + + + +-- !query +SELECT * FROM explode(collection => TABLE(v)) +-- !query schema struct<> -- !query output org.apache.spark.sql.AnalysisException { - "errorClass" : "DATATYPE_MISMATCH.NON_FOLDABLE_INPUT", + "errorClass" : "DATATYPE_MISMATCH.UNEXPECTED_INPUT_TYPE", "sqlState" : "42K09", "messageParameters" : { - "inputExpr" : "\"namedargumentexpression(q)\"", - "inputName" : "upperChar", - "inputType" : "\"STRING\"", - "sqlExpr" : "\"mask(AbCD123-@$#, namedargumentexpression(q), namedargumentexpression(Q), namedargumentexpression(o), namedargumentexpression(d))\"" + "inputSql" : "\"outer(__auto_generated_subquery_name_0.c)\"", + "inputType" : "\"STRUCT\"", + "paramIndex" : "1", + "requiredType" : "(\"ARRAY\" or \"MAP\")", + "sqlExpr" : "\"explode(outer(__auto_generated_subquery_name_0.c))\"" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 15, + "stopIndex" : 45, + "fragment" : "explode(collection => TABLE(v))" + } ] +} + + +-- !query +SELECT mask(lowerChar => 'q', 'AbCD123-@$#', upperChar => 'Q', otherChar => 'o', digitChar => 'd') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.AnalysisException +{ + "errorClass" : "UNEXPECTED_POSITIONAL_ARGUMENT", + "sqlState" : "4274K", + "messageParameters" : { + "functionName" : "`mask`" }, "queryContext" : [ { "objectType" : "", "objectName" : "", "startIndex" : 8, "stopIndex" : 98, - "fragment" : "mask('AbCD123-@$#', lowerChar => 'q', upperChar => 'Q', otherChar => 'o', digitChar => 'd')" + "fragment" : "mask(lowerChar => 'q', 'AbCD123-@$#', upperChar => 'Q', otherChar => 'o', digitChar => 'd')" } ] } -- !query -SELECT mask(lowerChar => 'q', upperChar => 'Q', otherChar => 'o', digitChar => 'd', str => 'AbCD123-@$#') +SELECT mask('AbCD123-@$#', lowerChar => 'q', upperChar => 'Q', otherChar => 'o', digitChar => 'd', digitChar => 'e') -- !query schema struct<> -- !query output org.apache.spark.sql.AnalysisException { - "errorClass" : "DATATYPE_MISMATCH.NON_FOLDABLE_INPUT", - "sqlState" : "42K09", + "errorClass" : 
"DUPLICATE_ROUTINE_PARAMETER_ASSIGNMENT.DOUBLE_NAMED_ARGUMENT_REFERENCE", + "sqlState" : "4274K", "messageParameters" : { - "inputExpr" : "\"namedargumentexpression(Q)\"", - "inputName" : "upperChar", - "inputType" : "\"STRING\"", - "sqlExpr" : "\"mask(namedargumentexpression(q), namedargumentexpression(Q), namedargumentexpression(o), namedargumentexpression(d), namedargumentexpression(AbCD123-@$#))\"" + "functionName" : "`mask`", + "parameterName" : "`digitChar`" }, "queryContext" : [ { "objectType" : "", "objectName" : "", "startIndex" : 8, - "stopIndex" : 105, - "fragment" : "mask(lowerChar => 'q', upperChar => 'Q', otherChar => 'o', digitChar => 'd', str => 'AbCD123-@$#')" + "stopIndex" : 116, + "fragment" : "mask('AbCD123-@$#', lowerChar => 'q', upperChar => 'Q', otherChar => 'o', digitChar => 'd', digitChar => 'e')" } ] } -- !query -SELECT mask('AbCD123-@$#', lowerChar => 'q', upperChar => 'Q', digitChar => 'd') +SELECT mask('AbCD123-@$#', lowerChar => 'q', upperChar => 'Q', otherChar => 'o', digitChar => 'd', str => 'AbC') -- !query schema struct<> -- !query output org.apache.spark.sql.AnalysisException { - "errorClass" : "DATATYPE_MISMATCH.NON_FOLDABLE_INPUT", - "sqlState" : "42K09", + "errorClass" : "DUPLICATE_ROUTINE_PARAMETER_ASSIGNMENT.BOTH_POSITIONAL_AND_NAMED", + "sqlState" : "4274K", "messageParameters" : { - "inputExpr" : "\"namedargumentexpression(q)\"", - "inputName" : "upperChar", - "inputType" : "\"STRING\"", - "sqlExpr" : "\"mask(AbCD123-@$#, namedargumentexpression(q), namedargumentexpression(Q), namedargumentexpression(d), NULL)\"" + "functionName" : "`mask`", + "parameterName" : "`str`" }, "queryContext" : [ { "objectType" : "", "objectName" : "", "startIndex" : 8, - "stopIndex" : 80, - "fragment" : "mask('AbCD123-@$#', lowerChar => 'q', upperChar => 'Q', digitChar => 'd')" + "stopIndex" : 112, + "fragment" : "mask('AbCD123-@$#', lowerChar => 'q', upperChar => 'Q', otherChar => 'o', digitChar => 'd', str => 'AbC')" } ] } -- !query -SELECT mask(lowerChar => 'q', upperChar => 'Q', digitChar => 'd', str => 'AbCD123-@$#') +SELECT mask(lowerChar => 'q', upperChar => 'Q', otherChar => 'o', digitChar => 'd') -- !query schema struct<> -- !query output org.apache.spark.sql.AnalysisException { - "errorClass" : "DATATYPE_MISMATCH.NON_FOLDABLE_INPUT", - "sqlState" : "42K09", + "errorClass" : "REQUIRED_PARAMETER_NOT_FOUND", + "sqlState" : "4274K", "messageParameters" : { - "inputExpr" : "\"namedargumentexpression(Q)\"", - "inputName" : "upperChar", - "inputType" : "\"STRING\"", - "sqlExpr" : "\"mask(namedargumentexpression(q), namedargumentexpression(Q), namedargumentexpression(d), namedargumentexpression(AbCD123-@$#), NULL)\"" + "functionName" : "`mask`", + "parameterName" : "`str`" }, "queryContext" : [ { "objectType" : "", "objectName" : "", "startIndex" : 8, - "stopIndex" : 87, - "fragment" : "mask(lowerChar => 'q', upperChar => 'Q', digitChar => 'd', str => 'AbCD123-@$#')" + "stopIndex" : 83, + "fragment" : "mask(lowerChar => 'q', upperChar => 'Q', otherChar => 'o', digitChar => 'd')" } ] } -- !query -SELECT mask(lowerChar => 'q', 'AbCD123-@$#', upperChar => 'Q', otherChar => 'o', digitChar => 'd') +SELECT mask('AbCD123-@$#', lowerChar => 'q', upperChar => 'Q', otherChar => 'o', digitChar => 'd', cellular => 'automata') -- !query schema struct<> -- !query output org.apache.spark.sql.AnalysisException { - "errorClass" : "DATATYPE_MISMATCH.INPUT_SIZE_NOT_ONE", - "sqlState" : "42K09", + "errorClass" : "UNRECOGNIZED_PARAMETER_NAME", + "sqlState" : "4274K", "messageParameters" : 
{ - "exprName" : "upperChar", - "sqlExpr" : "\"mask(namedargumentexpression(q), AbCD123-@$#, namedargumentexpression(Q), namedargumentexpression(o), namedargumentexpression(d))\"" + "argumentName" : "`cellular`", + "functionName" : "`mask`", + "proposal" : "`str` `upperChar` `otherChar`" }, "queryContext" : [ { "objectType" : "", "objectName" : "", "startIndex" : 8, - "stopIndex" : 98, - "fragment" : "mask(lowerChar => 'q', 'AbCD123-@$#', upperChar => 'Q', otherChar => 'o', digitChar => 'd')" + "stopIndex" : 122, + "fragment" : "mask('AbCD123-@$#', lowerChar => 'q', upperChar => 'Q', otherChar => 'o', digitChar => 'd', cellular => 'automata')" + } ] +} + + +-- !query +SELECT encode(str => 'a', charset => 'utf-8') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.AnalysisException +{ + "errorClass" : "NAMED_PARAMETERS_NOT_SUPPORTED", + "sqlState" : "4274K", + "messageParameters" : { + "functionName" : "`encode`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 45, + "fragment" : "encode(str => 'a', charset => 'utf-8')" + } ] +} + + +-- !query +SELECT mask('AbCD123-@$#', 'Q', 'q', 'd', 'o', 'k') +-- !query schema +struct<> +-- !query output +org.apache.spark.sql.AnalysisException +{ + "errorClass" : "WRONG_NUM_ARGS.WITHOUT_SUGGESTION", + "sqlState" : "42605", + "messageParameters" : { + "actualNum" : "6", + "docroot" : "https://spark.apache.org/docs/latest", + "expectedNum" : "[1, 2, 3, 4, 5]", + "functionName" : "`mask`" + }, + "queryContext" : [ { + "objectType" : "", + "objectName" : "", + "startIndex" : 8, + "stopIndex" : 51, + "fragment" : "mask('AbCD123-@$#', 'Q', 'q', 'd', 'o', 'k')" } ] } diff --git a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryParsingErrorsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryParsingErrorsSuite.scala index 2731760f7ef05..7ebb677b12158 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryParsingErrorsSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryParsingErrorsSuite.scala @@ -32,16 +32,11 @@ class QueryParsingErrorsSuite extends QueryTest with SharedSparkSession with SQL intercept[ParseException](sql(sqlText).collect()) } - test("NAMED_ARGUMENTS_SUPPORT_DISABLED: named arguments not turned on") { + test("NAMED_PARAMETER_SUPPORT_DISABLED: named arguments not turned on") { withSQLConf("spark.sql.allowNamedFunctionArguments" -> "false") { - checkError( - exception = parseException("SELECT * FROM encode(value => 'abc', charset => 'utf-8')"), - errorClass = "NAMED_ARGUMENTS_SUPPORT_DISABLED", - parameters = Map("functionName" -> toSQLId("encode"), "argument" -> toSQLId("value")) - ) checkError( exception = parseException("SELECT explode(arr => array(10, 20))"), - errorClass = "NAMED_ARGUMENTS_SUPPORT_DISABLED", + errorClass = "NAMED_PARAMETER_SUPPORT_DISABLED", parameters = Map("functionName"-> toSQLId("explode"), "argument" -> toSQLId("arr")) ) } From efa69e0c293423249c9861133f54a673245ecbae Mon Sep 17 00:00:00 2001 From: Richard Yu Date: Sat, 15 Jul 2023 16:32:10 -0700 Subject: [PATCH 2/5] Some weird files shouldn't be in here --- .../resources/error/error-classes.json.orig | 6036 ----------------- docs/sql-error-conditions.md.orig | 2020 ------ 2 files changed, 8056 deletions(-) delete mode 100644 common/utils/src/main/resources/error/error-classes.json.orig delete mode 100644 docs/sql-error-conditions.md.orig diff --git a/common/utils/src/main/resources/error/error-classes.json.orig 
b/common/utils/src/main/resources/error/error-classes.json.orig deleted file mode 100644 index e8cdaa6c63b3f..0000000000000 --- a/common/utils/src/main/resources/error/error-classes.json.orig +++ /dev/null
field type:" - ], - "subClass" : { - "ARRAY_TYPE" : { - "message" : [ - "Update the element by updating .element." - ] - }, - "INTERVAL_TYPE" : { - "message" : [ - "Update an interval by updating its fields." - ] - }, - "MAP_TYPE" : { - "message" : [ - "Update a map by updating .key or .value." - ] - }, - "STRUCT_TYPE" : { - "message" : [ - "Update a struct by updating its fields." - ] - }, - "USER_DEFINED_TYPE" : { - "message" : [ - "Update a UserDefinedType[] by updating its fields." - ] - } - } - }, - "CANNOT_UP_CAST_DATATYPE" : { - "message" : [ - "Cannot up cast from to .", - "
" - ] - }, - "CAST_INVALID_INPUT" : { - "message" : [ - "The value of the type cannot be cast to because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set to \"false\" to bypass this error." - ], - "sqlState" : "22018" - }, - "CAST_OVERFLOW" : { - "message" : [ - "The value of the type cannot be cast to due to an overflow. Use `try_cast` to tolerate overflow and return NULL instead. If necessary set to \"false\" to bypass this error." - ], - "sqlState" : "22003" - }, - "CAST_OVERFLOW_IN_TABLE_INSERT" : { - "message" : [ - "Fail to insert a value of type into the type column due to an overflow. Use `try_cast` on the input value to tolerate overflow and return NULL instead." - ], - "sqlState" : "22003" - }, - "CODEC_NOT_AVAILABLE" : { - "message" : [ - "The codec is not available. Consider to set the config to ." - ] - }, - "CODEC_SHORT_NAME_NOT_FOUND" : { - "message" : [ - "Cannot find a short name for the codec ." - ] - }, - "COLUMN_ALIASES_IS_NOT_ALLOWED" : { - "message" : [ - "Columns aliases are not allowed in ." - ] - }, - "COLUMN_ALREADY_EXISTS" : { - "message" : [ - "The column already exists. Consider to choose another name or rename the existing column." - ], - "sqlState" : "42711" - }, - "COLUMN_NOT_DEFINED_IN_TABLE" : { - "message" : [ - " column is not defined in table , defined table columns are: ." - ] - }, - "COLUMN_NOT_FOUND" : { - "message" : [ - "The column cannot be found. Verify the spelling and correctness of the column name according to the SQL config ." - ], - "sqlState" : "42703" - }, - "COMPARATOR_RETURNS_NULL" : { - "message" : [ - "The comparator has returned a NULL for a comparison between and . It should return a positive integer for \"greater than\", 0 for \"equal\" and a negative integer for \"less than\". To revert to deprecated behavior where NULL is treated as 0 (equal), you must set \"spark.sql.legacy.allowNullComparisonResultInArraySort\" to \"true\"." - ] - }, - "CONCURRENT_QUERY" : { - "message" : [ - "Another instance of this query was just started by a concurrent session." - ] - }, - "CONCURRENT_STREAM_LOG_UPDATE" : { - "message" : [ - "Concurrent update to the log. Multiple streaming jobs detected for .", - "Please make sure only one streaming job runs on a specific checkpoint location at a time." - ], - "sqlState" : "40000" - }, - "CONNECT" : { - "message" : [ - "Generic Spark Connect error." - ], - "subClass" : { - "INTERCEPTOR_CTOR_MISSING" : { - "message" : [ - "Cannot instantiate GRPC interceptor because is missing a default constructor without arguments." - ] - }, - "INTERCEPTOR_RUNTIME_ERROR" : { - "message" : [ - "Error instantiating GRPC interceptor: " - ] - }, - "PLUGIN_CTOR_MISSING" : { - "message" : [ - "Cannot instantiate Spark Connect plugin because is missing a default constructor without arguments." - ] - }, - "PLUGIN_RUNTIME_ERROR" : { - "message" : [ - "Error instantiating Spark Connect plugin: " - ] - } - } - }, - "CONVERSION_INVALID_INPUT" : { - "message" : [ - "The value () cannot be converted to because it is malformed. Correct the value as per the syntax, or change its format. Use to tolerate malformed input and return NULL instead." - ], - "sqlState" : "22018" - }, - "CREATE_PERMANENT_VIEW_WITHOUT_ALIAS" : { - "message" : [ - "Not allowed to create the permanent view without explicitly assigning an alias for the expression ." 
- ] - }, - "CREATE_TABLE_COLUMN_DESCRIPTOR_DUPLICATE" : { - "message" : [ - "CREATE TABLE column specifies descriptor \"\" more than once, which is invalid." - ], - "sqlState" : "42710" - }, - "CREATE_VIEW_COLUMN_ARITY_MISMATCH" : { - "message" : [ - "Cannot create view , the reason is" - ], - "subClass" : { - "NOT_ENOUGH_DATA_COLUMNS" : { - "message" : [ - "not enough data columns:", - "View columns: .", - "Data columns: ." - ] - }, - "TOO_MANY_DATA_COLUMNS" : { - "message" : [ - "too many data columns:", - "View columns: .", - "Data columns: ." - ] - } - }, - "sqlState" : "21S01" - }, - "DATATYPE_MISMATCH" : { - "message" : [ - "Cannot resolve due to data type mismatch:" - ], - "subClass" : { - "ARRAY_FUNCTION_DIFF_TYPES" : { - "message" : [ - "Input to should have been followed by a value with same element type, but it's [, ]." - ] - }, - "BINARY_ARRAY_DIFF_TYPES" : { - "message" : [ - "Input to function should have been two with same element type, but it's [, ]." - ] - }, - "BINARY_OP_DIFF_TYPES" : { - "message" : [ - "the left and right operands of the binary operator have incompatible types ( and )." - ] - }, - "BINARY_OP_WRONG_TYPE" : { - "message" : [ - "the binary operator requires the input type , not ." - ] - }, - "BLOOM_FILTER_BINARY_OP_WRONG_TYPE" : { - "message" : [ - "The Bloom filter binary input to should be either a constant value or a scalar subquery expression, but it's ." - ] - }, - "BLOOM_FILTER_WRONG_TYPE" : { - "message" : [ - "Input to function should have been followed by value with , but it's []." - ] - }, - "CANNOT_CONVERT_TO_JSON" : { - "message" : [ - "Unable to convert column of type to JSON." - ] - }, - "CANNOT_DROP_ALL_FIELDS" : { - "message" : [ - "Cannot drop all fields in struct." - ] - }, - "CAST_WITHOUT_SUGGESTION" : { - "message" : [ - "cannot cast to ." - ] - }, - "CAST_WITH_CONF_SUGGESTION" : { - "message" : [ - "cannot cast to with ANSI mode on.", - "If you have to cast to , you can set as ." - ] - }, - "CAST_WITH_FUNC_SUGGESTION" : { - "message" : [ - "cannot cast to .", - "To convert values from to , you can use the functions instead." - ] - }, - "CREATE_MAP_KEY_DIFF_TYPES" : { - "message" : [ - "The given keys of function should all be the same type, but they are ." - ] - }, - "CREATE_MAP_VALUE_DIFF_TYPES" : { - "message" : [ - "The given values of function should all be the same type, but they are ." - ] - }, - "CREATE_NAMED_STRUCT_WITHOUT_FOLDABLE_STRING" : { - "message" : [ - "Only foldable `STRING` expressions are allowed to appear at odd position, but they are ." - ] - }, - "DATA_DIFF_TYPES" : { - "message" : [ - "Input to should all be the same type, but it's ." - ] - }, - "FILTER_NOT_BOOLEAN" : { - "message" : [ - "Filter expression of type is not a boolean." - ] - }, - "HASH_MAP_TYPE" : { - "message" : [ - "Input to the function cannot contain elements of the \"MAP\" type. In Spark, same maps may have different hashcode, thus hash expressions are prohibited on \"MAP\" elements. To restore previous behavior set \"spark.sql.legacy.allowHashOnMapType\" to \"true\"." - ] - }, - "INPUT_SIZE_NOT_ONE" : { - "message" : [ - "Length of should be 1." - ] - }, - "INVALID_ARG_VALUE" : { - "message" : [ - "The value must to be a literal of , but got ." - ] - }, - "INVALID_JSON_MAP_KEY_TYPE" : { - "message" : [ - "Input schema can only contain STRING as a key type for a MAP." - ] - }, - "INVALID_JSON_SCHEMA" : { - "message" : [ - "Input schema must be a struct, an array or a map." 
- ] - }, - "INVALID_MAP_KEY_TYPE" : { - "message" : [ - "The key of map cannot be/contain ." - ] - }, - "INVALID_ORDERING_TYPE" : { - "message" : [ - "The does not support ordering on type ." - ] - }, - "INVALID_ROW_LEVEL_OPERATION_ASSIGNMENTS" : { - "message" : [ - "" - ] - }, - "IN_SUBQUERY_DATA_TYPE_MISMATCH" : { - "message" : [ - "The data type of one or more elements in the left hand side of an IN subquery is not compatible with the data type of the output of the subquery. Mismatched columns: [], left side: [], right side: []." - ] - }, - "IN_SUBQUERY_LENGTH_MISMATCH" : { - "message" : [ - "The number of columns in the left hand side of an IN subquery does not match the number of columns in the output of subquery. Left hand side columns(length: ): [], right hand side columns(length: ): []." - ] - }, - "MAP_CONCAT_DIFF_TYPES" : { - "message" : [ - "The should all be of type map, but it's ." - ] - }, - "MAP_FUNCTION_DIFF_TYPES" : { - "message" : [ - "Input to should have been followed by a value with same key type, but it's [, ]." - ] - }, - "MAP_ZIP_WITH_DIFF_TYPES" : { - "message" : [ - "Input to the should have been two maps with compatible key types, but it's [, ]." - ] - }, - "NON_FOLDABLE_INPUT" : { - "message" : [ - "the input should be a foldable expression; however, got ." - ] - }, - "NON_STRING_TYPE" : { - "message" : [ - "all arguments must be strings." - ] - }, - "NULL_TYPE" : { - "message" : [ - "Null typed values cannot be used as arguments of ." - ] - }, - "PARAMETER_CONSTRAINT_VIOLATION" : { - "message" : [ - "The () must be the ()." - ] - }, - "RANGE_FRAME_INVALID_TYPE" : { - "message" : [ - "The data type used in the order specification does not match the data type which is used in the range frame." - ] - }, - "RANGE_FRAME_MULTI_ORDER" : { - "message" : [ - "A range window frame with value boundaries cannot be used in a window specification with multiple order by expressions: ." - ] - }, - "RANGE_FRAME_WITHOUT_ORDER" : { - "message" : [ - "A range window frame cannot be used in an unordered window specification." - ] - }, - "SEQUENCE_WRONG_INPUT_TYPES" : { - "message" : [ - " uses the wrong parameter type. The parameter type must conform to:", - "1. The start and stop expressions must resolve to the same type.", - "2. If start and stop expressions resolve to the type, then the step expression must resolve to the type.", - "3. Otherwise, if start and stop expressions resolve to the type, then the step expression must resolve to the same type." - ] - }, - "SPECIFIED_WINDOW_FRAME_DIFF_TYPES" : { - "message" : [ - "Window frame bounds and do not have the same type: <> ." - ] - }, - "SPECIFIED_WINDOW_FRAME_INVALID_BOUND" : { - "message" : [ - "Window frame upper bound does not follow the lower bound ." - ] - }, - "SPECIFIED_WINDOW_FRAME_UNACCEPTED_TYPE" : { - "message" : [ - "The data type of the bound does not match the expected data type ." - ] - }, - "SPECIFIED_WINDOW_FRAME_WITHOUT_FOLDABLE" : { - "message" : [ - "Window frame bound is not a literal." - ] - }, - "SPECIFIED_WINDOW_FRAME_WRONG_COMPARISON" : { - "message" : [ - "The lower bound of a window frame must be to the upper bound." - ] - }, - "STACK_COLUMN_DIFF_TYPES" : { - "message" : [ - "The data type of the column () do not have the same type: () <> ()." - ] - }, - "TYPE_CHECK_FAILURE_WITH_HINT" : { - "message" : [ - "." - ] - }, - "UNEXPECTED_CLASS_TYPE" : { - "message" : [ - "class not found." - ] - }, - "UNEXPECTED_INPUT_TYPE" : { - "message" : [ - "Parameter requires the type, however has the type ." 
- ] - }, - "UNEXPECTED_NULL" : { - "message" : [ - "The must not be null." - ] - }, - "UNEXPECTED_RETURN_TYPE" : { - "message" : [ - "The requires return type, but the actual is type." - ] - }, - "UNEXPECTED_STATIC_METHOD" : { - "message" : [ - "cannot find a static method that matches the argument types in ." - ] - }, - "UNSUPPORTED_INPUT_TYPE" : { - "message" : [ - "The input of can't be type data." - ] - }, - "VALUE_OUT_OF_RANGE" : { - "message" : [ - "The must be between (current value = )." - ] - }, - "WRONG_NUM_ARG_TYPES" : { - "message" : [ - "The expression requires argument types but the actual number is ." - ] - }, - "WRONG_NUM_ENDPOINTS" : { - "message" : [ - "The number of endpoints must be >= 2 to construct intervals but the actual number is ." - ] - } - }, - "sqlState" : "42K09" - }, - "DATATYPE_MISSING_SIZE" : { - "message" : [ - "DataType requires a length parameter, for example (10). Please specify the length." - ], - "sqlState" : "42K01" - }, - "DATA_SOURCE_NOT_FOUND" : { - "message" : [ - "Failed to find the data source: . Please find packages at `https://spark.apache.org/third-party-projects.html`." - ], - "sqlState" : "42K02" - }, - "DATETIME_OVERFLOW" : { - "message" : [ - "Datetime operation overflow: ." - ], - "sqlState" : "22008" - }, - "DECIMAL_PRECISION_EXCEEDS_MAX_PRECISION" : { - "message" : [ - "Decimal precision exceeds max precision ." - ], - "sqlState" : "22003" - }, - "DEFAULT_DATABASE_NOT_EXISTS" : { - "message" : [ - "Default database does not exist, please create it first or change default database to ``." - ], - "sqlState" : "42704" - }, - "DISTINCT_WINDOW_FUNCTION_UNSUPPORTED" : { - "message" : [ - "Distinct window functions are not supported: ." - ] - }, - "DIVIDE_BY_ZERO" : { - "message" : [ - "Division by zero. Use `try_divide` to tolerate divisor being 0 and return NULL instead. If necessary set to \"false\" to bypass this error." - ], - "sqlState" : "22012" - }, - "DUPLICATED_FIELD_NAME_IN_ARROW_STRUCT" : { - "message" : [ - "Duplicated field names in Arrow Struct are not allowed, got ." - ] - }, - "DUPLICATED_MAP_KEY" : { - "message" : [ - "Duplicate map key was found, please check the input data. If you want to remove the duplicated keys, you can set to \"LAST_WIN\" so that the key inserted at last takes precedence." - ], - "sqlState" : "23505" - }, - "DUPLICATED_METRICS_NAME" : { - "message" : [ - "The metric name is not unique: . The same name cannot be used for metrics with different results. However multiple instances of metrics with with same result and name are allowed (e.g. self-joins)." - ] - }, - "DUPLICATE_CLAUSES" : { - "message" : [ - "Found duplicate clauses: . Please, remove one of them." - ] - }, - "DUPLICATE_KEY" : { - "message" : [ - "Found duplicate keys ." - ], - "sqlState" : "23505" - }, - "EMPTY_JSON_FIELD_VALUE" : { - "message" : [ - "Failed to parse an empty string for data type ." - ], - "sqlState" : "42604" - }, - "ENCODER_NOT_FOUND" : { - "message" : [ - "Not found an encoder of the type to Spark SQL internal representation. Consider to change the input type to one of supported at '/sql-ref-datatypes.html'." - ] - }, - "EVENT_TIME_IS_NOT_ON_TIMESTAMP_TYPE" : { - "message" : [ - "The event time has the invalid type , but expected \"TIMESTAMP\"." - ] - }, - "EXCEED_LIMIT_LENGTH" : { - "message" : [ - "Exceeds char/varchar type length limitation: ." - ] - }, - "EXPRESSION_TYPE_IS_NOT_ORDERABLE" : { - "message" : [ - "Column expression cannot be sorted because its type is not orderable." 
- ] - }, - "FAILED_EXECUTE_UDF" : { - "message" : [ - "Failed to execute user defined function (: () => )." - ], - "sqlState" : "39000" - }, - "FAILED_FUNCTION_CALL" : { - "message" : [ - "Failed preparing of the function for call. Please, double check function's arguments." - ], - "sqlState" : "38000" - }, - "FAILED_PARSE_STRUCT_TYPE" : { - "message" : [ - "Failed parsing struct: ." - ], - "sqlState" : "22018" - }, - "FAILED_RENAME_PATH" : { - "message" : [ - "Failed to rename to as destination already exists." - ], - "sqlState" : "42K04" - }, - "FAILED_RENAME_TEMP_FILE" : { - "message" : [ - "Failed to rename temp file to as FileSystem.rename returned false." - ] - }, - "FIELDS_ALREADY_EXISTS" : { - "message" : [ - "Cannot column, because already exists in ." - ] - }, - "FIELD_NOT_FOUND" : { - "message" : [ - "No such struct field in ." - ], - "sqlState" : "42704" - }, - "FORBIDDEN_OPERATION" : { - "message" : [ - "The operation is not allowed on the : ." - ], - "sqlState" : "42809" - }, - "GENERATED_COLUMN_WITH_DEFAULT_VALUE" : { - "message" : [ - "A column cannot have both a default value and a generation expression but column has default value: () and generation expression: ()." - ] - }, - "GRAPHITE_SINK_INVALID_PROTOCOL" : { - "message" : [ - "Invalid Graphite protocol: ." - ] - }, - "GRAPHITE_SINK_PROPERTY_MISSING" : { - "message" : [ - "Graphite sink requires '' property." - ] - }, - "GROUPING_COLUMN_MISMATCH" : { - "message" : [ - "Column of grouping () can't be found in grouping columns ." - ], - "sqlState" : "42803" - }, - "GROUPING_ID_COLUMN_MISMATCH" : { - "message" : [ - "Columns of grouping_id () does not match grouping columns ()." - ], - "sqlState" : "42803" - }, - "GROUPING_SIZE_LIMIT_EXCEEDED" : { - "message" : [ - "Grouping sets size cannot be greater than ." - ], - "sqlState" : "54000" - }, - "GROUP_BY_AGGREGATE" : { - "message" : [ - "Aggregate functions are not allowed in GROUP BY, but found ." - ], - "sqlState" : "42903" - }, - "GROUP_BY_POS_AGGREGATE" : { - "message" : [ - "GROUP BY refers to an expression that contains an aggregate function. Aggregate functions are not allowed in GROUP BY." - ], - "sqlState" : "42903" - }, - "GROUP_BY_POS_OUT_OF_RANGE" : { - "message" : [ - "GROUP BY position is not in select list (valid range is [1, ])." - ], - "sqlState" : "42805" - }, - "GROUP_EXPRESSION_TYPE_IS_NOT_ORDERABLE" : { - "message" : [ - "The expression cannot be used as a grouping expression because its data type is not an orderable data type." - ] - }, - "HLL_INVALID_INPUT_SKETCH_BUFFER" : { - "message" : [ - "Invalid call to ; only valid HLL sketch buffers are supported as inputs (such as those produced by the `hll_sketch_agg` function)." - ] - }, - "HLL_INVALID_LG_K" : { - "message" : [ - "Invalid call to ; the `lgConfigK` value must be between and , inclusive: ." - ] - }, - "HLL_UNION_DIFFERENT_LG_K" : { - "message" : [ - "Sketches have different `lgConfigK` values: and . Set the `allowDifferentLgConfigK` parameter to true to call with different `lgConfigK` values." - ] - }, - "IDENTIFIER_TOO_MANY_NAME_PARTS" : { - "message" : [ - " is not a valid identifier as it has more than 2 name parts." - ], - "sqlState" : "42601" - }, - "INCOMPARABLE_PIVOT_COLUMN" : { - "message" : [ - "Invalid pivot column . Pivot columns must be comparable." - ], - "sqlState" : "42818" - }, - "INCOMPATIBLE_COLUMN_TYPE" : { - "message" : [ - " can only be performed on tables with compatible column types. 
The column of the table is type which is not compatible with at the same column of the first table.." - ], - "sqlState" : "42825" - }, - "INCOMPATIBLE_DATASOURCE_REGISTER" : { - "message" : [ - "Detected an incompatible DataSourceRegister. Please remove the incompatible library from classpath or upgrade it. Error: " - ] - }, - "INCOMPATIBLE_DATA_FOR_TABLE" : { - "message" : [ - "Cannot write incompatible data for the table :" - ], - "subClass" : { - "AMBIGUOUS_COLUMN_NAME" : { - "message" : [ - "Ambiguous column name in the input data ." - ] - }, - "CANNOT_FIND_DATA" : { - "message" : [ - "Cannot find data for the output column ." - ] - }, - "CANNOT_SAFELY_CAST" : { - "message" : [ - "Cannot safely cast to ." - ] - }, - "EXTRA_STRUCT_FIELDS" : { - "message" : [ - "Cannot write extra fields to the struct ." - ] - }, - "NULLABLE_ARRAY_ELEMENTS" : { - "message" : [ - "Cannot write nullable elements to array of non-nulls: ." - ] - }, - "NULLABLE_COLUMN" : { - "message" : [ - "Cannot write nullable values to non-null column ." - ] - }, - "NULLABLE_MAP_VALUES" : { - "message" : [ - "Cannot write nullable values to map of non-nulls: ." - ] - }, - "STRUCT_MISSING_FIELDS" : { - "message" : [ - "Struct missing fields: ." - ] - }, - "UNEXPECTED_COLUMN_NAME" : { - "message" : [ - "Struct -th field name does not match (may be out of order): expected , found ." - ] - } - }, - "sqlState" : "KD000" - }, - "INCOMPATIBLE_JOIN_TYPES" : { - "message" : [ - "The join types and are incompatible." - ], - "sqlState" : "42613" - }, - "INCOMPATIBLE_VIEW_SCHEMA_CHANGE" : { - "message" : [ - "The SQL query of view has an incompatible schema change and column cannot be resolved. Expected columns named but got .", - "Please try to re-create the view by running: ." - ] - }, - "INCOMPLETE_TYPE_DEFINITION" : { - "message" : [ - "Incomplete complex type:" - ], - "subClass" : { - "ARRAY" : { - "message" : [ - "The definition of \"ARRAY\" type is incomplete. You must provide an element type. For example: \"ARRAY\"." - ] - }, - "MAP" : { - "message" : [ - "The definition of \"MAP\" type is incomplete. You must provide a key type and a value type. For example: \"MAP\"." - ] - }, - "STRUCT" : { - "message" : [ - "The definition of \"STRUCT\" type is incomplete. You must provide at least one field type. For example: \"STRUCT\"." - ] - } - }, - "sqlState" : "42K01" - }, - "INCONSISTENT_BEHAVIOR_CROSS_VERSION" : { - "message" : [ - "You may get a different result due to the upgrading to" - ], - "subClass" : { - "DATETIME_PATTERN_RECOGNITION" : { - "message" : [ - "Spark >= 3.0:", - "Fail to recognize pattern in the DateTimeFormatter. 1) You can set to \"LEGACY\" to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from '/sql-ref-datetime-pattern.html'." - ] - }, - "PARSE_DATETIME_BY_NEW_PARSER" : { - "message" : [ - "Spark >= 3.0:", - "Fail to parse in the new parser. You can set to \"LEGACY\" to restore the behavior before Spark 3.0, or set to \"CORRECTED\" and treat it as an invalid datetime string." - ] - }, - "READ_ANCIENT_DATETIME" : { - "message" : [ - "Spark >= 3.0:", - "reading dates before 1582-10-15 or timestamps before 1900-01-01T00:00:00Z", - "from files can be ambiguous, as the files may be written by", - "Spark 2.x or legacy versions of Hive, which uses a legacy hybrid calendar", - "that is different from Spark 3.0+'s Proleptic Gregorian calendar.", - "See more details in SPARK-31404. You can set the SQL config or", - "the datasource option
's column with type to with type ." - ] - }, - "NOT_SUPPORTED_COMMAND_FOR_V2_TABLE" : { - "message" : [ - " is not supported for v2 tables." - ], - "sqlState" : "46110" - }, - "NOT_SUPPORTED_COMMAND_WITHOUT_HIVE_SUPPORT" : { - "message" : [ - " is not supported, if you want to enable it, please set \"spark.sql.catalogImplementation\" to \"hive\"." - ] - }, - "NOT_SUPPORTED_IN_JDBC_CATALOG" : { - "message" : [ - "Not supported command in JDBC catalog:" - ], - "subClass" : { - "COMMAND" : { - "message" : [ - "" - ] - }, - "COMMAND_WITH_PROPERTY" : { - "message" : [ - " with property ." - ] - } - }, - "sqlState" : "46110" - }, - "NO_DEFAULT_COLUMN_VALUE_AVAILABLE" : { - "message" : [ - "Can't determine the default value for since it is not nullable and it has no default value." - ], - "sqlState" : "42608" - }, - "NO_HANDLER_FOR_UDAF" : { - "message" : [ - "No handler for UDAF ''. Use sparkSession.udf.register(...) instead." - ] - }, - "NO_SQL_TYPE_IN_PROTOBUF_SCHEMA" : { - "message" : [ - "Cannot find in Protobuf schema." - ] - }, - "NO_UDF_INTERFACE" : { - "message" : [ - "UDF class doesn't implement any UDF interface." - ] - }, - "NULLABLE_COLUMN_OR_FIELD" : { - "message" : [ - "Column or field is nullable while it's required to be non-nullable." - ], - "sqlState" : "42000" - }, - "NULLABLE_ROW_ID_ATTRIBUTES" : { - "message" : [ - "Row ID attributes cannot be nullable: ." - ], - "sqlState" : "42000" - }, - "NULL_MAP_KEY" : { - "message" : [ - "Cannot use null as map key." - ], - "sqlState" : "2200E" - }, - "NUMERIC_OUT_OF_SUPPORTED_RANGE" : { - "message" : [ - "The value cannot be interpreted as a numeric since it has more than 38 digits." - ], - "sqlState" : "22003" - }, - "NUMERIC_VALUE_OUT_OF_RANGE" : { - "message" : [ - " cannot be represented as Decimal(, ). If necessary set to \"false\" to bypass this error, and return NULL instead." - ], - "sqlState" : "22003" - }, - "NUM_COLUMNS_MISMATCH" : { - "message" : [ - " can only be performed on inputs with the same number of columns, but the first input has columns and the input has columns." - ], - "sqlState" : "42826" - }, - "NUM_TABLE_VALUE_ALIASES_MISMATCH" : { - "message" : [ - "Number of given aliases does not match number of output columns. Function name: ; number of aliases: ; number of output columns: ." - ] - }, - "OPERATION_CANCELED" : { - "message" : [ - "Operation has been canceled." - ], - "sqlState" : "HY008" - }, - "ORDER_BY_POS_OUT_OF_RANGE" : { - "message" : [ - "ORDER BY position is not in select list (valid range is [1, ])." - ], - "sqlState" : "42805" - }, - "PARSE_EMPTY_STATEMENT" : { - "message" : [ - "Syntax error, unexpected empty statement." - ], - "sqlState" : "42617" - }, - "PARSE_SYNTAX_ERROR" : { - "message" : [ - "Syntax error at or near ." - ], - "sqlState" : "42601" - }, - "PARTITIONS_ALREADY_EXIST" : { - "message" : [ - "Cannot ADD or RENAME TO partition(s) in table because they already exist.", - "Choose a different name, drop the existing partition, or add the IF NOT EXISTS clause to tolerate a pre-existing partition." - ], - "sqlState" : "428FT" - }, - "PARTITIONS_NOT_FOUND" : { - "message" : [ - "The partition(s) cannot be found in table .", - "Verify the partition specification and table name.", - "To tolerate the error on drop use ALTER TABLE … DROP IF EXISTS PARTITION." - ], - "sqlState" : "428FT" - }, - "PATH_ALREADY_EXISTS" : { - "message" : [ - "Path already exists. Set mode as \"overwrite\" to overwrite the existing path." 
- ], - "sqlState" : "42K04" - }, - "PATH_NOT_FOUND" : { - "message" : [ - "Path does not exist: ." - ], - "sqlState" : "42K03" - }, - "PIVOT_VALUE_DATA_TYPE_MISMATCH" : { - "message" : [ - "Invalid pivot value '': value data type does not match pivot column data type ." - ], - "sqlState" : "42K09" - }, - "PLAN_VALIDATION_FAILED_RULE_EXECUTOR" : { - "message" : [ - "The input plan of is invalid: " - ] - }, - "PLAN_VALIDATION_FAILED_RULE_IN_BATCH" : { - "message" : [ - "Rule in batch generated an invalid plan: " - ] - }, - "PROTOBUF_DEPENDENCY_NOT_FOUND" : { - "message" : [ - "Could not find dependency: ." - ] - }, - "PROTOBUF_DESCRIPTOR_FILE_NOT_FOUND" : { - "message" : [ - "Error reading Protobuf descriptor file at path: ." - ] - }, - "PROTOBUF_FIELD_MISSING" : { - "message" : [ - "Searching for in Protobuf schema at gave matches. Candidates: ." - ] - }, - "PROTOBUF_FIELD_MISSING_IN_SQL_SCHEMA" : { - "message" : [ - "Found in Protobuf schema but there is no match in the SQL schema." - ] - }, - "PROTOBUF_FIELD_TYPE_MISMATCH" : { - "message" : [ - "Type mismatch encountered for field: ." - ] - }, - "PROTOBUF_MESSAGE_NOT_FOUND" : { - "message" : [ - "Unable to locate Message in Descriptor." - ] - }, - "PROTOBUF_TYPE_NOT_SUPPORT" : { - "message" : [ - "Protobuf type not yet supported: ." - ] - }, - "RECURSIVE_PROTOBUF_SCHEMA" : { - "message" : [ - "Found recursive reference in Protobuf schema, which can not be processed by Spark by default: . try setting the option `recursive.fields.max.depth` 0 to 10. Going beyond 10 levels of recursion is not allowed." - ] - }, - "RECURSIVE_VIEW" : { - "message" : [ - "Recursive view detected (cycle: )." - ] - }, - "REF_DEFAULT_VALUE_IS_NOT_ALLOWED_IN_PARTITION" : { - "message" : [ - "References to DEFAULT column values are not allowed within the PARTITION clause." - ] - }, - "RENAME_SRC_PATH_NOT_FOUND" : { - "message" : [ - "Failed to rename as was not found." - ], - "sqlState" : "42K03" - }, - "REPEATED_CLAUSE" : { - "message" : [ - "The clause may be used at most once per operation." - ], - "sqlState" : "42614" - }, - "REQUIRES_SINGLE_PART_NAMESPACE" : { - "message" : [ - " requires a single-part namespace, but got ." - ], - "sqlState" : "42K05" - }, - "ROUTINE_ALREADY_EXISTS" : { - "message" : [ - "Cannot create the function because it already exists.", - "Choose a different name, drop or replace the existing function, or add the IF NOT EXISTS clause to tolerate a pre-existing function." - ], - "sqlState" : "42723" - }, - "ROUTINE_NOT_FOUND" : { - "message" : [ - "The function cannot be found. Verify the spelling and correctness of the schema and catalog.", - "If you did not qualify the name with a schema and catalog, verify the current_schema() output, or qualify the name with the correct schema and catalog.", - "To tolerate the error on drop use DROP FUNCTION IF EXISTS." - ], - "sqlState" : "42883" - }, - "SCALAR_SUBQUERY_IS_IN_GROUP_BY_OR_AGGREGATE_FUNCTION" : { - "message" : [ - "The correlated scalar subquery '' is neither present in GROUP BY, nor in an aggregate function. Add it to GROUP BY using ordinal position or wrap it in `first()` (or `first_value`) if you don't care which value you get." - ] - }, - "SCALAR_SUBQUERY_TOO_MANY_ROWS" : { - "message" : [ - "More than one row returned by a subquery used as an expression." 
- ], - "sqlState" : "21000" - }, - "SCHEMA_ALREADY_EXISTS" : { - "message" : [ - "Cannot create schema because it already exists.", - "Choose a different name, drop the existing schema, or add the IF NOT EXISTS clause to tolerate pre-existing schema." - ], - "sqlState" : "42P06" - }, - "SCHEMA_NOT_EMPTY" : { - "message" : [ - "Cannot drop a schema because it contains objects.", - "Use DROP SCHEMA ... CASCADE to drop the schema and all its objects." - ], - "sqlState" : "2BP01" - }, - "SCHEMA_NOT_FOUND" : { - "message" : [ - "The schema cannot be found. Verify the spelling and correctness of the schema and catalog.", - "If you did not qualify the name with a catalog, verify the current_schema() output, or qualify the name with the correct catalog.", - "To tolerate the error on drop use DROP SCHEMA IF EXISTS." - ], - "sqlState" : "42704" - }, - "SECOND_FUNCTION_ARGUMENT_NOT_INTEGER" : { - "message" : [ - "The second argument of function needs to be an integer." - ], - "sqlState" : "22023" - }, - "SEED_EXPRESSION_IS_UNFOLDABLE" : { - "message" : [ - "The seed expression of the expression must be foldable." - ] - }, - "SORT_BY_WITHOUT_BUCKETING" : { - "message" : [ - "sortBy must be used together with bucketBy." - ] - }, - "SPECIFY_BUCKETING_IS_NOT_ALLOWED" : { - "message" : [ - "Cannot specify bucketing information if the table schema is not specified when creating and will be inferred at runtime." - ] - }, - "SPECIFY_PARTITION_IS_NOT_ALLOWED" : { - "message" : [ - "It is not allowed to specify partition columns when the table schema is not defined. When the table schema is not provided, schema and partition columns will be inferred." - ] - }, - "SQL_CONF_NOT_FOUND" : { - "message" : [ - "The SQL config cannot be found. Please verify that the config exists." - ] - }, - "STAR_GROUP_BY_POS" : { - "message" : [ - "Star (*) is not allowed in a select list when GROUP BY an ordinal position is used." - ], - "sqlState" : "0A000" - }, - "STATIC_PARTITION_COLUMN_IN_INSERT_COLUMN_LIST" : { - "message" : [ - "Static partition column is also specified in the column list." - ] - }, - "STREAM_FAILED" : { - "message" : [ - "Query [id = , runId = ] terminated with exception: " - ] - }, - "SUM_OF_LIMIT_AND_OFFSET_EXCEEDS_MAX_INT" : { - "message" : [ - "The sum of the LIMIT clause and the OFFSET clause must not be greater than the maximum 32-bit integer value (2,147,483,647) but found limit = , offset = ." - ] - }, - "TABLE_OR_VIEW_ALREADY_EXISTS" : { - "message" : [ - "Cannot create table or view because it already exists.", - "Choose a different name, drop or replace the existing object, or add the IF NOT EXISTS clause to tolerate pre-existing objects." - ], - "sqlState" : "42P07" - }, - "TABLE_OR_VIEW_NOT_FOUND" : { - "message" : [ - "The table or view cannot be found. Verify the spelling and correctness of the schema and catalog.", - "If you did not qualify the name with a schema, verify the current_schema() output, or qualify the name with the correct schema and catalog.", - "To tolerate the error on drop use DROP VIEW IF EXISTS or DROP TABLE IF EXISTS." - ], - "sqlState" : "42P01" - }, - "TABLE_VALUED_FUNCTION_TOO_MANY_TABLE_ARGUMENTS" : { - "message" : [ - "There are too many table arguments for table-valued function. It allows one table argument, but got: . If you want to allow it, please set \"spark.sql.allowMultipleTableArguments.enabled\" to \"true\"" - ] - }, - "TASK_WRITE_FAILED" : { - "message" : [ - "Task failed while writing rows to ." 
- ] - }, - "TEMP_TABLE_OR_VIEW_ALREADY_EXISTS" : { - "message" : [ - "Cannot create the temporary view because it already exists.", - "Choose a different name, drop or replace the existing view, or add the IF NOT EXISTS clause to tolerate pre-existing views." - ], - "sqlState" : "42P07" - }, - "TEMP_VIEW_NAME_TOO_MANY_NAME_PARTS" : { - "message" : [ - "CREATE TEMPORARY VIEW or the corresponding Dataset APIs only accept single-part view names, but got: ." - ], - "sqlState" : "428EK" - }, - "TOO_MANY_ARRAY_ELEMENTS" : { - "message" : [ - "Cannot initialize array with elements of size ." - ], - "sqlState" : "54000" - }, - "UDTF_ALIAS_NUMBER_MISMATCH" : { - "message" : [ - "The number of aliases supplied in the AS clause does not match the number of columns output by the UDTF. Expected aliases, but got . Please ensure that the number of aliases provided matches the number of columns output by the UDTF." - ] - }, - "UNABLE_TO_ACQUIRE_MEMORY" : { - "message" : [ - "Unable to acquire bytes of memory, got ." - ], - "sqlState" : "53200" - }, - "UNABLE_TO_CONVERT_TO_PROTOBUF_MESSAGE_TYPE" : { - "message" : [ - "Unable to convert SQL type to Protobuf type ." - ] - }, - "UNABLE_TO_INFER_SCHEMA" : { - "message" : [ - "Unable to infer schema for . It must be specified manually." - ], - "sqlState" : "42KD9" - }, - "UNBOUND_SQL_PARAMETER" : { - "message" : [ - "Found the unbound parameter: . Please, fix `args` and provide a mapping of the parameter to a SQL literal." - ], - "sqlState" : "42P02" - }, - "UNCLOSED_BRACKETED_COMMENT" : { - "message" : [ - "Found an unclosed bracketed comment. Please, append */ at the end of the comment." - ], - "sqlState" : "42601" - }, - "UNEXPECTED_INPUT_TYPE" : { - "message" : [ - "Parameter of function requires the type, however has the type ." - ], - "sqlState" : "42K09" - }, - "UNKNOWN_PROTOBUF_MESSAGE_TYPE" : { - "message" : [ - "Attempting to treat as a Message, but it was ." - ] - }, - "UNPIVOT_REQUIRES_ATTRIBUTES" : { - "message" : [ - "UNPIVOT requires all given expressions to be columns when no expressions are given. These are not columns: []." - ], - "sqlState" : "42K0A" - }, - "UNPIVOT_REQUIRES_VALUE_COLUMNS" : { - "message" : [ - "At least one value column needs to be specified for UNPIVOT, all columns specified as ids." - ], - "sqlState" : "42K0A" - }, - "UNPIVOT_VALUE_DATA_TYPE_MISMATCH" : { - "message" : [ - "Unpivot value columns must share a least common type, some types do not: []." - ], - "sqlState" : "42K09" - }, - "UNPIVOT_VALUE_SIZE_MISMATCH" : { - "message" : [ - "All unpivot value columns must have the same size as there are value column names ()." - ], - "sqlState" : "428C4" - }, - "UNRECOGNIZED_SQL_TYPE" : { - "message" : [ - "Unrecognized SQL type - name: , id: ." - ], - "sqlState" : "42704" - }, - "UNRESOLVABLE_TABLE_VALUED_FUNCTION" : { - "message" : [ - "Could not resolve to a table-valued function. Please make sure that is defined as a table-valued function and that all required parameters are provided correctly. If is not defined, please create the table-valued function before using it. For more information about defining table-valued functions, please refer to the Apache Spark documentation." - ] - }, - "UNRESOLVED_ALL_IN_GROUP_BY" : { - "message" : [ - "Cannot infer grouping columns for GROUP BY ALL based on the select clause. Please explicitly specify the grouping columns." - ], - "sqlState" : "42803" - }, - "UNRESOLVED_COLUMN" : { - "message" : [ - "A column or function parameter with name cannot be resolved." 
- ], - "subClass" : { - "WITHOUT_SUGGESTION" : { - "message" : [ - "" - ] - }, - "WITH_SUGGESTION" : { - "message" : [ - "Did you mean one of the following? []." - ] - } - }, - "sqlState" : "42703" - }, - "UNRESOLVED_FIELD" : { - "message" : [ - "A field with name cannot be resolved with the struct-type column ." - ], - "subClass" : { - "WITHOUT_SUGGESTION" : { - "message" : [ - "" - ] - }, - "WITH_SUGGESTION" : { - "message" : [ - "Did you mean one of the following? []." - ] - } - }, - "sqlState" : "42703" - }, - "UNRESOLVED_MAP_KEY" : { - "message" : [ - "Cannot resolve column as a map key. If the key is a string literal, add the single quotes '' around it." - ], - "subClass" : { - "WITHOUT_SUGGESTION" : { - "message" : [ - "" - ] - }, - "WITH_SUGGESTION" : { - "message" : [ - "Otherwise did you mean one of the following column(s)? []." - ] - } - }, - "sqlState" : "42703" - }, - "UNRESOLVED_ROUTINE" : { - "message" : [ - "Cannot resolve function on search path ." - ], - "sqlState" : "42883" - }, - "UNRESOLVED_USING_COLUMN_FOR_JOIN" : { - "message" : [ - "USING column cannot be resolved on the side of the join. The -side columns: []." - ], - "sqlState" : "42703" - }, - "UNSET_NONEXISTENT_PROPERTIES" : { - "message" : [ - "Attempted to unset non-existent properties [] in table
." - ] - }, - "UNSUPPORTED_ADD_FILE" : { - "message" : [ - "Don't support add file." - ], - "subClass" : { - "DIRECTORY" : { - "message" : [ - "The file is a directory, consider to set \"spark.sql.legacy.addSingleFileInAddFile\" to \"false\"." - ] - }, - "LOCAL_DIRECTORY" : { - "message" : [ - "The local directory is not supported in a non-local master mode." - ] - } - } - }, - "UNSUPPORTED_ARROWTYPE" : { - "message" : [ - "Unsupported arrow type ." - ], - "sqlState" : "0A000" - }, - "UNSUPPORTED_CHAR_OR_VARCHAR_AS_STRING" : { - "message" : [ - "The char/varchar type can't be used in the table schema. If you want Spark treat them as string type as same as Spark 3.0 and earlier, please set \"spark.sql.legacy.charVarcharAsString\" to \"true\"." - ] - }, - "UNSUPPORTED_DATASOURCE_FOR_DIRECT_QUERY" : { - "message" : [ - "Unsupported data source type for direct query on files: " - ] - }, - "UNSUPPORTED_DATATYPE" : { - "message" : [ - "Unsupported data type ." - ], - "sqlState" : "0A000" - }, - "UNSUPPORTED_DATA_SOURCE_FOR_DIRECT_QUERY" : { - "message" : [ - "The direct query on files does not support the data source type: . Please try a different data source type or consider using a different query method." - ] - }, - "UNSUPPORTED_DATA_TYPE_FOR_DATASOURCE" : { - "message" : [ - "The datasource doesn't support the column of the type ." - ] - }, - "UNSUPPORTED_DEFAULT_VALUE" : { - "message" : [ - "DEFAULT column values is not supported." - ], - "subClass" : { - "WITHOUT_SUGGESTION" : { - "message" : [ - "" - ] - }, - "WITH_SUGGESTION" : { - "message" : [ - "Enable it by setting \"spark.sql.defaultColumn.enabled\" to \"true\"." - ] - } - } - }, - "UNSUPPORTED_DESERIALIZER" : { - "message" : [ - "The deserializer is not supported:" - ], - "subClass" : { - "DATA_TYPE_MISMATCH" : { - "message" : [ - "need a(n) field but got ." - ] - }, - "FIELD_NUMBER_MISMATCH" : { - "message" : [ - "try to map to Tuple, but failed as the number of fields does not line up." - ] - } - }, - "sqlState" : "0A000" - }, - "UNSUPPORTED_EXPRESSION_GENERATED_COLUMN" : { - "message" : [ - "Cannot create generated column with generation expression because ." - ] - }, - "UNSUPPORTED_EXPR_FOR_OPERATOR" : { - "message" : [ - "A query operator contains one or more unsupported expressions. Consider to rewrite it to avoid window functions, aggregate functions, and generator functions in the WHERE clause.", - "Invalid expressions: []" - ] - }, - "UNSUPPORTED_EXPR_FOR_WINDOW" : { - "message" : [ - "Expression not supported within a window function." - ], - "sqlState" : "42P20" - }, - "UNSUPPORTED_FEATURE" : { - "message" : [ - "The feature is not supported:" - ], - "subClass" : { - "AES_MODE" : { - "message" : [ - "AES- with the padding by the function." - ] - }, - "AES_MODE_AAD" : { - "message" : [ - " with AES- does not support additional authenticate data (AAD)." - ] - }, - "AES_MODE_IV" : { - "message" : [ - " with AES- does not support initialization vectors (IVs)." - ] - }, - "ANALYZE_UNCACHED_TEMP_VIEW" : { - "message" : [ - "The ANALYZE TABLE FOR COLUMNS command can operate on temporary views that have been cached already. Consider to cache the view ." - ] - }, - "ANALYZE_UNSUPPORTED_COLUMN_TYPE" : { - "message" : [ - "The ANALYZE TABLE FOR COLUMNS command does not support the type of the column in the table ." - ] - }, - "ANALYZE_VIEW" : { - "message" : [ - "The ANALYZE TABLE command does not support views." - ] - }, - "CATALOG_OPERATION" : { - "message" : [ - "Catalog does not support ." 
- ] - }, - "COMBINATION_QUERY_RESULT_CLAUSES" : { - "message" : [ - "Combination of ORDER BY/SORT BY/DISTRIBUTE BY/CLUSTER BY." - ] - }, - "COMMENT_NAMESPACE" : { - "message" : [ - "Attach a comment to the namespace ." - ] - }, - "DESC_TABLE_COLUMN_PARTITION" : { - "message" : [ - "DESC TABLE COLUMN for a specific partition." - ] - }, - "DROP_DATABASE" : { - "message" : [ - "Drop the default database ." - ] - }, - "DROP_NAMESPACE" : { - "message" : [ - "Drop the namespace ." - ] - }, - "HIVE_TABLE_TYPE" : { - "message" : [ - "The is hive ." - ] - }, - "HIVE_WITH_ANSI_INTERVALS" : { - "message" : [ - "Hive table with ANSI intervals." - ] - }, - "INSERT_PARTITION_SPEC_IF_NOT_EXISTS" : { - "message" : [ - "INSERT INTO with IF NOT EXISTS in the PARTITION spec." - ] - }, - "LATERAL_COLUMN_ALIAS_IN_AGGREGATE_FUNC" : { - "message" : [ - "Referencing a lateral column alias in the aggregate function ." - ] - }, - "LATERAL_COLUMN_ALIAS_IN_AGGREGATE_WITH_WINDOW_AND_HAVING" : { - "message" : [ - "Referencing lateral column alias in the aggregate query both with window expressions and with having clause. Please rewrite the aggregate query by removing the having clause or removing lateral alias reference in the SELECT list." - ] - }, - "LATERAL_COLUMN_ALIAS_IN_GROUP_BY" : { - "message" : [ - "Referencing a lateral column alias via GROUP BY alias/ALL is not supported yet." - ] - }, - "LATERAL_COLUMN_ALIAS_IN_WINDOW" : { - "message" : [ - "Referencing a lateral column alias in window expression ." - ] - }, - "LATERAL_JOIN_USING" : { - "message" : [ - "JOIN USING with LATERAL correlation." - ] - }, - "LITERAL_TYPE" : { - "message" : [ - "Literal for '' of ." - ] - }, - "MULTIPLE_BUCKET_TRANSFORMS" : { - "message" : [ - "Multiple bucket TRANSFORMs." - ] - }, - "MULTI_ACTION_ALTER" : { - "message" : [ - "The target JDBC server hosting table does not support ALTER TABLE with multiple actions. Split the ALTER TABLE up into individual actions to avoid this error." - ] - }, - "ORC_TYPE_CAST" : { - "message" : [ - "Unable to convert of Orc to data type ." - ] - }, - "PANDAS_UDAF_IN_PIVOT" : { - "message" : [ - "Pandas user defined aggregate function in the PIVOT clause." - ] - }, - "PARAMETER_MARKER_IN_UNEXPECTED_STATEMENT" : { - "message" : [ - "Parameter markers are not allowed in ." - ] - }, - "PARTITION_WITH_NESTED_COLUMN_IS_UNSUPPORTED" : { - "message" : [ - "Invalid partitioning: is missing or is in a map or array." - ] - }, - "PIVOT_AFTER_GROUP_BY" : { - "message" : [ - "PIVOT clause following a GROUP BY clause. Consider pushing the GROUP BY into a subquery." - ] - }, - "PIVOT_TYPE" : { - "message" : [ - "Pivoting by the value '' of the column data type ." - ] - }, - "PYTHON_UDF_IN_ON_CLAUSE" : { - "message" : [ - "Python UDF in the ON clause of a JOIN. In case of an INNNER JOIN consider rewriting to a CROSS JOIN with a WHERE clause." - ] - }, - "REMOVE_NAMESPACE_COMMENT" : { - "message" : [ - "Remove a comment from the namespace ." - ] - }, - "REPLACE_NESTED_COLUMN" : { - "message" : [ - "The replace function does not support nested column ." - ] - }, - "SET_NAMESPACE_PROPERTY" : { - "message" : [ - " is a reserved namespace property, ." - ] - }, - "SET_OPERATION_ON_MAP_TYPE" : { - "message" : [ - "Cannot have MAP type columns in DataFrame which calls set operations (INTERSECT, EXCEPT, etc.), but the type of column is ." - ] - }, - "SET_PROPERTIES_AND_DBPROPERTIES" : { - "message" : [ - "set PROPERTIES and DBPROPERTIES at the same time." 
- ] - }, - "SET_TABLE_PROPERTY" : { - "message" : [ - " is a reserved table property, ." - ] - }, - "TABLE_OPERATION" : { - "message" : [ - "Table does not support . Please check the current catalog and namespace to make sure the qualified table name is expected, and also check the catalog implementation which is configured by \"spark.sql.catalog\"." - ] - }, - "TIME_TRAVEL" : { - "message" : [ - "Time travel on the relation: ." - ] - }, - "TOO_MANY_TYPE_ARGUMENTS_FOR_UDF_CLASS" : { - "message" : [ - "UDF class with type arguments." - ] - }, - "TRANSFORM_DISTINCT_ALL" : { - "message" : [ - "TRANSFORM with the DISTINCT/ALL clause." - ] - }, - "TRANSFORM_NON_HIVE" : { - "message" : [ - "TRANSFORM with SERDE is only supported in hive mode." - ] - } - }, - "sqlState" : "0A000" - }, - "UNSUPPORTED_GENERATOR" : { - "message" : [ - "The generator is not supported:" - ], - "subClass" : { - "MULTI_GENERATOR" : { - "message" : [ - "only one generator allowed per clause but found : ." - ] - }, - "NESTED_IN_EXPRESSIONS" : { - "message" : [ - "nested in expressions ." - ] - }, - "NOT_GENERATOR" : { - "message" : [ - " is expected to be a generator. However, its class is , which is not a generator." - ] - }, - "OUTSIDE_SELECT" : { - "message" : [ - "outside the SELECT clause, found: ." - ] - } - }, - "sqlState" : "0A000" - }, - "UNSUPPORTED_GROUPING_EXPRESSION" : { - "message" : [ - "grouping()/grouping_id() can only be used with GroupingSets/Cube/Rollup." - ] - }, - "UNSUPPORTED_INSERT" : { - "message" : [ - "Can't insert into the target." - ], - "subClass" : { - "NOT_ALLOWED" : { - "message" : [ - "The target relation does not allow insertion." - ] - }, - "NOT_PARTITIONED" : { - "message" : [ - "The target relation is not partitioned." - ] - }, - "RDD_BASED" : { - "message" : [ - "An RDD-based table is not allowed." - ] - }, - "READ_FROM" : { - "message" : [ - "The target relation is also being read from." - ] - } - } - }, - "UNSUPPORTED_MERGE_CONDITION" : { - "message" : [ - "MERGE operation contains unsupported condition." - ], - "subClass" : { - "AGGREGATE" : { - "message" : [ - "Aggregates are not allowed: ." - ] - }, - "NON_DETERMINISTIC" : { - "message" : [ - "Non-deterministic expressions are not allowed: ." - ] - }, - "SUBQUERY" : { - "message" : [ - "Subqueries are not allowed: ." - ] - } - } - }, - "UNSUPPORTED_OVERWRITE" : { - "message" : [ - "Can't overwrite the target that is also being read from." - ], - "subClass" : { - "PATH" : { - "message" : [ - "The target path is ." - ] - }, - "TABLE" : { - "message" : [ - "The target table is
." - ] - } - } - }, - "UNSUPPORTED_SAVE_MODE" : { - "message" : [ - "The save mode is not supported for:" - ], - "subClass" : { - "EXISTENT_PATH" : { - "message" : [ - "an existent path." - ] - }, - "NON_EXISTENT_PATH" : { - "message" : [ - "a non-existent path." - ] - } - } - }, - "UNSUPPORTED_SUBQUERY_EXPRESSION_CATEGORY" : { - "message" : [ - "Unsupported subquery expression:" - ], - "subClass" : { - "ACCESSING_OUTER_QUERY_COLUMN_IS_NOT_ALLOWED" : { - "message" : [ - "Accessing outer query column is not allowed in this location." - ] - }, - "AGGREGATE_FUNCTION_MIXED_OUTER_LOCAL_REFERENCES" : { - "message" : [ - "Found an aggregate function in a correlated predicate that has both outer and local references, which is not supported: ." - ] - }, - "CORRELATED_COLUMN_IS_NOT_ALLOWED_IN_PREDICATE" : { - "message" : [ - "Correlated column is not allowed in predicate: ." - ] - }, - "CORRELATED_COLUMN_NOT_FOUND" : { - "message" : [ - "A correlated outer name reference within a subquery expression body was not found in the enclosing query: ." - ] - }, - "CORRELATED_REFERENCE" : { - "message" : [ - "Expressions referencing the outer query are not supported outside of WHERE/HAVING clauses: ." - ] - }, - "LATERAL_JOIN_CONDITION_NON_DETERMINISTIC" : { - "message" : [ - "Lateral join condition cannot be non-deterministic: ." - ] - }, - "MUST_AGGREGATE_CORRELATED_SCALAR_SUBQUERY" : { - "message" : [ - "Correlated scalar subqueries must be aggregated to return at most one row." - ] - }, - "NON_CORRELATED_COLUMNS_IN_GROUP_BY" : { - "message" : [ - "A GROUP BY clause in a scalar correlated subquery cannot contain non-correlated columns: ." - ] - }, - "NON_DETERMINISTIC_LATERAL_SUBQUERIES" : { - "message" : [ - "Non-deterministic lateral subqueries are not supported when joining with outer relations that produce more than one row." - ] - }, - "UNSUPPORTED_CORRELATED_REFERENCE_DATA_TYPE" : { - "message" : [ - "Correlated column reference '' cannot be type." - ] - }, - "UNSUPPORTED_CORRELATED_SCALAR_SUBQUERY" : { - "message" : [ - "Correlated scalar subqueries can only be used in filters, aggregations, projections, and UPDATE/MERGE/DELETE commands." - ] - }, - "UNSUPPORTED_IN_EXISTS_SUBQUERY" : { - "message" : [ - "IN/EXISTS predicate subqueries can only be used in filters, joins, aggregations, window functions, projections, and UPDATE/MERGE/DELETE commands." - ] - } - }, - "sqlState" : "0A000" - }, - "UNSUPPORTED_TYPED_LITERAL" : { - "message" : [ - "Literals of the type are not supported. Supported types are ." - ], - "sqlState" : "0A000" - }, - "UNTYPED_SCALA_UDF" : { - "message" : [ - "You're using untyped Scala UDF, which does not have the input type information. Spark may blindly pass null to the Scala closure with primitive-type argument, and the closure will see the default value of the Java type for the null argument, e.g. `udf((x: Int) => x, IntegerType)`, the result is 0 for null input. To get rid of this error, you could:", - "1. use typed Scala UDF APIs(without return type parameter), e.g. `udf((x: Int) => x)`.", - "2. use Java UDF APIs, e.g. `udf(new UDF1[String, Integer] { override def call(s: String): Integer = s.length() }, IntegerType)`, if input types are all non primitive.", - "3. set \"spark.sql.legacy.allowUntypedScalaUDF\" to \"true\" and use this API with caution." 
- ] - }, - "VIEW_ALREADY_EXISTS" : { - "message" : [ - "Cannot create view because it already exists.", - "Choose a different name, drop or replace the existing object, or add the IF NOT EXISTS clause to tolerate pre-existing objects." - ], - "sqlState" : "42P07" - }, - "VIEW_NOT_FOUND" : { - "message" : [ - "The view cannot be found. Verify the spelling and correctness of the schema and catalog.", - "If you did not qualify the name with a schema, verify the current_schema() output, or qualify the name with the correct schema and catalog.", - "To tolerate the error on drop use DROP VIEW IF EXISTS." - ], - "sqlState" : "42P01" - }, - "WINDOW_FUNCTION_AND_FRAME_MISMATCH" : { - "message" : [ - " function can only be evaluated in an ordered row-based window frame with a single offset: ." - ] - }, - "WINDOW_FUNCTION_WITHOUT_OVER_CLAUSE" : { - "message" : [ - "Window function requires an OVER clause." - ] - }, - "WRITE_STREAM_NOT_ALLOWED" : { - "message" : [ - "`writeStream` can be called only on streaming Dataset/DataFrame." - ] - }, - "WRONG_COMMAND_FOR_OBJECT_TYPE" : { - "message" : [ - "The operation requires a . But is a . Use instead." - ] - }, - "WRONG_NUM_ARGS" : { - "message" : [ - "The requires parameters but the actual number is ." - ], - "subClass" : { - "WITHOUT_SUGGESTION" : { - "message" : [ - "Please, refer to '/sql-ref-functions.html' for a fix." - ] - }, - "WITH_SUGGESTION" : { - "message" : [ - "If you have to call this function with parameters, set the legacy configuration to ." - ] - } - }, - "sqlState" : "42605" - }, - "_LEGACY_ERROR_TEMP_0001" : { - "message" : [ - "Invalid InsertIntoContext." - ] - }, - "_LEGACY_ERROR_TEMP_0004" : { - "message" : [ - "Empty source for merge: you should specify a source table/subquery in merge." - ] - }, - "_LEGACY_ERROR_TEMP_0006" : { - "message" : [ - "The number of inserted values cannot match the fields." - ] - }, - "_LEGACY_ERROR_TEMP_0008" : { - "message" : [ - "There must be at least one WHEN clause in a MERGE statement." - ] - }, - "_LEGACY_ERROR_TEMP_0012" : { - "message" : [ - "DISTRIBUTE BY is not supported." - ] - }, - "_LEGACY_ERROR_TEMP_0014" : { - "message" : [ - "TABLESAMPLE does not accept empty inputs." - ] - }, - "_LEGACY_ERROR_TEMP_0015" : { - "message" : [ - "TABLESAMPLE() is not supported." - ] - }, - "_LEGACY_ERROR_TEMP_0016" : { - "message" : [ - " is not a valid byte length literal, expected syntax: DIGIT+ ('B' | 'K' | 'M' | 'G')." - ] - }, - "_LEGACY_ERROR_TEMP_0018" : { - "message" : [ - "Function trim doesn't support with type . Please use BOTH, LEADING or TRAILING as trim type." - ] - }, - "_LEGACY_ERROR_TEMP_0024" : { - "message" : [ - "Can only have a single from-to unit in the interval literal syntax." - ] - }, - "_LEGACY_ERROR_TEMP_0026" : { - "message" : [ - "Can only use numbers in the interval value part for multiple unit value pairs interval form, but got invalid value: ." - ] - }, - "_LEGACY_ERROR_TEMP_0027" : { - "message" : [ - "The value of from-to unit must be a string." - ] - }, - "_LEGACY_ERROR_TEMP_0028" : { - "message" : [ - "Intervals FROM TO are not supported." - ] - }, - "_LEGACY_ERROR_TEMP_0029" : { - "message" : [ - "Cannot mix year-month and day-time fields: ." - ] - }, - "_LEGACY_ERROR_TEMP_0031" : { - "message" : [ - "Invalid number of buckets: ." - ] - }, - "_LEGACY_ERROR_TEMP_0032" : { - "message" : [ - "Duplicated table paths found: '' and ''. LOCATION and the case insensitive key 'path' in OPTIONS are all used to indicate the custom table path, you can only specify one of them." 
- ] - }, - "_LEGACY_ERROR_TEMP_0033" : { - "message" : [ - "Expected either STORED AS or STORED BY, not both." - ] - }, - "_LEGACY_ERROR_TEMP_0034" : { - "message" : [ - " is not supported in Hive-style ." - ] - }, - "_LEGACY_ERROR_TEMP_0035" : { - "message" : [ - "Operation not allowed: ." - ] - }, - "_LEGACY_ERROR_TEMP_0037" : { - "message" : [ - "It is not allowed to add catalog/namespace prefix to the table name in CACHE TABLE AS SELECT." - ] - }, - "_LEGACY_ERROR_TEMP_0038" : { - "message" : [ - "CTE definition can't have duplicate names: ." - ] - }, - "_LEGACY_ERROR_TEMP_0039" : { - "message" : [ - "Unsupported SQL statement." - ] - }, - "_LEGACY_ERROR_TEMP_0043" : { - "message" : [ - "Expected format is 'RESET' or 'RESET key'. If you want to include special characters in key, please use quotes, e.g., RESET `key`." - ] - }, - "_LEGACY_ERROR_TEMP_0044" : { - "message" : [ - "The interval value must be in the range of [-18, +18] hours with second precision." - ] - }, - "_LEGACY_ERROR_TEMP_0045" : { - "message" : [ - "Invalid time zone displacement value." - ] - }, - "_LEGACY_ERROR_TEMP_0046" : { - "message" : [ - "CREATE TEMPORARY TABLE without a provider is not allowed." - ] - }, - "_LEGACY_ERROR_TEMP_0047" : { - "message" : [ - "'ROW FORMAT' must be used with 'STORED AS'." - ] - }, - "_LEGACY_ERROR_TEMP_0048" : { - "message" : [ - "Unsupported operation: Used defined record reader/writer classes." - ] - }, - "_LEGACY_ERROR_TEMP_0049" : { - "message" : [ - "Directory path and 'path' in OPTIONS should be specified one, but not both." - ] - }, - "_LEGACY_ERROR_TEMP_0051" : { - "message" : [ - "Empty set in grouping sets is not supported." - ] - }, - "_LEGACY_ERROR_TEMP_0052" : { - "message" : [ - "CREATE VIEW with both IF NOT EXISTS and REPLACE is not allowed." - ] - }, - "_LEGACY_ERROR_TEMP_0053" : { - "message" : [ - "It is not allowed to define a TEMPORARY view with IF NOT EXISTS." - ] - }, - "_LEGACY_ERROR_TEMP_0056" : { - "message" : [ - "Invalid time travel spec: ." - ] - }, - "_LEGACY_ERROR_TEMP_0060" : { - "message" : [ - "." - ] - }, - "_LEGACY_ERROR_TEMP_0061" : { - "message" : [ - "." - ] - }, - "_LEGACY_ERROR_TEMP_0062" : { - "message" : [ - "." - ] - }, - "_LEGACY_ERROR_TEMP_0063" : { - "message" : [ - "." - ] - }, - "_LEGACY_ERROR_TEMP_0064" : { - "message" : [ - "." - ] - }, - "_LEGACY_ERROR_TEMP_1000" : { - "message" : [ - "LEGACY store assignment policy is disallowed in Spark data source V2. Please set the configuration to other values." - ] - }, - "_LEGACY_ERROR_TEMP_1002" : { - "message" : [ - "Unable to generate an encoder for inner class `` without access to the scope that this class was defined in.", - "Try moving this class out of its parent class." - ] - }, - "_LEGACY_ERROR_TEMP_1004" : { - "message" : [ - "Window specification is not defined in the WINDOW clause." - ] - }, - "_LEGACY_ERROR_TEMP_1005" : { - "message" : [ - " doesn't show up in the GROUP BY list ." - ] - }, - "_LEGACY_ERROR_TEMP_1006" : { - "message" : [ - "Aggregate expression required for pivot, but '' did not appear in any aggregate function." - ] - }, - "_LEGACY_ERROR_TEMP_1007" : { - "message" : [ - "Cannot write into temp view as it's not a data source v2 relation." - ] - }, - "_LEGACY_ERROR_TEMP_1008" : { - "message" : [ - " is not a temp view of streaming logical plan, please use batch API such as `DataFrameReader.table` to read it." - ] - }, - "_LEGACY_ERROR_TEMP_1009" : { - "message" : [ - "The depth of view exceeds the maximum view resolution depth (). 
Analysis is aborted to avoid errors. Increase the value of to work around this." - ] - }, - "_LEGACY_ERROR_TEMP_1010" : { - "message" : [ - "Inserting into a view is not allowed. View: ." - ] - }, - "_LEGACY_ERROR_TEMP_1011" : { - "message" : [ - "Writing into a view is not allowed. View: ." - ] - }, - "_LEGACY_ERROR_TEMP_1012" : { - "message" : [ - "Cannot write into v1 table: ." - ] - }, - "_LEGACY_ERROR_TEMP_1013" : { - "message" : [ - " is a . '' expects a table.." - ] - }, - "_LEGACY_ERROR_TEMP_1014" : { - "message" : [ - " is a temp view. '' expects a permanent view." - ] - }, - "_LEGACY_ERROR_TEMP_1015" : { - "message" : [ - " is a table. '' expects a view.." - ] - }, - "_LEGACY_ERROR_TEMP_1016" : { - "message" : [ - " is a temp view. '' expects a table or permanent view." - ] - }, - "_LEGACY_ERROR_TEMP_1017" : { - "message" : [ - " is a built-in/temporary function. '' expects a persistent function.." - ] - }, - "_LEGACY_ERROR_TEMP_1018" : { - "message" : [ - " is a permanent view, which is not supported by streaming reading API such as `DataStreamReader.table` yet." - ] - }, - "_LEGACY_ERROR_TEMP_1021" : { - "message" : [ - "count(.*) is not allowed. Please use count(*) or expand the columns manually, e.g. count(col1, col2)." - ] - }, - "_LEGACY_ERROR_TEMP_1023" : { - "message" : [ - "Function does not support ." - ] - }, - "_LEGACY_ERROR_TEMP_1024" : { - "message" : [ - "FILTER expression is non-deterministic, it cannot be used in aggregate functions." - ] - }, - "_LEGACY_ERROR_TEMP_1025" : { - "message" : [ - "FILTER expression is not of type boolean. It cannot be used in an aggregate function." - ] - }, - "_LEGACY_ERROR_TEMP_1026" : { - "message" : [ - "FILTER expression contains aggregate. It cannot be used in an aggregate function." - ] - }, - "_LEGACY_ERROR_TEMP_1027" : { - "message" : [ - "FILTER expression contains window function. It cannot be used in an aggregate function." - ] - }, - "_LEGACY_ERROR_TEMP_1028" : { - "message" : [ - "Number of column aliases does not match number of columns. Number of column aliases: ; number of columns: ." - ] - }, - "_LEGACY_ERROR_TEMP_1030" : { - "message" : [ - "Window aggregate function with filter predicate is not supported yet." - ] - }, - "_LEGACY_ERROR_TEMP_1031" : { - "message" : [ - "It is not allowed to use a window function inside an aggregate function. Please use the inner window function in a sub-query." - ] - }, - "_LEGACY_ERROR_TEMP_1032" : { - "message" : [ - " does not have any WindowExpression." - ] - }, - "_LEGACY_ERROR_TEMP_1033" : { - "message" : [ - " has multiple Window Specifications ().", - "Please file a bug report with this error message, stack trace, and the query." - ] - }, - "_LEGACY_ERROR_TEMP_1034" : { - "message" : [ - "It is not allowed to use window functions inside clause." - ] - }, - "_LEGACY_ERROR_TEMP_1035" : { - "message" : [ - "Cannot specify window frame for function." - ] - }, - "_LEGACY_ERROR_TEMP_1036" : { - "message" : [ - "Window Frame must match the required frame ." - ] - }, - "_LEGACY_ERROR_TEMP_1037" : { - "message" : [ - "Window function requires window to be ordered, please add ORDER BY clause. For example SELECT (value_expr) OVER (PARTITION BY window_partition ORDER BY window_ordering) from table." - ] - }, - "_LEGACY_ERROR_TEMP_1039" : { - "message" : [ - "Multiple time/session window expressions would result in a cartesian product of rows, therefore they are currently not supported." 
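
_LEGACY_ERROR_TEMP_1037 above already carries a SQL example of the required OVER (... ORDER BY ...) shape; for completeness, a small runnable sketch of the failing and fixed forms (the view `t` and columns `grp`/`value` are invented for illustration):

    import org.apache.spark.sql.SparkSession

    object WindowNeedsOrderBy {
      def main(args: Array[String]): Unit = {
        val spark = SparkSession.builder().master("local[1]").appName("window-order").getOrCreate()
        spark.range(6).selectExpr("id % 2 AS grp", "id AS value").createOrReplaceTempView("t")

        // Fails with the "requires window to be ordered" error: row_number() is
        // order-sensitive but the window has no ORDER BY.
        // spark.sql("SELECT row_number() OVER (PARTITION BY grp) FROM t").show()

        // Works: ORDER BY pins down the row ordering inside each partition.
        spark.sql(
          "SELECT grp, value, row_number() OVER (PARTITION BY grp ORDER BY value) AS rn FROM t"
        ).show()

        spark.stop()
      }
    }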
- ] - }, - "_LEGACY_ERROR_TEMP_1040" : { - "message" : [ - "Gap duration expression used in session window must be CalendarIntervalType, but got
." - ] - }, - "_LEGACY_ERROR_TEMP_1045" : { - "message" : [ - "ALTER TABLE SET LOCATION does not support partition for v2 tables." - ] - }, - "_LEGACY_ERROR_TEMP_1046" : { - "message" : [ - "Join strategy hint parameter should be an identifier or string but was ()." - ] - }, - "_LEGACY_ERROR_TEMP_1047" : { - "message" : [ - " Hint parameter should include columns, but found." - ] - }, - "_LEGACY_ERROR_TEMP_1048" : { - "message" : [ - " Hint expects a partition number as a parameter." - ] - }, - "_LEGACY_ERROR_TEMP_1049" : { - "message" : [ - "Syntax error in attribute name: ." - ] - }, - "_LEGACY_ERROR_TEMP_1050" : { - "message" : [ - "Can only star expand struct data types. Attribute: ``." - ] - }, - "_LEGACY_ERROR_TEMP_1052" : { - "message" : [ - "ADD COLUMN with v1 tables cannot specify NOT NULL." - ] - }, - "_LEGACY_ERROR_TEMP_1053" : { - "message" : [ - "ALTER COLUMN with v1 tables cannot specify NOT NULL." - ] - }, - "_LEGACY_ERROR_TEMP_1054" : { - "message" : [ - "ALTER COLUMN cannot find column in v1 table. Available: ." - ] - }, - "_LEGACY_ERROR_TEMP_1055" : { - "message" : [ - "The database name is not valid: ." - ] - }, - "_LEGACY_ERROR_TEMP_1057" : { - "message" : [ - "SHOW COLUMNS with conflicting databases: '' != ''." - ] - }, - "_LEGACY_ERROR_TEMP_1058" : { - "message" : [ - "Cannot create table with both USING and ." - ] - }, - "_LEGACY_ERROR_TEMP_1059" : { - "message" : [ - "STORED AS with file format '' is invalid." - ] - }, - "_LEGACY_ERROR_TEMP_1060" : { - "message" : [ - " does not support nested column: ." - ] - }, - "_LEGACY_ERROR_TEMP_1065" : { - "message" : [ - "`` is not a valid name for tables/databases. Valid names only contain alphabet characters, numbers and _." - ] - }, - "_LEGACY_ERROR_TEMP_1066" : { - "message" : [ - " is a system preserved database, you cannot create a database with this name." - ] - }, - "_LEGACY_ERROR_TEMP_1068" : { - "message" : [ - " is a system preserved database, you cannot use it as current database. To access global temporary views, you should use qualified name with the GLOBAL_TEMP_DATABASE, e.g. SELECT * FROM .viewName." - ] - }, - "_LEGACY_ERROR_TEMP_1069" : { - "message" : [ - "CREATE EXTERNAL TABLE must be accompanied by LOCATION." - ] - }, - "_LEGACY_ERROR_TEMP_1071" : { - "message" : [ - "Some existing schema fields () are not present in the new schema. We don't support dropping columns yet." - ] - }, - "_LEGACY_ERROR_TEMP_1072" : { - "message" : [ - "Only the tables/views belong to the same database can be retrieved. Querying tables/views are ." - ] - }, - "_LEGACY_ERROR_TEMP_1073" : { - "message" : [ - "RENAME TABLE source and destination databases do not match: '' != ''." - ] - }, - "_LEGACY_ERROR_TEMP_1074" : { - "message" : [ - "RENAME TEMPORARY VIEW from '' to '': cannot specify database name '' in the destination table." - ] - }, - "_LEGACY_ERROR_TEMP_1076" : { - "message" : [ - "Partition spec is invalid.
." - ] - }, - "_LEGACY_ERROR_TEMP_1079" : { - "message" : [ - "Resource Type '' is not supported." - ] - }, - "_LEGACY_ERROR_TEMP_1080" : { - "message" : [ - "Table did not specify database." - ] - }, - "_LEGACY_ERROR_TEMP_1081" : { - "message" : [ - "Table did not specify locationUri." - ] - }, - "_LEGACY_ERROR_TEMP_1082" : { - "message" : [ - "Partition [] did not specify locationUri." - ] - }, - "_LEGACY_ERROR_TEMP_1083" : { - "message" : [ - "Number of buckets should be greater than 0 but less than or equal to bucketing.maxBuckets (``). Got ``." - ] - }, - "_LEGACY_ERROR_TEMP_1084" : { - "message" : [ - "Corrupted table name context in catalog: parts expected, but part is missing." - ] - }, - "_LEGACY_ERROR_TEMP_1085" : { - "message" : [ - "Corrupted view SQL configs in catalog." - ] - }, - "_LEGACY_ERROR_TEMP_1086" : { - "message" : [ - "Corrupted view query output column names in catalog: parts expected, but part is missing." - ] - }, - "_LEGACY_ERROR_TEMP_1087" : { - "message" : [ - "Corrupted view referred temp view names in catalog." - ] - }, - "_LEGACY_ERROR_TEMP_1088" : { - "message" : [ - "Corrupted view referred temp functions names in catalog." - ] - }, - "_LEGACY_ERROR_TEMP_1089" : { - "message" : [ - "Column statistics deserialization is not supported for column of data type: ." - ] - }, - "_LEGACY_ERROR_TEMP_1090" : { - "message" : [ - "Column statistics serialization is not supported for column of data type: ." - ] - }, - "_LEGACY_ERROR_TEMP_1097" : { - "message" : [ - "The field for corrupt records must be string type and nullable." - ] - }, - "_LEGACY_ERROR_TEMP_1098" : { - "message" : [ - "DataType '' is not supported by ." - ] - }, - "_LEGACY_ERROR_TEMP_1099" : { - "message" : [ - "() doesn't support the mode. Acceptable modes are and ." - ] - }, - "_LEGACY_ERROR_TEMP_1100" : { - "message" : [ - "The '' parameter of function '' needs to be a literal." - ] - }, - "_LEGACY_ERROR_TEMP_1103" : { - "message" : [ - "Unsupported component type in arrays." - ] - }, - "_LEGACY_ERROR_TEMP_1104" : { - "message" : [ - "The second argument should be a double literal." - ] - }, - "_LEGACY_ERROR_TEMP_1107" : { - "message" : [ - "Table
declares capability but is not an instance of ."
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_1108" : {
-    "message" : [
-      "Delete by condition with subquery is not supported: ."
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_1109" : {
-    "message" : [
-      "Exec update failed: cannot translate expression to source filter: ."
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_1110" : {
-    "message" : [
-      "Cannot delete from table where ."
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_1111" : {
-    "message" : [
-      "DESCRIBE does not support partition for v2 tables."
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_1113" : {
-    "message" : [
-      "Table does not support ."
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_1114" : {
-    "message" : [
-      "The streaming sources in a query do not have a common supported execution mode.",
-      "Sources support micro-batch: .",
-      "Sources support continuous: ."
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_1120" : {
-    "message" : [
-      "Unsupported NamespaceChange in JDBC catalog."
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_1121" : {
-    "message" : [
-      "Table does not support : ."
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_1122" : {
-    "message" : [
-      "Table
is not a row-level operation table." - ] - }, - "_LEGACY_ERROR_TEMP_1123" : { - "message" : [ - "Cannot rename a table with ALTER VIEW. Please use ALTER TABLE instead." - ] - }, - "_LEGACY_ERROR_TEMP_1125" : { - "message" : [ - "Database from v1 session catalog is not specified." - ] - }, - "_LEGACY_ERROR_TEMP_1126" : { - "message" : [ - "Nested databases are not supported by v1 session catalog: ." - ] - }, - "_LEGACY_ERROR_TEMP_1127" : { - "message" : [ - "Invalid partitionExprs specified: For range partitioning use REPARTITION_BY_RANGE instead." - ] - }, - "_LEGACY_ERROR_TEMP_1128" : { - "message" : [ - "Failed to resolve the schema for for the partition column: . It must be specified manually." - ] - }, - "_LEGACY_ERROR_TEMP_1131" : { - "message" : [ - "Data source does not support output mode." - ] - }, - "_LEGACY_ERROR_TEMP_1132" : { - "message" : [ - "A schema needs to be specified when using ." - ] - }, - "_LEGACY_ERROR_TEMP_1133" : { - "message" : [ - "The user-specified schema doesn't match the actual schema:", - "user-specified: , actual: . If you're using", - "DataFrameReader.schema API or creating a table, please do not specify the schema.", - "Or if you're scanning an existed table, please drop it and re-create it." - ] - }, - "_LEGACY_ERROR_TEMP_1134" : { - "message" : [ - "Unable to infer schema for at . It must be specified manually." - ] - }, - "_LEGACY_ERROR_TEMP_1135" : { - "message" : [ - " is not a valid Spark SQL Data Source." - ] - }, - "_LEGACY_ERROR_TEMP_1136" : { - "message" : [ - "Cannot save interval data type into external storage." - ] - }, - "_LEGACY_ERROR_TEMP_1137" : { - "message" : [ - "Unable to resolve given []." - ] - }, - "_LEGACY_ERROR_TEMP_1138" : { - "message" : [ - "Hive built-in ORC data source must be used with Hive support enabled. Please use the native ORC data source by setting 'spark.sql.orc.impl' to 'native'." - ] - }, - "_LEGACY_ERROR_TEMP_1139" : { - "message" : [ - "Failed to find data source: . Avro is built-in but external data source module since Spark 2.4. Please deploy the application as per the deployment section of Apache Avro Data Source Guide." - ] - }, - "_LEGACY_ERROR_TEMP_1140" : { - "message" : [ - "Failed to find data source: . Please deploy the application as per the deployment section of Structured Streaming + Kafka Integration Guide." - ] - }, - "_LEGACY_ERROR_TEMP_1141" : { - "message" : [ - "Multiple sources found for (), please specify the fully qualified class name." - ] - }, - "_LEGACY_ERROR_TEMP_1142" : { - "message" : [ - "Datasource does not support writing empty or nested empty schemas. Please make sure the data schema has at least one or more column(s)." - ] - }, - "_LEGACY_ERROR_TEMP_1143" : { - "message" : [ - "The data to be inserted needs to have the same number of columns as the target table: target table has column(s) but the inserted data has column(s), which contain partition column(s) having assigned constant values." - ] - }, - "_LEGACY_ERROR_TEMP_1144" : { - "message" : [ - "The data to be inserted needs to have the same number of partition columns as the target table: target table has partition column(s) but the inserted data has partition columns specified." - ] - }, - "_LEGACY_ERROR_TEMP_1145" : { - "message" : [ - " is not a partition column. Partition columns are ." - ] - }, - "_LEGACY_ERROR_TEMP_1146" : { - "message" : [ - "Partition column have multiple values specified, . Please only specify a single value." 
- ] - }, - "_LEGACY_ERROR_TEMP_1147" : { - "message" : [ - "The ordering of partition columns is . All partition columns having constant values need to appear before other partition columns that do not have an assigned constant value." - ] - }, - "_LEGACY_ERROR_TEMP_1148" : { - "message" : [ - "Can only write data to relations with a single path." - ] - }, - "_LEGACY_ERROR_TEMP_1149" : { - "message" : [ - "Fail to rebuild expression: missing key in `translatedFilterToExpr`." - ] - }, - "_LEGACY_ERROR_TEMP_1151" : { - "message" : [ - "Fail to resolve data source for the table
since the table serde property has the duplicated key with extra options specified for this scan operation. To fix this, you can rollback to the legacy behavior of ignoring the extra options by setting the config to `false`, or address the conflicts of the same config." - ] - }, - "_LEGACY_ERROR_TEMP_1153" : { - "message" : [ - "Cannot use for partition column." - ] - }, - "_LEGACY_ERROR_TEMP_1155" : { - "message" : [ - "Partition column `` not found in schema ." - ] - }, - "_LEGACY_ERROR_TEMP_1156" : { - "message" : [ - "Column not found in schema ." - ] - }, - "_LEGACY_ERROR_TEMP_1158" : { - "message" : [ - "Saving data into a view is not allowed." - ] - }, - "_LEGACY_ERROR_TEMP_1159" : { - "message" : [ - "The format of the existing table is ``. It doesn't match the specified format ``." - ] - }, - "_LEGACY_ERROR_TEMP_1160" : { - "message" : [ - "The location of the existing table is ``. It doesn't match the specified location ``." - ] - }, - "_LEGACY_ERROR_TEMP_1161" : { - "message" : [ - "The column number of the existing table () doesn't match the data schema ()." - ] - }, - "_LEGACY_ERROR_TEMP_1162" : { - "message" : [ - "Cannot resolve '' given input columns: []." - ] - }, - "_LEGACY_ERROR_TEMP_1163" : { - "message" : [ - "Specified partitioning does not match that of the existing table .", - "Specified partition columns: [].", - "Existing partition columns: []." - ] - }, - "_LEGACY_ERROR_TEMP_1164" : { - "message" : [ - "Specified bucketing does not match that of the existing table .", - "Specified bucketing: .", - "Existing bucketing: ." - ] - }, - "_LEGACY_ERROR_TEMP_1165" : { - "message" : [ - "It is not allowed to specify partitioning when the table schema is not defined." - ] - }, - "_LEGACY_ERROR_TEMP_1166" : { - "message" : [ - "Bucketing column '' should not be part of partition columns ''." - ] - }, - "_LEGACY_ERROR_TEMP_1167" : { - "message" : [ - "Bucket sorting column '' should not be part of partition columns ''." - ] - }, - "_LEGACY_ERROR_TEMP_1169" : { - "message" : [ - "Requested partitioning does not match the table :", - "Requested partitions: .", - "Table partitions: ." - ] - }, - "_LEGACY_ERROR_TEMP_1171" : { - "message" : [ - "createTableColumnTypes option column not found in schema ." - ] - }, - "_LEGACY_ERROR_TEMP_1172" : { - "message" : [ - "Parquet type not yet supported: ." - ] - }, - "_LEGACY_ERROR_TEMP_1173" : { - "message" : [ - "Illegal Parquet type: ." - ] - }, - "_LEGACY_ERROR_TEMP_1174" : { - "message" : [ - "Unrecognized Parquet type: ." - ] - }, - "_LEGACY_ERROR_TEMP_1175" : { - "message" : [ - "Unsupported data type ." - ] - }, - "_LEGACY_ERROR_TEMP_1181" : { - "message" : [ - "Stream-stream join without equality predicate is not supported." - ] - }, - "_LEGACY_ERROR_TEMP_1182" : { - "message" : [ - "Column are ambiguous. It's probably because you joined several Datasets together, and some of these Datasets are the same. This column points to one of the Datasets but Spark is unable to figure out which one. Please alias the Datasets with different names via `Dataset.as` before joining them, and specify the column using qualified name, e.g. `df.as(\"a\").join(df.as(\"b\"), $\"a.id\" > $\"b.id\")`. You can also set to false to disable this check." - ] - }, - "_LEGACY_ERROR_TEMP_1183" : { - "message" : [ - "Cannot use interval type in the table schema." - ] - }, - "_LEGACY_ERROR_TEMP_1184" : { - "message" : [ - "Catalog does not support ." - ] - }, - "_LEGACY_ERROR_TEMP_1186" : { - "message" : [ - "Multi-part identifier cannot be empty." 
- ] - }, - "_LEGACY_ERROR_TEMP_1187" : { - "message" : [ - "Hive data source can only be used with tables, you can not files of Hive data source directly." - ] - }, - "_LEGACY_ERROR_TEMP_1188" : { - "message" : [ - "There is a 'path' option set and () is called with a path parameter. Either remove the path option, or call () without the parameter. To ignore this check, set '' to 'true'." - ] - }, - "_LEGACY_ERROR_TEMP_1189" : { - "message" : [ - "User specified schema not supported with ``." - ] - }, - "_LEGACY_ERROR_TEMP_1190" : { - "message" : [ - "Temporary view doesn't support streaming write." - ] - }, - "_LEGACY_ERROR_TEMP_1191" : { - "message" : [ - "Streaming into views is not supported." - ] - }, - "_LEGACY_ERROR_TEMP_1192" : { - "message" : [ - "The input source() is different from the table 's data source provider()." - ] - }, - "_LEGACY_ERROR_TEMP_1193" : { - "message" : [ - "Table doesn't support streaming write - ." - ] - }, - "_LEGACY_ERROR_TEMP_1194" : { - "message" : [ - "queryName must be specified for memory sink." - ] - }, - "_LEGACY_ERROR_TEMP_1195" : { - "message" : [ - "'' is not supported with continuous trigger." - ] - }, - "_LEGACY_ERROR_TEMP_1196" : { - "message" : [ - " column not found in existing columns ()." - ] - }, - "_LEGACY_ERROR_TEMP_1197" : { - "message" : [ - "'' does not support partitioning." - ] - }, - "_LEGACY_ERROR_TEMP_1198" : { - "message" : [ - "Function '' cannot process input: (): ." - ] - }, - "_LEGACY_ERROR_TEMP_1199" : { - "message" : [ - "Invalid bound function ': there are arguments but parameters returned from 'inputTypes()'." - ] - }, - "_LEGACY_ERROR_TEMP_1200" : { - "message" : [ - " is not supported for v2 tables." - ] - }, - "_LEGACY_ERROR_TEMP_1201" : { - "message" : [ - "Cannot resolve column name \"\" among ()." - ] - }, - "_LEGACY_ERROR_TEMP_1205" : { - "message" : [ - "Expected only partition pruning predicates: ." - ] - }, - "_LEGACY_ERROR_TEMP_1207" : { - "message" : [ - "The duration and time inputs to window must be an integer, long or string literal." - ] - }, - "_LEGACY_ERROR_TEMP_1210" : { - "message" : [ - "The second argument in should be a boolean literal." - ] - }, - "_LEGACY_ERROR_TEMP_1211" : { - "message" : [ - "Detected implicit cartesian product for join between logical plans", - "", - "and", - "rightPlan", - "Join condition is missing or trivial.", - "Either: use the CROSS JOIN syntax to allow cartesian products between these relations, or: enable implicit cartesian products by setting the configuration variable spark.sql.crossJoin.enabled=true." - ] - }, - "_LEGACY_ERROR_TEMP_1212" : { - "message" : [ - "Found conflicting attributes in the condition joining outer plan:", - "", - "and subplan:", - "." - ] - }, - "_LEGACY_ERROR_TEMP_1213" : { - "message" : [ - "Window expression is empty in ." - ] - }, - "_LEGACY_ERROR_TEMP_1214" : { - "message" : [ - "Found different window function type in ." - ] - }, - "_LEGACY_ERROR_TEMP_1218" : { - "message" : [ - " should be converted to HadoopFsRelation." - ] - }, - "_LEGACY_ERROR_TEMP_1219" : { - "message" : [ - "Hive metastore does not support altering database location." - ] - }, - "_LEGACY_ERROR_TEMP_1221" : { - "message" : [ - "Hive 0.12 doesn't support creating permanent functions. Please use Hive 0.13 or higher." - ] - }, - "_LEGACY_ERROR_TEMP_1222" : { - "message" : [ - "Unknown resource type: ." - ] - }, - "_LEGACY_ERROR_TEMP_1223" : { - "message" : [ - "Invalid field id '' in day-time interval. Supported interval fields: ." 
- ] - }, - "_LEGACY_ERROR_TEMP_1224" : { - "message" : [ - "'interval to ' is invalid." - ] - }, - "_LEGACY_ERROR_TEMP_1225" : { - "message" : [ - "Invalid field id '' in year-month interval. Supported interval fields: ." - ] - }, - "_LEGACY_ERROR_TEMP_1226" : { - "message" : [ - "The SQL config '' was removed in the version . " - ] - }, - "_LEGACY_ERROR_TEMP_1228" : { - "message" : [ - "Decimal scale () cannot be greater than precision ()." - ] - }, - "_LEGACY_ERROR_TEMP_1231" : { - "message" : [ - " is not a valid partition column in table ." - ] - }, - "_LEGACY_ERROR_TEMP_1232" : { - "message" : [ - "Partition spec is invalid. The spec () must match the partition spec () defined in table ''." - ] - }, - "_LEGACY_ERROR_TEMP_1237" : { - "message" : [ - "The list of partition columns with values in partition specification for table '
' in database '' is not a prefix of the list of partition columns defined in the table schema. Expected a prefix of [], but got []." - ] - }, - "_LEGACY_ERROR_TEMP_1239" : { - "message" : [ - "Analyzing column statistics is not supported for column of data type: ." - ] - }, - "_LEGACY_ERROR_TEMP_1241" : { - "message" : [ - "CREATE-TABLE-AS-SELECT cannot create table with location to a non-empty directory . To allow overwriting the existing non-empty directory, set '' to true." - ] - }, - "_LEGACY_ERROR_TEMP_1246" : { - "message" : [ - "Can't find column `` given table data columns ." - ] - }, - "_LEGACY_ERROR_TEMP_1247" : { - "message" : [ - "Operation not allowed: ALTER TABLE SET [SERDE | SERDEPROPERTIES] for a specific partition is not supported for tables created with the datasource API." - ] - }, - "_LEGACY_ERROR_TEMP_1248" : { - "message" : [ - "Operation not allowed: ALTER TABLE SET SERDE is not supported for tables created with the datasource API." - ] - }, - "_LEGACY_ERROR_TEMP_1250" : { - "message" : [ - " is not allowed on since filesource partition management is disabled (spark.sql.hive.manageFilesourcePartitions = false)." - ] - }, - "_LEGACY_ERROR_TEMP_1251" : { - "message" : [ - " is not allowed on since its partition metadata is not stored in the Hive metastore. To import this information into the metastore, run `msck repair table `." - ] - }, - "_LEGACY_ERROR_TEMP_1252" : { - "message" : [ - "Cannot alter a view with ALTER TABLE. Please use ALTER VIEW instead." - ] - }, - "_LEGACY_ERROR_TEMP_1253" : { - "message" : [ - "Cannot alter a table with ALTER VIEW. Please use ALTER TABLE instead." - ] - }, - "_LEGACY_ERROR_TEMP_1255" : { - "message" : [ - "Cannot drop built-in function ''." - ] - }, - "_LEGACY_ERROR_TEMP_1256" : { - "message" : [ - "Cannot refresh built-in function ." - ] - }, - "_LEGACY_ERROR_TEMP_1257" : { - "message" : [ - "Cannot refresh temporary function ." - ] - }, - "_LEGACY_ERROR_TEMP_1259" : { - "message" : [ - "ALTER ADD COLUMNS does not support views. You must drop and re-create the views for adding the new columns. Views:
." - ] - }, - "_LEGACY_ERROR_TEMP_1260" : { - "message" : [ - "ALTER ADD COLUMNS does not support datasource table with type . You must drop and re-create the table for adding the new columns. Tables:
." - ] - }, - "_LEGACY_ERROR_TEMP_1261" : { - "message" : [ - "LOAD DATA is not supported for datasource tables: ." - ] - }, - "_LEGACY_ERROR_TEMP_1262" : { - "message" : [ - "LOAD DATA target table is partitioned, but no partition spec is provided." - ] - }, - "_LEGACY_ERROR_TEMP_1263" : { - "message" : [ - "LOAD DATA target table is partitioned, but number of columns in provided partition spec () do not match number of partitioned columns in table ()." - ] - }, - "_LEGACY_ERROR_TEMP_1264" : { - "message" : [ - "LOAD DATA target table is not partitioned, but a partition spec was provided." - ] - }, - "_LEGACY_ERROR_TEMP_1266" : { - "message" : [ - "Operation not allowed: TRUNCATE TABLE on external tables: ." - ] - }, - "_LEGACY_ERROR_TEMP_1267" : { - "message" : [ - "Operation not allowed: TRUNCATE TABLE ... PARTITION is not supported for tables that are not partitioned: ." - ] - }, - "_LEGACY_ERROR_TEMP_1268" : { - "message" : [ - "Failed to truncate table when removing data of the path: ." - ] - }, - "_LEGACY_ERROR_TEMP_1270" : { - "message" : [ - "SHOW CREATE TABLE is not supported on a temporary view:
." - ] - }, - "_LEGACY_ERROR_TEMP_1271" : { - "message" : [ - "Failed to execute SHOW CREATE TABLE against table
, which is created by Hive and uses the following unsupported feature(s)", - "", - "Please use `SHOW CREATE TABLE
AS SERDE` to show Hive DDL instead." - ] - }, - "_LEGACY_ERROR_TEMP_1272" : { - "message" : [ - "SHOW CREATE TABLE doesn't support transactional Hive table. Please use `SHOW CREATE TABLE
AS SERDE` to show Hive DDL instead." - ] - }, - "_LEGACY_ERROR_TEMP_1273" : { - "message" : [ - "Failed to execute SHOW CREATE TABLE against table
, which is created by Hive and uses the following unsupported serde configuration", - "", - "Please use `SHOW CREATE TABLE
AS SERDE` to show Hive DDL instead." - ] - }, - "_LEGACY_ERROR_TEMP_1274" : { - "message" : [ - "
is a Spark data source table. Use `SHOW CREATE TABLE` without `AS SERDE` instead." - ] - }, - "_LEGACY_ERROR_TEMP_1275" : { - "message" : [ - "Failed to execute SHOW CREATE TABLE against table/view
, which is created by Hive and uses the following unsupported feature(s)", - "." - ] - }, - "_LEGACY_ERROR_TEMP_1276" : { - "message" : [ - "The logical plan that represents the view is not analyzed." - ] - }, - "_LEGACY_ERROR_TEMP_1278" : { - "message" : [ - " is not a view." - ] - }, - "_LEGACY_ERROR_TEMP_1280" : { - "message" : [ - "It is not allowed to create a persisted view from the Dataset API." - ] - }, - "_LEGACY_ERROR_TEMP_1285" : { - "message" : [ - "Since Spark 2.3, the queries from raw JSON/CSV files are disallowed when the", - "referenced columns only include the internal corrupt record column", - "(named _corrupt_record by default). For example:", - "spark.read.schema(schema).csv(file).filter($\"_corrupt_record\".isNotNull).count()", - "and spark.read.schema(schema).csv(file).select(\"_corrupt_record\").show().", - "Instead, you can cache or save the parsed results and then send the same query.", - "For example, val df = spark.read.schema(schema).csv(file).cache() and then", - "df.filter($\"_corrupt_record\".isNotNull).count()." - ] - }, - "_LEGACY_ERROR_TEMP_1286" : { - "message" : [ - "User-defined partition column not found in the JDBC relation: ." - ] - }, - "_LEGACY_ERROR_TEMP_1287" : { - "message" : [ - "Partition column type should be , , or , but found." - ] - }, - "_LEGACY_ERROR_TEMP_1288" : { - "message" : [ - "Table or view '' already exists. SaveMode: ErrorIfExists." - ] - }, - "_LEGACY_ERROR_TEMP_1290" : { - "message" : [ - "Text data source supports only a single column, and you have columns." - ] - }, - "_LEGACY_ERROR_TEMP_1291" : { - "message" : [ - "Can't find required partition column in partition schema ." - ] - }, - "_LEGACY_ERROR_TEMP_1292" : { - "message" : [ - "Temporary view '' should not have specified a database." - ] - }, - "_LEGACY_ERROR_TEMP_1293" : { - "message" : [ - "Hive data source can only be used with tables, you can't use it with CREATE TEMP VIEW USING." - ] - }, - "_LEGACY_ERROR_TEMP_1294" : { - "message" : [ - "The timestamp provided for the '' option is invalid. The expected format is 'YYYY-MM-DDTHH:mm:ss', but the provided timestamp: ." - ] - }, - "_LEGACY_ERROR_TEMP_1295" : { - "message" : [ - "Set a host to read from with option(\"host\", ...)." - ] - }, - "_LEGACY_ERROR_TEMP_1296" : { - "message" : [ - "Set a port to read from with option(\"port\", ...)." - ] - }, - "_LEGACY_ERROR_TEMP_1297" : { - "message" : [ - "IncludeTimestamp must be set to either \"true\" or \"false\"." - ] - }, - "_LEGACY_ERROR_TEMP_1298" : { - "message" : [ - "checkpointLocation must be specified either through option(\"checkpointLocation\", ...) or SparkSession.conf.set(\"\", ...)." - ] - }, - "_LEGACY_ERROR_TEMP_1299" : { - "message" : [ - "This query does not support recovering from checkpoint location. Delete to start over." - ] - }, - "_LEGACY_ERROR_TEMP_1300" : { - "message" : [ - "Unable to find the column `` given []." - ] - }, - "_LEGACY_ERROR_TEMP_1305" : { - "message" : [ - "Unsupported TableChange in JDBC catalog." - ] - }, - "_LEGACY_ERROR_TEMP_1306" : { - "message" : [ - "There is a 'path' or 'paths' option set and load() is called with path parameters. Either remove the path option if it's the same as the path parameter, or add it to the load() parameter if you do want to read multiple paths. To ignore this check, set '' to 'true'." - ] - }, - "_LEGACY_ERROR_TEMP_1307" : { - "message" : [ - "There is a 'path' option set and save() is called with a path parameter. Either remove the path option, or call save() without the parameter. 
To ignore this check, set '' to 'true'." - ] - }, - "_LEGACY_ERROR_TEMP_1308" : { - "message" : [ - "TableProvider implementation cannot be written with mode, please use Append or Overwrite modes instead." - ] - }, - "_LEGACY_ERROR_TEMP_1309" : { - "message" : [ - "insertInto() can't be used together with partitionBy(). Partition columns have already been defined for the table. It is not necessary to use partitionBy()." - ] - }, - "_LEGACY_ERROR_TEMP_1310" : { - "message" : [ - "Couldn't find a catalog to handle the identifier ." - ] - }, - "_LEGACY_ERROR_TEMP_1312" : { - "message" : [ - "'' does not support bucketBy right now." - ] - }, - "_LEGACY_ERROR_TEMP_1313" : { - "message" : [ - "'' does not support bucketBy and sortBy right now." - ] - }, - "_LEGACY_ERROR_TEMP_1316" : { - "message" : [ - "Invalid partition transformation: ." - ] - }, - "_LEGACY_ERROR_TEMP_1319" : { - "message" : [ - "Invalid join type in joinWith: ." - ] - }, - "_LEGACY_ERROR_TEMP_1320" : { - "message" : [ - "Typed column that needs input type and schema cannot be passed in untyped `select` API. Use the typed `Dataset.select` API instead." - ] - }, - "_LEGACY_ERROR_TEMP_1321" : { - "message" : [ - "Invalid view name: ." - ] - }, - "_LEGACY_ERROR_TEMP_1322" : { - "message" : [ - "Invalid number of buckets: bucket(, )." - ] - }, - "_LEGACY_ERROR_TEMP_1323" : { - "message" : [ - "\"\" is not a numeric column. Aggregation function can only be applied on a numeric column." - ] - }, - "_LEGACY_ERROR_TEMP_1324" : { - "message" : [ - "The pivot column has more than distinct values, this could indicate an error. If this was intended, set to at least the number of distinct values of the pivot column." - ] - }, - "_LEGACY_ERROR_TEMP_1325" : { - "message" : [ - "Cannot modify the value of a static config: ." - ] - }, - "_LEGACY_ERROR_TEMP_1327" : { - "message" : [ - "Command execution is not supported in runner ." - ] - }, - "_LEGACY_ERROR_TEMP_1328" : { - "message" : [ - "Can not instantiate class , please make sure it has public non argument constructor." - ] - }, - "_LEGACY_ERROR_TEMP_1329" : { - "message" : [ - "Can not load class , please make sure it is on the classpath." - ] - }, - "_LEGACY_ERROR_TEMP_1330" : { - "message" : [ - "Class doesn't implement interface UserDefinedAggregateFunction." - ] - }, - "_LEGACY_ERROR_TEMP_1331" : { - "message" : [ - "Missing field in table
with schema:", - "." - ] - }, - "_LEGACY_ERROR_TEMP_1332" : { - "message" : [ - "" - ] - }, - "_LEGACY_ERROR_TEMP_1334" : { - "message" : [ - "Cannot specify both version and timestamp when time travelling the table." - ] - }, - "_LEGACY_ERROR_TEMP_1338" : { - "message" : [ - "Sinks cannot request distribution and ordering in continuous execution mode." - ] - }, - "_LEGACY_ERROR_TEMP_1339" : { - "message" : [ - "Failed to execute INSERT INTO command because the VALUES list contains a DEFAULT column reference as part of another expression; this is not allowed." - ] - }, - "_LEGACY_ERROR_TEMP_1340" : { - "message" : [ - "Failed to execute UPDATE command because the SET list contains a DEFAULT column reference as part of another expression; this is not allowed." - ] - }, - "_LEGACY_ERROR_TEMP_1343" : { - "message" : [ - "Failed to execute MERGE INTO command because one of its INSERT or UPDATE assignments contains a DEFAULT column reference as part of another expression; this is not allowed." - ] - }, - "_LEGACY_ERROR_TEMP_1344" : { - "message" : [ - "Invalid DEFAULT value for column : fails to parse as a valid literal value." - ] - }, - "_LEGACY_ERROR_TEMP_1345" : { - "message" : [ - "Failed to execute command because DEFAULT values are not supported for target data source with table provider: \"\"." - ] - }, - "_LEGACY_ERROR_TEMP_1346" : { - "message" : [ - "Failed to execute command because DEFAULT values are not supported when adding new columns to previously existing target data source with table provider: \"\"." - ] - }, - "_LEGACY_ERROR_TEMP_2000" : { - "message" : [ - ". If necessary set to false to bypass this error." - ] - }, - "_LEGACY_ERROR_TEMP_2003" : { - "message" : [ - "Unsuccessful try to zip maps with unique keys due to exceeding the array size limit ." - ] - }, - "_LEGACY_ERROR_TEMP_2005" : { - "message" : [ - "Type does not support ordered operations." - ] - }, - "_LEGACY_ERROR_TEMP_2011" : { - "message" : [ - "Unexpected data type ." - ] - }, - "_LEGACY_ERROR_TEMP_2013" : { - "message" : [ - "Negative values found in " - ] - }, - "_LEGACY_ERROR_TEMP_2015" : { - "message" : [ - "Cannot generate code for incomparable type: ." - ] - }, - "_LEGACY_ERROR_TEMP_2016" : { - "message" : [ - "Can not interpolate into code block." - ] - }, - "_LEGACY_ERROR_TEMP_2017" : { - "message" : [ - "not resolved." - ] - }, - "_LEGACY_ERROR_TEMP_2018" : { - "message" : [ - "class `` is not supported by `MapObjects` as resulting collection." - ] - }, - "_LEGACY_ERROR_TEMP_2020" : { - "message" : [ - "Couldn't find a valid constructor on ." - ] - }, - "_LEGACY_ERROR_TEMP_2021" : { - "message" : [ - "Couldn't find a primary constructor on ." - ] - }, - "_LEGACY_ERROR_TEMP_2023" : { - "message" : [ - "Unresolved encoder expected, but was found." - ] - }, - "_LEGACY_ERROR_TEMP_2024" : { - "message" : [ - "Only expression encoders are supported for now." - ] - }, - "_LEGACY_ERROR_TEMP_2025" : { - "message" : [ - " must override either or ." - ] - }, - "_LEGACY_ERROR_TEMP_2026" : { - "message" : [ - "Failed to convert value (class of ) with the type of to JSON." - ] - }, - "_LEGACY_ERROR_TEMP_2027" : { - "message" : [ - "Unexpected operator in correlated subquery." - ] - }, - "_LEGACY_ERROR_TEMP_2028" : { - "message" : [ - "This line should be unreachable." - ] - }, - "_LEGACY_ERROR_TEMP_2030" : { - "message" : [ - "Can not handle nested schema yet... plan ." - ] - }, - "_LEGACY_ERROR_TEMP_2031" : { - "message" : [ - "The input external row cannot be null." 
- ] - }, - "_LEGACY_ERROR_TEMP_2032" : { - "message" : [ - "" - ] - }, - "_LEGACY_ERROR_TEMP_2033" : { - "message" : [ - "Unable to create database as failed to create its directory ." - ] - }, - "_LEGACY_ERROR_TEMP_2034" : { - "message" : [ - "Unable to drop database as failed to delete its directory ." - ] - }, - "_LEGACY_ERROR_TEMP_2035" : { - "message" : [ - "Unable to create table
as failed to create its directory ."
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_2036" : {
-    "message" : [
-      "Unable to delete partition path ."
-    ]
-  },
-  "_LEGACY_ERROR_TEMP_2037" : {
-    "message" : [
-      "Unable to drop table
as failed to delete its directory ." - ] - }, - "_LEGACY_ERROR_TEMP_2038" : { - "message" : [ - "Unable to rename table to as failed to rename its directory ." - ] - }, - "_LEGACY_ERROR_TEMP_2039" : { - "message" : [ - "Unable to create partition path ." - ] - }, - "_LEGACY_ERROR_TEMP_2040" : { - "message" : [ - "Unable to rename partition path ." - ] - }, - "_LEGACY_ERROR_TEMP_2041" : { - "message" : [ - " is not implemented." - ] - }, - "_LEGACY_ERROR_TEMP_2042" : { - "message" : [ - ". If necessary set to false to bypass this error." - ] - }, - "_LEGACY_ERROR_TEMP_2043" : { - "message" : [ - "- caused overflow." - ] - }, - "_LEGACY_ERROR_TEMP_2045" : { - "message" : [ - "Unsupported table change: " - ] - }, - "_LEGACY_ERROR_TEMP_2046" : { - "message" : [ - "[BUG] Not a DataSourceRDDPartition: ." - ] - }, - "_LEGACY_ERROR_TEMP_2047" : { - "message" : [ - "'path' is not specified." - ] - }, - "_LEGACY_ERROR_TEMP_2048" : { - "message" : [ - "Schema must be specified when creating a streaming source DataFrame. If some files already exist in the directory, then depending on the file format you may be able to create a static DataFrame on that directory with 'spark.read.load(directory)' and infer schema from it." - ] - }, - "_LEGACY_ERROR_TEMP_2049" : { - "message" : [ - "Data source does not support streamed ." - ] - }, - "_LEGACY_ERROR_TEMP_2050" : { - "message" : [ - "Expected exactly one path to be specified, but got: ." - ] - }, - "_LEGACY_ERROR_TEMP_2052" : { - "message" : [ - " was removed in Spark 2.0. Please check if your library is compatible with Spark 2.0." - ] - }, - "_LEGACY_ERROR_TEMP_2053" : { - "message" : [ - "buildReader is not supported for ." - ] - }, - "_LEGACY_ERROR_TEMP_2055" : { - "message" : [ - "", - "It is possible the underlying files have been updated. You can explicitly invalidate the cache in Spark by running 'REFRESH TABLE tableName' command in SQL or by recreating the Dataset/DataFrame involved." - ] - }, - "_LEGACY_ERROR_TEMP_2056" : { - "message" : [ - "Unable to clear output directory prior to writing to it." - ] - }, - "_LEGACY_ERROR_TEMP_2057" : { - "message" : [ - "Unable to clear partition directory prior to writing to it." - ] - }, - "_LEGACY_ERROR_TEMP_2058" : { - "message" : [ - "Failed to cast value `` to `` for partition column ``." - ] - }, - "_LEGACY_ERROR_TEMP_2059" : { - "message" : [ - "End of stream." - ] - }, - "_LEGACY_ERROR_TEMP_2060" : { - "message" : [ - "The fallback v1 relation reports inconsistent schema:", - "Schema of v2 scan: .", - "Schema of v1 relation: ." - ] - }, - "_LEGACY_ERROR_TEMP_2061" : { - "message" : [ - "No records should be returned from EmptyDataReader." - ] - }, - "_LEGACY_ERROR_TEMP_2062" : { - "message" : [ - "", - "It is possible the underlying files have been updated. You can explicitly invalidate the cache in Spark by recreating the Dataset/DataFrame involved." - ] - }, - "_LEGACY_ERROR_TEMP_2063" : { - "message" : [ - "Parquet column cannot be converted in file . Column: , Expected: , Found: ." - ] - }, - "_LEGACY_ERROR_TEMP_2064" : { - "message" : [ - "Encountered error while reading file . Details:" - ] - }, - "_LEGACY_ERROR_TEMP_2065" : { - "message" : [ - "Cannot create columnar reader." - ] - }, - "_LEGACY_ERROR_TEMP_2066" : { - "message" : [ - "Invalid namespace name: ." - ] - }, - "_LEGACY_ERROR_TEMP_2067" : { - "message" : [ - "Unsupported partition transform: ." - ] - }, - "_LEGACY_ERROR_TEMP_2068" : { - "message" : [ - "Missing database location." 
- ] - }, - "_LEGACY_ERROR_TEMP_2069" : { - "message" : [ - "Cannot remove reserved property: ." - ] - }, - "_LEGACY_ERROR_TEMP_2070" : { - "message" : [ - "Writing job failed." - ] - }, - "_LEGACY_ERROR_TEMP_2071" : { - "message" : [ - "Commit denied for partition (task , attempt , stage .)." - ] - }, - "_LEGACY_ERROR_TEMP_2073" : { - "message" : [ - "Cannot create JDBC table with partition." - ] - }, - "_LEGACY_ERROR_TEMP_2074" : { - "message" : [ - "user-specified schema." - ] - }, - "_LEGACY_ERROR_TEMP_2075" : { - "message" : [ - "Write is not supported for binary file data source." - ] - }, - "_LEGACY_ERROR_TEMP_2076" : { - "message" : [ - "The length of is , which exceeds the max length allowed: ." - ] - }, - "_LEGACY_ERROR_TEMP_2077" : { - "message" : [ - "Unsupported field name: ." - ] - }, - "_LEGACY_ERROR_TEMP_2078" : { - "message" : [ - "Both '' and '' can not be specified at the same time." - ] - }, - "_LEGACY_ERROR_TEMP_2079" : { - "message" : [ - "Option '' or '' is required." - ] - }, - "_LEGACY_ERROR_TEMP_2080" : { - "message" : [ - "Option `` can not be empty." - ] - }, - "_LEGACY_ERROR_TEMP_2081" : { - "message" : [ - "Invalid value `` for parameter ``. This can be `NONE`, `READ_UNCOMMITTED`, `READ_COMMITTED`, `REPEATABLE_READ` or `SERIALIZABLE`." - ] - }, - "_LEGACY_ERROR_TEMP_2082" : { - "message" : [ - "Can't get JDBC type for ." - ] - }, - "_LEGACY_ERROR_TEMP_2083" : { - "message" : [ - "Unsupported type ." - ] - }, - "_LEGACY_ERROR_TEMP_2084" : { - "message" : [ - "Unsupported array element type based on binary." - ] - }, - "_LEGACY_ERROR_TEMP_2085" : { - "message" : [ - "Nested arrays unsupported." - ] - }, - "_LEGACY_ERROR_TEMP_2086" : { - "message" : [ - "Can't translate non-null value for field ." - ] - }, - "_LEGACY_ERROR_TEMP_2087" : { - "message" : [ - "Invalid value `` for parameter `` in table writing via JDBC. The minimum value is 1." - ] - }, - "_LEGACY_ERROR_TEMP_2088" : { - "message" : [ - " is not supported yet." - ] - }, - "_LEGACY_ERROR_TEMP_2089" : { - "message" : [ - "DataType: ." - ] - }, - "_LEGACY_ERROR_TEMP_2090" : { - "message" : [ - "The input filter of should be fully convertible." - ] - }, - "_LEGACY_ERROR_TEMP_2093" : { - "message" : [ - "Found duplicate field(s) \"\": in case-insensitive mode." - ] - }, - "_LEGACY_ERROR_TEMP_2094" : { - "message" : [ - "Found duplicate field(s) \"\": in id mapping mode." - ] - }, - "_LEGACY_ERROR_TEMP_2095" : { - "message" : [ - "Failed to merge incompatible schemas and ." - ] - }, - "_LEGACY_ERROR_TEMP_2096" : { - "message" : [ - " is not supported temporarily." - ] - }, - "_LEGACY_ERROR_TEMP_2097" : { - "message" : [ - "Could not execute broadcast in secs. You can increase the timeout for broadcasts via or disable broadcast join by setting to -1." - ] - }, - "_LEGACY_ERROR_TEMP_2098" : { - "message" : [ - "Could not compare cost with ." - ] - }, - "_LEGACY_ERROR_TEMP_2100" : { - "message" : [ - "not support type: ." - ] - }, - "_LEGACY_ERROR_TEMP_2101" : { - "message" : [ - "Not support non-primitive type now." - ] - }, - "_LEGACY_ERROR_TEMP_2102" : { - "message" : [ - "Unsupported type: ." - ] - }, - "_LEGACY_ERROR_TEMP_2103" : { - "message" : [ - "Dictionary encoding should not be used because of dictionary overflow." - ] - }, - "_LEGACY_ERROR_TEMP_2104" : { - "message" : [ - "End of the iterator." - ] - }, - "_LEGACY_ERROR_TEMP_2105" : { - "message" : [ - "Could not allocate memory to grow BytesToBytesMap." 
- ] - }, - "_LEGACY_ERROR_TEMP_2106" : { - "message" : [ - "Can't acquire bytes memory to build hash relation, got bytes." - ] - }, - "_LEGACY_ERROR_TEMP_2107" : { - "message" : [ - "There is not enough memory to build hash map." - ] - }, - "_LEGACY_ERROR_TEMP_2108" : { - "message" : [ - "Does not support row that is larger than 256M." - ] - }, - "_LEGACY_ERROR_TEMP_2109" : { - "message" : [ - "Cannot build HashedRelation with more than 1/3 billions unique keys." - ] - }, - "_LEGACY_ERROR_TEMP_2110" : { - "message" : [ - "Can not build a HashedRelation that is larger than 8G." - ] - }, - "_LEGACY_ERROR_TEMP_2111" : { - "message" : [ - "failed to push a row into ." - ] - }, - "_LEGACY_ERROR_TEMP_2112" : { - "message" : [ - "Unexpected window function frame ." - ] - }, - "_LEGACY_ERROR_TEMP_2113" : { - "message" : [ - "Unable to parse as a percentile." - ] - }, - "_LEGACY_ERROR_TEMP_2114" : { - "message" : [ - " is not a recognised statistic." - ] - }, - "_LEGACY_ERROR_TEMP_2115" : { - "message" : [ - "Unknown column: ." - ] - }, - "_LEGACY_ERROR_TEMP_2116" : { - "message" : [ - "Unexpected: ." - ] - }, - "_LEGACY_ERROR_TEMP_2120" : { - "message" : [ - "Do not support array of type ." - ] - }, - "_LEGACY_ERROR_TEMP_2121" : { - "message" : [ - "Do not support type ." - ] - }, - "_LEGACY_ERROR_TEMP_2124" : { - "message" : [ - "Failed to merge decimal types with incompatible scale and ." - ] - }, - "_LEGACY_ERROR_TEMP_2126" : { - "message" : [ - "Unsuccessful attempt to build maps with elements due to exceeding the map size limit ." - ] - }, - "_LEGACY_ERROR_TEMP_2128" : { - "message" : [ - "The key array and value array of MapData must have the same length." - ] - }, - "_LEGACY_ERROR_TEMP_2129" : { - "message" : [ - "Conflict found: Field differs from derived from ." - ] - }, - "_LEGACY_ERROR_TEMP_2130" : { - "message" : [ - "Fail to recognize '' pattern in the DateTimeFormatter. You can form a valid datetime pattern with the guide from '/sql-ref-datetime-pattern.html'." - ] - }, - "_LEGACY_ERROR_TEMP_2131" : { - "message" : [ - "Exception when registering StreamingQueryListener." - ] - }, - "_LEGACY_ERROR_TEMP_2133" : { - "message" : [ - "Cannot parse field name , field value , [] as target spark data type []." - ] - }, - "_LEGACY_ERROR_TEMP_2134" : { - "message" : [ - "Cannot parse field value for pattern as target spark data type []." - ] - }, - "_LEGACY_ERROR_TEMP_2138" : { - "message" : [ - "Cannot have circular references in bean class, but got the circular reference of class ." - ] - }, - "_LEGACY_ERROR_TEMP_2139" : { - "message" : [ - "cannot have circular references in class, but got the circular reference of class ." - ] - }, - "_LEGACY_ERROR_TEMP_2140" : { - "message" : [ - "`` is not a valid identifier of Java and cannot be used as field name", - "." - ] - }, - "_LEGACY_ERROR_TEMP_2142" : { - "message" : [ - "Attributes for type is not supported." - ] - }, - "_LEGACY_ERROR_TEMP_2144" : { - "message" : [ - "Unable to find constructor for . This could happen if is an interface, or a trait without companion object constructor." - ] - }, - "_LEGACY_ERROR_TEMP_2145" : { - "message" : [ - " cannot be more than one character." - ] - }, - "_LEGACY_ERROR_TEMP_2146" : { - "message" : [ - " should be an integer. Found ." - ] - }, - "_LEGACY_ERROR_TEMP_2147" : { - "message" : [ - " flag can be true or false." - ] - }, - "_LEGACY_ERROR_TEMP_2148" : { - "message" : [ - "null value found but field is not nullable." 
- ] - }, - "_LEGACY_ERROR_TEMP_2150" : { - "message" : [ - "Due to Scala's limited support of tuple, tuple with more than 22 elements are not supported." - ] - }, - "_LEGACY_ERROR_TEMP_2151" : { - "message" : [ - "Error while decoding: ", - "." - ] - }, - "_LEGACY_ERROR_TEMP_2152" : { - "message" : [ - "Error while encoding: ", - "." - ] - }, - "_LEGACY_ERROR_TEMP_2153" : { - "message" : [ - "class has unexpected serializer: ." - ] - }, - "_LEGACY_ERROR_TEMP_2154" : { - "message" : [ - "Failed to get outer pointer for ." - ] - }, - "_LEGACY_ERROR_TEMP_2155" : { - "message" : [ - " is not annotated with SQLUserDefinedType nor registered with UDTRegistration.}" - ] - }, - "_LEGACY_ERROR_TEMP_2156" : { - "message" : [ - "The size function doesn't support the operand type ." - ] - }, - "_LEGACY_ERROR_TEMP_2157" : { - "message" : [ - "Unexpected value for start in function : SQL array indices start at 1." - ] - }, - "_LEGACY_ERROR_TEMP_2158" : { - "message" : [ - "Unexpected value for length in function : length must be greater than or equal to 0." - ] - }, - "_LEGACY_ERROR_TEMP_2159" : { - "message" : [ - "Unsuccessful try to concat arrays with elements due to exceeding the array size limit ." - ] - }, - "_LEGACY_ERROR_TEMP_2160" : { - "message" : [ - "Unsuccessful try to flatten an array of arrays with elements due to exceeding the array size limit ." - ] - }, - "_LEGACY_ERROR_TEMP_2161" : { - "message" : [ - "Unsuccessful try to create array with elements due to exceeding the array size limit ." - ] - }, - "_LEGACY_ERROR_TEMP_2162" : { - "message" : [ - "Unsuccessful try to union arrays with elements due to exceeding the array size limit ." - ] - }, - "_LEGACY_ERROR_TEMP_2163" : { - "message" : [ - "Initial type must be a ." - ] - }, - "_LEGACY_ERROR_TEMP_2164" : { - "message" : [ - "Initial type must be an , a or a ." - ] - }, - "_LEGACY_ERROR_TEMP_2165" : { - "message" : [ - "Malformed records are detected in schema inference. Parse Mode: ." - ] - }, - "_LEGACY_ERROR_TEMP_2166" : { - "message" : [ - "Malformed JSON." - ] - }, - "_LEGACY_ERROR_TEMP_2167" : { - "message" : [ - "Malformed records are detected in schema inference. Parse Mode: . Reasons: Failed to infer a common schema. Struct types are expected, but `` was found." - ] - }, - "_LEGACY_ERROR_TEMP_2168" : { - "message" : [ - "Decorrelate inner query through is not supported." - ] - }, - "_LEGACY_ERROR_TEMP_2169" : { - "message" : [ - "This method should not be called in the analyzer." - ] - }, - "_LEGACY_ERROR_TEMP_2170" : { - "message" : [ - "Cannot safely merge SERDEPROPERTIES:", - "", - "", - "The conflict keys: ." - ] - }, - "_LEGACY_ERROR_TEMP_2171" : { - "message" : [ - "Not supported pair: , at ()." - ] - }, - "_LEGACY_ERROR_TEMP_2172" : { - "message" : [ - "Once strategy's idempotence is broken for batch ", - "." - ] - }, - "_LEGACY_ERROR_TEMP_2175" : { - "message" : [ - "Rule id not found for . Please modify RuleIdCollection.scala if you are adding a new rule." - ] - }, - "_LEGACY_ERROR_TEMP_2176" : { - "message" : [ - "Cannot create array with elements of data due to exceeding the limit elements for ArrayData. " - ] - }, - "_LEGACY_ERROR_TEMP_2178" : { - "message" : [ - "Remote operations not supported." - ] - }, - "_LEGACY_ERROR_TEMP_2179" : { - "message" : [ - "HiveServer2 Kerberos principal or keytab is not correctly configured." - ] - }, - "_LEGACY_ERROR_TEMP_2180" : { - "message" : [ - "Parent SparkUI to attach this tab to not found." 
- ] - }, - "_LEGACY_ERROR_TEMP_2181" : { - "message" : [ - "inferSchema is not supported for hive data source." - ] - }, - "_LEGACY_ERROR_TEMP_2182" : { - "message" : [ - "Requested partitioning does not match the table:", - "Requested partitions: .", - "Table partitions: ." - ] - }, - "_LEGACY_ERROR_TEMP_2183" : { - "message" : [ - "Dynamic partition key is not among written partition paths." - ] - }, - "_LEGACY_ERROR_TEMP_2184" : { - "message" : [ - "Cannot remove partition directory ''." - ] - }, - "_LEGACY_ERROR_TEMP_2185" : { - "message" : [ - "Cannot create staging directory: " - ] - }, - "_LEGACY_ERROR_TEMP_2186" : { - "message" : [ - "The SerDe interface removed since Hive 2.3(HIVE-15167). Please migrate your custom SerDes to Hive 2.3. See HIVE-15167 for more details." - ] - }, - "_LEGACY_ERROR_TEMP_2187" : { - "message" : [ - ", db: , table: ." - ] - }, - "_LEGACY_ERROR_TEMP_2189" : { - "message" : [ - "Hive 2.2 and lower versions don't support getTablesByType. Please use Hive 2.3 or higher version." - ] - }, - "_LEGACY_ERROR_TEMP_2190" : { - "message" : [ - "DROP TABLE ... PURGE." - ] - }, - "_LEGACY_ERROR_TEMP_2191" : { - "message" : [ - "ALTER TABLE ... DROP PARTITION ... PURGE." - ] - }, - "_LEGACY_ERROR_TEMP_2192" : { - "message" : [ - "Partition filter cannot have both `\"` and `'` characters." - ] - }, - "_LEGACY_ERROR_TEMP_2193" : { - "message" : [ - "Caught Hive MetaException attempting to get partition metadata by filter from Hive. You can set the Spark configuration setting to true to work around this problem, however this will result in degraded performance. Please report a bug: https://issues.apache.org/jira/browse/SPARK." - ] - }, - "_LEGACY_ERROR_TEMP_2194" : { - "message" : [ - "Unsupported Hive Metastore version . Please set with a valid version." - ] - }, - "_LEGACY_ERROR_TEMP_2195" : { - "message" : [ - " when creating Hive client using classpath: Please make sure that jars for your version of hive and hadoop are included in the paths passed to ." - ] - }, - "_LEGACY_ERROR_TEMP_2196" : { - "message" : [ - "Unable to fetch tables of db ." - ] - }, - "_LEGACY_ERROR_TEMP_2197" : { - "message" : [ - "LOCATION clause illegal for view partition." - ] - }, - "_LEGACY_ERROR_TEMP_2198" : { - "message" : [ - "Failed to rename as already exists." - ] - }, - "_LEGACY_ERROR_TEMP_2200" : { - "message" : [ - "Error: we detected a possible problem with the location of your \"_spark_metadata\"", - "directory and you likely need to move it before restarting this query.", - "", - "Earlier version of Spark incorrectly escaped paths when writing out the", - "\"_spark_metadata\" directory for structured streaming. While this was corrected in", - "Spark 3.0, it appears that your query was started using an earlier version that", - "", - "Correct \"_spark_metadata\" Directory: ", - "Incorrect \"_spark_metadata\" Directory: ", - "", - "Please move the data from the incorrect directory to the correct one, delete the", - "incorrect directory, and then restart this query. If you believe you are receiving", - "this message in error, you can disable it with the SQL conf", - "." - ] - }, - "_LEGACY_ERROR_TEMP_2201" : { - "message" : [ - "Partition column not found in schema ." - ] - }, - "_LEGACY_ERROR_TEMP_2203" : { - "message" : [ - "Cannot set timeout duration without enabling processing time timeout in [map|flatMap]GroupsWithState." 
- ] - }, - "_LEGACY_ERROR_TEMP_2204" : { - "message" : [ - "Cannot get event time watermark timestamp without setting watermark before [map|flatMap]GroupsWithState." - ] - }, - "_LEGACY_ERROR_TEMP_2205" : { - "message" : [ - "Cannot set timeout timestamp without enabling event time timeout in [map|flatMap]GroupsWithState." - ] - }, - "_LEGACY_ERROR_TEMP_2207" : { - "message" : [ - "Multiple streaming queries are concurrently using ." - ] - }, - "_LEGACY_ERROR_TEMP_2208" : { - "message" : [ - " does not support adding files with an absolute path." - ] - }, - "_LEGACY_ERROR_TEMP_2209" : { - "message" : [ - "Data source does not support microbatch processing.", - "", - "Either the data source is disabled at", - "SQLConf.get.DISABLED_V2_STREAMING_MICROBATCH_READERS.key (The disabled sources", - "are []) or the table
does not have MICRO_BATCH_READ", - "capability. Meanwhile, the fallback, data source v1, is not available.\"" - ] - }, - "_LEGACY_ERROR_TEMP_2210" : { - "message" : [ - "StreamingRelationExec cannot be executed." - ] - }, - "_LEGACY_ERROR_TEMP_2211" : { - "message" : [ - "Invalid output mode: ." - ] - }, - "_LEGACY_ERROR_TEMP_2212" : { - "message" : [ - "Invalid catalog name: ." - ] - }, - "_LEGACY_ERROR_TEMP_2214" : { - "message" : [ - "Plugin class for catalog '' does not implement CatalogPlugin: ." - ] - }, - "_LEGACY_ERROR_TEMP_2215" : { - "message" : [ - "Cannot find catalog plugin class for catalog '': ." - ] - }, - "_LEGACY_ERROR_TEMP_2216" : { - "message" : [ - "Failed to find public no-arg constructor for catalog '': )." - ] - }, - "_LEGACY_ERROR_TEMP_2217" : { - "message" : [ - "Failed to call public no-arg constructor for catalog '': )." - ] - }, - "_LEGACY_ERROR_TEMP_2218" : { - "message" : [ - "Cannot instantiate abstract catalog plugin class for catalog '': ." - ] - }, - "_LEGACY_ERROR_TEMP_2219" : { - "message" : [ - "Failed during instantiating constructor for catalog '': ." - ] - }, - "_LEGACY_ERROR_TEMP_2220" : { - "message" : [ - "" - ] - }, - "_LEGACY_ERROR_TEMP_2222" : { - "message" : [ - "Cannot mutate ReadOnlySQLConf." - ] - }, - "_LEGACY_ERROR_TEMP_2223" : { - "message" : [ - "Cannot clone/copy ReadOnlySQLConf." - ] - }, - "_LEGACY_ERROR_TEMP_2224" : { - "message" : [ - "Cannot get SQLConf inside scheduler event loop thread." - ] - }, - "_LEGACY_ERROR_TEMP_2225" : { - "message" : [ - "" - ] - }, - "_LEGACY_ERROR_TEMP_2226" : { - "message" : [ - "null literals can't be casted to ." - ] - }, - "_LEGACY_ERROR_TEMP_2227" : { - "message" : [ - " is not an UserDefinedType. Please make sure registering an UserDefinedType for ." - ] - }, - "_LEGACY_ERROR_TEMP_2228" : { - "message" : [ - "Can not load in UserDefinedType for user class ." - ] - }, - "_LEGACY_ERROR_TEMP_2229" : { - "message" : [ - " is not a public class. Only public classes are supported." - ] - }, - "_LEGACY_ERROR_TEMP_2230" : { - "message" : [ - "Primitive types are not supported." - ] - }, - "_LEGACY_ERROR_TEMP_2231" : { - "message" : [ - "fieldIndex on a Row without schema is undefined." - ] - }, - "_LEGACY_ERROR_TEMP_2232" : { - "message" : [ - "Value at index is null." - ] - }, - "_LEGACY_ERROR_TEMP_2233" : { - "message" : [ - "Only Data Sources providing FileFormat are supported: ." - ] - }, - "_LEGACY_ERROR_TEMP_2234" : { - "message" : [ - "Failed to set original ACL back to the created path: . Exception: " - ] - }, - "_LEGACY_ERROR_TEMP_2235" : { - "message" : [ - "Multiple failures in stage materialization." - ] - }, - "_LEGACY_ERROR_TEMP_2236" : { - "message" : [ - "Unrecognized compression scheme type ID: ." - ] - }, - "_LEGACY_ERROR_TEMP_2237" : { - "message" : [ - ".getParentLogger is not yet implemented." - ] - }, - "_LEGACY_ERROR_TEMP_2238" : { - "message" : [ - "Unable to create Parquet converter for whose Parquet type is without decimal metadata. Please read this column/field as Spark BINARY type." - ] - }, - "_LEGACY_ERROR_TEMP_2239" : { - "message" : [ - "Unable to create Parquet converter for decimal type whose Parquet type is . Parquet DECIMAL type can only be backed by INT32, INT64, FIXED_LEN_BYTE_ARRAY, or BINARY." - ] - }, - "_LEGACY_ERROR_TEMP_2240" : { - "message" : [ - "Unable to create Parquet converter for data type whose Parquet type is ." - ] - }, - "_LEGACY_ERROR_TEMP_2241" : { - "message" : [ - "Nonatomic partition table can not add multiple partitions." 
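_LEGACY_ERROR_TEMP_2209, whose tail opens this hunk, points at `SQLConf.get.DISABLED_V2_STREAMING_MICROBATCH_READERS`. A sketch of the corresponding runtime config, assuming the key is `spark.sql.streaming.disabledV2MicroBatchReaders`; the class name below is a made-up example:

```scala
import org.apache.spark.sql.SparkSession

val spark = SparkSession.builder().master("local[*]").appName("v2-readers").getOrCreate()

// Comma-separated, fully qualified DataSourceV2 class names whose micro-batch
// readers should be disabled; a source without a v1 fallback then fails as above.
spark.conf.set("spark.sql.streaming.disabledV2MicroBatchReaders", "org.example.MySource")
println(spark.conf.get("spark.sql.streaming.disabledV2MicroBatchReaders"))
```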
- ] - }, - "_LEGACY_ERROR_TEMP_2242" : { - "message" : [ - " source does not support user-specified schema." - ] - }, - "_LEGACY_ERROR_TEMP_2243" : { - "message" : [ - "Nonatomic partition table can not drop multiple partitions." - ] - }, - "_LEGACY_ERROR_TEMP_2244" : { - "message" : [ - "The table does not support truncation of multiple partitions." - ] - }, - "_LEGACY_ERROR_TEMP_2245" : { - "message" : [ - "Table does not support overwrite by expression:
." - ] - }, - "_LEGACY_ERROR_TEMP_2246" : { - "message" : [ - "Table does not support dynamic partition overwrite:
." - ] - }, - "_LEGACY_ERROR_TEMP_2248" : { - "message" : [ - "Cannot broadcast the table over rows: rows." - ] - }, - "_LEGACY_ERROR_TEMP_2249" : { - "message" : [ - "Cannot broadcast the table that is larger than : ." - ] - }, - "_LEGACY_ERROR_TEMP_2250" : { - "message" : [ - "Not enough memory to build and broadcast the table to all worker nodes. As a workaround, you can either disable broadcast by setting to -1 or increase the spark driver memory by setting to a higher value" - ] - }, - "_LEGACY_ERROR_TEMP_2251" : { - "message" : [ - " does not support the execute() code path." - ] - }, - "_LEGACY_ERROR_TEMP_2252" : { - "message" : [ - "Cannot merge with ." - ] - }, - "_LEGACY_ERROR_TEMP_2253" : { - "message" : [ - "Data source does not support continuous processing." - ] - }, - "_LEGACY_ERROR_TEMP_2254" : { - "message" : [ - "Data read failed." - ] - }, - "_LEGACY_ERROR_TEMP_2255" : { - "message" : [ - "Epoch marker generation failed." - ] - }, - "_LEGACY_ERROR_TEMP_2256" : { - "message" : [ - "Foreach writer has been aborted due to a task failure." - ] - }, - "_LEGACY_ERROR_TEMP_2258" : { - "message" : [ - "Error reading delta file of : key size cannot be ." - ] - }, - "_LEGACY_ERROR_TEMP_2259" : { - "message" : [ - "Error reading snapshot file of : " - ] - }, - "_LEGACY_ERROR_TEMP_2260" : { - "message" : [ - "Cannot purge as it might break internal state." - ] - }, - "_LEGACY_ERROR_TEMP_2261" : { - "message" : [ - "Clean up source files is not supported when reading from the output directory of FileStreamSink." - ] - }, - "_LEGACY_ERROR_TEMP_2262" : { - "message" : [ - "latestOffset(Offset, ReadLimit) should be called instead of this method." - ] - }, - "_LEGACY_ERROR_TEMP_2263" : { - "message" : [ - "Error: we detected a possible problem with the location of your checkpoint and you", - "likely need to move it before restarting this query.", - "", - "Earlier version of Spark incorrectly escaped paths when writing out checkpoints for", - "structured streaming. While this was corrected in Spark 3.0, it appears that your", - "query was started using an earlier version that incorrectly handled the checkpoint", - "path.", - "", - "Correct Checkpoint Directory: ", - "Incorrect Checkpoint Directory: ", - "", - "Please move the data from the incorrect directory to the correct one, delete the", - "incorrect directory, and then restart this query. If you believe you are receiving", - "this message in error, you can disable it with the SQL conf", - "." - ] - }, - "_LEGACY_ERROR_TEMP_2264" : { - "message" : [ - "Subprocess exited with status . Error: ." - ] - }, - "_LEGACY_ERROR_TEMP_2265" : { - "message" : [ - " without serde does not support
as output data type." - ] - }, - "_LEGACY_ERROR_TEMP_2266" : { - "message" : [ - "Invalid `startIndex` provided for generating iterator over the array. Total elements: , requested `startIndex`: ." - ] - }, - "_LEGACY_ERROR_TEMP_2267" : { - "message" : [ - "The backing has been modified since the creation of this Iterator." - ] - }, - "_LEGACY_ERROR_TEMP_2268" : { - "message" : [ - " does not implement doExecuteBroadcast." - ] - }, - "_LEGACY_ERROR_TEMP_2269" : { - "message" : [ - " is a system preserved database, please rename your existing database to resolve the name conflict, or set a different value for , and launch your Spark application again." - ] - }, - "_LEGACY_ERROR_TEMP_2270" : { - "message" : [ - "comment on table is not supported." - ] - }, - "_LEGACY_ERROR_TEMP_2271" : { - "message" : [ - "UpdateColumnNullability is not supported." - ] - }, - "_LEGACY_ERROR_TEMP_2272" : { - "message" : [ - "Rename column is only supported for MySQL version 8.0 and above." - ] - }, - "_LEGACY_ERROR_TEMP_2273" : { - "message" : [ - "" - ] - }, - "_LEGACY_ERROR_TEMP_2277" : { - "message" : [ - "Number of dynamic partitions created is , which is more than . To solve this try to set to at least ." - ] - }, - "_LEGACY_ERROR_TEMP_2330" : { - "message" : [ - "Cannot change nullable column to non-nullable: ." - ] - }, - "_LEGACY_ERROR_TEMP_2446" : { - "message" : [ - "Operation not allowed: only works on table with location provided: " - ] - }, - "_LEGACY_ERROR_TEMP_3000" : { - "message" : [ - "Unexpected Py4J server ." - ] - }, - "_LEGACY_ERROR_TEMP_3001" : { - "message" : [ - "EOFException occurred while reading the port number from 's stdout." - ] - }, - "_LEGACY_ERROR_TEMP_3002" : { - "message" : [ - "Data of type is not supported" - ] - }, - "_LEGACY_ERROR_TEMP_3003" : { - "message" : [ - "Could not compute split, block of RDD not found" - ] - }, - "_LEGACY_ERROR_TEMP_3004" : { - "message" : [ - "Attempted to use after its blocks have been removed!" - ] - }, - "_LEGACY_ERROR_TEMP_3005" : { - "message" : [ - "Histogram on either an empty RDD or RDD containing +/-infinity or NaN" - ] - }, - "_LEGACY_ERROR_TEMP_3006" : { - "message" : [ - "empty RDD" - ] - }, - "_LEGACY_ERROR_TEMP_3007" : { - "message" : [ - "Checkpoint block not found! Either the executor", - "that originally checkpointed this partition is no longer alive, or the original RDD is", - "unpersisted. If this problem persists, you may consider using `rdd.checkpoint()`", - "instead, which is slower than local checkpointing but more fault-tolerant." - ] - }, - "_LEGACY_ERROR_TEMP_3008" : { - "message" : [ - "Cannot use map-side combining with array keys." - ] - }, - "_LEGACY_ERROR_TEMP_3009" : { - "message" : [ - "HashPartitioner cannot partition array keys." - ] - }, - "_LEGACY_ERROR_TEMP_3010" : { - "message" : [ - "reduceByKeyLocally() does not support array keys" - ] - }, - "_LEGACY_ERROR_TEMP_3011" : { - "message" : [ - "This RDD lacks a SparkContext. It could happen in the following cases:", - "(1) RDD transformations and actions are NOT invoked by the driver, but inside of other transformations; for example, rdd1.map(x => rdd2.values.count() * x) is invalid because the values transformation and count action cannot be performed inside of the rdd1.map transformation. For more information, see SPARK-5063.", - "(2) When a Spark Streaming job recovers from checkpoint, this exception will be hit if a reference to an RDD not defined by the streaming job is used in DStream operations. For more information, See SPARK-13758." 
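_LEGACY_ERROR_TEMP_3011 at the end of this hunk describes the SPARK-5063 restriction. A minimal sketch of the invalid pattern and the usual fix:

```scala
import org.apache.spark.sql.SparkSession

val spark = SparkSession.builder().master("local[*]").appName("spark-5063").getOrCreate()
val sc = spark.sparkContext

val rdd1 = sc.parallelize(1 to 5)
val rdd2 = sc.parallelize(Seq("a", "b", "c"))

// Invalid: rdd2.count() would execute inside rdd1.map on the executors.
// val bad = rdd1.map(x => rdd2.count() * x)

// Valid: run the action on the driver first, then close over the plain value.
val n = rdd2.count()
println(rdd1.map(x => n * x).collect().mkString(", "))
```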
- ] - }, - "_LEGACY_ERROR_TEMP_3012" : { - "message" : [ - "Cannot change storage level of an RDD after it was already assigned a level" - ] - }, - "_LEGACY_ERROR_TEMP_3013" : { - "message" : [ - "Can only zip RDDs with same number of elements in each partition" - ] - }, - "_LEGACY_ERROR_TEMP_3014" : { - "message" : [ - "empty collection" - ] - }, - "_LEGACY_ERROR_TEMP_3015" : { - "message" : [ - "countByValueApprox() does not support arrays" - ] - }, - "_LEGACY_ERROR_TEMP_3016" : { - "message" : [ - "Checkpoint directory has not been set in the SparkContext" - ] - }, - "_LEGACY_ERROR_TEMP_3017" : { - "message" : [ - "Invalid checkpoint file: " - ] - }, - "_LEGACY_ERROR_TEMP_3018" : { - "message" : [ - "Failed to create checkpoint path " - ] - }, - "_LEGACY_ERROR_TEMP_3019" : { - "message" : [ - "Checkpoint RDD has a different number of partitions from original RDD. Original", - "RDD [ID: , num of partitions: ];", - "Checkpoint RDD [ID: , num of partitions: ]." - ] - }, - "_LEGACY_ERROR_TEMP_3020" : { - "message" : [ - "Checkpoint dir must be specified." - ] - }, - "_LEGACY_ERROR_TEMP_3021" : { - "message" : [ - "Error asking standalone scheduler to shut down executors" - ] - }, - "_LEGACY_ERROR_TEMP_3022" : { - "message" : [ - "Error stopping standalone scheduler's driver endpoint" - ] - }, - "_LEGACY_ERROR_TEMP_3023" : { - "message" : [ - "Can't run submitMapStage on RDD with 0 partitions" - ] - }, - "_LEGACY_ERROR_TEMP_3024" : { - "message" : [ - "attempted to access non-existent accumulator " - ] - }, - "_LEGACY_ERROR_TEMP_3025" : { - "message" : [ - "TaskSetManagers should only send Resubmitted task statuses for tasks in ShuffleMapStages." - ] - }, - "_LEGACY_ERROR_TEMP_3026" : { - "message" : [ - "duration() called on unfinished task" - ] - }, - "_LEGACY_ERROR_TEMP_3027" : { - "message" : [ - "Unrecognized : " - ] - }, - "_LEGACY_ERROR_TEMP_3028" : { - "message" : [ - "" - ] - }, - "_LEGACY_ERROR_TEMP_3029" : { - "message" : [ - "Exiting due to error from cluster scheduler: " - ] - }, - "_LEGACY_ERROR_TEMP_3030" : { - "message" : [ - "Task has not locked block for writing" - ] - }, - "_LEGACY_ERROR_TEMP_3031" : { - "message" : [ - "Block does not exist" - ] - }, - "_LEGACY_ERROR_TEMP_3032" : { - "message" : [ - "Error occurred while waiting for replication to finish" - ] - }, - "_LEGACY_ERROR_TEMP_3033" : { - "message" : [ - "Unable to register with external shuffle server due to : " - ] - }, - "_LEGACY_ERROR_TEMP_3034" : { - "message" : [ - "Error occurred while waiting for async. reregistration" - ] - }, - "_LEGACY_ERROR_TEMP_3035" : { - "message" : [ - "Unexpected shuffle block with unsupported shuffle resolver " - ] - }, - "_LEGACY_ERROR_TEMP_3036" : { - "message" : [ - "Failure while trying to store block on ." - ] - }, - "_LEGACY_ERROR_TEMP_3037" : { - "message" : [ - "Block was not found even though it's read-locked" - ] - }, - "_LEGACY_ERROR_TEMP_3038" : { - "message" : [ - "get() failed for block even though we held a lock" - ] - }, - "_LEGACY_ERROR_TEMP_3039" : { - "message" : [ - "BlockManager returned null for BlockStatus query: " - ] - }, - "_LEGACY_ERROR_TEMP_3040" : { - "message" : [ - "BlockManagerMasterEndpoint returned false, expected true." 
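Several of the checkpoint messages above (_LEGACY_ERROR_TEMP_3016 through _3020) come down to ordering: the checkpoint directory must be set before `checkpoint()` is called. A sketch with an illustrative path:

```scala
import org.apache.spark.sql.SparkSession

val spark = SparkSession.builder().master("local[*]").appName("checkpointing").getOrCreate()
val sc = spark.sparkContext

sc.setCheckpointDir("/tmp/spark-checkpoints") // must happen before checkpoint()
val rdd = sc.parallelize(1 to 100).map(_ * 2)
rdd.checkpoint()
rdd.count() // an action materializes the RDD and writes the checkpoint
println(rdd.isCheckpointed)
```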
- ] - }, - "_LEGACY_ERROR_TEMP_3041" : { - "message" : [ - "" - ] - }, - "_LEGACY_ERROR_TEMP_3042" : { - "message" : [ - "Failed to get block , which is not a shuffle block" - ] - } -} diff --git a/docs/sql-error-conditions.md.orig b/docs/sql-error-conditions.md.orig deleted file mode 100644 index 91b77a6452bc5..0000000000000 --- a/docs/sql-error-conditions.md.orig +++ /dev/null @@ -1,2020 +0,0 @@ ---- -layout: global -title: Error Conditions -displayTitle: Error Conditions -license: | - Licensed to the Apache Software Foundation (ASF) under one or more - contributor license agreements. See the NOTICE file distributed with - this work for additional information regarding copyright ownership. - The ASF licenses this file to You under the Apache License, Version 2.0 - (the "License"); you may not use this file except in compliance with - the License. You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. ---- - -This is a list of common, named error conditions returned by Spark SQL. - -Also see [SQLSTATE Codes](sql-error-conditions-sqlstates.html). - -### AGGREGATE_FUNCTION_WITH_NONDETERMINISTIC_EXPRESSION - -SQLSTATE: none assigned - -Non-deterministic expression `` should not appear in the arguments of an aggregate function. - -### ALL_PARTITION_COLUMNS_NOT_ALLOWED - -SQLSTATE: none assigned - -Cannot use all columns for partition columns. - -### ALTER_TABLE_COLUMN_DESCRIPTOR_DUPLICATE - -[SQLSTATE: 42710](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -ALTER TABLE `` column `` specifies descriptor "``" more than once, which is invalid. - -### AMBIGUOUS_ALIAS_IN_NESTED_CTE - -SQLSTATE: none assigned - -Name `` is ambiguous in nested CTE. -Please set `` to "CORRECTED" so that name defined in inner CTE takes precedence. If set it to "LEGACY", outer CTE definitions will take precedence. -See '``/sql-migration-guide.html#query-engine'. - -### AMBIGUOUS_COLUMN_OR_FIELD - -[SQLSTATE: 42702](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -Column or field `` is ambiguous and has `` matches. - -### AMBIGUOUS_LATERAL_COLUMN_ALIAS - -[SQLSTATE: 42702](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -Lateral column alias `` is ambiguous and has `` matches. - -### AMBIGUOUS_REFERENCE - -[SQLSTATE: 42704](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -Reference `` is ambiguous, could be: ``. - -### AMBIGUOUS_REFERENCE_TO_FIELDS - -[SQLSTATE: 42000](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -Ambiguous reference to the field ``. It appears `` times in the schema. - -### ARITHMETIC_OVERFLOW - -[SQLSTATE: 22003](sql-error-conditions-sqlstates.html#class-22-data-exception) - -``.`` If necessary set `` to "false" to bypass this error. - -### [AS_OF_JOIN](sql-error-conditions-as-of-join-error-class.html) - -SQLSTATE: none assigned - -Invalid as-of join. 
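For ARITHMETIC_OVERFLOW above, the elided config placeholder is presumably `spark.sql.ansi.enabled`; treat that as an assumption. A sketch of the failing and tolerant forms:

```scala
import org.apache.spark.sql.SparkSession

val spark = SparkSession.builder().master("local[*]").appName("overflow").getOrCreate()

spark.conf.set("spark.sql.ansi.enabled", "true")
// TINYINT overflow: 127Y + 1Y exceeds the byte range and raises an overflow error.
try spark.sql("SELECT 127Y + 1Y").show()
catch { case e: Exception => println(e.getMessage) }

// try_add tolerates the overflow and returns NULL instead.
spark.sql("SELECT try_add(127Y, 1Y)").show()
```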
- -For more details see [AS_OF_JOIN](sql-error-conditions-as-of-join-error-class.html) - -### AVRO_INCORRECT_TYPE - -SQLSTATE: none assigned - -Cannot convert Avro `` to SQL `` because the original encoded data type is ``, however you're trying to read the field as ``, which would lead to an incorrect answer. To allow reading this field, enable the SQL configuration: ``. - -### AVRO_LOWER_PRECISION - -SQLSTATE: none assigned - -Cannot convert Avro `` to SQL `` because the original encoded data type is ``, however you're trying to read the field as ``, which leads to data being read as null. Please provide a wider decimal type to get the correct result. To allow reading null to this field, enable the SQL configuration: ``. - -### BATCH_METADATA_NOT_FOUND - -[SQLSTATE: 42K03](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -Unable to find batch ``. - -### BINARY_ARITHMETIC_OVERFLOW - -[SQLSTATE: 22003](sql-error-conditions-sqlstates.html#class-22-data-exception) - -`` `` `` caused overflow. - -### CALL_ON_STREAMING_DATASET_UNSUPPORTED - -SQLSTATE: none assigned - -The method `` can not be called on streaming Dataset/DataFrame. - -### CANNOT_CAST_DATATYPE - -[SQLSTATE: 42846](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -Cannot cast `` to ``. - -### CANNOT_CONVERT_PROTOBUF_FIELD_TYPE_TO_SQL_TYPE - -SQLSTATE: none assigned - -Cannot convert Protobuf `` to SQL `` because schema is incompatible (protobufType = ``, sqlType = ``). - -### CANNOT_CONVERT_PROTOBUF_MESSAGE_TYPE_TO_SQL_TYPE - -SQLSTATE: none assigned - -Unable to convert `` of Protobuf to SQL type ``. - -### CANNOT_CONVERT_SQL_TYPE_TO_PROTOBUF_FIELD_TYPE - -SQLSTATE: none assigned - -Cannot convert SQL `` to Protobuf `` because schema is incompatible (protobufType = ``, sqlType = ``). - -### CANNOT_CONVERT_SQL_VALUE_TO_PROTOBUF_ENUM_TYPE - -SQLSTATE: none assigned - -Cannot convert SQL `` to Protobuf `` because `` is not in defined values for enum: ``. - -### CANNOT_DECODE_URL - -[SQLSTATE: 22546](sql-error-conditions-sqlstates.html#class-22-data-exception) - -The provided URL cannot be decoded: ``. Please ensure that the URL is properly formatted and try again. - -### CANNOT_INVOKE_IN_TRANSFORMATIONS - -SQLSTATE: none assigned - -Dataset transformations and actions can only be invoked by the driver, not inside of other Dataset transformations; for example, dataset1.map(x => dataset2.values.count() * x) is invalid because the values transformation and count action cannot be performed inside of the dataset1.map transformation. For more information, see SPARK-28702. - -### CANNOT_LOAD_FUNCTION_CLASS - -SQLSTATE: none assigned - -Cannot load class `` when registering the function ``, please make sure it is on the classpath. - -### CANNOT_LOAD_PROTOBUF_CLASS - -SQLSTATE: none assigned - -Could not load Protobuf class with name ``. ``. - -### CANNOT_MERGE_INCOMPATIBLE_DATA_TYPE - -[SQLSTATE: 42825](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -Failed to merge incompatible data types `` and ``. Please check the data types of the columns being merged and ensure that they are compatible. If necessary, consider casting the columns to compatible data types before attempting the merge. - -### CANNOT_MERGE_SCHEMAS - -[SQLSTATE: 42KD9](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -Failed merging schemas: -Initial schema: -`` -Schema that cannot be merged with the initial schema: -``. 
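One way CANNOT_MERGE_SCHEMAS / CANNOT_MERGE_INCOMPATIBLE_DATA_TYPE above can surface is Parquet schema merging over incompatibly typed writes. A sketch with illustrative /tmp paths:

```scala
import org.apache.spark.sql.SparkSession

val spark = SparkSession.builder().master("local[*]").appName("merge-schemas").getOrCreate()
import spark.implicits._

// Two writes that disagree on the type of `id` (INT vs STRING).
Seq(1, 2, 3).toDF("id").write.mode("overwrite").parquet("/tmp/merge-demo/part=a")
Seq("x", "y").toDF("id").write.mode("overwrite").parquet("/tmp/merge-demo/part=b")

// Merging the footers fails because INT and STRING cannot be reconciled.
try spark.read.option("mergeSchema", "true").parquet("/tmp/merge-demo").show()
catch { case e: Exception => println(e.getMessage) }
```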
- -### CANNOT_MODIFY_CONFIG - -[SQLSTATE: 46110](sql-error-conditions-sqlstates.html#class-46-java-ddl-1) - -Cannot modify the value of the Spark config: ``. -See also '``/sql-migration-guide.html#ddl-statements'. - -### CANNOT_PARSE_DECIMAL - -[SQLSTATE: 22018](sql-error-conditions-sqlstates.html#class-22-data-exception) - -Cannot parse decimal. Please ensure that the input is a valid number with optional decimal point or comma separators. - -### CANNOT_PARSE_INTERVAL - -SQLSTATE: none assigned - -Unable to parse ``. Please ensure that the value provided is in a valid format for defining an interval. You can reference the documentation for the correct format. If the issue persists, please double check that the input value is not null or empty and try again. - -### CANNOT_PARSE_JSON_FIELD - -[SQLSTATE: 2203G](sql-error-conditions-sqlstates.html#class-22-data-exception) - -Cannot parse the field name `` and the value `` of the JSON token type `` to target Spark data type ``. - -### CANNOT_PARSE_PROTOBUF_DESCRIPTOR - -SQLSTATE: none assigned - -Error parsing descriptor bytes into Protobuf FileDescriptorSet. - -### CANNOT_PARSE_TIMESTAMP - -[SQLSTATE: 22007](sql-error-conditions-sqlstates.html#class-22-data-exception) - -``. If necessary set `` to "false" to bypass this error. - -### CANNOT_READ_FILE_FOOTER - -SQLSTATE: none assigned - -Could not read footer for file: ``. Please ensure that the file is in either ORC or Parquet format. If not, please convert it to a valid format. If the file is in the valid format, please check if it is corrupt. If it is, you can choose to either ignore it or fix the corruption. - -### CANNOT_RECOGNIZE_HIVE_TYPE - -[SQLSTATE: 429BB](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -Cannot recognize hive type string: ``, column: ``. The specified data type for the field cannot be recognized by Spark SQL. Please check the data type of the specified field and ensure that it is a valid Spark SQL data type. Refer to the Spark SQL documentation for a list of valid data types and their format. If the data type is correct, please ensure that you are using a supported version of Spark SQL. - -### CANNOT_RENAME_ACROSS_SCHEMA - -[SQLSTATE: 0AKD0](sql-error-conditions-sqlstates.html#class-0A-feature-not-supported) - -Renaming a `` across schemas is not allowed. - -### CANNOT_RESOLVE_STAR_EXPAND - -SQLSTATE: none assigned - -Cannot resolve ``.* given input columns ``. Please check that the specified table or struct exists and is accessible in the input columns. - -### CANNOT_RESTORE_PERMISSIONS_FOR_PATH - -SQLSTATE: none assigned - -Failed to set permissions on created path `` back to ``. - -### [CANNOT_UPDATE_FIELD](sql-error-conditions-cannot-update-field-error-class.html) - -SQLSTATE: none assigned - -Cannot update `
` field `` type: - -For more details see [CANNOT_UPDATE_FIELD](sql-error-conditions-cannot-update-field-error-class.html) - -### CANNOT_UP_CAST_DATATYPE - -SQLSTATE: none assigned - -Cannot up cast `` from `` to ``. -`
` - -### CAST_INVALID_INPUT - -[SQLSTATE: 22018](sql-error-conditions-sqlstates.html#class-22-data-exception) - -The value `` of the type `` cannot be cast to `` because it is malformed. Correct the value as per the syntax, or change its target type. Use `try_cast` to tolerate malformed input and return NULL instead. If necessary set `` to "false" to bypass this error. - -### CAST_OVERFLOW - -[SQLSTATE: 22003](sql-error-conditions-sqlstates.html#class-22-data-exception) - -The value `` of the type `` cannot be cast to `` due to an overflow. Use `try_cast` to tolerate overflow and return NULL instead. If necessary set `` to "false" to bypass this error. - -### CAST_OVERFLOW_IN_TABLE_INSERT - -[SQLSTATE: 22003](sql-error-conditions-sqlstates.html#class-22-data-exception) - -Fail to insert a value of `` type into the `` type column `` due to an overflow. Use `try_cast` on the input value to tolerate overflow and return NULL instead. - -### CODEC_NOT_AVAILABLE - -SQLSTATE: none assigned - -The codec `` is not available. Consider to set the config `` to ``. - -### CODEC_SHORT_NAME_NOT_FOUND - -SQLSTATE: none assigned - -Cannot find a short name for the codec ``. - -### COLUMN_ALIASES_IS_NOT_ALLOWED - -SQLSTATE: none assigned - -Columns aliases are not allowed in ``. - -### COLUMN_ALREADY_EXISTS - -[SQLSTATE: 42711](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -The column `` already exists. Consider to choose another name or rename the existing column. - -### COLUMN_NOT_DEFINED_IN_TABLE - -SQLSTATE: none assigned - -`` column `` is not defined in table ``, defined table columns are: ``. - -### COLUMN_NOT_FOUND - -[SQLSTATE: 42703](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -The column `` cannot be found. Verify the spelling and correctness of the column name according to the SQL config ``. - -### COMPARATOR_RETURNS_NULL - -SQLSTATE: none assigned - -The comparator has returned a NULL for a comparison between `` and ``. It should return a positive integer for "greater than", 0 for "equal" and a negative integer for "less than". To revert to deprecated behavior where NULL is treated as 0 (equal), you must set "spark.sql.legacy.allowNullComparisonResultInArraySort" to "true". - -### CONCURRENT_QUERY - -SQLSTATE: none assigned - -Another instance of this query was just started by a concurrent session. - -### CONCURRENT_STREAM_LOG_UPDATE - -SQLSTATE: 40000 - -Concurrent update to the log. Multiple streaming jobs detected for ``. -Please make sure only one streaming job runs on a specific checkpoint location at a time. - -### [CONNECT](sql-error-conditions-connect-error-class.html) - -SQLSTATE: none assigned - -Generic Spark Connect error. - -For more details see [CONNECT](sql-error-conditions-connect-error-class.html) - -### CONVERSION_INVALID_INPUT - -[SQLSTATE: 22018](sql-error-conditions-sqlstates.html#class-22-data-exception) - -The value `` (``) cannot be converted to `` because it is malformed. Correct the value as per the syntax, or change its format. Use `` to tolerate malformed input and return NULL instead. - -### CREATE_PERMANENT_VIEW_WITHOUT_ALIAS - -SQLSTATE: none assigned - -Not allowed to create the permanent view `` without explicitly assigning an alias for the expression ``. 
- -### CREATE_TABLE_COLUMN_DESCRIPTOR_DUPLICATE - -[SQLSTATE: 42710](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -CREATE TABLE column `` specifies descriptor "``" more than once, which is invalid. - -### [CREATE_VIEW_COLUMN_ARITY_MISMATCH](sql-error-conditions-create-view-column-arity-mismatch-error-class.html) - -[SQLSTATE: 21S01](sql-error-conditions-sqlstates.html#class-21-cardinality-violation) - -Cannot create view ``, the reason is - -For more details see [CREATE_VIEW_COLUMN_ARITY_MISMATCH](sql-error-conditions-create-view-column-arity-mismatch-error-class.html) - -### [DATATYPE_MISMATCH](sql-error-conditions-datatype-mismatch-error-class.html) - -[SQLSTATE: 42K09](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -Cannot resolve `` due to data type mismatch: - -For more details see [DATATYPE_MISMATCH](sql-error-conditions-datatype-mismatch-error-class.html) - -### DATATYPE_MISSING_SIZE - -[SQLSTATE: 42K01](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -DataType `` requires a length parameter, for example ``(10). Please specify the length. - -### DATA_SOURCE_NOT_FOUND - -[SQLSTATE: 42K02](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -Failed to find the data source: ``. Please find packages at `https://spark.apache.org/third-party-projects.html`. - -### DATETIME_OVERFLOW - -[SQLSTATE: 22008](sql-error-conditions-sqlstates.html#class-22-data-exception) - -Datetime operation overflow: ``. - -### DECIMAL_PRECISION_EXCEEDS_MAX_PRECISION - -[SQLSTATE: 22003](sql-error-conditions-sqlstates.html#class-22-data-exception) - -Decimal precision `` exceeds max precision ``. - -### DEFAULT_DATABASE_NOT_EXISTS - -[SQLSTATE: 42704](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -Default database `` does not exist, please create it first or change default database to ````. - -### DISTINCT_WINDOW_FUNCTION_UNSUPPORTED - -SQLSTATE: none assigned - -Distinct window functions are not supported: ``. - -### DIVIDE_BY_ZERO - -[SQLSTATE: 22012](sql-error-conditions-sqlstates.html#class-22-data-exception) - -Division by zero. Use `try_divide` to tolerate divisor being 0 and return NULL instead. If necessary set `` to "false" to bypass this error. - -### DUPLICATED_FIELD_NAME_IN_ARROW_STRUCT - -SQLSTATE: none assigned - -Duplicated field names in Arrow Struct are not allowed, got ``. - -### DUPLICATED_MAP_KEY - -[SQLSTATE: 23505](sql-error-conditions-sqlstates.html#class-23-integrity-constraint-violation) - -Duplicate map key `` was found, please check the input data. If you want to remove the duplicated keys, you can set `` to "LAST_WIN" so that the key inserted at last takes precedence. - -### DUPLICATED_METRICS_NAME - -SQLSTATE: none assigned - -The metric name is not unique: ``. The same name cannot be used for metrics with different results. However multiple instances of metrics with the same result and name are allowed (e.g. self-joins). - -### DUPLICATE_CLAUSES - -SQLSTATE: none assigned - -Found duplicate clauses: ``. Please, remove one of them. - -### DUPLICATE_KEY - -[SQLSTATE: 23505](sql-error-conditions-sqlstates.html#class-23-integrity-constraint-violation) - -Found duplicate keys ``. - -### EMPTY_JSON_FIELD_VALUE - -[SQLSTATE: 42604](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -Failed to parse an empty string for data type ``.
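Two of the conditions above have one-line mitigations worth showing: DIVIDE_BY_ZERO suggests `try_divide`, and DUPLICATED_MAP_KEY points at the map-key dedup policy (assumed here to be `spark.sql.mapKeyDedupPolicy`). A sketch:

```scala
import org.apache.spark.sql.SparkSession

val spark = SparkSession.builder().master("local[*]").appName("div-and-maps").getOrCreate()

// NULL instead of the DIVIDE_BY_ZERO error.
spark.sql("SELECT try_divide(10, 0)").show()

// Under the default EXCEPTION policy, map(1, 'a', 1, 'b') raises DUPLICATED_MAP_KEY;
// LAST_WIN keeps the value inserted last.
spark.conf.set("spark.sql.mapKeyDedupPolicy", "LAST_WIN")
spark.sql("SELECT map(1, 'a', 1, 'b')[1]").show() // b
```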
- -### ENCODER_NOT_FOUND - -SQLSTATE: none assigned - -Not found an encoder of the type `` to Spark SQL internal representation. Consider to change the input type to one of supported at '``/sql-ref-datatypes.html'. - -### EVENT_TIME_IS_NOT_ON_TIMESTAMP_TYPE - -SQLSTATE: none assigned - -The event time `` has the invalid type ``, but expected "TIMESTAMP". - -### EXCEED_LIMIT_LENGTH - -SQLSTATE: none assigned - -Exceeds char/varchar type length limitation: ``. - -### EXPRESSION_TYPE_IS_NOT_ORDERABLE - -SQLSTATE: none assigned - -Column expression `` cannot be sorted because its type `` is not orderable. - -### FAILED_EXECUTE_UDF - -[SQLSTATE: 39000](sql-error-conditions-sqlstates.html#class-39-external-routine-invocation-exception) - -Failed to execute user defined function (``: (``) => ``). - -### FAILED_FUNCTION_CALL - -[SQLSTATE: 38000](sql-error-conditions-sqlstates.html#class-38-external-routine-exception) - -Failed preparing of the function `` for call. Please, double check function's arguments. - -### FAILED_PARSE_STRUCT_TYPE - -[SQLSTATE: 22018](sql-error-conditions-sqlstates.html#class-22-data-exception) - -Failed parsing struct: ``. - -### FAILED_RENAME_PATH - -[SQLSTATE: 42K04](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -Failed to rename `` to `` as destination already exists. - -### FAILED_RENAME_TEMP_FILE - -SQLSTATE: none assigned - -Failed to rename temp file `` to `` as FileSystem.rename returned false. - -### FIELDS_ALREADY_EXISTS - -SQLSTATE: none assigned - -Cannot `` column, because `` already exists in ``. - -### FIELD_NOT_FOUND - -[SQLSTATE: 42704](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -No such struct field `` in ``. - -### FORBIDDEN_OPERATION - -[SQLSTATE: 42809](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -The operation `` is not allowed on the ``: ``. - -### GENERATED_COLUMN_WITH_DEFAULT_VALUE - -SQLSTATE: none assigned - -A column cannot have both a default value and a generation expression but column `` has default value: (``) and generation expression: (``). - -### GRAPHITE_SINK_INVALID_PROTOCOL - -SQLSTATE: none assigned - -Invalid Graphite protocol: ``. - -### GRAPHITE_SINK_PROPERTY_MISSING - -SQLSTATE: none assigned - -Graphite sink requires '``' property. - -### GROUPING_COLUMN_MISMATCH - -[SQLSTATE: 42803](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -Column of grouping (``) can't be found in grouping columns ``. - -### GROUPING_ID_COLUMN_MISMATCH - -[SQLSTATE: 42803](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -Columns of grouping_id (``) does not match grouping columns (``). - -### GROUPING_SIZE_LIMIT_EXCEEDED - -[SQLSTATE: 54000](sql-error-conditions-sqlstates.html#class-54-program-limit-exceeded) - -Grouping sets size cannot be greater than ``. - -### GROUP_BY_AGGREGATE - -[SQLSTATE: 42903](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -Aggregate functions are not allowed in GROUP BY, but found ``. - -### GROUP_BY_POS_AGGREGATE - -[SQLSTATE: 42903](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -GROUP BY `` refers to an expression `` that contains an aggregate function. Aggregate functions are not allowed in GROUP BY. 
- -### GROUP_BY_POS_OUT_OF_RANGE - -[SQLSTATE: 42805](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -GROUP BY position `` is not in select list (valid range is [1, ``]). - -### GROUP_EXPRESSION_TYPE_IS_NOT_ORDERABLE - -SQLSTATE: none assigned - -The expression `` cannot be used as a grouping expression because its data type `` is not an orderable data type. - -### HLL_INVALID_INPUT_SKETCH_BUFFER - -SQLSTATE: none assigned - -Invalid call to ``; only valid HLL sketch buffers are supported as inputs (such as those produced by the `hll_sketch_agg` function). - -### HLL_INVALID_LG_K - -SQLSTATE: none assigned - -Invalid call to ``; the `lgConfigK` value must be between `` and ``, inclusive: ``. - -### HLL_UNION_DIFFERENT_LG_K - -SQLSTATE: none assigned - -Sketches have different `lgConfigK` values: `` and ``. Set the `allowDifferentLgConfigK` parameter to true to call `` with different `lgConfigK` values. - -### IDENTIFIER_TOO_MANY_NAME_PARTS - -[SQLSTATE: 42601](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -`` is not a valid identifier as it has more than 2 name parts. - -### INCOMPARABLE_PIVOT_COLUMN - -[SQLSTATE: 42818](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -Invalid pivot column ``. Pivot columns must be comparable. - -### INCOMPATIBLE_COLUMN_TYPE - -[SQLSTATE: 42825](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -`` can only be performed on tables with compatible column types. The `` column of the `` table is `` type which is not compatible with `` at the same column of the first table.``. - -### INCOMPATIBLE_DATASOURCE_REGISTER - -SQLSTATE: none assigned - -Detected an incompatible DataSourceRegister. Please remove the incompatible library from classpath or upgrade it. Error: `` - -### [INCOMPATIBLE_DATA_FOR_TABLE](sql-error-conditions-incompatible-data-for-table-error-class.html) - -SQLSTATE: KD000 - -Cannot write incompatible data for the table ``: - -For more details see [INCOMPATIBLE_DATA_FOR_TABLE](sql-error-conditions-incompatible-data-for-table-error-class.html) - -### INCOMPATIBLE_JOIN_TYPES - -[SQLSTATE: 42613](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -The join types `` and `` are incompatible. - -### INCOMPATIBLE_VIEW_SCHEMA_CHANGE - -SQLSTATE: none assigned - -The SQL query of view `` has an incompatible schema change and column `` cannot be resolved. Expected `` columns named `` but got ``. -Please try to re-create the view by running: ``. - -### [INCOMPLETE_TYPE_DEFINITION](sql-error-conditions-incomplete-type-definition-error-class.html) - -[SQLSTATE: 42K01](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -Incomplete complex type: - -For more details see [INCOMPLETE_TYPE_DEFINITION](sql-error-conditions-incomplete-type-definition-error-class.html) - -### [INCONSISTENT_BEHAVIOR_CROSS_VERSION](sql-error-conditions-inconsistent-behavior-cross-version-error-class.html) - -[SQLSTATE: 42K0B](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -You may get a different result due to the upgrading to - -For more details see [INCONSISTENT_BEHAVIOR_CROSS_VERSION](sql-error-conditions-inconsistent-behavior-cross-version-error-class.html) - -### INCORRECT_END_OFFSET - -[SQLSTATE: 22003](sql-error-conditions-sqlstates.html#class-22-data-exception) - -Max offset with `` rowsPerSecond is ``, but it's `` now. 
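The HLL_* conditions above refer to the Datasketches-backed sketch functions. A sketch assuming `hll_sketch_agg` and `hll_sketch_estimate` with an explicit `lgConfigK`:

```scala
import org.apache.spark.sql.SparkSession

val spark = SparkSession.builder().master("local[*]").appName("hll").getOrCreate()

spark.range(0, 100000).createOrReplaceTempView("t")

// Build a sketch with lgConfigK = 12 and estimate the distinct count.
spark.sql("SELECT hll_sketch_estimate(hll_sketch_agg(id, 12)) AS approx FROM t").show()

// An out-of-range lgConfigK (e.g. 1) raises HLL_INVALID_LG_K.
// spark.sql("SELECT hll_sketch_agg(id, 1) FROM t").show()
```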
- -### INCORRECT_RAMP_UP_RATE - -[SQLSTATE: 22003](sql-error-conditions-sqlstates.html#class-22-data-exception) - -Max offset with `` rowsPerSecond is ``, but 'rampUpTimeSeconds' is ``. - -### INDEX_ALREADY_EXISTS - -[SQLSTATE: 42710](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -Cannot create the index `` on table `` because it already exists. - -### INDEX_NOT_FOUND - -[SQLSTATE: 42704](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -Cannot find the index `` on table ``. - -### [INSERT_COLUMN_ARITY_MISMATCH](sql-error-conditions-insert-column-arity-mismatch-error-class.html) - -[SQLSTATE: 21S01](sql-error-conditions-sqlstates.html#class-21-cardinality-violation) - -Cannot write to ``, the reason is - -For more details see [INSERT_COLUMN_ARITY_MISMATCH](sql-error-conditions-insert-column-arity-mismatch-error-class.html) - -### INSERT_PARTITION_COLUMN_ARITY_MISMATCH - -[SQLSTATE: 21S01](sql-error-conditions-sqlstates.html#class-21-cardinality-violation) - -Cannot write to '``', ``: -Table columns: ``. -Partition columns with static values: ``. -Data columns: ``. - -### [INSUFFICIENT_TABLE_PROPERTY](sql-error-conditions-insufficient-table-property-error-class.html) - -SQLSTATE: none assigned - -Can't find table property: - -For more details see [INSUFFICIENT_TABLE_PROPERTY](sql-error-conditions-insufficient-table-property-error-class.html) - -### INTERNAL_ERROR - -[SQLSTATE: XX000](sql-error-conditions-sqlstates.html#class-XX-internal-error) - -`` - -### INTERNAL_ERROR_BROADCAST - -[SQLSTATE: XX000](sql-error-conditions-sqlstates.html#class-XX-internal-error) - -`` - -### INTERNAL_ERROR_EXECUTOR - -[SQLSTATE: XX000](sql-error-conditions-sqlstates.html#class-XX-internal-error) - -`` - -### INTERNAL_ERROR_MEMORY - -[SQLSTATE: XX000](sql-error-conditions-sqlstates.html#class-XX-internal-error) - -`` - -### INTERNAL_ERROR_NETWORK - -[SQLSTATE: XX000](sql-error-conditions-sqlstates.html#class-XX-internal-error) - -`` - -### INTERNAL_ERROR_SHUFFLE - -[SQLSTATE: XX000](sql-error-conditions-sqlstates.html#class-XX-internal-error) - -`` - -### INTERNAL_ERROR_STORAGE - -[SQLSTATE: XX000](sql-error-conditions-sqlstates.html#class-XX-internal-error) - -`` - -### INTERVAL_ARITHMETIC_OVERFLOW - -[SQLSTATE: 22015](sql-error-conditions-sqlstates.html#class-22-data-exception) - -``.`` - -### INTERVAL_DIVIDED_BY_ZERO - -[SQLSTATE: 22012](sql-error-conditions-sqlstates.html#class-22-data-exception) - -Division by zero. Use `try_divide` to tolerate divisor being 0 and return NULL instead. - -### INVALID_ARRAY_INDEX - -[SQLSTATE: 22003](sql-error-conditions-sqlstates.html#class-22-data-exception) - -The index `` is out of bounds. The array has `` elements. Use the SQL function `get()` to tolerate accessing element at invalid index and return NULL instead. If necessary set `` to "false" to bypass this error. - -### INVALID_ARRAY_INDEX_IN_ELEMENT_AT - -[SQLSTATE: 22003](sql-error-conditions-sqlstates.html#class-22-data-exception) - -The index `` is out of bounds. The array has `` elements. Use `try_element_at` to tolerate accessing element at invalid index and return NULL instead. If necessary set `` to "false" to bypass this error. - -### INVALID_BITMAP_POSITION - -[SQLSTATE: 22003](sql-error-conditions-sqlstates.html#class-22-data-exception) - -The 0-indexed bitmap position `` is out of bounds. The bitmap has `` bits (`` bytes). 
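INVALID_ARRAY_INDEX_IN_ELEMENT_AT above pairs with a `try_` variant. A sketch, assuming the elided config is `spark.sql.ansi.enabled`:

```scala
import org.apache.spark.sql.SparkSession

val spark = SparkSession.builder().master("local[*]").appName("element-at").getOrCreate()

spark.conf.set("spark.sql.ansi.enabled", "true")
// Index 5 is out of bounds for a three-element array under ANSI semantics.
try spark.sql("SELECT element_at(array(1, 2, 3), 5)").show()
catch { case e: Exception => println(e.getMessage) }

// try_element_at tolerates the invalid index and returns NULL.
spark.sql("SELECT try_element_at(array(1, 2, 3), 5)").show()
```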
- -### [INVALID_BOUNDARY](sql-error-conditions-invalid-boundary-error-class.html) - -SQLSTATE: none assigned - -The boundary `` is invalid: ``. - -For more details see [INVALID_BOUNDARY](sql-error-conditions-invalid-boundary-error-class.html) - -### INVALID_BUCKET_FILE - -SQLSTATE: none assigned - -Invalid bucket file: ``. - -### INVALID_BYTE_STRING - -SQLSTATE: none assigned - -The expected format is ByteString, but was `` (``). - -### INVALID_COLUMN_NAME_AS_PATH - -[SQLSTATE: 46121](sql-error-conditions-sqlstates.html#class-46-java-ddl-1) - -The datasource `` cannot save the column `` because its name contains some characters that are not allowed in file paths. Please, use an alias to rename it. - -### INVALID_COLUMN_OR_FIELD_DATA_TYPE - -[SQLSTATE: 42000](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -Column or field `` is of type `` while it's required to be ``. - -### [INVALID_DEFAULT_VALUE](sql-error-conditions-invalid-default-value-error-class.html) - -SQLSTATE: none assigned - -Failed to execute `` command because the destination table column `` has a DEFAULT value ``, - -For more details see [INVALID_DEFAULT_VALUE](sql-error-conditions-invalid-default-value-error-class.html) - -### INVALID_DRIVER_MEMORY - -SQLSTATE: F0000 - -System memory `` must be at least ``. Please increase heap size using the --driver-memory option or "``" in Spark configuration. - -### INVALID_EMPTY_LOCATION - -[SQLSTATE: 42K05](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -The location name cannot be empty string, but ```` was given. - -### INVALID_ESC - -SQLSTATE: none assigned - -Found an invalid escape string: ``. The escape string must contain only one character. - -### INVALID_ESCAPE_CHAR - -SQLSTATE: none assigned - -`EscapeChar` should be a string literal of length one, but got ``. - -### INVALID_EXECUTOR_MEMORY - -SQLSTATE: F0000 - -Executor memory `` must be at least ``. Please increase executor memory using the --executor-memory option or "``" in Spark configuration. - -### INVALID_EXTRACT_BASE_FIELD_TYPE - -[SQLSTATE: 42000](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -Can't extract a value from ``. Need a complex type [STRUCT, ARRAY, MAP] but got ``. - -### INVALID_EXTRACT_FIELD - -[SQLSTATE: 42601](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -Cannot extract `` from ``. - -### INVALID_EXTRACT_FIELD_TYPE - -[SQLSTATE: 42000](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -Field name should be a non-null string literal, but it's ``. - -### INVALID_FIELD_NAME - -[SQLSTATE: 42000](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -Field name `` is invalid: `` is not a struct. - -### [INVALID_FORMAT](sql-error-conditions-invalid-format-error-class.html) - -[SQLSTATE: 42601](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -The format is invalid: ``. - -For more details see [INVALID_FORMAT](sql-error-conditions-invalid-format-error-class.html) - -### INVALID_FRACTION_OF_SECOND - -[SQLSTATE: 22023](sql-error-conditions-sqlstates.html#class-22-data-exception) - -The fraction of sec must be zero. Valid range is [0, 60]. If necessary set `` to "false" to bypass this error. 
- -### INVALID_HIVE_COLUMN_NAME - -SQLSTATE: none assigned - -Cannot create the table `` having the nested column `` whose name contains invalid characters `` in Hive metastore. - -### INVALID_IDENTIFIER - -[SQLSTATE: 42602](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -The identifier `` is invalid. Please, consider quoting it with back-quotes as ````. - -### INVALID_INDEX_OF_ZERO - -[SQLSTATE: 22003](sql-error-conditions-sqlstates.html#class-22-data-exception) - -The index 0 is invalid. An index shall be either `< 0 or >` 0 (the first element has index 1). - -### [INVALID_INLINE_TABLE](sql-error-conditions-invalid-inline-table-error-class.html) - -SQLSTATE: none assigned - -Invalid inline table. - -For more details see [INVALID_INLINE_TABLE](sql-error-conditions-invalid-inline-table-error-class.html) - -### INVALID_JSON_ROOT_FIELD - -[SQLSTATE: 22032](sql-error-conditions-sqlstates.html#class-22-data-exception) - -Cannot convert JSON root field to target Spark type. - -### INVALID_JSON_SCHEMA_MAP_TYPE - -[SQLSTATE: 22032](sql-error-conditions-sqlstates.html#class-22-data-exception) - -Input schema `` can only contain STRING as a key type for a MAP. - -### [INVALID_LAMBDA_FUNCTION_CALL](sql-error-conditions-invalid-lambda-function-call-error-class.html) - -SQLSTATE: none assigned - -Invalid lambda function call. - -For more details see [INVALID_LAMBDA_FUNCTION_CALL](sql-error-conditions-invalid-lambda-function-call-error-class.html) - -### INVALID_LATERAL_JOIN_TYPE - -[SQLSTATE: 42613](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -The `` JOIN with LATERAL correlation is not allowed because an OUTER subquery cannot correlate to its join partner. Remove the LATERAL correlation or use an INNER JOIN, or LEFT OUTER JOIN instead. - -### [INVALID_LIMIT_LIKE_EXPRESSION](sql-error-conditions-invalid-limit-like-expression-error-class.html) - -SQLSTATE: none assigned - -The limit like expression `` is invalid. - -For more details see [INVALID_LIMIT_LIKE_EXPRESSION](sql-error-conditions-invalid-limit-like-expression-error-class.html) - -### INVALID_NON_DETERMINISTIC_EXPRESSIONS - -SQLSTATE: none assigned - -The operator expects a deterministic expression, but the actual expression is ``. - -### INVALID_NUMERIC_LITERAL_RANGE - -SQLSTATE: none assigned - -Numeric literal `` is outside the valid range for `` with minimum value of `` and maximum value of ``. Please adjust the value accordingly. - -### [INVALID_OBSERVED_METRICS](sql-error-conditions-invalid-observed-metrics-error-class.html) - -SQLSTATE: none assigned - -Invalid observed metrics. - -For more details see [INVALID_OBSERVED_METRICS](sql-error-conditions-invalid-observed-metrics-error-class.html) - -### [INVALID_OPTIONS](sql-error-conditions-invalid-options-error-class.html) - -[SQLSTATE: 42K06](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -Invalid options: - -For more details see [INVALID_OPTIONS](sql-error-conditions-invalid-options-error-class.html) - -### INVALID_PANDAS_UDF_PLACEMENT - -[SQLSTATE: 0A000](sql-error-conditions-sqlstates.html#class-0A-feature-not-supported) - -The group aggregate pandas UDF `` cannot be invoked together with other, non-pandas aggregate functions.
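INVALID_IDENTIFIER above is the back-quoting rule in action. A minimal sketch:

```scala
import org.apache.spark.sql.SparkSession

val spark = SparkSession.builder().master("local[*]").appName("identifiers").getOrCreate()

spark.range(3).createOrReplaceTempView("demo")

// Fails: the hyphen makes my-alias an invalid unquoted identifier.
// spark.sql("SELECT id AS my-alias FROM demo").show()

// Works once the identifier is back-quoted, as the message suggests.
spark.sql("SELECT id AS `my-alias` FROM demo").show()
```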
- -### [INVALID_PARAMETER_VALUE](sql-error-conditions-invalid-parameter-value-error-class.html) - -[SQLSTATE: 22023](sql-error-conditions-sqlstates.html#class-22-data-exception) - -The value of parameter(s) `` in `` is invalid: - -For more details see [INVALID_PARAMETER_VALUE](sql-error-conditions-invalid-parameter-value-error-class.html) - -### [INVALID_PARTITION_OPERATION](sql-error-conditions-invalid-partition-operation-error-class.html) - -SQLSTATE: none assigned - -The partition command is invalid. - -For more details see [INVALID_PARTITION_OPERATION](sql-error-conditions-invalid-partition-operation-error-class.html) - -### INVALID_PROPERTY_KEY - -[SQLSTATE: 42602](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -`` is an invalid property key, please use quotes, e.g. SET ``=``. - -### INVALID_PROPERTY_VALUE - -[SQLSTATE: 42602](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -`` is an invalid property value, please use quotes, e.g. SET ``=`` - -### [INVALID_SCHEMA](sql-error-conditions-invalid-schema-error-class.html) - -[SQLSTATE: 42K07](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -The input schema `` is not a valid schema string. - -For more details see [INVALID_SCHEMA](sql-error-conditions-invalid-schema-error-class.html) - -### INVALID_SET_SYNTAX - -[SQLSTATE: 42000](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -Expected format is 'SET', 'SET key', or 'SET key=value'. If you want to include special characters in key, or include semicolon in value, please use backquotes, e.g., SET `key`=`value`. - -### INVALID_SQL_ARG - -SQLSTATE: none assigned - -The argument `` of `sql()` is invalid. Consider to replace it by a SQL literal. - -### [INVALID_SQL_SYNTAX](sql-error-conditions-invalid-sql-syntax-error-class.html) - -[SQLSTATE: 42000](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -Invalid SQL syntax: - -For more details see [INVALID_SQL_SYNTAX](sql-error-conditions-invalid-sql-syntax-error-class.html) - -### [INVALID_SUBQUERY_EXPRESSION](sql-error-conditions-invalid-subquery-expression-error-class.html) - -[SQLSTATE: 42823](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -Invalid subquery: - -For more details see [INVALID_SUBQUERY_EXPRESSION](sql-error-conditions-invalid-subquery-expression-error-class.html) - -### INVALID_TEMP_OBJ_REFERENCE - -SQLSTATE: none assigned - -Cannot create the persistent object `` of the type `` because it references to the temporary object `` of the type ``. Please make the temporary object `` persistent, or make the persistent object `` temporary. - -### [INVALID_TIME_TRAVEL_TIMESTAMP_EXPR](sql-error-conditions-invalid-time-travel-timestamp-expr-error-class.html) - -SQLSTATE: none assigned - -The time travel timestamp expression `` is invalid. - -For more details see [INVALID_TIME_TRAVEL_TIMESTAMP_EXPR](sql-error-conditions-invalid-time-travel-timestamp-expr-error-class.html) - -### INVALID_TYPED_LITERAL - -[SQLSTATE: 42604](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -The value of the typed literal `` is invalid: ``. - -### INVALID_UDF_IMPLEMENTATION - -SQLSTATE: none assigned - -Function `` does not implement ScalarFunction or AggregateFunction. - -### INVALID_URL - -SQLSTATE: none assigned - -The url is invalid: ``. If necessary set `` to "false" to bypass this error. 
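INVALID_SQL_ARG above concerns the parameterized `sql()` API; this sketch assumes the `spark.sql(text, args)` overload with named `:parameter` markers (added around Spark 3.4), which binds values that convert to SQL literals:

```scala
import org.apache.spark.sql.SparkSession

val spark = SparkSession.builder().master("local[*]").appName("sql-args").getOrCreate()

// The Int 5 converts cleanly to a SQL literal bound to :threshold; passing an
// arbitrary object with no literal form is what triggers INVALID_SQL_ARG.
spark.sql("SELECT * FROM range(10) WHERE id > :threshold", Map("threshold" -> 5)).show()
```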
- -### INVALID_USAGE_OF_STAR_OR_REGEX - -[SQLSTATE: 42000](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -Invalid usage of `` in ``. - -### INVALID_VIEW_TEXT - -SQLSTATE: none assigned - -The view `` cannot be displayed due to invalid view text: ``. This may be caused by an unauthorized modification of the view or an incorrect query syntax. Please check your query syntax and verify that the view has not been tampered with. - -### INVALID_WHERE_CONDITION - -[SQLSTATE: 42903](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -The WHERE condition `` contains invalid expressions: ``. -Rewrite the query to avoid window functions, aggregate functions, and generator functions in the WHERE clause. - -### INVALID_WINDOW_SPEC_FOR_AGGREGATION_FUNC - -SQLSTATE: none assigned - -Cannot specify ORDER BY or a window frame for ``. - -### [INVALID_WRITE_DISTRIBUTION](sql-error-conditions-invalid-write-distribution-error-class.html) - -SQLSTATE: none assigned - -The requested write distribution is invalid. - -For more details see [INVALID_WRITE_DISTRIBUTION](sql-error-conditions-invalid-write-distribution-error-class.html) - -### JOIN_CONDITION_IS_NOT_BOOLEAN_TYPE - -SQLSTATE: none assigned - -The join condition `` has the invalid type ``, expected "BOOLEAN". - -### LOAD_DATA_PATH_NOT_EXISTS - -SQLSTATE: none assigned - -LOAD DATA input path does not exist: ``. - -### LOCAL_MUST_WITH_SCHEMA_FILE - -SQLSTATE: none assigned - -LOCAL must be used together with the schema of `file`, but got: ````. - -### LOCATION_ALREADY_EXISTS - -[SQLSTATE: 42710](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -Cannot name the managed table as ``, as its associated location `` already exists. Please pick a different table name, or remove the existing location first. - -### MALFORMED_CSV_RECORD - -SQLSTATE: none assigned - -Malformed CSV record: `` - -### MALFORMED_PROTOBUF_MESSAGE - -SQLSTATE: none assigned - -Malformed Protobuf messages are detected in message deserialization. Parse Mode: ``. To process malformed protobuf message as null result, try setting the option 'mode' as 'PERMISSIVE'. - -### [MALFORMED_RECORD_IN_PARSING](sql-error-conditions-malformed-record-in-parsing-error-class.html) - -[SQLSTATE: 22023](sql-error-conditions-sqlstates.html#class-22-data-exception) - -Malformed records are detected in record parsing: ``. -Parse Mode: ``. To process malformed records as null result, try setting the option 'mode' as 'PERMISSIVE'. - -For more details see [MALFORMED_RECORD_IN_PARSING](sql-error-conditions-malformed-record-in-parsing-error-class.html) - -### MERGE_CARDINALITY_VIOLATION - -[SQLSTATE: 23K01](sql-error-conditions-sqlstates.html#class-23-integrity-constraint-violation) - -The ON search condition of the MERGE statement matched a single row from the target table with multiple rows of the source table. -This could result in the target row being operated on more than once with an update or delete operation and is not allowed. - -### MISSING_AGGREGATION - -[SQLSTATE: 42803](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -The non-aggregating expression `` is based on columns which are not participating in the GROUP BY clause. -Add the columns or the expression to the GROUP BY, aggregate the expression, or use `` if you do not care which of the values within a group is returned. 
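MISSING_AGGREGATION above hints at a function to use when any single value per group will do; the suggested name is elided in the text, so treat `any_value` as an assumption. A sketch:

```scala
import org.apache.spark.sql.SparkSession

val spark = SparkSession.builder().master("local[*]").appName("group-by").getOrCreate()

spark.sql("SELECT 1 AS k, 'x' AS v").createOrReplaceTempView("t")

// Fails with MISSING_AGGREGATION: v is neither grouped nor aggregated.
// spark.sql("SELECT k, v FROM t GROUP BY k").show()

// Works: any_value(v) returns an arbitrary value from each group.
spark.sql("SELECT k, any_value(v) AS v FROM t GROUP BY k").show()
```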
- -### [MISSING_ATTRIBUTES](sql-error-conditions-missing-attributes-error-class.html) - -SQLSTATE: none assigned - -Resolved attribute(s) `` missing from `` in operator ``. - -For more details see [MISSING_ATTRIBUTES](sql-error-conditions-missing-attributes-error-class.html) - -### MISSING_GROUP_BY - -[SQLSTATE: 42803](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -The query does not include a GROUP BY clause. Add GROUP BY or turn it into the window functions using OVER clauses. - -### MULTI_SOURCES_UNSUPPORTED_FOR_EXPRESSION - -SQLSTATE: none assigned - -The expression `` does not support more than one source. - -### MULTI_UDF_INTERFACE_ERROR - -SQLSTATE: none assigned - -Not allowed to implement multiple UDF interfaces, UDF class ``. - -### NAMED_ARGUMENTS_SUPPORT_DISABLED - -SQLSTATE: none assigned - -Cannot call function `` because named argument references are not enabled here. In this case, the named argument reference was ``. Set "spark.sql.allowNamedFunctionArguments" to "true" to turn on feature. - -### NESTED_AGGREGATE_FUNCTION - -[SQLSTATE: 42607](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -It is not allowed to use an aggregate function in the argument of another aggregate function. Please use the inner aggregate function in a sub-query. - -### NON_LAST_MATCHED_CLAUSE_OMIT_CONDITION - -[SQLSTATE: 42613](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -When there are more than one MATCHED clauses in a MERGE statement, only the last MATCHED clause can omit the condition. - -### NON_LAST_NOT_MATCHED_BY_SOURCE_CLAUSE_OMIT_CONDITION - -[SQLSTATE: 42613](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -When there are more than one NOT MATCHED BY SOURCE clauses in a MERGE statement, only the last NOT MATCHED BY SOURCE clause can omit the condition. - -### NON_LAST_NOT_MATCHED_BY_TARGET_CLAUSE_OMIT_CONDITION - -[SQLSTATE: 42613](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -When there are more than one NOT MATCHED [BY TARGET] clauses in a MERGE statement, only the last NOT MATCHED [BY TARGET] clause can omit the condition. - -### NON_LITERAL_PIVOT_VALUES - -[SQLSTATE: 42K08](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -Literal expressions required for pivot values, found ``. - -### NON_PARTITION_COLUMN - -[SQLSTATE: 42000](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -PARTITION clause cannot contain the non-partition column: ``. - -### NON_TIME_WINDOW_NOT_SUPPORTED_IN_STREAMING - -SQLSTATE: none assigned - -Window function is not supported in `` (as column ``) on streaming DataFrames/Datasets. Structured Streaming only supports time-window aggregation using the WINDOW function. (window specification: ``) - -### [NOT_ALLOWED_IN_FROM](sql-error-conditions-not-allowed-in-from-error-class.html) - -SQLSTATE: none assigned - -Not allowed in the FROM clause: - -For more details see [NOT_ALLOWED_IN_FROM](sql-error-conditions-not-allowed-in-from-error-class.html) - -### [NOT_A_CONSTANT_STRING](sql-error-conditions-not-a-constant-string-error-class.html) - -[SQLSTATE: 42601](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -The expression `` used for the routine or clause `` must be a constant STRING which is NOT NULL. 
- -For more details see [NOT_A_CONSTANT_STRING](sql-error-conditions-not-a-constant-string-error-class.html) - -### NOT_A_PARTITIONED_TABLE - -SQLSTATE: none assigned - -Operation `` is not allowed for `` because it is not a partitioned table. - -### [NOT_NULL_CONSTRAINT_VIOLATION](sql-error-conditions-not-null-constraint-violation-error-class.html) - -[SQLSTATE: 42000](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -Assigning a NULL is not allowed here. - -For more details see [NOT_NULL_CONSTRAINT_VIOLATION](sql-error-conditions-not-null-constraint-violation-error-class.html) - -### NOT_SUPPORTED_CHANGE_COLUMN - -SQLSTATE: none assigned - -ALTER TABLE ALTER/CHANGE COLUMN is not supported for changing ``'s column `` with type `` to `` with type ``.
- -### NOT_SUPPORTED_COMMAND_FOR_V2_TABLE - -[SQLSTATE: 46110](sql-error-conditions-sqlstates.html#class-46-java-ddl-1) - -`` is not supported for v2 tables. - -### NOT_SUPPORTED_COMMAND_WITHOUT_HIVE_SUPPORT - -SQLSTATE: none assigned - -`` is not supported, if you want to enable it, please set "spark.sql.catalogImplementation" to "hive". - -### [NOT_SUPPORTED_IN_JDBC_CATALOG](sql-error-conditions-not-supported-in-jdbc-catalog-error-class.html) - -[SQLSTATE: 46110](sql-error-conditions-sqlstates.html#class-46-java-ddl-1) - -Not supported command in JDBC catalog: - -For more details see [NOT_SUPPORTED_IN_JDBC_CATALOG](sql-error-conditions-not-supported-in-jdbc-catalog-error-class.html) - -### NO_DEFAULT_COLUMN_VALUE_AVAILABLE - -[SQLSTATE: 42608](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -Can't determine the default value for `` since it is not nullable and it has no default value. - -### NO_HANDLER_FOR_UDAF - -SQLSTATE: none assigned - -No handler for UDAF '``'. Use sparkSession.udf.register(...) instead. - -### NO_SQL_TYPE_IN_PROTOBUF_SCHEMA - -SQLSTATE: none assigned - -Cannot find `` in Protobuf schema. - -### NO_UDF_INTERFACE - -SQLSTATE: none assigned - -UDF class `` doesn't implement any UDF interface. - -### NULLABLE_COLUMN_OR_FIELD - -[SQLSTATE: 42000](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -Column or field `` is nullable while it's required to be non-nullable. - -### NULLABLE_ROW_ID_ATTRIBUTES - -[SQLSTATE: 42000](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -Row ID attributes cannot be nullable: ``. - -### NULL_MAP_KEY - -[SQLSTATE: 2200E](sql-error-conditions-sqlstates.html#class-22-data-exception) - -Cannot use null as map key. - -### NUMERIC_OUT_OF_SUPPORTED_RANGE - -[SQLSTATE: 22003](sql-error-conditions-sqlstates.html#class-22-data-exception) - -The value `` cannot be interpreted as a numeric since it has more than 38 digits. - -### NUMERIC_VALUE_OUT_OF_RANGE - -[SQLSTATE: 22003](sql-error-conditions-sqlstates.html#class-22-data-exception) - -`` cannot be represented as Decimal(``, ``). If necessary set `` to "false" to bypass this error, and return NULL instead. - -### NUM_COLUMNS_MISMATCH - -[SQLSTATE: 42826](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -`` can only be performed on inputs with the same number of columns, but the first input has `` columns and the `` input has `` columns. - -### NUM_TABLE_VALUE_ALIASES_MISMATCH - -SQLSTATE: none assigned - -Number of given aliases does not match number of output columns. Function name: ``; number of aliases: ``; number of output columns: ``. - -### OPERATION_CANCELED - -[SQLSTATE: HY008](sql-error-conditions-sqlstates.html#class-HY-cli-specific-condition) - -Operation has been canceled. - -### ORDER_BY_POS_OUT_OF_RANGE - -[SQLSTATE: 42805](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -ORDER BY position `` is not in select list (valid range is [1, ``]). - -### PARSE_EMPTY_STATEMENT - -[SQLSTATE: 42617](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -Syntax error, unexpected empty statement. - -### PARSE_SYNTAX_ERROR - -[SQLSTATE: 42601](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -Syntax error at or near ````.
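The NO_HANDLER_FOR_UDAF entry above points at `sparkSession.udf.register(...)`. A sketch of that route, assuming the typed `Aggregator` API and the `functions.udaf` wrapper (the aggregator and the registered name here are invented):

```scala
import org.apache.spark.sql.{Encoder, Encoders, SparkSession}
import org.apache.spark.sql.expressions.Aggregator
import org.apache.spark.sql.functions.udaf

// Trivial typed aggregator that sums longs.
object LongSum extends Aggregator[Long, Long, Long] {
  def zero: Long = 0L
  def reduce(b: Long, a: Long): Long = b + a
  def merge(b1: Long, b2: Long): Long = b1 + b2
  def finish(reduction: Long): Long = reduction
  def bufferEncoder: Encoder[Long] = Encoders.scalaLong
  def outputEncoder: Encoder[Long] = Encoders.scalaLong
}

object RegisterUdafDemo {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[*]").getOrCreate()
    import spark.implicits._
    // Register through sparkSession.udf.register(...), as the message recommends.
    spark.udf.register("long_sum", udaf(LongSum))
    Seq(1L, 2L, 3L).toDF("v").createOrReplaceTempView("t")
    spark.sql("SELECT long_sum(v) FROM t").show()  // 6
    spark.stop()
  }
}
```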
- -### PARTITIONS_ALREADY_EXIST - -[SQLSTATE: 428FT](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -Cannot ADD or RENAME TO partition(s) `` in table `` because they already exist. -Choose a different name, drop the existing partition, or add the IF NOT EXISTS clause to tolerate a pre-existing partition. - -### PARTITIONS_NOT_FOUND - -[SQLSTATE: 428FT](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -The partition(s) `` cannot be found in table ``. -Verify the partition specification and table name. -To tolerate the error on drop use ALTER TABLE … DROP IF EXISTS PARTITION. - -### PATH_ALREADY_EXISTS - -[SQLSTATE: 42K04](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -Path `` already exists. Set mode as "overwrite" to overwrite the existing path. - -### PATH_NOT_FOUND - -[SQLSTATE: 42K03](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -Path does not exist: ``. - -### PIVOT_VALUE_DATA_TYPE_MISMATCH - -[SQLSTATE: 42K09](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -Invalid pivot value '``': value data type `` does not match pivot column data type ``. - -### PLAN_VALIDATION_FAILED_RULE_EXECUTOR - -SQLSTATE: none assigned - -The input plan of `` is invalid: `` - -### PLAN_VALIDATION_FAILED_RULE_IN_BATCH - -SQLSTATE: none assigned - -Rule `` in batch `` generated an invalid plan: `` - -### PROTOBUF_DEPENDENCY_NOT_FOUND - -SQLSTATE: none assigned - -Could not find dependency: ``. - -### PROTOBUF_DESCRIPTOR_FILE_NOT_FOUND - -SQLSTATE: none assigned - -Error reading Protobuf descriptor file at path: ``. - -### PROTOBUF_FIELD_MISSING - -SQLSTATE: none assigned - -Searching for `` in Protobuf schema at `` gave `` matches. Candidates: ``. - -### PROTOBUF_FIELD_MISSING_IN_SQL_SCHEMA - -SQLSTATE: none assigned - -Found `` in Protobuf schema but there is no match in the SQL schema. - -### PROTOBUF_FIELD_TYPE_MISMATCH - -SQLSTATE: none assigned - -Type mismatch encountered for field: ``. - -### PROTOBUF_MESSAGE_NOT_FOUND - -SQLSTATE: none assigned - -Unable to locate Message `` in Descriptor. - -### PROTOBUF_TYPE_NOT_SUPPORT - -SQLSTATE: none assigned - -Protobuf type not yet supported: ``. - -### RECURSIVE_PROTOBUF_SCHEMA - -SQLSTATE: none assigned - -Found recursive reference in Protobuf schema, which can not be processed by Spark by default: ``. try setting the option `recursive.fields.max.depth` 0 to 10. Going beyond 10 levels of recursion is not allowed. - -### RECURSIVE_VIEW - -SQLSTATE: none assigned - -Recursive view `` detected (cycle: ``). - -### REF_DEFAULT_VALUE_IS_NOT_ALLOWED_IN_PARTITION - -SQLSTATE: none assigned - -References to DEFAULT column values are not allowed within the PARTITION clause. - -### RENAME_SRC_PATH_NOT_FOUND - -[SQLSTATE: 42K03](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -Failed to rename as `` was not found. - -### REPEATED_CLAUSE - -[SQLSTATE: 42614](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -The `` clause may be used at most once per `` operation. - -### REQUIRES_SINGLE_PART_NAMESPACE - -[SQLSTATE: 42K05](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -`` requires a single-part namespace, but got ``. 
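To make the PATH_ALREADY_EXISTS remediation above concrete, a small sketch (the output path is hypothetical; assumes a local `SparkSession`):

```scala
import org.apache.spark.sql.SparkSession

object OverwritePathDemo {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[*]").getOrCreate()
    import spark.implicits._
    val df = Seq(1, 2, 3).toDF("v")
    val path = "/tmp/demo-output"  // hypothetical target path

    df.write.parquet(path)
    // Writing again under the default ErrorIfExists mode raises PATH_ALREADY_EXISTS;
    // setting the mode to "overwrite" replaces the existing path instead.
    df.write.mode("overwrite").parquet(path)
    spark.stop()
  }
}
```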
- -### ROUTINE_ALREADY_EXISTS - -[SQLSTATE: 42723](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -Cannot create the function `` because it already exists. -Choose a different name, drop or replace the existing function, or add the IF NOT EXISTS clause to tolerate a pre-existing function. - -### ROUTINE_NOT_FOUND - -[SQLSTATE: 42883](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -The function `` cannot be found. Verify the spelling and correctness of the schema and catalog. -If you did not qualify the name with a schema and catalog, verify the current_schema() output, or qualify the name with the correct schema and catalog. -To tolerate the error on drop use DROP FUNCTION IF EXISTS. - -### SCALAR_SUBQUERY_IS_IN_GROUP_BY_OR_AGGREGATE_FUNCTION - -SQLSTATE: none assigned - -The correlated scalar subquery '``' is neither present in GROUP BY, nor in an aggregate function. Add it to GROUP BY using ordinal position or wrap it in `first()` (or `first_value`) if you don't care which value you get. - -### SCALAR_SUBQUERY_TOO_MANY_ROWS - -[SQLSTATE: 21000](sql-error-conditions-sqlstates.html#class-21-cardinality-violation) - -More than one row returned by a subquery used as an expression. - -### SCHEMA_ALREADY_EXISTS - -[SQLSTATE: 42P06](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -Cannot create schema `` because it already exists. -Choose a different name, drop the existing schema, or add the IF NOT EXISTS clause to tolerate pre-existing schema. - -### SCHEMA_NOT_EMPTY - -[SQLSTATE: 2BP01](sql-error-conditions-sqlstates.html#class-2B-dependent-privilege-descriptors-still-exist) - -Cannot drop a schema `` because it contains objects. -Use DROP SCHEMA ... CASCADE to drop the schema and all its objects. - -### SCHEMA_NOT_FOUND - -[SQLSTATE: 42704](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -The schema `` cannot be found. Verify the spelling and correctness of the schema and catalog. -If you did not qualify the name with a catalog, verify the current_schema() output, or qualify the name with the correct catalog. -To tolerate the error on drop use DROP SCHEMA IF EXISTS. - -### SECOND_FUNCTION_ARGUMENT_NOT_INTEGER - -[SQLSTATE: 22023](sql-error-conditions-sqlstates.html#class-22-data-exception) - -The second argument of `` function needs to be an integer. - -### SEED_EXPRESSION_IS_UNFOLDABLE - -SQLSTATE: none assigned - -The seed expression `` of the expression `` must be foldable. - -### SORT_BY_WITHOUT_BUCKETING - -SQLSTATE: none assigned - -sortBy must be used together with bucketBy. - -### SPECIFY_BUCKETING_IS_NOT_ALLOWED - -SQLSTATE: none assigned - -Cannot specify bucketing information if the table schema is not specified when creating and will be inferred at runtime. - -### SPECIFY_PARTITION_IS_NOT_ALLOWED - -SQLSTATE: none assigned - -It is not allowed to specify partition columns when the table schema is not defined. When the table schema is not provided, schema and partition columns will be inferred. - -### SQL_CONF_NOT_FOUND - -SQLSTATE: none assigned - -The SQL config `` cannot be found. Please verify that the config exists. - -### STAR_GROUP_BY_POS - -[SQLSTATE: 0A000](sql-error-conditions-sqlstates.html#class-0A-feature-not-supported) - -Star (*) is not allowed in a select list when GROUP BY an ordinal position is used. 
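The SORT_BY_WITHOUT_BUCKETING entry above is easiest to see in the DataFrameWriter API; a minimal sketch, assuming a local session with a default warehouse (the table names are hypothetical):

```scala
import org.apache.spark.sql.SparkSession

object BucketSortDemo {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[*]").getOrCreate()
    import spark.implicits._
    val df = Seq((1, "a"), (2, "b")).toDF("id", "name")

    // Raises SORT_BY_WITHOUT_BUCKETING: sort order is only defined per bucket.
    // df.write.sortBy("id").saveAsTable("t_sorted")

    // Valid: pair sortBy with bucketBy so each bucket file is written sorted.
    df.write.bucketBy(4, "id").sortBy("id").saveAsTable("t_bucketed")
    spark.stop()
  }
}
```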
- -### STATIC_PARTITION_COLUMN_IN_INSERT_COLUMN_LIST - -SQLSTATE: none assigned - -Static partition column `` is also specified in the column list. - -### STREAM_FAILED - -SQLSTATE: none assigned - -Query [id = ``, runId = ``] terminated with exception: `` - -### SUM_OF_LIMIT_AND_OFFSET_EXCEEDS_MAX_INT - -SQLSTATE: none assigned - -The sum of the LIMIT clause and the OFFSET clause must not be greater than the maximum 32-bit integer value (2,147,483,647) but found limit = ``, offset = ``. - -### TABLE_OR_VIEW_ALREADY_EXISTS - -[SQLSTATE: 42P07](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -Cannot create table or view `` because it already exists. -Choose a different name, drop or replace the existing object, or add the IF NOT EXISTS clause to tolerate pre-existing objects. - -### TABLE_OR_VIEW_NOT_FOUND - -[SQLSTATE: 42P01](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -The table or view `` cannot be found. Verify the spelling and correctness of the schema and catalog. -If you did not qualify the name with a schema, verify the current_schema() output, or qualify the name with the correct schema and catalog. -To tolerate the error on drop use DROP VIEW IF EXISTS or DROP TABLE IF EXISTS. - -### TABLE_VALUED_FUNCTION_TOO_MANY_TABLE_ARGUMENTS - -SQLSTATE: none assigned - -There are too many table arguments for table-valued function. It allows one table argument, but got: ``. If you want to allow it, please set "spark.sql.allowMultipleTableArguments.enabled" to "true" - -### TASK_WRITE_FAILED - -SQLSTATE: none assigned - -Task failed while writing rows to ``. - -### TEMP_TABLE_OR_VIEW_ALREADY_EXISTS - -[SQLSTATE: 42P07](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -Cannot create the temporary view `` because it already exists. -Choose a different name, drop or replace the existing view, or add the IF NOT EXISTS clause to tolerate pre-existing views. - -### TEMP_VIEW_NAME_TOO_MANY_NAME_PARTS - -[SQLSTATE: 428EK](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -CREATE TEMPORARY VIEW or the corresponding Dataset APIs only accept single-part view names, but got: ``. - -### TOO_MANY_ARRAY_ELEMENTS - -[SQLSTATE: 54000](sql-error-conditions-sqlstates.html#class-54-program-limit-exceeded) - -Cannot initialize array with `` elements of size ``. - -### UDTF_ALIAS_NUMBER_MISMATCH - -SQLSTATE: none assigned - -The number of aliases supplied in the AS clause does not match the number of columns output by the UDTF. Expected `` aliases, but got ``. Please ensure that the number of aliases provided matches the number of columns output by the UDTF. - -### UNABLE_TO_ACQUIRE_MEMORY - -[SQLSTATE: 53200](sql-error-conditions-sqlstates.html#class-53-insufficient-resources) - -Unable to acquire `` bytes of memory, got ``. - -### UNABLE_TO_CONVERT_TO_PROTOBUF_MESSAGE_TYPE - -SQLSTATE: none assigned - -Unable to convert SQL type `` to Protobuf type ``. - -### UNABLE_TO_INFER_SCHEMA - -[SQLSTATE: 42KD9](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -Unable to infer schema for ``. It must be specified manually. - -### UNBOUND_SQL_PARAMETER - -[SQLSTATE: 42P02](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -Found the unbound parameter: ``. Please, fix `args` and provide a mapping of the parameter to a SQL literal. 
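For the UNBOUND_SQL_PARAMETER entry above, a sketch of supplying `args`, assuming the parameterized `spark.sql(sqlText, args: Map[String, Any])` overload present in recent Spark releases (the marker name `threshold` is invented):

```scala
import org.apache.spark.sql.SparkSession

object BoundParameterDemo {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[*]").getOrCreate()

    // Raises UNBOUND_SQL_PARAMETER: the :threshold marker has no entry in `args`.
    // spark.sql("SELECT :threshold AS t").show()

    // Bind the named parameter marker to a literal through the args map.
    spark.sql("SELECT :threshold AS t", args = Map("threshold" -> 10)).show()
    spark.stop()
  }
}
```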
- -### UNCLOSED_BRACKETED_COMMENT - -[SQLSTATE: 42601](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -Found an unclosed bracketed comment. Please, append */ at the end of the comment. - -### UNEXPECTED_INPUT_TYPE - -[SQLSTATE: 42K09](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -Parameter `` of function `` requires the `` type, however `` has the type ``. - -### UNKNOWN_PROTOBUF_MESSAGE_TYPE - -SQLSTATE: none assigned - -Attempting to treat `` as a Message, but it was ``. - -### UNPIVOT_REQUIRES_ATTRIBUTES - -[SQLSTATE: 42K0A](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -UNPIVOT requires all given `` expressions to be columns when no `` expressions are given. These are not columns: [``]. - -### UNPIVOT_REQUIRES_VALUE_COLUMNS - -[SQLSTATE: 42K0A](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -At least one value column needs to be specified for UNPIVOT, all columns specified as ids. - -### UNPIVOT_VALUE_DATA_TYPE_MISMATCH - -[SQLSTATE: 42K09](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -Unpivot value columns must share a least common type, some types do not: [``]. - -### UNPIVOT_VALUE_SIZE_MISMATCH - -[SQLSTATE: 428C4](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -All unpivot value columns must have the same size as there are value column names (``). - -### UNRECOGNIZED_SQL_TYPE - -[SQLSTATE: 42704](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -Unrecognized SQL type - name: ``, id: ``. - -### UNRESOLVABLE_TABLE_VALUED_FUNCTION - -SQLSTATE: none assigned - -Could not resolve `` to a table-valued function. Please make sure that `` is defined as a table-valued function and that all required parameters are provided correctly. If `` is not defined, please create the table-valued function before using it. For more information about defining table-valued functions, please refer to the Apache Spark documentation. - -### UNRESOLVED_ALL_IN_GROUP_BY - -[SQLSTATE: 42803](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -Cannot infer grouping columns for GROUP BY ALL based on the select clause. Please explicitly specify the grouping columns. - -### [UNRESOLVED_COLUMN](sql-error-conditions-unresolved-column-error-class.html) - -[SQLSTATE: 42703](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -A column or function parameter with name `` cannot be resolved. - -For more details see [UNRESOLVED_COLUMN](sql-error-conditions-unresolved-column-error-class.html) - -### [UNRESOLVED_FIELD](sql-error-conditions-unresolved-field-error-class.html) - -[SQLSTATE: 42703](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -A field with name `` cannot be resolved with the struct-type column ``. - -For more details see [UNRESOLVED_FIELD](sql-error-conditions-unresolved-field-error-class.html) - -### [UNRESOLVED_MAP_KEY](sql-error-conditions-unresolved-map-key-error-class.html) - -[SQLSTATE: 42703](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -Cannot resolve column `` as a map key. If the key is a string literal, add the single quotes '' around it. 
- -For more details see [UNRESOLVED_MAP_KEY](sql-error-conditions-unresolved-map-key-error-class.html) - -### UNRESOLVED_ROUTINE - -[SQLSTATE: 42883](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -Cannot resolve function `` on search path ``. - -### UNRESOLVED_USING_COLUMN_FOR_JOIN - -[SQLSTATE: 42703](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -USING column `` cannot be resolved on the `` side of the join. The ``-side columns: [``]. - -### UNSET_NONEXISTENT_PROPERTIES - -SQLSTATE: none assigned - -Attempted to unset non-existent properties [``] in table ``.
- -### [UNSUPPORTED_ADD_FILE](sql-error-conditions-unsupported-add-file-error-class.html) - -SQLSTATE: none assigned - -Don't support add file. - -For more details see [UNSUPPORTED_ADD_FILE](sql-error-conditions-unsupported-add-file-error-class.html) - -### UNSUPPORTED_ARROWTYPE - -[SQLSTATE: 0A000](sql-error-conditions-sqlstates.html#class-0A-feature-not-supported) - -Unsupported arrow type ``. - -### UNSUPPORTED_CHAR_OR_VARCHAR_AS_STRING - -SQLSTATE: none assigned - -The char/varchar type can't be used in the table schema. If you want Spark treat them as string type as same as Spark 3.0 and earlier, please set "spark.sql.legacy.charVarcharAsString" to "true". - -### UNSUPPORTED_DATASOURCE_FOR_DIRECT_QUERY - -SQLSTATE: none assigned - -Unsupported data source type for direct query on files: `` - -### UNSUPPORTED_DATATYPE - -[SQLSTATE: 0A000](sql-error-conditions-sqlstates.html#class-0A-feature-not-supported) - -Unsupported data type ``. - -### UNSUPPORTED_DATA_SOURCE_FOR_DIRECT_QUERY - -SQLSTATE: none assigned - -The direct query on files does not support the data source type: ``. Please try a different data source type or consider using a different query method. - -### UNSUPPORTED_DATA_TYPE_FOR_DATASOURCE - -SQLSTATE: none assigned - -The `` datasource doesn't support the column `` of the type ``. - -### [UNSUPPORTED_DEFAULT_VALUE](sql-error-conditions-unsupported-default-value-error-class.html) - -SQLSTATE: none assigned - -DEFAULT column values is not supported. - -For more details see [UNSUPPORTED_DEFAULT_VALUE](sql-error-conditions-unsupported-default-value-error-class.html) - -### [UNSUPPORTED_DESERIALIZER](sql-error-conditions-unsupported-deserializer-error-class.html) - -[SQLSTATE: 0A000](sql-error-conditions-sqlstates.html#class-0A-feature-not-supported) - -The deserializer is not supported: - -For more details see [UNSUPPORTED_DESERIALIZER](sql-error-conditions-unsupported-deserializer-error-class.html) - -### UNSUPPORTED_EXPRESSION_GENERATED_COLUMN - -SQLSTATE: none assigned - -Cannot create generated column `` with generation expression `` because ``. - -### UNSUPPORTED_EXPR_FOR_OPERATOR - -SQLSTATE: none assigned - -A query operator contains one or more unsupported expressions. Consider to rewrite it to avoid window functions, aggregate functions, and generator functions in the WHERE clause. -Invalid expressions: [``] - -### UNSUPPORTED_EXPR_FOR_WINDOW - -[SQLSTATE: 42P20](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -Expression `` not supported within a window function. - -### [UNSUPPORTED_FEATURE](sql-error-conditions-unsupported-feature-error-class.html) - -[SQLSTATE: 0A000](sql-error-conditions-sqlstates.html#class-0A-feature-not-supported) - -The feature is not supported: - -For more details see [UNSUPPORTED_FEATURE](sql-error-conditions-unsupported-feature-error-class.html) - -### [UNSUPPORTED_GENERATOR](sql-error-conditions-unsupported-generator-error-class.html) - -[SQLSTATE: 0A000](sql-error-conditions-sqlstates.html#class-0A-feature-not-supported) - -The generator is not supported: - -For more details see [UNSUPPORTED_GENERATOR](sql-error-conditions-unsupported-generator-error-class.html) - -### UNSUPPORTED_GROUPING_EXPRESSION - -SQLSTATE: none assigned - -grouping()/grouping_id() can only be used with GroupingSets/Cube/Rollup. - -### [UNSUPPORTED_INSERT](sql-error-conditions-unsupported-insert-error-class.html) - -SQLSTATE: none assigned - -Can't insert into the target.
- -For more details see [UNSUPPORTED_INSERT](sql-error-conditions-unsupported-insert-error-class.html) - -### [UNSUPPORTED_MERGE_CONDITION](sql-error-conditions-unsupported-merge-condition-error-class.html) - -SQLSTATE: none assigned - -MERGE operation contains unsupported `` condition. - -For more details see [UNSUPPORTED_MERGE_CONDITION](sql-error-conditions-unsupported-merge-condition-error-class.html) - -### [UNSUPPORTED_OVERWRITE](sql-error-conditions-unsupported-overwrite-error-class.html) - -SQLSTATE: none assigned - -Can't overwrite the target that is also being read from. - -For more details see [UNSUPPORTED_OVERWRITE](sql-error-conditions-unsupported-overwrite-error-class.html) - -### [UNSUPPORTED_SAVE_MODE](sql-error-conditions-unsupported-save-mode-error-class.html) - -SQLSTATE: none assigned - -The save mode `` is not supported for: - -For more details see [UNSUPPORTED_SAVE_MODE](sql-error-conditions-unsupported-save-mode-error-class.html) - -### [UNSUPPORTED_SUBQUERY_EXPRESSION_CATEGORY](sql-error-conditions-unsupported-subquery-expression-category-error-class.html) - -[SQLSTATE: 0A000](sql-error-conditions-sqlstates.html#class-0A-feature-not-supported) - -Unsupported subquery expression: - -For more details see [UNSUPPORTED_SUBQUERY_EXPRESSION_CATEGORY](sql-error-conditions-unsupported-subquery-expression-category-error-class.html) - -### UNSUPPORTED_TYPED_LITERAL - -[SQLSTATE: 0A000](sql-error-conditions-sqlstates.html#class-0A-feature-not-supported) - -Literals of the type `` are not supported. Supported types are ``. - -### UNTYPED_SCALA_UDF - -SQLSTATE: none assigned - -You're using untyped Scala UDF, which does not have the input type information. Spark may blindly pass null to the Scala closure with primitive-type argument, and the closure will see the default value of the Java type for the null argument, e.g. `udf((x: Int) => x, IntegerType)`, the result is 0 for null input. To get rid of this error, you could: -1. use typed Scala UDF APIs(without return type parameter), e.g. `udf((x: Int) => x)`. -2. use Java UDF APIs, e.g. `udf(new UDF1[String, Integer] { override def call(s: String): Integer = s.length() }, IntegerType)`, if input types are all non primitive. -3. set "spark.sql.legacy.allowUntypedScalaUDF" to "true" and use this API with caution. - -### VIEW_ALREADY_EXISTS - -[SQLSTATE: 42P07](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -Cannot create view `` because it already exists. -Choose a different name, drop or replace the existing object, or add the IF NOT EXISTS clause to tolerate pre-existing objects. - -### VIEW_NOT_FOUND - -[SQLSTATE: 42P01](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -The view `` cannot be found. Verify the spelling and correctness of the schema and catalog. -If you did not qualify the name with a schema, verify the current_schema() output, or qualify the name with the correct schema and catalog. -To tolerate the error on drop use DROP VIEW IF EXISTS. - -### WINDOW_FUNCTION_AND_FRAME_MISMATCH - -SQLSTATE: none assigned - -`` function can only be evaluated in an ordered row-based window frame with a single offset: ``. - -### WINDOW_FUNCTION_WITHOUT_OVER_CLAUSE - -SQLSTATE: none assigned - -Window function `` requires an OVER clause. - -### WRITE_STREAM_NOT_ALLOWED - -SQLSTATE: none assigned - -`writeStream` can be called only on streaming Dataset/DataFrame. 
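The UNTYPED_SCALA_UDF entry above lists three ways out; a sketch of the first (typed) option, assuming a nullable integer column:

```scala
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.udf

object TypedUdfDemo {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[*]").getOrCreate()
    import spark.implicits._
    val df = Seq(Some(1), None).toDF("x")

    // The untyped form udf((x: Int) => x + 1, IntegerType) triggers UNTYPED_SCALA_UDF,
    // because Spark cannot tell that null must not reach the primitive argument.
    // The typed form keeps the input type, so the NULL row yields NULL, not 0:
    val plusOne = udf((x: Int) => x + 1)
    df.select(plusOne($"x").as("y")).show()
    spark.stop()
  }
}
```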
- -### WRONG_COMMAND_FOR_OBJECT_TYPE - -SQLSTATE: none assigned - -The operation `` requires a ``. But `` is a ``. Use `` instead. - -### [WRONG_NUM_ARGS](sql-error-conditions-wrong-num-args-error-class.html) - -[SQLSTATE: 42605](sql-error-conditions-sqlstates.html#class-42-syntax-error-or-access-rule-violation) - -The `` requires `` parameters but the actual number is ``. - -For more details see [WRONG_NUM_ARGS](sql-error-conditions-wrong-num-args-error-class.html) - - From 2426d91750c0353af6da3907de092254dc418545 Mon Sep 17 00:00:00 2001 From: Richard Yu Date: Mon, 17 Jul 2023 09:46:46 -0700 Subject: [PATCH 3/5] Addressing nits --- .../sql/catalyst/expressions/generators.scala | 6 + .../expressions/maskExpressions.scala | 137 +++++++++--------- 2 files changed, 74 insertions(+), 69 deletions(-) diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/generators.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/generators.scala index 9a6c2ced1ea3c..4df5084a41166 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/generators.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/generators.scala @@ -438,6 +438,9 @@ trait ExplodeGeneratorBuilderBase extends GeneratorBuilder { > SELECT _FUNC_(collection => array(10, 20)); 10 20 + > SELECT * FROM _FUNC_(collection => array(10, 20)); + 10 + 20 """, since = "1.0.0", group = "generator_funcs") @@ -461,6 +464,9 @@ object ExplodeExpressionBuilder extends ExpressionBuilder { > SELECT _FUNC_(collection => array(10, 20)); 10 20 + > SELECT * FROM _FUNC_(collection => array(10, 20)); + 10 + 20 """, since = "1.0.0", group = "generator_funcs") diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/maskExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/maskExpressions.scala index 64fa5c194db25..752f937ecf3b0 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/maskExpressions.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/maskExpressions.scala @@ -27,6 +27,74 @@ import org.apache.spark.sql.errors.QueryErrorsBase import org.apache.spark.sql.types.{AbstractDataType, DataType, StringType} import org.apache.spark.unsafe.types.UTF8String +// scalastyle:off line.size.limit +@ExpressionDescription( + usage = + """_FUNC_(input[, upperChar, lowerChar, digitChar, otherChar]) - masks the given string value. + The function replaces characters with 'X' or 'x', and numbers with 'n'. + This can be useful for creating copies of tables with sensitive information removed. + """, + arguments = """ + Arguments: + * input - string value to mask. Supported types: STRING, VARCHAR, CHAR + * upperChar - character to replace upper-case characters with. Specify NULL to retain original character. Default value: 'X' + * lowerChar - character to replace lower-case characters with. Specify NULL to retain original character. Default value: 'x' + * digitChar - character to replace digit characters with. Specify NULL to retain original character. Default value: 'n' + * otherChar - character to replace all other characters with. Specify NULL to retain original character. 
Default value: NULL + """, + examples = """ + Examples: + > SELECT _FUNC_('abcd-EFGH-8765-4321'); + xxxx-XXXX-nnnn-nnnn + > SELECT _FUNC_('abcd-EFGH-8765-4321', 'Q'); + xxxx-QQQQ-nnnn-nnnn + > SELECT _FUNC_('AbCD123-@$#', 'Q', 'q'); + QqQQnnn-@$# + > SELECT _FUNC_('AbCD123-@$#'); + XxXXnnn-@$# + > SELECT _FUNC_('AbCD123-@$#', 'Q'); + QxQQnnn-@$# + > SELECT _FUNC_('AbCD123-@$#', 'Q', 'q'); + QqQQnnn-@$# + > SELECT _FUNC_('AbCD123-@$#', 'Q', 'q', 'd'); + QqQQddd-@$# + > SELECT _FUNC_('AbCD123-@$#', 'Q', 'q', 'd', 'o'); + QqQQdddoooo + > SELECT _FUNC_('AbCD123-@$#', NULL, 'q', 'd', 'o'); + AqCDdddoooo + > SELECT _FUNC_('AbCD123-@$#', NULL, NULL, 'd', 'o'); + AbCDdddoooo + > SELECT _FUNC_('AbCD123-@$#', NULL, NULL, NULL, 'o'); + AbCD123oooo + > SELECT _FUNC_(NULL, NULL, NULL, NULL, 'o'); + NULL + > SELECT _FUNC_(NULL); + NULL + > SELECT _FUNC_('AbCD123-@$#', NULL, NULL, NULL, NULL); + AbCD123-@$# + """, + since = "3.4.0", + group = "string_funcs") +// scalastyle:on line.size.limit +object MaskExpressionBuilder extends ExpressionBuilder { + override def functionSignature: Option[FunctionSignature] = { + val strArg = NamedArgument("str") + val upperCharArg = NamedArgument("upperChar", Some(Literal(Mask.MASKED_UPPERCASE))) + val lowerCharArg = NamedArgument("lowerChar", Some(Literal(Mask.MASKED_LOWERCASE))) + val digitCharArg = NamedArgument("digitChar", Some(Literal(Mask.MASKED_DIGIT))) + val otherCharArg = NamedArgument( + "otherChar", + Some(Literal(Mask.MASKED_IGNORE, StringType))) + val functionSignature: FunctionSignature = FunctionSignature(Seq( + strArg, upperCharArg, lowerCharArg, digitCharArg, otherCharArg)) + Some(functionSignature) + } + + override def build(funcName: String, expressions: Seq[Expression]): Expression = { + new Mask(expressions(0), expressions(1), expressions(2), expressions(3), expressions(4)) + } +} + case class Mask( input: Expression, upperChar: Expression, @@ -273,72 +341,3 @@ object Mask { } } } - -// scalastyle:off line.size.limit -@ExpressionDescription( - usage = - """_FUNC_(input[, upperChar, lowerChar, digitChar, otherChar]) - masks the given string value. - The function replaces characters with 'X' or 'x', and numbers with 'n'. - This can be useful for creating copies of tables with sensitive information removed. - """, - arguments = """ - Arguments: - * input - string value to mask. Supported types: STRING, VARCHAR, CHAR - * upperChar - character to replace upper-case characters with. Specify NULL to retain original character. Default value: 'X' - * lowerChar - character to replace lower-case characters with. Specify NULL to retain original character. Default value: 'x' - * digitChar - character to replace digit characters with. Specify NULL to retain original character. Default value: 'n' - * otherChar - character to replace all other characters with. Specify NULL to retain original character. 
Default value: NULL - """, - examples = """ - Examples: - > SELECT _FUNC_('abcd-EFGH-8765-4321'); - xxxx-XXXX-nnnn-nnnn - > SELECT _FUNC_('abcd-EFGH-8765-4321', 'Q'); - xxxx-QQQQ-nnnn-nnnn - > SELECT _FUNC_('AbCD123-@$#', 'Q', 'q'); - QqQQnnn-@$# - > SELECT _FUNC_('AbCD123-@$#'); - XxXXnnn-@$# - > SELECT _FUNC_('AbCD123-@$#', 'Q'); - QxQQnnn-@$# - > SELECT _FUNC_('AbCD123-@$#', 'Q', 'q'); - QqQQnnn-@$# - > SELECT _FUNC_('AbCD123-@$#', 'Q', 'q', 'd'); - QqQQddd-@$# - > SELECT _FUNC_('AbCD123-@$#', 'Q', 'q', 'd', 'o'); - QqQQdddoooo - > SELECT _FUNC_('AbCD123-@$#', NULL, 'q', 'd', 'o'); - AqCDdddoooo - > SELECT _FUNC_('AbCD123-@$#', NULL, NULL, 'd', 'o'); - AbCDdddoooo - > SELECT _FUNC_('AbCD123-@$#', NULL, NULL, NULL, 'o'); - AbCD123oooo - > SELECT _FUNC_(NULL, NULL, NULL, NULL, 'o'); - NULL - > SELECT _FUNC_(NULL); - NULL - > SELECT _FUNC_('AbCD123-@$#', NULL, NULL, NULL, NULL); - AbCD123-@$# - """, - since = "3.4.0", - group = "string_funcs") -// scalastyle:on line.size.limit -object MaskExpressionBuilder extends ExpressionBuilder { - override def functionSignature: Option[FunctionSignature] = { - val strArg = NamedArgument("str") - val upperCharArg = NamedArgument("upperChar", Some(Literal(Mask.MASKED_UPPERCASE))) - val lowerCharArg = NamedArgument("lowerChar", Some(Literal(Mask.MASKED_LOWERCASE))) - val digitCharArg = NamedArgument("digitChar", Some(Literal(Mask.MASKED_DIGIT))) - val otherCharArg = NamedArgument( - "otherChar", - Some(Literal(Mask.MASKED_IGNORE, StringType))) - val functionSignature: FunctionSignature = FunctionSignature(Seq( - strArg, upperCharArg, lowerCharArg, digitCharArg, otherCharArg)) - Some(functionSignature) - } - - override def build(funcName: String, expressions: Seq[Expression]): Expression = { - new Mask(expressions(0), expressions(1), expressions(2), expressions(3), expressions(4)) - } -} - From bd78d1df7cac6d47921fac6e9e520e67881470ab Mon Sep 17 00:00:00 2001 From: Richard Yu Date: Mon, 17 Jul 2023 11:09:40 -0700 Subject: [PATCH 4/5] Adding some documentation --- .../catalyst/expressions/aggregate/CountMinSketchAgg.scala | 1 + .../apache/spark/sql/catalyst/expressions/generators.scala | 7 ++++++- .../spark/sql/catalyst/expressions/maskExpressions.scala | 1 + .../sql/catalyst/plans/logical/FunctionBuilderBase.scala | 4 +--- 4 files changed, 9 insertions(+), 4 deletions(-) diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/CountMinSketchAgg.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/CountMinSketchAgg.scala index f450d0781f499..0e69719d7a653 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/CountMinSketchAgg.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/CountMinSketchAgg.scala @@ -219,6 +219,7 @@ object CountMinSketchAggExpressionBuilder extends ExpressionBuilder { )) override def functionSignature: Option[FunctionSignature] = Some(defaultFunctionSignature) override def build(funcName: String, expressions: Seq[Expression]): Expression = { + assert(expressions.size == 4) new CountMinSketchAgg(expressions(0), expressions(1), expressions(2), expressions(3)) } } diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/generators.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/generators.scala index 4df5084a41166..45d836fc231ba 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/generators.scala +++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/generators.scala @@ -423,8 +423,10 @@ case class Explode(child: Expression) extends ExplodeBase { trait ExplodeGeneratorBuilderBase extends GeneratorBuilder { override def functionSignature: Option[FunctionSignature] = Some(FunctionSignature(Seq(NamedArgument("collection")))) - override def buildGenerator(funcName: String, expressions: Seq[Expression]): Generator = + override def buildGenerator(funcName: String, expressions: Seq[Expression]): Generator = { + assert(expressions.size == 1) Explode(expressions(0)) + } } // scalastyle:off line.size.limit @@ -512,6 +514,9 @@ object ExplodeOuterGeneratorBuilder extends ExplodeGeneratorBuilderBase { > SELECT _FUNC_(array(10,20)); 0 10 1 20 + > SELECT * FROM _FUNC_(array(10,20)); + 0 10 + 1 20 """, since = "2.0.0", group = "generator_funcs") diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/maskExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/maskExpressions.scala index 752f937ecf3b0..02d9b933212bc 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/maskExpressions.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/maskExpressions.scala @@ -91,6 +91,7 @@ object MaskExpressionBuilder extends ExpressionBuilder { } override def build(funcName: String, expressions: Seq[Expression]): Expression = { + assert(expressions.size == 5) new Mask(expressions(0), expressions(1), expressions(2), expressions(3), expressions(4)) } } diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/FunctionBuilderBase.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/FunctionBuilderBase.scala index 04de88acc09e4..2a5b7ecae4472 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/FunctionBuilderBase.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/FunctionBuilderBase.scala @@ -165,9 +165,7 @@ object NamedArgumentsSupport { * @param default The default value of the argument. If the default is none, then that means the * argument is required. If no argument is provided, an exception is thrown. */ -case class NamedArgument( - name: String, - default: Option[Expression] = None) +case class NamedArgument(name: String, default: Option[Expression] = None) /** * Represents a method signature and the list of arguments it receives as input. 
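To summarize how the pieces touched by this commit fit together, here is a condensed, hypothetical builder sketch using `NamedArgument` and `FunctionSignature` as they stand at this point in the series (the next commit renames `NamedArgument` to `InputParameter`); the `greet` function itself is invented:

```scala
import org.apache.spark.sql.catalyst.analysis.ExpressionBuilder
import org.apache.spark.sql.catalyst.expressions.{Concat, Expression, Literal}
import org.apache.spark.sql.catalyst.plans.logical.{FunctionSignature, NamedArgument}

// Hypothetical builder: one required parameter and one defaulted parameter,
// so callers can write greet('hi') or greet('hi', punctuation => '?').
object GreetExpressionBuilder extends ExpressionBuilder {
  override def functionSignature: Option[FunctionSignature] = Some(
    FunctionSignature(Seq(
      NamedArgument("text"),                              // no default: required
      NamedArgument("punctuation", Some(Literal("!")))))) // default: optional

  override def build(funcName: String, expressions: Seq[Expression]): Expression = {
    // By the time build runs, defaultRearrange has put the arguments in
    // positional order and filled in any missing defaults.
    assert(expressions.size == 2)
    Concat(Seq(expressions(0), expressions(1)))
  }
}
```

The `assert` mirrors the ones added in this commit: after rearrangement the builder can index into `expressions` safely.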
From b7277fdf58599b4fe7832f021488c7a5aab5381e Mon Sep 17 00:00:00 2001 From: Richard Yu Date: Mon, 17 Jul 2023 12:14:49 -0700 Subject: [PATCH 5/5] Making consistent names --- .../aggregate/CountMinSketchAgg.scala | 10 +++++----- .../sql/catalyst/expressions/generators.scala | 6 +++--- .../catalyst/expressions/maskExpressions.scala | 12 ++++++------ .../plans/logical/FunctionBuilderBase.scala | 12 ++++++------ ...e.scala => NamedParameterFunctionSuite.scala} | 16 ++++++++-------- 5 files changed, 28 insertions(+), 28 deletions(-) rename sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/{NamedArgumentFunctionSuite.scala => NamedParameterFunctionSuite.scala} (93%) diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/CountMinSketchAgg.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/CountMinSketchAgg.scala index 0e69719d7a653..b7988922bd79b 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/CountMinSketchAgg.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/CountMinSketchAgg.scala @@ -21,7 +21,7 @@ import org.apache.spark.sql.catalyst.InternalRow import org.apache.spark.sql.catalyst.analysis.{ExpressionBuilder, TypeCheckResult} import org.apache.spark.sql.catalyst.analysis.TypeCheckResult.{DataTypeMismatch, TypeCheckSuccess} import org.apache.spark.sql.catalyst.expressions.{ExpectsInputTypes, Expression, ExpressionDescription, Literal} -import org.apache.spark.sql.catalyst.plans.logical.{FunctionSignature, NamedArgument} +import org.apache.spark.sql.catalyst.plans.logical.{FunctionSignature, InputParameter} import org.apache.spark.sql.catalyst.trees.QuaternaryLike import org.apache.spark.sql.errors.QueryErrorsBase import org.apache.spark.sql.types._ @@ -212,10 +212,10 @@ case class CountMinSketchAgg( // scalastyle:on line.size.limit object CountMinSketchAggExpressionBuilder extends ExpressionBuilder { final val defaultFunctionSignature = FunctionSignature(Seq( - NamedArgument("column"), - NamedArgument("epsilon"), - NamedArgument("confidence"), - NamedArgument("seed") + InputParameter("column"), + InputParameter("epsilon"), + InputParameter("confidence"), + InputParameter("seed") )) override def functionSignature: Option[FunctionSignature] = Some(defaultFunctionSignature) override def build(funcName: String, expressions: Seq[Expression]): Expression = { diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/generators.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/generators.scala index 45d836fc231ba..afaaf07d2726b 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/generators.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/generators.scala @@ -26,7 +26,7 @@ import org.apache.spark.sql.catalyst.analysis.TypeCheckResult.DataTypeMismatch import org.apache.spark.sql.catalyst.expressions.Cast._ import org.apache.spark.sql.catalyst.expressions.codegen._ import org.apache.spark.sql.catalyst.expressions.codegen.Block._ -import org.apache.spark.sql.catalyst.plans.logical.{FunctionSignature, NamedArgument} +import org.apache.spark.sql.catalyst.plans.logical.{FunctionSignature, InputParameter} import org.apache.spark.sql.catalyst.trees.TreePattern.{GENERATOR, TreePattern} import org.apache.spark.sql.catalyst.util.{ArrayData, MapData} import org.apache.spark.sql.catalyst.util.SQLKeywordUtils._ @@ -422,7 
+422,7 @@ case class Explode(child: Expression) extends ExplodeBase { trait ExplodeGeneratorBuilderBase extends GeneratorBuilder { override def functionSignature: Option[FunctionSignature] = - Some(FunctionSignature(Seq(NamedArgument("collection")))) + Some(FunctionSignature(Seq(InputParameter("collection")))) override def buildGenerator(funcName: String, expressions: Seq[Expression]): Generator = { assert(expressions.size == 1) Explode(expressions(0)) @@ -449,7 +449,7 @@ trait ExplodeGeneratorBuilderBase extends GeneratorBuilder { // scalastyle:on line.size.limit object ExplodeExpressionBuilder extends ExpressionBuilder { override def functionSignature: Option[FunctionSignature] = - Some(FunctionSignature(Seq(NamedArgument("collection")))) + Some(FunctionSignature(Seq(InputParameter("collection")))) override def build(funcName: String, expressions: Seq[Expression]) : Expression = Explode(expressions(0)) diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/maskExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/maskExpressions.scala index 02d9b933212bc..61a96ff5ff951 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/maskExpressions.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/maskExpressions.scala @@ -22,7 +22,7 @@ import org.apache.spark.sql.catalyst.analysis.{ExpressionBuilder, TypeCheckResul import org.apache.spark.sql.catalyst.analysis.TypeCheckResult.DataTypeMismatch import org.apache.spark.sql.catalyst.expressions.codegen._ import org.apache.spark.sql.catalyst.expressions.codegen.Block._ -import org.apache.spark.sql.catalyst.plans.logical.{FunctionSignature, NamedArgument} +import org.apache.spark.sql.catalyst.plans.logical.{FunctionSignature, InputParameter} import org.apache.spark.sql.errors.QueryErrorsBase import org.apache.spark.sql.types.{AbstractDataType, DataType, StringType} import org.apache.spark.unsafe.types.UTF8String @@ -78,11 +78,11 @@ import org.apache.spark.unsafe.types.UTF8String // scalastyle:on line.size.limit object MaskExpressionBuilder extends ExpressionBuilder { override def functionSignature: Option[FunctionSignature] = { - val strArg = NamedArgument("str") - val upperCharArg = NamedArgument("upperChar", Some(Literal(Mask.MASKED_UPPERCASE))) - val lowerCharArg = NamedArgument("lowerChar", Some(Literal(Mask.MASKED_LOWERCASE))) - val digitCharArg = NamedArgument("digitChar", Some(Literal(Mask.MASKED_DIGIT))) - val otherCharArg = NamedArgument( + val strArg = InputParameter("str") + val upperCharArg = InputParameter("upperChar", Some(Literal(Mask.MASKED_UPPERCASE))) + val lowerCharArg = InputParameter("lowerChar", Some(Literal(Mask.MASKED_LOWERCASE))) + val digitCharArg = InputParameter("digitChar", Some(Literal(Mask.MASKED_DIGIT))) + val otherCharArg = InputParameter( "otherChar", Some(Literal(Mask.MASKED_IGNORE, StringType))) val functionSignature: FunctionSignature = FunctionSignature(Seq( diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/FunctionBuilderBase.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/FunctionBuilderBase.scala index 2a5b7ecae4472..4a2b9eae98100 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/FunctionBuilderBase.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/FunctionBuilderBase.scala @@ -65,13 +65,13 @@ trait FunctionBuilderBase[T] { expectedSignature: 
FunctionSignature, providedArguments: Seq[Expression], functionName: String) : Seq[Expression] = { - NamedArgumentsSupport.defaultRearrange(expectedSignature, providedArguments, functionName) + NamedParametersSupport.defaultRearrange(expectedSignature, providedArguments, functionName) } def build(funcName: String, expressions: Seq[Expression]): T } -object NamedArgumentsSupport { +object NamedParametersSupport { /** * This method is the default routine which rearranges the arguments in positional order according * to the function signature provided. This will also fill in any default values that exists for @@ -87,14 +87,14 @@ object NamedArgumentsSupport { functionSignature: FunctionSignature, args: Seq[Expression], functionName: String): Seq[Expression] = { - val parameters: Seq[NamedArgument] = functionSignature.parameters + val parameters: Seq[InputParameter] = functionSignature.parameters if (parameters.dropWhile(_.default.isEmpty).exists(_.default.isEmpty)) { throw QueryCompilationErrors.unexpectedRequiredParameterInFunctionSignature( functionName, functionSignature) } val (positionalArgs, namedArgs) = args.span(!_.isInstanceOf[NamedArgumentExpression]) - val namedParameters: Seq[NamedArgument] = parameters.drop(positionalArgs.size) + val namedParameters: Seq[InputParameter] = parameters.drop(positionalArgs.size) // The following loop checks for the following: // 1. Unrecognized parameter names @@ -165,7 +165,7 @@ object NamedArgumentsSupport { * @param default The default value of the argument. If the default is none, then that means the * argument is required. If no argument is provided, an exception is thrown. */ -case class NamedArgument(name: String, default: Option[Expression] = None) +case class InputParameter(name: String, default: Option[Expression] = None) /** * Represents a method signature and the list of arguments it receives as input. 
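With the renames in place, the rearrangement entry point reads as below; a condensed sketch in the spirit of the test suite (the function name "mask" is only a label here):

```scala
import org.apache.spark.sql.catalyst.expressions.{Literal, NamedArgumentExpression}
import org.apache.spark.sql.catalyst.plans.logical.{FunctionSignature, InputParameter, NamedParametersSupport}

object RearrangeDemo {
  def main(args: Array[String]): Unit = {
    val signature = FunctionSignature(Seq(
      InputParameter("str"),
      InputParameter("upperChar", Some(Literal("X")))))

    // One positional argument plus one named argument.
    val provided = Seq(
      Literal("abc"),
      NamedArgumentExpression("upperChar", Literal("Q")))

    // Returns the arguments in positional order with defaults filled in:
    // Seq(Literal("abc"), Literal("Q"))
    val rearranged = NamedParametersSupport.defaultRearrange(signature, provided, "mask")
    println(rearranged)
  }
}
```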
@@ -174,4 +174,4 @@ case class NamedArgument(name: String, default: Option[Expression] = None) * * @param parameters The list of arguments which the function takes */ -case class FunctionSignature(parameters: Seq[NamedArgument]) +case class FunctionSignature(parameters: Seq[InputParameter]) diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/NamedArgumentFunctionSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/NamedParameterFunctionSuite.scala similarity index 93% rename from sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/NamedArgumentFunctionSuite.scala rename to sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/NamedParameterFunctionSuite.scala index fd27512a2eda6..dd5cb5e7d03c8 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/NamedArgumentFunctionSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/NamedParameterFunctionSuite.scala @@ -20,7 +20,7 @@ import org.apache.spark.SparkThrowable import org.apache.spark.sql.catalyst.InternalRow import org.apache.spark.sql.catalyst.expressions.{Expression, Literal, NamedArgumentExpression} import org.apache.spark.sql.catalyst.expressions.codegen.{CodegenContext, ExprCode} -import org.apache.spark.sql.catalyst.plans.logical.{FunctionBuilderBase, FunctionSignature, NamedArgument, NamedArgumentsSupport} +import org.apache.spark.sql.catalyst.plans.logical.{FunctionBuilderBase, FunctionSignature, InputParameter, NamedParametersSupport} import org.apache.spark.sql.catalyst.util.TypeUtils.toSQLId import org.apache.spark.sql.types.DataType @@ -41,10 +41,10 @@ case class DummyExpression( object DummyExpressionBuilder extends ExpressionBuilder { def defaultFunctionSignature: FunctionSignature = { - FunctionSignature(Seq(NamedArgument("k1"), - NamedArgument("k2"), - NamedArgument("k3"), - NamedArgument("k4"))) + FunctionSignature(Seq(InputParameter("k1"), + InputParameter("k2"), + InputParameter("k3"), + InputParameter("k4"))) } override def functionSignature: Option[FunctionSignature] = @@ -64,10 +64,10 @@ class NamedArgumentFunctionSuite extends AnalysisTest { final val expectedSeq = Seq(Literal("v1"), Literal("v2"), Literal("v3"), Literal("v4")) final val signature = DummyExpressionBuilder.defaultFunctionSignature final val illegalSignature = FunctionSignature(Seq( - NamedArgument("k1"), NamedArgument("k2", Option(Literal("v2"))), NamedArgument("k3"))) + InputParameter("k1"), InputParameter("k2", Option(Literal("v2"))), InputParameter("k3"))) test("Check rearrangement of expressions") { - val rearrangedArgs = NamedArgumentsSupport.defaultRearrange( + val rearrangedArgs = NamedParametersSupport.defaultRearrange( signature, args, "function") for ((returnedArg, expectedArg) <- rearrangedArgs.zip(expectedSeq)) { assert(returnedArg == expectedArg) @@ -83,7 +83,7 @@ class NamedArgumentFunctionSuite extends AnalysisTest { expressions: Seq[Expression], functionName: String = "function"): SparkThrowable = { intercept[SparkThrowable]( - NamedArgumentsSupport.defaultRearrange(functionSignature, expressions, functionName)) + NamedParametersSupport.defaultRearrange(functionSignature, expressions, functionName)) } private def parseExternalException[T <: FunctionBuilderBase[_]](