diff --git a/common/utils/src/main/resources/error/error-classes.json b/common/utils/src/main/resources/error/error-classes.json
index 6c953174865f0..ac3de519bff7f 100644
--- a/common/utils/src/main/resources/error/error-classes.json
+++ b/common/utils/src/main/resources/error/error-classes.json
@@ -5099,11 +5099,6 @@
       "Unrecognized Parquet type: ."
     ]
   },
-  "_LEGACY_ERROR_TEMP_1175" : {
-    "message" : [
-      "Unsupported data type <dataType>."
-    ]
-  },
   "_LEGACY_ERROR_TEMP_1181" : {
     "message" : [
       "Stream-stream join without equality predicate is not supported."
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
index 53338f38ed6d2..e96474862b1d7 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
@@ -1908,8 +1908,9 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase with Compilat
 
   def cannotConvertDataTypeToParquetTypeError(field: StructField): Throwable = {
     new AnalysisException(
-      errorClass = "_LEGACY_ERROR_TEMP_1175",
-      messageParameters = Map("dataType" -> field.dataType.catalogString))
+      errorClass = "INTERNAL_ERROR",
+      messageParameters = Map("message" ->
+        s"Cannot convert Spark data type ${toSQLType(field.dataType)} to any Parquet type."))
   }
 
   def incompatibleViewSchemaChangeError(
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala
index e22399c326f60..d4e4a41155eaf 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala
@@ -24,6 +24,7 @@ import org.apache.spark.sql._
 import org.apache.spark.sql.api.java.{UDF1, UDF2, UDF23Test}
 import org.apache.spark.sql.catalyst.expressions.{Coalesce, Literal, UnsafeRow}
 import org.apache.spark.sql.catalyst.parser.ParseException
+import org.apache.spark.sql.execution.datasources.parquet.SparkToParquetSchemaConverter
 import org.apache.spark.sql.execution.datasources.v2.jdbc.JDBCTableCatalog
 import org.apache.spark.sql.expressions.SparkUserDefinedFunction
 import org.apache.spark.sql.functions._
@@ -962,6 +963,24 @@ class QueryCompilationErrorsSuite
         "methodName" -> "update",
         "className" -> "org.apache.spark.sql.catalyst.expressions.UnsafeRow"))
   }
+
+  test("INTERNAL_ERROR: Convert unsupported data type from Spark to Parquet") {
+    val converter = new SparkToParquetSchemaConverter
+    val dummyDataType = new DataType {
+      override def defaultSize: Int = 0
+
+      override def simpleString: String = "Dummy"
+
+      override private[spark] def asNullable = NullType
+    }
+    checkError(
+      exception = intercept[AnalysisException] {
+        converter.convertField(StructField("test", dummyDataType))
+      },
+      errorClass = "INTERNAL_ERROR",
+      parameters = Map("message" -> "Cannot convert Spark data type \"DUMMY\" to any Parquet type.")
+    )
+  }
 }
 
 class MyCastToString extends SparkUserDefinedFunction(