diff --git a/core/src/main/resources/error/error-classes.json b/core/src/main/resources/error/error-classes.json
index 9c494c043796b..504a6e5be44f3 100644
--- a/core/src/main/resources/error/error-classes.json
+++ b/core/src/main/resources/error/error-classes.json
@@ -949,6 +949,11 @@
           "Literal for '<value>' of <type>."
         ]
       },
+      "MULTIPLE_BUCKET_TRANSFORMS" : {
+        "message" : [
+          "Multiple bucket TRANSFORMs."
+        ]
+      },
       "NATURAL_CROSS_JOIN" : {
         "message" : [
           "NATURAL CROSS JOIN."
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/connector/catalog/CatalogV2Implicits.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/connector/catalog/CatalogV2Implicits.scala
index 91809b6176c8a..d9f15d84d8932 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/connector/catalog/CatalogV2Implicits.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/connector/catalog/CatalogV2Implicits.scala
@@ -60,7 +60,9 @@ private[sql] object CatalogV2Implicits {
         identityCols += col
 
       case BucketTransform(numBuckets, col, sortCol) =>
-        if (bucketSpec.nonEmpty) throw QueryExecutionErrors.multipleBucketTransformsError
+        if (bucketSpec.nonEmpty) {
+          throw QueryExecutionErrors.unsupportedMultipleBucketTransformsError
+        }
         if (sortCol.isEmpty) {
           bucketSpec = Some(BucketSpec(numBuckets, col.map(_.fieldNames.mkString(".")), Nil))
         } else {
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
index 7e870e23fba08..ba78858debc01 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -2623,9 +2623,9 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase {
         "format" -> format))
   }
 
-  def multipleBucketTransformsError(): SparkUnsupportedOperationException = {
+  def unsupportedMultipleBucketTransformsError(): SparkUnsupportedOperationException = {
     new SparkUnsupportedOperationException(
-      errorClass = "_LEGACY_ERROR_TEMP_2279",
+      errorClass = "UNSUPPORTED_FEATURE.MULTIPLE_BUCKET_TRANSFORMS",
       messageParameters = Map.empty)
   }
 
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/PlanResolutionSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/PlanResolutionSuite.scala
index 6276b1a3b60f8..3b2271afc862e 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/PlanResolutionSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/PlanResolutionSuite.scala
@@ -24,6 +24,7 @@ import org.mockito.ArgumentMatchers.any
 import org.mockito.Mockito.{mock, when}
 import org.mockito.invocation.InvocationOnMock
 
+import org.apache.spark.SparkUnsupportedOperationException
 import org.apache.spark.sql.{AnalysisException, SaveMode}
 import org.apache.spark.sql.catalyst.{AliasIdentifier, TableIdentifier}
 import org.apache.spark.sql.catalyst.analysis.{AnalysisContext, AnalysisTest, Analyzer, EmptyFunctionRegistry, NoSuchTableException, ResolvedFieldName, ResolvedIdentifier, ResolvedTable, ResolveSessionCatalog, UnresolvedAttribute, UnresolvedInlineTable, UnresolvedRelation, UnresolvedSubqueryColumnAliases, UnresolvedTable}
@@ -292,13 +293,12 @@ class PlanResolutionSuite extends AnalysisTest {
            |CREATE TABLE my_tab(a INT, b STRING) USING parquet
            |PARTITIONED BY ($transform)
          """.stripMargin
-
-      val ae = intercept[UnsupportedOperationException] {
-        parseAndResolve(query)
-      }
-
-      assert(ae.getMessage
-        .contains(s"Unsupported partition transform: $transform"))
+      checkError(
+        exception = intercept[SparkUnsupportedOperationException] {
+          parseAndResolve(query)
+        },
+        errorClass = "_LEGACY_ERROR_TEMP_2067",
+        parameters = Map("transform" -> transform))
     }
   }
 
@@ -310,13 +310,12 @@ class PlanResolutionSuite extends AnalysisTest {
            |CREATE TABLE my_tab(a INT, b STRING, c String) USING parquet
            |PARTITIONED BY ($transform)
          """.stripMargin
-
-      val ae = intercept[UnsupportedOperationException] {
-        parseAndResolve(query)
-      }
-
-      assert(ae.getMessage
-        .contains("Multiple bucket transforms are not supported."))
+      checkError(
+        exception = intercept[SparkUnsupportedOperationException] {
+          parseAndResolve(query)
+        },
+        errorClass = "UNSUPPORTED_FEATURE.MULTIPLE_BUCKET_TRANSFORMS",
+        parameters = Map.empty)
     }
   }