From 568783bab6e4d720a6d625dba857063cf043bea4 Mon Sep 17 00:00:00 2001 From: panbingkun Date: Tue, 25 Oct 2022 19:18:38 +0800 Subject: [PATCH 1/5] [SPARK-40910][SQL] Replace UnsupportedOperationException with SparkUnsupportedOperationException --- .../main/resources/error/error-classes.json | 5 ++++ .../sql/errors/QueryExecutionErrors.scala | 4 ++- .../command/PlanResolutionSuite.scala | 27 +++++++++---------- 3 files changed, 21 insertions(+), 15 deletions(-) diff --git a/core/src/main/resources/error/error-classes.json b/core/src/main/resources/error/error-classes.json index 6f5b3b5a1347..8acaafcef33c 100644 --- a/core/src/main/resources/error/error-classes.json +++ b/core/src/main/resources/error/error-classes.json @@ -4312,5 +4312,10 @@ "message" : [ "Not enough memory to build and broadcast the table to all worker nodes. As a workaround, you can either disable broadcast by setting to -1 or increase the spark driver memory by setting to a higher value" ] + }, + "_LEGACY_ERROR_TEMP_2251" : { + "message" : [ + "Multiple bucket transforms are not supported." + ] } } \ No newline at end of file diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala index 4aedfb3b03da..97f110cef1aa 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala @@ -2614,7 +2614,9 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase { } def multipleBucketTransformsError(): Throwable = { - new UnsupportedOperationException("Multiple bucket transforms are not supported.") + new SparkUnsupportedOperationException( + errorClass = "_LEGACY_ERROR_TEMP_2251", + messageParameters = Map.empty) } def unsupportedCreateNamespaceCommentError(): Throwable = { diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/PlanResolutionSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/PlanResolutionSuite.scala index 6276b1a3b60f..6243d963ae44 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/PlanResolutionSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/PlanResolutionSuite.scala @@ -24,6 +24,7 @@ import org.mockito.ArgumentMatchers.any import org.mockito.Mockito.{mock, when} import org.mockito.invocation.InvocationOnMock +import org.apache.spark.SparkUnsupportedOperationException import org.apache.spark.sql.{AnalysisException, SaveMode} import org.apache.spark.sql.catalyst.{AliasIdentifier, TableIdentifier} import org.apache.spark.sql.catalyst.analysis.{AnalysisContext, AnalysisTest, Analyzer, EmptyFunctionRegistry, NoSuchTableException, ResolvedFieldName, ResolvedIdentifier, ResolvedTable, ResolveSessionCatalog, UnresolvedAttribute, UnresolvedInlineTable, UnresolvedRelation, UnresolvedSubqueryColumnAliases, UnresolvedTable} @@ -292,13 +293,12 @@ class PlanResolutionSuite extends AnalysisTest { |CREATE TABLE my_tab(a INT, b STRING) USING parquet |PARTITIONED BY ($transform) """.stripMargin - - val ae = intercept[UnsupportedOperationException] { - parseAndResolve(query) - } - - assert(ae.getMessage - .contains(s"Unsupported partition transform: $transform")) + checkError( + exception = intercept[SparkUnsupportedOperationException] { + parseAndResolve(query) + }, + errorClass = "_LEGACY_ERROR_TEMP_2067", + parameters = Map("transform" -> transform)) } } @@ -310,13 +310,12 
@@ class PlanResolutionSuite extends AnalysisTest { |CREATE TABLE my_tab(a INT, b STRING, c String) USING parquet |PARTITIONED BY ($transform) """.stripMargin - - val ae = intercept[UnsupportedOperationException] { - parseAndResolve(query) - } - - assert(ae.getMessage - .contains("Multiple bucket transforms are not supported.")) + checkError( + exception = intercept[SparkUnsupportedOperationException] { + parseAndResolve(query) + }, + errorClass = "_LEGACY_ERROR_TEMP_2251", + parameters = Map.empty) } } From 5b4a3a0421a64d3b4757274e41f7f7201afc3754 Mon Sep 17 00:00:00 2001 From: panbingkun Date: Wed, 26 Oct 2022 20:20:27 +0800 Subject: [PATCH 2/5] [SPARK-40910][SQL] Replace UnsupportedOperationException with SparkUnsupportedOperationException --- core/src/main/resources/error/error-classes.json | 10 +++++----- .../sql/connector/catalog/CatalogV2Implicits.scala | 3 ++- .../apache/spark/sql/errors/QueryExecutionErrors.scala | 4 ++-- .../sql/execution/command/PlanResolutionSuite.scala | 2 +- 4 files changed, 10 insertions(+), 9 deletions(-) diff --git a/core/src/main/resources/error/error-classes.json b/core/src/main/resources/error/error-classes.json index 8acaafcef33c..579e2e9086f6 100644 --- a/core/src/main/resources/error/error-classes.json +++ b/core/src/main/resources/error/error-classes.json @@ -949,6 +949,11 @@ "Literal for '' of ." ] }, + "MULTIPLE_BUCKET_TRANSFORM" : { + "message" : [ + "TRANSFORM on multiple bucket." + ] + }, "NATURAL_CROSS_JOIN" : { "message" : [ "NATURAL CROSS JOIN." @@ -4312,10 +4317,5 @@ "message" : [ "Not enough memory to build and broadcast the table to all worker nodes. As a workaround, you can either disable broadcast by setting to -1 or increase the spark driver memory by setting to a higher value" ] - }, - "_LEGACY_ERROR_TEMP_2251" : { - "message" : [ - "Multiple bucket transforms are not supported." 
- ] } } \ No newline at end of file diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/connector/catalog/CatalogV2Implicits.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/connector/catalog/CatalogV2Implicits.scala index 91809b6176c8..d53f10d78b64 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/connector/catalog/CatalogV2Implicits.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/connector/catalog/CatalogV2Implicits.scala @@ -60,7 +60,8 @@ private[sql] object CatalogV2Implicits { identityCols += col case BucketTransform(numBuckets, col, sortCol) => - if (bucketSpec.nonEmpty) throw QueryExecutionErrors.multipleBucketTransformsError + if (bucketSpec.nonEmpty) + throw QueryExecutionErrors.unsupportedMultipleBucketTransformsError if (sortCol.isEmpty) { bucketSpec = Some(BucketSpec(numBuckets, col.map(_.fieldNames.mkString(".")), Nil)) } else { diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala index 97f110cef1aa..22a370827bf3 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala @@ -2613,9 +2613,9 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase { s"The input $valueType '$input' does not match the given number format: '$format'") } - def multipleBucketTransformsError(): Throwable = { + def unsupportedMultipleBucketTransformsError(): Throwable = { new SparkUnsupportedOperationException( - errorClass = "_LEGACY_ERROR_TEMP_2251", + errorClass = "UNSUPPORTED_FEATURE.MULTIPLE_BUCKET_TRANSFORM", messageParameters = Map.empty) } diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/PlanResolutionSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/PlanResolutionSuite.scala index 6243d963ae44..53faf158c70c 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/PlanResolutionSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/PlanResolutionSuite.scala @@ -314,7 +314,7 @@ class PlanResolutionSuite extends AnalysisTest { exception = intercept[SparkUnsupportedOperationException] { parseAndResolve(query) }, - errorClass = "_LEGACY_ERROR_TEMP_2251", + errorClass = "UNSUPPORTED_FEATURE.MULTIPLE_BUCKET_TRANSFORM", parameters = Map.empty) } } From 7ec8fd84a6bcecb580ed5c30a223adaf2d073bd0 Mon Sep 17 00:00:00 2001 From: panbingkun Date: Wed, 26 Oct 2022 21:36:10 +0800 Subject: [PATCH 3/5] [SPARK-40910][SQL] Replace UnsupportedOperationException with SparkUnsupportedOperationException --- .../spark/sql/connector/catalog/CatalogV2Implicits.scala | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/connector/catalog/CatalogV2Implicits.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/connector/catalog/CatalogV2Implicits.scala index d53f10d78b64..d9f15d84d893 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/connector/catalog/CatalogV2Implicits.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/connector/catalog/CatalogV2Implicits.scala @@ -60,8 +60,9 @@ private[sql] object CatalogV2Implicits { identityCols += col case BucketTransform(numBuckets, col, sortCol) => - if (bucketSpec.nonEmpty) + if (bucketSpec.nonEmpty) { throw QueryExecutionErrors.unsupportedMultipleBucketTransformsError + } if (sortCol.isEmpty) { 
bucketSpec = Some(BucketSpec(numBuckets, col.map(_.fieldNames.mkString(".")), Nil)) } else { From cff705061900c499dc46503c91e6af07a3b26a0e Mon Sep 17 00:00:00 2001 From: panbingkun Date: Wed, 26 Oct 2022 21:37:57 +0800 Subject: [PATCH 4/5] [SPARK-40910][SQL] Replace UnsupportedOperationException with SparkUnsupportedOperationException --- .../org/apache/spark/sql/errors/QueryExecutionErrors.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala index 257af050fa11..c967581e1a86 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala @@ -2623,7 +2623,7 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase { "format" -> format)) } - def unsupportedMultipleBucketTransformsError(): Throwable = { + def unsupportedMultipleBucketTransformsError(): SparkUnsupportedOperationException = { new SparkUnsupportedOperationException( errorClass = "UNSUPPORTED_FEATURE.MULTIPLE_BUCKET_TRANSFORM", messageParameters = Map.empty) From bda740278b2b7aa55c0d800f7de812cca44c025f Mon Sep 17 00:00:00 2001 From: panbingkun Date: Thu, 27 Oct 2022 16:01:48 +0800 Subject: [PATCH 5/5] [SPARK-40910][SQL] Replace UnsupportedOperationException with SparkUnsupportedOperationException --- core/src/main/resources/error/error-classes.json | 4 ++-- .../org/apache/spark/sql/errors/QueryExecutionErrors.scala | 2 +- .../spark/sql/execution/command/PlanResolutionSuite.scala | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/core/src/main/resources/error/error-classes.json b/core/src/main/resources/error/error-classes.json index 6991eef01535..504a6e5be44f 100644 --- a/core/src/main/resources/error/error-classes.json +++ b/core/src/main/resources/error/error-classes.json @@ -949,9 +949,9 @@ "Literal for '' of ." ] }, - "MULTIPLE_BUCKET_TRANSFORM" : { + "MULTIPLE_BUCKET_TRANSFORMS" : { "message" : [ - "TRANSFORM on multiple bucket." + "Multiple bucket TRANSFORMs." 
] }, "NATURAL_CROSS_JOIN" : { diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala index c967581e1a86..ba78858debc0 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala @@ -2625,7 +2625,7 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase { def unsupportedMultipleBucketTransformsError(): SparkUnsupportedOperationException = { new SparkUnsupportedOperationException( - errorClass = "UNSUPPORTED_FEATURE.MULTIPLE_BUCKET_TRANSFORM", + errorClass = "UNSUPPORTED_FEATURE.MULTIPLE_BUCKET_TRANSFORMS", messageParameters = Map.empty) } diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/PlanResolutionSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/PlanResolutionSuite.scala index 53faf158c70c..3b2271afc862 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/PlanResolutionSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/PlanResolutionSuite.scala @@ -314,7 +314,7 @@ class PlanResolutionSuite extends AnalysisTest { exception = intercept[SparkUnsupportedOperationException] { parseAndResolve(query) }, - errorClass = "UNSUPPORTED_FEATURE.MULTIPLE_BUCKET_TRANSFORM", + errorClass = "UNSUPPORTED_FEATURE.MULTIPLE_BUCKET_TRANSFORMS", parameters = Map.empty) } }
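
Note (not part of the patches above): after PATCH 5/5, a second bucket transform in a CREATE TABLE's PARTITIONED BY clause surfaces as a SparkUnsupportedOperationException carrying the error class UNSUPPORTED_FEATURE.MULTIPLE_BUCKET_TRANSFORMS. The following is a minimal standalone sketch of that flow; every name in it (SketchUnsupportedOperationException, toBucketSpec, the simplified Transform hierarchy) is a hypothetical stand-in so the snippet compiles without Spark on the classpath — it is not the Spark source itself.

```scala
// Standalone sketch modelling the end state of this patch series.
object MultipleBucketTransformsSketch {

  // Stand-in for org.apache.spark.SparkUnsupportedOperationException, which
  // (per the patches) is constructed from an error class and message parameters.
  final class SketchUnsupportedOperationException(
      val errorClass: String,
      message: String) extends UnsupportedOperationException(message)

  // Mirrors the error-classes.json entry added in PATCH 2/5 and renamed in
  // PATCH 5/5: a sub-class of UNSUPPORTED_FEATURE with this message text.
  private val errorMessages = Map(
    "UNSUPPORTED_FEATURE.MULTIPLE_BUCKET_TRANSFORMS" -> "Multiple bucket TRANSFORMs.")

  // Shape of QueryExecutionErrors.unsupportedMultipleBucketTransformsError after
  // PATCH 4/5: the declared return type is the concrete exception class.
  def unsupportedMultipleBucketTransformsError(): SketchUnsupportedOperationException = {
    val errorClass = "UNSUPPORTED_FEATURE.MULTIPLE_BUCKET_TRANSFORMS"
    new SketchUnsupportedOperationException(errorClass, errorMessages(errorClass))
  }

  // Simplified model of the CatalogV2Implicits partitioning loop touched by
  // PATCH 2/5 and 3/5: at most one bucket transform may contribute a BucketSpec.
  sealed trait Transform
  final case class IdentityTransform(col: String) extends Transform
  final case class BucketTransform(numBuckets: Int, cols: Seq[String]) extends Transform

  def toBucketSpec(transforms: Seq[Transform]): Option[(Int, Seq[String])] = {
    var bucketSpec: Option[(Int, Seq[String])] = None
    transforms.foreach {
      case BucketTransform(numBuckets, cols) =>
        if (bucketSpec.nonEmpty) {
          throw unsupportedMultipleBucketTransformsError()
        }
        bucketSpec = Some((numBuckets, cols))
      case _: IdentityTransform => // identity columns carry no bucketing info
    }
    bucketSpec
  }

  def main(args: Array[String]): Unit = {
    // Analogous to the multiple-bucket-transform test updated in PATCH 1/5:
    // the second bucket transform fails with the new error class.
    try {
      toBucketSpec(Seq(BucketTransform(32, Seq("b")), BucketTransform(32, Seq("c"))))
    } catch {
      case e: SketchUnsupportedOperationException =>
        println(s"${e.errorClass}: ${e.getMessage}")
    }
  }
}
```

In real Spark the user-facing message is rendered from the UNSUPPORTED_FEATURE template in error-classes.json rather than looked up directly as above; the sketch skips that templating step and only illustrates the control flow and the error class the tests assert on via checkError.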