Skip to content
5 changes: 5 additions & 0 deletions core/src/main/resources/error/error-classes.json
Original file line number Diff line number Diff line change
Expand Up @@ -949,6 +949,11 @@
"Literal for '<value>' of <type>."
]
},
"MULTIPLE_BUCKET_TRANSFORMS" : {
"message" : [
"Multiple bucket TRANSFORMs."
]
},
"NATURAL_CROSS_JOIN" : {
"message" : [
"NATURAL CROSS JOIN."
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,9 @@ private[sql] object CatalogV2Implicits {
identityCols += col

case BucketTransform(numBuckets, col, sortCol) =>
if (bucketSpec.nonEmpty) throw QueryExecutionErrors.multipleBucketTransformsError
if (bucketSpec.nonEmpty) {
throw QueryExecutionErrors.unsupportedMultipleBucketTransformsError
}
if (sortCol.isEmpty) {
bucketSpec = Some(BucketSpec(numBuckets, col.map(_.fieldNames.mkString(".")), Nil))
} else {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2623,9 +2623,9 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase {
"format" -> format))
}

/**
 * Builds the error thrown when a table definition contains more than one
 * bucket transform in its PARTITIONED BY clause; only a single bucket
 * spec is supported, so the second transform is rejected.
 *
 * Uses the named error class `UNSUPPORTED_FEATURE.MULTIPLE_BUCKET_TRANSFORMS`
 * (migrated from the legacy temp id `_LEGACY_ERROR_TEMP_2279`); the message
 * template takes no parameters, hence the empty map.
 *
 * @return the exception for the caller to throw
 */
def unsupportedMultipleBucketTransformsError(): SparkUnsupportedOperationException = {
  new SparkUnsupportedOperationException(
    errorClass = "UNSUPPORTED_FEATURE.MULTIPLE_BUCKET_TRANSFORMS",
    messageParameters = Map.empty)
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@ import org.mockito.ArgumentMatchers.any
import org.mockito.Mockito.{mock, when}
import org.mockito.invocation.InvocationOnMock

import org.apache.spark.SparkUnsupportedOperationException
import org.apache.spark.sql.{AnalysisException, SaveMode}
import org.apache.spark.sql.catalyst.{AliasIdentifier, TableIdentifier}
import org.apache.spark.sql.catalyst.analysis.{AnalysisContext, AnalysisTest, Analyzer, EmptyFunctionRegistry, NoSuchTableException, ResolvedFieldName, ResolvedIdentifier, ResolvedTable, ResolveSessionCatalog, UnresolvedAttribute, UnresolvedInlineTable, UnresolvedRelation, UnresolvedSubqueryColumnAliases, UnresolvedTable}
Expand Down Expand Up @@ -292,13 +293,12 @@ class PlanResolutionSuite extends AnalysisTest {
|CREATE TABLE my_tab(a INT, b STRING) USING parquet
|PARTITIONED BY ($transform)
""".stripMargin

val ae = intercept[UnsupportedOperationException] {
parseAndResolve(query)
}

assert(ae.getMessage
.contains(s"Unsupported partition transform: $transform"))
checkError(
exception = intercept[SparkUnsupportedOperationException] {
parseAndResolve(query)
},
errorClass = "_LEGACY_ERROR_TEMP_2067",
parameters = Map("transform" -> transform))
Reviewer note (panbingkun, Contributor/Author, Oct 25, 2022), attached to the
`checkError(... errorClass = "_LEGACY_ERROR_TEMP_2067" ...)` assertion above,
with a screenshot of the observed exception:

> In fact, it throws SparkUnsupportedOperationException.

(That is, the previous `intercept[UnsupportedOperationException]` was widened
only because `SparkUnsupportedOperationException` extends it; the test now
intercepts the concrete `SparkUnsupportedOperationException` type directly.)

}
}

Expand All @@ -310,13 +310,12 @@ class PlanResolutionSuite extends AnalysisTest {
|CREATE TABLE my_tab(a INT, b STRING, c String) USING parquet
|PARTITIONED BY ($transform)
""".stripMargin

val ae = intercept[UnsupportedOperationException] {
parseAndResolve(query)
}

assert(ae.getMessage
.contains("Multiple bucket transforms are not supported."))
checkError(
exception = intercept[SparkUnsupportedOperationException] {
parseAndResolve(query)
},
errorClass = "UNSUPPORTED_FEATURE.MULTIPLE_BUCKET_TRANSFORMS",
parameters = Map.empty)
}
}

Expand Down