Skip to content

Commit 9894f9e

Browse files

Commit message: "Use UnresolvedPartitionSpec"
1 parent: e2488bb — this commit: 9894f9e

File tree

8 files changed

+37
-14
lines changed

8 files changed

+37
-14
lines changed

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -587,6 +587,9 @@ trait CheckAnalysis extends PredicateHelper with LookupCatalog {
587587
case AlterTableDropPartition(ResolvedTable(_, _, table), parts, _, _) =>
588588
checkAlterTablePartition(table, parts)
589589

590+
case AlterTableRenamePartition(ResolvedTable(_, _, table), from, _) =>
591+
checkAlterTablePartition(table, Seq(from))
592+
590593
case showPartitions: ShowPartitions => checkShowPartitions(showPartitions)
591594

592595
case _ => // Falls back to the following checks

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolvePartitionSpec.scala

Lines changed: 10 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,7 @@ package org.apache.spark.sql.catalyst.analysis
2020
import org.apache.spark.sql.catalyst.InternalRow
2121
import org.apache.spark.sql.catalyst.catalog.CatalogTypes.TablePartitionSpec
2222
import org.apache.spark.sql.catalyst.expressions.{Cast, Literal}
23-
import org.apache.spark.sql.catalyst.plans.logical.{AlterTableAddPartition, AlterTableDropPartition, LogicalPlan, ShowPartitions}
23+
import org.apache.spark.sql.catalyst.plans.logical.{AlterTableAddPartition, AlterTableDropPartition, AlterTableRenamePartition, LogicalPlan, ShowPartitions}
2424
import org.apache.spark.sql.catalyst.rules.Rule
2525
import org.apache.spark.sql.catalyst.util.CharVarcharUtils
2626
import org.apache.spark.sql.connector.catalog.SupportsPartitionManagement
@@ -51,6 +51,15 @@ object ResolvePartitionSpec extends Rule[LogicalPlan] {
5151
partitionSchema,
5252
requireExactMatchedPartitionSpec(table.name, _, partitionSchema.fieldNames)))
5353

54+
case r @ AlterTableRenamePartition(
55+
ResolvedTable(_, _, table: SupportsPartitionManagement), from, _) =>
56+
val partitionSchema = table.partitionSchema()
57+
r.copy(from = resolvePartitionSpecs(
58+
table.name,
59+
Seq(from),
60+
partitionSchema,
61+
requireExactMatchedPartitionSpec(table.name, _, partitionSchema.fieldNames)).head)
62+
5463
case r @ ShowPartitions(ResolvedTable(_, _, table: SupportsPartitionManagement), partSpecs) =>
5564
r.copy(pattern = resolvePartitionSpecs(
5665
table.name,

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3774,7 +3774,7 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with SQLConfHelper with Logg
37743774
UnresolvedTable(
37753775
visitMultipartIdentifier(ctx.multipartIdentifier),
37763776
"ALTER TABLE ... RENAME TO PARTITION"),
3777-
visitNonOptionalPartitionSpec(ctx.from),
3777+
UnresolvedPartitionSpec(visitNonOptionalPartitionSpec(ctx.from)),
37783778
visitNonOptionalPartitionSpec(ctx.to))
37793779
}
37803780

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/v2Commands.scala

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -678,8 +678,11 @@ case class AlterTableDropPartition(
678678
*/
679679
case class AlterTableRenamePartition(
680680
child: LogicalPlan,
681-
from: TablePartitionSpec,
681+
from: PartitionSpec,
682682
to: TablePartitionSpec) extends Command {
683+
override lazy val resolved: Boolean =
684+
childrenResolved && from.isInstanceOf[ResolvedPartitionSpec]
685+
683686
override def children: Seq[LogicalPlan] = child :: Nil
684687
}
685688

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,7 @@ package org.apache.spark.sql.catalyst.parser
2020
import java.util.Locale
2121

2222
import org.apache.spark.sql.AnalysisException
23-
import org.apache.spark.sql.catalyst.analysis.{AnalysisTest, GlobalTempView, LocalTempView, PersistedView, UnresolvedAttribute, UnresolvedFunc, UnresolvedNamespace, UnresolvedRelation, UnresolvedStar, UnresolvedTable, UnresolvedTableOrView, UnresolvedView}
23+
import org.apache.spark.sql.catalyst.analysis.{AnalysisTest, GlobalTempView, LocalTempView, PersistedView, UnresolvedAttribute, UnresolvedFunc, UnresolvedNamespace, UnresolvedPartitionSpec, UnresolvedRelation, UnresolvedStar, UnresolvedTable, UnresolvedTableOrView, UnresolvedView}
2424
import org.apache.spark.sql.catalyst.catalog.{ArchiveResource, BucketSpec, FileResource, FunctionResource, JarResource}
2525
import org.apache.spark.sql.catalyst.expressions.{EqualTo, Literal}
2626
import org.apache.spark.sql.catalyst.plans.logical._
@@ -2108,7 +2108,7 @@ class DDLParserSuite extends AnalysisTest {
21082108
val parsed1 = parsePlan(sql1)
21092109
val expected1 = AlterTableRenamePartition(
21102110
UnresolvedTable(Seq("table_name"), "ALTER TABLE ... RENAME TO PARTITION"),
2111-
Map("dt" -> "2008-08-08", "country" -> "us"),
2111+
UnresolvedPartitionSpec(Map("dt" -> "2008-08-08", "country" -> "us")),
21122112
Map("dt" -> "2008-09-09", "country" -> "uk"))
21132113
comparePlans(parsed1, expected1)
21142114

@@ -2120,7 +2120,7 @@ class DDLParserSuite extends AnalysisTest {
21202120
val parsed2 = parsePlan(sql2)
21212121
val expected2 = AlterTableRenamePartition(
21222122
UnresolvedTable(Seq("a", "b", "c"), "ALTER TABLE ... RENAME TO PARTITION"),
2123-
Map("ds" -> "2017-06-10"),
2123+
UnresolvedPartitionSpec(Map("ds" -> "2017-06-10")),
21242124
Map("ds" -> "2018-06-10"))
21252125
comparePlans(parsed2, expected2)
21262126
}

sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -445,7 +445,8 @@ class ResolveSessionCatalog(
445445
partSpecsAndLocs.asUnresolvedPartitionSpecs.map(spec => (spec.spec, spec.location)),
446446
ifNotExists)
447447

448-
case AlterTableRenamePartition(ResolvedV1TableIdentifier(ident), from, to) =>
448+
case AlterTableRenamePartition(
449+
ResolvedV1TableIdentifier(ident), UnresolvedPartitionSpec(from, _), to) =>
449450
AlterTableRenamePartitionCommand(
450451
ident.asTableIdentifier,
451452
from,

sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DataSourceV2Strategy.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -342,7 +342,7 @@ class DataSourceV2Strategy(session: SparkSession) extends Strategy with Predicat
342342
AlterTableDropPartitionExec(
343343
table, parts.asResolvedPartitionSpecs, ignoreIfNotExists) :: Nil
344344

345-
case AlterTableRenamePartition(_: ResolvedTable, _, _) =>
345+
case AlterTableRenamePartition(_: ResolvedTable, _: ResolvedPartitionSpec, _) =>
346346
throw new AnalysisException(
347347
"ALTER TABLE ... RENAME TO PARTITION is not supported for v2 tables.")
348348

sql/core/src/test/scala/org/apache/spark/sql/connector/AlterTablePartitionV2SQLSuite.scala

Lines changed: 13 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -33,13 +33,20 @@ class AlterTablePartitionV2SQLSuite extends DatasourceV2SQLBase {
3333
}
3434

3535
test("ALTER TABLE RENAME PARTITION") {
36-
val t = "testcat.ns1.ns2.tbl"
37-
withTable(t) {
38-
spark.sql(s"CREATE TABLE $t (id bigint, data string) USING foo PARTITIONED BY (id)")
39-
val e = intercept[AnalysisException] {
40-
sql(s"ALTER TABLE $t PARTITION (id=1) RENAME TO PARTITION (id=2)")
36+
val nonPartTbl = "testcat.ns1.ns2.tbl"
37+
val partTbl = "testpart.ns1.ns2.tbl"
38+
withTable(nonPartTbl, partTbl) {
39+
spark.sql(s"CREATE TABLE $nonPartTbl (id bigint, data string) USING foo PARTITIONED BY (id)")
40+
val e1 = intercept[AnalysisException] {
41+
sql(s"ALTER TABLE $nonPartTbl PARTITION (id=1) RENAME TO PARTITION (id=2)")
4142
}
42-
assert(e.message.contains(
43+
assert(e1.message.contains(s"Table $nonPartTbl can not alter partitions"))
44+
45+
spark.sql(s"CREATE TABLE $partTbl (id bigint, data string) USING foo PARTITIONED BY (id)")
46+
val e2 = intercept[AnalysisException] {
47+
sql(s"ALTER TABLE $partTbl PARTITION (id=1) RENAME TO PARTITION (id=2)")
48+
}
49+
assert(e2.message.contains(
4350
"ALTER TABLE ... RENAME TO PARTITION is not supported for v2 tables."))
4451
}
4552
}

0 commit comments

Comments (0)