
Commit 3dfdcf4

MaxGekk authored and HyukjinKwon committed
[SPARK-33788][SQL] Throw NoSuchPartitionsException from HiveExternalCatalog.dropPartitions()
### What changes were proposed in this pull request?
Throw `NoSuchPartitionsException` from `ALTER TABLE .. DROP PARTITION` for non-existent partitions of a table in the V1 Hive external catalog.

### Why are the changes needed?
The behaviour of the Hive external catalog deviates from the V1/V2 in-memory catalogs, which throw `NoSuchPartitionsException`. To improve the user experience with Spark SQL, it is better to throw the same exception.

### Does this PR introduce _any_ user-facing change?
Yes, the command throws `NoSuchPartitionsException` instead of the general exception `AnalysisException`.

### How was this patch tested?
By running tests for `ALTER TABLE .. DROP PARTITION`:
```
$ build/sbt -Phive-2.3 -Phive-thriftserver "test:testOnly *AlterTableDropPartitionSuite"
```

Closes #30778 from MaxGekk/hive-drop-partition-exception.

Authored-by: Max Gekk <max.gekk@gmail.com>
Signed-off-by: HyukjinKwon <gurwls223@apache.org>
1 parent 87c5836 · commit 3dfdcf4
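For illustration of the user-facing change described above, here is a minimal sketch (not part of this commit), assuming a `SparkSession` named `spark` with Hive support and an existing partitioned table `ns.tbl`; the table name and partition values are placeholders:

```scala
import org.apache.spark.sql.catalyst.analysis.NoSuchPartitionsException

// Dropping a partition that does not exist in the Hive V1 catalog now raises
// NoSuchPartitionsException (which extends AnalysisException), matching the
// behaviour of the V1/V2 in-memory catalogs, so callers can catch it precisely
// instead of inspecting a generic AnalysisException message.
try {
  spark.sql("ALTER TABLE ns.tbl DROP PARTITION (id = 2)")
} catch {
  case e: NoSuchPartitionsException =>
    println(s"Missing partitions: ${e.getMessage}")
}

// IF EXISTS still skips missing partitions without raising any error.
spark.sql("ALTER TABLE ns.tbl DROP IF EXISTS PARTITION (id = 2)")
```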

File tree

5 files changed: +20 −59 lines changed


sql/core/src/test/scala/org/apache/spark/sql/execution/command/AlterTableDropPartitionSuiteBase.scala

Lines changed: 17 additions & 0 deletions
```diff
@@ -21,6 +21,7 @@ import org.scalactic.source.Position
 import org.scalatest.Tag
 
 import org.apache.spark.sql.{AnalysisException, QueryTest, Row}
+import org.apache.spark.sql.catalyst.analysis.NoSuchPartitionsException
 import org.apache.spark.sql.execution.datasources.PartitioningUtils
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.test.SQLTestUtils
@@ -146,4 +147,20 @@ trait AlterTableDropPartitionSuiteBase extends QueryTest with SQLTestUtils {
       assert(errMsg.contains(notFullPartitionSpecErr))
     }
   }
+
+  test("partition not exists") {
+    withNsTable("ns", "tbl") { t =>
+      sql(s"CREATE TABLE $t (id bigint, data string) $defaultUsing PARTITIONED BY (id)")
+      sql(s"ALTER TABLE $t ADD PARTITION (id=1) LOCATION 'loc'")
+
+      val errMsg = intercept[NoSuchPartitionsException] {
+        sql(s"ALTER TABLE $t DROP PARTITION (id=1), PARTITION (id=2)")
+      }.getMessage
+      assert(errMsg.contains("partitions not found in table"))
+
+      checkPartitions(t, Map("id" -> "1"))
+      sql(s"ALTER TABLE $t DROP IF EXISTS PARTITION (id=1), PARTITION (id=2)")
+      checkPartitions(t)
+    }
+  }
 }
```

sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/AlterTableDropPartitionSuite.scala

Lines changed: 1 addition & 19 deletions
```diff
@@ -17,7 +17,6 @@
 
 package org.apache.spark.sql.execution.command.v1
 
-import org.apache.spark.sql.catalyst.analysis.NoSuchPartitionsException
 import org.apache.spark.sql.connector.catalog.CatalogManager
 import org.apache.spark.sql.execution.command
 import org.apache.spark.sql.test.SharedSparkSession
@@ -32,21 +31,4 @@ trait AlterTableDropPartitionSuiteBase extends command.AlterTableDropPartitionSu
 
 class AlterTableDropPartitionSuite
   extends AlterTableDropPartitionSuiteBase
-  with SharedSparkSession {
-
-  test("partition not exists") {
-    withNsTable("ns", "tbl") { t =>
-      sql(s"CREATE TABLE $t (id bigint, data string) $defaultUsing PARTITIONED BY (id)")
-      sql(s"ALTER TABLE $t ADD PARTITION (id=1) LOCATION 'loc'")
-
-      val errMsg = intercept[NoSuchPartitionsException] {
-        sql(s"ALTER TABLE $t DROP PARTITION (id=1), PARTITION (id=2)")
-      }.getMessage
-      assert(errMsg.contains("partitions not found in table"))
-
-      checkPartitions(t, Map("id" -> "1"))
-      sql(s"ALTER TABLE $t DROP IF EXISTS PARTITION (id=1), PARTITION (id=2)")
-      checkPartitions(t)
-    }
-  }
-}
+  with SharedSparkSession
```

sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/AlterTableDropPartitionSuite.scala

Lines changed: 0 additions & 17 deletions
```diff
@@ -19,7 +19,6 @@ package org.apache.spark.sql.execution.command.v2
 
 import org.apache.spark.SparkConf
 import org.apache.spark.sql.AnalysisException
-import org.apache.spark.sql.catalyst.analysis.NoSuchPartitionsException
 import org.apache.spark.sql.connector.{InMemoryPartitionTableCatalog, InMemoryTableCatalog}
 import org.apache.spark.sql.execution.command
 import org.apache.spark.sql.test.SharedSparkSession
@@ -38,22 +37,6 @@ class AlterTableDropPartitionSuite
     .set(s"spark.sql.catalog.$catalog", classOf[InMemoryPartitionTableCatalog].getName)
     .set(s"spark.sql.catalog.non_part_$catalog", classOf[InMemoryTableCatalog].getName)
 
-  test("partition not exists") {
-    withNsTable("ns", "tbl") { t =>
-      sql(s"CREATE TABLE $t (id bigint, data string) $defaultUsing PARTITIONED BY (id)")
-      sql(s"ALTER TABLE $t ADD PARTITION (id=1) LOCATION 'loc'")
-
-      val errMsg = intercept[NoSuchPartitionsException] {
-        sql(s"ALTER TABLE $t DROP PARTITION (id=1), PARTITION (id=2)")
-      }.getMessage
-      assert(errMsg.contains("partitions not found in table"))
-
-      checkPartitions(t, Map("id" -> "1"))
-      sql(s"ALTER TABLE $t DROP IF EXISTS PARTITION (id=1), PARTITION (id=2)")
-      checkPartitions(t)
-    }
-  }
-
   test("SPARK-33650: drop partition into a table which doesn't support partition management") {
     withNsTable("ns", "tbl", s"non_part_$catalog") { t =>
       sql(s"CREATE TABLE $t (id bigint, data string) $defaultUsing")
```

sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala

Lines changed: 2 additions & 4 deletions
```diff
@@ -49,7 +49,7 @@ import org.apache.spark.internal.Logging
 import org.apache.spark.metrics.source.HiveCatalogMetrics
 import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.catalyst.TableIdentifier
-import org.apache.spark.sql.catalyst.analysis.{NoSuchDatabaseException, NoSuchPartitionException, PartitionsAlreadyExistException}
+import org.apache.spark.sql.catalyst.analysis.{NoSuchDatabaseException, NoSuchPartitionException, NoSuchPartitionsException, PartitionsAlreadyExistException}
 import org.apache.spark.sql.catalyst.catalog._
 import org.apache.spark.sql.catalyst.catalog.CatalogTypes.TablePartitionSpec
 import org.apache.spark.sql.catalyst.expressions.Expression
@@ -630,9 +630,7 @@ private[hive] class HiveClientImpl(
       // (b='1', c='1') and (b='1', c='2'), a partial spec of (b='1') will match both.
       val parts = client.getPartitions(hiveTable, s.asJava).asScala
       if (parts.isEmpty && !ignoreIfNotExists) {
-        throw new AnalysisException(
-          s"No partition is dropped. One partition spec '$s' does not exist in table '$table' " +
-          s"database '$db'")
+        throw new NoSuchPartitionsException(db, table, Seq(s))
       }
       parts.map(_.getValues)
     }.distinct
```
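As a reference for the message contract the shared test suite relies on, here is a hypothetical standalone check (not part of the commit) that constructs the exception with the same argument shape `HiveClientImpl.dropPartitions` now passes; the asserted substring is the one `AlterTableDropPartitionSuiteBase` expects:

```scala
import org.apache.spark.sql.catalyst.analysis.NoSuchPartitionsException

// A TablePartitionSpec is a Map[String, String] of partition column -> value.
val spec = Map("id" -> "2")

// Same constructor call shape as the new throw site above, with placeholder
// database and table names.
val e = new NoSuchPartitionsException("ns", "tbl", Seq(spec))

// The exception message is expected to mention the missing partitions,
// which is what the shared suite asserts via errMsg.contains(...).
assert(e.getMessage.contains("partitions not found in table"))
```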

sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/AlterTableDropPartitionSuite.scala

Lines changed: 0 additions & 19 deletions
```diff
@@ -17,7 +17,6 @@
 
 package org.apache.spark.sql.hive.execution.command
 
-import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.execution.command.v1
 import org.apache.spark.sql.hive.test.TestHiveSingleton
 
@@ -27,22 +26,4 @@ class AlterTableDropPartitionSuite
 
   override def version: String = "Hive V1"
   override def defaultUsing: String = "USING HIVE"
-
-  override protected val notFullPartitionSpecErr = "No partition is dropped"
-
-  test("partition not exists") {
-    withNsTable("ns", "tbl") { t =>
-      sql(s"CREATE TABLE $t (id bigint, data string) $defaultUsing PARTITIONED BY (id)")
-      sql(s"ALTER TABLE $t ADD PARTITION (id=1) LOCATION 'loc'")
-
-      val errMsg = intercept[AnalysisException] {
-        sql(s"ALTER TABLE $t DROP PARTITION (id=1), PARTITION (id=2)")
-      }.getMessage
-      assert(errMsg.contains("No partition is dropped"))
-
-      checkPartitions(t, Map("id" -> "1"))
-      sql(s"ALTER TABLE $t DROP IF EXISTS PARTITION (id=1), PARTITION (id=2)")
-      checkPartitions(t)
-    }
-  }
 }
```
