diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala
index 7e7429acf695..32b3e279a33a 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala
@@ -49,7 +49,7 @@ import org.apache.spark.internal.Logging
 import org.apache.spark.metrics.source.HiveCatalogMetrics
 import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.catalyst.TableIdentifier
-import org.apache.spark.sql.catalyst.analysis.{NoSuchDatabaseException, NoSuchPartitionException, PartitionsAlreadyExistException}
+import org.apache.spark.sql.catalyst.analysis.{NoSuchDatabaseException, NoSuchPartitionException, NoSuchPartitionsException, PartitionsAlreadyExistException}
 import org.apache.spark.sql.catalyst.catalog._
 import org.apache.spark.sql.catalyst.catalog.CatalogTypes.TablePartitionSpec
 import org.apache.spark.sql.catalyst.expressions.Expression
@@ -630,9 +630,7 @@ private[hive] class HiveClientImpl(
         // (b='1', c='1') and (b='1', c='2'), a partial spec of (b='1') will match both.
         val parts = client.getPartitions(hiveTable, s.asJava).asScala
         if (parts.isEmpty && !ignoreIfNotExists) {
-          throw new AnalysisException(
-            s"No partition is dropped. One partition spec '$s' does not exist in table '$table' " +
-            s"database '$db'")
+          throw new NoSuchPartitionsException(db, table, Seq(s))
         }
         parts.map(_.getValues)
       }.distinct
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
index afe1df68e676..6d45ad890921 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
@@ -28,7 +28,7 @@ import org.scalatest.BeforeAndAfterEach
 import org.apache.spark.SparkException
 import org.apache.spark.sql.{AnalysisException, QueryTest, Row, SaveMode}
 import org.apache.spark.sql.catalyst.TableIdentifier
-import org.apache.spark.sql.catalyst.analysis.{NoSuchPartitionException, PartitionsAlreadyExistException, TableAlreadyExistsException}
+import org.apache.spark.sql.catalyst.analysis.{NoSuchPartitionException, NoSuchPartitionsException, PartitionsAlreadyExistException, TableAlreadyExistsException}
 import org.apache.spark.sql.catalyst.catalog._
 import org.apache.spark.sql.catalyst.parser.ParseException
 import org.apache.spark.sql.connector.FakeV2Provider
@@ -2886,4 +2886,20 @@ class HiveDDLSuite
       checkAnswer(sql("SHOW PARTITIONS t"), Seq(Row("id=1"), Row("id=2")))
     }
   }
+
+  test("SPARK-33788: partition not exists") {
+    withTable("t") {
+      sql(s"CREATE TABLE t (data string) PARTITIONED BY (id bigint)")
+      sql(s"ALTER TABLE t ADD PARTITION (id=1)")
+
+      val errMsg = intercept[NoSuchPartitionsException] {
+        sql(s"ALTER TABLE t DROP PARTITION (id=1), PARTITION (id=2)")
+      }.getMessage
+      assert(errMsg.contains("partitions not found in table"))
+
+      checkAnswer(sql("SHOW PARTITIONS t"), Seq(Row("id=1")))
+      sql(s"ALTER TABLE t DROP IF EXISTS PARTITION (id=1), PARTITION (id=2)")
+      checkAnswer(sql("SHOW PARTITIONS t"), Seq.empty)
+    }
+  }
 }