diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/InMemoryCatalog.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/InMemoryCatalog.scala
index c10e0bbfde21..e3896c598eac 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/InMemoryCatalog.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/InMemoryCatalog.scala
@@ -139,11 +139,8 @@ class InMemoryCatalog(
     if (catalog.contains(db)) {
       if (!cascade) {
         // If cascade is false, make sure the database is empty.
-        if (catalog(db).tables.nonEmpty) {
-          throw QueryCompilationErrors.databaseNotEmptyError(db, "tables")
-        }
-        if (catalog(db).functions.nonEmpty) {
-          throw QueryCompilationErrors.databaseNotEmptyError(db, "functions")
+        if (catalog(db).tables.nonEmpty || catalog(db).functions.nonEmpty) {
+          throw QueryCompilationErrors.cannotDropNonemptyDatabaseError(db)
         }
       }
       // Remove the database.
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
index 79de8b6a1d95..c948b70940f9 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
@@ -540,8 +540,14 @@ object QueryCompilationErrors {
       s"rename temporary view from '$oldName' to '$newName': destination view already exists")
   }
 
-  def databaseNotEmptyError(db: String, details: String): Throwable = {
-    new AnalysisException(s"Database $db is not empty. One or more $details exist.")
+  def cannotDropNonemptyDatabaseError(db: String): Throwable = {
+    new AnalysisException(s"Cannot drop a non-empty database: $db. " +
+      "Use CASCADE option to drop a non-empty database.")
+  }
+
+  def cannotDropNonemptyNamespaceError(namespace: Seq[String]): Throwable = {
+    new AnalysisException(s"Cannot drop a non-empty namespace: ${namespace.quoted}. " +
+      "Use CASCADE option to drop a non-empty namespace.")
   }
 
   def invalidNameForTableOrDatabaseError(name: String): Throwable = {
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
index 6f0ed23e1022..31d17fce09b2 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -610,12 +610,6 @@ object QueryExecutionErrors {
       "Schema of v1 relation: " + v1Schema)
   }
 
-  def cannotDropNonemptyNamespaceError(namespace: Seq[String]): Throwable = {
-    new SparkException(
-      s"Cannot drop a non-empty namespace: ${namespace.quoted}. " +
-      "Use CASCADE option to drop a non-empty namespace.")
-  }
-
   def noRecordsFromEmptyDataReaderError(): Throwable = {
     new IOException("No records should be returned from EmptyDataReader")
   }
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DropNamespaceExec.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DropNamespaceExec.scala
index dbd5cbd87494..9a9d8e1d4d57 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DropNamespaceExec.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DropNamespaceExec.scala
@@ -20,7 +20,7 @@ package org.apache.spark.sql.execution.datasources.v2
 import org.apache.spark.sql.catalyst.InternalRow
 import org.apache.spark.sql.catalyst.expressions.Attribute
 import org.apache.spark.sql.connector.catalog.CatalogPlugin
-import org.apache.spark.sql.errors.{QueryCompilationErrors, QueryExecutionErrors}
+import org.apache.spark.sql.errors.QueryCompilationErrors
 
 /**
  * Physical plan node for dropping a namespace.
@@ -42,12 +42,12 @@ case class DropNamespaceExec(
       if (!cascade) {
         if (catalog.asTableCatalog.listTables(ns).nonEmpty ||
           nsCatalog.listNamespaces(ns).nonEmpty) {
-          throw QueryExecutionErrors.cannotDropNonemptyNamespaceError(namespace)
+          throw QueryCompilationErrors.cannotDropNonemptyNamespaceError(namespace)
         }
       }
 
       if (!nsCatalog.dropNamespace(ns)) {
-        throw QueryExecutionErrors.cannotDropNonemptyNamespaceError(namespace)
+        throw QueryCompilationErrors.cannotDropNonemptyNamespaceError(namespace)
       }
     } else if (!ifExists) {
       throw QueryCompilationErrors.noSuchNamespaceError(ns)
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DropNamespaceSuiteBase.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DropNamespaceSuiteBase.scala
index 1ff4e1bac049..376f376c32d5 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DropNamespaceSuiteBase.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DropNamespaceSuiteBase.scala
@@ -36,7 +36,7 @@ trait DropNamespaceSuiteBase extends QueryTest with DDLCommandTestUtils {
   protected def builtinTopNamespaces: Seq[String] = Seq.empty
   protected def isCasePreserving: Boolean = true
 
-  protected def assertDropFails(): Unit
+  protected def namespaceAlias: String = "namespace"
 
   protected def checkNamespace(expected: Seq[String]) = {
     val df = spark.sql(s"SHOW NAMESPACES IN $catalog")
@@ -72,7 +72,10 @@ trait DropNamespaceSuiteBase extends QueryTest with DDLCommandTestUtils {
     checkNamespace(Seq("ns") ++ builtinTopNamespaces)
 
     // $catalog.ns.table is present, thus $catalog.ns cannot be dropped.
-    assertDropFails()
+    val e = intercept[AnalysisException] {
+      sql(s"DROP NAMESPACE $catalog.ns")
+    }
+    assert(e.getMessage.contains(s"Cannot drop a non-empty $namespaceAlias: ns"))
     sql(s"DROP TABLE $catalog.ns.table")
 
     // Now that $catalog.ns is empty, it can be dropped.
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/DropNamespaceSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/DropNamespaceSuite.scala
index 2db4423f1be3..24e51317575d 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/DropNamespaceSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/DropNamespaceSuite.scala
@@ -31,12 +31,7 @@ import org.apache.spark.sql.execution.command
 trait DropNamespaceSuiteBase extends command.DropNamespaceSuiteBase {
   override protected def builtinTopNamespaces: Seq[String] = Seq("default")
 
-  override protected def assertDropFails(): Unit = {
-    val e = intercept[AnalysisException] {
-      sql(s"DROP NAMESPACE $catalog.ns")
-    }
-    assert(e.getMessage.contains("Database ns is not empty. One or more tables exist"))
-  }
+  override protected def namespaceAlias(): String = "database"
 
   test("drop default namespace") {
     val message = intercept[AnalysisException] {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/DropNamespaceSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/DropNamespaceSuite.scala
index ddc913e0d80b..6f82ee5827ed 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/DropNamespaceSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/DropNamespaceSuite.scala
@@ -17,18 +17,9 @@
 
 package org.apache.spark.sql.execution.command.v2
 
-import org.apache.spark.SparkException
 import org.apache.spark.sql.execution.command
 
 /**
  * The class contains tests for the `DROP NAMESPACE` command to check V2 table catalogs.
  */
-class DropNamespaceSuite extends command.DropNamespaceSuiteBase with CommandSuiteBase {
-  // TODO: Unify the error that throws from v1 and v2 test suite into `AnalysisException`
-  override protected def assertDropFails(): Unit = {
-    val e = intercept[SparkException] {
-      sql(s"DROP NAMESPACE $catalog.ns")
-    }
-    assert(e.getMessage.contains("Cannot drop a non-empty namespace: ns"))
-  }
-}
+class DropNamespaceSuite extends command.DropNamespaceSuiteBase with CommandSuiteBase
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala
index 5fccce2678f8..52b21598edc0 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala
@@ -41,6 +41,7 @@ import org.apache.spark.sql.catalyst.catalog._
 import org.apache.spark.sql.catalyst.catalog.ExternalCatalogUtils._
 import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.catalyst.util.{CaseInsensitiveMap, CharVarcharUtils}
+import org.apache.spark.sql.errors.QueryCompilationErrors
 import org.apache.spark.sql.execution.command.DDLUtils
 import org.apache.spark.sql.execution.datasources.{PartitioningUtils, SourceOptions}
 import org.apache.spark.sql.hive.client.HiveClient
@@ -196,7 +197,15 @@ private[spark] class HiveExternalCatalog(conf: SparkConf, hadoopConf: Configurat
       db: String,
       ignoreIfNotExists: Boolean,
       cascade: Boolean): Unit = withClient {
-    client.dropDatabase(db, ignoreIfNotExists, cascade)
+    try {
+      client.dropDatabase(db, ignoreIfNotExists, cascade)
+    } catch {
+      case NonFatal(exception) =>
+        if (exception.getClass.getName.equals("org.apache.hadoop.hive.ql.metadata.HiveException")
+          && exception.getMessage.contains(s"Database $db is not empty.")) {
+          throw QueryCompilationErrors.cannotDropNonemptyDatabaseError(db)
+        } else throw exception
+    }
   }
 
 /**
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
index 3bf3ee58082e..014feb33df5e 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
@@ -1235,7 +1235,7 @@ class HiveDDLSuite
       if (tableExists && !cascade) {
         assertAnalysisError(
           sqlDropDatabase,
-          s"Database $dbName is not empty. One or more tables exist.")
+          s"Cannot drop a non-empty database: $dbName.")
         // the database directory was not removed
         assert(fs.exists(new Path(expectedDBLocation)))
       } else {