diff --git a/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4 b/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
index d2d145606b627..25aa9008101f6 100644
--- a/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
+++ b/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
@@ -93,8 +93,8 @@ statement
         (WITH (DBPROPERTIES | PROPERTIES) tablePropertyList))*            #createNamespace
     | ALTER (database | NAMESPACE) multipartIdentifier
         SET (DBPROPERTIES | PROPERTIES) tablePropertyList                 #setNamespaceProperties
-    | ALTER database db=errorCapturingIdentifier
-        SET locationSpec                                                  #setDatabaseLocation
+    | ALTER (database | NAMESPACE) multipartIdentifier
+        SET locationSpec                                                  #setNamespaceLocation
     | DROP (database | NAMESPACE) (IF EXISTS)? multipartIdentifier
         (RESTRICT | CASCADE)?                                             #dropNamespace
     | SHOW (DATABASES | NAMESPACES) ((FROM | IN) multipartIdentifier)?
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveCatalogs.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveCatalogs.scala
index 0d8d58520edad..5a4fb2e865903 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveCatalogs.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveCatalogs.scala
@@ -94,7 +94,11 @@ class ResolveCatalogs(val catalogManager: CatalogManager)
         s"because view support in catalog has not been implemented yet")
 
     case AlterNamespaceSetPropertiesStatement(NonSessionCatalog(catalog, nameParts), properties) =>
-      AlterNamespaceSetProperties(catalog, nameParts, properties)
+      AlterNamespaceSetProperties(catalog.asNamespaceCatalog, nameParts, properties)
+
+    case AlterNamespaceSetLocationStatement(NonSessionCatalog(catalog, nameParts), location) =>
+      AlterNamespaceSetProperties(
+        catalog.asNamespaceCatalog, nameParts, Map("location" -> location))
 
     case DescribeTableStatement(
          nameParts @ NonSessionCatalog(catalog, tableName), partitionSpec, isExtended) =>
@@ -176,7 +180,7 @@ class ResolveCatalogs(val catalogManager: CatalogManager)
       DropNamespace(catalog, nameParts, ifExists, cascade)
 
     case DescribeNamespaceStatement(NonSessionCatalog(catalog, nameParts), extended) =>
-      DescribeNamespace(catalog, nameParts, extended)
+      DescribeNamespace(catalog.asNamespaceCatalog, nameParts, extended)
 
     case ShowNamespacesStatement(Some(CatalogAndNamespace(catalog, namespace)), pattern) =>
       ShowNamespaces(catalog.asNamespaceCatalog, namespace, pattern)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
index 6be163b4b3c2a..eedd500488b07 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
@@ -2545,6 +2545,22 @@ class AstBuilder(conf: SQLConf) extends SqlBaseBaseVisitor[AnyRef] with Logging
     }
   }
 
+  /**
+   * Create an [[AlterNamespaceSetLocationStatement]] logical plan.
+   *
+   * For example:
+   * {{{
+   *   ALTER (DATABASE|SCHEMA|NAMESPACE) namespace SET LOCATION path;
+   * }}}
+   */
+  override def visitSetNamespaceLocation(ctx: SetNamespaceLocationContext): LogicalPlan = {
+    withOrigin(ctx) {
+      AlterNamespaceSetLocationStatement(
+        visitMultipartIdentifier(ctx.multipartIdentifier),
+        visitLocationSpec(ctx.locationSpec))
+    }
+  }
+
   /**
    * Create a [[ShowNamespacesStatement]] command.
    */
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/statements.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/statements.scala
index 875ff4e4a2821..7a98cccc3d7a4 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/statements.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/statements.scala
@@ -357,6 +357,13 @@ case class AlterNamespaceSetPropertiesStatement(
     namespace: Seq[String],
     properties: Map[String, String]) extends ParsedStatement
 
+/**
+ * ALTER (DATABASE|SCHEMA|NAMESPACE) ... SET LOCATION command, as parsed from SQL.
+ */
+case class AlterNamespaceSetLocationStatement(
+    namespace: Seq[String],
+    location: String) extends ParsedStatement
+
 /**
  * A SHOW NAMESPACES statement, as parsed from SQL.
  */
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/v2Commands.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/v2Commands.scala
index 22edb3619a929..f7f8b2778d234 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/v2Commands.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/v2Commands.scala
@@ -259,7 +259,7 @@ case class DropNamespace(
  * The logical plan of the DESCRIBE NAMESPACE command that works for v2 catalogs.
  */
 case class DescribeNamespace(
-    catalog: CatalogPlugin,
+    catalog: SupportsNamespaces,
     namespace: Seq[String],
     extended: Boolean) extends Command {
 
@@ -275,7 +275,7 @@ case class DescribeNamespace(
  * command that works for v2 catalogs.
  */
 case class AlterNamespaceSetProperties(
-    catalog: CatalogPlugin,
+    catalog: SupportsNamespaces,
     namespace: Seq[String],
     properties: Map[String, String]) extends Command
 
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala
index e87f53910c2c7..d3ca6f7a8eee2 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala
@@ -1192,6 +1192,20 @@ class DDLParserSuite extends AnalysisTest {
         Seq("a", "b", "c"),
         Map("b" -> "b")))
   }
 
+  test("set namespace location") {
+    comparePlans(
+      parsePlan("ALTER DATABASE a.b.c SET LOCATION '/home/user/db'"),
+      AlterNamespaceSetLocationStatement(Seq("a", "b", "c"), "/home/user/db"))
+
+    comparePlans(
+      parsePlan("ALTER SCHEMA a.b.c SET LOCATION '/home/user/db'"),
+      AlterNamespaceSetLocationStatement(Seq("a", "b", "c"), "/home/user/db"))
+
+    comparePlans(
+      parsePlan("ALTER NAMESPACE a.b.c SET LOCATION '/home/user/db'"),
+      AlterNamespaceSetLocationStatement(Seq("a", "b", "c"), "/home/user/db"))
+  }
+
   test("show databases: basic") {
     comparePlans(
       parsePlan("SHOW DATABASES"),
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala b/sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala
index cac320edc47e2..708203119f4bb 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala
@@ -172,6 +172,13 @@ class ResolveSessionCatalog(
       }
       AlterDatabasePropertiesCommand(nameParts.head, properties)
 
+    case AlterNamespaceSetLocationStatement(SessionCatalog(_, nameParts), location) =>
+      if (nameParts.length != 1) {
+        throw new AnalysisException(
+          s"The database name is not valid: ${nameParts.quoted}")
+      }
+      AlterDatabaseSetLocationCommand(nameParts.head, location)
+
     case DescribeTableStatement(
          nameParts @ SessionCatalog(catalog, tableName), partitionSpec, isExtended) =>
       loadTable(catalog, tableName.asIdentifier).collect {
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
index 7fe4c00f5dc8d..8241f850e3aa7 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
@@ -227,22 +227,6 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder(conf) {
       options = Option(ctx.tablePropertyList).map(visitPropertyKeyValues).getOrElse(Map.empty))
   }
 
-  /**
-   * Create an [[AlterDatabaseSetLocationCommand]] command.
-   *
-   * For example:
-   * {{{
-   *   ALTER (DATABASE|SCHEMA) database SET LOCATION path;
-   * }}}
-   */
-  override def visitSetDatabaseLocation(
-      ctx: SetDatabaseLocationContext): LogicalPlan = withOrigin(ctx) {
-    AlterDatabaseSetLocationCommand(
-      ctx.db.getText,
-      visitLocationSpec(ctx.locationSpec)
-    )
-  }
-
   /**
    * Create a plan for a DESCRIBE FUNCTION command.
    */
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/AlterNamespaceSetPropertiesExec.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/AlterNamespaceSetPropertiesExec.scala
index fd6922cd1c3ab..1eebe4cdb6a86 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/AlterNamespaceSetPropertiesExec.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/AlterNamespaceSetPropertiesExec.scala
@@ -19,23 +19,20 @@ package org.apache.spark.sql.execution.datasources.v2
 
 import org.apache.spark.sql.catalyst.InternalRow
 import org.apache.spark.sql.catalyst.expressions.Attribute
-import org.apache.spark.sql.connector.catalog.{CatalogPlugin, NamespaceChange}
+import org.apache.spark.sql.connector.catalog.{NamespaceChange, SupportsNamespaces}
 
 /**
  * Physical plan node for setting properties of namespace.
  */
 case class AlterNamespaceSetPropertiesExec(
-    catalog: CatalogPlugin,
+    catalog: SupportsNamespaces,
     namespace: Seq[String],
-    props: Map[String, String])
-  extends V2CommandExec {
+    props: Map[String, String]) extends V2CommandExec {
 
   override protected def run(): Seq[InternalRow] = {
-    import org.apache.spark.sql.connector.catalog.CatalogV2Implicits._
-
     val changes = props.map{ case (k, v) =>
       NamespaceChange.setProperty(k, v)
     }.toSeq
-    catalog.asNamespaceCatalog.alterNamespace(namespace.toArray, changes: _*)
+    catalog.alterNamespace(namespace.toArray, changes: _*)
     Seq.empty
   }
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DescribeNamespaceExec.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DescribeNamespaceExec.scala
index 5c20e5ae08383..7c5cfcbbc7e3c 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DescribeNamespaceExec.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DescribeNamespaceExec.scala
@@ -23,7 +23,7 @@ import scala.collection.mutable.ArrayBuffer
 import org.apache.spark.sql.catalyst.InternalRow
 import org.apache.spark.sql.catalyst.encoders.RowEncoder
 import org.apache.spark.sql.catalyst.expressions.{Attribute, GenericRowWithSchema}
-import org.apache.spark.sql.connector.catalog.CatalogPlugin
+import org.apache.spark.sql.connector.catalog.SupportsNamespaces
 import org.apache.spark.sql.execution.datasources.v2.V2SessionCatalog.COMMENT_TABLE_PROP
 import org.apache.spark.sql.execution.datasources.v2.V2SessionCatalog.LOCATION_TABLE_PROP
 import org.apache.spark.sql.execution.datasources.v2.V2SessionCatalog.RESERVED_PROPERTIES
@@ -34,19 +34,15 @@ import org.apache.spark.sql.types.StructType
  */
 case class DescribeNamespaceExec(
     output: Seq[Attribute],
-    catalog: CatalogPlugin,
+    catalog: SupportsNamespaces,
     namespace: Seq[String],
     isExtended: Boolean) extends V2CommandExec {
-
   private val encoder = RowEncoder(StructType.fromAttributes(output)).resolveAndBind()
 
   override protected def run(): Seq[InternalRow] = {
-    import org.apache.spark.sql.connector.catalog.CatalogV2Implicits._
-
     val rows = new ArrayBuffer[InternalRow]()
-    val nsCatalog = catalog.asNamespaceCatalog
     val ns = namespace.toArray
-    val metadata = nsCatalog.loadNamespaceMetadata(ns)
+    val metadata = catalog.loadNamespaceMetadata(ns)
 
     rows += toCatalystRow("Namespace Name", ns.last)
     rows += toCatalystRow("Description", metadata.get(COMMENT_TABLE_PROP))
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
index e5bf8e337c8d3..36d2deaa309ee 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
@@ -948,6 +948,20 @@ class DataSourceV2SQLSuite
     }
   }
 
+  test("AlterNamespaceSetLocation using v2 catalog") {
+    withNamespace("testcat.ns1.ns2") {
+      sql("CREATE NAMESPACE IF NOT EXISTS testcat.ns1.ns2 COMMENT " +
+        "'test namespace' LOCATION '/tmp/ns_test_1'")
+      sql("ALTER NAMESPACE testcat.ns1.ns2 SET LOCATION '/tmp/ns_test_2'")
+      val descriptionDf = sql("DESCRIBE NAMESPACE EXTENDED testcat.ns1.ns2")
+      assert(descriptionDf.collect() === Seq(
+        Row("Namespace Name", "ns2"),
+        Row("Description", "test namespace"),
+        Row("Location", "/tmp/ns_test_2")
+      ))
+    }
+  }
+
   test("ShowNamespaces: show root namespaces with default v2 catalog") {
     spark.conf.set(SQLConf.DEFAULT_CATALOG.key, "testcat")
 
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLParserSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLParserSuite.scala
index b11e02a9c52bd..37d08137220b7 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLParserSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLParserSuite.scala
@@ -80,15 +80,6 @@ class DDLParserSuite extends AnalysisTest with SharedSparkSession {
       containsThesePhrases = Seq("key_without_value"))
   }
 
-  test("alter database set location") {
-    // ALTER (DATABASE|SCHEMA) database_name SET LOCATION
-    val sql1 = "ALTER DATABASE database_name SET LOCATION '/home/user/db'"
-    val parsed1 = parser.parsePlan(sql1)
-
-    val expected1 = AlterDatabaseSetLocationCommand("database_name", "/home/user/db")
-    comparePlans(parsed1, expected1)
-  }
-
   test("create function") {
     val sql1 =
       """