diff --git a/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4 b/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
index a4a23d4b33297..c08860ea337b8 100644
--- a/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
+++ b/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4
@@ -189,7 +189,7 @@ statement
         (LIKE? pattern=STRING)?                                        #showTables
     | SHOW TABLE EXTENDED ((FROM | IN) db=errorCapturingIdentifier)?
         LIKE pattern=STRING partitionSpec?                             #showTable
-    | SHOW TBLPROPERTIES table=tableIdentifier
+    | SHOW TBLPROPERTIES table=multipartIdentifier
         ('(' key=tablePropertyKey ')')?                                #showTblProperties
     | SHOW COLUMNS (FROM | IN) table=multipartIdentifier
         ((FROM | IN) namespace=multipartIdentifier)?                   #showColumns
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
index df0321c8f5fc8..ead122ed9c56b 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
@@ -691,6 +691,11 @@ class Analyzer(
           .map(rel => alter.copy(table = rel))
           .getOrElse(alter)
 
+      case show @ ShowTableProperties(u: UnresolvedV2Relation, _) =>
+        CatalogV2Util.loadRelation(u.catalog, u.tableName)
+          .map(rel => show.copy(table = rel))
+          .getOrElse(show)
+
       case u: UnresolvedV2Relation =>
         CatalogV2Util.loadRelation(u.catalog, u.tableName).getOrElse(u)
     }
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveCatalogs.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveCatalogs.scala
index bca07262a6f74..5b48f874a8701 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveCatalogs.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveCatalogs.scala
@@ -207,6 +207,11 @@ class ResolveCatalogs(val catalogManager: CatalogManager)
 
     case ShowCurrentNamespaceStatement() =>
       ShowCurrentNamespace(catalogManager)
+
+    case ShowTablePropertiesStatement(
+        nameParts @ NonSessionCatalog(catalog, tableName), propertyKey) =>
+      val r = UnresolvedV2Relation(nameParts, catalog.asTableCatalog, tableName.asIdentifier)
+      ShowTableProperties(r, propertyKey)
   }
 
   object NonSessionCatalog {
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
index 00a1964c9501b..ce8e1f53f7189 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
@@ -3193,4 +3193,20 @@ class AstBuilder(conf: SQLConf) extends SqlBaseBaseVisitor[AnyRef] with Logging
       originalText = source(ctx.query),
       query = plan(ctx.query))
   }
+
+  /**
+   * A command for users to list the properties for a table. If propertyKey is specified, the value
+   * for the propertyKey is returned. If propertyKey is not specified, all the keys and their
+   * corresponding values are returned.
+   * The syntax of using this command in SQL is:
+   * {{{
+   *   SHOW TBLPROPERTIES multi_part_name[('propertyKey')];
+   * }}}
+   */
+  override def visitShowTblProperties(
+      ctx: ShowTblPropertiesContext): LogicalPlan = withOrigin(ctx) {
+    ShowTablePropertiesStatement(
+      visitMultipartIdentifier(ctx.table),
+      Option(ctx.key).map(visitTablePropertyKey))
+  }
 }
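Reviewer note (not part of the patch): with the grammar and parser changes above, SHOW TBLPROPERTIES now accepts a multi-part name and can therefore address tables in a v2 catalog. A minimal end-to-end sketch, assuming a v2 catalog registered under the hypothetical name `testcat`, backed by the hypothetical implementation class `com.example.DemoCatalog`:

```scala
import org.apache.spark.sql.SparkSession

object ShowTblPropertiesDemo {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      // Register a v2 catalog; the implementation class here is a placeholder
      // for any TableCatalog implementation.
      .config("spark.sql.catalog.testcat", "com.example.DemoCatalog")
      .getOrCreate()

    // Both forms now parse into ShowTablePropertiesStatement(Seq("testcat", "ns", "tbl"), ...).
    spark.sql("SHOW TBLPROPERTIES testcat.ns.tbl").show()
    spark.sql("SHOW TBLPROPERTIES testcat.ns.tbl('owner')").show()
  }
}
```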
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/statements.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/statements.scala
index 6707a80213cdf..a83fe43b84b4a 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/statements.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/statements.scala
@@ -448,3 +448,10 @@ case class ShowColumnsStatement(
  * A SHOW CURRENT NAMESPACE statement, as parsed from SQL
  */
 case class ShowCurrentNamespaceStatement() extends ParsedStatement
+
+/**
+ * A SHOW TBLPROPERTIES statement, as parsed from SQL
+ */
+case class ShowTablePropertiesStatement(
+    tableName: Seq[String],
+    propertyKey: Option[String]) extends ParsedStatement
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/v2Commands.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/v2Commands.scala
index f2e7a0699fd97..f27d10924ef97 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/v2Commands.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/v2Commands.scala
@@ -412,3 +412,14 @@ case class ShowCurrentNamespace(catalogManager: CatalogManager) extends Command
     AttributeReference("catalog", StringType, nullable = false)(),
     AttributeReference("namespace", StringType, nullable = false)())
 }
+
+/**
+ * The logical plan of the SHOW TBLPROPERTIES command that works for v2 catalogs.
+ */
+case class ShowTableProperties(
+    table: NamedRelation,
+    propertyKey: Option[String]) extends Command {
+  override val output: Seq[Attribute] = Seq(
+    AttributeReference("key", StringType, nullable = false)(),
+    AttributeReference("value", StringType, nullable = false)())
+}
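Reviewer note (not part of the patch): the `output` declared above pins the command's result schema to two non-nullable string columns, which is what the suite added further down asserts. A small sketch of the equivalent `StructType`, REPL-style:

```scala
import org.apache.spark.sql.types.{StringType, StructField, StructType}

// Mirrors the AttributeReferences in ShowTableProperties.output.
val showTblPropertiesSchema = StructType(Seq(
  StructField("key", StringType, nullable = false),
  StructField("value", StringType, nullable = false)))
```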
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala
index 11fc530928898..a8938759ba45a 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala
@@ -1586,6 +1586,16 @@ class DDLParserSuite extends AnalysisTest {
     comparePlans(parsed, expected)
   }
 
+  test("SHOW TBLPROPERTIES table") {
+    comparePlans(
+      parsePlan("SHOW TBLPROPERTIES a.b.c"),
+      ShowTablePropertiesStatement(Seq("a", "b", "c"), None))
+
+    comparePlans(
+      parsePlan("SHOW TBLPROPERTIES a.b.c('propKey1')"),
+      ShowTablePropertiesStatement(Seq("a", "b", "c"), Some("propKey1")))
+  }
+
   private case class TableSpec(
       name: Seq[String],
       schema: Option[StructType],
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala b/sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala
index bcf067ba0b97a..280f4d9d7a889 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala
@@ -428,6 +428,11 @@ class ResolveSessionCatalog(
         v1TableName.asTableIdentifier,
         originalText,
         query)
+
+    case ShowTablePropertiesStatement(SessionCatalog(_, tableName), propertyKey) =>
+      ShowTablePropertiesCommand(
+        tableName.asTableIdentifier,
+        propertyKey)
   }
 
   private def parseV1Table(tableName: Seq[String], sql: String): Seq[String] = {
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
index e58cf2372c7f3..b34eca9fb2ab6 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
@@ -106,22 +106,6 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder(conf) {
       partitionSpec = partitionSpec)
   }
 
-  /**
-   * A command for users to list the properties for a table. If propertyKey is specified, the value
-   * for the propertyKey is returned. If propertyKey is not specified, all the keys and their
-   * corresponding values are returned.
-   * The syntax of using this command in SQL is:
-   * {{{
-   *   SHOW TBLPROPERTIES table_name[('propertyKey')];
-   * }}}
-   */
-  override def visitShowTblProperties(
-      ctx: ShowTblPropertiesContext): LogicalPlan = withOrigin(ctx) {
-    ShowTablePropertiesCommand(
-      visitTableIdentifier(ctx.tableIdentifier),
-      Option(ctx.key).map(visitTablePropertyKey))
-  }
-
   /**
    * Create a [[RefreshResource]] logical plan.
   */
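Reviewer note (not part of the patch): names that resolve to the session catalog still fall back to the v1 `ShowTablePropertiesCommand` through the rule above, so existing single-part usage is unchanged. A spark-shell-style sketch, where `spark` is predefined:

```scala
// Resolved by ResolveSessionCatalog into the v1 ShowTablePropertiesCommand.
spark.sql("CREATE TABLE tbl (id BIGINT) USING parquet TBLPROPERTIES ('owner' = 'alice')")
spark.sql("SHOW TBLPROPERTIES tbl").show()
```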
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DataSourceV2Strategy.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DataSourceV2Strategy.scala
index 0a7785b0e088b..5115da4a39c70 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DataSourceV2Strategy.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DataSourceV2Strategy.scala
@@ -22,7 +22,7 @@ import scala.collection.JavaConverters._
 import org.apache.spark.sql.{AnalysisException, Strategy}
 import org.apache.spark.sql.catalyst.expressions.{And, PredicateHelper, SubqueryExpression}
 import org.apache.spark.sql.catalyst.planning.PhysicalOperation
-import org.apache.spark.sql.catalyst.plans.logical.{AlterTable, AppendData, CreateNamespace, CreateTableAsSelect, CreateV2Table, DeleteFromTable, DescribeTable, DropNamespace, DropTable, LogicalPlan, OverwriteByExpression, OverwritePartitionsDynamic, RefreshTable, Repartition, ReplaceTable, ReplaceTableAsSelect, SetCatalogAndNamespace, ShowCurrentNamespace, ShowNamespaces, ShowTables}
+import org.apache.spark.sql.catalyst.plans.logical.{AlterTable, AppendData, CreateNamespace, CreateTableAsSelect, CreateV2Table, DeleteFromTable, DescribeTable, DropNamespace, DropTable, LogicalPlan, OverwriteByExpression, OverwritePartitionsDynamic, RefreshTable, Repartition, ReplaceTable, ReplaceTableAsSelect, SetCatalogAndNamespace, ShowCurrentNamespace, ShowNamespaces, ShowTableProperties, ShowTables}
 import org.apache.spark.sql.connector.catalog.{StagingTableCatalog, TableCapability}
 import org.apache.spark.sql.connector.read.streaming.{ContinuousStream, MicroBatchStream}
 import org.apache.spark.sql.execution.{FilterExec, ProjectExec, SparkPlan}
@@ -213,6 +213,9 @@ object DataSourceV2Strategy extends Strategy with PredicateHelper {
     case r: ShowCurrentNamespace =>
       ShowCurrentNamespaceExec(r.output, r.catalogManager) :: Nil
 
+    case r @ ShowTableProperties(DataSourceV2Relation(table, _, _), propertyKey) =>
+      ShowTablePropertiesExec(r.output, table, propertyKey) :: Nil
+
     case _ => Nil
   }
 }
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ShowTablePropertiesExec.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ShowTablePropertiesExec.scala
new file mode 100644
index 0000000000000..7905c35f55de0
--- /dev/null
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ShowTablePropertiesExec.scala
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.execution.datasources.v2
+
+import org.apache.spark.sql.catalyst.InternalRow
+import org.apache.spark.sql.catalyst.encoders.RowEncoder
+import org.apache.spark.sql.catalyst.expressions.{Attribute, GenericRowWithSchema}
+import org.apache.spark.sql.connector.catalog.Table
+
+/**
+ * Physical plan node for showing table properties.
+ */
+case class ShowTablePropertiesExec(
+    output: Seq[Attribute],
+    catalogTable: Table,
+    propertyKey: Option[String]) extends V2CommandExec {
+
+  override protected def run(): Seq[InternalRow] = {
+    import scala.collection.JavaConverters._
+    val encoder = RowEncoder(schema).resolveAndBind()
+
+    val properties = catalogTable.properties.asScala
+    propertyKey match {
+      case Some(p) =>
+        val propValue = properties
+          .getOrElse(p, s"Table ${catalogTable.name} does not have property: $p")
+        Seq(encoder.toRow(new GenericRowWithSchema(Array(p, propValue), schema)).copy())
+      case None =>
+        properties.keys.map(k =>
+          encoder.toRow(new GenericRowWithSchema(Array(k, properties(k)), schema)).copy()).toSeq
+    }
+  }
+}
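Reviewer note (not part of the patch): `run()` above reports a missing key as an informational row rather than throwing; the lookup reduces to a plain `getOrElse`, as in this REPL-style sketch with a made-up table name:

```scala
val properties = Map("owner" -> "andrew", "status" -> "new")
val key = "nonExistingKey"
// A miss yields a message value instead of an error, matching the test below.
val value = properties.getOrElse(key, s"Table tbl does not have property: $key")
// value == "Table tbl does not have property: nonExistingKey"
```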
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
index ce41847f5b3a3..1d1452c9f25d8 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
@@ -1562,6 +1562,63 @@ class DataSourceV2SQLSuite
     assert(e.message.contains("ALTER VIEW QUERY is only supported with v1 tables"))
   }
 
+  test("SHOW TBLPROPERTIES: v2 table") {
+    val t = "testcat.ns1.ns2.tbl"
+    withTable(t) {
+      val owner = "andrew"
+      val status = "new"
+      val provider = "foo"
+      spark.sql(s"CREATE TABLE $t (id bigint, data string) USING $provider " +
+        s"TBLPROPERTIES ('owner'='$owner', 'status'='$status')")
+
+      val properties = sql(s"SHOW TBLPROPERTIES $t")
+
+      val schema = new StructType()
+        .add("key", StringType, nullable = false)
+        .add("value", StringType, nullable = false)
+
+      val expected = Seq(
+        Row("owner", owner),
+        Row("provider", provider),
+        Row("status", status))
+
+      assert(properties.schema === schema)
+      assert(expected === properties.collect())
+    }
+  }
+
+  test("SHOW TBLPROPERTIES(key): v2 table") {
+    val t = "testcat.ns1.ns2.tbl"
+    withTable(t) {
+      val owner = "andrew"
+      val status = "new"
+      val provider = "foo"
+      spark.sql(s"CREATE TABLE $t (id bigint, data string) USING $provider " +
+        s"TBLPROPERTIES ('owner'='$owner', 'status'='$status')")
+
+      val properties = sql(s"SHOW TBLPROPERTIES $t ('status')")
+
+      val expected = Seq(Row("status", status))
+
+      assert(expected === properties.collect())
+    }
+  }
+
+  test("SHOW TBLPROPERTIES(key): v2 table, key not found") {
+    val t = "testcat.ns1.ns2.tbl"
+    withTable(t) {
+      val nonExistingKey = "nonExistingKey"
+      spark.sql(s"CREATE TABLE $t (id bigint, data string) USING foo " +
+        s"TBLPROPERTIES ('owner'='andrew', 'status'='new')")
+
+      val properties = sql(s"SHOW TBLPROPERTIES $t ('$nonExistingKey')")
+
+      val expected = Seq(Row(nonExistingKey, s"Table $t does not have property: $nonExistingKey"))
+
+      assert(expected === properties.collect())
+    }
+  }
+
   private def testV1Command(sqlCommand: String, sqlParams: String): Unit = {
     val e = intercept[AnalysisException] {
       sql(s"$sqlCommand $sqlParams")
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLParserSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLParserSuite.scala
index 940df21131e09..0b9962cba4e1a 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLParserSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLParserSuite.scala
@@ -532,15 +532,6 @@ class DDLParserSuite extends AnalysisTest with SharedSparkSession {
       """.stripMargin)
   }
 
-  test("show tblproperties") {
-    val parsed1 = parser.parsePlan("SHOW TBLPROPERTIES tab1")
-    val expected1 = ShowTablePropertiesCommand(TableIdentifier("tab1", None), None)
-    val parsed2 = parser.parsePlan("SHOW TBLPROPERTIES tab1('propKey1')")
-    val expected2 = ShowTablePropertiesCommand(TableIdentifier("tab1", None), Some("propKey1"))
-    comparePlans(parsed1, expected1)
-    comparePlans(parsed2, expected2)
-  }
-
   test("SPARK-14383: DISTRIBUTE and UNSET as non-keywords") {
     val sql = "SELECT distribute, unset FROM x"
     val parsed = parser.parsePlan(sql)
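Reviewer note (not part of the patch): the v1 parser test removed here is superseded by the catalyst-side `DDLParserSuite` test added above, since the parse rule now lives in `AstBuilder`. The same expectation can be checked by hand against the catalyst parser:

```scala
import org.apache.spark.sql.catalyst.parser.CatalystSqlParser

// Should print a ShowTablePropertiesStatement carrying name parts
// ["a", "b", "c"] and the key "propKey1".
val plan = CatalystSqlParser.parsePlan("SHOW TBLPROPERTIES a.b.c('propKey1')")
println(plan)
```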