diff --git a/docs/sql-migration-guide.md b/docs/sql-migration-guide.md
index bb086ff657e4..04ec327b7cd7 100644
--- a/docs/sql-migration-guide.md
+++ b/docs/sql-migration-guide.md
@@ -338,6 +338,8 @@ license: |
 
   - Since Spark 3.0, `ADD FILE` can be used to add file directories as well. Earlier only single files can be added using this command. To restore the behaviour of earlier versions, set `spark.sql.legacy.addDirectory.recursive` to false.
 
+  - Since Spark 3.0, `SHOW TBLPROPERTIES` on a temporary view will cause `AnalysisException`. In Spark version 2.4 and earlier, it returned an empty result.
+
 ## Upgrading from Spark SQL 2.4 to 2.4.1
 
   - The value of `spark.executor.heartbeatInterval`, when specified without units like "30" rather than "30s", was
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
index 6e0e257038fa..5317af494d8e 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
@@ -820,11 +820,6 @@ class Analyzer(
           .map(rel => alter.copy(table = rel))
           .getOrElse(alter)
 
-      case show @ ShowTableProperties(u: UnresolvedV2Relation, _) =>
-        CatalogV2Util.loadRelation(u.catalog, u.tableName)
-          .map(rel => show.copy(table = rel))
-          .getOrElse(show)
-
       case u: UnresolvedV2Relation =>
         CatalogV2Util.loadRelation(u.catalog, u.tableName).getOrElse(u)
     }
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveCatalogs.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveCatalogs.scala
index b79dcd0c1eca..88a3c0a73a10 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveCatalogs.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveCatalogs.scala
@@ -191,11 +191,6 @@ class ResolveCatalogs(val catalogManager: CatalogManager)
 
     case ShowCurrentNamespaceStatement() =>
       ShowCurrentNamespace(catalogManager)
-
-    case ShowTablePropertiesStatement(
-        nameParts @ NonSessionCatalogAndTable(catalog, tbl), propertyKey) =>
-      val r = UnresolvedV2Relation(nameParts, catalog.asTableCatalog, tbl.asIdentifier)
-      ShowTableProperties(r, propertyKey)
   }
 
   object NonSessionCatalogAndTable {
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
index 5ddeff50e47b..f744787c9082 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
@@ -3503,8 +3503,8 @@ class AstBuilder(conf: SQLConf) extends SqlBaseBaseVisitor[AnyRef] with Logging
    */
   override def visitShowTblProperties(
       ctx: ShowTblPropertiesContext): LogicalPlan = withOrigin(ctx) {
-    ShowTablePropertiesStatement(
-      visitMultipartIdentifier(ctx.table),
+    ShowTableProperties(
+      UnresolvedTable(visitMultipartIdentifier(ctx.table)),
       Option(ctx.key).map(visitTablePropertyKey))
   }
 
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/statements.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/statements.scala
index 1e097899602a..44f7b4143926 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/statements.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/statements.scala
@@ -438,13 +438,6 @@ case class ShowColumnsStatement(
  */
 case class ShowCurrentNamespaceStatement() extends ParsedStatement
 
-/**
- * A SHOW TBLPROPERTIES statement, as parsed from SQL
- */
-case class ShowTablePropertiesStatement(
-    tableName: Seq[String],
-    propertyKey: Option[String]) extends ParsedStatement
-
 /**
  * A DESCRIBE FUNCTION statement, as parsed from SQL
  */
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/v2Commands.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/v2Commands.scala
index e98b2cf7abfc..e1e7eac4cc08 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/v2Commands.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/v2Commands.scala
@@ -474,8 +474,10 @@ case class ShowCurrentNamespace(catalogManager: CatalogManager) extends Command
  * The logical plan of the SHOW TBLPROPERTIES command that works for v2 catalogs.
  */
 case class ShowTableProperties(
-    table: NamedRelation,
-    propertyKey: Option[String]) extends Command{
+    table: LogicalPlan,
+    propertyKey: Option[String]) extends Command {
+  override def children: Seq[LogicalPlan] = table :: Nil
+
   override val output: Seq[Attribute] = Seq(
     AttributeReference("key", StringType, nullable = false)(),
     AttributeReference("value", StringType, nullable = false)())
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala
index 3a4c08235731..47387fa18411 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala
@@ -1912,11 +1912,11 @@ class DDLParserSuite extends AnalysisTest {
   test("SHOW TBLPROPERTIES table") {
     comparePlans(
       parsePlan("SHOW TBLPROPERTIES a.b.c"),
-      ShowTablePropertiesStatement(Seq("a", "b", "c"), None))
+      ShowTableProperties(UnresolvedTable(Seq("a", "b", "c")), None))
 
     comparePlans(
       parsePlan("SHOW TBLPROPERTIES a.b.c('propKey1')"),
-      ShowTablePropertiesStatement(Seq("a", "b", "c"), Some("propKey1")))
+      ShowTableProperties(UnresolvedTable(Seq("a", "b", "c")), Some("propKey1")))
   }
 
   test("DESCRIBE FUNCTION") {
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala b/sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala
index 106fdc433396..8b0d339dbb86 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala
@@ -485,10 +485,8 @@ class ResolveSessionCatalog(
         replace,
         viewType)
 
-    case ShowTablePropertiesStatement(SessionCatalogAndTable(_, tbl), propertyKey) =>
-      ShowTablePropertiesCommand(
-        tbl.asTableIdentifier,
-        propertyKey)
+    case ShowTableProperties(r: ResolvedTable, propertyKey) if isSessionCatalog(r.catalog) =>
+      ShowTablePropertiesCommand(r.identifier.asTableIdentifier, propertyKey)
 
     case DescribeFunctionStatement(CatalogAndIdentifier(catalog, ident), extended) =>
       val functionIdent =
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
index 447d00c11e7c..a92fbdf25975 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
@@ -900,22 +900,15 @@ case class ShowTablePropertiesCommand(table: TableIdentifier, propertyKey: Optio
   }
 
   override def run(sparkSession: SparkSession): Seq[Row] = {
-    val catalog = sparkSession.sessionState.catalog
-
-    if (catalog.isTemporaryTable(table)) {
-      Seq.empty[Row]
-    } else {
-      val catalogTable = sparkSession.sessionState.catalog.getTableMetadata(table)
-
-      propertyKey match {
-        case Some(p) =>
-          val propValue = catalogTable
-            .properties
-            .getOrElse(p, s"Table ${catalogTable.qualifiedName} does not have property: $p")
-          Seq(Row(propValue))
-        case None =>
-          catalogTable.properties.map(p => Row(p._1, p._2)).toSeq
-      }
+    val catalogTable = sparkSession.sessionState.catalog.getTableMetadata(table)
+    propertyKey match {
+      case Some(p) =>
+        val propValue = catalogTable
+          .properties
+          .getOrElse(p, s"Table ${catalogTable.qualifiedName} does not have property: $p")
+        Seq(Row(propValue))
+      case None =>
+        catalogTable.properties.map(p => Row(p._1, p._2)).toSeq
     }
   }
 }
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DataSourceV2Strategy.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DataSourceV2Strategy.scala
index b452b66e0381..745edee14507 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DataSourceV2Strategy.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DataSourceV2Strategy.scala
@@ -249,7 +249,7 @@ object DataSourceV2Strategy extends Strategy with PredicateHelper {
     case r: ShowCurrentNamespace =>
       ShowCurrentNamespaceExec(r.output, r.catalogManager) :: Nil
 
-    case r @ ShowTableProperties(DataSourceV2Relation(table, _, _), propertyKey) =>
+    case r @ ShowTableProperties(ResolvedTable(_, _, table), propertyKey) =>
       ShowTablePropertiesExec(r.output, table, propertyKey) :: Nil
 
     case AlterNamespaceSetOwner(ResolvedNamespace(catalog, namespace), name, typ) =>
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala
index 07cff1e7d99f..dbbf2b29fe8b 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala
@@ -130,10 +130,10 @@ class HiveCommandSuite extends QueryTest with SQLTestUtils with TestHiveSingleto
   }
 
   test("show tblproperties for datasource table - errors") {
-    val message1 = intercept[NoSuchTableException] {
+    val message = intercept[AnalysisException] {
       sql("SHOW TBLPROPERTIES badtable")
     }.getMessage
-    assert(message1.contains("Table or view 'badtable' not found in database 'default'"))
+    assert(message.contains("Table not found: badtable"))
 
     // When key is not found, a row containing the error is returned.
     checkAnswer(
@@ -147,7 +147,7 @@ class HiveCommandSuite extends QueryTest with SQLTestUtils with TestHiveSingleto
     checkAnswer(sql("SHOW TBLPROPERTIES parquet_tab2('`prop2Key`')"), Row("prop2Val"))
   }
 
-  test("show tblproperties for spark temporary table - empty row") {
+  test("show tblproperties for spark temporary table - AnalysisException is thrown") {
     withTempView("parquet_temp") {
       sql(
         """
@@ -155,8 +155,10 @@ class HiveCommandSuite extends QueryTest with SQLTestUtils with TestHiveSingleto
           |USING org.apache.spark.sql.parquet.DefaultSource
         """.stripMargin)
 
-      // An empty sequence of row is returned for session temporary table.
-      checkAnswer(sql("SHOW TBLPROPERTIES parquet_temp"), Nil)
+      val message = intercept[AnalysisException] {
+        sql("SHOW TBLPROPERTIES parquet_temp")
+      }.getMessage
+      assert(message.contains("parquet_temp is a temp view not table"))
     }
   }
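---

For reviewers, a minimal sketch of the user-visible change (not part of the patch; assumes a local `SparkSession` and mirrors the expectations asserted in `HiveCommandSuite` above):

```scala
import org.apache.spark.sql.{AnalysisException, SparkSession}

val spark = SparkSession.builder().master("local[1]").getOrCreate()

// Schema-only temporary view, matching the updated test fixture.
spark.sql(
  """
    |CREATE TEMPORARY VIEW parquet_temp (c1 INT, c2 STRING)
    |USING org.apache.spark.sql.parquet.DefaultSource
  """.stripMargin)

// Spark 2.4 and earlier: SHOW TBLPROPERTIES on a temp view returned an empty result.
// With this patch: the command parses to ShowTableProperties(UnresolvedTable(...)),
// and resolving UnresolvedTable against a temp view fails analysis with the
// message asserted in the updated test.
try {
  spark.sql("SHOW TBLPROPERTIES parquet_temp")
} catch {
  case e: AnalysisException =>
    assert(e.getMessage.contains("parquet_temp is a temp view not table"))
}
```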