diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala index 2a849023ea13b..b72e34c5f5057 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala @@ -856,8 +856,13 @@ class Analyzer( u.failAnalysis(s"${ident.quoted} is a temp view not table.") } u - case u @ UnresolvedTableOrView(ident) => - lookupTempView(ident).map(_ => ResolvedView(ident.asIdentifier)).getOrElse(u) + case u @ UnresolvedTableOrView(ident, acceptTempView) => + lookupTempView(ident).map { _ => + if (!acceptTempView) { + u.failAnalysis(s"${ident.quoted} is a temp view, not a table or permanent view.") + } + ResolvedView(ident.asIdentifier) + }.getOrElse(u) } def lookupTempView(identifier: Seq[String]): Option[LogicalPlan] = { @@ -905,7 +910,7 @@ class Analyzer( .map(ResolvedTable(catalog.asTableCatalog, ident, _)) .getOrElse(u) - case u @ UnresolvedTableOrView(NonSessionCatalogAndIdentifier(catalog, ident)) => + case u @ UnresolvedTableOrView(NonSessionCatalogAndIdentifier(catalog, ident), _) => CatalogV2Util.loadTable(catalog, ident) .map(ResolvedTable(catalog.asTableCatalog, ident, _)) .getOrElse(u) @@ -994,7 +999,7 @@ class Analyzer( case table => table }.getOrElse(u) - case u @ UnresolvedTableOrView(identifier) => + case u @ UnresolvedTableOrView(identifier, _) => lookupTableOrView(identifier).getOrElse(u) } diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala index 351be32ee438e..a307a846f635b 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala @@ -99,9 +99,12 @@ trait 
CheckAnalysis extends PredicateHelper { case u: UnresolvedTable => u.failAnalysis(s"Table not found: ${u.multipartIdentifier.quoted}") - case u: UnresolvedTableOrView => + case u: UnresolvedTableOrView if u.acceptTempView => u.failAnalysis(s"Table or view not found: ${u.multipartIdentifier.quoted}") + case u: UnresolvedTableOrView if !u.acceptTempView => + u.failAnalysis(s"Table or permanent view not found: ${u.multipartIdentifier.quoted}") + case u: UnresolvedRelation => u.failAnalysis(s"Table or view not found: ${u.multipartIdentifier.quoted}") diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/v2ResolutionPlans.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/v2ResolutionPlans.scala index a16763f2cf943..97b6b0cab4665 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/v2ResolutionPlans.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/v2ResolutionPlans.scala @@ -46,7 +46,8 @@ case class UnresolvedTable(multipartIdentifier: Seq[String]) extends LeafNode { * Holds the name of a table or view that has yet to be looked up in a catalog. It will * be resolved to [[ResolvedTable]] or [[ResolvedView]] during analysis. 
*/ -case class UnresolvedTableOrView(multipartIdentifier: Seq[String]) extends LeafNode { +case class UnresolvedTableOrView(multipartIdentifier: Seq[String], acceptTempView: Boolean = true) + extends LeafNode { override lazy val resolved: Boolean = false override def output: Seq[Attribute] = Nil } diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala index d6ae89f49c57a..0072d0c12751d 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala @@ -3583,7 +3583,7 @@ class AstBuilder(conf: SQLConf) extends SqlBaseBaseVisitor[AnyRef] with Logging override def visitShowTblProperties( ctx: ShowTblPropertiesContext): LogicalPlan = withOrigin(ctx) { ShowTableProperties( - UnresolvedTableOrView(visitMultipartIdentifier(ctx.table)), + UnresolvedTableOrView(visitMultipartIdentifier(ctx.table), acceptTempView = false), Option(ctx.key).map(visitTablePropertyKey)) } diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala index 02e086d5d7895..b9d8fdb501d2d 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala @@ -2016,11 +2016,11 @@ class DDLParserSuite extends AnalysisTest { test("SHOW TBLPROPERTIES table") { comparePlans( parsePlan("SHOW TBLPROPERTIES a.b.c"), - ShowTableProperties(UnresolvedTableOrView(Seq("a", "b", "c")), None)) + ShowTableProperties(UnresolvedTableOrView(Seq("a", "b", "c"), false), None)) comparePlans( parsePlan("SHOW TBLPROPERTIES a.b.c('propKey1')"), - ShowTableProperties(UnresolvedTableOrView(Seq("a", "b", "c")), Some("propKey1"))) + 
ShowTableProperties(UnresolvedTableOrView(Seq("a", "b", "c"), false), Some("propKey1"))) } test("DESCRIBE FUNCTION") { diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala index 7aebdddf1d59c..d1670a23fa2ac 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala @@ -920,7 +920,7 @@ case class ShowTablePropertiesCommand(table: TableIdentifier, propertyKey: Optio override def run(sparkSession: SparkSession): Seq[Row] = { val catalog = sparkSession.sessionState.catalog if (catalog.isTemporaryTable(table)) { - Seq.empty[Row] + throw new AnalysisException(s"SHOW TBLPROPERTIES is not allowed on a temporary view: $table") } else { val catalogTable = catalog.getTableMetadata(table) propertyKey match { diff --git a/sql/core/src/test/resources/sql-tests/results/show-tblproperties.sql.out b/sql/core/src/test/resources/sql-tests/results/show-tblproperties.sql.out index eaaf894590d35..655e197967b58 100644 --- a/sql/core/src/test/resources/sql-tests/results/show-tblproperties.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/show-tblproperties.sql.out @@ -101,9 +101,10 @@ struct<> -- !query SHOW TBLPROPERTIES tv -- !query schema -struct +struct<> -- !query output - +org.apache.spark.sql.AnalysisException +tv is a temp view, not a table or permanent view.; line 1 pos 0 -- !query diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala index dcec8bf5c0cc6..c8ed0b7e36676 100644 --- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala +++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala @@ -179,7 +179,7 @@ class HiveCommandSuite extends QueryTest with 
SQLTestUtils with TestHiveSingleton val message = intercept[AnalysisException] { sql("SHOW TBLPROPERTIES badtable") }.getMessage - assert(message.contains("Table or view not found: badtable")) + assert(message.contains("Table or permanent view not found: badtable")) // When key is not found, a row containing the error is returned. checkAnswer(