diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala
index 82cbb4aa4744..d39c368a3a19 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala
@@ -754,6 +754,8 @@ case class AlterTableSetLocationCommand(
       // No partition spec is specified, so we set the location for the table itself
       catalog.alterTable(table.withNewStorage(locationUri = Some(location)))
     }
+
+    // Invalidate the table metadata and cached data so subsequent reads see the new location.
+    catalog.refreshTable(table.identifier)
     Seq.empty[Row]
   }
 }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
index b44f20e367f0..e2c635af776d 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
@@ -1952,4 +1952,28 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
     }
   }
 }
+
+  // After ALTER TABLE ... SET LOCATION, reads must reflect the new (empty) location,
+  // both when the table is cached and when it is not.
+  Seq(true, false).foreach { shouldCache =>
+    val testName = if (shouldCache) "cached" else "non-cached"
+    test(s"refresh $testName table after alter the location") {
+      withTable("t", "t1", "t2", "t3") {
+        withTempDir { dir =>
+          spark.sql(
+            """
+              |CREATE TABLE t(a string)
+              |USING parquet
+            """.stripMargin)
+          spark.sql("INSERT INTO TABLE t SELECT 1")
+          if (shouldCache) {
+            spark.catalog.cacheTable("t")
+          }
+          checkAnswer(spark.table("t"), Row("1") :: Nil)
+          spark.sql(s"ALTER TABLE t SET LOCATION '$dir'")
+          checkAnswer(spark.table("t"), Nil)
+        }
+
+        // TODO: partition table tests
+      }
+    }
+  }
 }
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
index 792ac1e25949..6eee19ac98d4 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
@@ -1587,4 +1587,73 @@ class HiveDDLSuite
     }
   }
 }
+
+  // Covers data-source and Hive-serde tables, partitioned and unpartitioned:
+  // after SET LOCATION the table (or partition) must be re-read from the new path.
+  Seq(true, false).foreach { shouldCache =>
+    val testName = if (shouldCache) "cached" else "non-cached"
+    test(s"refresh $testName table after alter the location") {
+      withTable("t", "t1", "t2", "t3") {
+        withTempDir { dir =>
+          spark.sql(
+            """
+              |CREATE TABLE t(a string)
+              |USING parquet
+            """.stripMargin)
+          spark.sql("INSERT INTO TABLE t SELECT 1")
+          if (shouldCache) {
+            spark.catalog.cacheTable("t")
+          }
+          checkAnswer(spark.table("t"), Row("1") :: Nil)
+          spark.sql(s"ALTER TABLE t SET LOCATION '$dir'")
+          checkAnswer(spark.table("t"), Nil)
+        }
+
+        withTempDir { dir =>
+          spark.sql(
+            """
+              |CREATE TABLE t1(a string, b string)
+              |USING parquet
+              |PARTITIONED BY(b)
+            """.stripMargin)
+          spark.sql("INSERT INTO TABLE t1 PARTITION(b=1) SELECT 2")
+          if (shouldCache) {
+            // Cache the table under test, not "t" from the previous scenario.
+            spark.catalog.cacheTable("t1")
+          }
+          checkAnswer(spark.table("t1"), Row("2", "1") :: Nil)
+          spark.sql(s"ALTER TABLE t1 PARTITION(b=1)SET LOCATION '$dir'")
+          checkAnswer(spark.table("t1"), Nil)
+        }
+
+        withTempDir { dir =>
+          spark.sql(
+            """
+              |CREATE TABLE t2(a string)
+              |USING hive
+            """.stripMargin)
+          spark.sql("INSERT INTO TABLE t2 SELECT 1")
+          if (shouldCache) {
+            // Cache the table under test, not "t" from the previous scenario.
+            spark.catalog.cacheTable("t2")
+          }
+          checkAnswer(spark.table("t2"), Row("1") :: Nil)
+          spark.sql(s"ALTER TABLE t2 SET LOCATION '$dir'")
+          checkAnswer(spark.table("t2"), Nil)
+        }
+
+        withTempDir { dir =>
+          spark.sql(
+            """
+              |CREATE TABLE t3(a string, b string)
+              |USING hive
+              |PARTITIONED BY(b)
+            """.stripMargin)
+          spark.sql("INSERT INTO TABLE t3 PARTITION(b=1) SELECT 2")
+          if (shouldCache) {
+            // Cache the table under test, not "t" from the previous scenario.
+            spark.catalog.cacheTable("t3")
+          }
+          checkAnswer(spark.table("t3"), Row("2", "1") :: Nil)
+          spark.sql(s"ALTER TABLE t3 PARTITION(b=1)SET LOCATION '$dir'")
+          checkAnswer(spark.table("t3"), Nil)
+        }
+      }
+    }
+  }
 }