diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala b/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
index be28df3a51557..8246165641a93 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
@@ -722,7 +722,7 @@ class SQLContext private[sql](
    * only during the lifetime of this instance of SQLContext.
    */
   private[sql] def registerDataFrameAsTable(df: DataFrame, tableName: String): Unit = {
-    catalog.registerTable(TableIdentifier(tableName), df.logicalPlan)
+    catalog.registerTable(sqlParser.parseTableIdentifier(tableName), df.logicalPlan)
   }
 
   /**
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
index 4ff99bdf2937d..2260145668aae 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
@@ -1291,4 +1291,16 @@ class DataFrameSuite extends QueryTest with SharedSQLContext {
       Seq(1 -> "a").toDF("i", "j").filter($"i".cast(StringType) === "1"),
       Row(1, "a"))
   }
+
+  test("SPARK-12982: Add table name validation in temp table registration") {
+    val df = Seq("foo", "bar").map(Tuple1.apply).toDF("col")
+    // an invalid table name is rejected at registration time
+    intercept[AnalysisException](df.registerTempTable("t~"))
+    // a valid table name registers successfully
+    df.registerTempTable("table1")
+    // another invalid name: leading special characters
+    intercept[AnalysisException](df.registerTempTable("#$@sum"))
+    // another invalid name: trailing special characters
+    intercept[AnalysisException](df.registerTempTable("table!#"))
+  }
 }
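For context, a minimal usage sketch (not part of the patch): after this change, `registerDataFrameAsTable` routes the supplied name through `sqlParser.parseTableIdentifier`, so a name the SQL parser cannot parse now fails eagerly with an `AnalysisException` at registration time rather than being accepted silently. The snippet below assumes an already-constructed `SQLContext` bound to a value named `sqlContext` (hypothetical setup) and mirrors the behavior exercised by the new test.

```scala
import org.apache.spark.sql.AnalysisException

// Hypothetical setup: `sqlContext` is an existing SQLContext instance.
import sqlContext.implicits._

val df = Seq("foo", "bar").map(Tuple1.apply).toDF("col")

// A plain identifier still registers as before.
df.registerTempTable("table1")

// A name the parser rejects now fails immediately when registering.
try {
  df.registerTempTable("t~")
} catch {
  case e: AnalysisException =>
    println(s"rejected invalid temp table name: ${e.getMessage}")
}
```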