Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -425,7 +425,9 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder {

logWarning(s"CREATE TEMPORARY TABLE ... USING ... is deprecated, please use " +
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Can you check when we forbade this? Maybe it's time to fully remove the support.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@cloud-fan Thanks for reviewing! I found that this deprecation was added by your PR #14482 back on 8/5/2016. Do you think it is the right time to remove CREATE TEMPORARY TABLE... completely?

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Are we referring to the same PR? I found the change here https://github.com/apache/spark/pull/14482/files#diff-1bb4f7bd5a2656f48bcd3c857167a11bR362, where this WARN message is added in SparkSQLParser.scala

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I mean it was moved from other places, so this warning should have been added earlier.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I see — sorry, I misunderstood. Let me check further.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

#13414 deprecated this. Merged on Jun 7th, 2016.

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

it was merged to 2.0, let's keep it

"CREATE TEMPORARY VIEW ... USING ... instead")
CreateTempViewUsing(table, schema, replace = true, global = false, provider, options)
// Unlike CREATE TEMPORARY VIEW USING, CREATE TEMPORARY TABLE USING does not support
// IF NOT EXISTS. Users are not allowed to replace the existing temp table.
CreateTempViewUsing(table, schema, replace = false, global = false, provider, options)
} else {
CreateTable(tableDesc, mode, None)
}
Expand Down
18 changes: 9 additions & 9 deletions sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
Original file line number Diff line number Diff line change
Expand Up @@ -1571,7 +1571,7 @@ class SQLQuerySuite extends QueryTest with SharedSQLContext {
}
}

test("specifying database name for a temporary table is not allowed") {
test("specifying database name for a temporary view is not allowed") {
withTempPath { dir =>
val path = dir.toURI.toString
val df =
Expand All @@ -1585,23 +1585,23 @@ class SQLQuerySuite extends QueryTest with SharedSQLContext {
intercept[AnalysisException] {
spark.sql(
s"""
|CREATE TEMPORARY TABLE db.t
|USING parquet
|OPTIONS (
| path '$path'
|)
""".stripMargin)
|CREATE TEMPORARY VIEW db.t
|USING parquet
|OPTIONS (
| path '$path'
|)
""".stripMargin)
}.getMessage

// If you use backticks to quote the name then it's OK.
spark.sql(
s"""
|CREATE TEMPORARY TABLE `db.t`
|CREATE TEMPORARY VIEW `db.t`
|USING parquet
|OPTIONS (
| path '$path'
|)
""".stripMargin)
""".stripMargin)
checkAnswer(spark.table("`db.t`"), df)
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -903,24 +903,24 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
withTempView("show1a", "show2b") {
sql(
"""
|CREATE TEMPORARY TABLE show1a
|USING org.apache.spark.sql.sources.DDLScanSource
|OPTIONS (
| From '1',
| To '10',
| Table 'test1'
|
|)
|CREATE TEMPORARY VIEW show1a
|USING org.apache.spark.sql.sources.DDLScanSource
|OPTIONS (
| From '1',
| To '10',
| Table 'test1'
|
|)
""".stripMargin)
sql(
"""
|CREATE TEMPORARY TABLE show2b
|USING org.apache.spark.sql.sources.DDLScanSource
|OPTIONS (
| From '1',
| To '10',
| Table 'test1'
|)
|CREATE TEMPORARY VIEW show2b
|USING org.apache.spark.sql.sources.DDLScanSource
|OPTIONS (
| From '1',
| To '10',
| Table 'test1'
|)
""".stripMargin)
assert(
sql("SHOW TABLE EXTENDED LIKE 'show*'").count() >= 2)
Expand Down Expand Up @@ -958,20 +958,20 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
Nil)
}

test("drop table - temporary table") {
test("drop view - temporary view") {
val catalog = spark.sessionState.catalog
sql(
"""
|CREATE TEMPORARY TABLE tab1
|USING org.apache.spark.sql.sources.DDLScanSource
|OPTIONS (
| From '1',
| To '10',
| Table 'test1'
|)
|CREATE TEMPORARY VIEW tab1
|USING org.apache.spark.sql.sources.DDLScanSource
|OPTIONS (
| From '1',
| To '10',
| Table 'test1'
|)
""".stripMargin)
assert(catalog.listTables("default") == Seq(TableIdentifier("tab1")))
sql("DROP TABLE tab1")
sql("DROP VIEW tab1")
assert(catalog.listTables("default") == Nil)
}

Expand Down Expand Up @@ -1690,6 +1690,16 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
}
}

test("block creating duplicate temp table") {
  withView("t_temp") {
    // Register a temporary view first, then verify that attempting to create a
    // temporary table under the same name is rejected rather than silently
    // replacing the existing temp view.
    sql("CREATE TEMPORARY VIEW t_temp AS SELECT 1, 2")
    val errMsg = intercept[TempTableAlreadyExistsException] {
      sql("CREATE TEMPORARY TABLE t_temp (c3 int, c4 string) USING JSON")
    }.getMessage
    // The error message should identify the conflicting temp object by name.
    assert(errMsg.contains("Temporary table 't_temp' already exists"))
  }
}

test("truncate table - external table, temporary table, view (not allowed)") {
import testImplicits._
withTempPath { tempDir =>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -52,10 +52,10 @@ class RowDataSourceStrategySuite extends SparkFunSuite with BeforeAndAfter with
conn.commit()
sql(
s"""
|CREATE TEMPORARY TABLE inttypes
|CREATE OR REPLACE TEMPORARY VIEW inttypes
|USING org.apache.spark.sql.jdbc
|OPTIONS (url '$url', dbtable 'TEST.INTTYPES', user 'testUser', password 'testPass')
""".stripMargin.replaceAll("\n", " "))
""".stripMargin.replaceAll("\n", " "))
}

after {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -186,16 +186,17 @@ class CSVSuite extends QueryTest with SharedSQLContext with SQLTestUtils {
}

test("test different encoding") {
// scalastyle:off
spark.sql(
s"""
|CREATE TEMPORARY TABLE carsTable USING csv
|OPTIONS (path "${testFile(carsFile8859)}", header "true",
|charset "iso-8859-1", delimiter "þ")
""".stripMargin.replaceAll("\n", " "))
// scalastyle:on

verifyCars(spark.table("carsTable"), withHeader = true)
withView("carsTable") {
// scalastyle:off
spark.sql(
s"""
|CREATE TEMPORARY VIEW carsTable USING csv
|OPTIONS (path "${testFile(carsFile8859)}", header "true",
|charset "iso-8859-1", delimiter "þ")
""".stripMargin.replaceAll("\n", " "))
// scalastyle:on
verifyCars(spark.table("carsTable"), withHeader = true)
}
}

test("test aliases sep and encoding for delimiter and charset") {
Expand All @@ -213,27 +214,31 @@ class CSVSuite extends QueryTest with SharedSQLContext with SQLTestUtils {
}

test("DDL test with tab separated file") {
spark.sql(
s"""
|CREATE TEMPORARY TABLE carsTable USING csv
|OPTIONS (path "${testFile(carsTsvFile)}", header "true", delimiter "\t")
""".stripMargin.replaceAll("\n", " "))

verifyCars(spark.table("carsTable"), numFields = 6, withHeader = true, checkHeader = false)
withView("carsTable") {
spark.sql(
s"""
|CREATE TEMPORARY VIEW carsTable USING csv
|OPTIONS (path "${testFile(carsTsvFile)}", header "true", delimiter "\t")
""".stripMargin.replaceAll("\n", " "))

verifyCars(spark.table("carsTable"), numFields = 6, withHeader = true, checkHeader = false)
}
}

test("DDL test parsing decimal type") {
spark.sql(
s"""
|CREATE TEMPORARY TABLE carsTable
|(yearMade double, makeName string, modelName string, priceTag decimal,
| comments string, grp string)
|USING csv
|OPTIONS (path "${testFile(carsTsvFile)}", header "true", delimiter "\t")
""".stripMargin.replaceAll("\n", " "))

assert(
spark.sql("SELECT makeName FROM carsTable where priceTag > 60000").collect().size === 1)
withView("carsTable") {
spark.sql(
s"""
|CREATE TEMPORARY VIEW carsTable
|(yearMade double, makeName string, modelName string, priceTag decimal,
| comments string, grp string)
|USING csv
|OPTIONS (path "${testFile(carsTsvFile)}", header "true", delimiter "\t")
""".stripMargin.replaceAll("\n", " "))

assert(
spark.sql("SELECT makeName FROM carsTable where priceTag > 60000").collect().size === 1)
}
}

test("test for DROPMALFORMED parsing mode") {
Expand Down Expand Up @@ -300,28 +305,34 @@ class CSVSuite extends QueryTest with SharedSQLContext with SQLTestUtils {
}

test("DDL test with empty file") {
spark.sql(s"""
|CREATE TEMPORARY TABLE carsTable
|(yearMade double, makeName string, modelName string, comments string, grp string)
|USING csv
|OPTIONS (path "${testFile(emptyFile)}", header "false")
""".stripMargin.replaceAll("\n", " "))

assert(spark.sql("SELECT count(*) FROM carsTable").collect().head(0) === 0)
withView("carsTable") {
spark.sql(
s"""
|CREATE TEMPORARY VIEW carsTable
|(yearMade double, makeName string, modelName string, comments string, grp string)
|USING csv
|OPTIONS (path "${testFile(emptyFile)}", header "false")
""".stripMargin.replaceAll("\n", " "))

assert(spark.sql("SELECT count(*) FROM carsTable").collect().head(0) === 0)
}
}

test("DDL test with schema") {
spark.sql(s"""
|CREATE TEMPORARY TABLE carsTable
|(yearMade double, makeName string, modelName string, comments string, blank string)
|USING csv
|OPTIONS (path "${testFile(carsFile)}", header "true")
""".stripMargin.replaceAll("\n", " "))

val cars = spark.table("carsTable")
verifyCars(cars, withHeader = true, checkHeader = false, checkValues = false)
assert(
cars.schema.fieldNames === Array("yearMade", "makeName", "modelName", "comments", "blank"))
withView("carsTable") {
spark.sql(
s"""
|CREATE TEMPORARY VIEW carsTable
|(yearMade double, makeName string, modelName string, comments string, blank string)
|USING csv
|OPTIONS (path "${testFile(carsFile)}", header "true")
""".stripMargin.replaceAll("\n", " "))

val cars = spark.table("carsTable")
verifyCars(cars, withHeader = true, checkHeader = false, checkValues = false)
assert(
cars.schema.fieldNames === Array("yearMade", "makeName", "modelName", "comments", "blank"))
}
}

test("save csv") {
Expand Down
Loading