diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala
index 7ddb5941c265..a33b7009df33 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala
@@ -26,6 +26,7 @@ import org.apache.spark.sql.catalyst.expressions.{EqualTo, Literal}
 import org.apache.spark.sql.catalyst.plans.logical._
 import org.apache.spark.sql.connector.catalog.TableChange.ColumnPosition.{after, first}
 import org.apache.spark.sql.connector.expressions.{ApplyTransform, BucketTransform, DaysTransform, FieldReference, HoursTransform, IdentityTransform, LiteralValue, MonthsTransform, Transform, YearsTransform}
+import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.types.{IntegerType, LongType, StringType, StructType, TimestampType}
 import org.apache.spark.unsafe.types.UTF8String
 
@@ -2163,18 +2164,20 @@ class DDLParserSuite extends AnalysisTest {
   }
 
   test("create table - without using") {
-    val sql = "CREATE TABLE 1m.2g(a INT)"
-    val expectedTableSpec = TableSpec(
-      Seq("1m", "2g"),
-      Some(new StructType().add("a", IntegerType)),
-      Seq.empty[Transform],
-      None,
-      Map.empty[String, String],
-      None,
-      Map.empty[String, String],
-      None,
-      None)
+    withSQLConf(SQLConf.LEGACY_CREATE_HIVE_TABLE_BY_DEFAULT_ENABLED.key -> "false") {
+      val sql = "CREATE TABLE 1m.2g(a INT)"
+      val expectedTableSpec = TableSpec(
+        Seq("1m", "2g"),
+        Some(new StructType().add("a", IntegerType)),
+        Seq.empty[Transform],
+        None,
+        Map.empty[String, String],
+        None,
+        Map.empty[String, String],
+        None,
+        None)
 
-    testCreateOrReplaceDdl(sql, expectedTableSpec, expectedIfNotExists = false)
+      testCreateOrReplaceDdl(sql, expectedTableSpec, expectedIfNotExists = false)
+    }
   }
 }
diff --git a/sql/core/src/test/resources/sql-tests/inputs/describe-table-column.sql b/sql/core/src/test/resources/sql-tests/inputs/describe-table-column.sql
index 821cb473751e..d55e398329b7 100644
--- a/sql/core/src/test/resources/sql-tests/inputs/describe-table-column.sql
+++ b/sql/core/src/test/resources/sql-tests/inputs/describe-table-column.sql
@@ -52,7 +52,7 @@ DROP TABLE desc_complex_col_table;
 
 --Test case insensitive
 
-CREATE TABLE customer(CName STRING);
+CREATE TABLE customer(CName STRING) USING PARQUET;
 
 INSERT INTO customer VALUES('Maria');
 
diff --git a/sql/core/src/test/resources/sql-tests/inputs/postgreSQL/create_view.sql b/sql/core/src/test/resources/sql-tests/inputs/postgreSQL/create_view.sql
index 39e708478e29..21ffd85f7d01 100644
--- a/sql/core/src/test/resources/sql-tests/inputs/postgreSQL/create_view.sql
+++ b/sql/core/src/test/resources/sql-tests/inputs/postgreSQL/create_view.sql
@@ -41,7 +41,7 @@ DROP TABLE emp;
 
 -- These views are left around mainly to exercise special cases in pg_dump.
 -- [SPARK-19842] Informational Referential Integrity Constraints Support in Spark
-CREATE TABLE view_base_table (key int /* PRIMARY KEY */, data varchar(20));
+CREATE TABLE view_base_table (key int /* PRIMARY KEY */, data varchar(20)) USING PARQUET;
 
 -- CREATE VIEW key_dependent_view AS SELECT * FROM view_base_table GROUP BY key;
 
diff --git a/sql/core/src/test/resources/sql-tests/results/describe-table-column.sql.out b/sql/core/src/test/resources/sql-tests/results/describe-table-column.sql.out
index ae9240ec588d..c6d3d45879eb 100644
--- a/sql/core/src/test/resources/sql-tests/results/describe-table-column.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/describe-table-column.sql.out
@@ -267,7 +267,7 @@ struct<>
 
 
 -- !query
-CREATE TABLE customer(CName STRING)
+CREATE TABLE customer(CName STRING) USING PARQUET
 -- !query schema
 struct<>
 -- !query output
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/create_view.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/create_view.sql.out
index 1f2bd5795cf1..ae1cb2f17170 100644
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/create_view.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/create_view.sql.out
@@ -42,7 +42,7 @@ struct<>
 
 
 -- !query
-CREATE TABLE view_base_table (key int /* PRIMARY KEY */, data varchar(20))
+CREATE TABLE view_base_table (key int /* PRIMARY KEY */, data varchar(20)) USING PARQUET
 -- !query schema
 struct<>
 -- !query output
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
index ba4200d84d46..3304b259a85b 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
@@ -257,6 +257,7 @@ class DataSourceV2SQLSuite
   }
 
   test("CreateTable: without USING clause") {
+    spark.conf.set(SQLConf.LEGACY_CREATE_HIVE_TABLE_BY_DEFAULT_ENABLED.key, "false")
     // unset this config to use the default v2 session catalog.
     spark.conf.unset(V2_SESSION_CATALOG_IMPLEMENTATION.key)
     val testCatalog = catalog("testcat").asTableCatalog
@@ -613,6 +614,7 @@ class DataSourceV2SQLSuite
   }
 
   test("CreateTableAsSelect: without USING clause") {
+    spark.conf.set(SQLConf.LEGACY_CREATE_HIVE_TABLE_BY_DEFAULT_ENABLED.key, "false")
     // unset this config to use the default v2 session catalog.
     spark.conf.unset(V2_SESSION_CATALOG_IMPLEMENTATION.key)
     val testCatalog = catalog("testcat").asTableCatalog
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLParserSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLParserSuite.scala
index bacd64efedc8..28e5082886b6 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLParserSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLParserSuite.scala
@@ -40,7 +40,8 @@ import org.apache.spark.sql.test.SharedSparkSession
 import org.apache.spark.sql.types.{IntegerType, StructField, StructType}
 
 class DDLParserSuite extends AnalysisTest with SharedSparkSession {
-  private lazy val parser = new SparkSqlParser(new SQLConf)
+  private lazy val parser = new SparkSqlParser(new SQLConf().copy(
+    SQLConf.LEGACY_CREATE_HIVE_TABLE_BY_DEFAULT_ENABLED -> false))
 
   private def assertUnsupported(sql: String, containsThesePhrases: Seq[String] = Seq()): Unit = {
     val e = intercept[ParseException] {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/sources/InsertSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/sources/InsertSuite.scala
index c81080ebe4d6..7c48f046c72a 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/sources/InsertSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/sources/InsertSuite.scala
@@ -846,16 +846,16 @@ class InsertSuite extends DataSourceTest with SharedSparkSession {
   test("SPARK-29174 Support LOCAL in INSERT OVERWRITE DIRECTORY to data source") {
     withTempPath { dir =>
       val path = dir.toURI.getPath
-      sql(s"""create table tab1 ( a int) location '$path'""")
+      sql(s"""create table tab1 ( a int) using parquet location '$path'""")
       sql("insert into tab1 values(1)")
       checkAnswer(sql("select * from tab1"), Seq(1).map(i => Row(i)))
-      sql("create table tab2 ( a int)")
+      sql("create table tab2 ( a int) using parquet")
       sql("insert into tab2 values(2)")
       checkAnswer(sql("select * from tab2"), Seq(2).map(i => Row(i)))
       sql(s"""insert overwrite local directory '$path' using parquet select * from tab2""")
       sql("refresh table tab1")
       checkAnswer(sql("select * from tab1"), Seq(2).map(i => Row(i)))
-   }
+    }
   }
 
   test("SPARK-29174 fail LOCAL in INSERT OVERWRITE DIRECT remote path") {
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/StatisticsSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/StatisticsSuite.scala
index 488175a22bad..c1eab63ec073 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/StatisticsSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/StatisticsSuite.scala
@@ -1520,10 +1520,12 @@ class StatisticsSuite extends StatisticsCollectionTestBase with TestHiveSingleto
     val ext_tbl = "SPARK_30269_external"
     withTempDir { dir =>
       withTable(tbl, ext_tbl) {
-        sql(s"CREATE TABLE $tbl (key INT, value STRING, ds STRING) PARTITIONED BY (ds)")
+        sql(s"CREATE TABLE $tbl (key INT, value STRING, ds STRING)" +
+          "USING parquet PARTITIONED BY (ds)")
         sql(
           s"""
             | CREATE TABLE $ext_tbl (key INT, value STRING, ds STRING)
+            | USING PARQUET
            | PARTITIONED BY (ds)
            | LOCATION '${dir.toURI}'
          """.stripMargin)