Commit 3e4cd4b

code clean
1 parent 7dbb16c commit 3e4cd4b

File tree

2 files changed: 6 additions & 42 deletions


sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala

Lines changed: 0 additions & 4 deletions
@@ -1393,10 +1393,6 @@ private[spark] object QueryCompilationErrors {
     new AnalysisException("multi-part identifier cannot be empty.")
   }
 
-  def cannotCreateTablesWithNullTypeError(): Throwable = {
-    new AnalysisException(s"Cannot create tables with ${NullType.simpleString} type.")
-  }
-
   def functionUnsupportedInV2CatalogError(): Throwable = {
     new AnalysisException("function is only supported in v1 catalog")
   }
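
Note: with cannotCreateTablesWithNullTypeError removed, Catalyst no longer rejects null-typed (VOID) columns up front; the updated HiveDDLSuite test below expects the rejection to come from the file source instead. A minimal standalone sketch of that behaviour, not part of this commit (the SparkSession setup and local master are assumptions; the error text is the one the test asserts):

// Hedged sketch, not part of this commit: a Parquet CTAS with a null-typed
// column is expected to fail in the data source, while a plain CTAS succeeds.
import org.apache.spark.sql.{AnalysisException, SparkSession}

object NullTypeCtasSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")        // assumed local setup, not from the commit
      .enableHiveSupport()       // the suite runs against a Hive-enabled session
      .getOrCreate()

    try {
      // Parquet cannot store a NullType column, so this CTAS should fail.
      spark.sql("CREATE TABLE t1 USING PARQUET AS SELECT NULL AS null_col")
    } catch {
      case e: AnalysisException =>
        assert(e.getMessage.contains("Parquet data source does not support null data type"))
    }

    // A plain CTAS keeps the null-typed column, as the updated test expects.
    spark.sql("CREATE TABLE t2 AS SELECT NULL AS null_col")
    spark.sql("SELECT * FROM t2").show()   // a single row containing null

    spark.stop()
  }
}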

sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala

Lines changed: 6 additions & 38 deletions
@@ -2393,79 +2393,47 @@ class HiveDDLSuite
     }
   }
 
-  test("SPARK-36241: support creating tables with void datatype") {
-    // CTAS with void type
+  test("SPARK-36241: support creating tables with null datatype") {
+    // CTAS with null type
     withTable("t1", "t2", "t3") {
       assertAnalysisError(
         "CREATE TABLE t1 USING PARQUET AS SELECT NULL AS null_col",
-        "Parquet data source does not support void data type")
+        "Parquet data source does not support null data type")
       sql("CREATE TABLE t2 AS SELECT NULL AS null_col")
       checkAnswer(sql("SELECT * FROM t2"), Row(null))
-      // sql("CREATE TABLE t3 STORED AS PARQUET AS SELECT NULL AS null_col")
-      // checkAnswer(sql("SELECT * FROM t3"), Row(null))
     }
 
-    // Replace table AS SELECT with void type
-    // withTable("t") {
-    //   sql("CREATE OR REPLACE TABLE t AS SELECT NULL as null_col")
-    //   checkAnswer(sql("SELECT * FROM t"), Row(null))
-    // }
-
-    // Create table with void type
+    // Create table with null type
     withTable("t1", "t2", "t3", "t4") {
       assertAnalysisError(
         "CREATE TABLE t1 (v VOID) USING PARQUET",
-        "Parquet data source does not support void data type")
+        "Parquet data source does not support null data type")
       sql("CREATE TABLE t2 (v VOID) USING hive")
       checkAnswer(sql("SELECT * FROM t2"), Seq.empty)
       sql("CREATE TABLE t3 (v VOID)")
      checkAnswer(sql("SELECT * FROM t3"), Seq.empty)
-      // sql("CREATE TABLE t4 (v VOID) STORED AS PARQUET")
-      // checkAnswer(sql("SELECT * FROM t4"), Seq.empty)
     }
 
-    // Replace table with void type
-    // withTable("t") {
-    //   sql("CREATE OR REPLACE TABLE t (v VOID)")
-    //   checkAnswer(sql("SELECT * FROM t"), Seq.empty)
-    // }
-
-    // Make sure spark.catalog.createTable with void type will fail
+    // Make sure spark.catalog.createTable with null type will fail
     val schema1 = new StructType().add("c", NullType)
-    // checkHiveTableNullType(schema1)
     checkDSTableNullType(schema1)
 
     val schema2 = new StructType()
       .add("c", StructType(Seq(StructField.apply("c1", NullType))))
-    // checkHiveTableNullType(schema2)
     checkDSTableNullType(schema2)
 
     val schema3 = new StructType().add("c", ArrayType(NullType))
-    // checkHiveTableNullType(schema3)
     checkDSTableNullType(schema3)
 
     val schema4 = new StructType()
       .add("c", MapType(StringType, NullType))
-    // checkHiveTableNullType(schema4)
     checkDSTableNullType(schema4)
 
     val schema5 = new StructType()
       .add("c", MapType(NullType, StringType))
-    // checkHiveTableNullType(schema5)
     checkDSTableNullType(schema5)
   }
 
-  private def checkHiveTableNullType(schema: StructType): Unit = {
-    withTable("t") {
-      spark.catalog.createTable(
-        tableName = "t",
-        source = "hive",
-        schema = schema,
-        options = Map("fileFormat" -> "parquet"))
-      checkAnswer(sql("SELECT * FROM t"), Seq.empty)
-    }
-  }
-
   private def checkDSTableNullType(schema: StructType): Unit = {
     withTable("t") {
       spark.catalog.createTable(
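
Note: the hunk ends inside checkDSTableNullType, so its body past spark.catalog.createTable( is not shown in this commit. For orientation only, a hedged sketch of how such a helper can assert the failure promised by the comment "Make sure spark.catalog.createTable with null type will fail"; the parquet source, empty options, and intercepted message are assumptions, written as if inside the suite (withTable, intercept and spark come from its mixins), not code from this commit:

  // Hedged sketch only; the real helper body is not part of this hunk.
  private def checkDSTableNullType(schema: StructType): Unit = {
    withTable("t") {                             // suite helper: drops the table afterwards
      val e = intercept[AnalysisException] {     // ScalaTest intercept, mixed into the suite
        spark.catalog.createTable(
          tableName = "t",
          source = "parquet",                    // assumed data source
          schema = schema,                       // carries a NullType field somewhere
          options = Map.empty[String, String])
      }
      assert(e.getMessage.contains("does not support null data type"))  // assumed message
    }
  }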
