Commit fda353f (1 parent: 8c67aa7)

[SPARK-20680][SQL] Spark-sql do not support for void column datatype of view

4 files changed (+20, -0 lines)

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala (1 addition, 0 deletions)

@@ -1504,6 +1504,7 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with Logging {
       case ("decimal", precision :: Nil) => DecimalType(precision.getText.toInt, 0)
       case ("decimal", precision :: scale :: Nil) =>
         DecimalType(precision.getText.toInt, scale.getText.toInt)
+      case ("void", Nil) => NullType
       case (dt, params) =>
         val dtStr = if (params.nonEmpty) s"$dt(${params.mkString(",")})" else dt
         throw new ParseException(s"DataType $dtStr is not supported.", ctx)
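With this branch in place, a DDL type string that names "void" resolves to NullType instead of falling through to the ParseException case below it. A minimal illustrative sketch of what that enables (not part of the commit; the object name and assertions are made up, and it assumes the patched parser is on the classpath):

import org.apache.spark.sql.catalyst.parser.CatalystSqlParser
import org.apache.spark.sql.types.{NullType, StructType}

object VoidParseSketch {
  def main(args: Array[String]): Unit = {
    // A bare "void" type name now parses to NullType.
    assert(CatalystSqlParser.parseDataType("void") == NullType)

    // The same branch covers "void" nested in a struct, e.g. a view column schema.
    val schema = CatalystSqlParser.parseDataType("struct<col:void>").asInstanceOf[StructType]
    assert(schema("col").dataType == NullType)
  }
}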

sql/catalyst/src/main/scala/org/apache/spark/sql/types/NullType.scala (5 additions, 0 deletions)

@@ -33,6 +33,11 @@ class NullType private() extends DataType {
   override def defaultSize: Int = 1

   private[spark] override def asNullable: NullType = this
+
+  /**
+   * Readable string representation for NULL type.
+   */
+  override def simpleString: String = "void"
 }

 /**
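Since catalog strings and struct type strings build on simpleString, this override is what makes a NULL-typed column print as "void" (for example in DESC output). A small illustrative sketch (not from the commit; the object name is made up):

import org.apache.spark.sql.types.{NullType, StructField, StructType}

object NullTypeStringSketch {
  def main(args: Array[String]): Unit = {
    // The override changes only the textual representation, not the type's semantics.
    assert(NullType.simpleString == "void")

    // Struct type strings embed each field's simpleString, so a NULL column renders as "void".
    val schema = StructType(Seq(StructField("col", NullType)))
    println(schema.simpleString) // expected: struct<col:void>
  }
}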

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DataTypeParserSuite.scala (1 addition, 0 deletions)

@@ -58,6 +58,7 @@ class DataTypeParserSuite extends SparkFunSuite {
   checkDataType("varchAr(20)", StringType)
   checkDataType("cHaR(27)", StringType)
   checkDataType("BINARY", BinaryType)
+  checkDataType("void", NullType)

   checkDataType("array<doublE>", ArrayType(DoubleType, true))
   checkDataType("Array<map<int, tinYint>>", ArrayType(MapType(IntegerType, ByteType, true), true))

sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala (13 additions, 0 deletions)

@@ -1928,4 +1928,17 @@ class HiveDDLSuite
       }
     }
   }
+
+  test("SPARK-20680: Spark-sql do not support for void column datatype of view") {
+    withTable("t", "tabNullType") {
+      val client = spark.sharedState.externalCatalog.asInstanceOf[HiveExternalCatalog].client
+      client.runSqlHive("CREATE TABLE t (t1 int)")
+      client.runSqlHive("INSERT INTO t VALUES (3)")
+      client.runSqlHive("CREATE TABLE tabNullType AS SELECT NULL AS col FROM t")
+      checkAnswer(spark.table("tabNullType"), Row(null))
+      // table description shows "void" representation for NULL type.
+      val desc = spark.sql("DESC tabNullType").collect().toSeq
+      assert(desc.contains(Row("col", "void", null)))
+    }
+  }
 }
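For reference, the user-facing scenario this regression test covers looks roughly like the following in a Hive-enabled session (illustrative sketch, not from the commit; the view name and the printed output shape are assumptions):

// `spark` is an existing SparkSession created with .enableHiveSupport().
spark.sql("CREATE VIEW v AS SELECT NULL AS col")
spark.sql("DESC v").show()
// Expected shape of the result with this patch (previously describing such a
// view could fail with "DataType void is not supported."):
// +--------+---------+-------+
// |col_name|data_type|comment|
// +--------+---------+-------+
// |     col|     void|   null|
// +--------+---------+-------+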
