@@ -362,7 +362,13 @@ private[sql] class JDBCRDD(
       conversions(i) match {
         case BooleanConversion => mutableRow.setBoolean(i, rs.getBoolean(pos))
         case DateConversion =>
-          mutableRow.update(i, DateUtils.fromJavaDate(rs.getDate(pos)))
+          // DateUtils.fromJavaDate does not handle null values, so we need to check for null first.
+          val dateVal = rs.getDate(pos)
+          if (dateVal != null) {
+            mutableRow.update(i, DateUtils.fromJavaDate(dateVal))
+          } else {
+            mutableRow.update(i, null)
+          }
         case DecimalConversion =>
           val decimalVal = rs.getBigDecimal(pos)
           if (decimalVal == null) {
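The guard above is needed because java.sql.ResultSet.getDate returns null for a SQL NULL column, while DateUtils.fromJavaDate expects a non-null java.sql.Date. A minimal sketch of the same pattern in isolation; the object name and the toDays parameter below are hypothetical stand-ins for DateUtils.fromJavaDate and are not part of the patch:

import java.sql.{Date, ResultSet}

object NullSafeDateRead {
  // Option(...) is None when the column is SQL NULL, so the converter never sees a null Date.
  def readDateAsDays(rs: ResultSet, pos: Int)(toDays: Date => Int): Option[Int] =
    Option(rs.getDate(pos)).map(toDays)
}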
25 changes: 25 additions & 0 deletions sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
@@ -104,6 +104,8 @@ class JDBCSuite extends FunSuite with BeforeAndAfter {
     ).executeUpdate()
     conn.prepareStatement("insert into test.timetypes values ('12:34:56', "
       + "'1996-01-01', '2002-02-20 11:22:33.543543543')").executeUpdate()
+    conn.prepareStatement("insert into test.timetypes values ('12:34:56', "
+      + "null, '2002-02-20 11:22:33.543543543')").executeUpdate()
     conn.commit()
     sql(
       s"""
@@ -127,6 +129,23 @@ class JDBCSuite extends FunSuite with BeforeAndAfter {
         |OPTIONS (url '$url', dbtable 'TEST.FLTTYPES', user 'testUser', password 'testPass')
       """.stripMargin.replaceAll("\n", " "))
 
+    conn.prepareStatement(
+      s"""
+        |create table test.nulltypes (a INT, b BOOLEAN, c TINYINT, d BINARY(20), e VARCHAR(20),
+        |f VARCHAR_IGNORECASE(20), g CHAR(20), h BLOB, i CLOB, j TIME, k DATE, l TIMESTAMP,
+        |m DOUBLE, n REAL, o DECIMAL(40, 20))
+      """.stripMargin.replaceAll("\n", " ")).executeUpdate()
+    conn.prepareStatement("insert into test.nulltypes values ("
+      + "null, null, null, null, null, null, null, null, null, "
+      + "null, null, null, null, null, null)").executeUpdate()
+    conn.commit()
+    sql(
+      s"""
+        |CREATE TEMPORARY TABLE nulltypes
+        |USING org.apache.spark.sql.jdbc
+        |OPTIONS (url '$url', dbtable 'TEST.NULLTYPES', user 'testUser', password 'testPass')
+      """.stripMargin.replaceAll("\n", " "))
+
     // Untested: IDENTITY, OTHER, UUID, ARRAY, and GEOMETRY types.
   }
 
@@ -254,6 +273,7 @@ class JDBCSuite extends FunSuite with BeforeAndAfter {
val rows = TestSQLContext.jdbc(urlWithUserAndPass, "TEST.TIMETYPES").collect()
val cachedRows = TestSQLContext.jdbc(urlWithUserAndPass, "TEST.TIMETYPES").cache().collect()
assert(rows(0).getAs[java.sql.Date](1) === java.sql.Date.valueOf("1996-01-01"))
assert(rows(1).getAs[java.sql.Date](1) === null)
assert(cachedRows(0).getAs[java.sql.Date](1) === java.sql.Date.valueOf("1996-01-01"))
}

@@ -266,6 +286,11 @@ class JDBCSuite extends FunSuite with BeforeAndAfter {
     assert(cachedRows(0).getAs[java.sql.Date](1) === java.sql.Date.valueOf("1996-01-01"))
   }
 
+  test("test types for null value") {
+    val rows = TestSQLContext.jdbc(urlWithUserAndPass, "TEST.NULLTYPES").collect()
+    assert((0 to 14).forall(i => rows(0).isNullAt(i)))
+  }
+
   test("H2 floating-point types") {
     val rows = sql("SELECT * FROM flttypes").collect()
     assert(rows(0).getDouble(0) === 1.00000000000000022) // Yes, I meant ==.
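Since the setup above also registers TEST.NULLTYPES as the temporary table nulltypes, the same all-null row could additionally be checked through the SQL path that the other tests use. A sketch of such a test, assuming the suite's existing sql helper; it is illustrative only and not part of this patch:

  test("null types via the registered temporary table") {
    // Same expectation as the jdbc() based test above, but routed through the SQL parser:
    // every one of the 15 columns in the single row should come back as NULL.
    val rows = sql("SELECT * FROM nulltypes").collect()
    assert((0 to 14).forall(i => rows(0).isNullAt(i)))
  }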