From e13326d341ca86bff33cb200cc52630875fee0f0 Mon Sep 17 00:00:00 2001
From: shivsood
Date: Tue, 29 Oct 2019 18:55:44 -0700
Subject: [PATCH 1/5] Fixed ShortType wrongly set as Int in JdbcUtils.scala.
 Added a test case that writes/reads a table from a DataFrame with a
 ShortType column.

---
 .../jdbc/MsSqlServerIntegrationSuite.scala    | 21 +++++++++++++++++++
 .../datasources/jdbc/JdbcUtils.scala          |  2 +-
 2 files changed, 22 insertions(+), 1 deletion(-)

diff --git a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MsSqlServerIntegrationSuite.scala b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MsSqlServerIntegrationSuite.scala
index efd7ca74c796..6e6da7cb7cc7 100644
--- a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MsSqlServerIntegrationSuite.scala
+++ b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MsSqlServerIntegrationSuite.scala
@@ -202,4 +202,25 @@ class MsSqlServerIntegrationSuite extends DockerJDBCIntegrationSuite {
     df2.write.jdbc(jdbcUrl, "datescopy", new Properties)
     df3.write.jdbc(jdbcUrl, "stringscopy", new Properties)
   }
+
+  test("Write tables with ShortType") {
+    import testImplicits._
+    val df = Seq(-32768.toShort, 0.toShort, 1.toShort, 38.toShort, 32768.toShort).toDF("a")
+    val tablename = "shorttable"
+    df.write
+      .format("jdbc")
+      .mode("overwrite")
+      .option("url", jdbcUrl)
+      .option("dbtable", tablename)
+      .save()
+    val df2 = spark.read
+      .format("jdbc")
+      .option("url", jdbcUrl)
+      .option("dbtable", tablename)
+      .load()
+    assert(df.count == df2.count)
+    val rows = df2.collect()
+    val colType = rows(0).toSeq.map(x => x.getClass.toString)
+    assert(colType(0) == "class java.lang.Short")
+  }
 }
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala
index 55ca4e3624bd..b8d79f029c67 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala
@@ -546,7 +546,7 @@ object JdbcUtils extends Logging {
 
     case ShortType =>
       (stmt: PreparedStatement, row: Row, pos: Int) =>
-        stmt.setInt(pos + 1, row.getShort(pos))
+        stmt.setShort(pos + 1, row.getShort(pos))
 
     case ByteType =>
       (stmt: PreparedStatement, row: Row, pos: Int) =>

From 395fe995bc42c083a187c81b972322462f662557 Mon Sep 17 00:00:00 2001
From: shivsood
Date: Fri, 1 Nov 2019 14:48:59 -0700
Subject: [PATCH 2/5] Fix for ShortType and ByteType in JdbcUtils.scala: unit
 test cases in JDBCWriteSuite and E2E test cases in the Docker integration
 suites.

---
 .../jdbc/MsSqlServerIntegrationSuite.scala    | 27 ++++++++--
 .../sql/jdbc/MySQLIntegrationSuite.scala      | 46 ++++++++++++++++-
 .../sql/jdbc/PostgresIntegrationSuite.scala   | 42 ++++++++++++++++
 .../datasources/jdbc/JdbcUtils.scala          | 10 ++--
 .../spark/sql/jdbc/JDBCWriteSuite.scala       | 50 +++++++++++++++++++
 5 files changed, 165 insertions(+), 10 deletions(-)

diff --git a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MsSqlServerIntegrationSuite.scala b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MsSqlServerIntegrationSuite.scala
index 6e6da7cb7cc7..062f3a160fdd 100644
--- a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MsSqlServerIntegrationSuite.scala
+++ b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MsSqlServerIntegrationSuite.scala
@@ -59,7 +59,7 @@ class MsSqlServerIntegrationSuite extends DockerJDBCIntegrationSuite {
       """
         |INSERT INTO numbers VALUES (
         |0,
-        |255, 32767, 2147483647, 9223372036854775807,
+        |127, 32767, 2147483647, 9223372036854775807,
         |123456789012345.123456789012345, 123456789012345.123456789012345,
         |123456789012345.123456789012345,
         |123, 12345.12,
@@ -119,7 +119,7 @@ class MsSqlServerIntegrationSuite extends DockerJDBCIntegrationSuite {
     val types = row.toSeq.map(x => x.getClass.toString)
     assert(types.length == 12)
     assert(types(0).equals("class java.lang.Boolean"))
-    assert(types(1).equals("class java.lang.Integer"))
+    assert(types(1).equals("class java.lang.Byte"))
     assert(types(2).equals("class java.lang.Short"))
     assert(types(3).equals("class java.lang.Integer"))
     assert(types(4).equals("class java.lang.Long"))
@@ -131,7 +131,7 @@ class MsSqlServerIntegrationSuite extends DockerJDBCIntegrationSuite {
     assert(types(10).equals("class java.math.BigDecimal"))
     assert(types(11).equals("class java.math.BigDecimal"))
     assert(row.getBoolean(0) == false)
-    assert(row.getInt(1) == 255)
+    assert(row.getByte(1) == 127)
     assert(row.getShort(2) == 32767)
     assert(row.getInt(3) == 2147483647)
     assert(row.getLong(4) == 9223372036854775807L)
@@ -223,4 +223,25 @@ class MsSqlServerIntegrationSuite extends DockerJDBCIntegrationSuite {
     val colType = rows(0).toSeq.map(x => x.getClass.toString)
     assert(colType(0) == "class java.lang.Short")
   }
+
+  test("Write tables with ByteType") {
+    import testImplicits._
+    val df = Seq(-127.toByte, 0.toByte, 1.toByte, 38.toByte, 128.toByte).toDF("a")
+    val tablename = "bytetable"
+    df.write
+      .format("jdbc")
+      .mode("overwrite")
+      .option("url", jdbcUrl)
+      .option("dbtable", tablename)
+      .save()
+    val df2 = spark.read
+      .format("jdbc")
+      .option("url", jdbcUrl)
+      .option("dbtable", tablename)
+      .load()
+    assert(df.count == df2.count)
+    val rows = df2.collect()
+    val colType = rows(0).toSeq.map(x => x.getClass.toString)
+    assert(colType(0) == "class java.lang.Byte")
+  }
 }
diff --git a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MySQLIntegrationSuite.scala b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MySQLIntegrationSuite.scala
index bba1b5275269..e9465de6ad2d 100644
--- a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MySQLIntegrationSuite.scala
+++ b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MySQLIntegrationSuite.scala
@@ -84,7 +84,7 @@ class MySQLIntegrationSuite extends DockerJDBCIntegrationSuite {
     assert(types.length == 9)
     assert(types(0).equals("class java.lang.Boolean"))
     assert(types(1).equals("class java.lang.Long"))
-    assert(types(2).equals("class java.lang.Integer"))
+    assert(types(2).equals("class java.lang.Short"))
     assert(types(3).equals("class java.lang.Integer"))
     assert(types(4).equals("class java.lang.Integer"))
     assert(types(5).equals("class java.lang.Long"))
@@ -93,7 +93,7 @@ class MySQLIntegrationSuite extends DockerJDBCIntegrationSuite {
     assert(types(8).equals("class java.lang.Double"))
     assert(rows(0).getBoolean(0) == false)
     assert(rows(0).getLong(1) == 0x225)
-    assert(rows(0).getInt(2) == 17)
+    assert(rows(0).getShort(2) == 17)
     assert(rows(0).getInt(3) == 77777)
     assert(rows(0).getInt(4) == 123456789)
     assert(rows(0).getLong(5) == 123456789012345L)
@@ -181,4 +181,46 @@ class MySQLIntegrationSuite extends DockerJDBCIntegrationSuite {
       """.stripMargin.replaceAll("\n", " "))
     assert(sql("select x, y from queryOption").collect.toSet == expectedResult)
   }
+
+  test("Write tables with ShortType") {
+    import testImplicits._
+    val df = Seq(-32768.toShort, 0.toShort, 1.toShort, 38.toShort, 32768.toShort).toDF("a")
+    val tablename = "shorttable"
+    df.write
+      .format("jdbc")
+      .mode("overwrite")
+      .option("url", jdbcUrl)
+      .option("dbtable", tablename)
+      .save()
+    val df2 = spark.read
+      .format("jdbc")
+      .option("url", jdbcUrl)
+      .option("dbtable", tablename)
+      .load()
+    assert(df.count == df2.count)
+    val rows = df2.collect()
+    val colType = rows(0).toSeq.map(x => x.getClass.toString)
+    assert(colType(0) == "class java.lang.Short")
+  }
+
+  test("Write tables with ByteType") {
+    import testImplicits._
+    val df = Seq(-127.toByte, 0.toByte, 1.toByte, 38.toByte, 128.toByte).toDF("a")
+    val tablename = "bytetable"
+    df.write
+      .format("jdbc")
+      .mode("overwrite")
+      .option("url", jdbcUrl)
+      .option("dbtable", tablename)
+      .save()
+    val df2 = spark.read
+      .format("jdbc")
+      .option("url", jdbcUrl)
+      .option("dbtable", tablename)
+      .load()
+    assert(df.count == df2.count)
+    val rows = df2.collect()
+    val colType = rows(0).toSeq.map(x => x.getClass.toString)
+    assert(colType(0) == "class java.lang.Byte")
+  }
 }
diff --git a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/PostgresIntegrationSuite.scala b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/PostgresIntegrationSuite.scala
index 599f00def075..d55a99ea3b39 100644
--- a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/PostgresIntegrationSuite.scala
+++ b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/PostgresIntegrationSuite.scala
@@ -219,4 +219,46 @@ class PostgresIntegrationSuite extends DockerJDBCIntegrationSuite {
     assert(rows(0).getShort(0) === 1)
     assert(rows(0).getShort(1) === 2)
   }
+
+  test("Write tables with ShortType") {
+    import testImplicits._
+    val df = Seq(-32768.toShort, 0.toShort, 1.toShort, 38.toShort, 32768.toShort).toDF("a")
+    val tablename = "shorttable"
+    df.write
+      .format("jdbc")
+      .mode("overwrite")
+      .option("url", jdbcUrl)
+      .option("dbtable", tablename)
+      .save()
+    val df2 = spark.read
+      .format("jdbc")
+      .option("url", jdbcUrl)
+      .option("dbtable", tablename)
+      .load()
+    assert(df.count == df2.count)
+    val rows = df2.collect()
+    val colType = rows(0).toSeq.map(x => x.getClass.toString)
+    assert(colType(0) == "class java.lang.Short")
+  }
+
+  test("Write tables with ByteType") {
+    import testImplicits._
+    val df = Seq(-127.toByte, 0.toByte, 1.toByte, 38.toByte, 128.toByte).toDF("a")
+    val tablename = "bytetable"
+    df.write
+      .format("jdbc")
+      .mode("overwrite")
+      .option("url", jdbcUrl)
+      .option("dbtable", tablename)
+      .save()
+    val df2 = spark.read
+      .format("jdbc")
+      .option("url", jdbcUrl)
+      .option("dbtable", tablename)
+      .load()
+    assert(df.count == df2.count)
+    val rows = df2.collect()
+    val colType = rows(0).toSeq.map(x => x.getClass.toString)
+    assert(colType(0) == "class java.lang.Short")
+  }
 }
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala
index b8d79f029c67..2ccb9315ebc6 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala
@@ -170,8 +170,8 @@ object JdbcUtils extends Logging {
       case LongType => Option(JdbcType("BIGINT", java.sql.Types.BIGINT))
       case DoubleType => Option(JdbcType("DOUBLE PRECISION", java.sql.Types.DOUBLE))
       case FloatType => Option(JdbcType("REAL", java.sql.Types.FLOAT))
-      case ShortType => Option(JdbcType("INTEGER", java.sql.Types.SMALLINT))
-      case ByteType => Option(JdbcType("BYTE", java.sql.Types.TINYINT))
+      case ShortType => Option(JdbcType("SMALLINT", java.sql.Types.SMALLINT))
+      case ByteType => Option(JdbcType("TINYINT", java.sql.Types.TINYINT))
       case BooleanType => Option(JdbcType("BIT(1)", java.sql.Types.BIT))
       case StringType => Option(JdbcType("TEXT", java.sql.Types.CLOB))
       case BinaryType => Option(JdbcType("BLOB", java.sql.Types.BLOB))
@@ -235,7 +235,7 @@ object JdbcUtils extends Logging {
       case java.sql.Types.REF => StringType
       case java.sql.Types.REF_CURSOR => null
       case java.sql.Types.ROWID => LongType
-      case java.sql.Types.SMALLINT => IntegerType
+      case java.sql.Types.SMALLINT => ShortType
       case java.sql.Types.SQLXML => StringType
       case java.sql.Types.STRUCT => StringType
       case java.sql.Types.TIME => TimestampType
@@ -244,7 +244,7 @@ object JdbcUtils extends Logging {
       case java.sql.Types.TIMESTAMP => TimestampType
       case java.sql.Types.TIMESTAMP_WITH_TIMEZONE
                                     => null
-      case java.sql.Types.TINYINT => IntegerType
+      case java.sql.Types.TINYINT => ByteType
       case java.sql.Types.VARBINARY => BinaryType
       case java.sql.Types.VARCHAR => StringType
       case _ =>
@@ -550,7 +550,7 @@ object JdbcUtils extends Logging {
 
     case ByteType =>
       (stmt: PreparedStatement, row: Row, pos: Int) =>
-        stmt.setInt(pos + 1, row.getByte(pos))
+        stmt.setByte(pos + 1, row.getByte(pos))
 
     case BooleanType =>
       (stmt: PreparedStatement, row: Row, pos: Int) =>
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCWriteSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCWriteSuite.scala
index 8021ef1a17a1..a4ebab3c8cdc 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCWriteSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCWriteSuite.scala
@@ -89,6 +89,8 @@ class JDBCWriteSuite extends SharedSparkSession with BeforeAndAfter {
   }
 
   private lazy val arr2x2 = Array[Row](Row.apply("dave", 42), Row.apply("mary", 222))
+  private lazy val shortarr2x2 = Array[Row](Row.apply("dave", 42.toShort), Row.apply("mary", 31.toShort))
+  private lazy val bytearr2x2 = Array[Row](Row.apply("dave", 42.toByte), Row.apply("mary", 31.toByte))
   private lazy val arr1x2 = Array[Row](Row.apply("fred", 3))
   private lazy val schema2 = StructType(
       StructField("name", StringType) ::
@@ -104,6 +106,14 @@ class JDBCWriteSuite extends SharedSparkSession with BeforeAndAfter {
       StructField("NAME", StringType) ::
       StructField("ID", IntegerType) :: Nil)
 
+  private lazy val schema5 = StructType(
+      StructField("NAME", StringType) ::
+      StructField("ID", ShortType) :: Nil)
+
+  private lazy val schema6 = StructType(
+      StructField("NAME", StringType) ::
+      StructField("ID", ByteType) :: Nil)
+
   test("Basic CREATE") {
     val df = spark.createDataFrame(sparkContext.parallelize(arr2x2), schema2)
 
@@ -574,6 +584,46 @@ class JDBCWriteSuite extends SharedSparkSession with BeforeAndAfter {
     }
   }
 
+  test("test writing table with ShortType") {
+    val df = spark.createDataFrame(sparkContext.parallelize(shortarr2x2), schema5)
+    val tablename = "shorttable"
+    df.write
+      .format("jdbc")
+      .mode("overwrite")
+      .option("url", url)
+      .option("dbtable", tablename)
+      .save()
+    val df2 = spark.read
+      .format("jdbc")
+      .option("url", url)
+      .option("dbtable", tablename)
+      .load()
+    assert(df.count == df2.count)
+    val rows = df2.collect()
+    val colType = rows(0).toSeq.map(x => x.getClass.toString)
+    assert(colType(1) == "class java.lang.Short")
+  }
+
+  test("test writing table with ByteType") {
+    val df = spark.createDataFrame(sparkContext.parallelize(bytearr2x2), schema6)
+    val tablename = "bytetable"
+    df.write
+      .format("jdbc")
+      .mode("overwrite")
+      .option("url", url)
+      .option("dbtable", tablename)
+      .save()
+    val df2 = spark.read
+      .format("jdbc")
+      .option("url", url)
+      .option("dbtable", tablename)
+      .load()
+    assert(df.count == df2.count)
+    val rows = df2.collect()
+    val colType = rows(0).toSeq.map(x => x.getClass.toString)
+    assert(colType(1) == "class java.lang.Byte")
+  }
+
   private def runAndVerifyRecordsWritten(expected: Long)(job: => Unit): Unit = {
     assert(expected === runAndReturnMetrics(job, _.taskMetrics.outputMetrics.recordsWritten))
   }

From 66dfd4c37abcb4944ce20c6276d1ac13ad9e2191 Mon Sep 17 00:00:00 2001
From: shivsood
Date: Fri, 1 Nov 2019 16:37:29 -0700
Subject: [PATCH 3/5] Test fixes.

---
 .../spark/sql/jdbc/JDBCWriteSuite.scala       | 24 ++++++++------------
 1 file changed, 8 insertions(+), 16 deletions(-)

diff --git a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCWriteSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCWriteSuite.scala
index a4ebab3c8cdc..047562dfe7e5 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCWriteSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCWriteSuite.scala
@@ -89,8 +89,6 @@ class JDBCWriteSuite extends SharedSparkSession with BeforeAndAfter {
   }
 
   private lazy val arr2x2 = Array[Row](Row.apply("dave", 42), Row.apply("mary", 222))
-  private lazy val shortarr2x2 = Array[Row](Row.apply("dave", 42.toShort), Row.apply("mary", 31.toShort))
-  private lazy val bytearr2x2 = Array[Row](Row.apply("dave", 42.toByte), Row.apply("mary", 31.toByte))
   private lazy val arr1x2 = Array[Row](Row.apply("fred", 3))
   private lazy val schema2 = StructType(
       StructField("name", StringType) ::
@@ -106,14 +104,6 @@ class JDBCWriteSuite extends SharedSparkSession with BeforeAndAfter {
       StructField("NAME", StringType) ::
       StructField("ID", IntegerType) :: Nil)
 
-  private lazy val schema5 = StructType(
-      StructField("NAME", StringType) ::
-      StructField("ID", ShortType) :: Nil)
-
-  private lazy val schema6 = StructType(
-      StructField("NAME", StringType) ::
-      StructField("ID", ByteType) :: Nil)
-
   test("Basic CREATE") {
     val df = spark.createDataFrame(sparkContext.parallelize(arr2x2), schema2)
 
@@ -584,8 +574,9 @@ class JDBCWriteSuite extends SharedSparkSession with BeforeAndAfter {
     }
   }
 
-  test("test writing table with ShortType") {
-    val df = spark.createDataFrame(sparkContext.parallelize(shortarr2x2), schema5)
+  test("Write tables with ShortType") {
+    import testImplicits._
+    val df = Seq(-32768.toShort, 0.toShort, 1.toShort, 38.toShort, 32768.toShort).toDF("a")
     val tablename = "shorttable"
     df.write
       .format("jdbc")
@@ -601,11 +592,12 @@ class JDBCWriteSuite extends SharedSparkSession with BeforeAndAfter {
     assert(df.count == df2.count)
     val rows = df2.collect()
     val colType = rows(0).toSeq.map(x => x.getClass.toString)
-    assert(colType(1) == "class java.lang.Short")
+    assert(colType(0) == "class java.lang.Short")
   }
 
-  test("test writing table with ByteType") {
-    val df = spark.createDataFrame(sparkContext.parallelize(bytearr2x2), schema6)
+  test("Write tables with ByteType") {
+    import testImplicits._
+    val df = Seq(-127.toByte, 0.toByte, 1.toByte, 38.toByte, 128.toByte).toDF("a")
     val tablename = "bytetable"
     df.write
       .format("jdbc")
@@ -621,7 +613,7 @@ class JDBCWriteSuite extends SharedSparkSession with BeforeAndAfter {
     assert(df.count == df2.count)
     val rows = df2.collect()
     val colType = rows(0).toSeq.map(x => x.getClass.toString)
-    assert(colType(1) == "class java.lang.Byte")
+    assert(colType(0) == "class java.lang.Byte")
   }
 
   private def runAndVerifyRecordsWritten(expected: Long)(job: => Unit): Unit = {

From 9cdad2b0a22d47300874f1a47ca22738d5a2e467 Mon Sep 17 00:00:00 2001
From: shivsood
Date: Fri, 8 Nov 2019 16:01:13 -0800
Subject: [PATCH 4/5] Fixed test failure in JDBCSuite.scala: read TINYINT and
 SMALLINT columns with getByte and getShort instead of getInt. Added the
 JIRA ID to test titles as suggested.

---
 .../apache/spark/sql/jdbc/MsSqlServerIntegrationSuite.scala   | 4 ++--
 .../org/apache/spark/sql/jdbc/MySQLIntegrationSuite.scala     | 4 ++--
 .../org/apache/spark/sql/jdbc/PostgresIntegrationSuite.scala  | 4 ++--
 .../src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala  | 4 ++--
 .../test/scala/org/apache/spark/sql/jdbc/JDBCWriteSuite.scala | 4 ++--
 5 files changed, 10 insertions(+), 10 deletions(-)

diff --git a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MsSqlServerIntegrationSuite.scala b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MsSqlServerIntegrationSuite.scala
index 062f3a160fdd..f1cd3343b792 100644
--- a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MsSqlServerIntegrationSuite.scala
+++ b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MsSqlServerIntegrationSuite.scala
@@ -203,7 +203,7 @@ class MsSqlServerIntegrationSuite extends DockerJDBCIntegrationSuite {
     df3.write.jdbc(jdbcUrl, "stringscopy", new Properties)
   }
 
-  test("Write tables with ShortType") {
+  test("SPARK-29644: Write tables with ShortType") {
     import testImplicits._
     val df = Seq(-32768.toShort, 0.toShort, 1.toShort, 38.toShort, 32768.toShort).toDF("a")
     val tablename = "shorttable"
@@ -224,7 +224,7 @@ class MsSqlServerIntegrationSuite extends DockerJDBCIntegrationSuite {
     assert(colType(0) == "class java.lang.Short")
   }
 
-  test("Write tables with ByteType") {
+  test("SPARK-29644: Write tables with ByteType") {
     import testImplicits._
     val df = Seq(-127.toByte, 0.toByte, 1.toByte, 38.toByte, 128.toByte).toDF("a")
     val tablename = "bytetable"
diff --git a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MySQLIntegrationSuite.scala b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MySQLIntegrationSuite.scala
index e9465de6ad2d..1d6186daa499 100644
--- a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MySQLIntegrationSuite.scala
+++ b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MySQLIntegrationSuite.scala
@@ -182,7 +182,7 @@ class MySQLIntegrationSuite extends DockerJDBCIntegrationSuite {
     assert(sql("select x, y from queryOption").collect.toSet == expectedResult)
   }
 
-  test("Write tables with ShortType") {
+  test("SPARK-29644: Write tables with ShortType") {
     import testImplicits._
     val df = Seq(-32768.toShort, 0.toShort, 1.toShort, 38.toShort, 32768.toShort).toDF("a")
     val tablename = "shorttable"
@@ -203,7 +203,7 @@ class MySQLIntegrationSuite extends DockerJDBCIntegrationSuite {
     assert(colType(0) == "class java.lang.Short")
   }
 
-  test("Write tables with ByteType") {
+  test("SPARK-29644: Write tables with ByteType") {
     import testImplicits._
     val df = Seq(-127.toByte, 0.toByte, 1.toByte, 38.toByte, 128.toByte).toDF("a")
     val tablename = "bytetable"
diff --git a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/PostgresIntegrationSuite.scala b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/PostgresIntegrationSuite.scala
index d55a99ea3b39..0a1fae73232b 100644
--- a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/PostgresIntegrationSuite.scala
+++ b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/PostgresIntegrationSuite.scala
@@ -220,7 +220,7 @@ class PostgresIntegrationSuite extends DockerJDBCIntegrationSuite {
     assert(rows(0).getShort(1) === 2)
   }
 
-  test("Write tables with ShortType") {
+  test("SPARK-29644: Write tables with ShortType") {
     import testImplicits._
     val df = Seq(-32768.toShort, 0.toShort, 1.toShort, 38.toShort, 32768.toShort).toDF("a")
     val tablename = "shorttable"
@@ -241,7 +241,7 @@ class PostgresIntegrationSuite extends DockerJDBCIntegrationSuite {
     assert(colType(0) == "class java.lang.Short")
   }
 
-  test("Write tables with ByteType") {
+  test("SPARK-29644: Write tables with ByteType") {
     import testImplicits._
     val df = Seq(-127.toByte, 0.toByte, 1.toByte, 38.toByte, 128.toByte).toDF("a")
     val tablename = "bytetable"
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
index 715534b0458d..63b809eabf19 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
@@ -578,8 +578,8 @@ class JDBCSuite extends QueryTest
     assert(rows.length === 1)
     assert(rows(0).getInt(0) === 1)
     assert(rows(0).getBoolean(1) === false)
-    assert(rows(0).getInt(2) === 3)
-    assert(rows(0).getInt(3) === 4)
+    assert(rows(0).getByte(2) === 3.toByte)
+    assert(rows(0).getShort(3) === 4.toShort)
     assert(rows(0).getLong(4) === 1234567890123L)
   }
 
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCWriteSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCWriteSuite.scala
index 047562dfe7e5..f43776ad9198 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCWriteSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCWriteSuite.scala
@@ -574,7 +574,7 @@ class JDBCWriteSuite extends SharedSparkSession with BeforeAndAfter {
     }
   }
 
-  test("Write tables with ShortType") {
+  test("SPARK-29644: Write tables with ShortType") {
     import testImplicits._
     val df = Seq(-32768.toShort, 0.toShort, 1.toShort, 38.toShort, 32768.toShort).toDF("a")
     val tablename = "shorttable"
@@ -595,7 +595,7 @@ class JDBCWriteSuite extends SharedSparkSession with BeforeAndAfter {
     assert(colType(0) == "class java.lang.Short")
   }
 
-  test("Write tables with ByteType") {
+  test("SPARK-29644: Write tables with ByteType") {
     import testImplicits._
     val df = Seq(-127.toByte, 0.toByte, 1.toByte, 38.toByte, 128.toByte).toDF("a")
     val tablename = "bytetable"

From 06008946964cc7acb8df3ff781513e4577dd2116 Mon Sep 17 00:00:00 2001
From: shivsood
Date: Wed, 13 Nov 2019 14:08:04 -0800
Subject: [PATCH 5/5] Removed redundant test cases.
---
 .../sql/jdbc/MySQLIntegrationSuite.scala      | 42 ------------------
 .../sql/jdbc/PostgresIntegrationSuite.scala   | 42 ------------------
 2 files changed, 84 deletions(-)

diff --git a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MySQLIntegrationSuite.scala b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MySQLIntegrationSuite.scala
index 1d6186daa499..8401b0a8a752 100644
--- a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MySQLIntegrationSuite.scala
+++ b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/MySQLIntegrationSuite.scala
@@ -181,46 +181,4 @@ class MySQLIntegrationSuite extends DockerJDBCIntegrationSuite {
       """.stripMargin.replaceAll("\n", " "))
     assert(sql("select x, y from queryOption").collect.toSet == expectedResult)
   }
-
-  test("SPARK-29644: Write tables with ShortType") {
-    import testImplicits._
-    val df = Seq(-32768.toShort, 0.toShort, 1.toShort, 38.toShort, 32768.toShort).toDF("a")
-    val tablename = "shorttable"
-    df.write
-      .format("jdbc")
-      .mode("overwrite")
-      .option("url", jdbcUrl)
-      .option("dbtable", tablename)
-      .save()
-    val df2 = spark.read
-      .format("jdbc")
-      .option("url", jdbcUrl)
-      .option("dbtable", tablename)
-      .load()
-    assert(df.count == df2.count)
-    val rows = df2.collect()
-    val colType = rows(0).toSeq.map(x => x.getClass.toString)
-    assert(colType(0) == "class java.lang.Short")
-  }
-
-  test("SPARK-29644: Write tables with ByteType") {
-    import testImplicits._
-    val df = Seq(-127.toByte, 0.toByte, 1.toByte, 38.toByte, 128.toByte).toDF("a")
-    val tablename = "bytetable"
-    df.write
-      .format("jdbc")
-      .mode("overwrite")
-      .option("url", jdbcUrl)
-      .option("dbtable", tablename)
-      .save()
-    val df2 = spark.read
-      .format("jdbc")
-      .option("url", jdbcUrl)
-      .option("dbtable", tablename)
-      .load()
-    assert(df.count == df2.count)
-    val rows = df2.collect()
-    val colType = rows(0).toSeq.map(x => x.getClass.toString)
-    assert(colType(0) == "class java.lang.Byte")
-  }
 }
diff --git a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/PostgresIntegrationSuite.scala b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/PostgresIntegrationSuite.scala
index 0a1fae73232b..599f00def075 100644
--- a/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/PostgresIntegrationSuite.scala
+++ b/external/docker-integration-tests/src/test/scala/org/apache/spark/sql/jdbc/PostgresIntegrationSuite.scala
@@ -219,46 +219,4 @@ class PostgresIntegrationSuite extends DockerJDBCIntegrationSuite {
     assert(rows(0).getShort(0) === 1)
     assert(rows(0).getShort(1) === 2)
   }
-
-  test("SPARK-29644: Write tables with ShortType") {
-    import testImplicits._
-    val df = Seq(-32768.toShort, 0.toShort, 1.toShort, 38.toShort, 32768.toShort).toDF("a")
-    val tablename = "shorttable"
-    df.write
-      .format("jdbc")
-      .mode("overwrite")
-      .option("url", jdbcUrl)
-      .option("dbtable", tablename)
-      .save()
-    val df2 = spark.read
-      .format("jdbc")
-      .option("url", jdbcUrl)
-      .option("dbtable", tablename)
-      .load()
-    assert(df.count == df2.count)
-    val rows = df2.collect()
-    val colType = rows(0).toSeq.map(x => x.getClass.toString)
-    assert(colType(0) == "class java.lang.Short")
-  }
-
-  test("SPARK-29644: Write tables with ByteType") {
-    import testImplicits._
-    val df = Seq(-127.toByte, 0.toByte, 1.toByte, 38.toByte, 128.toByte).toDF("a")
-    val tablename = "bytetable"
-    df.write
-      .format("jdbc")
-      .mode("overwrite")
-      .option("url", jdbcUrl)
-      .option("dbtable", tablename)
-      .save()
-    val df2 = spark.read
-      .format("jdbc")
-      .option("url", jdbcUrl)
-      .option("dbtable", tablename)
-      .load()
-    assert(df.count == df2.count)
-    val rows = df2.collect()
-    val colType = rows(0).toSeq.map(x => x.getClass.toString)
-    assert(colType(0) == "class java.lang.Short")
-  }
 }
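
For reviewers: the net effect of this series on the Spark <-> JDBC type
mapping can be summarized in a minimal standalone sketch. This is not Spark's
actual JdbcUtils code; the object and method names below (TypeMappingSketch,
toJdbcTypeName, toCatalystType) are illustrative only, and only the two
java.sql.Types cases touched by the fix are modeled.

import java.sql.Types

// Illustrative sketch only; mirrors the corrected mapping, not JdbcUtils itself.
object TypeMappingSketch {
  sealed trait CatalystType
  case object ShortType extends CatalystType
  case object ByteType extends CatalystType

  // Write path (Catalyst -> JDBC): ShortType used to emit the DDL name
  // "INTEGER" and ByteType the non-standard "BYTE"; after the fix both emit
  // the SQL names matching their java.sql.Types constants.
  def toJdbcTypeName(t: CatalystType): (String, Int) = t match {
    case ShortType => ("SMALLINT", Types.SMALLINT)
    case ByteType => ("TINYINT", Types.TINYINT)
  }

  // Read path (JDBC -> Catalyst): SMALLINT and TINYINT used to widen to an
  // integer type; after the fix the narrow types survive a round trip.
  def toCatalystType(sqlType: Int): CatalystType = sqlType match {
    case Types.SMALLINT => ShortType
    case Types.TINYINT => ByteType
    case other => throw new IllegalArgumentException(s"not modeled: $other")
  }

  def main(args: Array[String]): Unit = {
    for (t <- Seq(ShortType, ByteType)) {
      val (ddlName, jdbcCode) = toJdbcTypeName(t)
      assert(toCatalystType(jdbcCode) == t) // round trip preserves the narrow type
      println(s"$t <-> $ddlName")
    }
  }
}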