23 changes: 19 additions & 4 deletions sql/core/src/main/scala/org/apache/spark/sql/jdbc/JDBCRDD.scala
@@ -294,7 +294,7 @@ private[sql] class JDBCRDD(
abstract class JDBCConversion
case object BooleanConversion extends JDBCConversion
case object DateConversion extends JDBCConversion
-case object DecimalConversion extends JDBCConversion
+case class DecimalConversion(precisionInfo: Option[(Int, Int)]) extends JDBCConversion
case object DoubleConversion extends JDBCConversion
case object FloatConversion extends JDBCConversion
case object IntegerConversion extends JDBCConversion
@@ -311,8 +311,8 @@ private[sql] class JDBCRDD(
schema.fields.map(sf => sf.dataType match {
case BooleanType => BooleanConversion
case DateType => DateConversion
-case DecimalType.Unlimited => DecimalConversion
-case DecimalType.Fixed(d) => DecimalConversion
+case DecimalType.Unlimited => DecimalConversion(None)
+case DecimalType.Fixed(d) => DecimalConversion(Some(d))
case DoubleType => DoubleConversion
case FloatType => FloatConversion
case IntegerType => IntegerConversion
@@ -369,7 +369,22 @@ private[sql] class JDBCRDD(
} else {
mutableRow.update(i, null)
}
-case DecimalConversion =>
+// When connecting to Oracle through JDBC, the precision and scale of the BigDecimal
+// returned by ResultSet.getBigDecimal do not always match the table schema reported by
+// ResultSetMetaData.getPrecision and ResultSetMetaData.getScale. For example, inserting
+// 19999 into a column of type NUMBER(12, 2) yields a BigDecimal with scale 0, while the
+// DataFrame schema correctly reports DecimalType(12, 2). Saving that DataFrame to a
+// Parquet file and reading it back then produces the wrong value 199.99. Therefore the
+// precision and scale of the Decimal must be set from the JDBC metadata.
+case DecimalConversion(Some((p, s))) =>
+val decimalVal = rs.getBigDecimal(pos)
+if (decimalVal == null) {
+mutableRow.update(i, null)
+} else {
+mutableRow.update(i, Decimal(decimalVal, p, s))
+}
+case DecimalConversion(None) =>
val decimalVal = rs.getBigDecimal(pos)
if (decimalVal == null) {
mutableRow.update(i, null)
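To see why carrying the precision and scale from the JDBC metadata matters, here is a small self-contained Scala sketch (not part of the patch; it uses plain java.math.BigDecimal and a made-up value rather than Spark's Decimal and a real Oracle connection). A driver can return 19999 from a NUMBER(12, 2) column as a BigDecimal with scale 0; rescaling it with the metadata scale preserves the value, whereas pairing the raw unscaled value with scale 2 later on yields the wrong result 199.99.

```scala
import java.math.{BigDecimal => JBigDecimal}

object DecimalScaleDemo {
  def main(args: Array[String]): Unit = {
    // What ResultSet.getBigDecimal may hand back for an Oracle NUMBER(12, 2)
    // column holding 19999: the numeric value is right, but the scale is 0.
    val fromJdbc = new JBigDecimal("19999")
    println(s"from driver:  $fromJdbc (scale = ${fromJdbc.scale})")

    // Precision and scale as reported by ResultSetMetaData for the column.
    val precision = 12
    val scale = 2

    // Applying the metadata scale keeps the value but fixes its representation:
    // unscaled value 1999900 with scale 2, i.e. 19999.00.
    val rescaled = fromJdbc.setScale(scale)
    println(s"rescaled:     $rescaled (unscaled = ${rescaled.unscaledValue})")
    assert(rescaled.precision <= precision)

    // If the original unscaled value 19999 were instead stored and later
    // reinterpreted with scale 2, the result would be the wrong value 199.99.
    val misScaled = new JBigDecimal(fromJdbc.unscaledValue, scale)
    println(s"mis-scaled:   $misScaled")
  }
}
```

In the patched branch, `Decimal(decimalVal, p, s)` plays the role of the rescaling step above: it pairs the driver-supplied value with the precision and scale reported by the JDBC metadata instead of trusting the scale on the returned BigDecimal.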