diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala
index 2d9bccc0854a3..a556ac9f12947 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala
@@ -219,6 +219,12 @@ abstract class BinaryArithmetic extends BinaryOperator
 
   protected val evalMode: EvalMode.Value
 
+  private lazy val internalDataType: DataType = (left.dataType, right.dataType) match {
+    case (DecimalType.Fixed(p1, s1), DecimalType.Fixed(p2, s2)) =>
+      resultDecimalType(p1, s1, p2, s2)
+    case _ => left.dataType
+  }
+
   protected def failOnError: Boolean = evalMode match {
     // The TRY mode executes as if it would fail on errors, except that it would capture the errors
     // and return null results.
@@ -234,11 +240,7 @@ case _ => super.checkInputDataTypes()
     case _ => super.checkInputDataTypes()
   }
 
-  override def dataType: DataType = (left.dataType, right.dataType) match {
-    case (DecimalType.Fixed(p1, s1), DecimalType.Fixed(p2, s2)) =>
-      resultDecimalType(p1, s1, p2, s2)
-    case _ => left.dataType
-  }
+  override def dataType: DataType = internalDataType
 
   // When `spark.sql.decimalOperations.allowPrecisionLoss` is set to true, if the precision / scale
   // needed are out of the range of available values, the scale is reduced up to 6, in order to