diff --git a/core/src/main/resources/error/error-classes.json b/core/src/main/resources/error/error-classes.json
index 9d35b1a1a69dd..c4b59799f88d5 100644
--- a/core/src/main/resources/error/error-classes.json
+++ b/core/src/main/resources/error/error-classes.json
@@ -78,7 +78,7 @@
   },
   "DIVIDE_BY_ZERO" : {
     "message" : [
-      "Division by zero. Use `try_divide` to tolerate divisor being 0 and return NULL instead. If necessary set <config> to \"false\" (except for ANSI interval type) to bypass this error."
+      "Division by zero. Use `try_divide` to tolerate divisor being 0 and return NULL instead. If necessary set <config> to \"false\" to bypass this error."
     ],
     "sqlState" : "22012"
   },
@@ -210,6 +210,12 @@
       "<message>"
     ]
   },
+  "INTERVAL_DIVIDED_BY_ZERO" : {
+    "message" : [
+      "Division by zero. Use `try_divide` to tolerate divisor being 0 and return NULL instead."
+    ],
+    "sqlState" : "22012"
+  },
   "INVALID_ARRAY_INDEX" : {
     "message" : [
       "The index <indexValue> is out of bounds. The array has <arraySize> elements. Use `try_element_at` and increase the array index by 1(the starting array index is 1 for `try_element_at`) to tolerate accessing element at invalid index and return NULL instead. If necessary set <config> to \"false\" to bypass this error."
diff --git a/core/src/test/scala/org/apache/spark/SparkThrowableSuite.scala b/core/src/test/scala/org/apache/spark/SparkThrowableSuite.scala
index 4b14e2402a73b..76d7e3048d79a 100644
--- a/core/src/test/scala/org/apache/spark/SparkThrowableSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkThrowableSuite.scala
@@ -168,7 +168,7 @@ class SparkThrowableSuite extends SparkFunSuite {
       "[DIVIDE_BY_ZERO] Division by zero. " +
       "Use `try_divide` to tolerate divisor being 0 and return NULL instead. " +
       "If necessary set foo to \"false\" " +
-      "(except for ANSI interval type) to bypass this error.")
+      "to bypass this error.")
   }
 
   test("Error message is formatted") {
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/intervalExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/intervalExpressions.scala
index 0a275d0760f61..17a2714c61188 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/intervalExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/intervalExpressions.scala
@@ -617,18 +617,22 @@ trait IntervalDivide {
       num: Any,
       context: Option[SQLQueryContext]): Unit = dataType match {
     case _: DecimalType =>
-      if (num.asInstanceOf[Decimal].isZero) throw QueryExecutionErrors.divideByZeroError(context)
-    case _ => if (num == 0) throw QueryExecutionErrors.divideByZeroError(context)
+      if (num.asInstanceOf[Decimal].isZero) {
+        throw QueryExecutionErrors.intervalDividedByZeroError(context)
+      }
+    case _ => if (num == 0) throw QueryExecutionErrors.intervalDividedByZeroError(context)
   }
 
   def divideByZeroCheckCodegen(
       dataType: DataType,
       value: String,
       errorContextReference: String): String = dataType match {
+    // scalastyle:off line.size.limit
     case _: DecimalType =>
-      s"if ($value.isZero()) throw QueryExecutionErrors.divideByZeroError($errorContextReference);"
+      s"if ($value.isZero()) throw QueryExecutionErrors.intervalDividedByZeroError($errorContextReference);"
     case _ =>
-      s"if ($value == 0) throw QueryExecutionErrors.divideByZeroError($errorContextReference);"
+      s"if ($value == 0) throw QueryExecutionErrors.intervalDividedByZeroError($errorContextReference);"
+    // scalastyle:on line.size.limit
   }
 }
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala
index de486157cbb9d..b4695062c085a 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala
@@ -733,7 +733,7 @@ object IntervalUtils {
    * @throws ArithmeticException if the result overflows any field value or divided by zero
    */
   def divideExact(interval: CalendarInterval, num: Double): CalendarInterval = {
-    if (num == 0) throw QueryExecutionErrors.divideByZeroError(None)
+    if (num == 0) throw QueryExecutionErrors.intervalDividedByZeroError(None)
     fromDoubles(interval.months / num, interval.days / num, interval.microseconds / num)
   }
 
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
index 80918a9d8ba9a..35a40ce684f35 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -204,6 +204,14 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase {
       summary = getSummary(context))
   }
 
+  def intervalDividedByZeroError(context: Option[SQLQueryContext]): ArithmeticException = {
+    new SparkArithmeticException(
+      errorClass = "INTERVAL_DIVIDED_BY_ZERO",
+      messageParameters = Array.empty,
+      context = context,
+      summary = getSummary(context))
+  }
+
   def invalidArrayIndexError(
       index: Int,
       numElements: Int,
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
index d79cc37ab78df..e96ab297d2af2 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
@@ -225,7 +225,7 @@ select interval '2 seconds' / 0
 struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
-[DIVIDE_BY_ZERO] Division by zero. Use `try_divide` to tolerate divisor being 0 and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" (except for ANSI interval type) to bypass this error.
+[INTERVAL_DIVIDED_BY_ZERO] Division by zero. Use `try_divide` to tolerate divisor being 0 and return NULL instead.
 == SQL(line 1, position 8) ==
 select interval '2 seconds' / 0
        ^^^^^^^^^^^^^^^^^^^^^^^^
@@ -261,7 +261,7 @@ select interval '2' year / 0
 struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
-[DIVIDE_BY_ZERO] Division by zero. Use `try_divide` to tolerate divisor being 0 and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" (except for ANSI interval type) to bypass this error.
+[INTERVAL_DIVIDED_BY_ZERO] Division by zero. Use `try_divide` to tolerate divisor being 0 and return NULL instead.
 == SQL(line 1, position 8) ==
 select interval '2' year / 0
        ^^^^^^^^^^^^^^^^^^^^^
diff --git a/sql/core/src/test/resources/sql-tests/results/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/interval.sql.out
index 652e1e724b9a8..53172283d1245 100644
--- a/sql/core/src/test/resources/sql-tests/results/interval.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/interval.sql.out
@@ -201,7 +201,7 @@ select interval '2 seconds' / 0
 struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
-[DIVIDE_BY_ZERO] Division by zero. Use `try_divide` to tolerate divisor being 0 and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" (except for ANSI interval type) to bypass this error.
If necessary set "spark.sql.ansi.enabled" to "false" (except for ANSI interval type) to bypass this error. +[INTERVAL_DIVIDED_BY_ZERO] Division by zero. Use `try_divide` to tolerate divisor being 0 and return NULL instead. == SQL(line 1, position 8) == select interval '2 seconds' / 0 ^^^^^^^^^^^^^^^^^^^^^^^^ @@ -237,7 +237,7 @@ select interval '2' year / 0 struct<> -- !query output org.apache.spark.SparkArithmeticException -[DIVIDE_BY_ZERO] Division by zero. Use `try_divide` to tolerate divisor being 0 and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" (except for ANSI interval type) to bypass this error. +[INTERVAL_DIVIDED_BY_ZERO] Division by zero. Use `try_divide` to tolerate divisor being 0 and return NULL instead. == SQL(line 1, position 8) == select interval '2' year / 0 ^^^^^^^^^^^^^^^^^^^^^ diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/int8.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/int8.sql.out index 8da1a1ca13956..664263ee8e736 100755 --- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/int8.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/int8.sql.out @@ -647,7 +647,7 @@ select bigint('9223372036854775800') / bigint('0') struct<> -- !query output org.apache.spark.SparkArithmeticException -[DIVIDE_BY_ZERO] Division by zero. Use `try_divide` to tolerate divisor being 0 and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" (except for ANSI interval type) to bypass this error. +[DIVIDE_BY_ZERO] Division by zero. Use `try_divide` to tolerate divisor being 0 and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 1, position 8) == select bigint('9223372036854775800') / bigint('0') ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -659,7 +659,7 @@ select bigint('-9223372036854775808') / smallint('0') struct<> -- !query output org.apache.spark.SparkArithmeticException -[DIVIDE_BY_ZERO] Division by zero. Use `try_divide` to tolerate divisor being 0 and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" (except for ANSI interval type) to bypass this error. +[DIVIDE_BY_ZERO] Division by zero. Use `try_divide` to tolerate divisor being 0 and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. == SQL(line 1, position 8) == select bigint('-9223372036854775808') / smallint('0') ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -671,7 +671,7 @@ select smallint('100') / bigint('0') struct<> -- !query output org.apache.spark.SparkArithmeticException -[DIVIDE_BY_ZERO] Division by zero. Use `try_divide` to tolerate divisor being 0 and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" (except for ANSI interval type) to bypass this error. +[DIVIDE_BY_ZERO] Division by zero. Use `try_divide` to tolerate divisor being 0 and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error. 
 == SQL(line 1, position 8) ==
 select smallint('100') / bigint('0')
        ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/select_having.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/select_having.sql.out
index 4b8945033dfef..87e0abb285d85 100644
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/select_having.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/select_having.sql.out
@@ -174,7 +174,7 @@ SELECT 1 AS one FROM test_having WHERE 1/a = 1 HAVING 1 < 2
 struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
-[DIVIDE_BY_ZERO] Division by zero. Use `try_divide` to tolerate divisor being 0 and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" (except for ANSI interval type) to bypass this error.
+[DIVIDE_BY_ZERO] Division by zero. Use `try_divide` to tolerate divisor being 0 and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
 == SQL(line 1, position 40) ==
 ...1 AS one FROM test_having WHERE 1/a = 1 HAVING 1 < 2
                                    ^^^
diff --git a/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-select_having.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-select_having.sql.out
index 5e7a9b96a82dd..a16887457c9df 100644
--- a/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-select_having.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-select_having.sql.out
@@ -174,7 +174,7 @@ SELECT 1 AS one FROM test_having WHERE 1/udf(a) = 1 HAVING 1 < 2
 struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
-[DIVIDE_BY_ZERO] Division by zero. Use `try_divide` to tolerate divisor being 0 and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" (except for ANSI interval type) to bypass this error.
+[DIVIDE_BY_ZERO] Division by zero. Use `try_divide` to tolerate divisor being 0 and return NULL instead. If necessary set "spark.sql.ansi.enabled" to "false" to bypass this error.
 == SQL(line 1, position 40) ==
 ...1 AS one FROM test_having WHERE 1/udf(a) = 1 HAVING 1 < 2
                                    ^^^^^^^^
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala
index 8d7359e449d74..36349c5e1f284 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala
@@ -48,7 +48,7 @@ class QueryExecutionAnsiErrorsSuite extends QueryTest with QueryErrorsSuiteBase
       msg =
         "Division by zero. Use `try_divide` to tolerate divisor being 0 and return NULL instead. " +
         "If necessary set " +
-        s"""$ansiConf to "false" (except for ANSI interval type) to bypass this error.""" +
+        s"""$ansiConf to "false" to bypass this error.""" +
        """
          |== SQL(line 1, position 8) ==
          |select 6/0
@@ -57,6 +57,16 @@ class QueryExecutionAnsiErrorsSuite extends QueryTest with QueryErrorsSuiteBase
     sqlState = Some("22012"))
   }
 
+  test("INTERVAL_DIVIDED_BY_ZERO: interval divided by zero") {
+    checkError(
+      exception = intercept[SparkArithmeticException] {
+        sql("select interval 1 day / 0").collect()
+      },
+      errorClass = "INTERVAL_DIVIDED_BY_ZERO",
+      parameters = Map.empty
+    )
+  }
+
   test("INVALID_FRACTION_OF_SECOND: in the function make_timestamp") {
     checkError(
       exception = intercept[SparkDateTimeException] {
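
For context, a minimal standalone sketch of the behavior this patch produces, assuming a local Spark build that includes the change; the object name and session setup below are illustrative, not part of the patch:

```scala
import org.apache.spark.SparkArithmeticException
import org.apache.spark.sql.SparkSession

object IntervalDivideByZeroCheck {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[1]").getOrCreate()

    // Dividing an ANSI interval by zero now raises INTERVAL_DIVIDED_BY_ZERO
    // regardless of spark.sql.ansi.enabled, so the message no longer suggests
    // disabling ANSI mode, which never bypassed this error for intervals.
    try {
      spark.sql("select interval '2' year / 0").collect()
    } catch {
      case e: SparkArithmeticException =>
        assert(e.getErrorClass == "INTERVAL_DIVIDED_BY_ZERO")
    }

    // `try_divide` remains the sanctioned escape hatch: it returns NULL.
    assert(spark.sql("select try_divide(interval '2' year, 0)").head().isNullAt(0))

    spark.stop()
  }
}
```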