diff --git a/core/src/main/scala-2.12/org/apache/spark/util/OrderingUtil.scala b/core/src/main/scala-2.12/org/apache/spark/util/OrderingUtil.scala
deleted file mode 100644
index 193ae36f3c08..000000000000
--- a/core/src/main/scala-2.12/org/apache/spark/util/OrderingUtil.scala
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.spark.util
-
-/**
- * This class only exists to bridge the difference between Scala 2.12 and Scala 2.13's
- * support for floating-point ordering. It is implemented separately for both as there
- * is no method that exists in both for comparison.
- *
- * It functions like Ordering.Double in Scala 2.12.
- */
-private[spark] object OrderingUtil {
-
-  def compareDouble(x: Double, y: Double): Int = Ordering.Double.compare(x, y)
-
-  def compareFloat(x: Float, y: Float): Int = Ordering.Float.compare(x, y)
-
-}
diff --git a/core/src/main/scala-2.13/org/apache/spark/util/OrderingUtil.scala b/core/src/main/scala-2.13/org/apache/spark/util/OrderingUtil.scala
deleted file mode 100644
index e86150667270..000000000000
--- a/core/src/main/scala-2.13/org/apache/spark/util/OrderingUtil.scala
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.spark.util
-
-/**
- * This class only exists to bridge the difference between Scala 2.12 and Scala 2.13's
- * support for floating-point ordering. It is implemented separately for both as there
- * is no method that exists in both for comparison.
- *
- * It functions like Ordering.Double.TotalOrdering in Scala 2.13, which matches java.lang.Double
- * rather than Scala 2.12's Ordering.Double in handling of NaN.
- */
-private[spark] object OrderingUtil {
-
-  def compareDouble(x: Double, y: Double): Int = Ordering.Double.TotalOrdering.compare(x, y)
-
-  def compareFloat(x: Float, y: Float): Int = Ordering.Float.TotalOrdering.compare(x, y)
-
-}
diff --git a/core/src/main/scala/org/apache/spark/util/Utils.scala b/core/src/main/scala/org/apache/spark/util/Utils.scala
index 723fbdf73f8d..317d4b640742 100644
--- a/core/src/main/scala/org/apache/spark/util/Utils.scala
+++ b/core/src/main/scala/org/apache/spark/util/Utils.scala
@@ -1744,34 +1744,6 @@ private[spark] object Utils extends Logging {
     hashAbs
   }
 
-  /**
-   * NaN-safe version of `java.lang.Double.compare()` which allows NaN values to be compared
-   * according to semantics where NaN == NaN and NaN is greater than any non-NaN double.
-   */
-  def nanSafeCompareDoubles(x: Double, y: Double): Int = {
-    val xIsNan: Boolean = java.lang.Double.isNaN(x)
-    val yIsNan: Boolean = java.lang.Double.isNaN(y)
-    if ((xIsNan && yIsNan) || (x == y)) 0
-    else if (xIsNan) 1
-    else if (yIsNan) -1
-    else if (x > y) 1
-    else -1
-  }
-
-  /**
-   * NaN-safe version of `java.lang.Float.compare()` which allows NaN values to be compared
-   * according to semantics where NaN == NaN and NaN is greater than any non-NaN float.
-   */
-  def nanSafeCompareFloats(x: Float, y: Float): Int = {
-    val xIsNan: Boolean = java.lang.Float.isNaN(x)
-    val yIsNan: Boolean = java.lang.Float.isNaN(y)
-    if ((xIsNan && yIsNan) || (x == y)) 0
-    else if (xIsNan) 1
-    else if (yIsNan) -1
-    else if (x > y) 1
-    else -1
-  }
-
   /**
    * Returns the system properties map that is thread-safe to iterator over. It gets the
    * properties which have been set explicitly, as well as those for which only a default value
diff --git a/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala b/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
index 7e4a40b60aac..8f8902e497d4 100644
--- a/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
@@ -849,36 +849,6 @@ class UtilsSuite extends SparkFunSuite with ResetSystemProperties with Logging {
     assert(buffer.toString === "st circular test circular")
   }
 
-  test("nanSafeCompareDoubles") {
-    def shouldMatchDefaultOrder(a: Double, b: Double): Unit = {
-      assert(Utils.nanSafeCompareDoubles(a, b) === JDouble.compare(a, b))
-      assert(Utils.nanSafeCompareDoubles(b, a) === JDouble.compare(b, a))
-    }
-    shouldMatchDefaultOrder(0d, 0d)
-    shouldMatchDefaultOrder(0d, 1d)
-    shouldMatchDefaultOrder(Double.MinValue, Double.MaxValue)
-    assert(Utils.nanSafeCompareDoubles(Double.NaN, Double.NaN) === 0)
-    assert(Utils.nanSafeCompareDoubles(Double.NaN, Double.PositiveInfinity) === 1)
-    assert(Utils.nanSafeCompareDoubles(Double.NaN, Double.NegativeInfinity) === 1)
-    assert(Utils.nanSafeCompareDoubles(Double.PositiveInfinity, Double.NaN) === -1)
-    assert(Utils.nanSafeCompareDoubles(Double.NegativeInfinity, Double.NaN) === -1)
-  }
-
-  test("nanSafeCompareFloats") {
-    def shouldMatchDefaultOrder(a: Float, b: Float): Unit = {
-      assert(Utils.nanSafeCompareFloats(a, b) === JFloat.compare(a, b))
-      assert(Utils.nanSafeCompareFloats(b, a) === JFloat.compare(b, a))
-    }
-    shouldMatchDefaultOrder(0f, 0f)
-    shouldMatchDefaultOrder(1f, 1f)
-    shouldMatchDefaultOrder(Float.MinValue, Float.MaxValue)
-    assert(Utils.nanSafeCompareFloats(Float.NaN, Float.NaN) === 0)
-    assert(Utils.nanSafeCompareFloats(Float.NaN, Float.PositiveInfinity) === 1)
-    assert(Utils.nanSafeCompareFloats(Float.NaN, Float.NegativeInfinity) === 1)
-    assert(Utils.nanSafeCompareFloats(Float.PositiveInfinity, Float.NaN) === -1)
-    assert(Utils.nanSafeCompareFloats(Float.NegativeInfinity, Float.NaN) === -1)
-  }
-
   test("isDynamicAllocationEnabled") {
     val conf = new SparkConf()
     conf.set("spark.master", "yarn")
diff --git a/core/src/test/scala/org/apache/spark/util/collection/SorterSuite.scala b/core/src/test/scala/org/apache/spark/util/collection/SorterSuite.scala
index bd1921d7b770..bb03f0d3cdc2 100644
--- a/core/src/test/scala/org/apache/spark/util/collection/SorterSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/collection/SorterSuite.scala
@@ -23,7 +23,6 @@ import java.util.concurrent.TimeUnit
 
 import org.apache.spark.SparkFunSuite
 import org.apache.spark.internal.Logging
-import org.apache.spark.util.OrderingUtil
 import org.apache.spark.util.Utils.timeIt
 import org.apache.spark.util.random.XORShiftRandom
 
@@ -60,7 +59,7 @@ class SorterSuite extends SparkFunSuite with Logging {
     Arrays.sort(keys)
 
     new Sorter(new KVArraySortDataFormat[Double, Number])
-      .sort(keyValueArray, 0, keys.length, OrderingUtil.compareDouble)
+      .sort(keyValueArray, 0, keys.length, (x, y) => java.lang.Double.compare(x, y))
 
     keys.zipWithIndex.foreach { case (k, i) =>
       assert(k === keyValueArray(2 * i))
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodeGenerator.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodeGenerator.scala
index 7b8e92d665d0..88f3c9e92d64 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodeGenerator.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodeGenerator.scala
@@ -625,8 +625,8 @@ class CodegenContext extends Logging {
   def genComp(dataType: DataType, c1: String, c2: String): String = dataType match {
     // java boolean doesn't support > or < operator
     case BooleanType => s"($c1 == $c2 ? 0 : ($c1 ? 1 : -1))"
-    case DoubleType => s"org.apache.spark.util.Utils.nanSafeCompareDoubles($c1, $c2)"
-    case FloatType => s"org.apache.spark.util.Utils.nanSafeCompareFloats($c1, $c2)"
+    case DoubleType => s"java.lang.Double.compare($c1, $c2)"
+    case FloatType => s"java.lang.Float.compare($c1, $c2)"
     // use c1 - c2 may overflow
     case dt: DataType if isPrimitiveType(dt) => s"($c1 > $c2 ? 1 : $c1 < $c2 ? -1 : 0)"
-1 : 0)" case BinaryType => s"org.apache.spark.sql.catalyst.util.TypeUtils.compareBinary($c1, $c2)" diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DoubleType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DoubleType.scala index ba0fe34a8c28..8a5cf63b12ed 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DoubleType.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DoubleType.scala @@ -39,7 +39,7 @@ class DoubleType private() extends FractionalType { private[sql] val numeric = implicitly[Numeric[Double]] private[sql] val fractional = implicitly[Fractional[Double]] private[sql] val ordering = - (x: Double, y: Double) => Utils.nanSafeCompareDoubles(x, y) + (x: Double, y: Double) => java.lang.Double.compare(x, y) private[sql] val asIntegral = DoubleAsIfIntegral override private[sql] def exactNumeric = DoubleExactNumeric diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/FloatType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/FloatType.scala index 9bd216ceb6f3..738291eaf776 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/FloatType.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/FloatType.scala @@ -39,7 +39,7 @@ class FloatType private() extends FractionalType { private[sql] val numeric = implicitly[Numeric[Float]] private[sql] val fractional = implicitly[Fractional[Float]] private[sql] val ordering = - (x: Float, y: Float) => Utils.nanSafeCompareFloats(x, y) + (x: Float, y: Float) => java.lang.Float.compare(x, y) private[sql] val asIntegral = FloatAsIfIntegral override private[sql] def exactNumeric = FloatExactNumeric diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/numerics.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/numerics.scala index 71fed3e69b65..1ac85360f944 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/numerics.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/numerics.scala @@ -21,7 +21,6 @@ import scala.math.Numeric._ import scala.math.Ordering import org.apache.spark.sql.types.Decimal.DecimalIsConflicted -import org.apache.spark.util.OrderingUtil object ByteExactNumeric extends ByteIsIntegral with Ordering.ByteOrdering { private def checkOverflow(res: Int, x: Byte, y: Byte, op: String): Unit = { @@ -149,7 +148,7 @@ object FloatExactNumeric extends FloatIsFractional { } } - override def compare(x: Float, y: Float): Int = OrderingUtil.compareFloat(x, y) + override def compare(x: Float, y: Float): Int = java.lang.Float.compare(x, y) } object DoubleExactNumeric extends DoubleIsFractional { @@ -177,7 +176,7 @@ object DoubleExactNumeric extends DoubleIsFractional { } } - override def compare(x: Double, y: Double): Int = OrderingUtil.compareDouble(x, y) + override def compare(x: Double, y: Double): Int = java.lang.Double.compare(x, y) } object DecimalExactNumeric extends DecimalIsConflicted {