diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CodeGenerationSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CodeGenerationSuite.scala
index c383eec3d56b4..5e8113ac8658e 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CodeGenerationSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CodeGenerationSuite.scala
@@ -346,6 +346,16 @@ class CodeGenerationSuite extends SparkFunSuite with ExpressionEvalHelper {
     projection(row)
   }

+  test("SPARK-22226: splitExpressions should not generate codes beyond 64KB") {
+    val colNumber = 10000
+    val attrs = (1 to colNumber).map(colIndex => AttributeReference(s"_$colIndex", IntegerType)())
+    val lit = Literal(1000)
+    val exprs = attrs.flatMap { a =>
+      Seq(If(lit < a, lit, a), sqrt(a))
+    }
+    UnsafeProjection.create(exprs, attrs)
+  }
+
   test("SPARK-22543: split large predicates into blocks due to JVM code size limit") {
     val length = 600
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
index 279b7b8d49f52..c0b277f76ae68 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
@@ -2408,18 +2408,6 @@ class DataFrameSuite extends QueryTest with SharedSQLContext {
       Seq(Row(7, 1, 1), Row(7, 1, 2), Row(7, 2, 1), Row(7, 2, 2), Row(7, 3, 1), Row(7, 3, 2)))
   }

-  test("SPARK-22226: splitExpressions should not generate codes beyond 64KB") {
-    val colNumber = 10000
-    val input = spark.range(2).rdd.map(_ => Row(1 to colNumber: _*))
-    val df = sqlContext.createDataFrame(input, StructType(
-      (1 to colNumber).map(colIndex => StructField(s"_$colIndex", IntegerType, false))))
-    val newCols = (1 to colNumber).flatMap { colIndex =>
-      Seq(expr(s"if(1000 < _$colIndex, 1000, _$colIndex)"),
-        expr(s"sqrt(_$colIndex)"))
-    }
-    df.select(newCols: _*).collect()
-  }
-
   test("SPARK-22271: mean overflows and returns null for some decimal variables") {
     val d = 0.034567890
     val df = Seq(d, d, d, d, d, d, d, d, d, d).toDF("DecimalCol")
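
The relocated CodeGenerationSuite test builds the projection directly from Catalyst expressions, so it exercises splitExpressions without the DataFrame/RDD plumbing the removed DataFrameSuite test needed. The standalone sketch below is not part of the patch: the object name, the main method, and the use of explicit constructors (LessThan, Sqrt) instead of the catalyst dsl helpers used in the test are illustrative assumptions; it only shows the same pattern in a self-contained form.

import org.apache.spark.sql.catalyst.expressions.{AttributeReference, If, LessThan, Literal, Sqrt, UnsafeProjection}
import org.apache.spark.sql.types.IntegerType

// Hypothetical wrapper object; only the body mirrors the new test.
object SplitExpressionsSketch {
  def main(args: Array[String]): Unit = {
    val colNumber = 10000
    // One IntegerType attribute per input column: _1 .. _10000.
    val attrs = (1 to colNumber).map(i => AttributeReference(s"_$i", IntegerType)())
    val lit = Literal(1000)
    // Two expressions per column produce enough generated Java code to force
    // splitExpressions to break the projection into multiple methods.
    val exprs = attrs.flatMap(a => Seq(If(LessThan(lit, a), lit, a), Sqrt(a)))
    // Compiling the projection is the whole check: it must succeed without
    // any generated method exceeding the JVM's 64KB bytecode limit.
    UnsafeProjection.create(exprs, attrs)
  }
}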