From 865e0af572edad7fd775c25e317055ffa0df2a08 Mon Sep 17 00:00:00 2001
From: Yuming Wang
Date: Sun, 9 Sep 2018 12:22:29 +0800
Subject: [PATCH] Fix InferFiltersFromConstraintsSuite test error

---
 .../catalyst/optimizer/InferFiltersFromConstraintsSuite.scala | 2 +-
 .../src/test/scala/org/apache/spark/sql/DataFrameSuite.scala  | 1 -
 2 files changed, 1 insertion(+), 2 deletions(-)

diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/InferFiltersFromConstraintsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/InferFiltersFromConstraintsSuite.scala
index e4671f0d1cce6..a40ba2dc38b70 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/InferFiltersFromConstraintsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/InferFiltersFromConstraintsSuite.scala
@@ -196,7 +196,7 @@ class InferFiltersFromConstraintsSuite extends PlanTest {
 
   test("constraints should be inferred from aliased literals") {
     val originalLeft = testRelation.subquery('left).as("left")
-    val optimizedLeft = testRelation.subquery('left).where(IsNotNull('a) && 'a === 2).as("left")
+    val optimizedLeft = testRelation.subquery('left).where(IsNotNull('a) && 'a <=> 2).as("left")
 
     val right = Project(Seq(Literal(2).as("two")), testRelation.subquery('right)).as("right")
     val condition = Some("left.a".attr === "right.two".attr)
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
index 139d3021bfa19..435b887cb3c78 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
@@ -2553,7 +2553,6 @@ class DataFrameSuite extends QueryTest with SharedSQLContext {
   }
 
   test("SPARK-25368 Incorrect predicate pushdown returns wrong result") {
-    // This test must disable ConvertToLocalRelation in the test cases, see: SPARK-25267
     def check(newCol: Column, filter: Column, result: Seq[Row]): Unit = {
       val df1 = spark.createDataFrame(Seq(
         (1, 1)
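
Note (illustrative, not part of the patch): the expected plan changes from
'a === 2 (EqualTo) to 'a <=> 2 (EqualNullSafe) because the two operators
differ only in how they treat NULL: NULL === x evaluates to NULL, while
NULL <=> x evaluates to false, and NULL <=> NULL evaluates to true. Below
is a minimal sketch of that difference at the DataFrame level, assuming a
local SparkSession; the session setup and sample data are my own, not
taken from the patch.

  import org.apache.spark.sql.SparkSession

  val spark = SparkSession.builder()
    .master("local[*]")
    .appName("null-safe-eq-demo")
    .getOrCreate()
  import spark.implicits._

  // One row where both columns are 1, one row where both are NULL.
  val df = Seq((Some(1), Some(1)), (None, None)).toDF("x", "y")

  df.select(
    ($"x" === $"y").as("eq"),        // EqualTo: NULL === NULL yields NULL
    ($"x" <=> $"y").as("eqNullSafe") // EqualNullSafe: NULL <=> NULL yields true
  ).show()
  // +----+----------+
  // |  eq|eqNullSafe|
  // +----+----------+
  // |true|      true|
  // |null|      true|
  // +----+----------+

Because <=> never evaluates to NULL, a filter inferred from a constraint
can use it without changing which rows survive when the optimizer rewrites
the plan, which is why the expected optimized plan in the test is updated.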