diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/PlanParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/PlanParserSuite.scala
index 23f05ce84667c..906acc1292006 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/PlanParserSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/PlanParserSuite.scala
@@ -17,6 +17,8 @@
 package org.apache.spark.sql.catalyst.parser
 
 import org.apache.spark.sql.Row
+import org.apache.spark.sql.catalyst.TableIdentifier
+import org.apache.spark.sql.catalyst.analysis._
 import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.catalyst.plans._
 import org.apache.spark.sql.catalyst.plans.logical._
@@ -426,4 +428,68 @@ class PlanParserSuite extends PlanTest {
       "Number of aliases must match the number of fields in an inline table.")
     intercept[ArrayIndexOutOfBoundsException](parsePlan("values (1, 'a'), (2, 'b', 5Y)"))
   }
+
+  test("nesting UNION") {
+    val parsed = parsePlan(
+      """
+        |SELECT `u_1`.`id` FROM (((SELECT `t0`.`id` FROM `default`.`t0`)
+        |UNION ALL (SELECT `t0`.`id` FROM `default`.`t0`)) UNION ALL
+        |(SELECT `t0`.`id` FROM `default`.`t0`)) AS u_1
+      """.stripMargin)
+
+    val expected = Project(
+      "u_1.id".attr :: Nil,
+      SubqueryAlias("u_1",
+        Union(
+          Union(
+            Project(
+              "t0.id".attr :: Nil,
+              UnresolvedRelation(TableIdentifier("t0", Some("default")), None)),
+            Project(
+              "t0.id".attr :: Nil,
+              UnresolvedRelation(TableIdentifier("t0", Some("default")), None))),
+          Project(
+            "t0.id".attr :: Nil,
+            UnresolvedRelation(TableIdentifier("t0", Some("default")), None)))))
+
+    comparePlans(parsed, expected)
+
+    val parsedSame = parsePlan(
+      """
+        |SELECT `u_1`.`id` FROM ((SELECT `t0`.`id` FROM `default`.`t0`)
+        |UNION ALL (SELECT `t0`.`id` FROM `default`.`t0`) UNION ALL
+        |(SELECT `t0`.`id` FROM `default`.`t0`)) AS u_1
+      """.stripMargin)
+
+    comparePlans(parsedSame, expected)
+
+    val parsed2 = parsePlan(
+      """
+        |SELECT `u_1`.`id` FROM ((((SELECT `t0`.`id` FROM `default`.`t0`)
+        |UNION ALL (SELECT `t0`.`id` FROM `default`.`t0`)) UNION ALL
+        |(SELECT `t0`.`id` FROM `default`.`t0`))
+        |UNION ALL (SELECT `t0`.`id` FROM `default`.`t0`)) AS u_1
+      """.stripMargin)
+
+    val expected2 = Project(
+      "u_1.id".attr :: Nil,
+      SubqueryAlias("u_1",
+        Union(
+          Union(
+            Union(
+              Project(
+                "t0.id".attr :: Nil,
+                UnresolvedRelation(TableIdentifier("t0", Some("default")), None)),
+              Project(
+                "t0.id".attr :: Nil,
+                UnresolvedRelation(TableIdentifier("t0", Some("default")), None))),
+            Project(
+              "t0.id".attr :: Nil,
+              UnresolvedRelation(TableIdentifier("t0", Some("default")), None))),
+          Project(
+            "t0.id".attr :: Nil,
+            UnresolvedRelation(TableIdentifier("t0", Some("default")), None)))))
+
+    comparePlans(parsed2, expected2)
+  }
 }
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
index 79774f5913900..9f34729271756 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
@@ -1114,6 +1114,91 @@ class HiveQuerySuite extends HiveComparisonTest with BeforeAndAfter {
     }
   }
 
+  test("nested union") {
+    sql(
+      """
+        | EXPLAIN
+        | SELECT count(1) FROM (
+        | SELECT key, value FROM src UNION ALL
+        | SELECT key, value FROM src UNION ALL
+        | SELECT key, value FROM src UNION ALL
+        | SELECT key, value FROM src UNION ALL
+        | SELECT key, value FROM src UNION ALL
+        |
+        | SELECT key, value FROM src UNION ALL
+        | SELECT key, value FROM src UNION ALL
+        | SELECT key, value FROM src UNION ALL
+        | SELECT key, value FROM src UNION ALL
+        | SELECT key, value FROM src UNION ALL
+        |
+        | SELECT key, value FROM src UNION ALL
+        | SELECT key, value FROM src UNION ALL
+        | SELECT key, value FROM src UNION ALL
+        | SELECT key, value FROM src UNION ALL
+        | SELECT key, value FROM src UNION ALL
+        |
+        | SELECT key, value FROM src UNION ALL
+        | SELECT key, value FROM src UNION ALL
+        | SELECT key, value FROM src UNION ALL
+        | SELECT key, value FROM src UNION ALL
+        | SELECT key, value FROM src UNION ALL
+        |
+        | SELECT key, value FROM src UNION ALL
+        | SELECT key, value FROM src UNION ALL
+        | SELECT key, value FROM src UNION ALL
+        | SELECT key, value FROM src UNION ALL
+        | SELECT key, value FROM src) src
+      """.stripMargin)
+
+    val countForSrc = sql("SELECT count(1) FROM src").first()
+
+    val countForUnion25Src = sql(
+      """
+        | SELECT count(1) FROM (
+        | SELECT key, value FROM src UNION ALL
+        | SELECT key, value FROM src UNION ALL
+        | SELECT key, value FROM src UNION ALL
+        | SELECT key, value FROM src UNION ALL
+        | SELECT key, value FROM src UNION ALL
+        |
+        | SELECT key, value FROM src UNION ALL
+        | SELECT key, value FROM src UNION ALL
+        | SELECT key, value FROM src UNION ALL
+        | SELECT key, value FROM src UNION ALL
+        | SELECT key, value FROM src UNION ALL
+        |
+        | SELECT key, value FROM src UNION ALL
+        | SELECT key, value FROM src UNION ALL
+        | SELECT key, value FROM src UNION ALL
+        | SELECT key, value FROM src UNION ALL
+        | SELECT key, value FROM src UNION ALL
+        |
+        | SELECT key, value FROM src UNION ALL
+        | SELECT key, value FROM src UNION ALL
+        | SELECT key, value FROM src UNION ALL
+        | SELECT key, value FROM src UNION ALL
+        | SELECT key, value FROM src UNION ALL
+        |
+        | SELECT key, value FROM src UNION ALL
+        | SELECT key, value FROM src UNION ALL
+        | SELECT key, value FROM src UNION ALL
+        | SELECT key, value FROM src UNION ALL
+        | SELECT key, value FROM src) src
+      """.stripMargin).first()
+
+    assert(countForSrc.getLong(0) == 500)
+    assert(countForUnion25Src.getLong(0) == 500 * 25)
+
+    val nested = sql(
+      """
+        | SELECT u_1.key FROM (((SELECT key FROM src)
+        | UNION ALL (SELECT key FROM src)) UNION ALL
+        | (SELECT key FROM src)) AS u_1
+      """.stripMargin).collect()
+
+    assert(nested.size == 500 * 3)
+  }
+
   test("parse HQL set commands") {
     // Adapted from its SQL counterpart.
     val testKey = "spark.sql.key.usedfortestonly"