diff --git a/sql/catalyst/src/main/antlr3/org/apache/spark/sql/catalyst/parser/SparkSqlParser.g b/sql/catalyst/src/main/antlr3/org/apache/spark/sql/catalyst/parser/SparkSqlParser.g
index 2c13d3056f46..3cd31e11ebdd 100644
--- a/sql/catalyst/src/main/antlr3/org/apache/spark/sql/catalyst/parser/SparkSqlParser.g
+++ b/sql/catalyst/src/main/antlr3/org/apache/spark/sql/catalyst/parser/SparkSqlParser.g
@@ -651,7 +651,7 @@ import java.util.HashMap;
     return false;
   }
   private CommonTree throwSetOpException() throws RecognitionException {
-    throw new FailedPredicateException(input, "orderByClause clusterByClause distributeByClause sortByClause limitClause can only be applied to the whole union.", "");
+    throw new FailedPredicateException(input, "orderByClause clusterByClause distributeByClause sortByClause can only be applied to the whole union.", "");
   }
   private CommonTree throwColumnNameException() throws RecognitionException {
     throw new FailedPredicateException(input, Arrays.toString(excludedCharForColumnName) + " can not be used in column name in create table statement.", "");
@@ -2250,7 +2250,7 @@ selectStatement[boolean topLevel]
    (set=setOpSelectStatement[$selectStatement.tree, topLevel])?
    -> {set == null}?
       {$selectStatement.tree}
-   -> {o==null && c==null && d==null && sort==null && l==null}?
+   -> {o==null && c==null && d==null && sort==null}?
       {$set.tree}
    -> {throwSetOpException()}
    ;
@@ -2322,8 +2322,9 @@ simpleSelectStatement
    groupByClause?
    havingClause?
    ((window_clause) => window_clause)?
+   ((limitClause) => limitClause)?
    -> ^(TOK_QUERY fromClause? ^(TOK_INSERT ^(TOK_DESTINATION ^(TOK_DIR TOK_TMP_FILE))
-                     selectClause whereClause? groupByClause? havingClause? window_clause?))
+                     selectClause whereClause? groupByClause? havingClause? window_clause? limitClause?))
    ;
 
 selectStatementWithCTE
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/CatalystQlSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/CatalystQlSuite.scala
index d7204c348831..84dc9ac59649 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/CatalystQlSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/CatalystQlSuite.scala
@@ -17,7 +17,10 @@
 
 package org.apache.spark.sql.catalyst
 
+import org.apache.spark.sql.catalyst.analysis._
+import org.apache.spark.sql.catalyst.expressions.Literal
 import org.apache.spark.sql.catalyst.plans.PlanTest
+import org.apache.spark.sql.catalyst.plans.logical.{Limit, Project, Subquery, Union}
 
 class CatalystQlSuite extends PlanTest {
   val parser = new CatalystQl()
@@ -49,4 +52,40 @@ class CatalystQlSuite extends PlanTest {
     parser.createPlan("select sum(product + 1) over (partition by (product + (1)) order by 2) " +
       "from windowData")
   }
+
+  test("limit clause: a support in set operation") {
+    val plan1 = parser.createPlan("select key from (select * from t1) x limit 1")
+    val correctPlan1 =
+      Limit (Literal(1),
+        Project(Seq(UnresolvedAlias(UnresolvedAttribute("key"))),
+          Subquery("x",
+            Project(Seq(UnresolvedAlias(UnresolvedStar(None))),
+              UnresolvedRelation(TableIdentifier("t1"))))))
+    comparePlans(plan1, correctPlan1)
+
+    val plan2 = parser.createPlan("select key from (select * from t1 limit 2) x limit 1")
+    val correctPlan2 =
+      Limit (Literal(1),
+        Project(Seq(UnresolvedAlias(UnresolvedAttribute("key"))),
+          Subquery("x",
+            Limit (Literal(2),
+              Project(Seq(UnresolvedAlias(UnresolvedStar(None))),
+                UnresolvedRelation(TableIdentifier("t1")))))))
+    comparePlans(plan2, correctPlan2)
+
+    val plan3 = parser.createPlan("select key from ((select * from testData limit 1) " +
+      "union all (select * from testData limit 1)) x limit 1")
+    val correctPlan3 =
+      Limit (Literal(1),
+        Project(Seq(UnresolvedAlias(UnresolvedAttribute("key"))),
+          Subquery("x",
+            Union(
+              Limit (Literal(1),
+                Project(Seq(UnresolvedAlias(UnresolvedStar(None))),
+                  UnresolvedRelation(TableIdentifier("testData")))),
+              Limit (Literal(1),
+                Project(Seq(UnresolvedAlias(UnresolvedStar(None))),
+                  UnresolvedRelation(TableIdentifier("testData")))))))))
+    comparePlans(plan3, correctPlan3)
+  }
 }