@@ -651,7 +651,7 @@ import java.util.HashMap;
     return false;
   }
   private CommonTree throwSetOpException() throws RecognitionException {
-    throw new FailedPredicateException(input, "orderByClause clusterByClause distributeByClause sortByClause limitClause can only be applied to the whole union.", "");
+    throw new FailedPredicateException(input, "orderByClause clusterByClause distributeByClause sortByClause can only be applied to the whole union.", "");
   }
   private CommonTree throwColumnNameException() throws RecognitionException {
     throw new FailedPredicateException(input, Arrays.toString(excludedCharForColumnName) + " can not be used in column name in create table statement.", "");
@@ -2250,7 +2250,7 @@ selectStatement[boolean topLevel]
    (set=setOpSelectStatement[$selectStatement.tree, topLevel])?
    -> {set == null}?
       {$selectStatement.tree}
-   -> {o==null && c==null && d==null && sort==null && l==null}?
+   -> {o==null && c==null && d==null && sort==null}?
       {$set.tree}
    -> {throwSetOpException()}
    ;
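Together with the grammar change in the next hunk, the set-operation guard here no longer requires l==null, so a LIMIT is no longer in the list of clauses that can only be applied to the whole union; orderByClause, clusterByClause, distributeByClause and sortByClause still are, matching the updated error message in the previous hunk. A minimal sketch of what is now accepted and what is still rejected, assuming the catalyst module on the classpath and a CatalystQl parser constructed exactly as in the test suite further down (the rejected form is only noted in a comment, not executed here):

import org.apache.spark.sql.catalyst.CatalystQl

object LimitInUnionSketch {
  def main(args: Array[String]): Unit = {
    val parser = new CatalystQl()

    // Accepted after this change: each union branch carries its own LIMIT and the
    // whole union has one as well (the same query is used for plan3 in CatalystQlSuite).
    val plan = parser.createPlan(
      "select key from ((select * from testData limit 1) " +
        "union all (select * from testData limit 1)) x limit 1")
    println(plan.treeString)

    // Still rejected through throwSetOpException: an ORDER BY attached to a single
    // branch, e.g. "select * from t1 order by key union all select * from t2";
    // ordering and clustering clauses may only be applied to the whole union.
  }
}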
@@ -2322,8 +2322,9 @@ simpleSelectStatement
    groupByClause?
    havingClause?
    ((window_clause) => window_clause)?
+   ((limitClause) => limitClause)?
    -> ^(TOK_QUERY fromClause? ^(TOK_INSERT ^(TOK_DESTINATION ^(TOK_DIR TOK_TMP_FILE))
-                     selectClause whereClause? groupByClause? havingClause? window_clause?))
+                     selectClause whereClause? groupByClause? havingClause? window_clause? limitClause?))
    ;
 
 selectStatementWithCTE
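With limitClause now parsed inside simpleSelectStatement and emitted under the per-query TOK_INSERT node, CatalystQl turns a query-level limit into a Limit node sitting directly above that query's Project. This is the plan shape the new tests in CatalystQlSuite (the next file in this diff) assert; a minimal sketch mirroring the inner subquery of correctPlan2 there:

import org.apache.spark.sql.catalyst.CatalystQl

object PerQueryLimitSketch {
  def main(args: Array[String]): Unit = {
    val parser = new CatalystQl()
    val plan = parser.createPlan("select * from t1 limit 2")
    // Expected shape (cf. the inner part of correctPlan2 below):
    //   Limit(Literal(2),
    //     Project(Seq(UnresolvedAlias(UnresolvedStar(None))),
    //       UnresolvedRelation(TableIdentifier("t1"))))
    println(plan.treeString)
  }
}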
@@ -17,7 +17,10 @@
 
 package org.apache.spark.sql.catalyst
 
+import org.apache.spark.sql.catalyst.analysis._
+import org.apache.spark.sql.catalyst.expressions.Literal
 import org.apache.spark.sql.catalyst.plans.PlanTest
+import org.apache.spark.sql.catalyst.plans.logical.{Limit, Project, Subquery, Union}
 
 class CatalystQlSuite extends PlanTest {
   val parser = new CatalystQl()
@@ -49,4 +52,40 @@ class CatalystQlSuite extends PlanTest {
     parser.createPlan("select sum(product + 1) over (partition by (product + (1)) order by 2) " +
       "from windowData")
   }
+
+  test("limit clause support in set operations") {
+    val plan1 = parser.createPlan("select key from (select * from t1) x limit 1")
+    val correctPlan1 =
+      Limit(Literal(1),
+        Project(Seq(UnresolvedAlias(UnresolvedAttribute("key"))),
+          Subquery("x",
+            Project(Seq(UnresolvedAlias(UnresolvedStar(None))),
+              UnresolvedRelation(TableIdentifier("t1"))))))
+    comparePlans(plan1, correctPlan1)
+
+    val plan2 = parser.createPlan("select key from (select * from t1 limit 2) x limit 1")
+    val correctPlan2 =
+      Limit(Literal(1),
+        Project(Seq(UnresolvedAlias(UnresolvedAttribute("key"))),
+          Subquery("x",
+            Limit(Literal(2),
+              Project(Seq(UnresolvedAlias(UnresolvedStar(None))),
+                UnresolvedRelation(TableIdentifier("t1")))))))
+    comparePlans(plan2, correctPlan2)
+
+    val plan3 = parser.createPlan("select key from ((select * from testData limit 1) " +
+      "union all (select * from testData limit 1)) x limit 1")
+    val correctPlan3 =
+      Limit(Literal(1),
+        Project(Seq(UnresolvedAlias(UnresolvedAttribute("key"))),
+          Subquery("x",
+            Union(
+              Limit(Literal(1),
+                Project(Seq(UnresolvedAlias(UnresolvedStar(None))),
+                  UnresolvedRelation(TableIdentifier("testData")))),
+              Limit(Literal(1),
+                Project(Seq(UnresolvedAlias(UnresolvedStar(None))),
+                  UnresolvedRelation(TableIdentifier("testData"))))))))
+    comparePlans(plan3, correctPlan3)
+  }
 }
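A possible follow-up, not part of this diff: a negative test asserting that the ordering clauses are still restricted to the whole union, which is what the updated FailedPredicateException message in the grammar continues to guard. An untested sketch of such a test, written against the same suite; the exact exception type is deliberately not asserted because it depends on how the parser reports the failed predicate:

  test("order by on a single union branch is still rejected") {
    // Hypothetical addition to CatalystQlSuite; the assumption is that the
    // FailedPredicateException raised by throwSetOpException surfaces as a
    // thrown exception from createPlan.
    intercept[Exception] {
      parser.createPlan("select * from t1 order by key union all select * from t2")
    }
  }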