@@ -0,0 +1,90 @@
-- Automatically generated by SQLQueryTestSuite
-- !query
CREATE TABLE t1(col1 INT, col2 STRING)
-- !query analysis
CreateDataSourceTableCommand `spark_catalog`.`default`.`t1`, false


-- !query
CREATE TABLE t2(col1 STRUCT<a: STRING>, a STRING)
-- !query analysis
CreateDataSourceTableCommand `spark_catalog`.`default`.`t2`, false


-- !query
SELECT LEN(LOWER('X')) AS a, 1 AS b, b AS c GROUP BY LOWER('X') ORDER BY LOWER('X')
-- !query analysis
Project [a#x, b#x, c#x]
+- Sort [lower(X)#x ASC NULLS FIRST], true
   +- Project [len(lower(X)#x) AS a#x, b#x, b#x AS c#x, lower(X)#x AS lower(X)#x]
      +- Project [lower(X)#x, 1 AS b#x]
         +- Aggregate [lower(X)], [lower(X) AS lower(X)#x]
            +- OneRowRelation


-- !query
SELECT LEN(LOWER('X')) AS a, 1 AS b, b AS c GROUP BY LOWER('X') HAVING LOWER('X') = 'x'
-- !query analysis
Project [a#x, b#x, c#x]
+- Filter (lower(X)#x = x)
   +- Project [len(lower(X)#x) AS a#x, b#x, b#x AS c#x, lower(X)#x AS lower(X)#x]
      +- Project [lower(X)#x, 1 AS b#x]
         +- Aggregate [lower(X)], [lower(X) AS lower(X)#x]
            +- OneRowRelation


-- !query
SELECT col1.field, field FROM VALUES(named_struct('field', 1))
-- !query analysis
Project [field#x, field#x]
+- Project [col1#x, col1#x.field AS field#x]
   +- LocalRelation [col1#x]


-- !query
SELECT col1.field, field FROM VALUES(map('field', 1))
-- !query analysis
Project [field#x, field#x]
+- Project [col1#x, col1#x[field] AS field#x]
   +- LocalRelation [col1#x]


-- !query
SELECT COUNT(col1) as alias, SUM(col1) + alias FROM t1 GROUP BY ALL
-- !query analysis
Project [alias#xL, (sum(col1)#xL + alias#xL) AS (sum(col1) + lateralAliasReference(alias))#xL]
+- Project [count(col1)#xL, sum(col1)#xL, count(col1)#xL AS alias#xL]
   +- Aggregate [count(col1#x) AS count(col1)#xL, sum(col1#x) AS sum(col1)#xL]
      +- SubqueryAlias spark_catalog.default.t1
         +- Relation spark_catalog.default.t1[col1#x,col2#x] parquet


-- !query
SELECT COUNT(col1) as alias, SUM(col1) + alias, SUM(col1) + col1 FROM t1 GROUP BY ALL
-- !query analysis
org.apache.spark.sql.catalyst.ExtendedAnalysisException
{
  "errorClass" : "UNRESOLVED_ALL_IN_GROUP_BY",
  "sqlState" : "42803",
  "queryContext" : [ {
    "objectType" : "",
    "objectName" : "",
    "startIndex" : 74,
    "stopIndex" : 85,
    "fragment" : "GROUP BY ALL"
  } ]
}


-- !query
DROP TABLE t1
-- !query analysis
DropTable false, false
+- ResolvedIdentifier V2SessionCatalog(spark_catalog), default.t1


-- !query
DROP TABLE t2
-- !query analysis
DropTable false, false
+- ResolvedIdentifier V2SessionCatalog(spark_catalog), default.t2
@@ -1136,6 +1136,14 @@ Project [try_validate_utf8(collate(utf8_binary#x, utf8_lcase_rtrim)) AS try_vali
   +- Relation spark_catalog.default.t1[s#x,utf8_binary#x,utf8_lcase#x] parquet


-- !query
SELECT CASE WHEN utf8_lcase = 'XX' THEN 'XX' ELSE utf8_lcase END FROM t1
-- !query analysis
Project [CASE WHEN (utf8_lcase#x = XX) THEN XX ELSE utf8_lcase#x END AS CASE WHEN (utf8_lcase = 'XX' collate UTF8_LCASE) THEN 'XX' collate UTF8_LCASE ELSE utf8_lcase END#x]
+- SubqueryAlias spark_catalog.default.t1
   +- Relation spark_catalog.default.t1[s#x,utf8_binary#x,utf8_lcase#x] parquet


-- !query
drop table t1
-- !query analysis
101 changes: 101 additions & 0 deletions sql/core/src/test/resources/sql-tests/analyzer-results/cte.sql.out
@@ -30,6 +30,12 @@ CreateViewCommand `t3`, select * from t, false, false, LocalTempView, UNSUPPORTE
+- LocalRelation [id#x]


-- !query
create table t4(col1 TIMESTAMP)
-- !query analysis
CreateDataSourceTableCommand `spark_catalog`.`default`.`t4`, false


-- !query
WITH s AS (SELECT 1 FROM s) SELECT * FROM s
-- !query analysis
@@ -1031,6 +1037,94 @@ WithCTE
+- CTERelationRef xxxx, true, [1#x], false, false, 1


-- !query
SELECT * FROM (
WITH cte1 AS (SELECT * FROM t4) SELECT t4.col1 FROM t4 JOIN cte1 USING (col1)
)
-- !query analysis
Project [col1#x]
+- SubqueryAlias __auto_generated_subquery_name
   +- WithCTE
      :- CTERelationDef xxxx, false
      :  +- SubqueryAlias cte1
      :     +- Project [col1#x]
      :        +- SubqueryAlias spark_catalog.default.t4
      :           +- Relation spark_catalog.default.t4[col1#x] parquet
      +- Project [col1#x]
         +- Project [col1#x]
            +- Join Inner, (col1#x = col1#x)
               :- SubqueryAlias spark_catalog.default.t4
               :  +- Relation spark_catalog.default.t4[col1#x] parquet
               +- SubqueryAlias cte1
                  +- CTERelationRef xxxx, true, [col1#x], false, false


-- !query
SELECT * FROM (
WITH cte1 AS (SELECT * FROM t4) SELECT cte1.col1 FROM t4 JOIN cte1 USING (col1)
)
-- !query analysis
Project [col1#x]
+- SubqueryAlias __auto_generated_subquery_name
   +- WithCTE
      :- CTERelationDef xxxx, false
      :  +- SubqueryAlias cte1
      :     +- Project [col1#x]
      :        +- SubqueryAlias spark_catalog.default.t4
      :           +- Relation spark_catalog.default.t4[col1#x] parquet
      +- Project [col1#x]
         +- Project [col1#x, col1#x]
            +- Join Inner, (col1#x = col1#x)
               :- SubqueryAlias spark_catalog.default.t4
               :  +- Relation spark_catalog.default.t4[col1#x] parquet
               +- SubqueryAlias cte1
                  +- CTERelationRef xxxx, true, [col1#x], false, false


-- !query
SELECT * FROM (
WITH cte1 AS (SELECT * FROM t4) SELECT t4.col1 FROM cte1 JOIN t4 USING (col1)
)
-- !query analysis
Project [col1#x]
+- SubqueryAlias __auto_generated_subquery_name
   +- WithCTE
      :- CTERelationDef xxxx, false
      :  +- SubqueryAlias cte1
      :     +- Project [col1#x]
      :        +- SubqueryAlias spark_catalog.default.t4
      :           +- Relation spark_catalog.default.t4[col1#x] parquet
      +- Project [col1#x]
         +- Project [col1#x, col1#x]
            +- Join Inner, (col1#x = col1#x)
               :- SubqueryAlias cte1
               :  +- CTERelationRef xxxx, true, [col1#x], false, false
               +- SubqueryAlias spark_catalog.default.t4
                  +- Relation spark_catalog.default.t4[col1#x] parquet


-- !query
SELECT * FROM (
WITH cte1 AS (SELECT * FROM t4) SELECT cte1.col1 FROM cte1 JOIN t4 USING (col1)
)
-- !query analysis
Project [col1#x]
+- SubqueryAlias __auto_generated_subquery_name
   +- WithCTE
      :- CTERelationDef xxxx, false
      :  +- SubqueryAlias cte1
      :     +- Project [col1#x]
      :        +- SubqueryAlias spark_catalog.default.t4
      :           +- Relation spark_catalog.default.t4[col1#x] parquet
      +- Project [col1#x]
         +- Project [col1#x]
            +- Join Inner, (col1#x = col1#x)
               :- SubqueryAlias cte1
               :  +- CTERelationRef xxxx, true, [col1#x], false, false
               +- SubqueryAlias spark_catalog.default.t4
                  +- Relation spark_catalog.default.t4[col1#x] parquet


-- !query
DROP VIEW IF EXISTS t
-- !query analysis
@@ -1047,3 +1141,10 @@ DropTempViewCommand t2
DROP VIEW IF EXISTS t3
-- !query analysis
DropTempViewCommand t3


-- !query
DROP TABLE IF EXISTS t4
-- !query analysis
DropTable true, false
+- ResolvedIdentifier V2SessionCatalog(spark_catalog), default.t4
@@ -0,0 +1,40 @@
-- Automatically generated by SQLQueryTestSuite
-- !query
CREATE TABLE t1(col1 STRUCT<a: STRING>, a STRING)
-- !query analysis
CreateDataSourceTableCommand `spark_catalog`.`default`.`t1`, false


-- !query
SELECT col1.a, a FROM t1 ORDER BY a
-- !query analysis
Sort [a#x ASC NULLS FIRST], true
+- Project [col1#x.a AS a#x, a#x]
   +- SubqueryAlias spark_catalog.default.t1
      +- Relation spark_catalog.default.t1[col1#x,a#x] parquet


-- !query
SELECT col1.a, a FROM t1 ORDER BY col1.a
-- !query analysis
Project [a#x, a#x]
+- Sort [col1#x.a ASC NULLS FIRST], true
   +- Project [col1#x.a AS a#x, a#x, col1#x]
      +- SubqueryAlias spark_catalog.default.t1
         +- Relation spark_catalog.default.t1[col1#x,a#x] parquet


-- !query
SELECT split(col1, '-')[1] AS a FROM VALUES('a-b') ORDER BY split(col1, '-')[1]
-- !query analysis
Project [a#x]
+- Sort [split(col1#x, -, -1)[1] ASC NULLS FIRST], true
   +- Project [split(col1#x, -, -1)[1] AS a#x, col1#x]
      +- LocalRelation [col1#x]


-- !query
DROP TABLE t1
-- !query analysis
DropTable false, false
+- ResolvedIdentifier V2SessionCatalog(spark_catalog), default.t1
@@ -543,3 +543,24 @@ Aggregate [a#x], [1 AS a#x]
+- Project [a#x, b#x]
+- SubqueryAlias testData
+- LocalRelation [a#x, b#x]


-- !query
SELECT col1 AS a FROM VALUES (NAMED_STRUCT('f1', 1)) GROUP BY a.f1
-- !query analysis
org.apache.spark.sql.catalyst.ExtendedAnalysisException
{
  "errorClass" : "UNRESOLVED_COLUMN.WITH_SUGGESTION",
  "sqlState" : "42703",
  "messageParameters" : {
    "objectName" : "`a`.`f1`",
    "proposal" : "`col1`"
  },
  "queryContext" : [ {
    "objectType" : "",
    "objectName" : "",
    "startIndex" : 63,
    "stopIndex" : 66,
    "fragment" : "a.f1"
  } ]
}