@@ -177,7 +177,7 @@ package object expressions {
     // Collect matching attributes given a name and a lookup.
     def collectMatches(name: String, candidates: Option[Seq[Attribute]]): Seq[Attribute] = {
       candidates.toSeq.flatMap(_.collect {
-        case a if resolver(a.name, name) => a.withName(name)
+        case a if resolver(a.name, name) => a
       })
     }

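Note on the hunk above: a case-insensitively matched attribute no longer takes the case of the name used in the query (a.withName(name)); it keeps the case declared in the underlying relation's schema. A minimal REPL sketch of the difference, using stand-in types rather than Spark's Attribute and Resolver:

// Stand-in types for illustration only; not Spark's Attribute/Resolver.
case class Attr(name: String) {
  def withName(newName: String): Attr = copy(name = newName)
}
val resolver: (String, String) => Boolean = (a, b) => a.equalsIgnoreCase(b)
val candidates = Seq(Attr("col1"))

// Old behavior: the matched attribute was renamed to the query's case.
val renamed = candidates.collect { case a if resolver(a.name, "COL1") => a.withName("COL1") }
// New behavior: the matched attribute keeps the schema's case.
val kept = candidates.collect { case a if resolver(a.name, "COL1") => a }

assert(renamed.map(_.name) == Seq("COL1"))
assert(kept.map(_.name) == Seq("col1"))
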
@@ -87,7 +87,7 @@ class DataSourceV2AnalysisSuite extends AnalysisTest {
     val parsedPlan = AppendData.byName(table, query)
     val expectedPlan = AppendData.byName(table,
       Project(Seq(
-        Alias(Cast(toLower(X), FloatType, Some(conf.sessionLocalTimeZone)), "x")(),
+        Alias(Cast(X, FloatType, Some(conf.sessionLocalTimeZone)), "x")(),
         Alias(Cast(y, FloatType, Some(conf.sessionLocalTimeZone)), "y")()),
       query))

@@ -25,7 +25,7 @@ import org.apache.spark.sql.catalyst.plans.logical.{Command, LogicalPlan}
 import org.apache.spark.sql.execution.SparkPlan
 import org.apache.spark.sql.execution.datasources.BasicWriteJobStatsTracker
 import org.apache.spark.sql.execution.datasources.FileFormatWriter
-import org.apache.spark.sql.execution.metric.{SQLMetric, SQLMetrics}
+import org.apache.spark.sql.execution.metric.SQLMetric
 import org.apache.spark.util.SerializableConfiguration
 
 /**

@@ -102,7 +102,7 @@ struct<col1:int,col2:int,col3:int,sum_col2:bigint>
 -- !query 6
 SELECT COL1, COL2, COL3 FROM spark_10747 ORDER BY COL3 ASC NULLS FIRST, COL2
 -- !query 6 schema
-struct<COL1:int,COL2:int,COL3:int>
+struct<col1:int,col2:int,col3:int>
 -- !query 6 output
 6 10 NULL
 6 13 NULL
@@ -118,7 +118,7 @@ struct<COL1:int,COL2:int,COL3:int>
 -- !query 7
 SELECT COL1, COL2, COL3 FROM spark_10747 ORDER BY COL3 NULLS LAST, COL2
 -- !query 7 schema
-struct<COL1:int,COL2:int,COL3:int>
+struct<col1:int,col2:int,col3:int>
 -- !query 7 output
 6 7 4
 6 11 4
@@ -134,7 +134,7 @@ struct<COL1:int,COL2:int,COL3:int>
 -- !query 8
 SELECT COL1, COL2, COL3 FROM spark_10747 ORDER BY COL3 DESC NULLS FIRST, COL2
 -- !query 8 schema
-struct<COL1:int,COL2:int,COL3:int>
+struct<col1:int,col2:int,col3:int>
 -- !query 8 output
 6 10 NULL
 6 13 NULL
@@ -150,7 +150,7 @@ struct<COL1:int,COL2:int,COL3:int>
 -- !query 9
 SELECT COL1, COL2, COL3 FROM spark_10747 ORDER BY COL3 DESC NULLS LAST, COL2
 -- !query 9 schema
-struct<COL1:int,COL2:int,COL3:int>
+struct<col1:int,col2:int,col3:int>
 -- !query 9 output
 6 9 10
 6 12 10

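These golden-file updates follow from the resolution change above: with spark.sql.caseSensitive left at its default (false), an upper-case SELECT list still resolves against the table's lower-case columns, but the result schema now reports the case from the table definition rather than from the query. A sketch for spark-shell; the actual DDL of the spark_10747 fixture is not part of this diff, so lower-case column names are assumed from the expected schemas shown here:

// Assumed fixture DDL, inferred from the expected schemas above.
spark.sql("CREATE TABLE spark_10747(col1 int, col2 int, col3 int) USING parquet")
// Resolution stays case-insensitive, but the schema keeps the table's case:
spark.sql("SELECT COL1, COL2, COL3 FROM spark_10747").schema.simpleString
// expected under this patch: struct<col1:int,col2:int,col3:int>
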
@@ -183,7 +183,7 @@ struct<>
 -- !query 20
 SELECT p.`(KEY)?+.+`, b, testdata2.`(b)?+.+` FROM testData p join testData2 ON p.key = testData2.a WHERE key < 3
 -- !query 20 schema
-struct<value1:string,value2:string,b:int,A:int,c:int,d:int>
+struct<value1:string,value2:string,B:int,A:int,c:int,d:int>
 -- !query 20 output
 1 11 1 1 1 2
 1 11 2 1 1 2
@@ -194,7 +194,7 @@ struct<value1:string,value2:string,b:int,A:int,c:int,d:int>
 -- !query 21
 SELECT p.`(key)?+.+`, b, testdata2.`(b)?+.+` FROM testData p join testData2 ON p.key = testData2.a WHERE key < 3
 -- !query 21 schema
-struct<value1:string,value2:string,b:int,A:int,c:int,d:int>
+struct<value1:string,value2:string,B:int,A:int,c:int,d:int>
 -- !query 21 output
 1 11 1 1 1 2
 1 11 2 1 1 2

@@ -891,6 +891,31 @@ class ParquetQuerySuite extends QueryTest with ParquetTest with SharedSQLContext
       }
     }
   }
+
+  test("SPARK-25135: insert parquet table may all null when select from view") {
+    withTempDir { dir =>
+      val path = dir.getCanonicalPath
+      val cnt = 30
+      val table1Path = s"$path/table1"
+      val table2Path = s"$path/table2"
+      spark.range(cnt).selectExpr("cast(id as bigint) as col1")
+        .write.mode(SaveMode.Overwrite).parquet(table1Path)
+      withTable("table1", "table2") {
+        spark.sql(s"CREATE TABLE table1(col1 bigint) using parquet location '$table1Path/'")
+        spark.sql(s"CREATE TABLE table2(COL1 bigint) using parquet location '$table2Path/'")
+
+        withView("view1") {
+          spark.sql("CREATE VIEW view1 as select col1 from table1 where col1 > -20")
+          spark.sql("INSERT OVERWRITE TABLE table2 select COL1 from view1")
+          assert(spark.table("table2").count() === cnt)
+          spark.read.parquet(table2Path).schema.zip(
+            spark.table("table2").schema).foreach { case (actual, table) =>
+            assert(actual.name.equals(table.name))
+          }
+        }
+      }
+    }
+  }
 }
 
 object TestingUDT {
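
The new test exercises SPARK-25135 end to end: it inserts into table2 (declared COL1) through a view over table1 (declared col1), then checks both the row count and that the field names inside the written Parquet files agree with the catalog schema of the target table; per the JIRA title, a mismatch here could previously make reads of the table come back as all nulls. The final assertion can be reproduced by hand in spark-shell (hypothetical path):

// Hypothetical data location for table2; mirrors the test's last assertion.
val fileSchema = spark.read.parquet("/tmp/table2").schema
val tableSchema = spark.table("table2").schema
assert(fileSchema.fields.map(_.name).sameElements(tableSchema.fields.map(_.name)))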