52 commits
01e4cdf
Merge remote-tracking branch 'upstream/master'
gatorsmile Nov 13, 2015
6835704
Merge remote-tracking branch 'upstream/master'
gatorsmile Nov 14, 2015
9180687
Merge remote-tracking branch 'upstream/master'
gatorsmile Nov 14, 2015
b38a21e
SPARK-11633
gatorsmile Nov 17, 2015
d2b84af
Merge remote-tracking branch 'upstream/master' into joinMakeCopy
gatorsmile Nov 17, 2015
fda8025
Merge remote-tracking branch 'upstream/master'
gatorspark Nov 17, 2015
ac0dccd
Merge branch 'master' of https://github.com/gatorsmile/spark
gatorspark Nov 17, 2015
6e0018b
Merge remote-tracking branch 'upstream/master'
Nov 20, 2015
0546772
converge
gatorsmile Nov 20, 2015
b37a64f
converge
gatorsmile Nov 20, 2015
c2a872c
Merge remote-tracking branch 'upstream/master'
gatorsmile Jan 6, 2016
ab6dbd7
Merge remote-tracking branch 'upstream/master'
gatorsmile Jan 6, 2016
4276356
Merge remote-tracking branch 'upstream/master'
gatorsmile Jan 6, 2016
2dab708
Merge remote-tracking branch 'upstream/master'
gatorsmile Jan 7, 2016
0458770
Merge remote-tracking branch 'upstream/master'
gatorsmile Jan 8, 2016
1debdfa
Merge remote-tracking branch 'upstream/master'
gatorsmile Jan 9, 2016
763706d
Merge remote-tracking branch 'upstream/master'
gatorsmile Jan 14, 2016
4de6ec1
Merge remote-tracking branch 'upstream/master'
gatorsmile Jan 18, 2016
9422a4f
Merge remote-tracking branch 'upstream/master'
gatorsmile Jan 19, 2016
52bdf48
Merge remote-tracking branch 'upstream/master'
gatorsmile Jan 20, 2016
1e95df3
Merge remote-tracking branch 'upstream/master'
gatorsmile Jan 23, 2016
fab24cf
Merge remote-tracking branch 'upstream/master'
gatorsmile Feb 1, 2016
8b2e33b
Merge remote-tracking branch 'upstream/master'
gatorsmile Feb 5, 2016
2ee1876
Merge remote-tracking branch 'upstream/master'
gatorsmile Feb 11, 2016
b9f0090
Merge remote-tracking branch 'upstream/master'
gatorsmile Feb 12, 2016
ade6f7e
Merge remote-tracking branch 'upstream/master'
gatorsmile Feb 15, 2016
9fd63d2
Merge remote-tracking branch 'upstream/master'
gatorsmile Feb 19, 2016
5199d49
Merge remote-tracking branch 'upstream/master'
gatorsmile Feb 22, 2016
404214c
Merge remote-tracking branch 'upstream/master'
gatorsmile Feb 23, 2016
c001dd9
Merge remote-tracking branch 'upstream/master'
gatorsmile Feb 25, 2016
59daa48
Merge remote-tracking branch 'upstream/master'
gatorsmile Mar 5, 2016
41d5f64
Merge remote-tracking branch 'upstream/master'
gatorsmile Mar 7, 2016
472a6e3
Merge remote-tracking branch 'upstream/master'
gatorsmile Mar 10, 2016
0fba10a
Merge remote-tracking branch 'upstream/master'
gatorsmile Mar 12, 2016
cbf73b3
Merge remote-tracking branch 'upstream/master'
gatorsmile Mar 21, 2016
c08f561
Merge remote-tracking branch 'upstream/master'
gatorsmile Mar 22, 2016
474df88
Merge remote-tracking branch 'upstream/master'
gatorsmile Mar 22, 2016
3d9828d
Merge remote-tracking branch 'upstream/master'
gatorsmile Mar 24, 2016
72d2361
Merge remote-tracking branch 'upstream/master'
gatorsmile Mar 26, 2016
07afea5
Merge remote-tracking branch 'upstream/master'
gatorsmile Mar 29, 2016
8bf2007
Merge remote-tracking branch 'upstream/master'
gatorsmile Mar 30, 2016
87a165b
Merge remote-tracking branch 'upstream/master'
gatorsmile Mar 31, 2016
b9359cd
Merge remote-tracking branch 'upstream/master'
gatorsmile Apr 1, 2016
181c817
throw exceptions for DDLs of partitioned views
gatorsmile Apr 5, 2016
b6c1601
add comments.
gatorsmile Apr 5, 2016
65bd090
Merge remote-tracking branch 'upstream/master'
gatorsmile Apr 5, 2016
14df7ab
address comments.
gatorsmile Apr 5, 2016
babf2da
Merge remote-tracking branch 'upstream/master'
gatorsmile Apr 5, 2016
140f859
Merge branch 'viewPartition' into viewPartitionNew
gatorsmile Apr 5, 2016
c9d40e1
address comments
gatorsmile Apr 5, 2016
e5a8de7
Merge remote-tracking branch 'upstream/master' into viewPartitionNew
gatorsmile Apr 5, 2016
93e51b4
address comments
gatorsmile Apr 6, 2016
@@ -20,7 +20,7 @@ import scala.collection.JavaConverters._

import org.apache.spark.sql.SaveMode
import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.catalyst.parser.{AbstractSqlParser, AstBuilder, ParseException}
import org.apache.spark.sql.catalyst.parser._
import org.apache.spark.sql.catalyst.parser.SqlBaseParser._
import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, OneRowRelation}
import org.apache.spark.sql.execution.command.{DescribeCommand => _, _}
@@ -552,9 +552,13 @@ class SparkSqlAstBuilder extends AstBuilder {
* ALTER TABLE table ADD [IF NOT EXISTS] PARTITION spec [LOCATION 'loc1']
* ALTER VIEW view ADD [IF NOT EXISTS] PARTITION spec
* }}}
*
* ALTER VIEW ... ADD PARTITION ... is not supported because the concept of partitioning
* is associated with physical tables
*/
override def visitAddTablePartition(
ctx: AddTablePartitionContext): LogicalPlan = withOrigin(ctx) {
if (ctx.VIEW != null) throw new ParseException("Operation not allowed: partitioned views", ctx)
// Create partition spec to location mapping.
val specsAndLocs = if (ctx.partitionSpec.isEmpty) {
ctx.partitionSpecLocation.asScala.map {
@@ -616,9 +620,13 @@ class SparkSqlAstBuilder extends AstBuilder {
* ALTER TABLE table DROP [IF EXISTS] PARTITION spec1[, PARTITION spec2, ...] [PURGE];
* ALTER VIEW view DROP [IF EXISTS] PARTITION spec1[, PARTITION spec2, ...];
* }}}
*
* ALTER VIEW ... DROP PARTITION ... is not supported because the concept of partitioning
* is associated with physical tables
*/
override def visitDropTablePartitions(
ctx: DropTablePartitionsContext): LogicalPlan = withOrigin(ctx) {
if (ctx.VIEW != null) throw new ParseException("Operation not allowed: partitioned views", ctx)
AlterTableDropPartition(
visitTableIdentifier(ctx.tableIdentifier),
ctx.partitionSpec.asScala.map(visitNonOptionalPartitionSpec),
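Net effect of the two hunks above: partition DDL against views is rejected at parse time instead of producing an AlterTable* command. A minimal sketch of the new behavior, assuming a parser object exposing the same parsePlan entry point the test suites below use (illustrative, not part of the diff):

import org.apache.spark.sql.catalyst.parser.ParseException
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan

// `parser` is assumed to be a SqlParser-like object, e.g. the one
// DDLCommandSuite below exercises.
def tryParse(sql: String): Either[String, LogicalPlan] =
  try Right(parser.parsePlan(sql))
  catch { case e: ParseException => Left(e.getMessage) }

// The TABLE form still parses; the VIEW form now fails before analysis.
assert(tryParse("ALTER TABLE t ADD IF NOT EXISTS PARTITION (ds='2008-08-08')").isRight)
assert(tryParse("ALTER VIEW v ADD IF NOT EXISTS PARTITION (ds='2008-08-08')").isLeft)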
@@ -448,22 +448,12 @@ class DDLCommandSuite extends PlanTest {
|(col1=NULL, cOL2='f', col3=5, COL4=true)
""".stripMargin

val parsed1 = parser.parsePlan(sql1)
val parsed2 = parser.parsePlan(sql2)

val expected1 = AlterTableAddPartition(
TableIdentifier("view_name", None),
Seq(
(Map("dt" -> "2008-08-08", "country" -> "us"), None),
(Map("dt" -> "2009-09-09", "country" -> "uk"), None)),
ifNotExists = true)(sql1)
val expected2 = AlterTableAddPartition(
TableIdentifier("view_name", None),
Seq((Map("col1" -> "NULL", "col2" -> "f", "col3" -> "5", "col4" -> "true"), None)),
ifNotExists = false)(sql2)

comparePlans(parsed1, expected1)
comparePlans(parsed2, expected2)
intercept[ParseException] {
parser.parsePlan(sql1)
}
intercept[ParseException] {
parser.parsePlan(sql2)
}
}

test("alter table: rename partition") {
@@ -513,8 +503,13 @@ class DDLCommandSuite extends PlanTest {

val parsed1_table = parser.parsePlan(sql1_table)
val parsed2_table = parser.parsePlan(sql2_table)
val parsed1_view = parser.parsePlan(sql1_view)
val parsed2_view = parser.parsePlan(sql2_view)

intercept[ParseException] {
parser.parsePlan(sql1_view)
}
intercept[ParseException] {
parser.parsePlan(sql2_view)
}

val tableIdent = TableIdentifier("table_name", None)
val expected1_table = AlterTableDropPartition(
@@ -532,25 +527,8 @@ class DDLCommandSuite extends PlanTest {
ifExists = false,
purge = true)(sql2_table)

val expected1_view = AlterTableDropPartition(
tableIdent,
Seq(
Map("dt" -> "2008-08-08", "country" -> "us"),
Map("dt" -> "2009-09-09", "country" -> "uk")),
ifExists = true,
purge = false)(sql1_view)
val expected2_view = AlterTableDropPartition(
tableIdent,
Seq(
Map("dt" -> "2008-08-08", "country" -> "us"),
Map("dt" -> "2009-09-09", "country" -> "uk")),
ifExists = false,
purge = false)(sql2_table)

comparePlans(parsed1_table, expected1_table)
comparePlans(parsed2_table, expected2_table)
comparePlans(parsed1_view, expected1_view)
comparePlans(parsed2_view, expected2_view)
}

test("alter table: archive partition") {
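The reworked tests only assert that a ParseException is raised. Since intercept also returns the caught exception, a hypothetical tightening (not part of this PR) could pin down the message as well:

import org.scalatest.Assertions.intercept
import org.apache.spark.sql.catalyst.parser.ParseException

// `parser` as in DDLCommandSuite above.
val e = intercept[ParseException] {
  parser.parsePlan("ALTER VIEW view_name DROP IF EXISTS PARTITION (dt='2008-08-08')")
}
// The message passed to ParseException is preserved in getMessage.
assert(e.getMessage.contains("Operation not allowed: partitioned views"))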
@@ -366,7 +366,11 @@ class HiveCompatibilitySuite extends HiveQueryFileTest with BeforeAndAfter {
"alter_index",

// Macro commands are not supported
"macro"
"macro",

// Creating partitioned views is not supported
"create_like_view",
"describe_formatted_view_partitioned"
)

/**
@@ -477,7 +481,6 @@ class HiveCompatibilitySuite extends HiveQueryFileTest with BeforeAndAfter {
"cp_mj_rc",
"create_insert_outputformat",
"create_like_tbl_props",
"create_like_view",
"create_nested_type",
"create_skewed_table1",
"create_struct_table",
@@ -502,7 +505,6 @@ class HiveCompatibilitySuite extends HiveQueryFileTest with BeforeAndAfter {
"default_partition_name",
"delimiter",
"desc_non_existent_tbl",
"describe_formatted_view_partitioned",
"diff_part_input_formats",
"disable_file_format_check",
"disallow_incompatible_type_change_off",
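For context, suites in the HiveQueryFileTest family run the whitelisted Hive query files that are not excluded, so moving create_like_view and describe_formatted_view_partitioned into the exclusion list above disables them. A self-contained sketch of that filtering pattern, with illustrative names rather than the suite's actual fields:

// `excluded` and `candidates` are hypothetical names, not
// HiveCompatibilitySuite's real members.
val excluded = Seq("macro", "create_like_view", "describe_formatted_view_partitioned")
val candidates = Seq("create_like_tbl_props", "create_like_view", "delimiter")

// Only cases absent from the exclusion list remain runnable.
assert(candidates.filterNot(excluded.contains) == Seq("create_like_tbl_props", "delimiter"))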
@@ -215,11 +215,19 @@ class HiveSqlAstBuilder extends SparkSqlAstBuilder {

/**
* Create or replace a view. This creates a [[CreateViewAsSelect]] command.
*
* For example:
* {{{
* CREATE VIEW [IF NOT EXISTS] [db_name.]view_name
* [(column_name [COMMENT column_comment], ...) ]
* [COMMENT view_comment]
* [TBLPROPERTIES (property_name = property_value, ...)]
* AS SELECT ...;
* }}}
*/
override def visitCreateView(ctx: CreateViewContext): LogicalPlan = withOrigin(ctx) {
// Partitioned views are not supported; fail at parse time.
if (ctx.identifierList != null) {
HiveNativeCommand(command(ctx))
throw new ParseException("Operation not allowed: partitioned views", ctx)
} else {
if (ctx.STRING != null) {
logWarning("COMMENT clause is ignored.")
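The expanded doc comment pins down the supported CREATE VIEW shape, and the parser change below it turns the partitioned form into a hard error. A hedged sketch of that accept/reject boundary, reusing the HiveSqlParser object exercised by the test suite that follows:

import org.scalatest.Assertions.intercept
import org.apache.spark.sql.catalyst.parser.ParseException
import org.apache.spark.sql.hive.execution.HiveSqlParser

// A plain view definition still parses into a CreateViewAsSelect command.
HiveSqlParser.parsePlan("CREATE VIEW view1 AS SELECT * FROM tab1")

// PARTITIONED ON no longer falls back to HiveNativeCommand; it is a parse error.
intercept[ParseException] {
  HiveSqlParser.parsePlan("CREATE VIEW view1 PARTITIONED ON (ds) AS SELECT * FROM srcpart")
}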
@@ -26,16 +26,18 @@ import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.dsl.plans
import org.apache.spark.sql.catalyst.dsl.plans.DslLogicalPlan
import org.apache.spark.sql.catalyst.expressions.JsonTuple
import org.apache.spark.sql.catalyst.parser.ParseException
import org.apache.spark.sql.catalyst.plans.PlanTest
import org.apache.spark.sql.catalyst.plans.logical.{Generate, ScriptTransformation}
import org.apache.spark.sql.hive.execution.HiveSqlParser
import org.apache.spark.sql.hive.execution.{HiveNativeCommand, HiveSqlParser}

class HiveQlSuite extends PlanTest {
class HiveDDLCommandSuite extends PlanTest {
val parser = HiveSqlParser

private def extractTableDesc(sql: String): (CatalogTable, Boolean) = {
parser.parsePlan(sql).collect {
case CreateTableAsSelect(desc, child, allowExisting) => (desc, allowExisting)
case CreateTableAsSelect(desc, _, allowExisting) => (desc, allowExisting)
case CreateViewAsSelect(desc, _, allowExisting, _, _) => (desc, allowExisting)
}.head
}

@@ -251,4 +253,56 @@ class HiveQlSuite extends PlanTest {
|LATERAL VIEW explode(`gen``tab1`.`gen``col1`) `gen``tab2` AS `gen``col2`
""".stripMargin)
}

test("create view -- basic") {
val v1 = "CREATE VIEW view1 AS SELECT * FROM tab1"
val (desc, exists) = extractTableDesc(v1)
assert(!exists)
assert(desc.identifier.database.isEmpty)
assert(desc.identifier.table == "view1")
assert(desc.tableType == CatalogTableType.VIRTUAL_VIEW)
assert(desc.storage.locationUri.isEmpty)
assert(desc.schema == Seq.empty[CatalogColumn])
assert(desc.viewText.contains("SELECT * FROM tab1"))
assert(desc.viewOriginalText.contains("SELECT * FROM tab1"))
assert(desc.storage.serdeProperties == Map())
assert(desc.storage.inputFormat.isEmpty)
assert(desc.storage.outputFormat.isEmpty)
assert(desc.storage.serde.isEmpty)
assert(desc.properties == Map())
}

test("create view - full") {
val v1 =
"""
|CREATE OR REPLACE VIEW IF NOT EXISTS view1
|(col1, col3)
|COMMENT 'I cannot spell'
|TBLPROPERTIES('prop1Key'="prop1Val")
|AS SELECT * FROM tab1
""".stripMargin
val (desc, exists) = extractTableDesc(v1)
assert(exists)
assert(desc.identifier.database.isEmpty)
assert(desc.identifier.table == "view1")
assert(desc.tableType == CatalogTableType.VIRTUAL_VIEW)
assert(desc.storage.locationUri.isEmpty)
assert(desc.schema ==
CatalogColumn("col1", null, nullable = true, None) ::
CatalogColumn("col3", null, nullable = true, None) :: Nil)
assert(desc.viewText.contains("SELECT * FROM tab1"))
assert(desc.viewOriginalText.contains("SELECT * FROM tab1"))
assert(desc.storage.serdeProperties == Map())
assert(desc.storage.inputFormat.isEmpty)
assert(desc.storage.outputFormat.isEmpty)
assert(desc.storage.serde.isEmpty)
assert(desc.properties == Map("prop1Key" -> "prop1Val"))
}

test("create view -- partitioned view") {
val v1 = "CREATE VIEW view1 partitioned on (ds, hr) as select * from srcpart"
intercept[ParseException] {
parser.parsePlan(v1)
}
}
}
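As a usage note, the collect-based extraction in extractTableDesc generalizes to one-off checks outside the suite; a short sketch under the same imports as the suite above:

val plan = HiveSqlParser.parsePlan("CREATE VIEW view1 AS SELECT * FROM tab1")
val desc = plan.collect {
  case CreateViewAsSelect(table, _, _, _, _) => table
}.head

// A view carries no storage formats; its defining SELECT is kept as text instead.
assert(desc.tableType == CatalogTableType.VIRTUAL_VIEW)
assert(desc.storage.inputFormat.isEmpty)
assert(desc.viewText.contains("SELECT * FROM tab1"))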