From eb819442d56d4c2c0ee233e411ce7f859da4a9e2 Mon Sep 17 00:00:00 2001
From: Jacky Li
Date: Sat, 25 Oct 2014 03:09:01 +0800
Subject: [PATCH 1/2] add the parser part

---
 .../spark/sql/hbase/HBaseSQLParser.scala      | 19 +++++++++++++++++--
 1 file changed, 17 insertions(+), 2 deletions(-)

diff --git a/sql/hbase/src/main/scala/org/apache/spark/sql/hbase/HBaseSQLParser.scala b/sql/hbase/src/main/scala/org/apache/spark/sql/hbase/HBaseSQLParser.scala
index 20bd7fb1790ef..fde3bc69daf1f 100644
--- a/sql/hbase/src/main/scala/org/apache/spark/sql/hbase/HBaseSQLParser.scala
+++ b/sql/hbase/src/main/scala/org/apache/spark/sql/hbase/HBaseSQLParser.scala
@@ -19,8 +19,15 @@ package org.apache.spark.sql.hbase
 import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.catalyst.plans.logical._
 import org.apache.spark.sql.catalyst.{SqlLexical, SqlParser}
+import org.apache.spark.sql.hbase.logical._
 
 class HBaseSQLParser extends SqlParser {
+
+  protected val DATA = Keyword("DATA")
+  protected val LOAD = Keyword("LOAD")
+  protected val LOCAL = Keyword("LOCAL")
+  protected val INPATH = Keyword("INPATH")
+  protected val BULK = Keyword("BULK")
 
   protected val CREATE = Keyword("CREATE")
   protected val DROP = Keyword("DROP")
@@ -53,9 +60,10 @@ class HBaseSQLParser extends SqlParser {
         | EXCEPT ^^^ { (q1: LogicalPlan, q2: LogicalPlan) => Except(q1, q2)}
         | UNION ~ DISTINCT.? ^^^ { (q1: LogicalPlan, q2: LogicalPlan) => Distinct(Union(q1, q2))}
         )
-      | insert | create | drop | alter
+      | load
+      /* | insert | create | drop | alter */
       )
-
+/*
   override protected lazy val insert: Parser[LogicalPlan] =
     INSERT ~> INTO ~> relation ~ select <~ opt(";") ^^ {
       case r ~ s =>
@@ -121,6 +129,7 @@ class HBaseSQLParser extends SqlParser {
       CreateHBaseTablePlan(tableName, customizedNameSpace, hbaseTableName,
         tableColumns.unzip._1,
         keyColsWithDataType, nonKeyCols)
+
   }
 
   protected lazy val drop: Parser[LogicalPlan] =
@@ -134,6 +143,12 @@ class HBaseSQLParser extends SqlParser {
       }
     | ALTER ~> TABLE ~> ident ~ ADD ~ tableCol ~ (MAPPED ~> BY ~> "(" ~> expressions <~ ")") ^^ {
       case tn ~ op ~ tc ~ cf => null
     }
+*/
+  protected lazy val load: Parser[LogicalPlan] =
+    (LOAD ~> DATA ~> opt(LOCAL) ~> INPATH ~> ident) ~
+      (opt(OVERWRITE) ~> INTO ~> TABLE ~> relation) ^^ {
+      case filePath ~ table => LoadDataIntoTable(filePath, table, true)
+    }
 
   protected lazy val tableCol: Parser[(String, String)] =
     ident ~ (STRING | BYTE | SHORT | INTEGER | LONG | FLOAT | DOUBLE | BOOLEAN) ^^ {

From e595792b4c626539bdb319967545b7350e5080d7 Mon Sep 17 00:00:00 2001
From: Jacky Li
Date: Sat, 25 Oct 2014 03:09:32 +0800
Subject: [PATCH 2/2] add the logical plan part

---
 .../apache/spark/sql/hbase/HBaseLogicalPlan.scala | 15 ++++++++++-----
 1 file changed, 10 insertions(+), 5 deletions(-)

diff --git a/sql/hbase/src/main/scala/org/apache/spark/sql/hbase/HBaseLogicalPlan.scala b/sql/hbase/src/main/scala/org/apache/spark/sql/hbase/HBaseLogicalPlan.scala
index 7664b027366fb..50d45ec7a40d9 100644
--- a/sql/hbase/src/main/scala/org/apache/spark/sql/hbase/HBaseLogicalPlan.scala
+++ b/sql/hbase/src/main/scala/org/apache/spark/sql/hbase/HBaseLogicalPlan.scala
@@ -1,8 +1,8 @@
 package org.apache.spark.sql.hbase.logical
 
-import org.apache.spark.sql.catalyst.plans.logical.{LeafNode, LogicalPlan, Command}
-import org.apache.spark.sql.hbase.HBaseRelation
-
+import org.apache.spark.sql.catalyst.plans.logical.{UnaryNode, LeafNode, LogicalPlan, Command}
+//import org.apache.spark.sql.hbase.HBaseRelation
+/*
 case class CreateHBaseTablePlan(tableName: String,
                                 nameSpace: String,
                                 hbaseTable: String,
@@ -20,8 +20,13 @@ case class BulkLoadIntoTable(
   // TODO:need resolved here?
 }
 
 
+*/
+case class LoadDataIntoTable(path: String,
+                             child: LogicalPlan,
+                             isLocal: Boolean) extends UnaryNode {
 
-case class LoadDataIntoTable(path: String, table: String, isLocal: Boolean) extends LeafNode {
-  override def output = Seq.empty
+  override def output = Nil
+
+  override def toString = s"LogicalPlan: LoadDataIntoTable(LOAD $path INTO $child)"
 }
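
Note on the grammar (not part of the patches): the sketch below is a minimal, self-contained illustration of a LOAD DATA [LOCAL] INPATH <path> [OVERWRITE] INTO TABLE <table> rule in the same combinator style the parser patch uses (opt, ~>, ^^), and of how the result of opt(LOCAL) could drive the isLocal flag, which the patch currently hard-codes to true. LoadStatement and LoadDataParser are illustrative names only, and the sketch assumes Scala's parser-combinator library is on the classpath; it is not the Spark code itself.

import scala.util.parsing.combinator.JavaTokenParsers

// Illustrative result type; the real patch builds a LoadDataIntoTable logical plan instead.
case class LoadStatement(path: String, table: String, isLocal: Boolean, overwrite: Boolean)

object LoadDataParser extends JavaTokenParsers {
  // LOAD DATA [LOCAL] INPATH "<path>" [OVERWRITE] INTO TABLE <table>
  def load: Parser[LoadStatement] =
    "LOAD" ~> "DATA" ~> opt("LOCAL") ~ ("INPATH" ~> stringLiteral) ~
      opt("OVERWRITE") ~ ("INTO" ~> "TABLE" ~> ident) ^^ {
      case local ~ path ~ overwrite ~ table =>
        // local.isDefined records whether LOCAL was present, instead of a hard-coded flag
        LoadStatement(path.stripPrefix("\"").stripSuffix("\""), table,
          isLocal = local.isDefined, overwrite = overwrite.isDefined)
    }

  def parse(sql: String): LoadStatement = parseAll(load, sql) match {
    case Success(stmt, _) => stmt
    case failure          => sys.error(failure.toString)
  }
}

// Example:
//   LoadDataParser.parse("""LOAD DATA LOCAL INPATH "/tmp/users.csv" INTO TABLE users""")
//   returns LoadStatement("/tmp/users.csv", "users", isLocal = true, overwrite = false)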