CatalogV2Implicits.scala
@@ -164,6 +164,18 @@ private[sql] object CatalogV2Implicits {
     def quoted: String = parts.map(quoteIfNeeded).mkString(".")
   }

+  implicit class TableIdentifierHelper(identifier: TableIdentifier) {
+    def quoted: String = {
+      identifier.database match {
+        case Some(db) =>
+          Seq(db, identifier.table).map(quoteIfNeeded).mkString(".")
+        case _ =>
+          quoteIfNeeded(identifier.table)
+
+      }
+    }
+  }
+
   def parseColumnPath(name: String): Seq[String] = {
     CatalystSqlParser.parseMultipartIdentifier(name)
   }
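For reference, a minimal sketch of how the new implicit behaves, assuming `quoteIfNeeded` adds backticks only around names that are not valid unquoted identifiers (the identifiers below are made up):

```scala
import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.connector.catalog.CatalogV2Implicits.TableIdentifierHelper

// "default" and "tbl" are valid unquoted identifiers, so no backticks appear.
TableIdentifier("tbl", Some("default")).quoted  // "default.tbl"

// "my-db" contains a hyphen and needs quoting, so only that part is backticked.
TableIdentifier("tbl", Some("my-db")).quoted    // "`my-db`.tbl"
```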
V1Table.scala
@@ -22,9 +22,8 @@ import java.util
 import scala.collection.JavaConverters._
 import scala.collection.mutable

-import org.apache.spark.sql.catalyst.TableIdentifier
 import org.apache.spark.sql.catalyst.catalog.{CatalogTable, CatalogTableType}
-import org.apache.spark.sql.catalyst.util.quoteIfNeeded
+import org.apache.spark.sql.connector.catalog.CatalogV2Implicits.TableIdentifierHelper
 import org.apache.spark.sql.connector.catalog.V1Table.addV2TableProperties
 import org.apache.spark.sql.connector.expressions.{LogicalExpressions, Transform}
 import org.apache.spark.sql.types.StructType
@@ -33,17 +32,6 @@ import org.apache.spark.sql.types.StructType
  * An implementation of catalog v2 `Table` to expose v1 table metadata.
  */
 private[sql] case class V1Table(v1Table: CatalogTable) extends Table {
-  implicit class IdentifierHelper(identifier: TableIdentifier) {
-    def quoted: String = {
-      identifier.database match {
-        case Some(db) =>
-          Seq(db, identifier.table).map(quoteIfNeeded).mkString(".")
-        case _ =>
-          quoteIfNeeded(identifier.table)
-
-      }
-    }
-  }

   def catalogTable: CatalogTable = v1Table

@@ -92,7 +80,9 @@ private[sql] object V1Table {
         TableCatalog.OPTION_PREFIX + key -> value } ++
       v1Table.provider.map(TableCatalog.PROP_PROVIDER -> _) ++
       v1Table.comment.map(TableCatalog.PROP_COMMENT -> _) ++
-      v1Table.storage.locationUri.map(TableCatalog.PROP_LOCATION -> _.toString) ++
+      (if (external) {
+        v1Table.storage.locationUri.map(TableCatalog.PROP_LOCATION -> _.toString)
+      } else None) ++
       (if (external) Some(TableCatalog.PROP_EXTERNAL -> "true") else None) ++
       Some(TableCatalog.PROP_OWNER -> v1Table.owner)
   }
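With this change, a managed v1 table no longer surfaces its system-chosen location as a v2 table property; only EXTERNAL tables carry `PROP_LOCATION`. A minimal sketch, assuming `managedTable` and `externalTable` are hypothetical `CatalogTable` fixtures of type MANAGED and EXTERNAL:

```scala
import org.apache.spark.sql.connector.catalog.{TableCatalog, V1Table}

val managedProps = V1Table.addV2TableProperties(managedTable)   // hypothetical fixture
assert(!managedProps.contains(TableCatalog.PROP_LOCATION))
assert(!managedProps.contains(TableCatalog.PROP_EXTERNAL))

val externalProps = V1Table.addV2TableProperties(externalTable) // hypothetical fixture
assert(externalProps.contains(TableCatalog.PROP_LOCATION))
assert(externalProps(TableCatalog.PROP_EXTERNAL) == "true")
```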
ResolveSessionCatalog.scala
@@ -266,12 +266,19 @@ class ResolveSessionCatalog(val catalogManager: CatalogManager)
         isOverwrite,
         partition)

-    case ShowCreateTable(ResolvedV1TableOrViewIdentifier(ident), asSerde, output) =>
-      if (asSerde) {
-        ShowCreateTableAsSerdeCommand(ident.asTableIdentifier, output)
-      } else {
-        ShowCreateTableCommand(ident.asTableIdentifier, output)
-      }
+    case ShowCreateTable(ResolvedV1TableOrViewIdentifier(ident), asSerde, output) if asSerde =>
+      ShowCreateTableAsSerdeCommand(ident.asTableIdentifier, output)
+
+    // If the target is a view, force using the v1 command.
+    case ShowCreateTable(ResolvedViewIdentifier(ident), _, output) =>
+      ShowCreateTableCommand(ident.asTableIdentifier, output)
+
+    case ShowCreateTable(ResolvedV1TableIdentifier(ident), _, output)
+        if conf.useV1Command => ShowCreateTableCommand(ident.asTableIdentifier, output)
+
+    case ShowCreateTable(ResolvedTable(catalog, ident, table: V1Table, _), _, output)
+        if isSessionCatalog(catalog) && DDLUtils.isHiveTable(table.catalogTable) =>
+      ShowCreateTableCommand(ident.asTableIdentifier, output)

     case TruncateTable(ResolvedV1TableIdentifier(ident)) =>
       TruncateTableCommand(ident.asTableIdentifier, None)
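The order of these cases matters: `AS SERDE` always takes the v1 serde command, views and session-catalog Hive tables are forced onto the v1 command, other v1 tables use it only when the legacy `useV1Command` flag is set, and everything else falls through to the v2 plan. A rough sketch of the dispatch, with hypothetical table names and assuming a Hive-enabled session:

```scala
spark.sql("SHOW CREATE TABLE hive_tbl AS SERDE") // v1 ShowCreateTableAsSerdeCommand
spark.sql("SHOW CREATE TABLE some_view")         // v1 ShowCreateTableCommand (view)
spark.sql("SHOW CREATE TABLE hive_tbl")          // v1 ShowCreateTableCommand (Hive table)
spark.sql("SHOW CREATE TABLE parquet_tbl")       // v2 ShowCreateTableExec (by default)
```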
tables.scala (ShowCreateTableCommand / ShowCreateTableAsSerdeCommand)
@@ -36,6 +36,7 @@ import org.apache.spark.sql.catalyst.expressions.Attribute
 import org.apache.spark.sql.catalyst.plans.DescribeCommandSchema
 import org.apache.spark.sql.catalyst.plans.logical._
 import org.apache.spark.sql.catalyst.util.{escapeSingleQuotedString, quoteIdentifier, CaseInsensitiveMap, CharVarcharUtils}
+import org.apache.spark.sql.connector.catalog.CatalogV2Implicits.TableIdentifierHelper
 import org.apache.spark.sql.errors.{QueryCompilationErrors, QueryExecutionErrors}
 import org.apache.spark.sql.execution.datasources.DataSource
 import org.apache.spark.sql.execution.datasources.csv.CSVFileFormat
@@ -1104,12 +1105,12 @@ case class ShowCreateTableCommand(
     val builder = StringBuilder.newBuilder

     val stmt = if (tableMetadata.tableType == VIEW) {
-      builder ++= s"CREATE VIEW ${table.quotedString} "
+      builder ++= s"CREATE VIEW ${table.quoted} "
       showCreateView(metadata, builder)

       builder.toString()
     } else {
-      builder ++= s"CREATE TABLE ${table.quotedString} "
+      builder ++= s"CREATE TABLE ${table.quoted} "

       showCreateDataSourceTable(metadata, builder)
       builder.toString()
@@ -1247,7 +1248,7 @@ case class ShowCreateTableAsSerdeCommand(
s"Unknown table type is found at showCreateHiveTable: $t")
}

builder ++= s"CREATE$tableTypeString ${table.quotedString} "
builder ++= s"CREATE$tableTypeString ${table.quoted} "

if (metadata.tableType == VIEW) {
showCreateView(metadata, builder)
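Switching from `quotedString` to the new `quoted` helper is what drops the unconditional backticks in the golden files below: only name parts that actually need quoting keep them. A small sketch of the difference:

```scala
import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.connector.catalog.CatalogV2Implicits.TableIdentifierHelper

val ident = TableIdentifier("tbl", Some("default"))
ident.quotedString  // "`default`.`tbl`" -- always backticked
ident.quoted        // "default.tbl"     -- backticked only when required
```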
ShowCreateTableExec.scala
@@ -21,9 +21,11 @@ import scala.collection.JavaConverters._
 import scala.collection.mutable.ArrayBuffer

 import org.apache.spark.sql.catalyst.InternalRow
+import org.apache.spark.sql.catalyst.catalog.BucketSpec
 import org.apache.spark.sql.catalyst.expressions.Attribute
-import org.apache.spark.sql.catalyst.util.escapeSingleQuotedString
+import org.apache.spark.sql.catalyst.util.{escapeSingleQuotedString, CharVarcharUtils}
 import org.apache.spark.sql.connector.catalog.{CatalogV2Util, Table, TableCatalog}
+import org.apache.spark.sql.connector.expressions.BucketTransform
 import org.apache.spark.sql.execution.LeafExecNode
 import org.apache.spark.unsafe.types.UTF8String

@@ -57,7 +59,7 @@ case class ShowCreateTableExec(
   }

   private def showTableDataColumns(table: Table, builder: StringBuilder): Unit = {
-    val columns = table.schema().fields.map(_.toDDL)
+    val columns = CharVarcharUtils.getRawSchema(table.schema(), conf).fields.map(_.toDDL)
     builder ++= concatByMultiLines(columns)
   }

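Catalyst stores CHAR/VARCHAR columns as STRING and keeps the declared type in column metadata; `getRawSchema` restores it so the generated DDL round-trips. A sketch, mirroring the charvarchar golden file further down:

```scala
spark.sql("CREATE TABLE char_tbl (c CHAR(5), v VARCHAR(6)) USING parquet")
spark.sql("SHOW CREATE TABLE char_tbl").show(truncate = false)
// Without getRawSchema the columns would print as `c` STRING, `v` STRING;
// with it they print as `c` CHAR(5), `v` VARCHAR(6).
```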
@@ -71,8 +73,9 @@ case class ShowCreateTableExec(
       builder: StringBuilder,
       tableOptions: Map[String, String]): Unit = {
     if (tableOptions.nonEmpty) {
-      val props = tableOptions.toSeq.sortBy(_._1).map { case (key, value) =>
-        s"'${escapeSingleQuotedString(key)}' = '${escapeSingleQuotedString(value)}'"
+      val props = conf.redactOptions(tableOptions).toSeq.sortBy(_._1).map {
+        case (key, value) =>
+          s"'${escapeSingleQuotedString(key)}' = '${escapeSingleQuotedString(value)}'"
       }
       builder ++= "OPTIONS "
       builder ++= concatByMultiLines(props)
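`conf.redactOptions` masks option values whose keys match `spark.sql.redaction.options.regex`, so credentials embedded in table options are no longer echoed by SHOW CREATE TABLE. A sketch of the effect; the default pattern and the exact replacement text here are assumptions:

```scala
val opts = Map("url" -> "jdbc:postgresql://host/db?password=secret", "dbtable" -> "t")
conf.redactOptions(opts)
// Map("url" -> "*********(redacted)", "dbtable" -> "t")
// assuming the default pattern "(?i)url", which matches the "url" key
```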
@@ -82,8 +85,31 @@ case class ShowCreateTableExec(
   private def showTablePartitioning(table: Table, builder: StringBuilder): Unit = {
     if (!table.partitioning.isEmpty) {
       val transforms = new ArrayBuffer[String]
-      table.partitioning.foreach(t => transforms += t.describe())
-      builder ++= s"PARTITIONED BY ${transforms.mkString("(", ", ", ")")}\n"
+      var bucketSpec = Option.empty[BucketSpec]
+      table.partitioning.map {
+        case BucketTransform(numBuckets, col, sortCol) =>
+          if (sortCol.isEmpty) {
+            bucketSpec = Some(BucketSpec(numBuckets, col.map(_.fieldNames.mkString(".")), Nil))
+          } else {
+            bucketSpec = Some(BucketSpec(numBuckets, col.map(_.fieldNames.mkString(".")),
+              sortCol.map(_.fieldNames.mkString("."))))
+          }
+        case t =>
+          transforms += t.describe()
+      }
+      if (transforms.nonEmpty) {
+        builder ++= s"PARTITIONED BY ${transforms.mkString("(", ", ", ")")}\n"
+      }
+
+      // compatible with v1
+      bucketSpec.map { bucket =>
+        assert(bucket.bucketColumnNames.nonEmpty)
+        builder ++= s"CLUSTERED BY ${bucket.bucketColumnNames.mkString("(", ", ", ")")}\n"
+        if (bucket.sortColumnNames.nonEmpty) {
+          builder ++= s"SORTED BY ${bucket.sortColumnNames.mkString("(", ", ", ")")}\n"
+        }
+        builder ++= s"INTO ${bucket.numBuckets} BUCKETS\n"
+      }
     }
   }

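This renders a v2 bucket transform in the v1 `CLUSTERED BY ... INTO n BUCKETS` syntax rather than the transform's own `bucket(n, col)` notation, so the statement can be replayed as-is. A sketch of the output shape, with a hypothetical table in a catalog that routes to this v2 exec:

```scala
spark.sql("""CREATE TABLE testcat.bucketed_tbl (a INT, b STRING) USING parquet
             CLUSTERED BY (a) SORTED BY (b) INTO 4 BUCKETS""")
spark.sql("SHOW CREATE TABLE testcat.bucketed_tbl").show(truncate = false)
// ...
// CLUSTERED BY (a)
// SORTED BY (b)
// INTO 4 BUCKETS
```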
@@ -98,11 +124,12 @@ case class ShowCreateTableExec(
       builder: StringBuilder,
       tableOptions: Map[String, String]): Unit = {

-
     val showProps = table.properties.asScala
       .filterKeys(key => !CatalogV2Util.TABLE_RESERVED_PROPERTIES.contains(key)
         && !key.startsWith(TableCatalog.OPTION_PREFIX)
-        && !tableOptions.contains(key))
+        && !tableOptions.contains(key)
+        && !key.equals(TableCatalog.PROP_EXTERNAL)
+      )
     if (showProps.nonEmpty) {
       val props = showProps.toSeq.sortBy(_._1).map {
         case (key, value) =>
@@ -123,5 +150,4 @@ case class ShowCreateTableExec(
   private def concatByMultiLines(iter: Iterable[String]): String = {
     iter.mkString("(\n  ", ",\n  ", ")\n")
   }
-
 }
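Excluding `PROP_EXTERNAL` from the TBLPROPERTIES block matters because V1Table now surfaces it as a table property (see the V1Table change above), and printing it would produce a statement that fails on replay. Roughly, without the filter an external table would render as:

```scala
// Without the extra filter, the generated DDL would leak:
//   TBLPROPERTIES (
//     'external' = 'true')
// and replaying that CREATE TABLE statement fails, because 'external'
// is reserved and may not be set as a user property.
```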
charvarchar.sql.out
@@ -51,7 +51,7 @@ show create table char_tbl
 -- !query schema
 struct<createtab_stmt:string>
 -- !query output
-CREATE TABLE `default`.`char_tbl` (
+CREATE TABLE default.char_tbl (
 `c` CHAR(5),
 `v` VARCHAR(6))
 USING parquet
@@ -70,7 +70,7 @@ show create table char_tbl2
 -- !query schema
 struct<createtab_stmt:string>
 -- !query output
-CREATE TABLE `default`.`char_tbl2` (
+CREATE TABLE default.char_tbl2 (
 `c` CHAR(5),
 `v` VARCHAR(6))
 USING parquet
@@ -161,7 +161,7 @@ show create table char_tbl3
 -- !query schema
 struct<createtab_stmt:string>
 -- !query output
-CREATE TABLE `default`.`char_tbl3` (
+CREATE TABLE default.char_tbl3 (
 `c` CHAR(5),
 `v` VARCHAR(6))
 USING parquet
@@ -218,7 +218,7 @@ show create table char_view
 -- !query schema
 struct<createtab_stmt:string>
 -- !query output
-CREATE VIEW `default`.`char_view` (
+CREATE VIEW default.char_view (
 `c`,
 `v`)
 AS select * from char_tbl
show-create-table.sql.out
@@ -15,7 +15,7 @@ SHOW CREATE TABLE tbl
 -- !query schema
 struct<createtab_stmt:string>
 -- !query output
-CREATE TABLE `default`.`tbl` (
+CREATE TABLE default.tbl (
 `a` INT,
 `b` STRING,
 `c` INT)
@@ -44,7 +44,7 @@ SHOW CREATE TABLE tbl
 -- !query schema
 struct<createtab_stmt:string>
 -- !query output
-CREATE TABLE `default`.`tbl` (
+CREATE TABLE default.tbl (
 `a` INT,
 `b` STRING,
 `c` INT)
@@ -75,7 +75,7 @@ SHOW CREATE TABLE tbl
 -- !query schema
 struct<createtab_stmt:string>
 -- !query output
-CREATE TABLE `default`.`tbl` (
+CREATE TABLE default.tbl (
 `a` INT,
 `b` STRING,
 `c` INT)
@@ -105,7 +105,7 @@ SHOW CREATE TABLE tbl
 -- !query schema
 struct<createtab_stmt:string>
 -- !query output
-CREATE TABLE `default`.`tbl` (
+CREATE TABLE default.tbl (
 `a` INT,
 `b` STRING,
 `c` INT)
@@ -135,7 +135,7 @@ SHOW CREATE TABLE tbl
 -- !query schema
 struct<createtab_stmt:string>
 -- !query output
-CREATE TABLE `default`.`tbl` (
+CREATE TABLE default.tbl (
 `b` STRING,
 `c` INT,
 `a` INT)
@@ -165,7 +165,7 @@ SHOW CREATE TABLE tbl
 -- !query schema
 struct<createtab_stmt:string>
 -- !query output
-CREATE TABLE `default`.`tbl` (
+CREATE TABLE default.tbl (
 `a` INT,
 `b` STRING,
 `c` INT)
@@ -197,7 +197,7 @@ SHOW CREATE TABLE tbl
 -- !query schema
 struct<createtab_stmt:string>
 -- !query output
-CREATE TABLE `default`.`tbl` (
+CREATE TABLE default.tbl (
 `a` INT,
 `b` STRING,
 `c` INT)
@@ -227,7 +227,7 @@ SHOW CREATE TABLE tbl
 -- !query schema
 struct<createtab_stmt:string>
 -- !query output
-CREATE TABLE `default`.`tbl` (
+CREATE TABLE default.tbl (
 `a` INT,
 `b` STRING,
 `c` INT)
@@ -257,7 +257,7 @@ SHOW CREATE TABLE tbl
 -- !query schema
 struct<createtab_stmt:string>
 -- !query output
-CREATE TABLE `default`.`tbl` (
+CREATE TABLE default.tbl (
 `a` FLOAT,
[Review thread on this line]
Contributor: To be consistent, shall we also quote the column name only if needed? This can be done in a new PR, as it's not related to the v2 command migration.
Contributor Author: OK, I will do it.
 `b` DECIMAL(10,0),
 `c` DECIMAL(10,0),
@@ -295,7 +295,7 @@ SHOW CREATE TABLE view_SPARK_30302 AS SERDE
 -- !query schema
 struct<createtab_stmt:string>
 -- !query output
-CREATE VIEW `default`.`view_SPARK_30302` (
+CREATE VIEW default.view_SPARK_30302 (
 `aaa`,
 `bbb`)
 AS SELECT a, b FROM tbl
@@ -306,7 +306,7 @@ SHOW CREATE TABLE view_SPARK_30302
 -- !query schema
 struct<createtab_stmt:string>
 -- !query output
-CREATE VIEW `default`.`view_SPARK_30302` (
+CREATE VIEW default.view_SPARK_30302 (
 `aaa`,
 `bbb`)
 AS SELECT a, b FROM tbl
@@ -335,7 +335,7 @@ SHOW CREATE TABLE view_SPARK_30302 AS SERDE
 -- !query schema
 struct<createtab_stmt:string>
 -- !query output
-CREATE VIEW `default`.`view_SPARK_30302` (
+CREATE VIEW default.view_SPARK_30302 (
 `aaa` COMMENT 'comment with \'quoted text\' for aaa',
 `bbb`)
 COMMENT 'This is a comment with \'quoted text\' for view'
@@ -347,7 +347,7 @@ SHOW CREATE TABLE view_SPARK_30302
 -- !query schema
 struct<createtab_stmt:string>
 -- !query output
-CREATE VIEW `default`.`view_SPARK_30302` (
+CREATE VIEW default.view_SPARK_30302 (
 `aaa` COMMENT 'comment with \'quoted text\' for aaa',
 `bbb`)
 COMMENT 'This is a comment with \'quoted text\' for view'
@@ -377,7 +377,7 @@ SHOW CREATE TABLE view_SPARK_30302 AS SERDE
 -- !query schema
 struct<createtab_stmt:string>
 -- !query output
-CREATE VIEW `default`.`view_SPARK_30302` (
+CREATE VIEW default.view_SPARK_30302 (
 `aaa`,
 `bbb`)
 TBLPROPERTIES (
@@ -391,7 +391,7 @@ SHOW CREATE TABLE view_SPARK_30302
 -- !query schema
 struct<createtab_stmt:string>
 -- !query output
-CREATE VIEW `default`.`view_SPARK_30302` (
+CREATE VIEW default.view_SPARK_30302 (
 `aaa`,
 `bbb`)
 TBLPROPERTIES (
DataSourceV2SQLSuite.scala
@@ -2775,6 +2775,7 @@ class DataSourceV2SQLSuite
     assert(properties.get(TableCatalog.PROP_COMMENT) == "This is a comment")
     assert(properties.get(TableCatalog.PROP_LOCATION) == "file:/tmp")
     assert(properties.containsKey(TableCatalog.PROP_OWNER))
+    assert(properties.get(TableCatalog.PROP_EXTERNAL) == "true")
     assert(properties.get(s"${TableCatalog.OPTION_PREFIX}from") == "0")
     assert(properties.get(s"${TableCatalog.OPTION_PREFIX}to") == "1")
     assert(properties.get("prop1") == "1")