diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/connector/catalog/CatalogV2Implicits.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/connector/catalog/CatalogV2Implicits.scala
index 407f25ba20e5..f4890cc3058d 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/connector/catalog/CatalogV2Implicits.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/connector/catalog/CatalogV2Implicits.scala
@@ -164,6 +164,18 @@ private[sql] object CatalogV2Implicits {
     def quoted: String = parts.map(quoteIfNeeded).mkString(".")
   }
 
+  implicit class TableIdentifierHelper(identifier: TableIdentifier) {
+    def quoted: String = {
+      identifier.database match {
+        case Some(db) =>
+          Seq(db, identifier.table).map(quoteIfNeeded).mkString(".")
+        case _ =>
+          quoteIfNeeded(identifier.table)
+
+      }
+    }
+  }
+
   def parseColumnPath(name: String): Seq[String] = {
     CatalystSqlParser.parseMultipartIdentifier(name)
   }
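Note on the hunk above: TableIdentifierHelper.quoted (moved here from V1Table so other call sites can reuse it) renders a v1 TableIdentifier with backticks only where a part actually needs them, unlike TableIdentifier.quotedString, which always quotes. A minimal self-contained Scala sketch of that behavior; the regex is an approximation of org.apache.spark.sql.catalyst.util.quoteIfNeeded, not the exact rule set:

object QuotedSketch {
  // Leave simple identifiers bare; backtick-quote anything else,
  // doubling any embedded backticks (approximates quoteIfNeeded).
  private def quoteIfNeeded(part: String): String =
    if (part.matches("[a-zA-Z0-9_]+") && !part.matches("\\d+")) part
    else s"`${part.replace("`", "``")}`"

  def quoted(database: Option[String], table: String): String =
    (database.toSeq :+ table).map(quoteIfNeeded).mkString(".")

  def main(args: Array[String]): Unit = {
    println(quoted(Some("default"), "char_tbl")) // default.char_tbl
    println(quoted(Some("my-db"), "t 1"))        // `my-db`.`t 1`
  }
}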
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/connector/catalog/V1Table.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/connector/catalog/V1Table.scala
index 07f66a614b2a..bf92107f6ae2 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/connector/catalog/V1Table.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/connector/catalog/V1Table.scala
@@ -22,9 +22,8 @@ import java.util
 import scala.collection.JavaConverters._
 import scala.collection.mutable
 
-import org.apache.spark.sql.catalyst.TableIdentifier
 import org.apache.spark.sql.catalyst.catalog.{CatalogTable, CatalogTableType}
-import org.apache.spark.sql.catalyst.util.quoteIfNeeded
+import org.apache.spark.sql.connector.catalog.CatalogV2Implicits.TableIdentifierHelper
 import org.apache.spark.sql.connector.catalog.V1Table.addV2TableProperties
 import org.apache.spark.sql.connector.expressions.{LogicalExpressions, Transform}
 import org.apache.spark.sql.types.StructType
@@ -33,17 +32,6 @@ import org.apache.spark.sql.types.StructType
  * An implementation of catalog v2 `Table` to expose v1 table metadata.
  */
 private[sql] case class V1Table(v1Table: CatalogTable) extends Table {
-  implicit class IdentifierHelper(identifier: TableIdentifier) {
-    def quoted: String = {
-      identifier.database match {
-        case Some(db) =>
-          Seq(db, identifier.table).map(quoteIfNeeded).mkString(".")
-        case _ =>
-          quoteIfNeeded(identifier.table)
-
-      }
-    }
-  }
 
   def catalogTable: CatalogTable = v1Table
 
@@ -92,7 +80,9 @@ private[sql] object V1Table {
       TableCatalog.OPTION_PREFIX + key -> value
     } ++
       v1Table.provider.map(TableCatalog.PROP_PROVIDER -> _) ++
       v1Table.comment.map(TableCatalog.PROP_COMMENT -> _) ++
-      v1Table.storage.locationUri.map(TableCatalog.PROP_LOCATION -> _.toString) ++
+      (if (external) {
+        v1Table.storage.locationUri.map(TableCatalog.PROP_LOCATION -> _.toString)
+      } else None) ++
       (if (external) Some(TableCatalog.PROP_EXTERNAL -> "true") else None) ++
       Some(TableCatalog.PROP_OWNER -> v1Table.owner)
   }
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala b/sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala
index aaf2ead592c9..3dde9985abbe 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveSessionCatalog.scala
@@ -266,12 +266,19 @@ class ResolveSessionCatalog(val catalogManager: CatalogManager)
         isOverwrite,
         partition)
 
-    case ShowCreateTable(ResolvedV1TableOrViewIdentifier(ident), asSerde, output) =>
-      if (asSerde) {
-        ShowCreateTableAsSerdeCommand(ident.asTableIdentifier, output)
-      } else {
+    case ShowCreateTable(ResolvedV1TableOrViewIdentifier(ident), asSerde, output) if asSerde =>
+      ShowCreateTableAsSerdeCommand(ident.asTableIdentifier, output)
+
+    // If target is view, force use v1 command
+    case ShowCreateTable(ResolvedViewIdentifier(ident), _, output) =>
+      ShowCreateTableCommand(ident.asTableIdentifier, output)
+
+    case ShowCreateTable(ResolvedV1TableIdentifier(ident), _, output) if conf.useV1Command =>
       ShowCreateTableCommand(ident.asTableIdentifier, output)
+
+    case ShowCreateTable(ResolvedTable(catalog, ident, table: V1Table, _), _, output)
+        if isSessionCatalog(catalog) && DDLUtils.isHiveTable(table.catalogTable) =>
+      ShowCreateTableCommand(ident.asTableIdentifier, output)
-      }
 
     case TruncateTable(ResolvedV1TableIdentifier(ident)) =>
       TruncateTableCommand(ident.asTableIdentifier, None)
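Note on the hunk above: the flattened if/else becomes guarded cases, so SHOW CREATE TABLE now falls back to the v1 commands only for AS SERDE, for views, for v1 tables when spark.sql.legacy.useV1Command is set, and for Hive tables in the session catalog; every other table reaches the v2 ShowCreateTableExec. A quick spark-shell probe of the view path (names are illustrative; the unquoted default.v in the output is what this patch produces):

import org.apache.spark.sql.SparkSession

val spark = SparkSession.builder().master("local[1]").getOrCreate()
spark.sql("CREATE TABLE default.tbl (a INT, b STRING) USING parquet")
spark.sql("CREATE VIEW default.v AS SELECT a FROM default.tbl")
// Views always take the v1 ShowCreateTableCommand path:
spark.sql("SHOW CREATE TABLE default.v").show(truncate = false)
// CREATE VIEW default.v ( `a`) AS SELECT a FROM default.tbl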
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
index b989224d4e0f..87028d5f12d0 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
@@ -36,6 +36,7 @@ import org.apache.spark.sql.catalyst.expressions.Attribute
 import org.apache.spark.sql.catalyst.plans.DescribeCommandSchema
 import org.apache.spark.sql.catalyst.plans.logical._
 import org.apache.spark.sql.catalyst.util.{escapeSingleQuotedString, quoteIdentifier, CaseInsensitiveMap, CharVarcharUtils}
+import org.apache.spark.sql.connector.catalog.CatalogV2Implicits.TableIdentifierHelper
 import org.apache.spark.sql.errors.{QueryCompilationErrors, QueryExecutionErrors}
 import org.apache.spark.sql.execution.datasources.DataSource
 import org.apache.spark.sql.execution.datasources.csv.CSVFileFormat
@@ -1104,12 +1105,12 @@ case class ShowCreateTableCommand(
     val builder = StringBuilder.newBuilder
 
     val stmt = if (tableMetadata.tableType == VIEW) {
-      builder ++= s"CREATE VIEW ${table.quotedString} "
+      builder ++= s"CREATE VIEW ${table.quoted} "
       showCreateView(metadata, builder)
 
       builder.toString()
     } else {
-      builder ++= s"CREATE TABLE ${table.quotedString} "
+      builder ++= s"CREATE TABLE ${table.quoted} "
       showCreateDataSourceTable(metadata, builder)
 
       builder.toString()
@@ -1247,7 +1248,7 @@ case class ShowCreateTableAsSerdeCommand(
         s"Unknown table type is found at showCreateHiveTable: $t")
     }
 
-    builder ++= s"CREATE$tableTypeString ${table.quotedString} "
+    builder ++= s"CREATE$tableTypeString ${table.quoted} "
 
     if (metadata.tableType == VIEW) {
       showCreateView(metadata, builder)
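Note on the hunk above: switching table.quotedString to the new table.quoted is what changes the rendered identifier from unconditional backticks to quote-if-needed, which in turn drives every golden-file update below. In spark-shell terms (assuming a Spark build containing this patch, so the implicit is available):

import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.connector.catalog.CatalogV2Implicits.TableIdentifierHelper

val id = TableIdentifier("tbl", Some("default"))
id.quotedString // "`default`.`tbl`" -- always quoted
id.quoted       // "default.tbl"     -- quoted only when a part needs it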
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ShowCreateTableExec.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ShowCreateTableExec.scala
index f21b9a5095a3..8b3ad9521648 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ShowCreateTableExec.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ShowCreateTableExec.scala
@@ -21,9 +21,11 @@ import scala.collection.JavaConverters._
 import scala.collection.mutable.ArrayBuffer
 
 import org.apache.spark.sql.catalyst.InternalRow
+import org.apache.spark.sql.catalyst.catalog.BucketSpec
 import org.apache.spark.sql.catalyst.expressions.Attribute
-import org.apache.spark.sql.catalyst.util.escapeSingleQuotedString
+import org.apache.spark.sql.catalyst.util.{escapeSingleQuotedString, CharVarcharUtils}
 import org.apache.spark.sql.connector.catalog.{CatalogV2Util, Table, TableCatalog}
+import org.apache.spark.sql.connector.expressions.BucketTransform
 import org.apache.spark.sql.execution.LeafExecNode
 import org.apache.spark.unsafe.types.UTF8String
 
@@ -57,7 +59,7 @@ case class ShowCreateTableExec(
   }
 
   private def showTableDataColumns(table: Table, builder: StringBuilder): Unit = {
-    val columns = table.schema().fields.map(_.toDDL)
+    val columns = CharVarcharUtils.getRawSchema(table.schema(), conf).fields.map(_.toDDL)
     builder ++= concatByMultiLines(columns)
   }
 
@@ -71,8 +73,9 @@ case class ShowCreateTableExec(
       builder: StringBuilder,
       tableOptions: Map[String, String]): Unit = {
     if (tableOptions.nonEmpty) {
-      val props = tableOptions.toSeq.sortBy(_._1).map { case (key, value) =>
-        s"'${escapeSingleQuotedString(key)}' = '${escapeSingleQuotedString(value)}'"
+      val props = conf.redactOptions(tableOptions).toSeq.sortBy(_._1).map {
+        case (key, value) =>
+          s"'${escapeSingleQuotedString(key)}' = '${escapeSingleQuotedString(value)}'"
       }
       builder ++= "OPTIONS "
       builder ++= concatByMultiLines(props)
@@ -82,8 +85,31 @@ case class ShowCreateTableExec(
   private def showTablePartitioning(table: Table, builder: StringBuilder): Unit = {
     if (!table.partitioning.isEmpty) {
       val transforms = new ArrayBuffer[String]
-      table.partitioning.foreach(t => transforms += t.describe())
-      builder ++= s"PARTITIONED BY ${transforms.mkString("(", ", ", ")")}\n"
+      var bucketSpec = Option.empty[BucketSpec]
+      table.partitioning.map {
+        case BucketTransform(numBuckets, col, sortCol) =>
+          if (sortCol.isEmpty) {
+            bucketSpec = Some(BucketSpec(numBuckets, col.map(_.fieldNames.mkString(".")), Nil))
+          } else {
+            bucketSpec = Some(BucketSpec(numBuckets, col.map(_.fieldNames.mkString(".")),
+              sortCol.map(_.fieldNames.mkString("."))))
+          }
+        case t =>
+          transforms += t.describe()
+      }
+      if (transforms.nonEmpty) {
+        builder ++= s"PARTITIONED BY ${transforms.mkString("(", ", ", ")")}\n"
+      }
+
+      // compatible with v1
+      bucketSpec.map { bucket =>
+        assert(bucket.bucketColumnNames.nonEmpty)
+        builder ++= s"CLUSTERED BY ${bucket.bucketColumnNames.mkString("(", ", ", ")")}\n"
+        if (bucket.sortColumnNames.nonEmpty) {
+          builder ++= s"SORTED BY ${bucket.sortColumnNames.mkString("(", ", ", ")")}\n"
+        }
+        builder ++= s"INTO ${bucket.numBuckets} BUCKETS\n"
+      }
     }
   }
 
@@ -98,11 +124,12 @@ case class ShowCreateTableExec(
       builder: StringBuilder,
       tableOptions: Map[String, String]): Unit = {
-
     val showProps = table.properties.asScala
       .filterKeys(key => !CatalogV2Util.TABLE_RESERVED_PROPERTIES.contains(key)
         && !key.startsWith(TableCatalog.OPTION_PREFIX)
-        && !tableOptions.contains(key))
+        && !tableOptions.contains(key)
+        && !key.equals(TableCatalog.PROP_EXTERNAL)
+      )
     if (showProps.nonEmpty) {
       val props = showProps.toSeq.sortBy(_._1).map {
         case (key, value) =>
@@ -123,5 +150,4 @@ case class ShowCreateTableExec(
   private def concatByMultiLines(iter: Iterable[String]): String = {
     iter.mkString("(\n  ", ",\n  ", ")\n")
   }
-
 }
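Note on the hunk above: bucket transforms are now split out of PARTITIONED BY, folded into a BucketSpec, and printed in the v1-compatible CLUSTERED BY / SORTED BY / INTO n BUCKETS form; char/varchar types survive via CharVarcharUtils.getRawSchema, and OPTIONS values pass through conf.redactOptions. Illustrative v2 output (assumes testcat is configured as a test v2 catalog and foo is its provider; neither ships with Spark):

spark.sql(
  """CREATE TABLE testcat.ns.tbl (a INT, b STRING, ts TIMESTAMP)
    |USING foo
    |PARTITIONED BY (a, bucket(16, b))""".stripMargin)
spark.sql("SHOW CREATE TABLE testcat.ns.tbl").show(truncate = false)
// CREATE TABLE testcat.ns.tbl (
//   `a` INT,
//   `b` STRING,
//   `ts` TIMESTAMP)
// USING foo
// PARTITIONED BY (a)
// CLUSTERED BY (b)
// INTO 16 BUCKETS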
diff --git a/sql/core/src/test/resources/sql-tests/results/charvarchar.sql.out b/sql/core/src/test/resources/sql-tests/results/charvarchar.sql.out
index fcd207cd1500..5c6b1a727705 100644
--- a/sql/core/src/test/resources/sql-tests/results/charvarchar.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/charvarchar.sql.out
@@ -51,7 +51,7 @@ show create table char_tbl
 -- !query schema
 struct<createtab_stmt:string>
 -- !query output
-CREATE TABLE `default`.`char_tbl` (
+CREATE TABLE default.char_tbl (
   `c` CHAR(5),
   `v` VARCHAR(6))
 USING parquet
@@ -70,7 +70,7 @@ show create table char_tbl2
 -- !query schema
 struct<createtab_stmt:string>
 -- !query output
-CREATE TABLE `default`.`char_tbl2` (
+CREATE TABLE default.char_tbl2 (
   `c` CHAR(5),
   `v` VARCHAR(6))
 USING parquet
@@ -161,7 +161,7 @@ show create table char_tbl3
 -- !query schema
 struct<createtab_stmt:string>
 -- !query output
-CREATE TABLE `default`.`char_tbl3` (
+CREATE TABLE default.char_tbl3 (
   `c` CHAR(5),
   `v` VARCHAR(6))
 USING parquet
@@ -218,7 +218,7 @@ show create table char_view
 -- !query schema
 struct<createtab_stmt:string>
 -- !query output
-CREATE VIEW `default`.`char_view` (
+CREATE VIEW default.char_view (
   `c`,
   `v`)
 AS select * from char_tbl
diff --git a/sql/core/src/test/resources/sql-tests/results/show-create-table.sql.out b/sql/core/src/test/resources/sql-tests/results/show-create-table.sql.out
index 49c27a2229c5..ffcbb73458aa 100644
--- a/sql/core/src/test/resources/sql-tests/results/show-create-table.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/show-create-table.sql.out
@@ -15,7 +15,7 @@ SHOW CREATE TABLE tbl
 -- !query schema
 struct<createtab_stmt:string>
 -- !query output
-CREATE TABLE `default`.`tbl` (
+CREATE TABLE default.tbl (
   `a` INT,
   `b` STRING,
   `c` INT)
@@ -44,7 +44,7 @@ SHOW CREATE TABLE tbl
 -- !query schema
 struct<createtab_stmt:string>
 -- !query output
-CREATE TABLE `default`.`tbl` (
+CREATE TABLE default.tbl (
   `a` INT,
   `b` STRING,
   `c` INT)
@@ -75,7 +75,7 @@ SHOW CREATE TABLE tbl
 -- !query schema
 struct<createtab_stmt:string>
 -- !query output
-CREATE TABLE `default`.`tbl` (
+CREATE TABLE default.tbl (
   `a` INT,
   `b` STRING,
   `c` INT)
@@ -105,7 +105,7 @@ SHOW CREATE TABLE tbl
 -- !query schema
 struct<createtab_stmt:string>
 -- !query output
-CREATE TABLE `default`.`tbl` (
+CREATE TABLE default.tbl (
   `a` INT,
   `b` STRING,
   `c` INT)
@@ -135,7 +135,7 @@ SHOW CREATE TABLE tbl
 -- !query schema
 struct<createtab_stmt:string>
 -- !query output
-CREATE TABLE `default`.`tbl` (
+CREATE TABLE default.tbl (
   `b` STRING,
   `c` INT,
   `a` INT)
@@ -165,7 +165,7 @@ SHOW CREATE TABLE tbl
 -- !query schema
 struct<createtab_stmt:string>
 -- !query output
-CREATE TABLE `default`.`tbl` (
+CREATE TABLE default.tbl (
   `a` INT,
   `b` STRING,
   `c` INT)
@@ -197,7 +197,7 @@ SHOW CREATE TABLE tbl
 -- !query schema
 struct<createtab_stmt:string>
 -- !query output
-CREATE TABLE `default`.`tbl` (
+CREATE TABLE default.tbl (
   `a` INT,
   `b` STRING,
   `c` INT)
@@ -227,7 +227,7 @@ SHOW CREATE TABLE tbl
 -- !query schema
 struct<createtab_stmt:string>
 -- !query output
-CREATE TABLE `default`.`tbl` (
+CREATE TABLE default.tbl (
   `a` INT,
   `b` STRING,
   `c` INT)
@@ -257,7 +257,7 @@ SHOW CREATE TABLE tbl
 -- !query schema
 struct<createtab_stmt:string>
 -- !query output
-CREATE TABLE `default`.`tbl` (
+CREATE TABLE default.tbl (
   `a` FLOAT,
   `b` DECIMAL(10,0),
   `c` DECIMAL(10,0),
@@ -295,7 +295,7 @@ SHOW CREATE TABLE view_SPARK_30302 AS SERDE
 -- !query schema
 struct<createtab_stmt:string>
 -- !query output
-CREATE VIEW `default`.`view_SPARK_30302` (
+CREATE VIEW default.view_SPARK_30302 (
   `aaa`,
   `bbb`)
 AS SELECT a, b FROM tbl
@@ -306,7 +306,7 @@ SHOW CREATE TABLE view_SPARK_30302
 -- !query schema
 struct<createtab_stmt:string>
 -- !query output
-CREATE VIEW `default`.`view_SPARK_30302` (
+CREATE VIEW default.view_SPARK_30302 (
   `aaa`,
   `bbb`)
 AS SELECT a, b FROM tbl
@@ -335,7 +335,7 @@ SHOW CREATE TABLE view_SPARK_30302 AS SERDE
 -- !query schema
 struct<createtab_stmt:string>
 -- !query output
-CREATE VIEW `default`.`view_SPARK_30302` (
+CREATE VIEW default.view_SPARK_30302 (
   `aaa` COMMENT 'comment with \'quoted text\' for aaa',
   `bbb`)
 COMMENT 'This is a comment with \'quoted text\' for view'
@@ -347,7 +347,7 @@ SHOW CREATE TABLE view_SPARK_30302
 -- !query schema
 struct<createtab_stmt:string>
 -- !query output
-CREATE VIEW `default`.`view_SPARK_30302` (
+CREATE VIEW default.view_SPARK_30302 (
   `aaa` COMMENT 'comment with \'quoted text\' for aaa',
   `bbb`)
 COMMENT 'This is a comment with \'quoted text\' for view'
@@ -377,7 +377,7 @@ SHOW CREATE TABLE view_SPARK_30302 AS SERDE
 -- !query schema
 struct<createtab_stmt:string>
 -- !query output
-CREATE VIEW `default`.`view_SPARK_30302` (
+CREATE VIEW default.view_SPARK_30302 (
   `aaa`,
   `bbb`)
 TBLPROPERTIES (
@@ -391,7 +391,7 @@ SHOW CREATE TABLE view_SPARK_30302
 -- !query schema
 struct<createtab_stmt:string>
 -- !query output
-CREATE VIEW `default`.`view_SPARK_30302` (
+CREATE VIEW default.view_SPARK_30302 (
   `aaa`,
   `bbb`)
 TBLPROPERTIES (
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
index 3e0627c50534..d9e3342240bc 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
@@ -2775,6 +2775,7 @@ class DataSourceV2SQLSuite
     assert(properties.get(TableCatalog.PROP_COMMENT) == "This is a comment")
     assert(properties.get(TableCatalog.PROP_LOCATION) == "file:/tmp")
     assert(properties.containsKey(TableCatalog.PROP_OWNER))
+    assert(properties.get(TableCatalog.PROP_EXTERNAL) == "true")
     assert(properties.get(s"${TableCatalog.OPTION_PREFIX}from") == "0")
     assert(properties.get(s"${TableCatalog.OPTION_PREFIX}to") == "1")
     assert(properties.get("prop1") == "1")
AS a" assert(getShowCreateDDL(formattedViewName(viewName), serde) == expected) } } @@ -622,8 +622,8 @@ class PersistedViewTestSuite extends SQLViewTestSuite with SharedSparkSession { Seq(true, false).foreach { serde => withView(viewName) { createView(viewName, "SELECT 1 AS a, 2 AS b", Seq("a", "b COMMENT 'b column'")) - val expected = "CREATE VIEW `default`.`v1` ( `a`, `b` COMMENT 'b column')" + - " AS SELECT 1 AS a, 2 AS b" + val expected = s"CREATE VIEW ${formattedViewName(viewName)}" + + s" ( `a`, `b` COMMENT 'b column') AS SELECT 1 AS a, 2 AS b" assert(getShowCreateDDL(formattedViewName(viewName), serde) == expected) } } @@ -636,7 +636,7 @@ class PersistedViewTestSuite extends SQLViewTestSuite with SharedSparkSession { createView(viewName, "SELECT 1 AS c1, '2' AS c2", Seq("c1 COMMENT 'bla'", "c2"), Seq("COMMENT 'table comment'", "TBLPROPERTIES ( 'prop1' = 'value1', 'prop2' = 'value2')")) - val expected = "CREATE VIEW `default`.`v1` ( `c1` COMMENT 'bla', `c2`)" + + val expected = s"CREATE VIEW ${formattedViewName(viewName)} ( `c1` COMMENT 'bla', `c2`)" + " COMMENT 'table comment'" + " TBLPROPERTIES ( 'prop1' = 'value1', 'prop2' = 'value2')" + " AS SELECT 1 AS c1, '2' AS c2" diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowCreateTableSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowCreateTableSuite.scala index 208ed4c08afc..023dfce3ba9c 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowCreateTableSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowCreateTableSuite.scala @@ -31,7 +31,7 @@ import org.apache.spark.sql.execution.command */ trait ShowCreateTableSuiteBase extends command.ShowCreateTableSuiteBase with command.TestsV1AndV2Commands { - override def fullName: String = s"`$ns`.`$table`" + override def fullName: String = s"$ns.$table" test("show create table[simple]") { // todo After SPARK-37517 unify the testcase both v1 and v2 @@ -81,6 +81,21 @@ trait ShowCreateTableSuiteBase extends command.ShowCreateTableSuiteBase } test("bucketed data source table") { + withNamespaceAndTable(ns, table) { t => + sql( + s"""CREATE TABLE $t + |USING json + |CLUSTERED BY (a) INTO 2 BUCKETS + |AS SELECT 1 AS a, "foo" AS b + """.stripMargin + ) + val expected = s"CREATE TABLE $fullName ( `a` INT, `b` STRING) USING json" + + s" CLUSTERED BY (a) INTO 2 BUCKETS" + assert(getShowCreateDDL(t).mkString(" ") == expected) + } + } + + test("sort bucketed data source table") { withNamespaceAndTable(ns, table) { t => sql( s"""CREATE TABLE $t @@ -96,6 +111,22 @@ trait ShowCreateTableSuiteBase extends command.ShowCreateTableSuiteBase } test("partitioned bucketed data source table") { + withNamespaceAndTable(ns, table) { t => + sql( + s"""CREATE TABLE $t + |USING json + |PARTITIONED BY (c) + |CLUSTERED BY (a) INTO 2 BUCKETS + |AS SELECT 1 AS a, "foo" AS b, 2.5 AS c + """.stripMargin + ) + val expected = s"CREATE TABLE $fullName ( `a` INT, `b` STRING, `c` DECIMAL(2,1)) USING json" + + s" PARTITIONED BY (c) CLUSTERED BY (a) INTO 2 BUCKETS" + assert(getShowCreateDDL(t).mkString(" ") == expected) + } + } + + test("partitioned sort bucketed data source table") { withNamespaceAndTable(ns, table) { t => sql( s"""CREATE TABLE $t diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/ShowCreateTableSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/ShowCreateTableSuite.scala index 35b196fe0d8b..47e59e965509 100644 --- 
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/ShowCreateTableSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/ShowCreateTableSuite.scala
index 35b196fe0d8b..47e59e965509 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/ShowCreateTableSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/ShowCreateTableSuite.scala
@@ -132,7 +132,9 @@ class ShowCreateTableSuite extends command.ShowCreateTableSuiteBase with Command
       "`b` STRING,",
       "`ts` TIMESTAMP)",
       defaultUsing,
-      "PARTITIONED BY (a, bucket(16, b), years(ts), months(ts), days(ts), hours(ts))"
+      "PARTITIONED BY (a, years(ts), months(ts), days(ts), hours(ts))",
+      "CLUSTERED BY (b)",
+      "INTO 16 BUCKETS"
     ))
   }
 }
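Closing note: the conf.redactOptions call added in ShowCreateTableExec means the v2 path now masks sensitive OPTIONS values the way the v1 command already did. A sketch of the effect (the catalog, provider, and option keys are examples; which keys get masked is governed by spark.redaction.regex and spark.sql.redaction.options.regex, and the replacement text below is Spark's usual redaction marker):

spark.sql(
  """CREATE TABLE testcat.ns.jdbc_tbl (a INT)
    |USING foo
    |OPTIONS ('url' 'jdbc:postgresql://host/db?password=pw', 'from' '0')""".stripMargin)
spark.sql("SHOW CREATE TABLE testcat.ns.jdbc_tbl").show(truncate = false)
// ...
// OPTIONS (
//   'from' = '0',
//   'url' = '*********(redacted)')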