From 5699c0dc2810a4500f0ee34414b77b80afd0e9c1 Mon Sep 17 00:00:00 2001
From: guoxiaolong
Date: Fri, 9 Feb 2018 14:00:40 +0800
Subject: [PATCH 1/2] [SPARK-23364][SQL] Add a column header to the output of the 'desc table' command in spark-sql

---
 .../scala/org/apache/spark/sql/execution/command/tables.scala | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
index e400975f1970..6d0d70007ba1 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
@@ -539,7 +539,7 @@ case class DescribeTableCommand(
         throw new AnalysisException(
           s"DESC PARTITION is not allowed on a temporary view: ${table.identifier}")
       }
-      describeSchema(catalog.lookupRelation(table).schema, result, header = false)
+      describeSchema(catalog.lookupRelation(table).schema, result, header = true)
     } else {
       val metadata = catalog.getTableMetadata(table)
       if (metadata.schema.isEmpty) {
@@ -547,7 +547,7 @@ case class DescribeTableCommand(
         // inferred at runtime. We should still support it.
         describeSchema(sparkSession.table(metadata.identifier).schema, result, header = false)
       } else {
-        describeSchema(metadata.schema, result, header = false)
+        describeSchema(metadata.schema, result, header = true)
       }

       describePartitionInfo(metadata, result)

From cf38ca33f78ade3a25bbf865529cf107f9ffb922 Mon Sep 17 00:00:00 2001
From: guoxiaolong
Date: Sun, 11 Feb 2018 11:51:29 +0800
Subject: [PATCH 2/2] Display 'Fetched * row(s)' correctly by not counting header lines

---
 .../spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala
index 832a15d09599..a177bf0d9533 100644
--- a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala
+++ b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala
@@ -381,7 +381,9 @@ private[hive] class SparkSQLCLIDriver extends CliDriver with Logging {
       try {
         while (!out.checkError() && driver.getResults(res)) {
           res.asScala.foreach { l =>
-            counter += 1
+            if (!l.startsWith("#")) {
+              counter += 1
+            }
             out.println(l)
           }
           res.clear()
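
Note: the counting change in patch 2/2 can be illustrated in isolation. The standalone sketch below is not part of the patch; the object and method names are made up for illustration. It assumes, as the diff does, that the header row added by patch 1/2 is the only output line beginning with "#", so skipping such lines keeps the "Fetched * row(s)" message equal to the number of data rows.

    // Standalone sketch (not part of the patch): count only result rows,
    // skipping header lines that 'desc table' now emits with a leading "#".
    object FetchedCounterSketch {
      def countFetchedRows(lines: Seq[String]): Int =
        lines.count(l => !l.startsWith("#"))

      def main(args: Array[String]): Unit = {
        val output = Seq(
          "# col_name\tdata_type\tcomment", // header row added by patch 1/2
          "id\tint\tNULL",
          "name\tstring\tNULL")
        output.foreach(println)
        // Prints "Fetched 2 row(s)": the header line is shown but not counted.
        println(s"Fetched ${countFetchedRows(output)} row(s)")
      }
    }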