diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
index e400975f1970..6d0d70007ba1 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
@@ -539,7 +539,7 @@ case class DescribeTableCommand(
         throw new AnalysisException(
           s"DESC PARTITION is not allowed on a temporary view: ${table.identifier}")
       }
-      describeSchema(catalog.lookupRelation(table).schema, result, header = false)
+      describeSchema(catalog.lookupRelation(table).schema, result, header = true)
     } else {
       val metadata = catalog.getTableMetadata(table)
       if (metadata.schema.isEmpty) {
@@ -547,7 +547,7 @@ case class DescribeTableCommand(
         // inferred at runtime. We should still support it.
         describeSchema(sparkSession.table(metadata.identifier).schema, result, header = false)
       } else {
-        describeSchema(metadata.schema, result, header = false)
+        describeSchema(metadata.schema, result, header = true)
       }
 
       describePartitionInfo(metadata, result)
diff --git a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala
index 832a15d09599..a177bf0d9533 100644
--- a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala
+++ b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala
@@ -381,7 +381,9 @@ private[hive] class SparkSQLCLIDriver extends CliDriver with Logging {
       try {
         while (!out.checkError() && driver.getResults(res)) {
           res.asScala.foreach { l =>
-            counter += 1
+            if (!l.startsWith("#")) {
+              counter += 1
+            }
             out.println(l)
           }
           res.clear()
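
Taken together, the two changes make `DESCRIBE` emit a header row (via `header = true` in `describeSchema`) while keeping the CLI's "Fetched N row(s)" count limited to data rows, since `SparkSQLCLIDriver` now skips lines beginning with "#" when incrementing the counter. The sketch below is only an illustration of that counting rule, not code from the patch; the object name `FetchedRowCounterSketch`, the `countDataRows` helper, and the sample DESCRIBE output lines are assumptions made for the example.

```scala
// Minimal sketch (not part of the patch): reproduces the CLI counting rule in
// isolation. The sample DESCRIBE output is hypothetical; the point is that
// lines beginning with "#" (the header) are printed but not counted.
object FetchedRowCounterSketch {
  // Mirrors the new condition in SparkSQLCLIDriver: only non-"#" lines count.
  def countDataRows(lines: Seq[String]): Int =
    lines.count(l => !l.startsWith("#"))

  def main(args: Array[String]): Unit = {
    val output = Seq(
      "# col_name\tdata_type\tcomment", // header row produced with header = true (assumed format)
      "id\tint\tnull",
      "name\tstring\tnull")
    output.foreach(println)
    println(s"Fetched ${countDataRows(output)} row(s)") // prints: Fetched 2 row(s)
  }
}
```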