diff --git a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkExecuteStatementOperation.scala b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkExecuteStatementOperation.scala
index ce3cbc3a2fc4..ad3e10ed2aad 100644
--- a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkExecuteStatementOperation.scala
+++ b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkExecuteStatementOperation.scala
@@ -26,6 +26,7 @@ import scala.collection.JavaConverters._
 import scala.collection.mutable.ArrayBuffer
 import scala.util.control.NonFatal
 
+import org.apache.commons.lang3.exception.ExceptionUtils
 import org.apache.hadoop.hive.metastore.api.FieldSchema
 import org.apache.hadoop.hive.shims.Utils
 import org.apache.hive.service.cli._
@@ -281,12 +282,16 @@
       } else {
         logError(s"Error executing query with $statementId, currentState $currentState, ", e)
         setState(OperationState.ERROR)
-        HiveThriftServer2.listener.onStatementError(
-          statementId, e.getMessage, SparkUtils.exceptionString(e))
-        if (e.isInstanceOf[HiveSQLException]) {
-          throw e.asInstanceOf[HiveSQLException]
-        } else {
-          throw new HiveSQLException("Error running query: " + e.toString, e)
+        e match {
+          case hiveException: HiveSQLException =>
+            HiveThriftServer2.listener.onStatementError(
+              statementId, hiveException.getMessage, SparkUtils.exceptionString(hiveException))
+            throw hiveException
+          case _ =>
+            val root = ExceptionUtils.getRootCause(e)
+            HiveThriftServer2.listener.onStatementError(
+              statementId, root.getMessage, SparkUtils.exceptionString(root))
+            throw new HiveSQLException("Error running query: " + root.toString, root)
         }
       }
     } finally {
diff --git a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkGetCatalogsOperation.scala b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkGetCatalogsOperation.scala
index cde99fd35bd5..6c8a5b00992d 100644
--- a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkGetCatalogsOperation.scala
+++ b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkGetCatalogsOperation.scala
@@ -19,6 +19,7 @@ package org.apache.spark.sql.hive.thriftserver
 
 import java.util.UUID
 
+import org.apache.commons.lang3.exception.ExceptionUtils
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType
 import org.apache.hive.service.cli.{HiveSQLException, OperationState}
 import org.apache.hive.service.cli.operation.GetCatalogsOperation
@@ -68,11 +69,20 @@ private[hive] class SparkGetCatalogsOperation(
       }
       setState(OperationState.FINISHED)
     } catch {
-      case e: HiveSQLException =>
+      case e: Throwable =>
+        logError(s"Error executing get catalogs operation with $statementId", e)
         setState(OperationState.ERROR)
-        HiveThriftServer2.listener.onStatementError(
-          statementId, e.getMessage, SparkUtils.exceptionString(e))
-        throw e
+        e match {
+          case hiveException: HiveSQLException =>
+            HiveThriftServer2.listener.onStatementError(
+              statementId, hiveException.getMessage, SparkUtils.exceptionString(hiveException))
+            throw hiveException
+          case _ =>
+            val root = ExceptionUtils.getRootCause(e)
+            HiveThriftServer2.listener.onStatementError(
+              statementId, root.getMessage, SparkUtils.exceptionString(root))
+            throw new HiveSQLException("Error getting catalogs: " + root.toString, root)
+        }
     }
     HiveThriftServer2.listener.onStatementFinish(statementId)
   }
diff --git a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkGetColumnsOperation.scala b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkGetColumnsOperation.scala
index 89faff2f6f91..f845a2285b9a 100644
--- a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkGetColumnsOperation.scala
+++ b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkGetColumnsOperation.scala
@@ -22,6 +22,7 @@ import java.util.regex.Pattern
 
 import scala.collection.JavaConverters.seqAsJavaListConverter
 
+import org.apache.commons.lang3.exception.ExceptionUtils
 import org.apache.hadoop.hive.ql.security.authorization.plugin.{HiveOperationType, HivePrivilegeObject}
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType
 import org.apache.hive.service.cli._
@@ -129,11 +130,20 @@ private[hive] class SparkGetColumnsOperation(
       }
       setState(OperationState.FINISHED)
     } catch {
-      case e: HiveSQLException =>
+      case e: Throwable =>
+        logError(s"Error executing get columns operation with $statementId", e)
         setState(OperationState.ERROR)
-        HiveThriftServer2.listener.onStatementError(
-          statementId, e.getMessage, SparkUtils.exceptionString(e))
-        throw e
+        e match {
+          case hiveException: HiveSQLException =>
+            HiveThriftServer2.listener.onStatementError(
+              statementId, hiveException.getMessage, SparkUtils.exceptionString(hiveException))
+            throw hiveException
+          case _ =>
+            val root = ExceptionUtils.getRootCause(e)
+            HiveThriftServer2.listener.onStatementError(
+              statementId, root.getMessage, SparkUtils.exceptionString(root))
+            throw new HiveSQLException("Error getting columns: " + root.toString, root)
+        }
     }
     HiveThriftServer2.listener.onStatementFinish(statementId)
   }
diff --git a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkGetFunctionsOperation.scala b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkGetFunctionsOperation.scala
index 462e57300e82..1cdd8918421b 100644
--- a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkGetFunctionsOperation.scala
+++ b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkGetFunctionsOperation.scala
@@ -22,6 +22,7 @@ import java.util.UUID
 
 import scala.collection.JavaConverters.seqAsJavaListConverter
 
+import org.apache.commons.lang3.exception.ExceptionUtils
 import org.apache.hadoop.hive.ql.security.authorization.plugin.{HiveOperationType, HivePrivilegeObjectUtils}
 import org.apache.hive.service.cli._
 import org.apache.hive.service.cli.operation.GetFunctionsOperation
@@ -104,11 +105,20 @@ private[hive] class SparkGetFunctionsOperation(
       }
       setState(OperationState.FINISHED)
     } catch {
-      case e: HiveSQLException =>
+      case e: Throwable =>
+        logError(s"Error executing get functions operation with $statementId", e)
         setState(OperationState.ERROR)
-        HiveThriftServer2.listener.onStatementError(
-          statementId, e.getMessage, SparkUtils.exceptionString(e))
-        throw e
+        e match {
+          case hiveException: HiveSQLException =>
+            HiveThriftServer2.listener.onStatementError(
+              statementId, hiveException.getMessage, SparkUtils.exceptionString(hiveException))
+            throw hiveException
+          case _ =>
+            val root = ExceptionUtils.getRootCause(e)
+            HiveThriftServer2.listener.onStatementError(
+              statementId, root.getMessage, SparkUtils.exceptionString(root))
+            throw new HiveSQLException("Error getting functions: " + root.toString, root)
+        }
     }
     HiveThriftServer2.listener.onStatementFinish(statementId)
   }
diff --git a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkGetSchemasOperation.scala b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkGetSchemasOperation.scala
index 87ef154bcc8a..928610a6bcff 100644
--- a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkGetSchemasOperation.scala
+++ b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkGetSchemasOperation.scala
@@ -20,6 +20,7 @@ package org.apache.spark.sql.hive.thriftserver
 import java.util.UUID
 import java.util.regex.Pattern
 
+import org.apache.commons.lang3.exception.ExceptionUtils
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType
 import org.apache.hive.service.cli._
 import org.apache.hive.service.cli.operation.GetSchemasOperation
@@ -87,11 +88,20 @@ private[hive] class SparkGetSchemasOperation(
       }
       setState(OperationState.FINISHED)
     } catch {
-      case e: HiveSQLException =>
+      case e: Throwable =>
+        logError(s"Error executing get schemas operation with $statementId", e)
         setState(OperationState.ERROR)
-        HiveThriftServer2.listener.onStatementError(
-          statementId, e.getMessage, SparkUtils.exceptionString(e))
-        throw e
+        e match {
+          case hiveException: HiveSQLException =>
+            HiveThriftServer2.listener.onStatementError(
+              statementId, hiveException.getMessage, SparkUtils.exceptionString(hiveException))
+            throw hiveException
+          case _ =>
+            val root = ExceptionUtils.getRootCause(e)
+            HiveThriftServer2.listener.onStatementError(
+              statementId, root.getMessage, SparkUtils.exceptionString(root))
+            throw new HiveSQLException("Error getting schemas: " + root.toString, root)
+        }
     }
     HiveThriftServer2.listener.onStatementFinish(statementId)
   }
diff --git a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkGetTableTypesOperation.scala b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkGetTableTypesOperation.scala
index 8f2257f77d2a..ec03f1e148e6 100644
--- a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkGetTableTypesOperation.scala
+++ b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkGetTableTypesOperation.scala
@@ -19,6 +19,7 @@ package org.apache.spark.sql.hive.thriftserver
 
 import java.util.UUID
 
+import org.apache.commons.lang3.exception.ExceptionUtils
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType
 import org.apache.hive.service.cli._
 import org.apache.hive.service.cli.operation.GetTableTypesOperation
@@ -74,11 +75,20 @@ private[hive] class SparkGetTableTypesOperation(
       }
       setState(OperationState.FINISHED)
     } catch {
-      case e: HiveSQLException =>
+      case e: Throwable =>
+        logError(s"Error executing get table types operation with $statementId", e)
         setState(OperationState.ERROR)
-        HiveThriftServer2.listener.onStatementError(
-          statementId, e.getMessage, SparkUtils.exceptionString(e))
-        throw e
+        e match {
+          case hiveException: HiveSQLException =>
+            HiveThriftServer2.listener.onStatementError(
+              statementId, hiveException.getMessage, SparkUtils.exceptionString(hiveException))
+            throw hiveException
+          case _ =>
+            val root = ExceptionUtils.getRootCause(e)
+            HiveThriftServer2.listener.onStatementError(
+              statementId, root.getMessage, SparkUtils.exceptionString(root))
+            throw new HiveSQLException("Error getting table types: " + root.toString, root)
+        }
     }
     HiveThriftServer2.listener.onStatementFinish(statementId)
   }
diff --git a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkGetTablesOperation.scala b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkGetTablesOperation.scala
index 6441dc50f49f..bf9cf7ad46d9 100644
--- a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkGetTablesOperation.scala
+++ b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkGetTablesOperation.scala
@@ -22,6 +22,7 @@ import java.util.regex.Pattern
 
 import scala.collection.JavaConverters._
 
+import org.apache.commons.lang3.exception.ExceptionUtils
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObjectUtils
 import org.apache.hive.service.cli._
@@ -30,7 +31,6 @@ import org.apache.hive.service.cli.session.HiveSession
 
 import org.apache.spark.internal.Logging
 import org.apache.spark.sql.SQLContext
-import org.apache.spark.sql.catalyst.catalog.CatalogTableType
 import org.apache.spark.sql.catalyst.catalog.CatalogTableType._
 import org.apache.spark.sql.hive.HiveUtils
 import org.apache.spark.util.{Utils => SparkUtils}
@@ -119,11 +119,20 @@ private[hive] class SparkGetTablesOperation(
       }
       setState(OperationState.FINISHED)
     } catch {
-      case e: HiveSQLException =>
+      case e: Throwable =>
+        logError(s"Error executing get tables operation with $statementId", e)
         setState(OperationState.ERROR)
-        HiveThriftServer2.listener.onStatementError(
-          statementId, e.getMessage, SparkUtils.exceptionString(e))
-        throw e
+        e match {
+          case hiveException: HiveSQLException =>
+            HiveThriftServer2.listener.onStatementError(
+              statementId, hiveException.getMessage, SparkUtils.exceptionString(hiveException))
+            throw hiveException
+          case _ =>
+            val root = ExceptionUtils.getRootCause(e)
+            HiveThriftServer2.listener.onStatementError(
+              statementId, root.getMessage, SparkUtils.exceptionString(root))
+            throw new HiveSQLException("Error getting tables: " + root.toString, root)
+        }
     }
     HiveThriftServer2.listener.onStatementFinish(statementId)
   }
diff --git a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkGetTypeInfoOperation.scala b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkGetTypeInfoOperation.scala
index 7a6a8c59b721..0d263b09d57d 100644
--- a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkGetTypeInfoOperation.scala
+++ b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkGetTypeInfoOperation.scala
@@ -19,6 +19,7 @@ package org.apache.spark.sql.hive.thriftserver
 
 import java.util.UUID
 
+import org.apache.commons.lang3.exception.ExceptionUtils
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType
 import org.apache.hive.service.cli.{HiveSQLException, OperationState}
 import org.apache.hive.service.cli.operation.GetTypeInfoOperation
@@ -92,11 +93,20 @@ private[hive] class SparkGetTypeInfoOperation(
       })
       setState(OperationState.FINISHED)
     } catch {
-      case e: HiveSQLException =>
+      case e: Throwable =>
+        logError(s"Error executing get type info with $statementId", e)
         setState(OperationState.ERROR)
-        HiveThriftServer2.listener.onStatementError(
-          statementId, e.getMessage, SparkUtils.exceptionString(e))
-        throw e
+        e match {
+          case hiveException: HiveSQLException =>
+            HiveThriftServer2.listener.onStatementError(
+              statementId, hiveException.getMessage, SparkUtils.exceptionString(hiveException))
+            throw hiveException
+          case _ =>
+            val root = ExceptionUtils.getRootCause(e)
+            HiveThriftServer2.listener.onStatementError(
+              statementId, root.getMessage, SparkUtils.exceptionString(root))
+            throw new HiveSQLException("Error getting type info: " + root.toString, root)
+        }
     }
     HiveThriftServer2.listener.onStatementFinish(statementId)
   }