diff --git a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLEnv.scala b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLEnv.scala
index 8980bcf88558..4ec1de10ae9e 100644
--- a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLEnv.scala
+++ b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLEnv.scala
@@ -19,7 +19,11 @@ package org.apache.spark.sql.hive.thriftserver
 
 import java.io.PrintStream
 
+import org.apache.hadoop.security.UserGroupInformation
+
 import org.apache.spark.{SparkConf, SparkContext}
+import org.apache.spark.deploy.SparkHadoopUtil
+import org.apache.spark.deploy.security.HadoopDelegationTokenManager
 import org.apache.spark.internal.Logging
 import org.apache.spark.sql.{SparkSession, SQLContext}
 import org.apache.spark.sql.hive.{HiveExternalCatalog, HiveUtils}
@@ -49,6 +53,18 @@ private[hive] object SparkSQLEnv extends Logging {
       sparkContext = sparkSession.sparkContext
       sqlContext = sparkSession.sqlContext
 
+      // When running as a Hadoop proxy user, eagerly obtain delegation tokens and
+      // attach them to the current UGI so secured services can be reached.
+      val currentUser = UserGroupInformation.getCurrentUser
+      if (SparkHadoopUtil.get.isProxyUser(currentUser)) {
+        logInfo("Add credentials from token for proxy user")
+        val hadoopConf = SparkHadoopUtil.get.newConfiguration(sparkConf)
+        val credentials = currentUser.getCredentials
+        val tokenManager = new HadoopDelegationTokenManager(sparkConf, hadoopConf)
+        tokenManager.obtainDelegationTokens(hadoopConf, credentials)
+        currentUser.addCredentials(credentials)
+      }
+
       val metadataHive = sparkSession
         .sharedState.externalCatalog.unwrapped.asInstanceOf[HiveExternalCatalog].client
       metadataHive.setOut(new PrintStream(System.out, true, "UTF-8"))