@@ -19,6 +19,7 @@ package org.apache.spark.sql.hive
 
 import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.fs.{Path, PathFilter}
+import org.apache.hadoop.hive.conf.HiveConf
 import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants._
 import org.apache.hadoop.hive.ql.exec.Utilities
 import org.apache.hadoop.hive.ql.metadata.{Partition => HivePartition, Table => HiveTable}
@@ -52,7 +53,8 @@ private[hive]
 class HadoopTableReader(
     @transient attributes: Seq[Attribute],
     @transient relation: MetastoreRelation,
-    @transient sc: HiveContext)
+    @transient sc: HiveContext,
+    @transient hiveExtraConf: HiveConf)
   extends TableReader {
 
   // Choose the minimum number of splits. If mapred.map.tasks is set, then use that unless
@@ -63,7 +65,7 @@ class HadoopTableReader(
   // TODO: set aws s3 credentials.
 
   private val _broadcastedHiveConf =
-    sc.sparkContext.broadcast(new SerializableWritable(sc.hiveconf))
+    sc.sparkContext.broadcast(new SerializableWritable(hiveExtraConf))
 
   def broadcastedHiveConf = _broadcastedHiveConf
 
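Note on the `HadoopTableReader` change above: the reader ships its configuration to executors by broadcasting it, and because a Hadoop `Configuration` is `Writable` but not `java.io.Serializable`, it is wrapped in Spark's `SerializableWritable` first. With this patch the broadcast carries the scan-local `hiveExtraConf` instead of the shared `sc.hiveconf`. A minimal sketch of the wrap-and-broadcast pattern, assuming only a `SparkContext` and an already-built `HiveConf` (the names here are illustrative, not part of the patch):

```scala
import org.apache.hadoop.hive.conf.HiveConf
import org.apache.spark.{SerializableWritable, SparkContext}
import org.apache.spark.broadcast.Broadcast

// Wrap the Writable-but-not-Serializable conf so it can be broadcast to executors.
def broadcastConf(sc: SparkContext, conf: HiveConf): Broadcast[SerializableWritable[HiveConf]] =
  sc.broadcast(new SerializableWritable(conf))

// On the executor side the underlying conf is recovered with two .value calls:
//   val localConf: HiveConf = broadcasted.value.value
```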
@@ -64,8 +64,14 @@ case class HiveTableScan(
     BindReferences.bindReference(pred, relation.partitionKeys)
   }
 
+  // Create a local copy of the HiveConf, so that scan-specific modifications
+  // do not impact other queries.
+  @transient
+  private[this] val hiveExtraConf = new HiveConf(context.hiveconf)
+
   @transient
-  private[this] val hadoopReader = new HadoopTableReader(attributes, relation, context)
+  private[this] val hadoopReader =
+    new HadoopTableReader(attributes, relation, context, hiveExtraConf)
 
   private[this] def castFromString(value: String, dataType: DataType) = {
     Cast(Literal(value), dataType).eval(null)
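The `new HiveConf(context.hiveconf)` copy constructor in the hunk above is what provides the isolation described in the new comment: each `HiveTableScan` mutates its own copy, so per-scan settings never become visible through the shared `context.hiveconf`. A small sketch of that property, assuming nothing beyond the copy constructor already used in the patch (the key and values are made up for illustration):

```scala
import org.apache.hadoop.hive.conf.HiveConf

val sharedConf = new HiveConf()            // conf shared by the whole HiveContext
val scanConf   = new HiveConf(sharedConf)  // per-scan copy, same constructor as above

scanConf.set("example.scan.columns", "key,value")       // scan-specific modification
assert(sharedConf.get("example.scan.columns") == null)  // shared conf is unaffected
```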
@@ -97,7 +103,7 @@ case class HiveTableScan(
     hiveConf.set(serdeConstants.LIST_COLUMNS, relation.attributes.map(_.name).mkString(","))
   }
 
-  addColumnMetadataToConf(context.hiveconf)
+  addColumnMetadataToConf(hiveExtraConf)
 
   /**
    * Prunes partitions not involve the query plan.
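The last hunk redirects `addColumnMetadataToConf` at the scan-local conf. As the `LIST_COLUMNS` context line shows, that method writes the scan's column metadata into the conf for the SerDe and record reader to use; before this patch those writes went into the shared `context.hiveconf`. A hedged sketch of that style of per-scan mutation (the helper name and argument are illustrative, not the patch's exact code):

```scala
import org.apache.hadoop.hive.conf.HiveConf
import org.apache.hadoop.hive.serde.serdeConstants

// Record which columns this scan needs, on the scan-local conf only.
def addColumnMetadata(scanConf: HiveConf, columnNames: Seq[String]): Unit =
  scanConf.set(serdeConstants.LIST_COLUMNS, columnNames.mkString(","))
```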