@@ -187,7 +187,7 @@ case class FileSourceScanExec(
partitionSchema = relation.partitionSchema,
relation.sparkSession.sessionState.conf)

-val driverMetrics: HashMap[String, Long] = HashMap.empty
+private lazy val driverMetrics: HashMap[String, Long] = HashMap.empty

/**
* Send the driver-side metrics. Before calling this function, selectedPartitions has
@@ -325,8 +325,7 @@ case class FileSourceScanExec(
}

@transient
-private val pushedDownFilters = dataFilters.flatMap(DataSourceStrategy.translateFilter)
-logInfo(s"Pushed Filters: ${pushedDownFilters.mkString(",")}")
+private lazy val pushedDownFilters = dataFilters.flatMap(DataSourceStrategy.translateFilter)

override lazy val metadata: Map[String, String] = {
def seqToString(seq: Seq[Any]) = seq.mkString("[", ", ", "]")
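The two hunks above turn `driverMetrics` and `pushedDownFilters` into `lazy val`s, so neither is computed when a `FileSourceScanExec` is merely constructed, and the filter translation no longer logs as a construction-time side effect. A minimal sketch of the `lazy val` semantics this relies on (a hypothetical `Scan` class, not Spark code):

```scala
object LazyValDemo {
  class Scan {
    // A lazy val body runs only on first access and is cached afterwards,
    // so side effects here no longer fire during construction.
    lazy val pushedDownFilters: Seq[String] = {
      println("translating filters") // runs once, on first access
      Seq("IsNotNull(a)", "GreaterThan(a,1)")
    }
  }

  def main(args: Array[String]): Unit = {
    val scan = new Scan             // prints nothing: body not yet evaluated
    println(scan.pushedDownFilters) // triggers evaluation, then prints the result
    println(scan.pushedDownFilters) // cached: the body is not re-run
  }
}
```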
@@ -177,6 +177,8 @@ object FileSourceStrategy extends Strategy with Logging {
// Partition keys are not available in the statistics of the files.
val dataFilters =
normalizedFiltersWithoutSubqueries.filter(_.references.intersect(partitionSet).isEmpty)
logInfo(s"Pushed Filters: " +
s"${dataFilters.flatMap(DataSourceStrategy.translateFilter).mkString(",")}")

// Predicates with both partition keys and attributes need to be evaluated after the scan.
val afterScanFilters = filterSet -- partitionKeyFilters.filter(_.references.nonEmpty)
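This hunk moves the "Pushed Filters" logging to planning time in `FileSourceStrategy`, where the data filters are translated and logged once, instead of logging whenever the scan node's field happened to be initialized. A hedged sketch of that flow, where `Expression`, `Filter`, and `translateFilter` are simplified stand-ins for Spark's Catalyst classes and `DataSourceStrategy.translateFilter`:

```scala
object StrategySketch {
  case class Expression(sql: String)
  case class Filter(repr: String)

  // Stand-in for DataSourceStrategy.translateFilter: not every expression
  // maps to a data-source Filter, hence the Option result.
  def translateFilter(e: Expression): Option[Filter] =
    if (e.sql.nonEmpty) Some(Filter(e.sql)) else None

  def main(args: Array[String]): Unit = {
    val dataFilters = Seq(Expression("a > 1"), Expression("b IS NOT NULL"))
    // Same shape as the added logInfo line: translate, drop untranslatable
    // filters via flatMap, and log the survivors comma-separated.
    println(s"Pushed Filters: ${dataFilters.flatMap(translateFilter).mkString(",")}")
  }
}
```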