Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
26 changes: 12 additions & 14 deletions core/src/main/scala/org/apache/spark/rdd/HadoopRDD.scala
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,6 @@ import java.text.SimpleDateFormat
import java.util.Date

import scala.collection.immutable.Map
import scala.collection.mutable.ListBuffer
import scala.reflect.ClassTag

import org.apache.hadoop.conf.{Configurable, Configuration}
Expand Down Expand Up @@ -317,7 +316,7 @@ class HadoopRDD[K, V](
try {
val lsplit = c.inputSplitWithLocationInfo.cast(hsplit)
val infos = c.getLocationInfo.invoke(lsplit).asInstanceOf[Array[AnyRef]]
Some(HadoopRDD.convertSplitLocationInfo(infos))
HadoopRDD.convertSplitLocationInfo(infos)
} catch {
case e: Exception =>
logDebug("Failed to use InputSplitWithLocations.", e)
Expand Down Expand Up @@ -419,21 +418,20 @@ private[spark] object HadoopRDD extends Logging {
None
}

/**
 * Converts Hadoop split-location objects (obtained reflectively via
 * `InputSplitWithLocationInfo#getLocationInfo`) into Spark task-location strings.
 *
 * @param infos array of Hadoop `SplitLocationInfo` objects; may be null
 * @return `None` when `infos` is null; otherwise `Some` sequence containing one
 *         task-location string per non-"localhost" entry — an
 *         `HDFSCacheTaskLocation` when Hadoop reports the split as cached in
 *         memory, a plain `HostTaskLocation` otherwise.
 */
private[spark] def convertSplitLocationInfo(infos: Array[AnyRef]): Option[Seq[String]] = {
  // Option(null) is None, so a null array from Hadoop yields None instead of an NPE.
  Option(infos).map(_.flatMap { loc =>
    // Resolve the reflection handles once per element rather than once per call site.
    val reflections = HadoopRDD.SPLIT_INFO_REFLECTIONS.get
    val locationStr = reflections.getLocation.invoke(loc).asInstanceOf[String]
    if (locationStr != "localhost") {
      if (reflections.isInMemory.invoke(loc).asInstanceOf[Boolean]) {
        logDebug(s"Partition $locationStr is cached by Hadoop.")
        Some(HDFSCacheTaskLocation(locationStr).toString)
      } else {
        Some(HostTaskLocation(locationStr).toString)
      }
    } else {
      // "localhost" carries no useful scheduling information; drop the entry.
      None
    }
  })
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -255,7 +255,7 @@ class NewHadoopRDD[K, V](
case Some(c) =>
try {
val infos = c.newGetLocationInfo.invoke(split).asInstanceOf[Array[AnyRef]]
Some(HadoopRDD.convertSplitLocationInfo(infos))
HadoopRDD.convertSplitLocationInfo(infos)
} catch {
case e : Exception =>
logDebug("Failed to use InputSplit#getLocationInfo.", e)
Expand Down