diff --git a/core/src/main/scala/org/apache/spark/rdd/RDD.scala b/core/src/main/scala/org/apache/spark/rdd/RDD.scala
index 54bdc3e7cbc7a..b6fc4b13ad4d7 100644
--- a/core/src/main/scala/org/apache/spark/rdd/RDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/RDD.scala
@@ -1180,7 +1180,7 @@ abstract class RDD[T: ClassTag](
   /** User code that created this RDD (e.g. `textFile`, `parallelize`). */
   @transient private[spark] val creationSiteInfo = Utils.getCallSiteInfo
 
-  private[spark] def getCreationSite: String = creationSiteInfo.toString
+  private[spark] def getCreationSite: String = Option(creationSiteInfo).getOrElse("").toString
 
   private[spark] def elementClassTag: ClassTag[T] = classTag[T]
 
diff --git a/core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala b/core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala
index 55af1666df662..2e2ccc5a1859e 100644
--- a/core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rdd/RDDSuite.scala
@@ -24,7 +24,7 @@ import org.scalatest.FunSuite
 
 import org.apache.spark._
 import org.apache.spark.SparkContext._
-import org.apache.spark.rdd._
+import org.apache.spark.util.Utils
 
 class RDDSuite extends FunSuite with SharedSparkContext {
 
@@ -66,6 +66,13 @@ class RDDSuite extends FunSuite with SharedSparkContext {
     }
   }
 
+  test("serialization") {
+    val empty = new EmptyRDD[Int](sc)
+    val serial = Utils.serialize(empty)
+    val deserial: EmptyRDD[Int] = Utils.deserialize(serial)
+    assert(!deserial.toString().isEmpty())
+  }
+
   test("countApproxDistinct") {
 
     def error(est: Long, size: Long) = math.abs(est - size) / size.toDouble
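
For context (not part of the patch): `creationSiteInfo` is `@transient`, so Java serialization skips it and it deserializes as `null`, which is why the unguarded `creationSiteInfo.toString` could throw a `NullPointerException` on a deserialized RDD; the new `RDDSuite` test exercises exactly that round trip. Below is a minimal, self-contained sketch of that behaviour using hypothetical names (`CallSiteHolder`, `TransientRoundTrip`) and plain `java.io` serialization rather than Spark classes.

```scala
import java.io.{ByteArrayInputStream, ByteArrayOutputStream, ObjectInputStream, ObjectOutputStream}

// Hypothetical stand-in for an RDD carrying a call-site field; not Spark code.
class CallSiteHolder(site: String) extends Serializable {
  // Like RDD.creationSiteInfo, this field is @transient, so Java serialization
  // skips it and it comes back as null after deserialization.
  @transient val creationSite: String = site

  // Unguarded access (the pre-patch shape): NullPointerException after a round trip.
  def unguarded: String = creationSite.toString

  // Guarded access, mirroring the patched getCreationSite.
  def guarded: String = Option(creationSite).getOrElse("").toString
}

object TransientRoundTrip {
  def main(args: Array[String]): Unit = {
    // Serialize to a byte array, then read the object back.
    val buffer = new ByteArrayOutputStream()
    val out = new ObjectOutputStream(buffer)
    out.writeObject(new CallSiteHolder("RDDSuite.scala:70"))
    out.close()

    val in = new ObjectInputStream(new ByteArrayInputStream(buffer.toByteArray))
    val copy = in.readObject().asInstanceOf[CallSiteHolder]

    println(copy.guarded) // prints an empty line: the transient field is null
    // copy.unguarded     // would throw NullPointerException
  }
}
```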