@@ -50,7 +50,7 @@ class CartesianRDD[T: ClassTag, U: ClassTag](
     sc: SparkContext,
     var rdd1 : RDD[T],
     var rdd2 : RDD[U])
-  extends RDD[Pair[T, U]](sc, Nil)
+  extends RDD[(T, U)](sc, Nil)
   with Serializable {
 
   val numPartitionsInRdd2 = rdd2.partitions.length
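For context on the CartesianRDD change: scala.Pair was just a (later deprecated and removed) alias for Tuple2, and (T, U) is the built-in syntax for the same type, so the new signature is equivalent and simply uses the idiomatic spelling. A minimal stand-alone sketch of that equivalence (not part of the patch):

// (Int, String) is syntactic sugar for Tuple2[Int, String]; the two annotations below
// name the same type, just as RDD[(T, U)] and RDD[Pair[T, U]] did.
object TupleSyntaxSketch {
  def main(args: Array[String]): Unit = {
    val explicit: Tuple2[Int, String] = Tuple2(1, "a")
    val sugared: (Int, String) = explicit    // assignable because the types are identical
    println(sugared._1 + " -> " + sugared._2)
  }
}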
core/src/main/scala/org/apache/spark/rdd/RDD.scala (1 addition, 1 deletion)
@@ -95,7 +95,7 @@ abstract class RDD[T: ClassTag](
 
   /** Construct an RDD with just a one-to-one dependency on one parent */
   def this(@transient oneParent: RDD[_]) =
-    this(oneParent.context , List(new OneToOneDependency(oneParent)))
+    this(oneParent.context, List(new OneToOneDependency(oneParent)))
 
   private[spark] def conf = sc.conf
   // =======================================================================
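The constructor touched above is the one-parent convenience form: a subclass with a single parent RDD passes just that parent and the base class registers a OneToOneDependency on it. A rough sketch of how such a subclass typically uses it (the class and its bodies are illustrative, not from Spark):

import org.apache.spark.{Partition, TaskContext}
import org.apache.spark.rdd.RDD

// Hypothetical subclass: "extends RDD[Int](parent)" invokes the one-parent auxiliary
// constructor, which wires up new OneToOneDependency(parent) for us.
class DoubledRDD(parent: RDD[Int]) extends RDD[Int](parent) {
  override def compute(split: Partition, context: TaskContext): Iterator[Int] =
    parent.iterator(split, context).map(_ * 2)   // one-to-one: read the matching parent partition
  override protected def getPartitions: Array[Partition] = parent.partitions
}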
@@ -115,7 +115,7 @@ class StageData private[spark](
     val status: StageStatus,
     val stageId: Int,
     val attemptId: Int,
-    val numActiveTasks: Int ,
+    val numActiveTasks: Int,
     val numCompleteTasks: Int,
     val numFailedTasks: Int,
 
@@ -178,7 +178,7 @@ class DoubleRDDSuite extends SparkFunSuite with SharedSparkContext {
   test("WorksWithOutOfRangeWithInfiniteBuckets") {
     // Verify that out of range works with two buckets
     val rdd = sc.parallelize(Seq(10.01, -0.01, Double.NaN))
-    val buckets = Array(-1.0/0.0 , 0.0, 1.0/0.0)
+    val buckets = Array(-1.0/0.0, 0.0, 1.0/0.0)
     val histogramResults = rdd.histogram(buckets)
     val expectedHistogramResults = Array(1, 1)
     assert(histogramResults === expectedHistogramResults)
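A side note on this test: -1.0/0.0 and 1.0/0.0 evaluate to Double.NegativeInfinity and Double.PositiveInfinity, so the two buckets cover [-Inf, 0.0) and [0.0, +Inf], and the NaN element matches neither, which is why the expected counts are (1, 1). A tiny stand-alone sketch of those Double facts (illustrative only):

object InfiniteBucketSketch {
  def main(args: Array[String]): Unit = {
    assert(-1.0 / 0.0 == Double.NegativeInfinity)        // the "infinite" lower bound
    assert(1.0 / 0.0 == Double.PositiveInfinity)         // the "infinite" upper bound
    assert(!(Double.NaN < 0.0) && !(Double.NaN >= 0.0))  // NaN falls into neither bucket
  }
}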
@@ -76,7 +76,7 @@ class MesosSchedulerBackendSuite extends SparkFunSuite with LocalSparkContext wi
 
   test("check spark-class location correctly") {
     val conf = new SparkConf
-    conf.set("spark.mesos.executor.home" , "/mesos-home")
+    conf.set("spark.mesos.executor.home", "/mesos-home")
 
     val listenerBus = mock[LiveListenerBus]
     listenerBus.post(
scalastyle-config.xml (1 addition, 1 deletion)
@@ -220,7 +220,7 @@ This file is divided into 3 sections:
   <!-- Should turn this on, but we have a few places that need to be fixed first -->
   <check level="warning" class="org.scalastyle.scalariform.DisallowSpaceBeforeTokenChecker" enabled="true">
     <parameters>
-      <parameter name="tokens">COLON, COMMA</parameter>
+      <parameter name="tokens">COMMA</parameter>
     </parameters>
   </check>
 
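To make the rule change concrete: with COMMA in its token list, scalastyle's DisallowSpaceBeforeTokenChecker warns whenever a comma is preceded by whitespace, which is exactly the pattern cleaned up in the files above. A small illustrative sketch (not part of the patch):

object CommaStyleSketch {
  // val flagged = Seq(1 , 2, 3)   // space before the comma: triggers the warning
  val compliant = Seq(1, 2, 3)     // no space before commas: passes the check
}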