@@ -83,7 +83,7 @@ class PairRDDFunctions[K, V](self: RDD[(K, V)])
         throw new SparkException("Cannot use map-side combining with array keys.")
       }
       if (partitioner.isInstanceOf[HashPartitioner]) {
-        throw new SparkException("Default partitioner cannot partition array keys.")
+        throw new SparkException("Specified or default partitioner cannot partition array keys.")
       }
     }
     val aggregator = new Aggregator[K, V, C](
@@ -784,7 +784,7 @@ class PairRDDFunctions[K, V](self: RDD[(K, V)])
       partitioner: Partitioner)
       : RDD[(K, (Iterable[V], Iterable[W1], Iterable[W2], Iterable[W3]))] = self.withScope {
     if (partitioner.isInstanceOf[HashPartitioner] && keyClass.isArray) {
-      throw new SparkException("Default partitioner cannot partition array keys.")
+      throw new SparkException("Specified or default partitioner cannot partition array keys.")
     }
     val cg = new CoGroupedRDD[K](Seq(self, other1, other2, other3), partitioner)
     cg.mapValues { case Array(vs, w1s, w2s, w3s) =>
@@ -802,7 +802,7 @@ class PairRDDFunctions[K, V](self: RDD[(K, V)])
   def cogroup[W](other: RDD[(K, W)], partitioner: Partitioner)
       : RDD[(K, (Iterable[V], Iterable[W]))] = self.withScope {
     if (partitioner.isInstanceOf[HashPartitioner] && keyClass.isArray) {
-      throw new SparkException("Default partitioner cannot partition array keys.")
+      throw new SparkException("Specified or default partitioner cannot partition array keys.")
     }
     val cg = new CoGroupedRDD[K](Seq(self, other), partitioner)
     cg.mapValues { case Array(vs, w1s) =>
@@ -817,7 +817,7 @@ class PairRDDFunctions[K, V](self: RDD[(K, V)])
   def cogroup[W1, W2](other1: RDD[(K, W1)], other2: RDD[(K, W2)], partitioner: Partitioner)
       : RDD[(K, (Iterable[V], Iterable[W1], Iterable[W2]))] = self.withScope {
     if (partitioner.isInstanceOf[HashPartitioner] && keyClass.isArray) {
-      throw new SparkException("Default partitioner cannot partition array keys.")
+      throw new SparkException("Specified or default partitioner cannot partition array keys.")
     }
     val cg = new CoGroupedRDD[K](Seq(self, other1, other2), partitioner)
     cg.mapValues { case Array(vs, w1s, w2s) =>
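
Background on the check these messages guard: JVM arrays use identity-based hashCode and equals, so two arrays with identical contents usually produce different hash codes. A HashPartitioner would therefore scatter records with "equal" array keys across partitions and silently break aggregations, which is why these methods fail fast. A minimal standalone sketch of that behavior (illustrative only; the sample RDD usage in the trailing comment is hypothetical):

    import org.apache.spark.HashPartitioner

    // Identical contents, but distinct objects: hashCode is identity-based for arrays.
    val k1 = Array(1, 2, 3)
    val k2 = Array(1, 2, 3)

    val p = new HashPartitioner(4)
    // Likely different partitions for what the user considers "the same" key.
    println(p.getPartition(k1))
    println(p.getPartition(k2))

    // Consequently, something like
    //   rdd.reduceByKey(new HashPartitioner(4), _ + _)
    // on an RDD[(Array[Int], String)] throws:
    //   SparkException: Specified or default partitioner cannot partition array keys.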