
Commit 06c1a01

SPARK-2469: Use Snappy (instead of LZF) for default shuffle compression codec.
This reduces shuffle compression memory usage by 3x.
1 parent 9fe693b commit 06c1a01
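
For readers who prefer the previous codec, here is a minimal opt-out sketch, assuming only the standard spark.io.compression.codec setting (the key CompressionCodec.createCodec consults before falling back to DEFAULT_COMPRESSION_CODEC); the object and app names are placeholders:

import org.apache.spark.SparkConf
import org.apache.spark.io.LZFCompressionCodec

object CodecOverrideSketch {
  def main(args: Array[String]): Unit = {
    // spark.io.compression.codec is read by CompressionCodec.createCodec;
    // only when it is unset does the new Snappy default apply.
    val conf = new SparkConf()
      .setAppName("codec-override-sketch") // hypothetical app name
      .set("spark.io.compression.codec", classOf[LZFCompressionCodec].getName)

    println(conf.get("spark.io.compression.codec"))
  }
}

Explicit settings are untouched by this commit; only the fallback value changes.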

2 files changed (+3, -3 lines)


core/src/main/scala/org/apache/spark/io/CompressionCodec.scala (2 additions, 2 deletions)

@@ -55,7 +55,7 @@ private[spark] object CompressionCodec {
     ctor.newInstance(conf).asInstanceOf[CompressionCodec]
   }
 
-  val DEFAULT_COMPRESSION_CODEC = classOf[LZFCompressionCodec].getName
+  val DEFAULT_COMPRESSION_CODEC = classOf[SnappyCompressionCodec].getName
 }
 
 
@@ -81,7 +81,7 @@ class LZFCompressionCodec(conf: SparkConf) extends CompressionCodec {
 /**
  * :: DeveloperApi ::
  * Snappy implementation of [[org.apache.spark.io.CompressionCodec]].
- * Block size can be configured by spark.io.compression.snappy.block.size.
+ * Block size can be configured by `spark.io.compression.snappy.block.size`.
  *
  * Note: The wire protocol for this codec is not guaranteed to be compatible across versions
  * of Spark. This is intended for use as an internal compression utility within a single Spark
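
As a rough illustration of the codec being promoted to default, the sketch below round-trips a small payload through SnappyCompressionCodec's compressedOutputStream/compressedInputStream and sets the block-size key documented above; the 32768-byte value and the object name are illustrative choices, not values prescribed by this commit:

import java.io.{ByteArrayInputStream, ByteArrayOutputStream}

import org.apache.spark.SparkConf
import org.apache.spark.io.SnappyCompressionCodec

object SnappyRoundTripSketch {
  def main(args: Array[String]): Unit = {
    // Illustrative block size; the codec reads spark.io.compression.snappy.block.size.
    val conf = new SparkConf().set("spark.io.compression.snappy.block.size", "32768")
    val codec = new SnappyCompressionCodec(conf)

    // Compress a small payload through the codec's stream API.
    val raw = "shuffle bytes".getBytes("UTF-8")
    val compressed = new ByteArrayOutputStream()
    val out = codec.compressedOutputStream(compressed)
    out.write(raw)
    out.close()

    // Decompress it again to confirm the round trip.
    val in = codec.compressedInputStream(new ByteArrayInputStream(compressed.toByteArray))
    val restored = new ByteArrayOutputStream()
    var b = in.read()
    while (b != -1) {
      restored.write(b)
      b = in.read()
    }
    in.close()

    assert(new String(restored.toByteArray, "UTF-8") == "shuffle bytes")
  }
}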

core/src/test/scala/org/apache/spark/io/CompressionCodecSuite.scala (1 addition, 1 deletion)

@@ -46,7 +46,7 @@ class CompressionCodecSuite extends FunSuite {
 
   test("default compression codec") {
     val codec = CompressionCodec.createCodec(conf)
-    assert(codec.getClass === classOf[LZFCompressionCodec])
+    assert(codec.getClass === classOf[SnappyCompressionCodec])
     testCodec(codec)
   }
 