2 files changed, +3 -3 lines changed:
main/scala/org/apache/spark/io
test/scala/org/apache/spark/io

main/scala/org/apache/spark/io

@@ -55,7 +55,7 @@ private[spark] object CompressionCodec {
     ctor.newInstance(conf).asInstanceOf[CompressionCodec]
   }

-  val DEFAULT_COMPRESSION_CODEC = classOf[LZFCompressionCodec].getName
+  val DEFAULT_COMPRESSION_CODEC = classOf[SnappyCompressionCodec].getName
 }

@@ -81,7 +81,7 @@ class LZFCompressionCodec(conf: SparkConf) extends CompressionCodec {
 /**
  * :: DeveloperApi ::
  * Snappy implementation of [[org.apache.spark.io.CompressionCodec]].
- * Block size can be configured by spark.io.compression.snappy.block.size.
+ * Block size can be configured by `spark.io.compression.snappy.block.size`.
  *
  * Note: The wire protocol for this codec is not guaranteed to be compatible across versions
  * of Spark. This is intended for use as an internal compression utility within a single Spark
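
Taken together, the hunks above switch Spark's default I/O compression codec from LZF to Snappy. A minimal sketch, not part of this diff, of how an application could keep the previous behavior by setting spark.io.compression.codec explicitly; since CompressionCodec is private[spark], code like this has to live under the org.apache.spark package, as the test suite below does, and the object name here is purely illustrative:

package org.apache.spark.io

import java.io.{ByteArrayInputStream, ByteArrayOutputStream}

import org.apache.spark.SparkConf

// Hypothetical helper, not part of Spark: exercises the codec lookup path touched above.
object DefaultCodecSketch {
  def main(args: Array[String]): Unit = {
    // With this change applied, an empty SparkConf resolves to SnappyCompressionCodec.
    val defaultCodec = CompressionCodec.createCodec(new SparkConf())

    // Jobs that depend on the old default can still opt back in to LZF explicitly.
    val lzfConf = new SparkConf()
      .set("spark.io.compression.codec", classOf[LZFCompressionCodec].getName)
    val lzfCodec = CompressionCodec.createCodec(lzfConf)
    assert(lzfCodec.isInstanceOf[LZFCompressionCodec])

    // Round-trip a small payload through the new default codec.
    val payload = "hello spark".getBytes("UTF-8")
    val buffer = new ByteArrayOutputStream()
    val out = defaultCodec.compressedOutputStream(buffer)
    out.write(payload)
    out.close()

    val in = defaultCodec.compressedInputStream(new ByteArrayInputStream(buffer.toByteArray))
    val decoded = new ByteArrayOutputStream()
    var b = in.read()
    while (b != -1) {
      decoded.write(b)
      b = in.read()
    }
    in.close()
    assert(new String(decoded.toByteArray, "UTF-8") == "hello spark")
  }
}
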
test/scala/org/apache/spark/io

@@ -46,7 +46,7 @@ class CompressionCodecSuite extends FunSuite {
 
   test("default compression codec") {
     val codec = CompressionCodec.createCodec(conf)
-    assert(codec.getClass === classOf[LZFCompressionCodec])
+    assert(codec.getClass === classOf[SnappyCompressionCodec])
     testCodec(codec)
   }
 
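
The doc hunk in the first file also points at the Snappy block-size setting. A minimal sketch, not part of this diff and with an illustrative value, of pairing an explicit Snappy codec with spark.io.compression.snappy.block.size; as above, it sits under org.apache.spark.io because CompressionCodec is private[spark]:

package org.apache.spark.io

import org.apache.spark.SparkConf

// Hypothetical snippet, not part of Spark's test suite.
object SnappyBlockSizeSketch {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      // Request the Snappy codec explicitly (also the new default after this change) ...
      .set("spark.io.compression.codec", classOf[SnappyCompressionCodec].getName)
      // ... and set its block size to 64 KB (the value is in bytes).
      .set("spark.io.compression.snappy.block.size", "65536")

    val codec = CompressionCodec.createCodec(conf)
    assert(codec.isInstanceOf[SnappyCompressionCodec])
  }
}
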