
Commit 4e61c16

zhzhanliancheng authored and committed
minor change
1 parent 305418c commit 4e61c16

3 files changed: 11 additions & 7 deletions

sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcQuerySuite.scala

Lines changed: 8 additions & 5 deletions
@@ -91,7 +91,8 @@ class OrcQuerySuite extends QueryTest with FunSuiteLike with BeforeAndAfterAll {
     val tempDir = getTempFilePath("orcTest").getCanonicalPath
     val range = (0 to 255)
     val data = sparkContext.parallelize(range)
-      .map(x => AllDataTypes(s"$x", x, x.toLong, x.toFloat, x.toDouble, x.toShort, x.toByte, x % 2 == 0))
+      .map(x =>
+        AllDataTypes(s"$x", x, x.toLong, x.toFloat, x.toDouble, x.toShort, x.toByte, x % 2 == 0))
     data.toDF().saveAsOrcFile(tempDir)
     checkAnswer(
       TestHive.orcFile(tempDir),
@@ -101,7 +102,8 @@ class OrcQuerySuite extends QueryTest with FunSuiteLike with BeforeAndAfterAll {
 
   test("read/write binary data") {
     val tempDir = getTempFilePath("orcTest").getCanonicalPath
-    sparkContext.parallelize(BinaryData("test".getBytes("utf8")) :: Nil).toDF().saveAsOrcFile(tempDir)
+    sparkContext.parallelize(BinaryData("test".getBytes("utf8")) :: Nil)
+      .toDF().saveAsOrcFile(tempDir)
     TestHive.orcFile(tempDir)
       .map(r => new String(r(0).asInstanceOf[Array[Byte]], "utf8"))
       .collect().toSeq == Seq("test")
@@ -136,7 +138,8 @@ class OrcQuerySuite extends QueryTest with FunSuiteLike with BeforeAndAfterAll {
     rdd.foreach {
       // '===' does not like string comparison?
       row: Row => {
-        assert(row.getString(1).equals(s"val_$counter"), s"row $counter value ${row.getString(1)} does not match val_$counter")
+        assert(row.getString(1).equals(s"val_$counter"),
+          s"row $counter value ${row.getString(1)} does not match val_$counter")
         counter = counter + 1
       }
     }
@@ -173,7 +176,7 @@ class OrcQuerySuite extends QueryTest with FunSuiteLike with BeforeAndAfterAll {
 
   // We only support zlib in hive0.12.0 now
   test("Default Compression options for writing to an Orcfile") {
-    //TODO: support other compress codec
+    // TODO: support other compress codec
     var tempDir = getTempFilePath("orcTest").getCanonicalPath
     val rdd = sparkContext.parallelize((1 to 100))
       .map(i => TestRDDEntry(i, s"val_$i"))
@@ -184,7 +187,7 @@ class OrcQuerySuite extends QueryTest with FunSuiteLike with BeforeAndAfterAll {
   }
 
   // Following codec is supported in hive-0.13.1, ignore it now
-  ignore("Other Compression options for writing to an Orcfile only supported in hive 0.13.1 and above") {
+  ignore("Other Compression options for writing to an Orcfile - 0.13.1 and above") {
     TestHive.sparkContext.hadoopConfiguration.set(orcDefaultCompressVar, "SNAPPY")
     var tempDir = getTempFilePath("orcTest").getCanonicalPath
     val rdd = sparkContext.parallelize((1 to 100))
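
For readers skimming the hunks above: none of them change behavior, they only rewrap an ORC write/read round trip. A minimal sketch of that round trip, assuming the same harness helpers the suite already imports (getTempFilePath, AllDataTypes, saveAsOrcFile, TestHive.orcFile, checkAnswer); none of these are introduced by this commit.

  // Sketch only: same round trip as the test above, shown without diff markers.
  // All helpers come from the ORC test harness, not from this commit.
  val tempDir = getTempFilePath("orcTest").getCanonicalPath
  val data = sparkContext.parallelize(0 to 255)
    .map(x =>
      AllDataTypes(s"$x", x, x.toLong, x.toFloat, x.toDouble, x.toShort, x.toByte, x % 2 == 0))
  data.toDF().saveAsOrcFile(tempDir)                                   // write out as ORC
  checkAnswer(TestHive.orcFile(tempDir), data.toDF().collect().toSeq)  // read back and compare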

sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcRelationTest.scala renamed to sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcRelationSuite.scala

Lines changed: 1 addition & 1 deletion
@@ -530,4 +530,4 @@ class FSBasedOrcRelationSuite extends OrcRelationTest {
         "dataSchema" -> dataSchemaWithPartition.json)))
   }
 }
-}
+}

sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcSuite.scala renamed to sql/hive/src/test/scala/org/apache/spark/sql/hive/orc/OrcSourceSuite.scala

Lines changed: 2 additions & 1 deletion
@@ -175,7 +175,8 @@ abstract class OrcSuite extends QueryTest with BeforeAndAfterAll {
   }
 
   test("overwrite insert") {
-    sql("insert overwrite table normal_orc_as_source select * from orc_temp_table where intField > 5")
+    sql("insert overwrite table normal_orc_as_source select * " +
+      "from orc_temp_table where intField > 5")
     checkAnswer(
       sql("select * from normal_orc_as_source"),
       Row(6, "part-6") ::
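
The reflowed statement is a plain Hive-style INSERT OVERWRITE followed by a result check. A minimal sketch of the same pattern, assuming the tables the suite creates in its setup (orc_temp_table, normal_orc_as_source) already exist; the expected row list is abbreviated here.

  // Sketch only: orc_temp_table and normal_orc_as_source are created in the
  // suite's setup, not here.
  sql(
    "insert overwrite table normal_orc_as_source select * " +
      "from orc_temp_table where intField > 5")
  checkAnswer(
    sql("select * from normal_orc_as_source"),
    Row(6, "part-6") :: Nil)  // the real test lists every row with intField > 5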

0 commit comments