Skip to content

Commit 93d094f

Browse files
authored
name nits (#11)
1 parent b26e49e commit 93d094f

File tree

6 files changed

+18
-18
lines changed

6 files changed

+18
-18
lines changed

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/csv/CSVExpressionUtils.scala renamed to sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/csv/CSVExprUtils.scala

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -17,7 +17,7 @@
1717

1818
package org.apache.spark.sql.catalyst.csv
1919

20-
object CSVExpressionUtils {
20+
object CSVExprUtils {
2121
/**
2222
* Filter ignorable rows for CSV iterator (lines empty and starting with `comment`).
2323
* This is currently being used in CSV reading path and CSV schema inference.

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/csv/CSVHeaderChecker.scala

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -123,7 +123,7 @@ class CSVHeaderChecker(
123123
// Note: if there are only comments in the first block, the header would probably
124124
// be not extracted.
125125
if (options.headerFlag && isStartOfFile) {
126-
CSVExpressionUtils.extractHeader(lines, options).foreach { header =>
126+
CSVExprUtils.extractHeader(lines, options).foreach { header =>
127127
checkHeaderColumnNames(tokenizer.parseLine(header))
128128
}
129129
}

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/csv/CSVOptions.scala

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -83,7 +83,7 @@ class CSVOptions(
8383
}
8484
}
8585

86-
val delimiter = CSVExpressionUtils.toChar(
86+
val delimiter = CSVExprUtils.toChar(
8787
parameters.getOrElse("sep", parameters.getOrElse("delimiter", ",")))
8888
val parseMode: ParseMode =
8989
parameters.get("mode").map(ParseMode.fromString).getOrElse(PermissiveMode)

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/csv/UnivocityParser.scala

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -338,7 +338,7 @@ private[sql] object UnivocityParser {
338338

339339
val options = parser.options
340340

341-
val filteredLines: Iterator[String] = CSVExpressionUtils.filterCommentAndEmpty(lines, options)
341+
val filteredLines: Iterator[String] = CSVExprUtils.filterCommentAndEmpty(lines, options)
342342

343343
val safeParser = new FailureSafeParser[String](
344344
input => Seq(parser.parse(input)),

sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/csv/CSVUtilsSuite.scala

Lines changed: 12 additions & 12 deletions
Original file line number | Diff line number | Diff line change
@@ -21,40 +21,40 @@ import org.apache.spark.SparkFunSuite
2121

2222
class CSVExpressionUtilsSuite extends SparkFunSuite {
2323
test("Can parse escaped characters") {
24-
assert(CSVExpressionUtils.toChar("""\t""") === '\t')
25-
assert(CSVExpressionUtils.toChar("""\r""") === '\r')
26-
assert(CSVExpressionUtils.toChar("""\b""") === '\b')
27-
assert(CSVExpressionUtils.toChar("""\f""") === '\f')
28-
assert(CSVExpressionUtils.toChar("""\"""") === '\"')
29-
assert(CSVExpressionUtils.toChar("""\'""") === '\'')
30-
assert(CSVExpressionUtils.toChar("""\u0000""") === '\u0000')
31-
assert(CSVExpressionUtils.toChar("""\\""") === '\\')
24+
assert(CSVExprUtils.toChar("""\t""") === '\t')
25+
assert(CSVExprUtils.toChar("""\r""") === '\r')
26+
assert(CSVExprUtils.toChar("""\b""") === '\b')
27+
assert(CSVExprUtils.toChar("""\f""") === '\f')
28+
assert(CSVExprUtils.toChar("""\"""") === '\"')
29+
assert(CSVExprUtils.toChar("""\'""") === '\'')
30+
assert(CSVExprUtils.toChar("""\u0000""") === '\u0000')
31+
assert(CSVExprUtils.toChar("""\\""") === '\\')
3232
}
3333

3434
test("Does not accept delimiter larger than one character") {
3535
val exception = intercept[IllegalArgumentException]{
36-
CSVExpressionUtils.toChar("ab")
36+
CSVExprUtils.toChar("ab")
3737
}
3838
assert(exception.getMessage.contains("cannot be more than one character"))
3939
}
4040

4141
test("Throws exception for unsupported escaped characters") {
4242
val exception = intercept[IllegalArgumentException]{
43-
CSVExpressionUtils.toChar("""\1""")
43+
CSVExprUtils.toChar("""\1""")
4444
}
4545
assert(exception.getMessage.contains("Unsupported special character for delimiter"))
4646
}
4747

4848
test("string with one backward slash is prohibited") {
4949
val exception = intercept[IllegalArgumentException]{
50-
CSVExpressionUtils.toChar("""\""")
50+
CSVExprUtils.toChar("""\""")
5151
}
5252
assert(exception.getMessage.contains("Single backslash is prohibited"))
5353
}
5454

5555
test("output proper error message for empty string") {
5656
val exception = intercept[IllegalArgumentException]{
57-
CSVExpressionUtils.toChar("")
57+
CSVExprUtils.toChar("")
5858
}
5959
assert(exception.getMessage.contains("Delimiter cannot be empty string"))
6060
}

sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/csv/CSVUtils.scala

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -19,7 +19,7 @@ package org.apache.spark.sql.execution.datasources.csv
1919

2020
import org.apache.spark.rdd.RDD
2121
import org.apache.spark.sql.Dataset
22-
import org.apache.spark.sql.catalyst.csv.CSVExpressionUtils
22+
import org.apache.spark.sql.catalyst.csv.CSVExprUtils
2323
import org.apache.spark.sql.catalyst.csv.CSVOptions
2424
import org.apache.spark.sql.functions._
2525

@@ -128,5 +128,5 @@ object CSVUtils {
128128
}
129129

130130
def filterCommentAndEmpty(iter: Iterator[String], options: CSVOptions): Iterator[String] =
131-
CSVExpressionUtils.filterCommentAndEmpty(iter, options)
131+
CSVExprUtils.filterCommentAndEmpty(iter, options)
132132
}

0 commit comments

Comments (0)