Skip to content

Commit 7530f00

Browse files
author
Jan Vrsovsky
committed
better handling of temp dirs, as suggested
1 parent 38da77d commit 7530f00

File tree

1 file changed

+5
-8
lines changed

1 file changed

+5
-8
lines changed

mllib/src/test/scala/org/apache/spark/ml/source/libsvm/LibSVMRelationSuite.scala

Lines changed: 5 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -47,14 +47,14 @@ class LibSVMRelationSuite extends SparkFunSuite with MLlibTestSparkContext {
       """
         |0 2:4.0 4:5.0 6:6.0
       """.stripMargin
-    val dir = Utils.createDirectory(tempDir.getCanonicalPath, "data")
+    val dir = Utils.createTempDir()
     val succ = new File(dir, "_SUCCESS")
     val file0 = new File(dir, "part-00000")
     val file1 = new File(dir, "part-00001")
     Files.write("", succ, StandardCharsets.UTF_8)
     Files.write(lines0, file0, StandardCharsets.UTF_8)
     Files.write(lines1, file1, StandardCharsets.UTF_8)
-    path = dir.toURI.toString
+    path = dir.getPath
   }

   override def afterAll(): Unit = {
@@ -111,16 +111,15 @@ class LibSVMRelationSuite extends SparkFunSuite with MLlibTestSparkContext {

   test("write libsvm data and read it again") {
     val df = spark.read.format("libsvm").load(path)
-    val tempDir2 = new File(tempDir, "read_write_test")
-    val writePath = tempDir2.toURI.toString
+    val writePath = Utils.createTempDir().getPath
+
     // TODO: Remove requirement to coalesce by supporting multiple reads.
     df.coalesce(1).write.format("libsvm").mode(SaveMode.Overwrite).save(writePath)

     val df2 = spark.read.format("libsvm").load(writePath)
     val row1 = df2.first()
     val v = row1.getAs[SparseVector](1)
     assert(v == Vectors.sparse(6, Seq((0, 1.0), (2, 2.0), (4, 3.0))))
-    Utils.deleteRecursively(tempDir2)
   }

   test("write libsvm data failed due to invalid schema") {
@@ -141,16 +140,14 @@ class LibSVMRelationSuite extends SparkFunSuite with MLlibTestSparkContext {
     )
     val df = spark.sqlContext.createDataFrame(rawData, struct)

-    val tempDir2 = new File(tempDir, "read_write_test_2")
-    val writePath = tempDir2.toURI.toString
+    val writePath = Utils.createTempDir().getPath

     df.coalesce(1).write.format("libsvm").mode(SaveMode.Overwrite).save(writePath)

     val df2 = spark.read.format("libsvm").load(writePath)
     val row1 = df2.first()
     val v = row1.getAs[SparseVector](1)
     assert(v == Vectors.sparse(3, Seq((0, 2.0), (1, 3.0))))
-    Utils.deleteRecursively(tempDir2)
   }

   test("select features from libsvm relation") {

0 commit comments

Comments (0)