Skip to content

Commit b2018a5

Browse files
committed
check sparkFilesDir before delete
change from < if (SparkContext.DRIVER_IDENTIFIER == executorId) > to < if (sparkFilesDir != ".") >, and add a comment where sparkFilesDir is created.
1 parent f48a3c6 commit b2018a5

File tree

1 file changed

+3
-1
lines changed

1 file changed

+3
-1
lines changed

core/src/main/scala/org/apache/spark/SparkEnv.scala

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -98,7 +98,7 @@ class SparkEnv (
9898
// the tmp dir, if not, it will create too many tmp dirs.
9999
// We only need to delete the tmp dir created by the driver, because sparkFilesDir points to the
100100
// current working dir in executor which we do not need to delete.
101-
if (SparkContext.DRIVER_IDENTIFIER == executorId) {
101+
if (sparkFilesDir != ".") {
102102
try {
103103
Utils.deleteRecursively(new File(sparkFilesDir))
104104
} catch {
@@ -351,6 +351,8 @@ object SparkEnv extends Logging {
351351
// Set the sparkFiles directory, used when downloading dependencies. In local mode,
352352
// this is a temporary directory; in distributed mode, this is the executor's current working
353353
// directory.
354+
// We use this value to decide whether we need to delete the tmp dir in stop(), so if you
355+
// want to change this code please be careful.
354356
val sparkFilesDir: String = if (isDriver) {
355357
Utils.createTempDir(Utils.getLocalDir(conf), "userFiles").getAbsolutePath
356358
} else {

0 commit comments

Comments
 (0)