
Commit 89d8072

Cleanups.

Author: Marcelo Vanzin (committed)

Mostly minor, except for ClientBase.scala, where I cleaned up the generated classpath to include only the necessary entries.

1 parent a963ea3 · commit 89d8072

File tree: 12 files changed, +39 −54 lines

core/src/main/scala/org/apache/spark/SparkConf.scala

Lines changed: 6 additions & 6 deletions

@@ -360,8 +360,8 @@ private[spark] object SparkConf extends Logging {
     val configs = Seq(
       DeprecatedConfig("spark.files.userClassPathFirst", "spark.executor.userClassPathFirst",
         "1.3"),
-      DeprecatedConfig("spark.yarn.user.classpath.first", null, "1.3.",
-        "Use spark.{driver,executor}.userClassPathFirst; this option will be removed the future."))
+      DeprecatedConfig("spark.yarn.user.classpath.first", null, "1.3",
+        "Use spark.{driver,executor}.userClassPathFirst instead."))
     configs.map { x => (x.oldName, x) }.toMap
   }

@@ -435,12 +435,12 @@ private[spark] object SparkConf extends Logging {
       val message = Option(deprecationMessage).getOrElse(
         s"Please use the alternative '$newName' instead.")
       logWarning(
-        s"The configuration option '$oldName' has been replaced as of Spark $version. " +
-        message)
+        s"The configuration option '$oldName' has been replaced as of Spark $version and " +
+        s"may be removed in the future. $message")
     } else {
       logWarning(
-        s"The configuration option '$oldName' has been deprecated as of Spark $version. " +
-        deprecationMessage)
+        s"The configuration option '$oldName' has been deprecated as of Spark $version and " +
+        s"may be removed in the future. $deprecationMessage")
     }
   }
 }
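
For context, here is a runnable sketch of the deprecation lookup these hunks modify. The DeprecatedConfig shape and the warning text mirror the diff; the surrounding plumbing is assumed and simplified (a plain println stands in for logWarning):

// Minimal standalone sketch of SparkConf's deprecated-key table and warning.
case class DeprecatedConfig(
    oldName: String,
    newName: String,               // null when there is no direct replacement
    version: String,
    deprecationMessage: String = null)

val deprecatedConfigs: Map[String, DeprecatedConfig] = {
  val configs = Seq(
    DeprecatedConfig("spark.files.userClassPathFirst", "spark.executor.userClassPathFirst",
      "1.3"),
    DeprecatedConfig("spark.yarn.user.classpath.first", null, "1.3",
      "Use spark.{driver,executor}.userClassPathFirst instead."))
  configs.map { x => (x.oldName, x) }.toMap
}

// Warn when a deprecated key is used, matching the new message wording above.
def warnIfDeprecated(key: String): Unit = {
  deprecatedConfigs.get(key).foreach { cfg =>
    if (cfg.newName != null) {
      val message = Option(cfg.deprecationMessage).getOrElse(
        s"Please use the alternative '${cfg.newName}' instead.")
      println(s"The configuration option '$key' has been replaced as of Spark " +
        s"${cfg.version} and may be removed in the future. $message")
    } else {
      println(s"The configuration option '$key' has been deprecated as of Spark " +
        s"${cfg.version} and may be removed in the future. ${cfg.deprecationMessage}")
    }
  }
}

warnIfDeprecated("spark.yarn.user.classpath.first")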

core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala

Lines changed: 1 addition & 1 deletion

@@ -321,7 +321,7 @@ object SparkSubmit {
     }

     val loader =
-      if (sysProps.get("spark.driver.userClassPathFirst").getOrElse("false").toBoolean) {
+      if (sysProps.getOrElse("spark.driver.userClassPathFirst", "false").toBoolean) {
         new ChildExecutorURLClassLoader(new Array[URL](0),
           Thread.currentThread.getContextClassLoader)
       } else {
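
This change is purely stylistic; on a Scala Map, get(key).getOrElse(default) and getOrElse(key, default) are equivalent, as this minimal check (with an illustrative map) shows:

val sysProps = Map("spark.app.name" -> "demo")

// Both forms return "false" when the key is absent.
val before = sysProps.get("spark.driver.userClassPathFirst").getOrElse("false")
val after = sysProps.getOrElse("spark.driver.userClassPathFirst", "false")
assert(before == after && !before.toBoolean)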

core/src/main/scala/org/apache/spark/deploy/worker/DriverRunner.scala

Lines changed: 0 additions & 1 deletion

@@ -74,7 +74,6 @@ private[spark] class DriverRunner(
     val driverDir = createWorkingDirectory()
     val localJarFilename = downloadUserJar(driverDir)

-    /** Replace variables in a command argument passed to us */
     def substituteVariables(argument: String): String = argument match {
       case "{{WORKER_URL}}" => workerUrl
       case "{{USER_JAR}}" => localJarFilename

core/src/main/scala/org/apache/spark/deploy/worker/DriverWrapper.scala

Lines changed: 0 additions & 1 deletion

@@ -18,7 +18,6 @@
 package org.apache.spark.deploy.worker

 import java.io.File
-import java.net.URL

 import akka.actor._

core/src/main/scala/org/apache/spark/executor/CoarseGrainedExecutorBackend.scala

Lines changed: 1 addition & 1 deletion

@@ -211,7 +211,7 @@ private[spark] object CoarseGrainedExecutorBackend extends Logging {
       | --hostname <hostname>
       | --cores <cores>
       | --app-id <appid>
-      | [--worker-id <workerUrl>]
+      | [--worker-url <workerUrl>]
       | [--user-class-path <url>]
       |""".stripMargin)
     System.exit(1)

core/src/test/scala/org/apache/spark/SparkConfSuite.scala

Lines changed: 6 additions & 2 deletions

@@ -185,12 +185,16 @@ class SparkConfSuite extends FunSuite with LocalSparkContext {
     serializer.newInstance().serialize(new StringBuffer())
   }

-  test("deprecated key renaming") {
-    val conf = new SparkConf().set("spark.files.userClassPathFirst", "true")
+  test("deprecated config keys") {
+    val conf = new SparkConf()
+      .set("spark.files.userClassPathFirst", "true")
+      .set("spark.yarn.user.classpath.first", "true")
     assert(conf.contains("spark.files.userClassPathFirst"))
     assert(conf.contains("spark.executor.userClassPathFirst"))
+    assert(conf.contains("spark.yarn.user.classpath.first"))
     assert(conf.getBoolean("spark.files.userClassPathFirst", false))
     assert(conf.getBoolean("spark.executor.userClassPathFirst", false))
+    assert(conf.getBoolean("spark.yarn.user.classpath.first", false))
   }

 }

core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala

Lines changed: 2 additions & 2 deletions

@@ -417,7 +417,7 @@ class SparkSubmitSuite extends FunSuite with Matchers {
     val systemJar = TestUtils.createJarWithFiles(Map("test.resource" -> "SYSTEM"))
     val userJar = TestUtils.createJarWithFiles(Map("test.resource" -> "USER"))
     val args = Seq(
-      "--class", ClassPathIsolationTest.getClass.getName.stripSuffix("$"),
+      "--class", UserClasspathFirstTest.getClass.getName.stripSuffix("$"),
       "--name", "testApp",
       "--master", "local",
       "--conf", "spark.driver.extraClassPath=" + systemJar,

@@ -514,7 +514,7 @@ object SimpleApplicationTest {
   }
 }

-object ClassPathIsolationTest {
+object UserClasspathFirstTest {
   def main(args: Array[String]) {
     val ccl = Thread.currentThread().getContextClassLoader()
     val resource = ccl.getResourceAsStream("test.resource")

yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ApplicationMaster.scala

Lines changed: 2 additions & 8 deletions

@@ -21,7 +21,7 @@ import scala.util.control.NonFatal

 import java.io.{File, IOException}
 import java.lang.reflect.InvocationTargetException
-import java.net.{Socket, URI, URL}
+import java.net.{Socket, URL}
 import java.util.concurrent.atomic.AtomicReference

 import akka.actor._

@@ -452,13 +452,7 @@ private[spark] class ApplicationMaster(args: ApplicationMasterArguments,

     val classpath = ClientBase.getUserClasspath(null, sparkConf)
     val urls = classpath.map { entry =>
-      val absPath =
-        if (new File(entry.getPath()).isAbsolute()) {
-          entry.getPath()
-        } else {
-          new File(entry.getPath()).getAbsolutePath()
-        }
-      new URL("file:" + absPath)
+      new URL("file:" + new File(entry.getPath()).getAbsolutePath())
     }
     val userClassLoader =
       if (ClientBase.isUserClassPathFirst(sparkConf, true)) {
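
The collapsed URL construction works because java.io.File.getAbsolutePath returns an already-absolute path unchanged, so the explicit isAbsolute branch was redundant. A quick standalone check (POSIX-style paths, illustrative values):

import java.io.File
import java.net.URL

// An absolute path passes through getAbsolutePath untouched...
assert(new File("/opt/jars/app.jar").getAbsolutePath == "/opt/jars/app.jar")

// ...while a relative one is resolved against the working directory.
val resolved = new File("app.jar").getAbsolutePath
assert(new File(resolved).isAbsolute && resolved.endsWith("app.jar"))

// So the single expression from the diff covers both cases.
val url = new URL("file:" + new File("app.jar").getAbsolutePath)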

yarn/common/src/main/scala/org/apache/spark/deploy/yarn/ClientBase.scala

Lines changed: 14 additions & 24 deletions

@@ -17,7 +17,6 @@

 package org.apache.spark.deploy.yarn

-import java.io.File
 import java.net.{InetAddress, UnknownHostException, URI}

 import scala.collection.JavaConversions._

@@ -161,17 +160,16 @@ private[spark] trait ClientBase extends Logging {
     /**
      * Copy the given main resource to the distributed cache if the scheme is not "local".
      * Otherwise, set the corresponding key in our SparkConf to handle it downstream.
-     * Each resource is represented by a 4-tuple of:
+     * Each resource is represented by a 3-tuple of:
      * (1) destination resource name,
      * (2) local path to the resource,
      * (3) Spark property key to set if the scheme is not local, and
-     * (4) whether to set permissions for this resource
      */
     List(
-      (SPARK_JAR, sparkJar(sparkConf), CONF_SPARK_JAR, false),
-      (APP_JAR, args.userJar, CONF_SPARK_USER_JAR, true),
-      ("log4j.properties", oldLog4jConf.orNull, null, true)
-    ).foreach { case (destName, _localPath, confKey, isUserArtifact) =>
+      (SPARK_JAR, sparkJar(sparkConf), CONF_SPARK_JAR),
+      (APP_JAR, args.userJar, CONF_SPARK_USER_JAR),
+      ("log4j.properties", oldLog4jConf.orNull, null)
+    ).foreach { case (destName, _localPath, confKey) =>
       val localPath: String = if (_localPath != null) _localPath.trim() else ""
       if (!localPath.isEmpty()) {
         val localURI = new URI(localPath)

@@ -661,9 +659,10 @@ private[spark] object ClientBase extends Logging {
   /**
    * Populate the classpath entry in the given environment map.
    *
-   * Class path isolation, when enabled, makes the user-added jars be loaded from a different
-   * class loader than other class path entries. The extra class path and other uploaded files
-   * are still made available through the system class path.
+   * User jars are generally not added to the JVM's classpath; those are handled by the AM and
+   * executor backend. When the deprecated `spark.yarn.user.classpath.first` is used, user jars are
+   * included in the system classpath, though. The extra class path and other uploaded files are
+   * always made available through the system class path.
    *
    * @param args Client arguments (when starting the AM) or null (when starting executors).
    */

@@ -675,22 +674,13 @@ private[spark] object ClientBase extends Logging {
       extraClassPath: Option[String] = None): Unit = {
     extraClassPath.foreach(addClasspathEntry(_, env))
     addClasspathEntry(Environment.PWD.$(), env)
-    if (isUserClassPathFirst(sparkConf, args == null)) {
-      addFileToClasspath(new URI(sparkJar(sparkConf)), SPARK_JAR, env)
-      populateHadoopClasspath(conf, env)
-    } else {
-      // Normally the users app.jar is last in case it conflicts with spark jars.
-      if (sparkConf.getBoolean("spark.yarn.user.classpath.first", false)) {
-        getUserClasspath(args, sparkConf).foreach { x =>
-          addFileToClasspath(x, null, env)
-        }
+    if (sparkConf.getBoolean("spark.yarn.user.classpath.first", false)) {
+      getUserClasspath(args, sparkConf).foreach { x =>
+        addFileToClasspath(x, null, env)
       }
-      addFileToClasspath(new URI(sparkJar(sparkConf)), SPARK_JAR, env)
-      populateHadoopClasspath(conf, env)
-
-      // Append all jar files under the directory holding cached files to the classpath.
-      addClasspathEntry(buildPath(Environment.PWD.$(), "*"), env)
     }
+    addFileToClasspath(new URI(sparkJar(sparkConf)), SPARK_JAR, env)
+    populateHadoopClasspath(conf, env)
   }

   /**
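
After this change populateClasspath emits one fixed ordering instead of two branches. A condensed, runnable restatement of that ordering, with the real helpers stubbed out and placeholder strings standing in for the actual entries:

import scala.collection.mutable.ListBuffer

val cp = ListBuffer.empty[String]
// Stands in for both addClasspathEntry and addFileToClasspath from the diff.
def addEntry(entry: String): Unit = cp += entry

val extraClassPath: Option[String] = Some("extra.jar")  // illustrative
val userClassPathFirst = true  // spark.yarn.user.classpath.first (deprecated)

extraClassPath.foreach(addEntry)   // 1. extra class path, if configured
addEntry("{{PWD}}")                // 2. the container's working directory
if (userClassPathFirst) {
  addEntry("user.jar")             // 3. user jars, only under the deprecated flag
}
addEntry("spark-assembly.jar")     // 4. the Spark jar (SPARK_JAR)
addEntry("{{HADOOP_CLASSPATH}}")   // 5. the Hadoop classpath

println(cp.mkString(":"))
// extra.jar:{{PWD}}:user.jar:spark-assembly.jar:{{HADOOP_CLASSPATH}}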

yarn/common/src/test/scala/org/apache/spark/deploy/yarn/ClientBaseSuite.scala

Lines changed: 0 additions & 1 deletion

@@ -100,7 +100,6 @@ class ClientBaseSuite extends FunSuite with Matchers {
       }
     })
     cp should contain (Environment.PWD.$())
-    cp should contain (s"${Environment.PWD.$()}${File.separator}*")
     cp should not contain (ClientBase.SPARK_JAR)
     cp should not contain (ClientBase.APP_JAR)
   }
