
Commit ea69cf2

Andrew Or authored and pwendell committed
[SPARK-6048] SparkConf should not translate deprecated configs on set
There are multiple issues with translating on set outlined in the JIRA. This PR reverts the translation logic added to `SparkConf`. In the future, after the 1.3.0 release we will figure out a way to reorganize the internal structure more elegantly. For now, let's preserve the existing semantics of `SparkConf` since it's a public interface. Unfortunately this means duplicating some code for now, but this is all internal and we can always clean it up later.

Author: Andrew Or <andrew@databricks.com>

Closes #4799 from andrewor14/conf-set-translate and squashes the following commits:

11c525b [Andrew Or] Move warning to driver
10e77b5 [Andrew Or] Add documentation for deprecation precedence
a369cb1 [Andrew Or] Merge branch 'master' of github.com:apache/spark into conf-set-translate
c26a9e3 [Andrew Or] Revert all translate logic in SparkConf
fef6c9c [Andrew Or] Restore deprecation logic for spark.executor.userClassPathFirst
94b4dfa [Andrew Or] Translate on get, not set

(cherry picked from commit 258d154)
Signed-off-by: Patrick Wendell <patrick@databricks.com>
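For context, a minimal sketch of the problem with translating on set (hypothetical usage, not code from this patch): because `set` rewrote deprecated keys to their new names, the key a caller stored was no longer the key visible in the conf.

    // Illustration of the reverted translate-on-set behavior (assumed, for contrast):
    val conf = new SparkConf()
    conf.set("spark.files.userClassPathFirst", "true") // deprecated key
    // The entry was stored under "spark.executor.userClassPathFirst",
    // so getAll reported a key the caller never set:
    conf.getAll.foreach { case (k, v) => println(s"$k -> $v") }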
1 parent 8100b79 · commit ea69cf2

5 files changed: +25, -22 lines


core/src/main/scala/org/apache/spark/SparkConf.scala

Lines changed: 11 additions & 4 deletions

@@ -68,7 +68,7 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
     if (value == null) {
       throw new NullPointerException("null value for " + key)
     }
-    settings.put(translateConfKey(key, warn = true), value)
+    settings.put(key, value)
     this
   }

@@ -140,7 +140,7 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {

   /** Set a parameter if it isn't already configured */
   def setIfMissing(key: String, value: String): SparkConf = {
-    settings.putIfAbsent(translateConfKey(key, warn = true), value)
+    settings.putIfAbsent(key, value)
     this
   }

@@ -176,7 +176,7 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {

   /** Get a parameter as an Option */
   def getOption(key: String): Option[String] = {
-    Option(settings.get(translateConfKey(key)))
+    Option(settings.get(key))
   }

   /** Get all parameters as a list of pairs */
@@ -229,7 +229,7 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
   def getAppId: String = get("spark.app.id")

   /** Does the configuration contain a given parameter? */
-  def contains(key: String): Boolean = settings.containsKey(translateConfKey(key))
+  def contains(key: String): Boolean = settings.containsKey(key)

   /** Copy this object */
   override def clone: SparkConf = {
@@ -343,6 +343,13 @@ class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging {
         }
       }
     }
+
+    // Warn against the use of deprecated configs
+    deprecatedConfigs.values.foreach { dc =>
+      if (contains(dc.oldName)) {
+        dc.warn()
+      }
+    }
   }

   /**
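With translation gone, `set`, `getOption`, and `contains` all operate on the literal key, and deprecated names are only warned about at validation time. A minimal sketch of the restored semantics, based on the methods above:

    val conf = new SparkConf()
    conf.set("spark.files.userClassPathFirst", "true")  // deprecated key, stored verbatim
    conf.contains("spark.files.userClassPathFirst")     // true: the key round-trips
    conf.contains("spark.executor.userClassPathFirst")  // false: no translation on set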

core/src/main/scala/org/apache/spark/executor/Executor.scala

Lines changed: 9 additions & 4 deletions

@@ -92,6 +92,12 @@ private[spark] class Executor(
   private val executorActor = env.actorSystem.actorOf(
     Props(new ExecutorActor(executorId)), "ExecutorActor")

+  // Whether to load classes in user jars before those in Spark jars
+  private val userClassPathFirst: Boolean = {
+    conf.getBoolean("spark.executor.userClassPathFirst",
+      conf.getBoolean("spark.files.userClassPathFirst", false))
+  }
+
   // Create our ClassLoader
   // do this after SparkEnv creation so can access the SecurityManager
   private val urlClassLoader = createClassLoader()
@@ -309,7 +315,7 @@ private[spark] class Executor(
     val urls = userClassPath.toArray ++ currentJars.keySet.map { uri =>
       new File(uri.split("/").last).toURI.toURL
     }
-    if (conf.getBoolean("spark.executor.userClassPathFirst", false)) {
+    if (userClassPathFirst) {
       new ChildFirstURLClassLoader(urls, currentLoader)
     } else {
       new MutableURLClassLoader(urls, currentLoader)
@@ -324,14 +330,13 @@ private[spark] class Executor(
     val classUri = conf.get("spark.repl.class.uri", null)
     if (classUri != null) {
       logInfo("Using REPL class URI: " + classUri)
-      val userClassPathFirst: java.lang.Boolean =
-        conf.getBoolean("spark.executor.userClassPathFirst", false)
       try {
+        val _userClassPathFirst: java.lang.Boolean = userClassPathFirst
         val klass = Class.forName("org.apache.spark.repl.ExecutorClassLoader")
           .asInstanceOf[Class[_ <: ClassLoader]]
         val constructor = klass.getConstructor(classOf[SparkConf], classOf[String],
           classOf[ClassLoader], classOf[Boolean])
-        constructor.newInstance(conf, classUri, parent, userClassPathFirst)
+        constructor.newInstance(conf, classUri, parent, _userClassPathFirst)
       } catch {
         case _: ClassNotFoundException =>
           logError("Could not find org.apache.spark.repl.ExecutorClassLoader on classpath!")
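In place of translation, reads now consult the new key first and fall back to the deprecated one as the default, as in the `userClassPathFirst` field above. The pattern, extracted for illustration (assumes a `SparkConf` named `conf` in scope):

    // The new key wins when present; the deprecated key only supplies the default.
    val userClassPathFirst: Boolean =
      conf.getBoolean("spark.executor.userClassPathFirst",
        conf.getBoolean("spark.files.userClassPathFirst", false))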

core/src/test/scala/org/apache/spark/SparkConfSuite.scala

Lines changed: 0 additions & 12 deletions

@@ -197,18 +197,6 @@ class SparkConfSuite extends FunSuite with LocalSparkContext with ResetSystemPro
     serializer.newInstance().serialize(new StringBuffer())
   }

-  test("deprecated config keys") {
-    val conf = new SparkConf()
-      .set("spark.files.userClassPathFirst", "true")
-      .set("spark.yarn.user.classpath.first", "true")
-    assert(conf.contains("spark.files.userClassPathFirst"))
-    assert(conf.contains("spark.executor.userClassPathFirst"))
-    assert(conf.contains("spark.yarn.user.classpath.first"))
-    assert(conf.getBoolean("spark.files.userClassPathFirst", false))
-    assert(conf.getBoolean("spark.executor.userClassPathFirst", false))
-    assert(conf.getBoolean("spark.yarn.user.classpath.first", false))
-  }
-
 }

 class Class1 {}
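This test encoded the translate-on-set semantics being reverted: after setting only the deprecated keys, it expected the renamed key `spark.executor.userClassPathFirst` to be present in the conf. With translation removed from `set`, the `contains` and `getBoolean` assertions against the new key would fail, so the test is deleted along with the behavior.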

docs/configuration.md

Lines changed: 3 additions & 1 deletion

@@ -70,7 +70,9 @@ each line consists of a key and a value separated by whitespace. For example:
 Any values specified as flags or in the properties file will be passed on to the application
 and merged with those specified through SparkConf. Properties set directly on the SparkConf
 take highest precedence, then flags passed to `spark-submit` or `spark-shell`, then options
-in the `spark-defaults.conf` file.
+in the `spark-defaults.conf` file. A few configuration keys have been renamed since earlier
+versions of Spark; in such cases, the older key names are still accepted, but take lower
+precedence than any instance of the newer key.

 ## Viewing Spark Properties
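The documented precedence falls out of the fallback pattern used elsewhere in this patch; a hypothetical example, not code from the diff:

    val conf = new SparkConf()
    conf.set("spark.files.userClassPathFirst", "false")    // older, deprecated name
    conf.set("spark.executor.userClassPathFirst", "true")  // newer name
    // Read sites consult the newer key first, so its value wins:
    conf.getBoolean("spark.executor.userClassPathFirst",
      conf.getBoolean("spark.files.userClassPathFirst", false))  // returns true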

yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala

Lines changed: 2 additions & 1 deletion

@@ -955,7 +955,8 @@ object Client extends Logging {
     if (isDriver) {
       conf.getBoolean("spark.driver.userClassPathFirst", false)
     } else {
-      conf.getBoolean("spark.executor.userClassPathFirst", false)
+      conf.getBoolean("spark.executor.userClassPathFirst",
+        conf.getBoolean("spark.files.userClassPathFirst", false))
     }
   }
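This mirrors the read-time fallback in Executor.scala: on YARN the deprecated `spark.files.userClassPathFirst` now serves only as the default for the executor branch, while the driver branch continues to read `spark.driver.userClassPathFirst` unchanged.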
