@@ -61,16 +61,7 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
6161 var pyFiles: String = null
6262 var isR: Boolean = false
6363 var action: SparkSubmitAction = null
64- val sparkProperties: HashMap[String, String] = new HashMap[String, String]() {
65- override def put(k: String, v: String): Option[String] = {
66- if (k.startsWith("spark.")) {
67- super.put(k, v)
68- } else {
69- SparkSubmit.printWarning(s"Ignoring non-spark config property: $k=$v")
70- Option[String](null)
71- }
72- }
73- }
64+ val sparkProperties: HashMap[String, String] = new HashMap[String, String]()
7465 var proxyUser: String = null
7566
7667 // Standalone cluster mode only
@@ -102,6 +93,8 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
10293 }
10394 // Populate `sparkProperties` map from properties file
10495 mergeDefaultSparkProperties()
96+ // Remove keys that don't start with "spark." from `sparkProperties`.
97+ ignoreNonSparkProperties()
10598 // Use `sparkProperties` map along with env vars to fill in any missing parameters
10699 loadEnvironmentArguments()
107100
@@ -122,6 +115,18 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
122115 }
123116 }
124117
118+ /**
119+ * Remove keys that don't start with "spark." from `sparkProperties`.
120+ */
121+ private def ignoreNonSparkProperties(): Unit = {
122+ sparkProperties.foreach { case (k, v) =>
123+ if (!k.startsWith("spark.")) {
124+ sparkProperties -= k
125+ SparkSubmit.printWarning(s"Ignoring non-spark config property: $k=$v")
126+ }
127+ }
128+ }
129+
125130 /**
126131 * Load arguments from environment variables, Spark properties etc.
127132 */
0 commit comments