Skip to content
Original file line number Diff line number Diff line change
Expand Up @@ -244,9 +244,12 @@ public SparkContext createSparkContext() {
new SparkConf()
.setMaster(getProperty("master"))
.setAppName(getProperty("spark.app.name"))
.setJars(jars)
.set("spark.repl.class.uri", classServerUri);

if (jars.length > 0) {
conf.setJars(jars);
}

if (execUri != null) {
conf.set("spark.executor.uri", execUri);
}
Expand All @@ -259,17 +262,19 @@ public SparkContext createSparkContext() {

for (Object k : intpProperty.keySet()) {
String key = (String) k;
Object value = intpProperty.get(key);
logger.debug(String.format("SparkConf: key = [%s], value = [%s]", key, value));
conf.set(key, (String) value);
String val = toString(intpProperty.get(key));
if (!key.startsWith("spark.") || !val.trim().isEmpty()) {
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

What if val is whitespace-only, e.g. " "?

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@felixcheung trim() removes the whitespace, so a whitespace-only value is treated as empty. The test cases do not yet cover all of these cases; I opened a PR against @bzz's branch and he will fix this.

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Then val.trim().isEmpty() is true, so the value will still be set for any property that does not start with "spark." (i.e. a whitespace-only value could be stored, e.g. as a password).

logger.debug(String.format("SparkConf: key = [%s], value = [%s]", key, val));
conf.set(key, val);
}
}

SparkContext sparkContext = new SparkContext(conf);
return sparkContext;
}

public static boolean isEmptyString(Object val) {
return val instanceof String && ((String) val).trim().isEmpty();
static final String toString(Object o) {
  // Pass String values through unchanged; any non-String (including null)
  // maps to the empty string rather than its Object#toString() form.
  if (o instanceof String) {
    return (String) o;
  }
  return "";
}

public static String getSystemDefault(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@
import java.util.LinkedList;
import java.util.Properties;

import org.apache.spark.SparkConf;
import org.apache.zeppelin.display.AngularObjectRegistry;
import org.apache.zeppelin.display.GUI;
import org.apache.zeppelin.interpreter.InterpreterContext;
Expand Down Expand Up @@ -138,4 +139,18 @@ public void testZContextDependencyLoading() {
repl.interpret("z.load(\"org.apache.commons:commons-csv:1.1\")", context);
assertEquals(InterpreterResult.Code.SUCCESS, repl.interpret("import org.apache.commons.csv.CSVFormat", context).code());
}

@Test
public void emptyConfigurationVariablesOnlyForNonSparkProperties() {
  // Verifies the contract of createSparkContext(): interpreter properties whose
  // key starts with "spark." and whose value is empty must not be propagated
  // into the SparkConf as empty entries.
  Properties intpProperty = repl.getProperty();
  SparkConf sparkConf = repl.getSparkContext().getConf();
  for (Object oKey : intpProperty.keySet()) {
    String key = (String) oKey;
    String value = (String) intpProperty.get(key);
    repl.logger.debug(String.format("[%s]: [%s]", key, value));
    // Use trim() so whitespace-only values (e.g. " ") count as empty,
    // matching the filtering logic in createSparkContext().
    if (key.startsWith("spark.") && value.trim().isEmpty()) {
      assertTrue(
          String.format("configuration starting from 'spark.' should not be empty. [%s]", key),
          !sparkConf.contains(key) || !sparkConf.get(key).trim().isEmpty());
    }
  }
}
}