diff --git a/spark/src/main/java/org/apache/zeppelin/spark/SparkInterpreter.java b/spark/src/main/java/org/apache/zeppelin/spark/SparkInterpreter.java
index 29c322d78d6..7a6df7a92ee 100644
--- a/spark/src/main/java/org/apache/zeppelin/spark/SparkInterpreter.java
+++ b/spark/src/main/java/org/apache/zeppelin/spark/SparkInterpreter.java
@@ -329,6 +329,7 @@ public Object createSparkSession() {
       }
     }
 
+    setupConfForPySpark(conf);
     Class SparkSession = Utils.findClass("org.apache.spark.sql.SparkSession");
     Object builder = Utils.invokeStaticMethod(SparkSession, "builder");
     Utils.invokeMethod(builder, "config", new Class[]{ SparkConf.class }, new Object[]{ conf });
@@ -442,8 +443,12 @@ public SparkContext createSparkContext_1() {
         conf.set(key, val);
       }
     }
+    setupConfForPySpark(conf);
+    SparkContext sparkContext = new SparkContext(conf);
+    return sparkContext;
+  }
 
-    //TODO(jongyoul): Move these codes into PySparkInterpreter.java
+  private void setupConfForPySpark(SparkConf conf) {
     String pysparkBasePath = getSystemDefault("SPARK_HOME", null, null);
     File pysparkPath;
     if (null == pysparkBasePath) {
@@ -456,7 +461,8 @@ public SparkContext createSparkContext_1() {
     }
 
     //Only one of py4j-0.9-src.zip and py4j-0.8.2.1-src.zip should exist
-    String[] pythonLibs = new String[]{"pyspark.zip", "py4j-0.9-src.zip", "py4j-0.8.2.1-src.zip"};
+    String[] pythonLibs = new String[]{"pyspark.zip", "py4j-0.9-src.zip", "py4j-0.8.2.1-src.zip",
+        "py4j-0.10.1-src.zip"};
     ArrayList pythonLibUris = new ArrayList<>();
     for (String lib : pythonLibs) {
       File libFile = new File(pysparkPath, lib);
@@ -486,9 +492,6 @@ public SparkContext createSparkContext_1() {
     if (getProperty("master").equals("yarn-client")) {
       conf.set("spark.yarn.isPython", "true");
     }
-
-    SparkContext sparkContext = new SparkContext(conf);
-    return sparkContext;
   }
 
   static final String toString(Object o) {
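
For reference, here is a minimal self-contained sketch of the PySpark library-discovery logic that this patch extracts into the new setupConfForPySpark helper and extends with py4j-0.10.1-src.zip. It is only an illustration assembled from the visible hunks: the SPARK_HOME/python/lib location, the existence check, and the toURI() conversion are assumptions about the parts of the method the diff elides, and the PySparkLibScan class with its main method exists only for this demo.

import java.io.File;
import java.util.ArrayList;

public class PySparkLibScan {
  public static void main(String[] args) {
    // Assumption for illustration: PySpark archives live under $SPARK_HOME/python/lib,
    // mirroring the pysparkPath resolved from SPARK_HOME in the patched method.
    String sparkHome = System.getenv("SPARK_HOME");
    File pysparkPath = new File(sparkHome, "python" + File.separator + "lib");

    // Same candidate list as the patch; only one of the py4j-*-src.zip archives is
    // expected to exist for a given Spark distribution.
    String[] pythonLibs = new String[]{"pyspark.zip", "py4j-0.9-src.zip",
        "py4j-0.8.2.1-src.zip", "py4j-0.10.1-src.zip"};

    // Collect the URIs of the archives that are actually present; the patched helper
    // feeds such a list into the SparkConf (that wiring is elided in the diff above).
    ArrayList<String> pythonLibUris = new ArrayList<>();
    for (String lib : pythonLibs) {
      File libFile = new File(pysparkPath, lib);
      if (libFile.exists()) {
        pythonLibUris.add(libFile.toURI().toString());
      }
    }
    System.out.println("PySpark archives found: " + pythonLibUris);
  }
}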