Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
31 commits
Select commit Hold shift + click to select a range
82eaefa
[ZEPPELIN-605] Add support for Scala 2.11
lresende Feb 25, 2016
175be7a
[ZEPPELIN-605] Add Scala 2.11 build profile
lresende Apr 2, 2016
a73b68d
[ZEPPELIN-605] Enable Scala 2.11 REPL support for Spark Interpreter
lresende Apr 7, 2016
5c47d9a
[ZEPPELIN-605] Rewrite Spark interpreter based on Scala 2.11 support
lresende Apr 13, 2016
c88348d
Initial scala-210, 211 support in the single binary
Leemoonsoo Jun 9, 2016
b9e0e1e
scala 2.11 support for spark interpreter
Leemoonsoo Jun 10, 2016
112ae7d
Fix some reflections
Leemoonsoo Jun 10, 2016
222e4e7
Fix reflection on creating SparkCommandLine
Leemoonsoo Jun 10, 2016
dfe6e83
Fix reflection around HttpServer and createTempDir
Leemoonsoo Jun 10, 2016
c999a2d
fix style
Leemoonsoo Jun 11, 2016
2ec51a3
Fix reflection
Leemoonsoo Jun 13, 2016
9424769
style
Leemoonsoo Jun 13, 2016
6b9ff1d
SparkContext sharing seems not working in scala 2.11, disable the test
Leemoonsoo Jun 13, 2016
6d3e7e2
Update FlinkInterpreter
Leemoonsoo Jun 13, 2016
fc9e8a0
Update ignite interpreter
Leemoonsoo Jun 13, 2016
9f5d2a2
Remove unused methods
Leemoonsoo Jun 13, 2016
74d8a62
Force close
lresende Jun 16, 2016
736d055
make binary built with scala 2.11 work with spark_2.10 binary
Leemoonsoo Jun 29, 2016
e068593
Fix pom.xml merge conflict
lresende Jul 7, 2016
a3d0525
Fix new code to support both scala versions
lresende Jul 7, 2016
6e4f7b0
Force scala-library dependency version based on scala
lresende Jul 9, 2016
98790a6
Remove obsolete/commented config
lresende Jul 9, 2016
cbf84c7
Force Scala 2.11 profile to be called
lresende Jul 9, 2016
9194218
initialize imain
Leemoonsoo Jul 9, 2016
de4fc10
Minor change to force build
lresende Jul 11, 2016
dd79443
Minor formatting change to force build
lresende Jul 11, 2016
4e2237a
Update readme to use profile to build scala 2.11 and match CI
lresende Jul 13, 2016
6e5e5ad
Refactor utility methods to helper class
lresende Jul 13, 2016
87f46de
Force build
lresende Jul 13, 2016
c208e69
Fix class reference
lresende Jul 13, 2016
b9bdf86
Properly invoke createTempDir from spark utils
lresende Jul 13, 2016
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 6 additions & 2 deletions .travis.yml
Original file line number Diff line number Diff line change
Expand Up @@ -33,9 +33,13 @@ addons:

matrix:
include:
# Test all modules
# Test all modules with scala 2.10
- jdk: "oraclejdk7"
env: SPARK_VER="1.6.1" HADOOP_VER="2.3" PROFILE="-Pspark-1.6 -Pr -Phadoop-2.3 -Ppyspark -Psparkr -Pscalding -Pexamples" BUILD_FLAG="package -Pbuild-distr" TEST_FLAG="verify -Pusing-packaged-distr" TEST_PROJECTS="-Dpython.test.exclude=''"
env: SPARK_VER="1.6.1" HADOOP_VER="2.3" PROFILE="-Pspark-1.6 -Pr -Phadoop-2.3 -Ppyspark -Psparkr -Pscalding -Pexamples" BUILD_FLAG="package -Dscala-2.10 -Pbuild-distr" TEST_FLAG="verify -Pusing-packaged-distr" TEST_PROJECTS="-Dpython.test.exclude=''"

# Test all modules with scala 2.11
- jdk: "oraclejdk7"
env: SPARK_VER="1.6.1" HADOOP_VER="2.3" PROFILE="-Pspark-1.6 -Pr -Phadoop-2.3 -Ppyspark -Psparkr -Pscalding -Pexamples -Pscala-2.11" BUILD_FLAG="package -Dscala-2.11 -Pbuild-distr" TEST_FLAG="verify -Pusing-packaged-distr" TEST_PROJECTS="-Dpython.test.exclude=''"

# Test spark module for 1.5.2
- jdk: "oraclejdk7"
Expand Down
8 changes: 8 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -294,6 +294,14 @@ And browse [localhost:8080](localhost:8080) in your browser.

For configuration details check __`./conf`__ subdirectory.

### Building for Scala 2.11

To produce a Zeppelin package compiled with Scala 2.11, use the -Pscala-2.11 profile:

```
mvn clean package -Pspark-1.6 -Phadoop-2.4 -Pyarn -Ppyspark -Pscala-2.11 -DskipTests
```

### Package
To package the final distribution including the compressed archive, run:

Expand Down
4 changes: 1 addition & 3 deletions cassandra/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -38,14 +38,11 @@
<cassandra.driver.version>3.0.1</cassandra.driver.version>
<snappy.version>1.0.5.4</snappy.version>
<lz4.version>1.3.0</lz4.version>
<scala.version>2.10.4</scala.version>
<scala.binary.version>2.10</scala.binary.version>
<commons-lang.version>3.3.2</commons-lang.version>
<scalate.version>1.7.1</scalate.version>
<cassandra.guava.version>16.0.1</cassandra.guava.version>

<!--TEST-->
<scalatest.version>2.2.4</scalatest.version>
<junit.version>4.12</junit.version>
<achilles.version>3.2.4-Zeppelin</achilles.version>
<assertj.version>1.7.0</assertj.version>
Expand Down Expand Up @@ -173,6 +170,7 @@
<plugin>
<groupId>org.scala-tools</groupId>
<artifactId>maven-scala-plugin</artifactId>
<version>2.15.2</version>
<executions>
<execution>
<id>compile</id>
Expand Down
31 changes: 16 additions & 15 deletions flink/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -37,8 +37,6 @@
<properties>
<flink.version>1.0.3</flink.version>
<flink.akka.version>2.3.7</flink.akka.version>
<flink.scala.binary.version>2.10</flink.scala.binary.version>
<flink.scala.version>2.10.4</flink.scala.version>
<scala.macros.version>2.0.1</scala.macros.version>
</properties>

Expand Down Expand Up @@ -73,68 +71,71 @@

<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-clients_${flink.scala.binary.version}</artifactId>
<artifactId>flink-clients_${scala.binary.version}</artifactId>
<version>${flink.version}</version>
</dependency>

<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-runtime_${flink.scala.binary.version}</artifactId>
<artifactId>flink-runtime_${scala.binary.version}</artifactId>
<version>${flink.version}</version>
</dependency>

<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-scala_${flink.scala.binary.version}</artifactId>
<artifactId>flink-scala_${scala.binary.version}</artifactId>
<version>${flink.version}</version>
</dependency>

<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-scala-shell_${flink.scala.binary.version}</artifactId>
<artifactId>flink-scala-shell_${scala.binary.version}</artifactId>
<version>${flink.version}</version>
</dependency>

<dependency>
<groupId>com.typesafe.akka</groupId>
<artifactId>akka-actor_${flink.scala.binary.version}</artifactId>
<artifactId>akka-actor_${scala.binary.version}</artifactId>
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

there might still be a need for interpreter individually requiring different version of Scala? I think for now it might make sense to have separate flink.scala, ignite.scala and so on

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

What would be the case for having interpreter specific scala versions ? Now it seems that everything work with latest scala 2.10. I am more towards simplifying the build now, and making it more complex when actually needed. Do we have a concrete case where this is needed now ?

If we are talking here about actually scala 2.10 versus 2.11, I plan to handle that by profiles/modules.

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Yes, I was referring to flink.scala.binary.version, ignite.scala.binary.version.
Maybe Ignite doesn't support Scala 2.11? https://github.com/apache/ignite/blob/master/pom.xml#L453

<version>${flink.akka.version}</version>
</dependency>

<dependency>
<groupId>com.typesafe.akka</groupId>
<artifactId>akka-remote_${flink.scala.binary.version}</artifactId>
<artifactId>akka-remote_${scala.binary.version}</artifactId>
<version>${flink.akka.version}</version>
</dependency>

<dependency>
<groupId>com.typesafe.akka</groupId>
<artifactId>akka-slf4j_${flink.scala.binary.version}</artifactId>
<artifactId>akka-slf4j_${scala.binary.version}</artifactId>
<version>${flink.akka.version}</version>
</dependency>

<dependency>
<groupId>com.typesafe.akka</groupId>
<artifactId>akka-testkit_${flink.scala.binary.version}</artifactId>
<artifactId>akka-testkit_${scala.binary.version}</artifactId>
<version>${flink.akka.version}</version>
</dependency>

<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scala-library</artifactId>
<version>${flink.scala.version}</version>
<version>${scala.version}</version>
<scope>provided</scope>
</dependency>

<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scala-compiler</artifactId>
<version>${flink.scala.version}</version>
<version>${scala.version}</version>
<scope>provided</scope>
</dependency>

<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scala-reflect</artifactId>
<version>${flink.scala.version}</version>
<version>${scala.version}</version>
<scope>provided</scope>
</dependency>

<dependency>
Expand Down Expand Up @@ -169,7 +170,7 @@
<plugin>
<groupId>net.alchim31.maven</groupId>
<artifactId>scala-maven-plugin</artifactId>
<version>3.1.4</version>
<version>3.2.2</version>
<executions>
<!-- Run scala compiler in the process-resources phase, so that dependencies on
scala classes can be resolved later in the (Java) compile phase -->
Expand Down Expand Up @@ -199,7 +200,7 @@
<compilerPlugins combine.children="append">
<compilerPlugin>
<groupId>org.scalamacros</groupId>
<artifactId>paradise_${flink.scala.version}</artifactId>
<artifactId>paradise_${scala.version}</artifactId>
<version>${scala.macros.version}</version>
</compilerPlugin>
</compilerPlugins>
Expand Down
30 changes: 14 additions & 16 deletions flink/src/main/java/org/apache/zeppelin/flink/FlinkInterpreter.java
Original file line number Diff line number Diff line change
Expand Up @@ -17,17 +17,15 @@
*/
package org.apache.zeppelin.flink;

import java.lang.reflect.InvocationTargetException;
import java.io.BufferedReader;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.PrintStream;
import java.io.PrintWriter;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.*;

import org.apache.flink.api.scala.FlinkILoop;
import org.apache.flink.configuration.Configuration;
Expand All @@ -45,6 +43,8 @@
import scala.Console;
import scala.None;
import scala.Some;
import scala.collection.JavaConversions;
import scala.collection.immutable.Nil;
import scala.runtime.AbstractFunction0;
import scala.tools.nsc.Settings;
import scala.tools.nsc.interpreter.IMain;
Expand Down Expand Up @@ -94,7 +94,7 @@ public void open() {

// prepare bindings
imain.interpret("@transient var _binder = new java.util.HashMap[String, Object]()");
binder = (Map<String, Object>) getValue("_binder");
Map<String, Object> binder = (Map<String, Object>) getLastObject();

// import libraries
imain.interpret("import scala.tools.nsc.io._");
Expand All @@ -103,7 +103,10 @@ public void open() {

imain.interpret("import org.apache.flink.api.scala._");
imain.interpret("import org.apache.flink.api.common.functions._");
imain.bindValue("env", env);

binder.put("env", env);
imain.interpret("val env = _binder.get(\"env\").asInstanceOf["
+ env.getClass().getName() + "]");
}

private boolean localMode() {
Expand Down Expand Up @@ -192,16 +195,11 @@ private List<File> classPath(ClassLoader cl) {
return paths;
}

public Object getValue(String name) {
IMain imain = flinkIloop.intp();
Object ret = imain.valueOfTerm(name);
if (ret instanceof None) {
return null;
} else if (ret instanceof Some) {
return ((Some) ret).get();
} else {
return ret;
}
public Object getLastObject() {
Object obj = imain.lastRequest().lineRep().call(
"$result",
JavaConversions.asScalaBuffer(new LinkedList<Object>()));
return obj;
}

@Override
Expand Down
10 changes: 4 additions & 6 deletions ignite/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -33,9 +33,7 @@
<url>http://zeppelin.apache.org</url>

<properties>
<ignite.version>1.6.0</ignite.version>
<ignite.scala.binary.version>2.10</ignite.scala.binary.version>
<ignite.scala.version>2.10.4</ignite.scala.version>
<ignite.version>1.5.0.final</ignite.version>
</properties>

<dependencies>
Expand Down Expand Up @@ -73,19 +71,19 @@
<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scala-library</artifactId>
<version>${ignite.scala.version}</version>
<version>${scala.version}</version>
</dependency>

<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scala-compiler</artifactId>
<version>${ignite.scala.version}</version>
<version>${scala.version}</version>
</dependency>

<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scala-reflect</artifactId>
<version>${ignite.scala.version}</version>
<version>${scala.version}</version>
</dependency>

<dependency>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,7 @@
import scala.Console;
import scala.None;
import scala.Some;
import scala.collection.JavaConversions;
import scala.tools.nsc.Settings;
import scala.tools.nsc.interpreter.IMain;
import scala.tools.nsc.interpreter.Results.Result;
Expand Down Expand Up @@ -174,16 +175,11 @@ private List<File> classPath(ClassLoader cl) {
return paths;
}

public Object getValue(String name) {
Object val = imain.valueOfTerm(name);

if (val instanceof None) {
return null;
} else if (val instanceof Some) {
return ((Some) val).get();
} else {
return val;
}
public Object getLastObject() {
Object obj = imain.lastRequest().lineRep().call(
"$result",
JavaConversions.asScalaBuffer(new LinkedList<Object>()));
return obj;
}

private Ignite getIgnite() {
Expand Down Expand Up @@ -222,7 +218,7 @@ private Ignite getIgnite() {

private void initIgnite() {
imain.interpret("@transient var _binder = new java.util.HashMap[String, Object]()");
Map<String, Object> binder = (Map<String, Object>) getValue("_binder");
Map<String, Object> binder = (Map<String, Object>) getLastObject();

if (getIgnite() != null) {
binder.put("ignite", ignite);
Expand Down
Loading