diff --git a/.travis.yml b/.travis.yml
index b09e228b2a7..8bd717d2733 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -21,20 +21,39 @@ before_install:
- "export DISPLAY=:99.0"
- "sh -e /etc/init.d/xvfb start"
+install:
+ - mvn package -DskipTests -Phadoop-2.3 -B
+
before_script:
- - mvn package -Pbuild-distr -B
- - ./testing/startSparkCluster.sh
+ -
script:
- - mvn verify -Pusing-packaged-distr -B
+# spark 1.3
+ - mvn package -Pbuild-distr -Phadoop-2.3 -B
+ - ./testing/startSparkCluster.sh 1.3.1 2.3
+ - SPARK_HOME=./spark-1.3.1-bin-hadoop2.3 mvn verify -Pusing-packaged-distr -Phadoop-2.3 -B
+ - ./testing/stopSparkCluster.sh 1.3.1 2.3
+# spark 1.2
+ - mvn clean package -DskipTests -Pspark-1.2 -Phadoop-2.3 -B -pl '!zeppelin-web,!zeppelin-distribution'
+ - mvn package -Pbuild-distr -Pspark-1.2 -Phadoop-2.3 -B
+ - ./testing/startSparkCluster.sh 1.2.1 2.3
+ - SPARK_HOME=./spark-1.2.1-bin-hadoop2.3 mvn verify -Pusing-packaged-distr -Pspark-1.2 -Phadoop-2.3 -B
+ - ./testing/stopSparkCluster.sh 1.2.1 2.3
+# spark 1.1
+ - mvn clean package -DskipTests -Pspark-1.1 -Phadoop-2.3 -B -pl '!zeppelin-web,!zeppelin-distribution'
+ - mvn package -Pbuild-distr -Pspark-1.1 -Phadoop-2.3 -B
+ - ./testing/startSparkCluster.sh 1.1.1 2.3
+ - SPARK_HOME=./spark-1.1.1-bin-hadoop2.3 mvn verify -Pusing-packaged-distr -Pspark-1.1 -Phadoop-2.3 -B
+ - ./testing/stopSparkCluster.sh 1.1.1 2.3
after_failure:
- cat target/rat.txt
+ - cat zeppelin-server/target/rat.txt
- cat zeppelin-distribution/target/zeppelin-*-SNAPSHOT/zeppelin-*-SNAPSHOT/logs/zeppelin*.log
- cat zeppelin-distribution/target/zeppelin-*-SNAPSHOT/zeppelin-*-SNAPSHOT/logs/zeppelin*.out
after_script:
- - ./testing/stopSparkCluster.sh
+ -
notifications:
slack:
diff --git a/pom.xml b/pom.xml
index fcd1a2511ef..bbde0846c67 100644
--- a/pom.xml
+++ b/pom.xml
@@ -98,111 +98,44 @@
- 1.1.1
- 2.10.4
- 2.10
- 2.0.1
- 0.18.1
- shaded-protobuf
- org.spark-project.akka
- 2.2.3-shaded-protobuf
1.7.10
1.2.17
- 1.0.4
- 2.4.1
- ${hadoop.version}
- 0.94.6
- 3.4.5
- 0.12.0
- 0.10.0
- 10.4.2.0
- 1.4.3
- 1.2.3
- 8.1.14.v20131031
- 3.2.10
- 0.3.6
- 3.0.0
- 1.7.6
- 0.7.1
- 3.2
- 4.3.6
- 2.4.4
- 1.8.8
- 1.0.5
- 4.0.17.Final
-
+ 0.9.0
+ 2.2
15.0
- 2.2.1
+
64m
512m
-
- 1.7
- 2.7.7
- 2.2
- 0.9.0
-
-
-
- org.eclipse.jetty
- jetty-util
- ${jetty.version}
-
-
-
- org.eclipse.jetty
- jetty-security
- ${jetty.version}
-
+
- org.eclipse.jetty
- jetty-plus
- ${jetty.version}
+ org.slf4j
+ slf4j-api
+ ${slf4j.version}
- org.eclipse.jetty
- jetty-server
- ${jetty.version}
+ org.slf4j
+ slf4j-log4j12
+ ${slf4j.version}
- org.eclipse.jetty.aggregate
- jetty-all-server
- ${jetty.version}
+ log4j
+ log4j
+ ${log4j.version}
-
org.apache.thrift
libthrift
${libthrift.version}
-
-
-
- com.google.code.gson
- gson
- ${gson.version}
-
-
-
- com.google.guava
- guava
- ${guava.version}
-
-
-
- org.apache.httpcomponents
- httpclient
- ${commons.httpclient.version}
-
-
org.apache.httpcomponents
httpcore
@@ -210,9 +143,9 @@
- commons-collections
- commons-collections
- 3.2.1
+ org.apache.httpcomponents
+ httpclient
+ 4.3.6
@@ -221,16 +154,17 @@
2.5
+
- org.apache.commons
- commons-lang3
- 3.3.2
+ com.google.code.gson
+ gson
+ ${gson.version}
- org.apache.commons
- commons-math3
- ${commons.math3.version}
+ commons-configuration
+ commons-configuration
+ 1.9
@@ -239,28 +173,11 @@
1.5
-
- commons-configuration
- commons-configuration
- 1.9
-
-
- commons-logging
- commons-logging
-
-
-
commons-io
commons-io
2.4
-
-
- org.slf4j
- *
-
-
@@ -270,282 +187,11 @@
- com.google.code.findbugs
- jsr305
- 1.3.9
-
-
-
-
-
- org.slf4j
- slf4j-api
- ${slf4j.version}
-
-
-
- org.slf4j
- slf4j-log4j12
- ${slf4j.version}
-
-
-
- org.slf4j
- jul-to-slf4j
- ${slf4j.version}
-
-
-
- org.slf4j
- jcl-over-slf4j
- ${slf4j.version}
-
-
-
-
- log4j
- log4j
- ${log4j.version}
-
-
-
- com.ning
- compress-lzf
- 1.0.0
-
-
-
- org.xerial.snappy
- snappy-java
- ${snappy.version}
-
-
-
- com.clearspring.analytics
- stream
- 2.5.1
-
-
-
- it.unimi.dsi
- fastutil
-
-
-
-
-
- com.google.protobuf
- protobuf-java
- ${protobuf.version}
-
-
-
- com.twitter
- parquet-hadoop-bundle
- ${parquet.version}
-
-
-
- com.twitter
- chill_${scala.binary.version}
- ${chill.version}
-
-
- org.ow2.asm
- asm
-
-
- org.ow2.asm
- asm-commons
-
-
-
-
-
- com.twitter
- chill-java
- ${chill.version}
-
-
- org.ow2.asm
- asm
-
-
- org.ow2.asm
- asm-commons
-
-
-
-
-
-
-
- ${akka.group}
- akka-actor_${scala.binary.version}
- ${akka.version}
-
-
-
- ${akka.group}
- akka-remote_${scala.binary.version}
- ${akka.version}
-
-
-
- ${akka.group}
- akka-slf4j_${scala.binary.version}
- ${akka.version}
-
-
-
- ${akka.group}
- akka-testkit_${scala.binary.version}
- ${akka.version}
-
-
-
- colt
- colt
- 1.2.0
-
-
-
- org.apache.mesos
- mesos
- ${mesos.version}
- ${mesos.classifier}
-
-
- com.google.protobuf
- protobuf-java
-
-
-
-
-
- commons-net
- commons-net
- 2.2
-
-
-
- io.netty
- netty-all
- ${io.netty.version}
-
-
-
- io.netty
- netty
- 3.6.6.Final
-
-
-
- org.apache.derby
- derby
- ${derby.version}
-
-
-
- com.codahale.metrics
- metrics-core
- ${codahale.metrics.version}
-
-
-
- com.codahale.metrics
- metrics-jvm
- ${codahale.metrics.version}
-
-
-
- com.codahale.metrics
- metrics-json
- ${codahale.metrics.version}
-
-
-
- com.codahale.metrics
- metrics-ganglia
- ${codahale.metrics.version}
-
-
-
- com.codahale.metrics
- metrics-graphite
- ${codahale.metrics.version}
-
-
-
-
-
- org.scala-lang
- scala-compiler
- ${scala.version}
-
-
-
- org.scala-lang
- scala-reflect
- ${scala.version}
-
-
-
- org.scala-lang
- jline
- ${scala.version}
-
-
-
- org.scala-lang
- scala-library
- ${scala.version}
-
-
-
- org.scala-lang
- scala-actors
- ${scala.version}
-
-
-
- org.scala-lang
- scalap
- ${scala.version}
-
-
-
- org.scalatest
- scalatest_${scala.binary.version}
- 2.1.5
- test
-
-
-
- org.easymock
- easymockclassextension
- 3.1
- test
-
-
-
- org.mockito
- mockito-all
- 1.9.0
- test
+ com.google.guava
+ guava
+ ${guava.version}
-
- org.scalacheck
- scalacheck_${scala.binary.version}
- 1.11.3
- test
-
junit
@@ -553,348 +199,6 @@
4.11
test
-
-
- com.novocode
- junit-interface
- 0.10
- test
-
-
-
- org.apache.curator
- curator-recipes
- 2.4.0
-
-
- org.jboss.netty
- netty
-
-
-
-
-
- org.apache.hadoop
- hadoop-client
- ${hadoop.version}
-
-
- asm
- asm
-
-
- org.ow2.asm
- asm
-
-
- org.jboss.netty
- netty
-
-
- commons-logging
- commons-logging
-
-
- org.mortbay.jetty
- servlet-api-2.5
-
-
- junit
- junit
-
-
-
-
-
- org.apache.avro
- avro
- ${avro.version}
-
-
-
- org.apache.avro
- avro-ipc
- ${avro.version}
-
-
- io.netty
- netty
-
-
- org.mortbay.jetty
- jetty
-
-
- org.mortbay.jetty
- jetty-util
-
-
- org.mortbay.jetty
- servlet-api
-
-
- org.apache.velocity
- velocity
-
-
-
-
-
- org.apache.avro
- avro-mapred
- ${avro.version}
-
-
- io.netty
- netty
-
-
- org.mortbay.jetty
- jetty
-
-
- org.mortbay.jetty
- jetty-util
-
-
- org.mortbay.jetty
- servlet-api
-
-
- org.apache.velocity
- velocity
-
-
-
-
-
- org.apache.zookeeper
- zookeeper
- ${zookeeper.version}
-
-
-
-
- net.java.dev.jets3t
- jets3t
- ${jets3t.version}
- runtime
-
-
- commons-logging
- commons-logging
-
-
-
-
-
- org.apache.hadoop
- hadoop-yarn-api
- ${yarn.version}
-
-
- asm
- asm
-
-
- org.ow2.asm
- asm
-
-
- org.jboss.netty
- netty
-
-
- commons-logging
- commons-logging
-
-
-
-
-
- org.apache.hadoop
- hadoop-yarn-common
- ${yarn.version}
-
-
- asm
- asm
-
-
- org.ow2.asm
- asm
-
-
- org.jboss.netty
- netty
-
-
- javax.servlet
- servlet-api
-
-
- commons-logging
- commons-logging
-
-
-
-
-
- org.apache.hadoop
- hadoop-yarn-server-web-proxy
- ${yarn.version}
-
-
- asm
- asm
-
-
- org.ow2.asm
- asm
-
-
- org.jboss.netty
- netty
-
-
- javax.servlet
- servlet-api
-
-
- commons-logging
- commons-logging
-
-
-
-
-
- org.apache.hadoop
- hadoop-yarn-client
- ${yarn.version}
-
-
- asm
- asm
-
-
- org.ow2.asm
- asm
-
-
- org.jboss.netty
- netty
-
-
- javax.servlet
- servlet-api
-
-
- commons-logging
- commons-logging
-
-
-
-
-
- org.codehaus.jackson
- jackson-mapper-asl
- ${codehaus.jackson.version}
-
-
-
- org.codehaus.jackson
- jackson-core-asl
- ${codehaus.jackson.version}
-
-
-
- org.codehaus.jackson
- jackson-xc
- ${codehaus.jackson.version}
-
-
-
- org.codehaus.jackson
- jackson-jaxrs
- ${codehaus.jackson.version}
-
-
-
- com.fasterxml.jackson.core
- jackson-databind
- ${fasterxml.jackson.version}
-
-
-
-
- com.fasterxml.jackson.module
- jackson-module-scala_2.10
- ${fasterxml.jackson.version}
-
-
- com.google.guava
- guava
-
-
-
-
-
- com.thoughtworks.paranamer
- paranamer
- 2.6
-
-
-
- com.sun.jersey
- jersey-server
- 1.9
-
-
-
- com.sun.jersey
- jersey-core
- 1.9
-
-
-
- com.sun.jersey
- jersey-client
- 1.9
-
-
-
- com.sun.xml.bind
- jaxb-impl
- 2.2.6
-
-
-
-
-
- org.json4s
- json4s-core_2.10
- ${json4s.version}
-
-
-
- org.json4s
- json4s-native_2.10
- ${json4s.version}
-
-
-
- org.json4s
- json4s-jackson_2.10
- ${json4s.version}
-
-
-
- org.json4s
- json4s-ext_2.10
- ${json4s.version}
-
-
@@ -1112,6 +416,7 @@
STYLE.md
Roadmap.md
conf/interpreter.json
+ spark-*-bin*/**
@@ -1315,339 +620,5 @@
-
-
- spark-1.1
-
-
-
-
- 1.1.1
-
-
-
-
- cassandra-spark-1.1
-
-
- com.datastax.spark
- spark-cassandra-connector_${scala.binary.version}
- 1.1.1
-
-
- org.joda
- joda-convert
-
-
-
-
-
- 1.1.1
- 0.9.2
-
-
-
-
- spark-1.2
-
-
-
- 2.3.4-spark
- 1.2.1
- 0.13.1a
- 10.10.1.1
- 1.6.0rc3
- 0.5.0
- 4.2.6
- 3.1.1
- 4.0.23.Final
-
-
-
-
- cassandra-spark-1.2
-
- 2.3.4-spark
- 1.2.1
- 0.13.1a
- 10.10.1.1
- 1.6.0rc3
- 0.5.0
- 4.2.6
- 3.1.1
- 4.0.23.Final
- 0.9.2
-
-
-
- com.datastax.spark
- spark-cassandra-connector_${scala.binary.version}
- 1.2.1
-
-
- org.joda
- joda-convert
-
-
-
-
-
-
-
- spark-1.3
-
- 2.3.4-spark
- 1.3.1
- 0.21.0
- 0.98.7
- hbase
- org.spark-project.hive
- 0.13.1a
- 10.10.1.1
- 3.0.0.v201112011016
- 1.6.0rc3
- 0.5.0
- 2.4.0
- 2.0.8
-
- 3.1.0
- 4.2.6
- 3.1.1
- 4.0.23.Final
- 1.9.13
- 2.4.4
- 1.1.1.6
- 0.21.0
-
-
- true
-
-
-
-
-
-
- cassandra-spark-1.3
-
- 2.3.4-spark
- 1.3.1
- 0.21.0
- 0.98.7
- hbase
- org.spark-project.hive
- 0.13.1a
- 10.10.1.1
- 3.0.0.v201112011016
- 1.6.0rc3
- 0.5.0
- 2.4.0
- 2.0.8
-
- 3.1.0
- 4.2.6
- 3.1.1
- 4.0.23.Final
- 1.9.13
- 2.4.4
- 1.1.1.6
- 0.21.0
- 0.9.2
-
-
-
-
- com.datastax.spark
- spark-cassandra-connector_${scala.binary.version}
-
- 1.3.0-SNAPSHOT
-
-
- org.joda
- joda-convert
-
-
-
-
-
-
-
- spark-1.4
-
- 2.3.4-spark
- 1.4.0
- 0.21.1
- 0.98.7-hadoop2
- hbase
- org.spark-project.hive
- 0.13.1a
- 10.10.1.1
- 3.0.0.v201112011016
- 1.6.0rc3
- 0.5.0
- 2.4.0
- 2.0.8
- hadoop2
- 3.1.0
- 4.3.2
- 3.4.1
- 4.0.23.Final
- 1.9.13
- 2.4.4
- 1.1.1.7
- 0.21.0
-
-
-
-
-
-
-
-
-
- hadoop-0.23
-
-
-
- org.apache.avro
- avro
-
-
-
- 0.23.10
-
-
-
-
- hadoop-2.2
-
- 2.2.0
- 2.5.0
-
-
-
-
- hadoop-2.3
-
- 2.3.0
- 2.5.0
- 0.9.0
-
-
-
-
- hadoop-2.4
-
- 2.4.0
- 2.5.0
- 0.9.3
-
-
-
-
- hadoop-2.6
-
- 2.6.0
- 2.5.0
- 0.9.3
- 1.9.13
-
-
-
-
- mapr3
-
- false
-
-
- 1.0.3-mapr-3.0.3
- 2.3.0-mapr-4.0.0-FCS
- 0.94.17-mapr-1405
- 3.4.5-mapr-1406
-
-
-
-
- mapr4
-
- false
-
-
- 2.3.0-mapr-4.0.0-FCS
- 2.3.0-mapr-4.0.0-FCS
- 0.94.17-mapr-1405-4.0.0-FCS
- 3.4.5-mapr-1406
-
-
-
- org.apache.curator
- curator-recipes
- 2.4.0
-
-
- org.apache.zookeeper
- zookeeper
-
-
-
-
- org.apache.zookeeper
- zookeeper
- 3.4.5-mapr-1406
-
-
-
-
-
-
- hadoop-provided
-
- false
-
-
-
- org.apache.hadoop
- hadoop-client
- provided
-
-
- org.apache.hadoop
- hadoop-yarn-api
- provided
-
-
- org.apache.hadoop
- hadoop-yarn-common
- provided
-
-
- org.apache.hadoop
- hadoop-yarn-server-web-proxy
- provided
-
-
- org.apache.hadoop
- hadoop-yarn-client
- provided
-
-
- org.apache.avro
- avro
- provided
-
-
- org.apache.avro
- avro-ipc
- provided
-
-
- org.apache.zookeeper
- zookeeper
- ${zookeeper.version}
- provided
-
-
-
diff --git a/spark/pom.xml b/spark/pom.xml
index b4c05e595b8..dc37bf75266 100644
--- a/spark/pom.xml
+++ b/spark/pom.xml
@@ -32,6 +32,15 @@
Zeppelin: Spark
Zeppelin spark support
http://zeppelin.incubator.apache.org
+
+
+
+ 1.3.1
+ 2.10.4
+ 2.10
+
+ 2.3.0
+
@@ -51,58 +60,19 @@
provided
-
- com.google.code.gson
- gson
-
-
-
- commons-collections
- commons-collections
-
-
+
org.apache.spark
spark-core_2.10
${spark.version}
- com.fasterxml.jackson.core
- jackson-databind
+ org.apache.hadoop
+ hadoop-client
-
- com.fasterxml.jackson.core
- jackson-databind
-
-
-
- org.scala-lang
- scala-library
-
-
-
- org.scala-lang
- scala-compiler
-
-
-
- org.scala-lang
- scala-reflect
-
-
-
- org.eclipse.jetty.aggregate
- jetty-all-server
-
-
-
- io.netty
- netty-all
-
-
org.apache.spark
spark-repl_2.10
@@ -141,6 +111,15 @@
${spark.version}
+
+
+
+ org.apache.hadoop
+ hadoop-client
+ ${hadoop.version}
+
+
+
org.apache.maven
@@ -276,6 +255,193 @@
+
+ spark-1.1
+
+
+
+
+ 1.1.1
+
+
+
+
+ cassandra-spark-1.1
+
+
+ com.datastax.spark
+ spark-cassandra-connector_${scala.binary.version}
+ 1.1.1
+
+
+ org.joda
+ joda-convert
+
+
+
+
+
+ 1.1.1
+
+
+
+
+ spark-1.2
+
+
+
+ 1.2.1
+
+
+
+
+ cassandra-spark-1.2
+
+ 1.2.1
+
+
+
+ com.datastax.spark
+ spark-cassandra-connector_${scala.binary.version}
+ 1.2.1
+
+
+ org.joda
+ joda-convert
+
+
+
+
+
+
+
+ spark-1.3
+
+
+ 1.3.1
+
+
+
+
+
+
+
+
+ cassandra-spark-1.3
+
+ 1.3.0
+
+
+
+
+ com.datastax.spark
+ spark-cassandra-connector_${scala.binary.version}
+
+ 1.3.0-SNAPSHOT
+
+
+ org.joda
+ joda-convert
+
+
+
+
+
+
+
+ spark-1.4
+
+ 1.4.0
+
+
+
+
+
+
+
+ hadoop-0.23
+
+
+
+ org.apache.avro
+ avro
+
+
+
+ 0.23.10
+
+
+
+
+ hadoop-2.2
+
+ 2.2.0
+
+
+
+
+ hadoop-2.3
+
+ 2.3.0
+
+
+
+
+ hadoop-2.4
+
+ 2.4.0
+
+
+
+
+ hadoop-2.6
+
+ 2.6.0
+
+
+
+
+ mapr3
+
+ false
+
+
+ 1.0.3-mapr-3.0.3
+ 2.3.0-mapr-4.0.0-FCS
+
+
+
+
+ mapr4
+
+ false
+
+
+ 2.3.0-mapr-4.0.0-FCS
+ 2.3.0-mapr-4.0.0-FCS
+
+
+
+ org.apache.curator
+ curator-recipes
+ 2.4.0
+
+
+ org.apache.zookeeper
+ zookeeper
+
+
+
+
+ org.apache.zookeeper
+ zookeeper
+ 3.4.5-mapr-1406
+
+
+
+
yarn
@@ -284,6 +450,63 @@
spark-yarn_2.10
${spark.version}
+
+
+ org.apache.hadoop
+ hadoop-yarn-api
+ ${yarn.version}
+
+
+
+
+
+
+ hadoop-provided
+
+ false
+
+
+
+ org.apache.hadoop
+ hadoop-client
+ provided
+
+
+ org.apache.hadoop
+ hadoop-yarn-api
+ provided
+
+
+ org.apache.hadoop
+ hadoop-yarn-common
+ provided
+
+
+ org.apache.hadoop
+ hadoop-yarn-server-web-proxy
+ provided
+
+
+ org.apache.hadoop
+ hadoop-yarn-client
+ provided
+
+
+ org.apache.avro
+ avro
+ provided
+
+
+ org.apache.avro
+ avro-ipc
+ provided
+
+
+ org.apache.zookeeper
+ zookeeper
+ ${zookeeper.version}
+ provided
+
diff --git a/spark/src/test/java/org/apache/zeppelin/spark/SparkInterpreterTest.java b/spark/src/test/java/org/apache/zeppelin/spark/SparkInterpreterTest.java
index c49f1e123ae..c97e824f5b3 100644
--- a/spark/src/test/java/org/apache/zeppelin/spark/SparkInterpreterTest.java
+++ b/spark/src/test/java/org/apache/zeppelin/spark/SparkInterpreterTest.java
@@ -55,8 +55,8 @@ public static int getSparkVersionNumber() {
return 0;
}
- String[] split = repl.getSparkContext().version().split(".");
- int version = Integer.parseInt(split[0]) + Integer.parseInt(split[1]);
+ String[] split = repl.getSparkContext().version().split("\\.");
+ int version = Integer.parseInt(split[0]) * 10 + Integer.parseInt(split[1]);
return version;
}
diff --git a/tajo/pom.xml b/tajo/pom.xml
index cbed17d3ba8..0ed0fce934a 100644
--- a/tajo/pom.xml
+++ b/tajo/pom.xml
@@ -32,7 +32,7 @@
http://www.apache.org
- 2.5.0
+ 0.10.0
diff --git a/testing/startSparkCluster.sh b/testing/startSparkCluster.sh
index 896ed528007..e2d80796477 100755
--- a/testing/startSparkCluster.sh
+++ b/testing/startSparkCluster.sh
@@ -1,4 +1,4 @@
-#!/bin/sh
+#!/bin/bash
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
@@ -16,11 +16,33 @@
# limitations under the License.
#
-wget http://apache.mesi.com.ar/spark/spark-1.1.1/spark-1.1.1-bin-hadoop2.3.tgz
-tar zxvf spark-1.1.1-bin-hadoop2.3.tgz
-cd spark-1.1.1-bin-hadoop2.3
+
+if [ $# -ne 2 ]; then
+ echo "usage) $0 [spark version] [hadoop version]"
+ echo " eg) $0 1.3.1 2.6"
+ exit 1
+fi
+
+SPARK_VERSION="${1}"
+HADOOP_VERSION="${2}"
+
+FWDIR=$(dirname "${BASH_SOURCE-$0}")
+ZEPPELIN_HOME="$(cd "${FWDIR}/.."; pwd)"
+export SPARK_HOME=${ZEPPELIN_HOME}/spark-${SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}
+
+if [ ! -d "${SPARK_HOME}" ]; then
+ wget -q http://www.us.apache.org/dist/spark/spark-${SPARK_VERSION}/spark-${SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz
+ tar zxf spark-${SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz
+fi
+
+# create PID dir. test cases detect the pid file so they can select the active spark home dir for tests
+mkdir -p ${SPARK_HOME}/run
+export SPARK_PID_DIR=${SPARK_HOME}/run
+
+
+# start
export SPARK_MASTER_PORT=7071
export SPARK_MASTER_WEBUI_PORT=7072
-./sbin/start-master.sh
-./bin/spark-class org.apache.spark.deploy.worker.Worker spark://localhost:7071 &> worker.log &
-./bin/spark-class org.apache.spark.deploy.worker.Worker spark://localhost:7071 &> worker2.log &
+export SPARK_WORKER_WEBUI_PORT=8082
+${SPARK_HOME}/sbin/start-master.sh
+${SPARK_HOME}/sbin/start-slave.sh 1 `hostname`:${SPARK_MASTER_PORT}
diff --git a/testing/stopSparkCluster.sh b/testing/stopSparkCluster.sh
index 4f8d7a9a3a1..0ae312d0a02 100755
--- a/testing/stopSparkCluster.sh
+++ b/testing/stopSparkCluster.sh
@@ -1,4 +1,4 @@
-#!/bin/sh#
+#!/bin/bash
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
@@ -16,8 +16,22 @@
# limitations under the License.
#
-cd spark-1.1.1-bin-hadoop2.3
-./sbin/stop-master.sh
-kill $(ps -ef | grep 'org.apache.spark.deploy.worker.Worker' | awk '{print $2}')
-cd ..
-rm -rf spark-1.1.1-bin-hadoop2.3*
+if [ $# -ne 2 ]; then
+ echo "usage) $0 [spark version] [hadoop version]"
+ echo " eg) $0 1.3.1 2.6"
+ exit 1
+fi
+
+SPARK_VERSION="${1}"
+HADOOP_VERSION="${2}"
+
+FWDIR=$(dirname "${BASH_SOURCE-$0}")
+ZEPPELIN_HOME="$(cd "${FWDIR}/.."; pwd)"
+export SPARK_HOME=${ZEPPELIN_HOME}/spark-${SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}
+
+# set PID dir so the stop scripts can locate the daemon pid files
+export SPARK_PID_DIR=${SPARK_HOME}/run
+
+
+${SPARK_HOME}/sbin/spark-daemon.sh stop org.apache.spark.deploy.worker.Worker 1
+${SPARK_HOME}/sbin/stop-master.sh
diff --git a/zeppelin-interpreter/pom.xml b/zeppelin-interpreter/pom.xml
index ba6e88d0ff0..980fe4ac568 100644
--- a/zeppelin-interpreter/pom.xml
+++ b/zeppelin-interpreter/pom.xml
@@ -77,6 +77,7 @@
org.mockito
mockito-all
+ 1.9.0
test
diff --git a/zeppelin-server/pom.xml b/zeppelin-server/pom.xml
index 0e68df2ba80..da1ec5d87fd 100644
--- a/zeppelin-server/pom.xml
+++ b/zeppelin-server/pom.xml
@@ -32,6 +32,34 @@
Zeppelin: Server
http://www.nflabs.com
+
+ 2.7.7
+ 8.1.14.v20131031
+ 4.3.6
+
+
+
+
+
+ org.scala-lang
+ scala-library
+ 2.10.4
+
+
+
+ org.scala-lang
+ scala-compiler
+ 2.10.4
+
+
+
+ org.scala-lang
+ scalap
+ 2.10.4
+
+
+
+
${project.groupId}
@@ -111,6 +139,18 @@
com.sun.jersey
jersey-servlet
+
+ org.scala-lang
+ scala-library
+
+
+ org.scala-lang
+ scala-compiler
+
+
+ org.scala-lang
+ scalap
+
@@ -248,6 +288,7 @@
org.apache.httpcomponents
httpclient
+ 4.3.6
test
@@ -280,7 +321,8 @@
org.scalatest
- scalatest_${scala.binary.version}
+ scalatest_2.10
+ 2.1.1
test
diff --git a/zeppelin-server/src/main/java/org/apache/zeppelin/server/ZeppelinServer.java b/zeppelin-server/src/main/java/org/apache/zeppelin/server/ZeppelinServer.java
index 0072b87b085..bd93f8087b6 100644
--- a/zeppelin-server/src/main/java/org/apache/zeppelin/server/ZeppelinServer.java
+++ b/zeppelin-server/src/main/java/org/apache/zeppelin/server/ZeppelinServer.java
@@ -69,7 +69,9 @@ public class ZeppelinServer extends Application {
private SchedulerFactory schedulerFactory;
public static Notebook notebook;
- static NotebookServer notebookServer;
+ public static NotebookServer notebookServer;
+
+ public static Server jettyServer;
private InterpreterFactory replFactory;
@@ -79,7 +81,7 @@ public static void main(String[] args) throws Exception {
ZeppelinConfiguration conf = ZeppelinConfiguration.create();
conf.setProperty("args", args);
- final Server jettyServer = setupJettyServer(conf);
+ jettyServer = setupJettyServer(conf);
notebookServer = setupNotebookServer(conf);
// REST api
diff --git a/zeppelin-server/src/test/java/org/apache/zeppelin/rest/AbstractTestRestApi.java b/zeppelin-server/src/test/java/org/apache/zeppelin/rest/AbstractTestRestApi.java
index ac40dda07f2..744c1e0ce33 100644
--- a/zeppelin-server/src/test/java/org/apache/zeppelin/rest/AbstractTestRestApi.java
+++ b/zeppelin-server/src/test/java/org/apache/zeppelin/rest/AbstractTestRestApi.java
@@ -17,17 +17,20 @@
package org.apache.zeppelin.rest;
+import java.io.File;
import java.io.IOException;
import java.lang.ref.WeakReference;
+import java.net.InetAddress;
+import java.net.UnknownHostException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
-import java.util.concurrent.TimeUnit;
import org.apache.commons.httpclient.HttpClient;
import org.apache.commons.httpclient.methods.ByteArrayRequestEntity;
import org.apache.commons.httpclient.methods.GetMethod;
import org.apache.commons.httpclient.methods.PostMethod;
import org.apache.commons.httpclient.methods.RequestEntity;
+import org.apache.zeppelin.interpreter.InterpreterSetting;
import org.apache.zeppelin.server.ZeppelinServer;
import org.hamcrest.Description;
import org.hamcrest.Matcher;
@@ -46,6 +49,7 @@ public abstract class AbstractTestRestApi {
static final String restApiUrl = "/api";
static final String url = getUrlToTest();
protected static final boolean wasRunning = checkIfServerIsRuning();
+ static boolean pySpark = false;
private String getUrl(String path) {
String url;
@@ -68,7 +72,7 @@ protected static String getUrlToTest() {
return url;
}
- static ExecutorService executor = Executors.newSingleThreadExecutor();
+ static ExecutorService executor;
protected static final Runnable server = new Runnable() {
@Override
public void run() {
@@ -84,6 +88,7 @@ public void run() {
protected static void startUp() throws Exception {
if (!wasRunning) {
LOG.info("Staring test Zeppelin up...");
+ executor = Executors.newSingleThreadExecutor();
executor.submit(server);
long s = System.currentTimeMillis();
boolean started = false;
@@ -98,19 +103,107 @@ protected static void startUp() throws Exception {
throw new RuntimeException("Can not start Zeppelin server");
}
LOG.info("Test Zeppelin stared.");
+
+
+ // ci environment runs spark cluster for testing
+ // so configure zeppelin to use the spark cluster
+ if ("true".equals(System.getenv("CI"))) {
+ // assume first one is spark
+ InterpreterSetting sparkIntpSetting = ZeppelinServer.notebook.getInterpreterFactory().get().get(0);
+
+ // set spark master
+ sparkIntpSetting.getProperties().setProperty("master", "spark://" + getHostname() + ":7071");
+
+ // set spark home for pyspark
+ sparkIntpSetting.getProperties().setProperty("spark.home", getSparkHome());
+ pySpark = true;
+
+ ZeppelinServer.notebook.getInterpreterFactory().restart(sparkIntpSetting.id());
+ } else {
+ // assume first one is spark
+ InterpreterSetting sparkIntpSetting = ZeppelinServer.notebook.getInterpreterFactory().get().get(0);
+
+ String sparkHome = getSparkHome();
+ if (sparkHome != null) {
+ // set spark home for pyspark
+ sparkIntpSetting.getProperties().setProperty("spark.home", sparkHome);
+ pySpark = true;
+ }
+
+ ZeppelinServer.notebook.getInterpreterFactory().restart(sparkIntpSetting.id());
+ }
}
}
- protected static void shutDown() {
+ private static String getHostname() {
+ try {
+ return InetAddress.getLocalHost().getHostName();
+ } catch (UnknownHostException e) {
+ e.printStackTrace();
+ return "localhost";
+ }
+ }
+
+ private static String getSparkHome() {
+ String sparkHome = getSparkHomeRecursively(new File(System.getProperty("user.dir")));
+ System.out.println("SPARK HOME detected " + sparkHome);
+ return sparkHome;
+ }
+
+ boolean isPyspark() {
+ return pySpark;
+ }
+
+ private static String getSparkHomeRecursively(File dir) {
+ if (dir == null) return null;
+ File files [] = dir.listFiles();
+ if (files == null) return null;
+
+ File homeDetected = null;
+ for (File f : files) {
+ if (isActiveSparkHome(f)) {
+ homeDetected = f;
+ break;
+ }
+ }
+
+ if (homeDetected != null) {
+ return homeDetected.getAbsolutePath();
+ } else {
+ return getSparkHomeRecursively(dir.getParentFile());
+ }
+ }
+
+ private static boolean isActiveSparkHome(File dir) {
+ if (dir.getName().matches("spark-[0-9\\.]+-bin-hadoop[0-9\\.]+")) {
+ File pidDir = new File(dir, "run");
+ if (pidDir.isDirectory() && pidDir.listFiles().length > 0) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ protected static void shutDown() throws Exception {
if (!wasRunning) {
LOG.info("Terminating test Zeppelin...");
+ ZeppelinServer.notebookServer.stop();
+ ZeppelinServer.jettyServer.stop();
executor.shutdown();
- try {
- executor.awaitTermination(10, TimeUnit.SECONDS);
- } catch (InterruptedException e) {
- // TODO Auto-generated catch block
- e.printStackTrace();
+
+ long s = System.currentTimeMillis();
+ boolean started = true;
+ while (System.currentTimeMillis() - s < 1000 * 60 * 3) { // 3 minutes
+ Thread.sleep(2000);
+ started = checkIfServerIsRuning();
+ if (started == false) {
+ break;
+ }
}
+ if (started == true) {
+ throw new RuntimeException("Can not stop Zeppelin server");
+ }
+
LOG.info("Test Zeppelin terminated.");
}
}
diff --git a/zeppelin-server/src/test/java/org/apache/zeppelin/rest/ZeppelinRestApiTest.java b/zeppelin-server/src/test/java/org/apache/zeppelin/rest/ZeppelinRestApiTest.java
index 1a76ca5bd1c..0da5e0de19e 100644
--- a/zeppelin-server/src/test/java/org/apache/zeppelin/rest/ZeppelinRestApiTest.java
+++ b/zeppelin-server/src/test/java/org/apache/zeppelin/rest/ZeppelinRestApiTest.java
@@ -57,7 +57,7 @@ public static void init() throws Exception {
}
@AfterClass
- public static void destroy() {
+ public static void destroy() throws Exception {
AbstractTestRestApi.shutDown();
}
diff --git a/zeppelin-server/src/test/java/org/apache/zeppelin/rest/ZeppelinSparkClusterTest.java b/zeppelin-server/src/test/java/org/apache/zeppelin/rest/ZeppelinSparkClusterTest.java
new file mode 100644
index 00000000000..02b7e47dfa3
--- /dev/null
+++ b/zeppelin-server/src/test/java/org/apache/zeppelin/rest/ZeppelinSparkClusterTest.java
@@ -0,0 +1,107 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.zeppelin.rest;
+
+import static org.junit.Assert.assertEquals;
+
+import java.io.IOException;
+
+import org.apache.zeppelin.notebook.Note;
+import org.apache.zeppelin.notebook.Paragraph;
+import org.apache.zeppelin.scheduler.Job.Status;
+import org.apache.zeppelin.server.ZeppelinServer;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import com.google.gson.Gson;
+
+/**
+ * Test against spark cluster.
+ * Spark cluster is started by CI server using testing/startSparkCluster.sh
+ */
+public class ZeppelinSparkClusterTest extends AbstractTestRestApi {
+ Gson gson = new Gson();
+
+ @BeforeClass
+ public static void init() throws Exception {
+ AbstractTestRestApi.startUp();
+ }
+
+ @AfterClass
+ public static void destroy() throws Exception {
+ AbstractTestRestApi.shutDown();
+ }
+
+ private void waitForFinish(Paragraph p) {
+ while (p.getStatus() != Status.FINISHED) {
+ try {
+ Thread.sleep(100);
+ } catch (InterruptedException e) {
+ e.printStackTrace();
+ }
+ }
+ }
+
+ @Test
+ public void basicRDDTransformationAndActionTest() throws IOException {
+ // create new note
+ Note note = ZeppelinServer.notebook.createNote();
+
+ // run a scala (spark) paragraph
+ Paragraph p = note.addParagraph();
+ p.setText("print(sc.parallelize(1 to 10).reduce(_ + _))");
+ note.run(p.getId());
+ waitForFinish(p);
+ assertEquals("55", p.getResult().message());
+ ZeppelinServer.notebook.removeNote(note.id());
+ }
+
+ @Test
+ public void pySparkTest() throws IOException {
+ // create new note
+ Note note = ZeppelinServer.notebook.createNote();
+
+ int sparkVersion = getSparkVersionNumber(note);
+
+ if (isPyspark() && sparkVersion >= 12) { // pyspark supported from 1.2.1
+ // run a pyspark paragraph
+ Paragraph p = note.addParagraph();
+ p.setText("%pyspark print(sc.parallelize(range(1, 11)).reduce(lambda a, b: a + b))");
+ note.run(p.getId());
+ waitForFinish(p);
+ assertEquals("55\n", p.getResult().message());
+ }
+ ZeppelinServer.notebook.removeNote(note.id());
+ }
+
+ /**
+ * Get spark version number as a numerical value.
+ * eg. 1.1.x => 11, 1.2.x => 12, 1.3.x => 13 ...
+ */
+ private int getSparkVersionNumber(Note note) {
+ Paragraph p = note.addParagraph();
+ p.setText("print(sc.version)");
+ note.run(p.getId());
+ waitForFinish(p);
+ String sparkVersion = p.getResult().message();
+ System.out.println("Spark version detected " + sparkVersion);
+ String[] split = sparkVersion.split("\\.");
+ int version = Integer.parseInt(split[0]) * 10 + Integer.parseInt(split[1]);
+ return version;
+ }
+}