Repository: incubator-zeppelin
Updated Branches:
  refs/heads/master a90908d72 -> 109b0807f


Move Spark specific things from pom.xml to spark/pom.xml

Move Spark-specific dependencyManagement and properties from pom.xml to 
spark/pom.xml.
These interfere with other interpreters' dependency versions.

Author: Lee moon soo <[email protected]>

Closes #88 from Leemoonsoo/pom_refactor and squashes the following commits:

9916875 [Lee moon soo] automated ci test not only spark-1.3 but also spark-1.2, 
spark-1.1
aa6d1fd [Lee moon soo] Test pyspark with spark cluster
be0b7c4 [Lee moon soo] Remove unnecessary #
40698f3 [Lee moon soo] Make default version 1.3.1
18cb474 [Lee moon soo] Parse version correctly
b5f7343 [Lee moon soo] Make hadoop version configurable in test
bb47e81 [Lee moon soo] Add license header
8b6d3f5 [Lee moon soo] Gracefully shutdown ZeppelinServer in test
80698e9 [Lee moon soo] Add test against spark cluster
654d761 [Lee moon soo] Move spark specific dependencyManagement and properties 
block from pom.xml to spark/pom.xml


Project: http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/repo
Commit: 
http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/commit/109b0807
Tree: http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/tree/109b0807
Diff: http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/diff/109b0807

Branch: refs/heads/master
Commit: 109b0807fce6453b7543c68069f30b7cece0f20e
Parents: a90908d
Author: Lee moon soo <[email protected]>
Authored: Thu Jun 4 17:00:07 2015 +0900
Committer: Lee moon soo <[email protected]>
Committed: Mon Jun 8 09:09:50 2015 +0900

----------------------------------------------------------------------
 .travis.yml                                     |   27 +-
 pom.xml                                         | 1083 +-----------------
 spark/pom.xml                                   |  307 ++++-
 .../zeppelin/spark/SparkInterpreterTest.java    |    4 +-
 tajo/pom.xml                                    |    2 +-
 testing/startSparkCluster.sh                    |   36 +-
 testing/stopSparkCluster.sh                     |   26 +-
 zeppelin-interpreter/pom.xml                    |    1 +
 zeppelin-server/pom.xml                         |   44 +-
 .../apache/zeppelin/server/ZeppelinServer.java  |    6 +-
 .../zeppelin/rest/AbstractTestRestApi.java      |  109 +-
 .../zeppelin/rest/ZeppelinRestApiTest.java      |    2 +-
 .../zeppelin/rest/ZeppelinSparkClusterTest.java |  107 ++
 13 files changed, 624 insertions(+), 1130 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/109b0807/.travis.yml
----------------------------------------------------------------------
diff --git a/.travis.yml b/.travis.yml
index b09e228..8bd717d 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -21,20 +21,39 @@ before_install:
   - "export DISPLAY=:99.0"
   - "sh -e /etc/init.d/xvfb start"
 
+install:
+  - mvn package -DskipTests -Phadoop-2.3 -B
+
 before_script:
-  - mvn package -Pbuild-distr -B
-  - ./testing/startSparkCluster.sh
+  -
 
 script:
-  - mvn verify -Pusing-packaged-distr -B
+# spark 1.3
+  - mvn package -Pbuild-distr -Phadoop-2.3 -B
+  - ./testing/startSparkCluster.sh 1.3.1 2.3
+  - SPARK_HOME=./spark-1.3.1-bin-hadoop2.3 mvn verify -Pusing-packaged-distr 
-Phadoop-2.3 -B
+  - ./testing/stopSparkCluster.sh 1.3.1 2.3
+# spark 1.2
+  - mvn clean package -DskipTests -Pspark-1.2 -Phadoop-2.3 -B -pl 
'!zeppelin-web,!zeppelin-distribution'
+  - mvn package -Pbuild-distr -Pspark-1.2 -Phadoop-2.3 -B
+  - ./testing/startSparkCluster.sh 1.2.1 2.3
+  - SPARK_HOME=./spark-1.2.1-bin-hadoop2.3 mvn verify -Pusing-packaged-distr 
-Pspark-1.2 -Phadoop-2.3 -B
+  - ./testing/stopSparkCluster.sh 1.2.1 2.3
+# spark 1.1
+  - mvn clean package -DskipTests -Pspark-1.1 -Phadoop-2.3 -B -pl 
'!zeppelin-web,!zeppelin-distribution'
+  - mvn package -Pbuild-distr -Pspark-1.1 -Phadoop-2.3 -B
+  - ./testing/startSparkCluster.sh 1.1.1 2.3
+  - SPARK_HOME=./spark-1.1.1-bin-hadoop2.3 mvn verify -Pusing-packaged-distr 
-Pspark-1.1 -Phadoop-2.3 -B
+  - ./testing/stopSparkCluster.sh 1.1.1 2.3
 
 after_failure:
   - cat target/rat.txt
+  - cat zeppelin-server/target/rat.txt
   - cat 
zeppelin-distribution/target/zeppelin-*-SNAPSHOT/zeppelin-*-SNAPSHOT/logs/zeppelin*.log
   - cat 
zeppelin-distribution/target/zeppelin-*-SNAPSHOT/zeppelin-*-SNAPSHOT/logs/zeppelin*.out
 
 after_script:
-  - ./testing/stopSparkCluster.sh
+  -
 
 notifications:
   slack:

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/109b0807/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index fcd1a25..bbde084 100644
--- a/pom.xml
+++ b/pom.xml
@@ -98,111 +98,44 @@
   </modules>
 
   <properties>
-    <spark.version>1.1.1</spark.version>
-    <scala.version>2.10.4</scala.version>
-    <scala.binary.version>2.10</scala.binary.version>
-    <scala.macros.version>2.0.1</scala.macros.version>
-    <mesos.version>0.18.1</mesos.version>
-    <mesos.classifier>shaded-protobuf</mesos.classifier>
-    <akka.group>org.spark-project.akka</akka.group>
-    <akka.version>2.2.3-shaded-protobuf</akka.version>
     <slf4j.version>1.7.10</slf4j.version>
     <log4j.version>1.2.17</log4j.version>
-    <hadoop.version>1.0.4</hadoop.version>
-    <protobuf.version>2.4.1</protobuf.version>
-    <yarn.version>${hadoop.version}</yarn.version>
-    <hbase.version>0.94.6</hbase.version>
-    <zookeeper.version>3.4.5</zookeeper.version>
-    <hive.version>0.12.0</hive.version>
-    <tajo.version>0.10.0</tajo.version>
-    <derby.version>10.4.2.0</derby.version>
-    <parquet.version>1.4.3</parquet.version>
-    <jblas.version>1.2.3</jblas.version>
-    <jetty.version>8.1.14.v20131031</jetty.version>
-    <json4s.version>3.2.10</json4s.version>
-    <chill.version>0.3.6</chill.version>
-    <codahale.metrics.version>3.0.0</codahale.metrics.version>
-    <avro.version>1.7.6</avro.version>
-    <jets3t.version>0.7.1</jets3t.version>
-    <commons.math3.version>3.2</commons.math3.version>
-    <commons.httpclient.version>4.3.6</commons.httpclient.version>
-    <fasterxml.jackson.version>2.4.4</fasterxml.jackson.version>
-    <codehaus.jackson.version>1.8.8</codehaus.jackson.version>
-    <snappy.version>1.0.5</snappy.version>
-    <io.netty.version>4.0.17.Final</io.netty.version>
-
+    <libthrift.version>0.9.0</libthrift.version>
+    <gson.version>2.2</gson.version>
     <guava.version>15.0</guava.version>
-    <scalatest.version>2.2.1</scalatest.version>
+
     <PermGen>64m</PermGen>
     <MaxPermGen>512m</MaxPermGen>
-
-    <java.version>1.7</java.version>
-    <cxf.version>2.7.7</cxf.version>
-    <gson.version>2.2</gson.version>
-    <libthrift.version>0.9.0</libthrift.version>
   </properties>
 
   <dependencyManagement>
     <dependencies>
-
-      <!-- Jetty -->
-      <dependency>
-        <groupId>org.eclipse.jetty</groupId>
-        <artifactId>jetty-util</artifactId>
-        <version>${jetty.version}</version>
-      </dependency>
-
-      <dependency>
-        <groupId>org.eclipse.jetty</groupId>
-        <artifactId>jetty-security</artifactId>
-        <version>${jetty.version}</version>
-      </dependency>
+      <!-- Logging -->
 
       <dependency>
-        <groupId>org.eclipse.jetty</groupId>
-        <artifactId>jetty-plus</artifactId>
-        <version>${jetty.version}</version>
+        <groupId>org.slf4j</groupId>
+        <artifactId>slf4j-api</artifactId>
+        <version>${slf4j.version}</version>
       </dependency>
 
       <dependency>
-        <groupId>org.eclipse.jetty</groupId>
-        <artifactId>jetty-server</artifactId>
-        <version>${jetty.version}</version>
+        <groupId>org.slf4j</groupId>
+        <artifactId>slf4j-log4j12</artifactId>
+        <version>${slf4j.version}</version>
       </dependency>
 
       <dependency>
-        <groupId>org.eclipse.jetty.aggregate</groupId>
-        <artifactId>jetty-all-server</artifactId>
-        <version>${jetty.version}</version>
+        <groupId>log4j</groupId>
+        <artifactId>log4j</artifactId>
+        <version>${log4j.version}</version>
       </dependency>
 
-      <!-- Thrift -->
       <dependency>
         <groupId>org.apache.thrift</groupId>
         <artifactId>libthrift</artifactId>
         <version>${libthrift.version}</version>
       </dependency>
 
-      <!-- Commons -->
-
-      <dependency>
-        <groupId>com.google.code.gson</groupId>
-        <artifactId>gson</artifactId>
-        <version>${gson.version}</version>
-      </dependency>
-
-      <dependency>
-        <groupId>com.google.guava</groupId>
-        <artifactId>guava</artifactId>
-        <version>${guava.version}</version>
-      </dependency>
-
-      <dependency>
-        <groupId>org.apache.httpcomponents</groupId>
-        <artifactId>httpclient</artifactId>
-        <version>${commons.httpclient.version}</version>
-      </dependency>
-
       <dependency>
         <groupId>org.apache.httpcomponents</groupId>
         <artifactId>httpcore</artifactId>
@@ -210,9 +143,9 @@
       </dependency>
 
       <dependency>
-        <groupId>commons-collections</groupId>
-        <artifactId>commons-collections</artifactId>
-        <version>3.2.1</version>
+        <groupId>org.apache.httpcomponents</groupId>
+        <artifactId>httpclient</artifactId>
+        <version>4.3.6</version>
       </dependency>
 
       <dependency>
@@ -221,16 +154,17 @@
         <version>2.5</version>
       </dependency>
 
+
       <dependency>
-        <groupId>org.apache.commons</groupId>
-        <artifactId>commons-lang3</artifactId>
-        <version>3.3.2</version>
+        <groupId>com.google.code.gson</groupId>
+        <artifactId>gson</artifactId>
+        <version>${gson.version}</version>
       </dependency>
 
       <dependency>
-        <groupId>org.apache.commons</groupId>
-        <artifactId>commons-math3</artifactId>
-        <version>${commons.math3.version}</version>
+        <groupId>commons-configuration</groupId>
+        <artifactId>commons-configuration</artifactId>
+        <version>1.9</version>
       </dependency>
 
       <dependency>
@@ -239,28 +173,11 @@
         <version>1.5</version>
       </dependency>
 
-      <dependency>
-        <groupId>commons-configuration</groupId>
-        <artifactId>commons-configuration</artifactId>
-        <version>1.9</version>
-        <exclusions>
-          <exclusion>
-            <groupId>commons-logging</groupId>
-            <artifactId>commons-logging</artifactId>
-          </exclusion>
-        </exclusions>
-      </dependency>
 
       <dependency>
         <groupId>commons-io</groupId>
         <artifactId>commons-io</artifactId>
         <version>2.4</version>
-        <exclusions>
-          <exclusion>
-            <groupId>org.slf4j</groupId>
-            <artifactId>*</artifactId>
-          </exclusion>
-        </exclusions>
       </dependency>
 
       <dependency>
@@ -270,282 +187,11 @@
       </dependency>
 
       <dependency>
-        <groupId>com.google.code.findbugs</groupId>
-        <artifactId>jsr305</artifactId>
-        <version>1.3.9</version>
-      </dependency>
-
-      <!-- Logging -->
-
-      <dependency>
-        <groupId>org.slf4j</groupId>
-        <artifactId>slf4j-api</artifactId>
-        <version>${slf4j.version}</version>
-      </dependency>
-
-      <dependency>
-        <groupId>org.slf4j</groupId>
-        <artifactId>slf4j-log4j12</artifactId>
-        <version>${slf4j.version}</version>
-      </dependency>
-
-      <dependency>
-        <groupId>org.slf4j</groupId>
-        <artifactId>jul-to-slf4j</artifactId>
-        <version>${slf4j.version}</version>
-      </dependency>
-
-      <dependency>
-        <groupId>org.slf4j</groupId>
-        <artifactId>jcl-over-slf4j</artifactId>
-        <version>${slf4j.version}</version>
-        <!-- <scope>runtime</scope> --> <!-- more correct, but scalac 2.10.3 
doesn't like it -->
-      </dependency>
-
-      <dependency>
-        <groupId>log4j</groupId>
-        <artifactId>log4j</artifactId>
-        <version>${log4j.version}</version>
-      </dependency>
-
-      <dependency>
-        <groupId>com.ning</groupId>
-        <artifactId>compress-lzf</artifactId>
-        <version>1.0.0</version>
-      </dependency>
-
-      <dependency>
-        <groupId>org.xerial.snappy</groupId>
-        <artifactId>snappy-java</artifactId>
-        <version>${snappy.version}</version>
-      </dependency>
-
-      <dependency>
-        <groupId>com.clearspring.analytics</groupId>
-        <artifactId>stream</artifactId>
-        <version>2.5.1</version>
-        <exclusions>
-          <!-- Only HyperLogLog is used, which doesn't depend on fastutil -->
-          <exclusion>
-            <groupId>it.unimi.dsi</groupId>
-            <artifactId>fastutil</artifactId>
-          </exclusion>
-        </exclusions>
-      </dependency>
-      <!-- In theory we need not directly depend on protobuf since Spark does
-        not directly use it. However, when building with Hadoop/YARN 2.2 Maven 
doesn't
-        correctly bump the protobuf version up from the one Mesos gives. For 
now
-        we include this variable to explicitly bump the version when building 
with
-        YARN. It would be nice to figure out why Maven can't resolve this 
correctly
-        (like SBT does). -->
-      <dependency>
-        <groupId>com.google.protobuf</groupId>
-        <artifactId>protobuf-java</artifactId>
-        <version>${protobuf.version}</version>
-      </dependency>
-
-      <dependency>
-        <groupId>com.twitter</groupId>
-        <artifactId>parquet-hadoop-bundle</artifactId>
-        <version>${parquet.version}</version>
-      </dependency>
-
-      <dependency>
-        <groupId>com.twitter</groupId>
-        <artifactId>chill_${scala.binary.version}</artifactId>
-        <version>${chill.version}</version>
-        <exclusions>
-          <exclusion>
-            <groupId>org.ow2.asm</groupId>
-            <artifactId>asm</artifactId>
-          </exclusion>
-          <exclusion>
-            <groupId>org.ow2.asm</groupId>
-            <artifactId>asm-commons</artifactId>
-          </exclusion>
-        </exclusions>
-      </dependency>
-
-      <dependency>
-        <groupId>com.twitter</groupId>
-        <artifactId>chill-java</artifactId>
-        <version>${chill.version}</version>
-        <exclusions>
-          <exclusion>
-            <groupId>org.ow2.asm</groupId>
-            <artifactId>asm</artifactId>
-          </exclusion>
-          <exclusion>
-            <groupId>org.ow2.asm</groupId>
-            <artifactId>asm-commons</artifactId>
-          </exclusion>
-        </exclusions>
-      </dependency>
-
-      <!-- Akka -->
-
-      <dependency>
-        <groupId>${akka.group}</groupId>
-        <artifactId>akka-actor_${scala.binary.version}</artifactId>
-        <version>${akka.version}</version>
-      </dependency>
-
-      <dependency>
-        <groupId>${akka.group}</groupId>
-        <artifactId>akka-remote_${scala.binary.version}</artifactId>
-        <version>${akka.version}</version>
-      </dependency>
-
-      <dependency>
-        <groupId>${akka.group}</groupId>
-        <artifactId>akka-slf4j_${scala.binary.version}</artifactId>
-        <version>${akka.version}</version>
-      </dependency>
-
-      <dependency>
-        <groupId>${akka.group}</groupId>
-        <artifactId>akka-testkit_${scala.binary.version}</artifactId>
-        <version>${akka.version}</version>
-      </dependency>
-
-      <dependency>
-        <groupId>colt</groupId>
-        <artifactId>colt</artifactId>
-        <version>1.2.0</version>
-      </dependency>
-
-      <dependency>
-        <groupId>org.apache.mesos</groupId>
-        <artifactId>mesos</artifactId>
-        <version>${mesos.version}</version>
-        <classifier>${mesos.classifier}</classifier>
-        <exclusions>
-          <exclusion>
-            <groupId>com.google.protobuf</groupId>
-            <artifactId>protobuf-java</artifactId>
-          </exclusion>
-        </exclusions>
-      </dependency>
-
-      <dependency>
-        <groupId>commons-net</groupId>
-        <artifactId>commons-net</artifactId>
-        <version>2.2</version>
-      </dependency>
-
-      <dependency>
-        <groupId>io.netty</groupId>
-        <artifactId>netty-all</artifactId>
-        <version>${io.netty.version}</version>
-      </dependency>
-
-      <dependency>
-        <groupId>io.netty</groupId>
-        <artifactId>netty</artifactId>
-        <version>3.6.6.Final</version>
-      </dependency>
-
-      <dependency>
-        <groupId>org.apache.derby</groupId>
-        <artifactId>derby</artifactId>
-        <version>${derby.version}</version>
-      </dependency>
-
-      <dependency>
-        <groupId>com.codahale.metrics</groupId>
-        <artifactId>metrics-core</artifactId>
-        <version>${codahale.metrics.version}</version>
-      </dependency>
-
-      <dependency>
-        <groupId>com.codahale.metrics</groupId>
-        <artifactId>metrics-jvm</artifactId>
-        <version>${codahale.metrics.version}</version>
-      </dependency>
-
-      <dependency>
-        <groupId>com.codahale.metrics</groupId>
-        <artifactId>metrics-json</artifactId>
-        <version>${codahale.metrics.version}</version>
-      </dependency>
-
-      <dependency>
-        <groupId>com.codahale.metrics</groupId>
-        <artifactId>metrics-ganglia</artifactId>
-        <version>${codahale.metrics.version}</version>
-      </dependency>
-
-      <dependency>
-        <groupId>com.codahale.metrics</groupId>
-        <artifactId>metrics-graphite</artifactId>
-        <version>${codahale.metrics.version}</version>
-      </dependency>
-
-      <!-- Scala -->
-
-      <dependency>
-        <groupId>org.scala-lang</groupId>
-        <artifactId>scala-compiler</artifactId>
-        <version>${scala.version}</version>
-      </dependency>
-
-      <dependency>
-        <groupId>org.scala-lang</groupId>
-        <artifactId>scala-reflect</artifactId>
-        <version>${scala.version}</version>
-      </dependency>
-
-      <dependency>
-        <groupId>org.scala-lang</groupId>
-        <artifactId>jline</artifactId>
-        <version>${scala.version}</version>
-      </dependency>
-
-      <dependency>
-        <groupId>org.scala-lang</groupId>
-        <artifactId>scala-library</artifactId>
-        <version>${scala.version}</version>
-      </dependency>
-
-      <dependency>
-        <groupId>org.scala-lang</groupId>
-        <artifactId>scala-actors</artifactId>
-        <version>${scala.version}</version>
-      </dependency>
-
-      <dependency>
-        <groupId>org.scala-lang</groupId>
-        <artifactId>scalap</artifactId>
-        <version>${scala.version}</version>
-      </dependency>
-
-      <dependency>
-        <groupId>org.scalatest</groupId>
-        <artifactId>scalatest_${scala.binary.version}</artifactId>
-        <version>2.1.5</version>
-        <scope>test</scope>
-      </dependency>
-
-      <dependency>
-        <groupId>org.easymock</groupId>
-        <artifactId>easymockclassextension</artifactId>
-        <version>3.1</version>
-        <scope>test</scope>
-      </dependency>
-
-      <dependency>
-        <groupId>org.mockito</groupId>
-        <artifactId>mockito-all</artifactId>
-        <version>1.9.0</version>
-        <scope>test</scope>
+        <groupId>com.google.guava</groupId>
+        <artifactId>guava</artifactId>
+        <version>${guava.version}</version>
       </dependency>
 
-      <dependency>
-        <groupId>org.scalacheck</groupId>
-        <artifactId>scalacheck_${scala.binary.version}</artifactId>
-        <version>1.11.3</version>
-        <scope>test</scope>
-      </dependency>
 
       <dependency>
         <groupId>junit</groupId>
@@ -553,348 +199,6 @@
         <version>4.11</version>
         <scope>test</scope>
       </dependency>
-
-      <dependency>
-        <groupId>com.novocode</groupId>
-        <artifactId>junit-interface</artifactId>
-        <version>0.10</version>
-        <scope>test</scope>
-      </dependency>
-
-      <dependency>
-        <groupId>org.apache.curator</groupId>
-        <artifactId>curator-recipes</artifactId>
-        <version>2.4.0</version>
-        <exclusions>
-          <exclusion>
-            <groupId>org.jboss.netty</groupId>
-            <artifactId>netty</artifactId>
-          </exclusion>
-        </exclusions>
-      </dependency>
-
-      <dependency>
-        <groupId>org.apache.hadoop</groupId>
-        <artifactId>hadoop-client</artifactId>
-        <version>${hadoop.version}</version>
-        <exclusions>
-          <exclusion>
-            <groupId>asm</groupId>
-            <artifactId>asm</artifactId>
-          </exclusion>
-          <exclusion>
-            <groupId>org.ow2.asm</groupId>
-            <artifactId>asm</artifactId>
-          </exclusion>
-          <exclusion>
-            <groupId>org.jboss.netty</groupId>
-            <artifactId>netty</artifactId>
-          </exclusion>
-          <exclusion>
-            <groupId>commons-logging</groupId>
-            <artifactId>commons-logging</artifactId>
-          </exclusion>
-          <exclusion>
-            <groupId>org.mortbay.jetty</groupId>
-            <artifactId>servlet-api-2.5</artifactId>
-          </exclusion>
-          <exclusion>
-            <groupId>junit</groupId>
-            <artifactId>junit</artifactId>
-          </exclusion>
-        </exclusions>
-      </dependency>
-
-      <dependency>
-        <groupId>org.apache.avro</groupId>
-        <artifactId>avro</artifactId>
-        <version>${avro.version}</version>
-      </dependency>
-
-      <dependency>
-        <groupId>org.apache.avro</groupId>
-        <artifactId>avro-ipc</artifactId>
-        <version>${avro.version}</version>
-        <exclusions>
-          <exclusion>
-            <groupId>io.netty</groupId>
-            <artifactId>netty</artifactId>
-          </exclusion>
-          <exclusion>
-            <groupId>org.mortbay.jetty</groupId>
-            <artifactId>jetty</artifactId>
-          </exclusion>
-          <exclusion>
-            <groupId>org.mortbay.jetty</groupId>
-            <artifactId>jetty-util</artifactId>
-          </exclusion>
-          <exclusion>
-            <groupId>org.mortbay.jetty</groupId>
-            <artifactId>servlet-api</artifactId>
-          </exclusion>
-          <exclusion>
-            <groupId>org.apache.velocity</groupId>
-            <artifactId>velocity</artifactId>
-          </exclusion>
-        </exclusions>
-      </dependency>
-
-      <dependency>
-        <groupId>org.apache.avro</groupId>
-        <artifactId>avro-mapred</artifactId>
-        <version>${avro.version}</version>
-        <exclusions>
-          <exclusion>
-            <groupId>io.netty</groupId>
-            <artifactId>netty</artifactId>
-          </exclusion>
-          <exclusion>
-            <groupId>org.mortbay.jetty</groupId>
-            <artifactId>jetty</artifactId>
-          </exclusion>
-          <exclusion>
-            <groupId>org.mortbay.jetty</groupId>
-            <artifactId>jetty-util</artifactId>
-          </exclusion>
-          <exclusion>
-            <groupId>org.mortbay.jetty</groupId>
-            <artifactId>servlet-api</artifactId>
-          </exclusion>
-          <exclusion>
-            <groupId>org.apache.velocity</groupId>
-            <artifactId>velocity</artifactId>
-          </exclusion>
-        </exclusions>
-      </dependency>
-
-      <dependency>
-        <groupId>org.apache.zookeeper</groupId>
-        <artifactId>zookeeper</artifactId>
-        <version>${zookeeper.version}</version>
-      </dependency>
-
-      <!-- See SPARK-1556 for info on this dependency: -->
-      <dependency>
-        <groupId>net.java.dev.jets3t</groupId>
-        <artifactId>jets3t</artifactId>
-        <version>${jets3t.version}</version>
-        <scope>runtime</scope>
-        <exclusions>
-          <exclusion>
-            <groupId>commons-logging</groupId>
-            <artifactId>commons-logging</artifactId>
-          </exclusion>
-        </exclusions>
-      </dependency>
-
-      <dependency>
-        <groupId>org.apache.hadoop</groupId>
-        <artifactId>hadoop-yarn-api</artifactId>
-        <version>${yarn.version}</version>
-        <exclusions>
-          <exclusion>
-            <groupId>asm</groupId>
-            <artifactId>asm</artifactId>
-          </exclusion>
-          <exclusion>
-            <groupId>org.ow2.asm</groupId>
-            <artifactId>asm</artifactId>
-          </exclusion>
-          <exclusion>
-            <groupId>org.jboss.netty</groupId>
-            <artifactId>netty</artifactId>
-          </exclusion>
-          <exclusion>
-            <groupId>commons-logging</groupId>
-            <artifactId>commons-logging</artifactId>
-          </exclusion>
-        </exclusions>
-      </dependency>
-
-      <dependency>
-        <groupId>org.apache.hadoop</groupId>
-        <artifactId>hadoop-yarn-common</artifactId>
-        <version>${yarn.version}</version>
-        <exclusions>
-          <exclusion>
-            <groupId>asm</groupId>
-            <artifactId>asm</artifactId>
-          </exclusion>
-          <exclusion>
-            <groupId>org.ow2.asm</groupId>
-            <artifactId>asm</artifactId>
-          </exclusion>
-          <exclusion>
-            <groupId>org.jboss.netty</groupId>
-            <artifactId>netty</artifactId>
-          </exclusion>
-          <exclusion>
-            <groupId>javax.servlet</groupId>
-            <artifactId>servlet-api</artifactId>
-          </exclusion>
-          <exclusion>
-            <groupId>commons-logging</groupId>
-            <artifactId>commons-logging</artifactId>
-          </exclusion>
-        </exclusions>
-      </dependency>
-
-      <dependency>
-        <groupId>org.apache.hadoop</groupId>
-        <artifactId>hadoop-yarn-server-web-proxy</artifactId>
-        <version>${yarn.version}</version>
-        <exclusions>
-          <exclusion>
-            <groupId>asm</groupId>
-            <artifactId>asm</artifactId>
-          </exclusion>
-          <exclusion>
-            <groupId>org.ow2.asm</groupId>
-            <artifactId>asm</artifactId>
-          </exclusion>
-          <exclusion>
-            <groupId>org.jboss.netty</groupId>
-            <artifactId>netty</artifactId>
-          </exclusion>
-          <exclusion>
-            <groupId>javax.servlet</groupId>
-            <artifactId>servlet-api</artifactId>
-          </exclusion>
-          <exclusion>
-            <groupId>commons-logging</groupId>
-            <artifactId>commons-logging</artifactId>
-          </exclusion>
-        </exclusions>
-      </dependency>
-
-      <dependency>
-        <groupId>org.apache.hadoop</groupId>
-        <artifactId>hadoop-yarn-client</artifactId>
-        <version>${yarn.version}</version>
-        <exclusions>
-          <exclusion>
-            <groupId>asm</groupId>
-            <artifactId>asm</artifactId>
-          </exclusion>
-          <exclusion>
-            <groupId>org.ow2.asm</groupId>
-            <artifactId>asm</artifactId>
-          </exclusion>
-          <exclusion>
-            <groupId>org.jboss.netty</groupId>
-            <artifactId>netty</artifactId>
-          </exclusion>
-          <exclusion>
-            <groupId>javax.servlet</groupId>
-            <artifactId>servlet-api</artifactId>
-          </exclusion>
-          <exclusion>
-            <groupId>commons-logging</groupId>
-            <artifactId>commons-logging</artifactId>
-          </exclusion>
-        </exclusions>
-      </dependency>
-
-      <dependency>
-        <groupId>org.codehaus.jackson</groupId>
-        <artifactId>jackson-mapper-asl</artifactId>
-        <version>${codehaus.jackson.version}</version>
-      </dependency>
-
-      <dependency>
-        <groupId>org.codehaus.jackson</groupId>
-        <artifactId>jackson-core-asl</artifactId>
-        <version>${codehaus.jackson.version}</version>
-      </dependency>
-
-      <dependency>
-        <groupId>org.codehaus.jackson</groupId>
-        <artifactId>jackson-xc</artifactId>
-        <version>${codehaus.jackson.version}</version>
-      </dependency>
-
-      <dependency>
-        <groupId>org.codehaus.jackson</groupId>
-        <artifactId>jackson-jaxrs</artifactId>
-        <version>${codehaus.jackson.version}</version>
-      </dependency>
-
-      <dependency>
-        <groupId>com.fasterxml.jackson.core</groupId>
-        <artifactId>jackson-databind</artifactId>
-        <version>${fasterxml.jackson.version}</version>
-      </dependency>
-
-      <!-- Guava is excluded because of SPARK-6149.  The Guava version 
referenced in this module is
-           15.0, which causes runtime incompatibility issues. -->
-      <dependency>
-        <groupId>com.fasterxml.jackson.module</groupId>
-        <artifactId>jackson-module-scala_2.10</artifactId>
-        <version>${fasterxml.jackson.version}</version>
-        <exclusions>
-          <exclusion>
-            <groupId>com.google.guava</groupId>
-            <artifactId>guava</artifactId>
-          </exclusion>
-        </exclusions>
-      </dependency>
-
-      <dependency>
-        <groupId>com.thoughtworks.paranamer</groupId>
-        <artifactId>paranamer</artifactId>
-        <version>2.6</version>
-      </dependency>
-
-      <dependency>
-        <groupId>com.sun.jersey</groupId>
-        <artifactId>jersey-server</artifactId>
-        <version>1.9</version>
-      </dependency>
-
-      <dependency>
-        <groupId>com.sun.jersey</groupId>
-        <artifactId>jersey-core</artifactId>
-        <version>1.9</version>
-      </dependency>
-
-      <dependency>
-        <groupId>com.sun.jersey</groupId>
-        <artifactId>jersey-client</artifactId>
-        <version>1.9</version>
-      </dependency>
-
-      <dependency>
-        <groupId>com.sun.xml.bind</groupId>
-        <artifactId>jaxb-impl</artifactId>
-        <version>2.2.6</version>
-      </dependency>
-
-
-      <!-- json4s -->
-      <dependency>
-        <groupId>org.json4s</groupId>
-        <artifactId>json4s-core_2.10</artifactId>
-        <version>${json4s.version}</version>
-      </dependency>
-
-      <dependency>
-        <groupId>org.json4s</groupId>
-        <artifactId>json4s-native_2.10</artifactId>
-        <version>${json4s.version}</version>
-      </dependency>
-
-      <dependency>
-        <groupId>org.json4s</groupId>
-        <artifactId>json4s-jackson_2.10</artifactId>
-        <version>${json4s.version}</version>
-      </dependency>
-
-      <dependency>
-        <groupId>org.json4s</groupId>
-        <artifactId>json4s-ext_2.10</artifactId>
-        <version>${json4s.version}</version>
-      </dependency>
-
     </dependencies>
   </dependencyManagement>
 
@@ -1112,6 +416,7 @@
               <exclude>STYLE.md</exclude>
               <exclude>Roadmap.md</exclude>
               <exclude>conf/interpreter.json</exclude>
+              <exclude>spark-*-bin*/**</exclude>
             </excludes>
           </configuration>
 
@@ -1315,339 +620,5 @@
         </plugins>
       </build>
     </profile>
-
-    <profile>
-      <id>spark-1.1</id>
-      <dependencies>
-
-      </dependencies>
-      <properties>
-        <spark.version>1.1.1</spark.version>
-      </properties>
-    </profile>
-
-    <profile>
-      <id>cassandra-spark-1.1</id>
-      <dependencies>
-        <dependency>
-          <groupId>com.datastax.spark</groupId>
-          
<artifactId>spark-cassandra-connector_${scala.binary.version}</artifactId>
-          <version>1.1.1</version>
-          <exclusions>
-            <exclusion>
-              <groupId>org.joda</groupId>
-              <artifactId>joda-convert</artifactId>
-            </exclusion>
-          </exclusions>
-        </dependency>
-      </dependencies>
-      <properties>
-        <spark.version>1.1.1</spark.version>
-        <libthrift.version>0.9.2</libthrift.version>
-      </properties>
-    </profile>
-
-    <profile>
-      <id>spark-1.2</id>
-      <dependencies>
-      </dependencies>
-      <properties>
-        <akka.version>2.3.4-spark</akka.version>
-        <spark.version>1.2.1</spark.version>
-        <hive.version>0.13.1a</hive.version>
-        <derby.version>10.10.1.1</derby.version>
-        <parquet.version>1.6.0rc3</parquet.version>
-        <chill.version>0.5.0</chill.version>
-        <commons.httpclient.version>4.2.6</commons.httpclient.version>
-        <commons.math3.version>3.1.1</commons.math3.version>
-        <io.netty.version>4.0.23.Final</io.netty.version>
-      </properties>
-    </profile>
-
-    <profile>
-      <id>cassandra-spark-1.2</id>
-      <properties>
-        <akka.version>2.3.4-spark</akka.version>
-        <spark.version>1.2.1</spark.version>
-        <hive.version>0.13.1a</hive.version>
-        <derby.version>10.10.1.1</derby.version>
-        <parquet.version>1.6.0rc3</parquet.version>
-        <chill.version>0.5.0</chill.version>
-        <commons.httpclient.version>4.2.6</commons.httpclient.version>
-        <commons.math3.version>3.1.1</commons.math3.version>
-        <io.netty.version>4.0.23.Final</io.netty.version>
-        <libthrift.version>0.9.2</libthrift.version>
-      </properties>
-      <dependencies>
-        <dependency>
-          <groupId>com.datastax.spark</groupId>
-          
<artifactId>spark-cassandra-connector_${scala.binary.version}</artifactId>
-          <version>1.2.1</version>
-          <exclusions>
-            <exclusion>
-              <groupId>org.joda</groupId>
-              <artifactId>joda-convert</artifactId>
-            </exclusion>
-          </exclusions>
-        </dependency>
-      </dependencies>
-    </profile>
-
-    <profile>
-      <id>spark-1.3</id>
-      <properties>
-        <akka.version>2.3.4-spark</akka.version>
-        <spark.version>1.3.1</spark.version>
-        <mesos.version>0.21.0</mesos.version>
-        <hbase.version>0.98.7</hbase.version>
-        <hbase.artifact>hbase</hbase.artifact>
-        <hive.group>org.spark-project.hive</hive.group>
-        <hive.version>0.13.1a</hive.version>
-        <derby.version>10.10.1.1</derby.version>
-        <orbit.version>3.0.0.v201112011016</orbit.version>
-        <parquet.version>1.6.0rc3</parquet.version>
-        <chill.version>0.5.0</chill.version>
-        <ivy.version>2.4.0</ivy.version>
-        <oro.version>2.0.8</oro.version>
-        <avro.mapred.classifier></avro.mapred.classifier>
-        <codahale.metrics.version>3.1.0</codahale.metrics.version>
-        <commons.httpclient.version>4.2.6</commons.httpclient.version>
-        <commons.math3.version>3.1.1</commons.math3.version>
-        <io.netty.version>4.0.23.Final</io.netty.version>
-        <codehaus.jackson.version>1.9.13</codehaus.jackson.version>
-        <fasterxml.jackson.version>2.4.4</fasterxml.jackson.version>
-        <snappy.version>1.1.1.6</snappy.version>
-        <mesos.version>0.21.0</mesos.version>
-      </properties>
-      <activation>
-        <activeByDefault>true</activeByDefault>
-      </activation>
-      <dependencies>
-      </dependencies>
-    </profile>
-
-    <profile>
-      <id>cassandra-spark-1.3</id>
-      <properties>
-        <akka.version>2.3.4-spark</akka.version>
-        <spark.version>1.3.1</spark.version>
-        <mesos.version>0.21.0</mesos.version>
-        <hbase.version>0.98.7</hbase.version>
-        <hbase.artifact>hbase</hbase.artifact>
-        <hive.group>org.spark-project.hive</hive.group>
-        <hive.version>0.13.1a</hive.version>
-        <derby.version>10.10.1.1</derby.version>
-        <orbit.version>3.0.0.v201112011016</orbit.version>
-        <parquet.version>1.6.0rc3</parquet.version>
-        <chill.version>0.5.0</chill.version>
-        <ivy.version>2.4.0</ivy.version>
-        <oro.version>2.0.8</oro.version>
-        <avro.mapred.classifier></avro.mapred.classifier>
-        <codahale.metrics.version>3.1.0</codahale.metrics.version>
-        <commons.httpclient.version>4.2.6</commons.httpclient.version>
-        <commons.math3.version>3.1.1</commons.math3.version>
-        <io.netty.version>4.0.23.Final</io.netty.version>
-        <codehaus.jackson.version>1.9.13</codehaus.jackson.version>
-        <fasterxml.jackson.version>2.4.4</fasterxml.jackson.version>
-        <snappy.version>1.1.1.6</snappy.version>
-        <mesos.version>0.21.0</mesos.version>
-        <libthrift.version>0.9.2</libthrift.version>
-      </properties>
-
-      <dependencies>
-        <dependency>
-          <groupId>com.datastax.spark</groupId>
-          
<artifactId>spark-cassandra-connector_${scala.binary.version}</artifactId>
-          <!--You need to build your own version of Spark Cassandra connector 
1.3.0-SNAPSHOT
-              because it is not yet released-->
-          <version>1.3.0-SNAPSHOT</version>
-          <exclusions>
-            <exclusion>
-              <groupId>org.joda</groupId>
-              <artifactId>joda-convert</artifactId>
-            </exclusion>
-          </exclusions>
-        </dependency>
-      </dependencies>
-    </profile>
-
-   <profile>
-      <id>spark-1.4</id>
-      <properties>
-        <akka.version>2.3.4-spark</akka.version>
-        <spark.version>1.4.0</spark.version>
-        <mesos.version>0.21.1</mesos.version>
-        <hbase.version>0.98.7-hadoop2</hbase.version>
-        <hbase.artifact>hbase</hbase.artifact>
-        <hive.group>org.spark-project.hive</hive.group>
-        <hive.version>0.13.1a</hive.version>
-        <derby.version>10.10.1.1</derby.version>
-        <orbit.version>3.0.0.v201112011016</orbit.version>
-        <parquet.version>1.6.0rc3</parquet.version>
-        <chill.version>0.5.0</chill.version>
-        <ivy.version>2.4.0</ivy.version>
-        <oro.version>2.0.8</oro.version>
-        <avro.mapred.classifier>hadoop2</avro.mapred.classifier>
-        <codahale.metrics.version>3.1.0</codahale.metrics.version>
-        <commons.httpclient.version>4.3.2</commons.httpclient.version>
-        <commons.math3.version>3.4.1</commons.math3.version>
-        <io.netty.version>4.0.23.Final</io.netty.version>
-        <codehaus.jackson.version>1.9.13</codehaus.jackson.version>
-        <fasterxml.jackson.version>2.4.4</fasterxml.jackson.version>
-        <snappy.version>1.1.1.7</snappy.version>
-        <mesos.version>0.21.0</mesos.version>
-      </properties>
-
-      <dependencies>
-      </dependencies>
-
-    </profile>
-
-
-    <profile>
-      <id>hadoop-0.23</id>
-      <!-- SPARK-1121: Adds an explicit dependency on Avro to work around a
-        Hadoop 0.23.X issue -->
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.avro</groupId>
-          <artifactId>avro</artifactId>
-        </dependency>
-      </dependencies>
-      <properties>
-        <hadoop.version>0.23.10</hadoop.version>
-      </properties>
-    </profile>
-
-    <profile>
-      <id>hadoop-2.2</id>
-      <properties>
-        <hadoop.version>2.2.0</hadoop.version>
-        <protobuf.version>2.5.0</protobuf.version>
-      </properties>
-    </profile>
-
-    <profile>
-      <id>hadoop-2.3</id>
-      <properties>
-        <hadoop.version>2.3.0</hadoop.version>
-        <protobuf.version>2.5.0</protobuf.version>
-        <jets3t.version>0.9.0</jets3t.version>
-      </properties>
-    </profile>
-
-    <profile>
-      <id>hadoop-2.4</id>
-      <properties>
-        <hadoop.version>2.4.0</hadoop.version>
-        <protobuf.version>2.5.0</protobuf.version>
-        <jets3t.version>0.9.3</jets3t.version>
-      </properties>
-    </profile>
-
-    <profile>
-      <id>hadoop-2.6</id>
-      <properties>
-        <hadoop.version>2.6.0</hadoop.version>
-        <protobuf.version>2.5.0</protobuf.version>
-        <jets3t.version>0.9.3</jets3t.version>
-        <codehaus.jackson.version>1.9.13</codehaus.jackson.version>
-      </properties>
-    </profile>
-
-    <profile>
-      <id>mapr3</id>
-      <activation>
-        <activeByDefault>false</activeByDefault>
-      </activation>
-      <properties>
-        <hadoop.version>1.0.3-mapr-3.0.3</hadoop.version>
-        <yarn.version>2.3.0-mapr-4.0.0-FCS</yarn.version>
-        <hbase.version>0.94.17-mapr-1405</hbase.version>
-        <zookeeper.version>3.4.5-mapr-1406</zookeeper.version>
-      </properties>
-    </profile>
-
-    <profile>
-      <id>mapr4</id>
-      <activation>
-        <activeByDefault>false</activeByDefault>
-      </activation>
-      <properties>
-        <hadoop.version>2.3.0-mapr-4.0.0-FCS</hadoop.version>
-        <yarn.version>2.3.0-mapr-4.0.0-FCS</yarn.version>
-        <hbase.version>0.94.17-mapr-1405-4.0.0-FCS</hbase.version>
-        <zookeeper.version>3.4.5-mapr-1406</zookeeper.version>
-      </properties>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.curator</groupId>
-          <artifactId>curator-recipes</artifactId>
-          <version>2.4.0</version>
-          <exclusions>
-            <exclusion>
-              <groupId>org.apache.zookeeper</groupId>
-              <artifactId>zookeeper</artifactId>
-            </exclusion>
-          </exclusions>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.zookeeper</groupId>
-          <artifactId>zookeeper</artifactId>
-          <version>3.4.5-mapr-1406</version>
-        </dependency>
-      </dependencies>
-    </profile>
-
-    <!-- Build without Hadoop dependencies that are included in some runtime 
environments. -->
-    <profile>
-      <id>hadoop-provided</id>
-      <activation>
-        <activeByDefault>false</activeByDefault>
-      </activation>
-      <dependencies>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-client</artifactId>
-          <scope>provided</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-yarn-api</artifactId>
-          <scope>provided</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-yarn-common</artifactId>
-          <scope>provided</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-yarn-server-web-proxy</artifactId>
-          <scope>provided</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.hadoop</groupId>
-          <artifactId>hadoop-yarn-client</artifactId>
-          <scope>provided</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.avro</groupId>
-          <artifactId>avro</artifactId>
-          <scope>provided</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.avro</groupId>
-          <artifactId>avro-ipc</artifactId>
-          <scope>provided</scope>
-        </dependency>
-        <dependency>
-          <groupId>org.apache.zookeeper</groupId>
-          <artifactId>zookeeper</artifactId>
-          <version>${zookeeper.version}</version>
-          <scope>provided</scope>
-        </dependency>
-      </dependencies>
-    </profile>
   </profiles>
 </project>

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/109b0807/spark/pom.xml
----------------------------------------------------------------------
diff --git a/spark/pom.xml b/spark/pom.xml
index b4c05e5..dc37bf7 100644
--- a/spark/pom.xml
+++ b/spark/pom.xml
@@ -32,6 +32,15 @@
   <name>Zeppelin: Spark</name>
   <description>Zeppelin spark support</description>
   <url>http://zeppelin.incubator.apache.org</url>
+  
+
+  <properties>
+    <spark.version>1.3.1</spark.version>
+    <scala.version>2.10.4</scala.version>
+    <scala.binary.version>2.10</scala.binary.version>
+
+    <hadoop.version>2.3.0</hadoop.version>
+  </properties>
 
   <dependencies>
     <dependency>
@@ -51,59 +60,20 @@
       <scope>provided</scope>
     </dependency>
 
-    <dependency>
-      <groupId>com.google.code.gson</groupId>
-      <artifactId>gson</artifactId>
-    </dependency>
-
-    <dependency>
-      <groupId>commons-collections</groupId>
-      <artifactId>commons-collections</artifactId>
-    </dependency>
-
+    <!-- Spark -->
     <dependency>
       <groupId>org.apache.spark</groupId>
       <artifactId>spark-core_2.10</artifactId>
       <version>${spark.version}</version>
       <exclusions>
         <exclusion>
-          <groupId>com.fasterxml.jackson.core</groupId>
-          <artifactId>jackson-databind</artifactId>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-client</artifactId>
         </exclusion>
       </exclusions>
     </dependency>
 
     <dependency>
-      <groupId>com.fasterxml.jackson.core</groupId>
-      <artifactId>jackson-databind</artifactId>
-    </dependency>
-
-    <dependency>
-      <groupId>org.scala-lang</groupId>
-      <artifactId>scala-library</artifactId>
-    </dependency>
-
-    <dependency>
-      <groupId>org.scala-lang</groupId>
-      <artifactId>scala-compiler</artifactId>
-    </dependency>
-
-    <dependency>
-      <groupId>org.scala-lang</groupId>
-      <artifactId>scala-reflect</artifactId>
-    </dependency>
-
-    <dependency>
-      <groupId>org.eclipse.jetty.aggregate</groupId>
-      <artifactId>jetty-all-server</artifactId>
-    </dependency>
-
-    <dependency>
-      <groupId>io.netty</groupId>
-      <artifactId>netty-all</artifactId>
-    </dependency>
-
-    <dependency>
       <groupId>org.apache.spark</groupId>
       <artifactId>spark-repl_2.10</artifactId>
       <version>${spark.version}</version>
@@ -141,6 +111,15 @@
       <version>${spark.version}</version>
     </dependency>
 
+
+    <!-- hadoop -->
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-client</artifactId>
+      <version>${hadoop.version}</version>
+    </dependency>
+
+
     <!-- Aether :: maven dependency resolution -->
     <dependency>
       <groupId>org.apache.maven</groupId>
@@ -277,6 +256,193 @@
 
   <profiles>
     <profile>
+      <id>spark-1.1</id>
+      <dependencies>
+
+      </dependencies>
+      <properties>
+        <spark.version>1.1.1</spark.version>
+      </properties>
+    </profile>
+
+    <profile>
+      <id>cassandra-spark-1.1</id>
+      <dependencies>
+        <dependency>
+          <groupId>com.datastax.spark</groupId>
+          
<artifactId>spark-cassandra-connector_${scala.binary.version}</artifactId>
+          <version>1.1.1</version>
+          <exclusions>
+            <exclusion>
+              <groupId>org.joda</groupId>
+              <artifactId>joda-convert</artifactId>
+            </exclusion>
+          </exclusions>
+        </dependency>
+      </dependencies>
+      <properties>
+        <spark.version>1.1.1</spark.version>
+      </properties>
+    </profile>
+
+    <profile>
+      <id>spark-1.2</id>
+      <dependencies>
+      </dependencies>
+      <properties>
+        <spark.version>1.2.1</spark.version>
+      </properties>
+    </profile>
+
+    <profile>
+      <id>cassandra-spark-1.2</id>
+      <properties>
+        <spark.version>1.2.1</spark.version>
+      </properties>
+      <dependencies>
+        <dependency>
+          <groupId>com.datastax.spark</groupId>
+          
<artifactId>spark-cassandra-connector_${scala.binary.version}</artifactId>
+          <version>1.2.1</version>
+          <exclusions>
+            <exclusion>
+              <groupId>org.joda</groupId>
+              <artifactId>joda-convert</artifactId>
+            </exclusion>
+          </exclusions>
+        </dependency>
+      </dependencies>
+    </profile>
+
+    <profile>
+      <id>spark-1.3</id>
+
+      <properties>
+        <spark.version>1.3.1</spark.version>
+      </properties>
+
+      <dependencies>
+      </dependencies>
+
+    </profile>
+
+    <profile>
+      <id>cassandra-spark-1.3</id>
+      <properties>
+        <spark.version>1.3.0</spark.version>
+      </properties>
+
+      <dependencies>
+        <dependency>
+          <groupId>com.datastax.spark</groupId>
+          
<artifactId>spark-cassandra-connector_${scala.binary.version}</artifactId>
+          <!--You need to build your own version of Spark Cassandra connector 
1.3.0-SNAPSHOT
+              because it is not yet released-->
+          <version>1.3.0-SNAPSHOT</version>
+          <exclusions>
+            <exclusion>
+              <groupId>org.joda</groupId>
+              <artifactId>joda-convert</artifactId>
+            </exclusion>
+          </exclusions>
+        </dependency>
+      </dependencies>
+    </profile>
+
+   <profile>
+      <id>spark-1.4</id>
+      <properties>
+        <spark.version>1.4.0</spark.version>
+      </properties>
+
+      <dependencies>
+      </dependencies>
+    </profile>
+
+    <profile>
+      <id>hadoop-0.23</id>
+      <!-- SPARK-1121: Adds an explicit dependency on Avro to work around a
+        Hadoop 0.23.X issue -->
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.avro</groupId>
+          <artifactId>avro</artifactId>
+        </dependency>
+      </dependencies>
+      <properties>
+        <hadoop.version>0.23.10</hadoop.version>
+      </properties>
+    </profile>
+
+    <profile>
+      <id>hadoop-2.2</id>
+      <properties>
+        <hadoop.version>2.2.0</hadoop.version>
+      </properties>
+    </profile>
+
+    <profile>
+      <id>hadoop-2.3</id>
+      <properties>
+        <hadoop.version>2.3.0</hadoop.version>
+      </properties>
+    </profile>
+
+    <profile>
+      <id>hadoop-2.4</id>
+      <properties>
+        <hadoop.version>2.4.0</hadoop.version>
+      </properties>
+    </profile>
+
+    <profile>
+      <id>hadoop-2.6</id>
+      <properties>
+        <hadoop.version>2.6.0</hadoop.version>
+      </properties>
+    </profile>
+
+    <profile>
+      <id>mapr3</id>
+      <activation>
+        <activeByDefault>false</activeByDefault>
+      </activation>
+      <properties>
+        <hadoop.version>1.0.3-mapr-3.0.3</hadoop.version>
+        <yarn.version>2.3.0-mapr-4.0.0-FCS</yarn.version>
+      </properties>
+    </profile>
+
+    <profile>
+      <id>mapr4</id>
+      <activation>
+        <activeByDefault>false</activeByDefault>
+      </activation>
+      <properties>
+        <hadoop.version>2.3.0-mapr-4.0.0-FCS</hadoop.version>
+        <yarn.version>2.3.0-mapr-4.0.0-FCS</yarn.version>
+      </properties>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.curator</groupId>
+          <artifactId>curator-recipes</artifactId>
+          <version>2.4.0</version>
+          <exclusions>
+            <exclusion>
+              <groupId>org.apache.zookeeper</groupId>
+              <artifactId>zookeeper</artifactId>
+            </exclusion>
+          </exclusions>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.zookeeper</groupId>
+          <artifactId>zookeeper</artifactId>
+          <version>3.4.5-mapr-1406</version>
+        </dependency>
+      </dependencies>
+    </profile>
+
+    <profile>
       <id>yarn</id>
       <dependencies>
         <dependency>
@@ -284,6 +450,63 @@
           <artifactId>spark-yarn_2.10</artifactId>
           <version>${spark.version}</version>
         </dependency>
+
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-yarn-api</artifactId>
+          <version>${yarn.version}</version>
+        </dependency>
+      </dependencies>
+    </profile>
+
+    <!-- Build without Hadoop dependencies that are included in some runtime 
environments. -->
+    <profile>
+      <id>hadoop-provided</id>
+      <activation>
+        <activeByDefault>false</activeByDefault>
+      </activation>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-client</artifactId>
+          <scope>provided</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-yarn-api</artifactId>
+          <scope>provided</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-yarn-common</artifactId>
+          <scope>provided</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-yarn-server-web-proxy</artifactId>
+          <scope>provided</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-yarn-client</artifactId>
+          <scope>provided</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.avro</groupId>
+          <artifactId>avro</artifactId>
+          <scope>provided</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.avro</groupId>
+          <artifactId>avro-ipc</artifactId>
+          <scope>provided</scope>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.zookeeper</groupId>
+          <artifactId>zookeeper</artifactId>
+          <version>${zookeeper.version}</version>
+          <scope>provided</scope>
+        </dependency>
       </dependencies>
     </profile>
   </profiles>

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/109b0807/spark/src/test/java/org/apache/zeppelin/spark/SparkInterpreterTest.java
----------------------------------------------------------------------
diff --git 
a/spark/src/test/java/org/apache/zeppelin/spark/SparkInterpreterTest.java 
b/spark/src/test/java/org/apache/zeppelin/spark/SparkInterpreterTest.java
index c49f1e1..c97e824 100644
--- a/spark/src/test/java/org/apache/zeppelin/spark/SparkInterpreterTest.java
+++ b/spark/src/test/java/org/apache/zeppelin/spark/SparkInterpreterTest.java
@@ -55,8 +55,8 @@ public class SparkInterpreterTest {
       return 0;
     }
 
-    String[] split = repl.getSparkContext().version().split(".");
-    int version = Integer.parseInt(split[0]) + Integer.parseInt(split[1]);
+    String[] split = repl.getSparkContext().version().split("\\.");
+    int version = Integer.parseInt(split[0]) * 10 + Integer.parseInt(split[1]);
     return version;
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/109b0807/tajo/pom.xml
----------------------------------------------------------------------
diff --git a/tajo/pom.xml b/tajo/pom.xml
index cbed17d..0ed0fce 100644
--- a/tajo/pom.xml
+++ b/tajo/pom.xml
@@ -32,7 +32,7 @@
   <url>http://www.apache.org</url>
 
   <properties>
-    <protobuf.version>2.5.0</protobuf.version>
+    <tajo.version>0.10.0</tajo.version>
   </properties>
 
   <dependencies>

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/109b0807/testing/startSparkCluster.sh
----------------------------------------------------------------------
diff --git a/testing/startSparkCluster.sh b/testing/startSparkCluster.sh
index 896ed52..e2d8079 100755
--- a/testing/startSparkCluster.sh
+++ b/testing/startSparkCluster.sh
@@ -1,4 +1,4 @@
-#!/bin/sh
+#!/bin/bash
 #
 # Licensed to the Apache Software Foundation (ASF) under one or more
 # contributor license agreements.  See the NOTICE file distributed with
@@ -16,11 +16,33 @@
 # limitations under the License.
 #
 
-wget http://apache.mesi.com.ar/spark/spark-1.1.1/spark-1.1.1-bin-hadoop2.3.tgz
-tar zxvf spark-1.1.1-bin-hadoop2.3.tgz
-cd spark-1.1.1-bin-hadoop2.3
+
+if [ $# -ne 2 ]; then
+    echo "usage) $0 [spark version] [hadoop version]"
+    echo "   eg) $0 1.3.1 2.6"
+    exit 1
+fi
+
+SPARK_VERSION="${1}"
+HADOOP_VERSION="${2}"
+
+FWDIR=$(dirname "${BASH_SOURCE-$0}")
+ZEPPELIN_HOME="$(cd "${FWDIR}/.."; pwd)"
+export 
SPARK_HOME=${ZEPPELIN_HOME}/spark-${SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}
+
+if [ ! -d "${SPARK_HOME}" ]; then
+    wget -q 
http://www.us.apache.org/dist/spark/spark-${SPARK_VERSION}/spark-${SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz
+    tar zxf spark-${SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz
+fi
+
+# create PID dir. test case detect pid file so they can select active spark 
home dir for test
+mkdir -p ${SPARK_HOME}/run
+export SPARK_PID_DIR=${SPARK_HOME}/run
+
+
+# start
 export SPARK_MASTER_PORT=7071
 export SPARK_MASTER_WEBUI_PORT=7072
-./sbin/start-master.sh
-./bin/spark-class org.apache.spark.deploy.worker.Worker spark://localhost:7071 
&> worker.log &
-./bin/spark-class org.apache.spark.deploy.worker.Worker spark://localhost:7071 
&> worker2.log &
+export SPARK_WORKER_WEBUI_PORT=8082
+${SPARK_HOME}/sbin/start-master.sh
+${SPARK_HOME}/sbin/start-slave.sh 1 `hostname`:${SPARK_MASTER_PORT}

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/109b0807/testing/stopSparkCluster.sh
----------------------------------------------------------------------
diff --git a/testing/stopSparkCluster.sh b/testing/stopSparkCluster.sh
index 4f8d7a9..0ae312d 100755
--- a/testing/stopSparkCluster.sh
+++ b/testing/stopSparkCluster.sh
@@ -1,4 +1,4 @@
-#!/bin/sh#
+#!/bin/bash
 #
 # Licensed to the Apache Software Foundation (ASF) under one or more
 # contributor license agreements.  See the NOTICE file distributed with
@@ -16,8 +16,22 @@
 # limitations under the License.
 #
 
-cd spark-1.1.1-bin-hadoop2.3
-./sbin/stop-master.sh
-kill $(ps -ef | grep 'org.apache.spark.deploy.worker.Worker' | awk '{print 
$2}')
-cd ..
-rm -rf spark-1.1.1-bin-hadoop2.3*
+if [ $# -ne 2 ]; then
+    echo "usage) $0 [spark version] [hadoop version]"
+    echo "   eg) $0 1.3.1 2.6"
+    exit 1
+fi
+
+SPARK_VERSION="${1}"
+HADOOP_VERSION="${2}"
+
+FWDIR=$(dirname "${BASH_SOURCE-$0}")
+ZEPPELIN_HOME="$(cd "${FWDIR}/.."; pwd)"
+export 
SPARK_HOME=${ZEPPELIN_HOME}/spark-${SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}
+
+# set create PID dir
+export SPARK_PID_DIR=${SPARK_HOME}/run
+
+
+${SPARK_HOME}/sbin/spark-daemon.sh stop org.apache.spark.deploy.worker.Worker 1
+${SPARK_HOME}/sbin/stop-master.sh

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/109b0807/zeppelin-interpreter/pom.xml
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/pom.xml b/zeppelin-interpreter/pom.xml
index ba6e88d..980fe4a 100644
--- a/zeppelin-interpreter/pom.xml
+++ b/zeppelin-interpreter/pom.xml
@@ -77,6 +77,7 @@
     <dependency>
       <groupId>org.mockito</groupId>
       <artifactId>mockito-all</artifactId>
+      <version>1.9.0</version>
       <scope>test</scope>
     </dependency>
   </dependencies>

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/109b0807/zeppelin-server/pom.xml
----------------------------------------------------------------------
diff --git a/zeppelin-server/pom.xml b/zeppelin-server/pom.xml
index 0e68df2..da1ec5d 100644
--- a/zeppelin-server/pom.xml
+++ b/zeppelin-server/pom.xml
@@ -32,6 +32,34 @@
   <name>Zeppelin: Server</name>
   <url>http://www.nflabs.com</url>
 
+  <properties>
+    <cxf.version>2.7.7</cxf.version>
+    <jetty.version>8.1.14.v20131031</jetty.version>
+    <commons.httpclient.version>4.3.6</commons.httpclient.version>
+  </properties>
+
+  <dependencyManagement>
+    <dependencies>
+      <dependency>
+        <groupId>org.scala-lang</groupId>
+        <artifactId>scala-library</artifactId>
+        <version>2.10.4</version>
+      </dependency>
+
+      <dependency>
+        <groupId>org.scala-lang</groupId>
+        <artifactId>scala-compiler</artifactId>
+        <version>2.10.4</version>
+      </dependency>
+
+      <dependency>
+        <groupId>org.scala-lang</groupId>
+        <artifactId>scalap</artifactId>
+        <version>2.10.4</version>
+      </dependency>
+    </dependencies>
+  </dependencyManagement>
+
   <dependencies>
     <dependency>
       <groupId>${project.groupId}</groupId>
@@ -111,6 +139,18 @@
          <groupId>com.sun.jersey</groupId>
          <artifactId>jersey-servlet</artifactId>
        </exclusion>
+        <exclusion>
+          <groupId>org.scala-lang</groupId>
+          <artifactId>scala-library</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.scala-lang</groupId>
+          <artifactId>scala-compiler</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.scala-lang</groupId>
+          <artifactId>scalap</artifactId>
+        </exclusion>
       </exclusions>
     </dependency>
 
@@ -248,6 +288,7 @@
     <dependency>
       <groupId>org.apache.httpcomponents</groupId>
       <artifactId>httpclient</artifactId>
+      <version>4.3.6</version>
       <scope>test</scope>
     </dependency>
 
@@ -280,7 +321,8 @@
 
     <dependency>
       <groupId>org.scalatest</groupId>
-      <artifactId>scalatest_${scala.binary.version}</artifactId>
+      <artifactId>scalatest_2.10</artifactId>
+      <version>2.1.1</version>
       <scope>test</scope>
     </dependency>
 

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/109b0807/zeppelin-server/src/main/java/org/apache/zeppelin/server/ZeppelinServer.java
----------------------------------------------------------------------
diff --git 
a/zeppelin-server/src/main/java/org/apache/zeppelin/server/ZeppelinServer.java 
b/zeppelin-server/src/main/java/org/apache/zeppelin/server/ZeppelinServer.java
index 0072b87..bd93f80 100644
--- 
a/zeppelin-server/src/main/java/org/apache/zeppelin/server/ZeppelinServer.java
+++ 
b/zeppelin-server/src/main/java/org/apache/zeppelin/server/ZeppelinServer.java
@@ -69,7 +69,9 @@ public class ZeppelinServer extends Application {
   private SchedulerFactory schedulerFactory;
   public static Notebook notebook;
 
-  static NotebookServer notebookServer;
+  public static NotebookServer notebookServer;
+
+  public static Server jettyServer;
 
   private InterpreterFactory replFactory;
 
@@ -79,7 +81,7 @@ public class ZeppelinServer extends Application {
     ZeppelinConfiguration conf = ZeppelinConfiguration.create();
     conf.setProperty("args", args);
 
-    final Server jettyServer = setupJettyServer(conf);
+    jettyServer = setupJettyServer(conf);
     notebookServer = setupNotebookServer(conf);
 
     // REST api

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/109b0807/zeppelin-server/src/test/java/org/apache/zeppelin/rest/AbstractTestRestApi.java
----------------------------------------------------------------------
diff --git 
a/zeppelin-server/src/test/java/org/apache/zeppelin/rest/AbstractTestRestApi.java
 
b/zeppelin-server/src/test/java/org/apache/zeppelin/rest/AbstractTestRestApi.java
index ac40dda..744c1e0 100644
--- 
a/zeppelin-server/src/test/java/org/apache/zeppelin/rest/AbstractTestRestApi.java
+++ 
b/zeppelin-server/src/test/java/org/apache/zeppelin/rest/AbstractTestRestApi.java
@@ -17,17 +17,20 @@
 
 package org.apache.zeppelin.rest;
 
+import java.io.File;
 import java.io.IOException;
 import java.lang.ref.WeakReference;
+import java.net.InetAddress;
+import java.net.UnknownHostException;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
-import java.util.concurrent.TimeUnit;
 
 import org.apache.commons.httpclient.HttpClient;
 import org.apache.commons.httpclient.methods.ByteArrayRequestEntity;
 import org.apache.commons.httpclient.methods.GetMethod;
 import org.apache.commons.httpclient.methods.PostMethod;
 import org.apache.commons.httpclient.methods.RequestEntity;
+import org.apache.zeppelin.interpreter.InterpreterSetting;
 import org.apache.zeppelin.server.ZeppelinServer;
 import org.hamcrest.Description;
 import org.hamcrest.Matcher;
@@ -46,6 +49,7 @@ public abstract class AbstractTestRestApi {
   static final String restApiUrl = "/api";
   static final String url = getUrlToTest();
   protected static final boolean wasRunning = checkIfServerIsRuning();
+  static boolean pySpark = false;
 
   private String getUrl(String path) {
     String url;
@@ -68,7 +72,7 @@ public abstract class AbstractTestRestApi {
     return url;
   }
 
-  static ExecutorService executor = Executors.newSingleThreadExecutor();
+  static ExecutorService executor;
   protected static final Runnable server = new Runnable() {
     @Override
     public void run() {
@@ -84,6 +88,7 @@ public abstract class AbstractTestRestApi {
   protected static void startUp() throws Exception {
     if (!wasRunning) {
+      LOG.info("Starting test Zeppelin up...");
+      executor = Executors.newSingleThreadExecutor();
       executor.submit(server);
       long s = System.currentTimeMillis();
       boolean started = false;
@@ -98,19 +103,107 @@ public abstract class AbstractTestRestApi {
          throw new RuntimeException("Can not start Zeppelin server");
       }
       LOG.info("Test Zeppelin stared.");
+
+
+      // ci environment runs spark cluster for testing
+      // so configure zeppelin to use the spark cluster
+      if ("true".equals(System.getenv("CI"))) {
+        // assume first one is spark
+        InterpreterSetting sparkIntpSetting = 
ZeppelinServer.notebook.getInterpreterFactory().get().get(0);
+
+        // set spark master
+        sparkIntpSetting.getProperties().setProperty("master", "spark://" + 
getHostname() + ":7071");
+
+        // set spark home for pyspark
+        sparkIntpSetting.getProperties().setProperty("spark.home", 
getSparkHome());
+        pySpark = true;
+
+        
ZeppelinServer.notebook.getInterpreterFactory().restart(sparkIntpSetting.id());
+      } else {
+        // assume first one is spark
+        InterpreterSetting sparkIntpSetting = 
ZeppelinServer.notebook.getInterpreterFactory().get().get(0);
+
+        String sparkHome = getSparkHome();
+        if (sparkHome != null) {
+          // set spark home for pyspark
+          sparkIntpSetting.getProperties().setProperty("spark.home", 
sparkHome);
+          pySpark = true;
+        }
+
+        
ZeppelinServer.notebook.getInterpreterFactory().restart(sparkIntpSetting.id());
+      }
     }
   }
 
-  protected static void shutDown() {
+  private static String getHostname() {
+    try {
+      return InetAddress.getLocalHost().getHostName();
+    } catch (UnknownHostException e) {
+      e.printStackTrace();
+      return "localhost";
+    }
+  }
+
+  private static String getSparkHome() {
+    String sparkHome = getSparkHomeRecursively(new 
File(System.getProperty("user.dir")));
+    System.out.println("SPARK HOME detected " + sparkHome);
+    return sparkHome;
+  }
+
+  boolean isPyspark() {
+    return pySpark;
+  }
+
+  private static String getSparkHomeRecursively(File dir) {
+    if (dir == null) return null;
+    File files []  = dir.listFiles();
+    if (files == null) return null;
+
+    File homeDetected = null;
+    for (File f : files) {
+      if (isActiveSparkHome(f)) {
+        homeDetected = f;
+        break;
+      }
+    }
+
+    if (homeDetected != null) {
+      return homeDetected.getAbsolutePath();
+    } else {
+      return getSparkHomeRecursively(dir.getParentFile());
+    }
+  }
+
+  private static boolean isActiveSparkHome(File dir) {
+    if (dir.getName().matches("spark-[0-9\\.]+-bin-hadoop[0-9\\.]+")) {
+      File pidDir = new File(dir, "run");
+      if (pidDir.isDirectory() && pidDir.listFiles().length > 0) {
+        return true;
+      }
+    }
+    return false;
+  }
+
+  protected static void shutDown() throws Exception {
     if (!wasRunning) {
       LOG.info("Terminating test Zeppelin...");
+      ZeppelinServer.notebookServer.stop();
+      ZeppelinServer.jettyServer.stop();
       executor.shutdown();
-      try {
-        executor.awaitTermination(10, TimeUnit.SECONDS);
-      } catch (InterruptedException e) {
-        // TODO Auto-generated catch block
-        e.printStackTrace();
+
+      long s = System.currentTimeMillis();
+      boolean started = true;
+      while (System.currentTimeMillis() - s < 1000 * 60 * 3) {  // 3 minutes
+        Thread.sleep(2000);
+        started = checkIfServerIsRuning();
+        if (started == false) {
+          break;
+        }
       }
+      if (started == true) {
+        throw new RuntimeException("Can not stop Zeppelin server");
+      }
+
       LOG.info("Test Zeppelin terminated.");
     }
   }

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/109b0807/zeppelin-server/src/test/java/org/apache/zeppelin/rest/ZeppelinRestApiTest.java
----------------------------------------------------------------------
diff --git 
a/zeppelin-server/src/test/java/org/apache/zeppelin/rest/ZeppelinRestApiTest.java
 
b/zeppelin-server/src/test/java/org/apache/zeppelin/rest/ZeppelinRestApiTest.java
index 1a76ca5..0da5e0d 100644
--- 
a/zeppelin-server/src/test/java/org/apache/zeppelin/rest/ZeppelinRestApiTest.java
+++ 
b/zeppelin-server/src/test/java/org/apache/zeppelin/rest/ZeppelinRestApiTest.java
@@ -57,7 +57,7 @@ public class ZeppelinRestApiTest extends AbstractTestRestApi {
   }
 
   @AfterClass
-  public static void destroy() {
+  public static void destroy() throws Exception {
     AbstractTestRestApi.shutDown();
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-zeppelin/blob/109b0807/zeppelin-server/src/test/java/org/apache/zeppelin/rest/ZeppelinSparkClusterTest.java
----------------------------------------------------------------------
diff --git 
a/zeppelin-server/src/test/java/org/apache/zeppelin/rest/ZeppelinSparkClusterTest.java
 
b/zeppelin-server/src/test/java/org/apache/zeppelin/rest/ZeppelinSparkClusterTest.java
new file mode 100644
index 0000000..02b7e47
--- /dev/null
+++ 
b/zeppelin-server/src/test/java/org/apache/zeppelin/rest/ZeppelinSparkClusterTest.java
@@ -0,0 +1,107 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.zeppelin.rest;
+
+import static org.junit.Assert.assertEquals;
+
+import java.io.IOException;
+
+import org.apache.zeppelin.notebook.Note;
+import org.apache.zeppelin.notebook.Paragraph;
+import org.apache.zeppelin.scheduler.Job.Status;
+import org.apache.zeppelin.server.ZeppelinServer;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import com.google.gson.Gson;
+
+/**
+ * Test against spark cluster.
+ * Spark cluster is started by CI server using testing/startSparkCluster.sh
+ */
+public class ZeppelinSparkClusterTest extends AbstractTestRestApi {
+  Gson gson = new Gson();
+
+  @BeforeClass
+  public static void init() throws Exception {
+    AbstractTestRestApi.startUp();
+  }
+
+  @AfterClass
+  public static void destroy() throws Exception {
+    AbstractTestRestApi.shutDown();
+  }
+
+  private void waitForFinish(Paragraph p) {
+    while (p.getStatus() != Status.FINISHED) {
+      try {
+        Thread.sleep(100);
+      } catch (InterruptedException e) {
+        e.printStackTrace();
+      }
+    }
+  }
+
+  @Test
+  public void basicRDDTransformationAndActionTest() throws IOException {
+    // create new note
+    Note note = ZeppelinServer.notebook.createNote();
+
+    // run a scala paragraph with a simple RDD reduce
+    Paragraph p = note.addParagraph();
+    p.setText("print(sc.parallelize(1 to 10).reduce(_ + _))");
+    note.run(p.getId());
+    waitForFinish(p);
+    assertEquals("55", p.getResult().message());
+    ZeppelinServer.notebook.removeNote(note.id());
+  }
+
+  @Test
+  public void pySparkTest() throws IOException {
+    // create new note
+    Note note = ZeppelinServer.notebook.createNote();
+
+    int sparkVersion = getSparkVersionNumber(note);
+
+    if (isPyspark() && sparkVersion >= 12) {   // pyspark supported from 1.2.1
+      // run a pyspark paragraph with a simple RDD reduce
+      Paragraph p = note.addParagraph();
+      p.setText("%pyspark print(sc.parallelize(range(1, 11)).reduce(lambda a, 
b: a + b))");
+      note.run(p.getId());
+      waitForFinish(p);
+      assertEquals("55\n", p.getResult().message());
+    }
+    ZeppelinServer.notebook.removeNote(note.id());
+  }
+
+  /**
+   * Get spark version number as a numerical value.
+   * e.g. 1.1.x => 11, 1.2.x => 12, 1.3.x => 13 ...
+   */
+  private int getSparkVersionNumber(Note note) {
+    Paragraph p = note.addParagraph();
+    p.setText("print(sc.version)");
+    note.run(p.getId());
+    waitForFinish(p);
+    String sparkVersion = p.getResult().message();
+    System.out.println("Spark version detected " + sparkVersion);
+    String[] split = sparkVersion.split("\\.");
+    int version = Integer.parseInt(split[0]) * 10 + Integer.parseInt(split[1]);
+    return version;
+  }
+}

Reply via email to