http://git-wip-us.apache.org/repos/asf/zeppelin/blob/d762b528/spark/pom.xml
----------------------------------------------------------------------
diff --git a/spark/pom.xml b/spark/pom.xml
index 1972f26..06b7d9f 100644
--- a/spark/pom.xml
+++ b/spark/pom.xml
@@ -16,680 +16,227 @@
   ~ limitations under the License.
   -->
 
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-  <modelVersion>4.0.0</modelVersion>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+
+    <parent>
+        <artifactId>interpreter-parent</artifactId>
+        <groupId>org.apache.zeppelin</groupId>
+        <version>0.9.0-SNAPSHOT</version>
+        <relativePath>../interpreter-parent/pom.xml</relativePath>
+    </parent>
 
-  <parent>
-    <artifactId>zeppelin</artifactId>
     <groupId>org.apache.zeppelin</groupId>
+    <artifactId>spark-parent</artifactId>
+    <packaging>pom</packaging>
     <version>0.9.0-SNAPSHOT</version>
-    <relativePath>..</relativePath>
-  </parent>
-
-  <groupId>org.apache.zeppelin</groupId>
-  <artifactId>zeppelin-spark_2.10</artifactId>
-  <packaging>jar</packaging>
-  <version>0.9.0-SNAPSHOT</version>
-  <name>Zeppelin: Spark</name>
-  <description>Zeppelin spark support</description>
-
-  <properties>
-    <!--library versions-->
-    <jsoup.version>1.8.2</jsoup.version>
-    <spark.version>2.0.2</spark.version>
-    <guava.version>14.0.1</guava.version>
-    <commons.exec.version>1.3</commons.exec.version>
-    <commons.compress.version>1.9</commons.compress.version>
-    <maven.plugin.api.version>3.0</maven.plugin.api.version>
-    <aether.version>1.12</aether.version>
-    <maven.aeither.provider.version>3.0.3</maven.aeither.provider.version>
-    <wagon.version>1.0</wagon.version>
-
-    <datanucleus.rdbms.version>3.2.9</datanucleus.rdbms.version>
-    <datanucleus.apijdo.version>3.2.6</datanucleus.apijdo.version>
-    <datanucleus.core.version>3.2.10</datanucleus.core.version>
-
-    <!--plugin versions-->
-    <plugin.shade.version>2.3</plugin.shade.version>
-    <plugin.scala.version>2.15.2</plugin.scala.version>
-
-    <!-- settings -->
-    <pyspark.test.exclude>**/PySparkInterpreterMatplotlibTest.java</pyspark.test.exclude>
-    <pyspark.test.include>**/*Test.*</pyspark.test.include>
-  </properties>
-
-  <dependencies>
-    <dependency>
-      <groupId>${project.groupId}</groupId>
-      <artifactId>zeppelin-display_${scala.binary.version}</artifactId>
-      <version>${project.version}</version>
-    </dependency>
-
-    <dependency>
-      <groupId>${project.groupId}</groupId>
-      <artifactId>zeppelin-interpreter</artifactId>
-      <version>${project.version}</version>
-    </dependency>
-
-    <dependency>
-      <groupId>${project.groupId}</groupId>
-      <artifactId>zeppelin-python</artifactId>
-      <version>${project.version}</version>
-      <exclusions>
-        <exclusion>
-          <groupId>net.sf.py4j</groupId>
-          <artifactId>py4j</artifactId>
-        </exclusion>
-      </exclusions>
-    </dependency>
-
-    <dependency>
-      <groupId>${project.groupId}</groupId>
-      <artifactId>zeppelin-python</artifactId>
-      <version>${project.version}</version>
-      <classifier>tests</classifier>
-      <scope>test</scope>
-      <exclusions>
-        <exclusion>
-          <groupId>net.sf.py4j</groupId>
-          <artifactId>py4j</artifactId>
-        </exclusion>
-      </exclusions>
-    </dependency>
-
-    <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-api</artifactId>
-    </dependency>
-
-    <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-log4j12</artifactId>
-    </dependency>
-
-    <dependency>
-      <groupId>org.apache.spark</groupId>
-      <artifactId>spark-repl_${scala.binary.version}</artifactId>
-      <version>${spark.version}</version>
-      <scope>provided</scope>
-    </dependency>
-
-    <dependency>
-      <groupId>org.apache.spark</groupId>
-      <artifactId>spark-hive_${scala.binary.version}</artifactId>
-      <version>${spark.version}</version>
-      <scope>provided</scope>
-    </dependency>
-
-    <!-- Aether :: maven dependency resolution -->
-    <dependency>
-      <groupId>org.apache.maven</groupId>
-      <artifactId>maven-plugin-api</artifactId>
-      <version>${maven.plugin.api.version}</version>
-      <exclusions>
-        <exclusion>
-          <groupId>org.codehaus.plexus</groupId>
-          <artifactId>plexus-utils</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>org.sonatype.sisu</groupId>
-          <artifactId>sisu-inject-plexus</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>org.apache.maven</groupId>
-          <artifactId>maven-model</artifactId>
-        </exclusion>
-      </exclusions>
-    </dependency>
-
-    <dependency>
-      <groupId>org.sonatype.aether</groupId>
-      <artifactId>aether-api</artifactId>
-      <version>${aether.version}</version>
-    </dependency>
-
-    <dependency>
-      <groupId>org.sonatype.aether</groupId>
-      <artifactId>aether-util</artifactId>
-      <version>${aether.version}</version>
-    </dependency>
-
-    <dependency>
-      <groupId>org.sonatype.aether</groupId>
-      <artifactId>aether-impl</artifactId>
-      <version>${aether.version}</version>
-    </dependency>
-
-    <dependency>
-      <groupId>org.apache.maven</groupId>
-      <artifactId>maven-aether-provider</artifactId>
-      <version>${maven.aeither.provider.version}</version>
-      <exclusions>
-        <exclusion>
-          <groupId>org.sonatype.aether</groupId>
-          <artifactId>aether-api</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>org.sonatype.aether</groupId>
-          <artifactId>aether-spi</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>org.sonatype.aether</groupId>
-          <artifactId>aether-util</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>org.sonatype.aether</groupId>
-          <artifactId>aether-impl</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>org.codehaus.plexus</groupId>
-          <artifactId>plexus-utils</artifactId>
-        </exclusion>
-      </exclusions>
-    </dependency>
-
-    <dependency>
-      <groupId>org.sonatype.aether</groupId>
-      <artifactId>aether-connector-file</artifactId>
-      <version>${aether.version}</version>
-    </dependency>
-
-    <dependency>
-      <groupId>org.sonatype.aether</groupId>
-      <artifactId>aether-connector-wagon</artifactId>
-      <version>${aether.version}</version>
-      <exclusions>
-        <exclusion>
-          <groupId>org.apache.maven.wagon</groupId>
-          <artifactId>wagon-provider-api</artifactId>
-        </exclusion>
-      </exclusions>
-    </dependency>
-
-    <dependency>
-      <groupId>org.apache.maven.wagon</groupId>
-      <artifactId>wagon-provider-api</artifactId>
-      <version>${wagon.version}</version>
-      <exclusions>
-        <exclusion>
-          <groupId>org.codehaus.plexus</groupId>
-          <artifactId>plexus-utils</artifactId>
-        </exclusion>
-      </exclusions>
-    </dependency>
-
-    <dependency>
-      <groupId>org.apache.maven.wagon</groupId>
-      <artifactId>wagon-http-lightweight</artifactId>
-      <version>${wagon.version}</version>
-      <exclusions>
-        <exclusion>
-          <groupId>org.apache.maven.wagon</groupId>
-          <artifactId>wagon-http-shared</artifactId>
-        </exclusion>
-      </exclusions>
-    </dependency>
-
-    <dependency>
-      <groupId>org.apache.maven.wagon</groupId>
-      <artifactId>wagon-http</artifactId>
-      <version>${wagon.version}</version>
-      <exclusions>
-      </exclusions>
-    </dependency>
-
-    <dependency>
-      <groupId>org.apache.commons</groupId>
-      <artifactId>commons-exec</artifactId>
-      <version>${commons.exec.version}</version>
-    </dependency>
-
-    <dependency>
-      <groupId>org.scala-lang</groupId>
-      <artifactId>scala-library</artifactId>
-      <version>${scala.version}</version>
-      <scope>provided</scope>
-    </dependency>
-
-    <dependency>
-      <groupId>org.scala-lang</groupId>
-      <artifactId>scala-compiler</artifactId>
-      <version>${scala.version}</version>
-      <scope>provided</scope>
-    </dependency>
-
-    <dependency>
-      <groupId>org.scala-lang</groupId>
-      <artifactId>scala-reflect</artifactId>
-      <version>${scala.version}</version>
-      <scope>provided</scope>
-    </dependency>
-
-    <dependency>
-      <groupId>commons-lang</groupId>
-      <artifactId>commons-lang</artifactId>
-      <scope>provided</scope>
-    </dependency>
-
-    <dependency>
-      <groupId>org.apache.commons</groupId>
-      <artifactId>commons-compress</artifactId>
-      <version>${commons.compress.version}</version>
-      <scope>provided</scope>
-    </dependency>
-
-    <dependency>
-      <groupId>org.jsoup</groupId>
-      <artifactId>jsoup</artifactId>
-      <version>${jsoup.version}</version>
-    </dependency>
-
-    <!--test libraries-->
-    <dependency>
-      <groupId>org.scalatest</groupId>
-      <artifactId>scalatest_${scala.binary.version}</artifactId>
-      <version>${scalatest.version}</version>
-      <scope>test</scope>
-    </dependency>
-
-    <dependency>
-      <groupId>junit</groupId>
-      <artifactId>junit</artifactId>
-      <scope>test</scope>
-    </dependency>
-
-    <dependency>
-      <groupId>org.datanucleus</groupId>
-      <artifactId>datanucleus-core</artifactId>
-      <version>${datanucleus.core.version}</version>
-      <scope>test</scope>
-    </dependency>
-
-    <dependency>
-      <groupId>org.datanucleus</groupId>
-      <artifactId>datanucleus-api-jdo</artifactId>
-      <version>${datanucleus.apijdo.version}</version>
-      <scope>test</scope>
-    </dependency>
-
-    <dependency>
-      <groupId>org.datanucleus</groupId>
-      <artifactId>datanucleus-rdbms</artifactId>
-      <version>${datanucleus.rdbms.version}</version>
-      <scope>test</scope>
-    </dependency>
-
-    <dependency>
-      <groupId>org.mockito</groupId>
-      <artifactId>mockito-core</artifactId>
-      <scope>test</scope>
-    </dependency>
-
-    <dependency>
-      <groupId>org.powermock</groupId>
-      <artifactId>powermock-api-mockito</artifactId>
-      <scope>test</scope>
-    </dependency>
-
-    <dependency>
-      <groupId>org.powermock</groupId>
-      <artifactId>powermock-module-junit4</artifactId>
-      <scope>test</scope>
-    </dependency>
-
-  </dependencies>
-
-  <build>
-    <plugins>
-      <plugin>
-        <artifactId>maven-enforcer-plugin</artifactId>
-        <executions>
-          <execution>
-            <id>enforce</id>
-            <phase>none</phase>
-          </execution>
-        </executions>
-      </plugin>
-
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-surefire-plugin</artifactId>
-        <configuration>
-          <forkCount>1</forkCount>
-          <reuseForks>false</reuseForks>
-          <argLine>-Xmx1024m -XX:MaxPermSize=256m</argLine>
-          <excludes>
-            <exclude>**/SparkRInterpreterTest.java</exclude>
-            <exclude>${pyspark.test.exclude}</exclude>
-          </excludes>
-          <environmentVariables>
-            <!-- local pyspark execution needs PYTHONPATH otherwise python daemon in executor side will fail
-            e.g. sc.range(1,10).sum()
-            -->
-            <PYTHONPATH>../interpreter/spark/pyspark/pyspark.zip:../interpreter/spark/pyspark/py4j-${spark.py4j.version}-src.zip:../interpreter/lib/python</PYTHONPATH>
-          </environmentVariables>
-        </configuration>
-      </plugin>
-
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-shade-plugin</artifactId>
-        <version>${plugin.shade.version}</version>
-        <configuration>
-          <filters>
-            <filter>
-              <artifact>*:*</artifact>
-              <excludes>
-                <exclude>META-INF/*.SF</exclude>
-                <exclude>META-INF/*.DSA</exclude>
-                <exclude>META-INF/*.RSA</exclude>
-              </excludes>
-            </filter>
-          </filters>
-          <transformers>
-            <transformer implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer"/>
-            <transformer implementation="org.apache.maven.plugins.shade.resource.AppendingTransformer">
-              <resource>reference.conf</resource>
-            </transformer>
-          </transformers>
-
-          <relocations>
-            <!-- shade guava and proto-buf, because it might conflict with those of spark -->
-            <relocation>
-              <pattern>com.google</pattern>
-              <shadedPattern>org.apache.zeppelin.com.google</shadedPattern>
-            </relocation>
-            <!-- shade netty, because it might conflict with that of spark-->
-            <relocation>
-              <pattern>io.netty</pattern>
-              <shadedPattern>org.apache.zeppelin.io.netty</shadedPattern>
-            </relocation>
-          </relocations>
-        </configuration>
-        <executions>
-          <execution>
-            <phase>package</phase>
-            <goals>
-              <goal>shade</goal>
-            </goals>
-          </execution>
-        </executions>
-      </plugin>
-
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-dependency-plugin</artifactId>
-        <executions>
-          <execution>
-            <phase>package</phase>
-            <goals>
-              <goal>copy</goal>
-            </goals>
-            <configuration>
-              <outputDirectory>${project.build.directory}/../../interpreter/spark</outputDirectory>
-              <overWriteReleases>false</overWriteReleases>
-              <overWriteSnapshots>false</overWriteSnapshots>
-              <overWriteIfNewer>true</overWriteIfNewer>
-              <includeScope>runtime</includeScope>
-              <artifactItems>
-                <artifactItem>
-                  <groupId>${project.groupId}</groupId>
-                  <artifactId>${project.artifactId}</artifactId>
-                  <version>${project.version}</version>
-                  <type>${project.packaging}</type>
-                </artifactItem>
-              </artifactItems>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
-
-      <!-- Plugin to compile Scala code -->
-      <plugin>
-        <groupId>org.scala-tools</groupId>
-        <artifactId>maven-scala-plugin</artifactId>
-        <version>${plugin.scala.version}</version>
-        <configuration>
-            <scalaVersion>${scala.version}</scalaVersion>
-          <excludes>
-            <exclude>**/ZeppelinR.scala</exclude>
-            <exclude>**/SparkRBackend.scala</exclude>
-          </excludes>
-        </configuration>
-        <executions>
-          <execution>
-            <id>compile</id>
-            <goals>
-              <goal>compile</goal>
-            </goals>
-            <phase>compile</phase>
-          </execution>
-          <execution>
-            <id>test-compile</id>
-            <goals>
-              <goal>testCompile</goal>
-            </goals>
-            <phase>test-compile</phase>
-          </execution>
-          <execution>
-            <phase>process-resources</phase>
-            <goals>
-              <goal>compile</goal>
-            </goals>
-          </execution>
-        </executions>
-      </plugin>
-
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <configuration>
-          <excludes combine.self="override"></excludes>
-          <testExcludes combine.self="override">
-            <testExclude>${pyspark.test.exclude}</testExclude>
-          </testExcludes>
-        </configuration>
-      </plugin>
-      <plugin>
-        <groupId>org.scala-tools</groupId>
-        <artifactId>maven-scala-plugin</artifactId>
-        <configuration>
-          <excludes combine.self="override">
-          </excludes>
-        </configuration>
-      </plugin>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-surefire-plugin</artifactId>
-        <configuration>
-          <excludes combine.self="override">
-            <exclude>${pyspark.test.exclude}</exclude>
-          </excludes>
-        </configuration>
-      </plugin>
-
-      <!-- include sparkr by default -->
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <configuration>
-          <excludes combine.self="override"></excludes>
-          <testExcludes combine.self="override">
-            <testExclude>${pyspark.test.exclude}</testExclude>
-          </testExcludes>
-        </configuration>
-      </plugin>
-      <plugin>
-        <groupId>org.scala-tools</groupId>
-        <artifactId>maven-scala-plugin</artifactId>
-        <configuration>
-          <excludes combine.self="override">
-          </excludes>
-        </configuration>
-      </plugin>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-surefire-plugin</artifactId>
-        <configuration>
-          <excludes combine.self="override">
-            <exclude>${pyspark.test.exclude}</exclude>
-          </excludes>
-        </configuration>
-      </plugin>
-
-      <plugin>
-        <artifactId>maven-resources-plugin</artifactId>
-        <executions>
-          <execution>
-            <id>copy-interpreter-setting</id>
-            <phase>package</phase>
-            <goals>
-              <goal>resources</goal>
-            </goals>
-            <configuration>
-              <outputDirectory>${project.build.directory}/../../interpreter/spark</outputDirectory>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
-    </plugins>
-  </build>
-
-  <profiles>
-    <profile>
-      <id>spark-1.4</id>
-      <properties>
-        <spark.version>1.4.1</spark.version>
-      </properties>
-
-      <dependencies>
-      </dependencies>
-    </profile>
-
-    <profile>
-      <id>spark-1.5</id>
-      <properties>
-        <spark.version>1.5.2</spark.version>
-        <akka.group>com.typesafe.akka</akka.group>
-        <akka.version>2.3.11</akka.version>
-        <protobuf.version>2.5.0</protobuf.version>
-      </properties>
-    </profile>
-
-    <profile>
-      <id>spark-1.6</id>
-      <properties>
-        <spark.version>1.6.3</spark.version>
-        <spark.py4j.version>0.9</spark.py4j.version>
-        <akka.group>com.typesafe.akka</akka.group>
-        <akka.version>2.3.11</akka.version>
-        <protobuf.version>2.5.0</protobuf.version>
-      </properties>
-    </profile>
-
-    <profile>
-      <id>spark-2.0</id>
-      <properties>
-        <spark.version>2.0.2</spark.version>
-        <protobuf.version>2.5.0</protobuf.version>
-        <spark.py4j.version>0.10.3</spark.py4j.version>
-      </properties>
-    </profile>
+    <name>spark-parent</name>
+    <description>Zeppelin spark support</description>
+
+    <properties>
+        <!--library versions-->
+        <datanucleus.rdbms.version>3.2.9</datanucleus.rdbms.version>
+        <datanucleus.apijdo.version>3.2.6</datanucleus.apijdo.version>
+        <datanucleus.core.version>3.2.10</datanucleus.core.version>
+
+        <!--plugin versions-->
+        <plugin.shade.version>2.4.1</plugin.shade.version>
+        <plugin.scala.version>2.15.2</plugin.scala.version>
+        <!-- spark versions -->
+        <spark.version>2.2.0</spark.version>
+        <py4j.version>0.10.4</py4j.version>
+    </properties>
 
-    <profile>
-      <id>spark-2.1</id>
-      <properties>
-        <spark.version>2.1.0</spark.version>
-        <protobuf.version>2.5.0</protobuf.version>
-        <spark.py4j.version>0.10.4</spark.py4j.version>
-        <scala.version>2.11.8</scala.version>
-      </properties>
-    </profile>
+    <dependencies>
 
-    <profile>
-      <id>spark-2.2</id>
-      <activation>
-        <activeByDefault>true</activeByDefault>
-      </activation>
-      <properties>
-        <spark.version>2.2.0</spark.version>
-        <protobuf.version>2.5.0</protobuf.version>
-        <spark.py4j.version>0.10.4</spark.py4j.version>
-      </properties>
-    </profile>
+        <dependency>
+            <groupId>org.apache.zeppelin</groupId>
+            <artifactId>zeppelin-interpreter</artifactId>
+            <version>${project.version}</version>
+        </dependency>
 
-    <profile>
-      <id>hadoop-0.23</id>
-      <!-- SPARK-1121: Adds an explicit dependency on Avro to work around a
-        Hadoop 0.23.X issue -->
-      <dependencies>
+        <!--test libraries-->
         <dependency>
-          <groupId>org.apache.avro</groupId>
-          <artifactId>avro</artifactId>
+            <groupId>org.apache.zeppelin</groupId>
+            <artifactId>zeppelin-display</artifactId>
+            <version>${project.version}</version>
+            <scope>test</scope>
         </dependency>
-      </dependencies>
-      <properties>
-        <hadoop.version>0.23.10</hadoop.version>
-      </properties>
-    </profile>
 
-    <profile>
-      <id>hadoop-1</id>
-      <properties>
-        <hadoop.version>1.0.4</hadoop.version>
-        <avro.mapred.classifier>hadoop1</avro.mapred.classifier>
-        <codehaus.jackson.version>1.8.8</codehaus.jackson.version>
-        <akka.group>org.spark-project.akka</akka.group>
-      </properties>
-    </profile>
+        <dependency>
+            <groupId>org.scalatest</groupId>
+            <artifactId>scalatest_${scala.binary.version}</artifactId>
+            <version>${scalatest.version}</version>
+            <scope>test</scope>
+        </dependency>
 
-    <profile>
-      <id>hadoop-2.2</id>
-      <properties>
-        <hadoop.version>2.2.0</hadoop.version>
-        <protobuf.version>2.5.0</protobuf.version>
-        <avro.mapred.classifier>hadoop2</avro.mapred.classifier>
-      </properties>
-    </profile>
+        <dependency>
+            <groupId>junit</groupId>
+            <artifactId>junit</artifactId>
+            <scope>test</scope>
+        </dependency>
 
-    <profile>
-      <id>hadoop-2.3</id>
-      <properties>
-        <hadoop.version>2.3.0</hadoop.version>
-        <protobuf.version>2.5.0</protobuf.version>
-        <jets3t.version>0.9.3</jets3t.version>
-        <avro.mapred.classifier>hadoop2</avro.mapred.classifier>
-      </properties>
-    </profile>
+        <dependency>
+            <groupId>org.datanucleus</groupId>
+            <artifactId>datanucleus-core</artifactId>
+            <version>${datanucleus.core.version}</version>
+            <scope>test</scope>
+        </dependency>
 
-    <profile>
-      <id>hadoop-2.4</id>
-      <properties>
-        <hadoop.version>2.4.0</hadoop.version>
-        <protobuf.version>2.5.0</protobuf.version>
-        <jets3t.version>0.9.3</jets3t.version>
-        <avro.mapred.classifier>hadoop2</avro.mapred.classifier>
-      </properties>
-    </profile>
+        <dependency>
+            <groupId>org.datanucleus</groupId>
+            <artifactId>datanucleus-api-jdo</artifactId>
+            <version>${datanucleus.apijdo.version}</version>
+            <scope>test</scope>
+        </dependency>
 
-    <profile>
-      <id>hadoop-2.6</id>
-      <properties>
-        <hadoop.version>2.6.0</hadoop.version>
-        <protobuf.version>2.5.0</protobuf.version>
-        <jets3t.version>0.9.3</jets3t.version>
-        <avro.mapred.classifier>hadoop2</avro.mapred.classifier>
-      </properties>
-    </profile>
+        <dependency>
+            <groupId>org.datanucleus</groupId>
+            <artifactId>datanucleus-rdbms</artifactId>
+            <version>${datanucleus.rdbms.version}</version>
+            <scope>test</scope>
+        </dependency>
 
-    <profile>
-      <id>hadoop-2.7</id>
-      <properties>
-        <hadoop.version>2.7.2</hadoop.version>
-        <protobuf.version>2.5.0</protobuf.version>
-        <jets3t.version>0.9.0</jets3t.version>
-        <avro.mapred.classifier>hadoop2</avro.mapred.classifier>
-      </properties>
-    </profile>
-  </profiles>
+    </dependencies>
+
+    <build>
+        <plugins>
+            <plugin>
+                <artifactId>maven-enforcer-plugin</artifactId>
+                <executions>
+                    <execution>
+                        <id>enforce</id>
+                        <phase>none</phase>
+                    </execution>
+                </executions>
+            </plugin>
+
+            <plugin>
+                <groupId>org.scalatest</groupId>
+                <artifactId>scalatest-maven-plugin</artifactId>
+                <configuration>
+                    <reportsDirectory>${project.build.directory}/surefire-reports</reportsDirectory>
+                    <junitxml>.</junitxml>
+                    <filereports>WDF TestSuite.txt</filereports>
+                </configuration>
+                <executions>
+                    <execution>
+                        <id>test</id>
+                        <goals>
+                            <goal>test</goal>
+                        </goals>
+                    </execution>
+                </executions>
+            </plugin>
+
+            <plugin>
+                <groupId>net.alchim31.maven</groupId>
+                <artifactId>scala-maven-plugin</artifactId>
+                <version>3.2.2</version>
+                <executions>
+                    <execution>
+                        <id>eclipse-add-source</id>
+                        <goals>
+                            <goal>add-source</goal>
+                        </goals>
+                    </execution>
+                    <execution>
+                        <id>scala-compile-first</id>
+                        <phase>process-resources</phase>
+                        <goals>
+                            <goal>compile</goal>
+                        </goals>
+                    </execution>
+                    <execution>
+                        <id>scala-test-compile-first</id>
+                        <phase>process-test-resources</phase>
+                        <goals>
+                            <goal>testCompile</goal>
+                        </goals>
+                    </execution>
+                </executions>
+                <configuration>
+                    <scalaVersion>${scala.compile.version}</scalaVersion>
+                    <!--<recompileMode>incremental</recompileMode>-->
+                    <!--<useZincServer>true</useZincServer>-->
+                    <args>
+                        <arg>-unchecked</arg>
+                        <arg>-deprecation</arg>
+                        <arg>-feature</arg>
+                    </args>
+                    <jvmArgs>
+                        <jvmArg>-Xms1024m</jvmArg>
+                        <jvmArg>-Xmx1024m</jvmArg>
+                        <jvmArg>-XX:PermSize=${PermGen}</jvmArg>
+                        <jvmArg>-XX:MaxPermSize=${MaxPermGen}</jvmArg>
+                    </jvmArgs>
+                    <javacArgs>
+                        <javacArg>-source</javacArg>
+                        <javacArg>${java.version}</javacArg>
+                        <javacArg>-target</javacArg>
+                        <javacArg>${java.version}</javacArg>
+                        <javacArg>-Xlint:all,-serial,-path,-options</javacArg>
+                    </javacArgs>
+                </configuration>
+            </plugin>
+
+        </plugins>
+    </build>
+
+
+    <profiles>
+        <profile>
+            <id>spark-2.2</id>
+            <activation>
+                <activeByDefault>true</activeByDefault>
+            </activation>
+            <properties>
+                <spark.version>2.2.0</spark.version>
+                <py4j.version>0.10.4</py4j.version>
+            </properties>
+        </profile>
+
+        <profile>
+            <id>spark-2.1</id>
+            <properties>
+                <spark.version>2.1.0</spark.version>
+                <py4j.version>0.10.4</py4j.version>
+            </properties>
+        </profile>
+
+        <profile>
+            <id>spark-2.0</id>
+            <properties>
+                <spark.version>2.0.2</spark.version>
+                <py4j.version>0.10.3</py4j.version>
+            </properties>
+        </profile>
+
+        <profile>
+            <id>spark-1.6</id>
+            <properties>
+                <spark.version>1.6.3</spark.version>
+                <py4j.version>0.9</py4j.version>
+            </properties>
+        </profile>
+
+        <profile>
+            <id>spark-1.5</id>
+            <properties>
+                <spark.version>1.5.2</spark.version>
+                <py4j.version>0.8.2.1</py4j.version>
+            </properties>
+        </profile>
+
+        <profile>
+            <id>spark-1.4</id>
+            <properties>
+                <spark.version>1.4.1</spark.version>
+                <py4j.version>0.8.2.1</py4j.version>
+            </properties>
+        </profile>
+
+    </profiles>
 </project>

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/d762b528/spark/scala-2.10/pom.xml
----------------------------------------------------------------------
diff --git a/spark/scala-2.10/pom.xml b/spark/scala-2.10/pom.xml
new file mode 100644
index 0000000..e32e620
--- /dev/null
+++ b/spark/scala-2.10/pom.xml
@@ -0,0 +1,41 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one or more
+  ~ contributor license agreements.  See the NOTICE file distributed with
+  ~ this work for additional information regarding copyright ownership.
+  ~ The ASF licenses this file to You under the Apache License, Version 2.0
+  ~ (the "License"); you may not use this file except in compliance with
+  ~ the License.  You may obtain a copy of the License at
+  ~
+  ~    http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing, software
+  ~ distributed under the License is distributed on an "AS IS" BASIS,
+  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  ~ See the License for the specific language governing permissions and
+  ~ limitations under the License.
+  -->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>org.apache.zeppelin</groupId>
+  <artifactId>spark-scala-2.10</artifactId>
+  <version>0.9.0-SNAPSHOT</version>
+  <packaging>jar</packaging>
+  <name>Spark Interpreter: Scala_2.10</name>
+
+  <parent>
+    <groupId>org.apache.zeppelin</groupId>
+    <artifactId>spark-scala-parent</artifactId>
+    <version>0.9.0-SNAPSHOT</version>
+    <relativePath>../spark-scala-parent/pom.xml</relativePath>
+  </parent>
+
+  <properties>
+    <scala.version>2.10.5</scala.version>
+    <scala.binary.version>2.10</scala.binary.version>
+    <scala.compile.version>${scala.version}</scala.compile.version>
+  </properties>
+
+</project>

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/d762b528/spark/scala-2.10/spark-scala-parent
----------------------------------------------------------------------
diff --git a/spark/scala-2.10/spark-scala-parent b/spark/scala-2.10/spark-scala-parent
new file mode 120000
index 0000000..e5e899e
--- /dev/null
+++ b/spark/scala-2.10/spark-scala-parent
@@ -0,0 +1 @@
+../spark-scala-parent
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/d762b528/spark/scala-2.10/src/main/scala/org/apache/zeppelin/spark/SparkScala210Interpreter.scala
----------------------------------------------------------------------
diff --git a/spark/scala-2.10/src/main/scala/org/apache/zeppelin/spark/SparkScala210Interpreter.scala b/spark/scala-2.10/src/main/scala/org/apache/zeppelin/spark/SparkScala210Interpreter.scala
new file mode 100644
index 0000000..43aa864
--- /dev/null
+++ b/spark/scala-2.10/src/main/scala/org/apache/zeppelin/spark/SparkScala210Interpreter.scala
@@ -0,0 +1,141 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.spark
+
+import java.io.File
+import java.nio.file.{Files, Paths}
+
+import org.apache.spark.SparkConf
+import org.apache.spark.repl.SparkILoop
+import org.apache.spark.repl.SparkILoop._
+import org.apache.zeppelin.interpreter.thrift.InterpreterCompletion
+import org.apache.zeppelin.interpreter.util.InterpreterOutputStream
+import org.apache.zeppelin.interpreter.{InterpreterContext, InterpreterResult}
+import org.slf4j.{Logger, LoggerFactory}
+
+import scala.tools.nsc.Settings
+import scala.tools.nsc.interpreter._
+
+/**
+  * SparkInterpreter for scala-2.10
+  */
+class SparkScala210Interpreter(override val conf: SparkConf,
+                               override val depFiles: java.util.List[String])
+  extends BaseSparkScalaInterpreter(conf, depFiles) {
+
+  lazy override val LOGGER: Logger = LoggerFactory.getLogger(getClass)
+
+  private var sparkILoop: SparkILoop = _
+
+  override val interpreterOutput =
+    new InterpreterOutputStream(LoggerFactory.getLogger(classOf[SparkScala210Interpreter]))
+
+  override def open(): Unit = {
+    super.open()
+    // redirect the output of open to InterpreterOutputStream, so that user can have more
+    // diagnose info in frontend
+    if (InterpreterContext.get() != null) {
+      interpreterOutput.setInterpreterOutput(InterpreterContext.get().out)
+    }
+    val rootDir = conf.get("spark.repl.classdir", System.getProperty("java.io.tmpdir"))
+    val outputDir = Files.createTempDirectory(Paths.get(rootDir), "spark").toFile
+    outputDir.deleteOnExit()
+    conf.set("spark.repl.class.outputDir", outputDir.getAbsolutePath)
+    // Only Spark1 requires to create http server, Spark2 removes HttpServer class.
+    startHttpServer(outputDir).foreach { case (server, uri) =>
+      sparkHttpServer = server
+      conf.set("spark.repl.class.uri", uri)
+    }
+
+    val settings = new Settings()
+    settings.embeddedDefaults(Thread.currentThread().getContextClassLoader())
+    settings.usejavacp.value = true
+    settings.classpath.value = getUserJars.mkString(File.pathSeparator)
+    Console.setOut(interpreterOutput)
+    sparkILoop = new SparkILoop(null, new JPrintWriter(Console.out, true))
+
+    setDeclaredField(sparkILoop, "settings", settings)
+    callMethod(sparkILoop, "createInterpreter")
+    sparkILoop.initializeSynchronous()
+    callMethod(sparkILoop, "postInitialization")
+    val reader = callMethod(sparkILoop,
+      "org$apache$spark$repl$SparkILoop$$chooseReader",
+      Array(settings.getClass), Array(settings)).asInstanceOf[InteractiveReader]
+    setDeclaredField(sparkILoop, "org$apache$spark$repl$SparkILoop$$in", reader)
+    scalaCompleter = reader.completion.completer()
+
+    createSparkContext()
+  }
+
+  override def close(): Unit = {
+    super.close()
+    if (sparkILoop != null) {
+      callMethod(sparkILoop, "org$apache$spark$repl$SparkILoop$$closeInterpreter")
+    }
+  }
+
+  protected override def interpret(code: String, context: InterpreterContext): InterpreterResult = {
+    if (context != null) {
+      interpreterOutput.setInterpreterOutput(context.out)
+      context.out.clear()
+    } else {
+      interpreterOutput.setInterpreterOutput(null)
+    }
+
+    Console.withOut(if (context != null) context.out else Console.out) {
+      interpreterOutput.ignoreLeadingNewLinesFromScalaReporter()
+      // add print("") at the end in case the last line is comment which lead to INCOMPLETE
+      val lines = code.split("\\n") ++ List("print(\"\")")
+      var incompleteCode = ""
+      var lastStatus: InterpreterResult.Code = null
+      for (line <- lines if !line.trim.isEmpty) {
+        val nextLine = if (incompleteCode != "") {
+          incompleteCode + "\n" + line
+        } else {
+          line
+        }
+        scalaInterpret(nextLine) match {
+          case scala.tools.nsc.interpreter.IR.Success =>
+            // continue the next line
+            incompleteCode = ""
+            lastStatus = InterpreterResult.Code.SUCCESS
+          case error@scala.tools.nsc.interpreter.IR.Error =>
+            return new InterpreterResult(InterpreterResult.Code.ERROR)
+          case scala.tools.nsc.interpreter.IR.Incomplete =>
+            // put this line into inCompleteCode for the next execution.
+            incompleteCode = incompleteCode + "\n" + line
+            lastStatus = InterpreterResult.Code.INCOMPLETE
+        }
+      }
+      // flush all output before returning result to frontend
+      Console.flush()
+      interpreterOutput.setInterpreterOutput(null)
+      return new InterpreterResult(lastStatus)
+    }
+  }
+
+  def scalaInterpret(code: String): scala.tools.nsc.interpreter.IR.Result =
+    sparkILoop.interpret(code)
+
+  protected def bind(name: String, tpe: String, value: Object, modifier: List[String]): Unit = {
+    sparkILoop.beQuietDuring {
+      sparkILoop.bind(name, tpe, value, modifier)
+    }
+  }
+
+}
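
A note on the interpret() loop above: the same feed-one-line-at-a-time strategy can be reproduced against a bare scala.tools.nsc.interpreter.IMain, outside of Spark. The sketch below is illustrative only and assumes nothing beyond the Scala compiler on the classpath; the incompleteCode accumulation and the trailing print("") guard mirror the diff, while ReplFeedSketch and its sample input are invented for the example.

  import scala.tools.nsc.Settings
  import scala.tools.nsc.interpreter.{IMain, IR}

  object ReplFeedSketch extends App {
    val settings = new Settings()
    settings.usejavacp.value = true            // reuse the launching JVM's classpath
    val intp = new IMain(settings)

    // sample input: the first statement spans two lines, and the input ends in a comment
    val code = "val x = 1 +\n2\n// trailing comment"
    // the appended print("") keeps a trailing comment from ending the run as INCOMPLETE
    val lines = code.split("\\n") ++ List("print(\"\")")
    var incompleteCode = ""
    for (line <- lines if !line.trim.isEmpty) {
      val nextLine = if (incompleteCode != "") incompleteCode + "\n" + line else line
      intp.interpret(nextLine) match {
        case IR.Success    => incompleteCode = ""         // statement complete, reset buffer
        case IR.Incomplete => incompleteCode = nextLine   // buffer and wait for the next line
        case IR.Error      => sys.error(s"interpret failed on: $nextLine")
      }
    }
    intp.close()
  }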

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/d762b528/spark/scala-2.11/pom.xml
----------------------------------------------------------------------
diff --git a/spark/scala-2.11/pom.xml b/spark/scala-2.11/pom.xml
new file mode 100644
index 0000000..d9113d1
--- /dev/null
+++ b/spark/scala-2.11/pom.xml
@@ -0,0 +1,41 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one or more
+  ~ contributor license agreements.  See the NOTICE file distributed with
+  ~ this work for additional information regarding copyright ownership.
+  ~ The ASF licenses this file to You under the Apache License, Version 2.0
+  ~ (the "License"); you may not use this file except in compliance with
+  ~ the License.  You may obtain a copy of the License at
+  ~
+  ~    http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing, software
+  ~ distributed under the License is distributed on an "AS IS" BASIS,
+  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  ~ See the License for the specific language governing permissions and
+  ~ limitations under the License.
+  -->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>org.apache.zeppelin</groupId>
+  <artifactId>spark-scala-2.11</artifactId>
+  <version>0.9.0-SNAPSHOT</version>
+  <packaging>jar</packaging>
+  <name>Spark Interpreter: Scala_2.11</name>
+
+  <parent>
+    <groupId>org.apache.zeppelin</groupId>
+    <artifactId>spark-scala-parent</artifactId>
+    <version>0.9.0-SNAPSHOT</version>
+    <relativePath>../spark-scala-parent/pom.xml</relativePath>
+  </parent>
+
+  <properties>
+    <scala.version>2.11.8</scala.version>
+    <scala.binary.version>2.11</scala.binary.version>
+    <scala.compile.version>${scala.version}</scala.compile.version>
+  </properties>
+
+</project>

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/d762b528/spark/scala-2.11/spark-scala-parent
----------------------------------------------------------------------
diff --git a/spark/scala-2.11/spark-scala-parent b/spark/scala-2.11/spark-scala-parent
new file mode 120000
index 0000000..e5e899e
--- /dev/null
+++ b/spark/scala-2.11/spark-scala-parent
@@ -0,0 +1 @@
+../spark-scala-parent
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/d762b528/spark/scala-2.11/src/main/resources/log4j.properties
----------------------------------------------------------------------
diff --git a/spark/scala-2.11/src/main/resources/log4j.properties b/spark/scala-2.11/src/main/resources/log4j.properties
new file mode 100644
index 0000000..0c90b21
--- /dev/null
+++ b/spark/scala-2.11/src/main/resources/log4j.properties
@@ -0,0 +1,50 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Direct log messages to stdout
+log4j.appender.stdout=org.apache.log4j.ConsoleAppender
+log4j.appender.stdout.Target=System.out
+log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
+log4j.appender.stdout.layout.ConversionPattern=%d{ABSOLUTE} %5p %c:%L - %m%n
+#log4j.appender.stdout.layout.ConversionPattern=
+#%5p [%t] (%F:%L) - %m%n
+#%-4r [%t] %-5p %c %x - %m%n
+#
+
+# Root logger option
+log4j.rootLogger=INFO, stdout
+ 
+#mute some noisy guys
+log4j.logger.org.apache.hadoop.mapred=WARN
+log4j.logger.org.apache.hadoop.hive.ql=WARN
+log4j.logger.org.apache.hadoop.hive.metastore=WARN
+log4j.logger.org.apache.hadoop.hive.service.HiveServer=WARN
+log4j.logger.org.apache.zeppelin.scheduler=WARN
+
+log4j.logger.org.quartz=WARN
+log4j.logger.DataNucleus=WARN
+log4j.logger.DataNucleus.MetaData=ERROR
+log4j.logger.DataNucleus.Datastore=ERROR
+
+# Log all JDBC parameters
+log4j.logger.org.hibernate.type=ALL
+
+log4j.logger.org.apache.zeppelin.interpreter=DEBUG
+log4j.logger.org.apache.zeppelin.spark=DEBUG
+
+
+log4j.logger.org.apache.spark.repl.Main=INFO

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/d762b528/spark/scala-2.11/src/main/scala/org/apache/zeppelin/spark/SparkScala211Interpreter.scala
----------------------------------------------------------------------
diff --git a/spark/scala-2.11/src/main/scala/org/apache/zeppelin/spark/SparkScala211Interpreter.scala b/spark/scala-2.11/src/main/scala/org/apache/zeppelin/spark/SparkScala211Interpreter.scala
new file mode 100644
index 0000000..e145260
--- /dev/null
+++ b/spark/scala-2.11/src/main/scala/org/apache/zeppelin/spark/SparkScala211Interpreter.scala
@@ -0,0 +1,140 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.spark
+
+import java.io.{BufferedReader, File}
+import java.net.URLClassLoader
+import java.nio.file.{Files, Paths}
+
+import org.apache.spark.SparkConf
+import org.apache.zeppelin.interpreter.thrift.InterpreterCompletion
+import org.apache.zeppelin.interpreter.util.InterpreterOutputStream
+import org.apache.zeppelin.interpreter.{InterpreterContext, InterpreterResult}
+import org.slf4j.LoggerFactory
+import org.slf4j.Logger
+
+import scala.tools.nsc.Settings
+import scala.tools.nsc.interpreter._
+
+/**
+  * SparkInterpreter for scala-2.11
+  */
+class SparkScala211Interpreter(override val conf: SparkConf,
+                               override val depFiles: java.util.List[String])
+  extends BaseSparkScalaInterpreter(conf, depFiles) {
+
+  lazy override val LOGGER: Logger = LoggerFactory.getLogger(getClass)
+
+  private var sparkILoop: ILoop = _
+
+  override val interpreterOutput = new InterpreterOutputStream(LOGGER)
+
+  override def open(): Unit = {
+    super.open()
+    if (conf.get("spark.master", "local") == "yarn-client") {
+      System.setProperty("SPARK_YARN_MODE", "true")
+    }
+    // Only Spark1 requires to create http server, Spark2 removes HttpServer class.
+    val rootDir = conf.get("spark.repl.classdir", System.getProperty("java.io.tmpdir"))
+    val outputDir = Files.createTempDirectory(Paths.get(rootDir), "spark").toFile
+    outputDir.deleteOnExit()
+    conf.set("spark.repl.class.outputDir", outputDir.getAbsolutePath)
+    startHttpServer(outputDir).foreach { case (server, uri) =>
+      sparkHttpServer = server
+      conf.set("spark.repl.class.uri", uri)
+    }
+
+    val settings = new Settings()
+    settings.processArguments(List("-Yrepl-class-based",
+      "-Yrepl-outdir", s"${outputDir.getAbsolutePath}"), true)
+    settings.embeddedDefaults(Thread.currentThread().getContextClassLoader())
+    settings.usejavacp.value = true
+    settings.classpath.value = getUserJars.mkString(File.pathSeparator)
+
+    val replOut = new JPrintWriter(interpreterOutput, true)
+    sparkILoop = new ILoop(None, replOut)
+    sparkILoop.settings = settings
+    sparkILoop.createInterpreter()
+
+    val in0 = getField(sparkILoop, "scala$tools$nsc$interpreter$ILoop$$in0").asInstanceOf[Option[BufferedReader]]
+    val reader = in0.fold(sparkILoop.chooseReader(settings))(r => SimpleReader(r, replOut, interactive = true))
+
+    sparkILoop.in = reader
+    sparkILoop.initializeSynchronous()
+    callMethod(sparkILoop, "scala$tools$nsc$interpreter$ILoop$$loopPostInit")
+    this.scalaCompleter = reader.completion.completer()
+
+    createSparkContext()
+  }
+
+  protected def bind(name: String, tpe: String, value: Object, modifier: List[String]): Unit = {
+    sparkILoop.beQuietDuring {
+      sparkILoop.bind(name, tpe, value, modifier)
+    }
+  }
+
+
+  override def close(): Unit = {
+    super.close()
+    if (sparkILoop != null) {
+      sparkILoop.closeInterpreter()
+    }
+  }
+
+  protected override def interpret(code: String, context: InterpreterContext): InterpreterResult = {
+    if (context != null) {
+      interpreterOutput.setInterpreterOutput(context.out)
+      context.out.clear()
+    }
+
+    Console.withOut(if (context != null) context.out else Console.out) {
+      interpreterOutput.ignoreLeadingNewLinesFromScalaReporter()
+      // add print("") at the end in case the last line is comment which lead to INCOMPLETE
+      val lines = code.split("\\n") ++ List("print(\"\")")
+      var incompleteCode = ""
+      var lastStatus: InterpreterResult.Code = null
+      for (line <- lines if !line.trim.isEmpty) {
+        val nextLine = if (incompleteCode != "") {
+          incompleteCode + "\n" + line
+        } else {
+          line
+        }
+        scalaInterpret(nextLine) match {
+          case scala.tools.nsc.interpreter.IR.Success =>
+            // continue the next line
+            incompleteCode = ""
+            lastStatus = InterpreterResult.Code.SUCCESS
+          case error@scala.tools.nsc.interpreter.IR.Error =>
+            return new InterpreterResult(InterpreterResult.Code.ERROR)
+          case scala.tools.nsc.interpreter.IR.Incomplete =>
+            // put this line into inCompleteCode for the next execution.
+            incompleteCode = incompleteCode + "\n" + line
+            lastStatus = InterpreterResult.Code.INCOMPLETE
+        }
+      }
+      // flush all output before returning result to frontend
+      Console.flush()
+      interpreterOutput.setInterpreterOutput(null)
+      return new InterpreterResult(lastStatus)
+    }
+  }
+
+  def scalaInterpret(code: String): scala.tools.nsc.interpreter.IR.Result =
+    sparkILoop.interpret(code)
+
+}
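
A note on the reflection calls used in both interpreter implementations: helpers such as callMethod, getField and setDeclaredField live in BaseSparkScalaInterpreter (not shown in this change) and exist to reach private ILoop/SparkILoop members through their compiler-mangled names, e.g. scala$tools$nsc$interpreter$ILoop$$in0. The following is a simplified, assumed sketch of such helpers for illustration only; the actual signatures in BaseSparkScalaInterpreter may differ.

  import java.lang.reflect.Field

  object ReflectionSketch {
    // Search the class and its superclasses, since the target field may be
    // declared on ILoop while the runtime object is a SparkILoop subclass.
    private def findField(cls: Class[_], name: String): Field =
      try cls.getDeclaredField(name)
      catch {
        case e: NoSuchFieldException =>
          if (cls.getSuperclass != null) findField(cls.getSuperclass, name) else throw e
      }

    def getField(obj: AnyRef, name: String): AnyRef = {
      val f = findField(obj.getClass, name)
      f.setAccessible(true)                    // bypass private access checks
      f.get(obj)
    }

    def setDeclaredField(obj: AnyRef, name: String, value: AnyRef): Unit = {
      val f = findField(obj.getClass, name)
      f.setAccessible(true)
      f.set(obj, value)
    }

    // Scala's qualified-private methods compile to public JVM methods under
    // mangled names, so getMethod (which sees inherited members) can find them.
    def callMethod(obj: AnyRef, name: String,
                   parameterTypes: Array[Class[_]] = Array.empty[Class[_]],
                   parameters: Array[AnyRef] = Array.empty[AnyRef]): AnyRef =
      obj.getClass.getMethod(name, parameterTypes: _*).invoke(obj, parameters: _*)
  }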

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/d762b528/spark/spark-dependencies/pom.xml
----------------------------------------------------------------------
diff --git a/spark/spark-dependencies/pom.xml b/spark/spark-dependencies/pom.xml
new file mode 100644
index 0000000..7643dc9
--- /dev/null
+++ b/spark/spark-dependencies/pom.xml
@@ -0,0 +1,591 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one or more
+  ~ contributor license agreements.  See the NOTICE file distributed with
+  ~ this work for additional information regarding copyright ownership.
+  ~ The ASF licenses this file to You under the Apache License, Version 2.0
+  ~ (the "License"); you may not use this file except in compliance with
+  ~ the License.  You may obtain a copy of the License at
+  ~
+  ~    http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing, software
+  ~ distributed under the License is distributed on an "AS IS" BASIS,
+  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  ~ See the License for the specific language governing permissions and
+  ~ limitations under the License.
+  -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+
+  <parent>
+    <artifactId>spark-parent</artifactId>
+    <groupId>org.apache.zeppelin</groupId>
+    <version>0.9.0-SNAPSHOT</version>
+    <relativePath>..</relativePath>
+  </parent>
+
+  <groupId>org.apache.zeppelin</groupId>
+  <artifactId>zeppelin-spark-dependencies_2.10</artifactId>
+  <packaging>jar</packaging>
+  <version>0.9.0-SNAPSHOT</version>
+  <name>Zeppelin: Spark dependencies</name>
+  <description>Zeppelin spark support</description>
+
+  <properties>
+    <!-- library version defined in this section brought from spark 1.4.1 and it's dependency.
+         Therefore changing only spark.version is not going to be enough when this module
+         support new version of spark to make the new version as default supported version.
+
+         Each profile (spark-2.0, spark-1.6, etc) will overrides necessary dependency version.
+         So we'll make one of those profile 'activateByDefault' to make it default supported version
+         instead of changing spark.version in this section.
+    -->
+
+    <hadoop.version>2.3.0</hadoop.version>
+    <yarn.version>${hadoop.version}</yarn.version>
+    <avro.version>1.7.7</avro.version>
+    <avro.mapred.classifier/>
+    <jets3t.version>0.7.1</jets3t.version>
+    <protobuf.version>2.4.1</protobuf.version>
+
+    <akka.group>org.spark-project.akka</akka.group>
+    <akka.version>2.3.4-spark</akka.version>
+
+    <spark.archive>spark-${spark.version}</spark.archive>
+    <spark.src.download.url>
+      http://d3kbcqa49mib13.cloudfront.net/${spark.archive}.tgz
+    </spark.src.download.url>
+    <spark.bin.download.url>
+      http://d3kbcqa49mib13.cloudfront.net/${spark.archive}-bin-without-hadoop.tgz
+    </spark.bin.download.url>
+
+    <!--plugin versions-->
+    <plugin.shade.version>2.3</plugin.shade.version>
+  </properties>
+
+  <dependencyManagement>
+    <dependencies>
+      <dependency>
+        <groupId>org.apache.avro</groupId>
+        <artifactId>avro</artifactId>
+        <version>${avro.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.avro</groupId>
+        <artifactId>avro-ipc</artifactId>
+        <version>${avro.version}</version>
+        <exclusions>
+          <exclusion>
+            <groupId>io.netty</groupId>
+            <artifactId>netty</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>org.mortbay.jetty</groupId>
+            <artifactId>jetty</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>org.mortbay.jetty</groupId>
+            <artifactId>jetty-util</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>org.mortbay.jetty</groupId>
+            <artifactId>servlet-api</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>org.apache.velocity</groupId>
+            <artifactId>velocity</artifactId>
+          </exclusion>
+        </exclusions>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.avro</groupId>
+        <artifactId>avro-mapred</artifactId>
+        <version>${avro.version}</version>
+        <classifier>${avro.mapred.classifier}</classifier>
+        <exclusions>
+          <exclusion>
+            <groupId>io.netty</groupId>
+            <artifactId>netty</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>org.mortbay.jetty</groupId>
+            <artifactId>jetty</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>org.mortbay.jetty</groupId>
+            <artifactId>jetty-util</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>org.mortbay.jetty</groupId>
+            <artifactId>servlet-api</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>org.apache.velocity</groupId>
+            <artifactId>velocity</artifactId>
+          </exclusion>
+        </exclusions>
+      </dependency>
+
+      <!-- See SPARK-1556 for info on this dependency: -->
+      <dependency>
+        <groupId>net.java.dev.jets3t</groupId>
+        <artifactId>jets3t</artifactId>
+        <version>${jets3t.version}</version>
+        <scope>runtime</scope>
+        <exclusions>
+          <exclusion>
+            <groupId>commons-logging</groupId>
+            <artifactId>commons-logging</artifactId>
+          </exclusion>
+        </exclusions>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-yarn-api</artifactId>
+        <version>${yarn.version}</version>
+        <exclusions>
+          <exclusion>
+            <groupId>asm</groupId>
+            <artifactId>asm</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>org.ow2.asm</groupId>
+            <artifactId>asm</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>org.jboss.netty</groupId>
+            <artifactId>netty</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>commons-logging</groupId>
+            <artifactId>commons-logging</artifactId>
+          </exclusion>
+        </exclusions>
+      </dependency>
+
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-yarn-common</artifactId>
+        <version>${yarn.version}</version>
+        <exclusions>
+          <exclusion>
+            <groupId>asm</groupId>
+            <artifactId>asm</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>org.ow2.asm</groupId>
+            <artifactId>asm</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>org.jboss.netty</groupId>
+            <artifactId>netty</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>javax.servlet</groupId>
+            <artifactId>servlet-api</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>commons-logging</groupId>
+            <artifactId>commons-logging</artifactId>
+          </exclusion>
+        </exclusions>
+      </dependency>
+
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-yarn-server-web-proxy</artifactId>
+        <version>${yarn.version}</version>
+        <exclusions>
+          <exclusion>
+            <groupId>asm</groupId>
+            <artifactId>asm</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>org.ow2.asm</groupId>
+            <artifactId>asm</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>org.jboss.netty</groupId>
+            <artifactId>netty</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>javax.servlet</groupId>
+            <artifactId>servlet-api</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>commons-logging</groupId>
+            <artifactId>commons-logging</artifactId>
+          </exclusion>
+        </exclusions>
+      </dependency>
+
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-yarn-client</artifactId>
+        <version>${yarn.version}</version>
+        <exclusions>
+          <exclusion>
+            <groupId>asm</groupId>
+            <artifactId>asm</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>org.ow2.asm</groupId>
+            <artifactId>asm</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>org.jboss.netty</groupId>
+            <artifactId>netty</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>javax.servlet</groupId>
+            <artifactId>servlet-api</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>commons-logging</groupId>
+            <artifactId>commons-logging</artifactId>
+          </exclusion>
+        </exclusions>
+      </dependency>
+    </dependencies>
+  </dependencyManagement>
+
+  <dependencies>
+    <!-- Spark -->
+    <dependency>
+      <groupId>org.apache.spark</groupId>
+      <artifactId>spark-core_${scala.binary.version}</artifactId>
+      <version>${spark.version}</version>
+      <exclusions>
+        <exclusion>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-client</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.spark</groupId>
+      <artifactId>spark-repl_${scala.binary.version}</artifactId>
+      <version>${spark.version}</version>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.spark</groupId>
+      <artifactId>spark-sql_${scala.binary.version}</artifactId>
+      <version>${spark.version}</version>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.spark</groupId>
+      <artifactId>spark-hive_${scala.binary.version}</artifactId>
+      <version>${spark.version}</version>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.spark</groupId>
+      <artifactId>spark-streaming_${scala.binary.version}</artifactId>
+      <version>${spark.version}</version>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.spark</groupId>
+      <artifactId>spark-catalyst_${scala.binary.version}</artifactId>
+      <version>${spark.version}</version>
+    </dependency>
+
+    <!-- hadoop -->
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-client</artifactId>
+      <version>${hadoop.version}</version>
+    </dependency>
+
+
+    <dependency>
+      <groupId>com.google.protobuf</groupId>
+      <artifactId>protobuf-java</artifactId>
+      <version>${protobuf.version}</version>
+    </dependency>
+
+    <dependency>
+      <groupId>${akka.group}</groupId>
+      <artifactId>akka-actor_${scala.binary.version}</artifactId>
+      <version>${akka.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>${akka.group}</groupId>
+      <artifactId>akka-remote_${scala.binary.version}</artifactId>
+      <version>${akka.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>${akka.group}</groupId>
+      <artifactId>akka-slf4j_${scala.binary.version}</artifactId>
+      <version>${akka.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>${akka.group}</groupId>
+      <artifactId>akka-testkit_${scala.binary.version}</artifactId>
+      <version>${akka.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>${akka.group}</groupId>
+      <artifactId>akka-zeromq_${scala.binary.version}</artifactId>
+      <version>${akka.version}</version>
+      <exclusions>
+        <exclusion>
+          <groupId>${akka.group}</groupId>
+          <artifactId>akka-actor_${scala.binary.version}</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+
+    <!-- yarn (not supported for Spark v1.5.0 or higher) -->
+    <dependency>
+      <groupId>org.apache.spark</groupId>
+      <artifactId>spark-yarn_${scala.binary.version}</artifactId>
+      <version>${spark.version}</version>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-yarn-api</artifactId>
+      <version>${yarn.version}</version>
+    </dependency>
+
+  </dependencies>
+
+  <build>
+    <plugins>
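+      <!-- Rebinding the inherited "enforce" execution to phase "none" disables the enforcer checks for this module. -->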
+      <plugin>
+        <artifactId>maven-enforcer-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>enforce</id>
+            <phase>none</phase>
+          </execution>
+        </executions>
+      </plugin>
+
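+      <!-- Fork a fresh JVM per test class; -XX:MaxPermSize only takes effect on pre-Java-8 JVMs. -->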
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-surefire-plugin</artifactId>
+        <configuration>
+          <forkCount>1</forkCount>
+          <reuseForks>false</reuseForks>
+          <argLine>-Xmx1024m -XX:MaxPermSize=256m</argLine>
+        </configuration>
+      </plugin>
+
+      <plugin>
+        <groupId>com.googlecode.maven-download-plugin</groupId>
+        <artifactId>download-maven-plugin</artifactId>
+        <version>${plugin.download.version}</version>
+      </plugin>
+
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-shade-plugin</artifactId>
+        <version>${plugin.shade.version}</version>
+        <configuration>
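+          <!-- Drop datanucleus classes (copied separately below) and jar signature files; stale .SF/.DSA/.RSA entries would make the shaded jar fail signature verification. -->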
+          <filters>
+            <filter>
+              <artifact>*:*</artifact>
+              <excludes>
+                <exclude>org/datanucleus/**</exclude>
+                <exclude>META-INF/*.SF</exclude>
+                <exclude>META-INF/*.DSA</exclude>
+                <exclude>META-INF/*.RSA</exclude>
+              </excludes>
+            </filter>
+          </filters>
+          <transformers>
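+            <!-- Merge META-INF/services entries and concatenate the reference.conf defaults (used by Akka) from all shaded jars. -->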
+            <transformer implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer"/>
+            <transformer implementation="org.apache.maven.plugins.shade.resource.AppendingTransformer">
+              <resource>reference.conf</resource>
+            </transformer>
+          </transformers>
+        </configuration>
+        <executions>
+          <execution>
+            <phase>package</phase>
+            <goals>
+              <goal>shade</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+
+      <!-- Deploy datanucleus jars to the interpreter/spark directory -->
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-dependency-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>copy-interpreter-dependencies</id>
+            <phase>package</phase>
+            <goals>
+              <goal>copy-dependencies</goal>
+            </goals>
+            <configuration>
+              <skip>true</skip>
+            </configuration>
+          </execution>
+
+          <execution>
+            <id>copy-spark-interpreter-dependencies</id>
+            <phase>package</phase>
+            <goals>
+              <goal>copy-dependencies</goal>
+            </goals>
+            <configuration>
+              <outputDirectory>${project.build.directory}/../../../interpreter/spark/dep</outputDirectory>
+              <overWriteReleases>false</overWriteReleases>
+              <overWriteSnapshots>false</overWriteSnapshots>
+              <overWriteIfNewer>true</overWriteIfNewer>
+              <includeGroupIds>org.datanucleus</includeGroupIds>
+            </configuration>
+          </execution>
+          <execution>
+            <id>copy-artifact</id>
+            <phase>package</phase>
+            <goals>
+              <goal>copy</goal>
+            </goals>
+            <configuration>
+              <outputDirectory>${project.build.directory}/../../../interpreter/spark/dep</outputDirectory>
+              <overWriteReleases>false</overWriteReleases>
+              <overWriteSnapshots>false</overWriteSnapshots>
+              <overWriteIfNewer>true</overWriteIfNewer>
+              <artifactItems>
+                <artifactItem>
+                  <groupId>${project.groupId}</groupId>
+                  <artifactId>${project.artifactId}</artifactId>
+                  <version>${project.version}</version>
+                  <type>${project.packaging}</type>
+                </artifactItem>
+              </artifactItems>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+
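+      <!-- Skip the copy-interpreter-setting execution inherited from interpreter-parent; this aggregator pom ships no interpreter setting of its own. -->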
+      <plugin>
+        <artifactId>maven-resources-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>copy-interpreter-setting</id>
+            <phase>none</phase>
+            <configuration>
+              <skip>true</skip>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+
+      <!-- include pyspark by default -->
+      <plugin>
+        <groupId>com.googlecode.maven-download-plugin</groupId>
+        <artifactId>download-maven-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>download-pyspark-files</id>
+            <phase>validate</phase>
+            <goals>
+              <goal>wget</goal>
+            </goals>
+            <configuration>
+              <readTimeOut>60000</readTimeOut>
+              <retries>5</retries>
+              <unpack>true</unpack>
+              <url>${spark.src.download.url}</url>
+              <outputDirectory>${project.build.directory}</outputDirectory>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+
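+      <!-- Also clear the sibling python module's build output on mvn clean. -->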
+      <plugin>
+        <artifactId>maven-clean-plugin</artifactId>
+        <configuration>
+          <filesets>
+            <fileset>
+              <directory>${basedir}/../python/build</directory>
+            </fileset>
+          </filesets>
+        </configuration>
+      </plugin>
+
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-antrun-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>zip-pyspark-files</id>
+            <phase>generate-resources</phase>
+            <goals>
+              <goal>run</goal>
+            </goals>
+            <configuration>
+              <target>
+                <delete dir="../../interpreter/spark/pyspark"/>
+                <copy todir="../../interpreter/spark/pyspark"
+                      file="${project.build.directory}/${spark.archive}/python/lib/py4j-${py4j.version}-src.zip"/>
+                <zip destfile="${project.build.directory}/../../../interpreter/spark/pyspark/pyspark.zip"
+                     basedir="${project.build.directory}/${spark.archive}/python"
+                     includes="pyspark/*.py,pyspark/**/*.py"/>
+              </target>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+
+      <!-- include sparkr by default -->
+      <plugin>
+        <groupId>com.googlecode.maven-download-plugin</groupId>
+        <artifactId>download-maven-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>download-sparkr-files</id>
+            <phase>validate</phase>
+            <goals>
+              <goal>wget</goal>
+            </goals>
+            <configuration>
+              <readTimeOut>60000</readTimeOut>
+              <retries>5</retries>
+              <url>${spark.bin.download.url}</url>
+              <unpack>true</unpack>
+              <outputDirectory>${project.build.directory}</outputDirectory>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <artifactId>maven-resources-plugin</artifactId>
+        <version>2.7</version>
+        <executions>
+          <execution>
+            <id>copy-sparkr-files</id>
+            <phase>generate-resources</phase>
+            <goals>
+              <goal>copy-resources</goal>
+            </goals>
+            <configuration>
+              <outputDirectory>${project.build.directory}/../../../interpreter/spark/R/lib</outputDirectory>
+              <resources>
+                <resource>
+                  <directory>${project.build.directory}/spark-${spark.version}-bin-without-hadoop/R/lib</directory>
+                </resource>
+              </resources>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+</project>

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/d762b528/spark/spark-scala-parent/pom.xml
----------------------------------------------------------------------
diff --git a/spark/spark-scala-parent/pom.xml b/spark/spark-scala-parent/pom.xml
new file mode 100644
index 0000000..830fa59
--- /dev/null
+++ b/spark/spark-scala-parent/pom.xml
@@ -0,0 +1,172 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one or more
+  ~ contributor license agreements.  See the NOTICE file distributed with
+  ~ this work for additional information regarding copyright ownership.
+  ~ The ASF licenses this file to You under the Apache License, Version 2.0
+  ~ (the "License"); you may not use this file except in compliance with
+  ~ the License.  You may obtain a copy of the License at
+  ~
+  ~    http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing, software
+  ~ distributed under the License is distributed on an "AS IS" BASIS,
+  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  ~ See the License for the specific language governing permissions and
+  ~ limitations under the License.
+  -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+
+    <parent>
+        <artifactId>spark-parent</artifactId>
+        <groupId>org.apache.zeppelin</groupId>
+        <version>0.9.0-SNAPSHOT</version>
+        <relativePath>../pom.xml</relativePath>
+    </parent>
+
+    <modelVersion>4.0.0</modelVersion>
+    <groupId>org.apache.zeppelin</groupId>
+    <artifactId>spark-scala-parent</artifactId>
+    <version>0.9.0-SNAPSHOT</version>
+    <packaging>pom</packaging>
+
+    <dependencies>
+
+        <dependency>
+            <groupId>org.apache.zeppelin</groupId>
+            <artifactId>zeppelin-interpreter</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.spark</groupId>
+            <artifactId>spark-repl_${scala.binary.version}</artifactId>
+            <version>${spark.version}</version>
+            <scope>provided</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.spark</groupId>
+            <artifactId>spark-core_${scala.binary.version}</artifactId>
+            <version>${spark.version}</version>
+            <scope>provided</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.spark</groupId>
+            <artifactId>spark-hive_${scala.binary.version}</artifactId>
+            <version>${spark.version}</version>
+            <scope>provided</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.scala-lang</groupId>
+            <artifactId>scala-compiler</artifactId>
+            <version>${scala.version}</version>
+            <scope>provided</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.scala-lang</groupId>
+            <artifactId>scala-library</artifactId>
+            <version>${scala.version}</version>
+            <scope>provided</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.scala-lang</groupId>
+            <artifactId>scala-reflect</artifactId>
+            <version>${scala.version}</version>
+            <scope>provided</scope>
+        </dependency>
+
+    </dependencies>
+
+    <build>
+        <plugins>
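+            <!-- Each Scala-version-specific child module compiles the shared sources and resources kept under spark-scala-parent/src in addition to its own. -->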
+            <plugin>
+                <groupId>org.codehaus.mojo</groupId>
+                <artifactId>build-helper-maven-plugin</artifactId>
+                <executions>
+                    <execution>
+                        <id>add-scala-sources</id>
+                        <phase>generate-sources</phase>
+                        <goals>
+                            <goal>add-source</goal>
+                        </goals>
+                        <configuration>
+                            <sources>
+                                <source>${project.basedir}/../spark-scala-parent/src/main/scala</source>
+                            </sources>
+                        </configuration>
+                    </execution>
+                    <execution>
+                        <id>add-scala-test-sources</id>
+                        <phase>generate-test-sources</phase>
+                        <goals>
+                            <goal>add-test-source</goal>
+                        </goals>
+                        <configuration>
+                            <sources>
+                                <source>${project.basedir}/../spark-scala-parent/src/test/scala</source>
+                            </sources>
+                        </configuration>
+                    </execution>
+                    <execution>
+                        <id>add-resource</id>
+                        <phase>generate-resources</phase>
+                        <goals>
+                            <goal>add-resource</goal>
+                        </goals>
+                        <configuration>
+                            <resources>
+                                <resource>
+                                    <directory>${project.basedir}/../spark-scala-parent/src/main/resources</directory>
+                                </resource>
+                            </resources>
+                        </configuration>
+                    </execution>
+                    <execution>
+                        <id>add-test-resource</id>
+                        <phase>generate-test-resources</phase>
+                        <goals>
+                            <goal>add-test-resource</goal>
+                        </goals>
+                        <configuration>
+                            <resources>
+                                <resource>
+                                    <directory>${project.basedir}/../spark-scala-parent/src/test/resources</directory>
+                                </resource>
+                            </resources>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+
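+            <!-- No dependencies to copy for this shared parent; skip the inherited dependency-plugin steps. -->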
+            <plugin>
+                <artifactId>maven-dependency-plugin</artifactId>
+                <configuration>
+                    <skip>true</skip>
+                </configuration>
+            </plugin>
+
+            <plugin>
+                <artifactId>maven-resources-plugin</artifactId>
+                <executions>
+                    <execution>
+                        <id>copy-interpreter-setting</id>
+                        <phase>none</phase>
+                        <configuration>
+                            <skip>true</skip>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+
+        </plugins>
+    </build>
+
+</project>
\ No newline at end of file
