Repository: zeppelin
Updated Branches:
  refs/heads/master 26a39df08 -> 32517c9d9


http://git-wip-us.apache.org/repos/asf/zeppelin/blob/32517c9d/spark/src/main/resources/python/zeppelin_ipyspark.py
----------------------------------------------------------------------
diff --git a/spark/src/main/resources/python/zeppelin_ipyspark.py 
b/spark/src/main/resources/python/zeppelin_ipyspark.py
new file mode 100644
index 0000000..324f481
--- /dev/null
+++ b/spark/src/main/resources/python/zeppelin_ipyspark.py
@@ -0,0 +1,53 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+
+from py4j.java_gateway import java_import, JavaGateway, GatewayClient
+from pyspark.conf import SparkConf
+from pyspark.context import SparkContext
+
+# for back compatibility
+from pyspark.sql import SQLContext
+
+# start JVM gateway
+client = GatewayClient(port=${JVM_GATEWAY_PORT})
+gateway = JavaGateway(client, auto_convert=True)
+
+java_import(gateway.jvm, "org.apache.spark.SparkEnv")
+java_import(gateway.jvm, "org.apache.spark.SparkConf")
+java_import(gateway.jvm, "org.apache.spark.api.java.*")
+java_import(gateway.jvm, "org.apache.spark.api.python.*")
+java_import(gateway.jvm, "org.apache.spark.mllib.api.python.*")
+
+intp = gateway.entry_point
+jsc = intp.getJavaSparkContext()
+
+java_import(gateway.jvm, "org.apache.spark.sql.*")
+java_import(gateway.jvm, "org.apache.spark.sql.hive.*")
+java_import(gateway.jvm, "scala.Tuple2")
+
+jconf = jsc.getConf()
+conf = SparkConf(_jvm=gateway.jvm, _jconf=jconf)
+sc = _zsc_ = SparkContext(jsc=jsc, gateway=gateway, conf=conf)
+
+if intp.isSpark2():
+    from pyspark.sql import SparkSession
+
+    spark = __zSpark__ = SparkSession(sc, intp.getSparkSession())
+    sqlContext = sqlc = __zSqlc__ = __zSpark__._wrapped
+else:
+    sqlContext = sqlc = __zSqlc__ = SQLContext(sparkContext=sc, sqlContext=intp.getSQLContext())

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/32517c9d/spark/src/main/sparkr-resources/interpreter-setting.json
----------------------------------------------------------------------
diff --git a/spark/src/main/sparkr-resources/interpreter-setting.json 
b/spark/src/main/sparkr-resources/interpreter-setting.json
index d0fbd3e..300aff0 100644
--- a/spark/src/main/sparkr-resources/interpreter-setting.json
+++ b/spark/src/main/sparkr-resources/interpreter-setting.json
@@ -189,5 +189,16 @@
     "editor": {
       "language": "r"
     }
+  },
+
+  {
+    "group": "spark",
+    "name": "ipyspark",
+    "className": "org.apache.zeppelin.spark.IPySparkInterpreter",
+    "properties": {},
+    "editor": {
+      "language": "python",
+      "editOnDblClick": false
+    }
   }
 ]

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/32517c9d/spark/src/test/java/org/apache/zeppelin/spark/IPySparkInterpreterTest.java
----------------------------------------------------------------------
diff --git 
a/spark/src/test/java/org/apache/zeppelin/spark/IPySparkInterpreterTest.java 
b/spark/src/test/java/org/apache/zeppelin/spark/IPySparkInterpreterTest.java
new file mode 100644
index 0000000..5a2e884
--- /dev/null
+++ b/spark/src/test/java/org/apache/zeppelin/spark/IPySparkInterpreterTest.java
@@ -0,0 +1,203 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.spark;
+
+
+import com.google.common.io.Files;
+import org.apache.zeppelin.display.AngularObjectRegistry;
+import org.apache.zeppelin.display.GUI;
+import org.apache.zeppelin.interpreter.Interpreter;
+import org.apache.zeppelin.interpreter.InterpreterContext;
+import org.apache.zeppelin.interpreter.InterpreterContextRunner;
+import org.apache.zeppelin.interpreter.InterpreterGroup;
+import org.apache.zeppelin.interpreter.InterpreterOutput;
+import org.apache.zeppelin.interpreter.InterpreterOutputListener;
+import org.apache.zeppelin.interpreter.InterpreterResult;
+import org.apache.zeppelin.interpreter.InterpreterResultMessage;
+import org.apache.zeppelin.interpreter.InterpreterResultMessageOutput;
+import org.apache.zeppelin.interpreter.thrift.InterpreterCompletion;
+import org.apache.zeppelin.python.IPythonInterpreterTest;
+import org.apache.zeppelin.resource.LocalResourcePool;
+import org.apache.zeppelin.user.AuthenticationInfo;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Properties;
+import java.util.concurrent.CopyOnWriteArrayList;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+public class IPySparkInterpreterTest {
+
+  private IPySparkInterpreter iPySparkInterpreter;
+  private InterpreterGroup intpGroup;
+
+  @Before
+  public void setup() {
+    Properties p = new Properties();
+    p.setProperty("spark.master", "local[4]");
+    p.setProperty("master", "local[4]");
+    p.setProperty("spark.app.name", "Zeppelin Test");
+    p.setProperty("zeppelin.spark.useHiveContext", "true");
+    p.setProperty("zeppelin.spark.maxResult", "1000");
+    p.setProperty("zeppelin.spark.importImplicit", "true");
+    p.setProperty("zeppelin.pyspark.python", "python");
+    p.setProperty("zeppelin.dep.localrepo", Files.createTempDir().getAbsolutePath());
+
+    intpGroup = new InterpreterGroup();
+    intpGroup.put("session_1", new LinkedList<Interpreter>());
+
+    SparkInterpreter sparkInterpreter = new SparkInterpreter(p);
+    intpGroup.get("session_1").add(sparkInterpreter);
+    sparkInterpreter.setInterpreterGroup(intpGroup);
+    sparkInterpreter.open();
+
+    iPySparkInterpreter = new IPySparkInterpreter(p);
+    intpGroup.get("session_1").add(iPySparkInterpreter);
+    iPySparkInterpreter.setInterpreterGroup(intpGroup);
+    iPySparkInterpreter.open();
+  }
+
+
+  @After
+  public void tearDown() {
+    if (iPySparkInterpreter != null) {
+      iPySparkInterpreter.close();
+    }
+  }
+
+  @Test
+  public void testBasics() throws InterruptedException, IOException {
+    // all the ipython test should pass too.
+    IPythonInterpreterTest.testInterpreter(iPySparkInterpreter);
+
+    // rdd
+    InterpreterContext context = getInterpreterContext();
+    InterpreterResult result = iPySparkInterpreter.interpret("sc.range(1,10).sum()", context);
+    Thread.sleep(100);
+    assertEquals(InterpreterResult.Code.SUCCESS, result.code());
+    List<InterpreterResultMessage> interpreterResultMessages = context.out.getInterpreterResultMessages();
+    assertEquals("45", interpreterResultMessages.get(0).getData());
+
+    context = getInterpreterContext();
+    result = iPySparkInterpreter.interpret("sc.version", context);
+    Thread.sleep(100);
+    assertEquals(InterpreterResult.Code.SUCCESS, result.code());
+    interpreterResultMessages = context.out.getInterpreterResultMessages();
+    // spark sql
+    context = getInterpreterContext();
+    if (interpreterResultMessages.get(0).getData().startsWith("'1.") ||
+        interpreterResultMessages.get(0).getData().startsWith("u'1.")) {
+      result = iPySparkInterpreter.interpret("df = sqlContext.createDataFrame([(1,'a'),(2,'b')])\ndf.show()", context);
+      assertEquals(InterpreterResult.Code.SUCCESS, result.code());
+      interpreterResultMessages = context.out.getInterpreterResultMessages();
+      assertEquals(
+          "+---+---+\n" +
+          "| _1| _2|\n" +
+          "+---+---+\n" +
+          "|  1|  a|\n" +
+          "|  2|  b|\n" +
+          "+---+---+\n\n", interpreterResultMessages.get(0).getData());
+    } else {
+      result = iPySparkInterpreter.interpret("df = spark.createDataFrame([(1,'a'),(2,'b')])\ndf.show()", context);
+      assertEquals(InterpreterResult.Code.SUCCESS, result.code());
+      interpreterResultMessages = context.out.getInterpreterResultMessages();
+      assertEquals(
+          "+---+---+\n" +
+          "| _1| _2|\n" +
+          "+---+---+\n" +
+          "|  1|  a|\n" +
+          "|  2|  b|\n" +
+          "+---+---+\n\n", interpreterResultMessages.get(0).getData());
+    }
+
+    // cancel
+    final InterpreterContext context2 = getInterpreterContext();
+
+    Thread thread = new Thread(){
+      @Override
+      public void run() {
+        InterpreterResult result = iPySparkInterpreter.interpret("import time\nsc.range(1,10).foreach(lambda x: time.sleep(1))", context2);
+        assertEquals(InterpreterResult.Code.ERROR, result.code());
+        List<InterpreterResultMessage> interpreterResultMessages = null;
+        try {
+          interpreterResultMessages = context2.out.getInterpreterResultMessages();
+          assertTrue(interpreterResultMessages.get(0).getData().contains("cancelled"));
+        } catch (IOException e) {
+          e.printStackTrace();
+        }
+      }
+    };
+    thread.start();
+
+    // sleep 1 second to wait for the spark job starts
+    Thread.sleep(1000);
+    iPySparkInterpreter.cancel(context);
+    thread.join();
+
+    // completions
+    List<InterpreterCompletion> completions = iPySparkInterpreter.completion("sc.ran", 6, getInterpreterContext());
+    assertEquals(1, completions.size());
+    assertEquals("sc.range", completions.get(0).getValue());
+
+    // pyspark streaming
+    context = getInterpreterContext();
+    result = iPySparkInterpreter.interpret(
+        "from pyspark.streaming import StreamingContext\n" +
+        "import time\n" +
+        "ssc = StreamingContext(sc, 1)\n" +
+        "rddQueue = []\n" +
+        "for i in range(5):\n" +
+        "    rddQueue += [ssc.sparkContext.parallelize([j for j in range(1, 1001)], 10)]\n" +
+        "inputStream = ssc.queueStream(rddQueue)\n" +
+        "mappedStream = inputStream.map(lambda x: (x % 10, 1))\n" +
+        "reducedStream = mappedStream.reduceByKey(lambda a, b: a + b)\n" +
+        "reducedStream.pprint()\n" +
+        "ssc.start()\n" +
+        "time.sleep(6)\n" +
+        "ssc.stop(stopSparkContext=False, stopGraceFully=True)", context);
+    Thread.sleep(100);
+    assertEquals(InterpreterResult.Code.SUCCESS, result.code());
+    interpreterResultMessages = context.out.getInterpreterResultMessages();
+    assertEquals(1, interpreterResultMessages.size());
+    assertTrue(interpreterResultMessages.get(0).getData().contains("(0, 100)"));
+  }
+
+  private InterpreterContext getInterpreterContext() {
+    return new InterpreterContext(
+        "noteId",
+        "paragraphId",
+        "replName",
+        "paragraphTitle",
+        "paragraphText",
+        new AuthenticationInfo(),
+        new HashMap<String, Object>(),
+        new GUI(),
+        null,
+        null,
+        null,
+        new InterpreterOutput(null));
+  }
+}

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/32517c9d/spark/src/test/java/org/apache/zeppelin/spark/PySparkInterpreterMatplotlibTest.java
----------------------------------------------------------------------
diff --git 
a/spark/src/test/java/org/apache/zeppelin/spark/PySparkInterpreterMatplotlibTest.java
 
b/spark/src/test/java/org/apache/zeppelin/spark/PySparkInterpreterMatplotlibTest.java
index 7fe8b5e..c6eb1d4 100644
--- 
a/spark/src/test/java/org/apache/zeppelin/spark/PySparkInterpreterMatplotlibTest.java
+++ 
b/spark/src/test/java/org/apache/zeppelin/spark/PySparkInterpreterMatplotlibTest.java
@@ -89,6 +89,7 @@ public class PySparkInterpreterMatplotlibTest {
     p.setProperty("zeppelin.spark.importImplicit", "true");
     p.setProperty("zeppelin.pyspark.python", "python");
     p.setProperty("zeppelin.dep.localrepo", 
tmpDir.newFolder().getAbsolutePath());
+    p.setProperty("zeppelin.spark.useIPython", "false");
     return p;
   }
 
@@ -110,6 +111,15 @@ public class PySparkInterpreterMatplotlibTest {
   public static void setUp() throws Exception {
     intpGroup = new InterpreterGroup();
     intpGroup.put("note", new LinkedList<Interpreter>());
+    context = new InterpreterContext("note", "id", null, "title", "text",
+        new AuthenticationInfo(),
+        new HashMap<String, Object>(),
+        new GUI(),
+        new AngularObjectRegistry(intpGroup.getId(), null),
+        new LocalResourcePool("id"),
+        new LinkedList<InterpreterContextRunner>(),
+        new InterpreterOutput(null));
+    InterpreterContext.set(context);
 
     sparkInterpreter = new SparkInterpreter(getPySparkTestProperties());
     intpGroup.get("note").add(sparkInterpreter);
@@ -121,14 +131,6 @@ public class PySparkInterpreterMatplotlibTest {
     pyspark.setInterpreterGroup(intpGroup);
     pyspark.open();
 
-    context = new InterpreterContext("note", "id", null, "title", "text",
-      new AuthenticationInfo(),
-      new HashMap<String, Object>(),
-      new GUI(),
-      new AngularObjectRegistry(intpGroup.getId(), null),
-      new LocalResourcePool("id"),
-      new LinkedList<InterpreterContextRunner>(),
-      new InterpreterOutput(null));
   }
 
   @AfterClass

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/32517c9d/spark/src/test/java/org/apache/zeppelin/spark/PySparkInterpreterTest.java
----------------------------------------------------------------------
diff --git 
a/spark/src/test/java/org/apache/zeppelin/spark/PySparkInterpreterTest.java 
b/spark/src/test/java/org/apache/zeppelin/spark/PySparkInterpreterTest.java
index ce0c86c..ffdb4e8 100644
--- a/spark/src/test/java/org/apache/zeppelin/spark/PySparkInterpreterTest.java
+++ b/spark/src/test/java/org/apache/zeppelin/spark/PySparkInterpreterTest.java
@@ -59,6 +59,7 @@ public class PySparkInterpreterTest {
     p.setProperty("zeppelin.spark.importImplicit", "true");
     p.setProperty("zeppelin.pyspark.python", "python");
     p.setProperty("zeppelin.dep.localrepo", 
tmpDir.newFolder().getAbsolutePath());
+    p.setProperty("zeppelin.spark.useIPython", "false");
     return p;
   }
 
@@ -81,6 +82,16 @@ public class PySparkInterpreterTest {
     intpGroup = new InterpreterGroup();
     intpGroup.put("note", new LinkedList<Interpreter>());
 
+    context = new InterpreterContext("note", "id", null, "title", "text",
+        new AuthenticationInfo(),
+        new HashMap<String, Object>(),
+        new GUI(),
+        new AngularObjectRegistry(intpGroup.getId(), null),
+        new LocalResourcePool("id"),
+        new LinkedList<InterpreterContextRunner>(),
+        new InterpreterOutput(null));
+    InterpreterContext.set(context);
+
     sparkInterpreter = new SparkInterpreter(getPySparkTestProperties());
     intpGroup.get("note").add(sparkInterpreter);
     sparkInterpreter.setInterpreterGroup(intpGroup);
@@ -91,14 +102,7 @@ public class PySparkInterpreterTest {
     pySparkInterpreter.setInterpreterGroup(intpGroup);
     pySparkInterpreter.open();
 
-    context = new InterpreterContext("note", "id", null, "title", "text",
-      new AuthenticationInfo(),
-      new HashMap<String, Object>(),
-      new GUI(),
-      new AngularObjectRegistry(intpGroup.getId(), null),
-      new LocalResourcePool("id"),
-      new LinkedList<InterpreterContextRunner>(),
-      new InterpreterOutput(null));
+
   }
 
   @AfterClass
@@ -113,6 +117,22 @@ public class PySparkInterpreterTest {
       assertEquals(InterpreterResult.Code.SUCCESS,
         pySparkInterpreter.interpret("a = 1\n", context).code());
     }
+
+    InterpreterResult result = pySparkInterpreter.interpret(
+        "from pyspark.streaming import StreamingContext\n" +
+            "import time\n" +
+            "ssc = StreamingContext(sc, 1)\n" +
+            "rddQueue = []\n" +
+            "for i in range(5):\n" +
+            "    rddQueue += [ssc.sparkContext.parallelize([j for j in range(1, 1001)], 10)]\n" +
+            "inputStream = ssc.queueStream(rddQueue)\n" +
+            "mappedStream = inputStream.map(lambda x: (x % 10, 1))\n" +
+            "reducedStream = mappedStream.reduceByKey(lambda a, b: a + b)\n" +
+            "reducedStream.pprint()\n" +
+            "ssc.start()\n" +
+            "time.sleep(6)\n" +
+            "ssc.stop(stopSparkContext=False, stopGraceFully=True)", context);
+    assertEquals(InterpreterResult.Code.SUCCESS, result.code());
   }
 
   @Test

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/32517c9d/spark/src/test/resources/log4j.properties
----------------------------------------------------------------------
diff --git a/spark/src/test/resources/log4j.properties 
b/spark/src/test/resources/log4j.properties
index b0d1067..3ee61ab 100644
--- a/spark/src/test/resources/log4j.properties
+++ b/spark/src/test/resources/log4j.properties
@@ -45,3 +45,5 @@ log4j.logger.org.hibernate.type=ALL
 
 log4j.logger.org.apache.zeppelin.interpreter=DEBUG
 log4j.logger.org.apache.zeppelin.spark=DEBUG
+log4j.logger.org.apache.zeppelin.python.IPythonInterpreter=DEBUG
+log4j.logger.org.apache.zeppelin.python.IPythonClient=DEBUG

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/32517c9d/testing/install_external_dependencies.sh
----------------------------------------------------------------------
diff --git a/testing/install_external_dependencies.sh 
b/testing/install_external_dependencies.sh
index e88f63b..c5c0676 100755
--- a/testing/install_external_dependencies.sh
+++ b/testing/install_external_dependencies.sh
@@ -44,5 +44,6 @@ if [[ -n "$PYTHON" ]] ; then
   conda update -q conda
   conda info -a
   conda config --add channels conda-forge
-  conda install -q matplotlib pandasql
+  conda install -q matplotlib pandasql ipython jupyter_client ipykernel matplotlib bokeh
+  pip install grpcio ggplot
 fi

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/32517c9d/zeppelin-interpreter/pom.xml
----------------------------------------------------------------------
diff --git a/zeppelin-interpreter/pom.xml b/zeppelin-interpreter/pom.xml
index 109099c..384b9d1 100644
--- a/zeppelin-interpreter/pom.xml
+++ b/zeppelin-interpreter/pom.xml
@@ -215,11 +215,6 @@
     </dependency>
 
     <dependency>
-      <groupId>com.google.guava</groupId>
-      <artifactId>guava</artifactId>
-    </dependency>
-
-    <dependency>
       <groupId>junit</groupId>
       <artifactId>junit</artifactId>
       <scope>test</scope>

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/32517c9d/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/BaseZeppelinContext.java
----------------------------------------------------------------------
diff --git 
a/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/BaseZeppelinContext.java
 
b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/BaseZeppelinContext.java
index 6774531..12376f0 100644
--- 
a/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/BaseZeppelinContext.java
+++ 
b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/BaseZeppelinContext.java
@@ -123,6 +123,10 @@ public abstract class BaseZeppelinContext {
     this.gui = o;
   }
 
+  public GUI getGui() {
+    return gui;
+  }
+
   private void restartInterpreter() {
   }
 

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/32517c9d/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterOutput.java
----------------------------------------------------------------------
diff --git 
a/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterOutput.java
 
b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterOutput.java
index c3d25c9..d89dad0 100644
--- 
a/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterOutput.java
+++ 
b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/InterpreterOutput.java
@@ -23,6 +23,7 @@ import org.slf4j.LoggerFactory;
 import java.io.*;
 import java.net.URISyntaxException;
 import java.net.URL;
+import java.util.ArrayList;
 import java.util.Collections;
 import java.util.LinkedList;
 import java.util.List;
@@ -115,6 +116,16 @@ public class InterpreterOutput extends OutputStream {
     };
   }
 
+  public List<InterpreterResultMessage> getInterpreterResultMessages() throws IOException {
+    synchronized (resultMessageOutputs) {
+      List<InterpreterResultMessage> resultMessages = new ArrayList<>();
+      for (InterpreterResultMessageOutput output : this.resultMessageOutputs) {
+        resultMessages.add(output.toInterpreterResultMessage());
+      }
+      return resultMessages;
+    }
+  }
+
   public InterpreterResultMessageOutput getCurrentOutput() {
     synchronized (resultMessageOutputs) {
       return currentOut;

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/32517c9d/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterServer.java
----------------------------------------------------------------------
diff --git 
a/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterServer.java
 
b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterServer.java
index f501014..3853468 100644
--- 
a/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterServer.java
+++ 
b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/remote/RemoteInterpreterServer.java
@@ -513,6 +513,13 @@ public class RemoteInterpreterServer
        List<InterpreterResultMessage> resultMessages = context.out.toInterpreterResultMessage();
         resultMessages.addAll(result.message());
 
+        for (InterpreterResultMessage msg : resultMessages) {
+          if (msg.getType() == InterpreterResult.Type.IMG) {
+            logger.debug("InterpreterResultMessage: IMAGE_DATA");
+          } else {
+            logger.debug("InterpreterResultMessage: " + msg.toString());
+          }
+        }
         // put result into resource pool
         if (resultMessages.size() > 0) {
           int lastMessageIndex = resultMessages.size() - 1;

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/32517c9d/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/util/InterpreterOutputStream.java
----------------------------------------------------------------------
diff --git 
a/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/util/InterpreterOutputStream.java
 
b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/util/InterpreterOutputStream.java
index 6f2a0b4..258a65d 100644
--- 
a/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/util/InterpreterOutputStream.java
+++ 
b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/util/InterpreterOutputStream.java
@@ -29,7 +29,7 @@ import java.io.IOException;
  */
 public class InterpreterOutputStream extends LogOutputStream {
   private Logger logger;
-  InterpreterOutput interpreterOutput;
+  volatile InterpreterOutput interpreterOutput;
   boolean ignoreLeadingNewLinesFromScalaReporter = false;
 
   public InterpreterOutputStream(Logger logger) {

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/32517c9d/zeppelin-jupyter/pom.xml
----------------------------------------------------------------------
diff --git a/zeppelin-jupyter/pom.xml b/zeppelin-jupyter/pom.xml
index 2390ef1..914ec51 100644
--- a/zeppelin-jupyter/pom.xml
+++ b/zeppelin-jupyter/pom.xml
@@ -52,6 +52,7 @@
     <dependency>
       <groupId>com.google.guava</groupId>
       <artifactId>guava</artifactId>
+      <version>15.0</version>
     </dependency>
 
     <dependency>

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/32517c9d/zeppelin-server/pom.xml
----------------------------------------------------------------------
diff --git a/zeppelin-server/pom.xml b/zeppelin-server/pom.xml
index d73f7bf..e69fba4 100644
--- a/zeppelin-server/pom.xml
+++ b/zeppelin-server/pom.xml
@@ -87,7 +87,7 @@
     <dependency>
       <groupId>${project.groupId}</groupId>
       <artifactId>zeppelin-zengine</artifactId>
-      <version>${project.version}</version>
+      <version>0.8.0-SNAPSHOT</version>
       <exclusions>
         <exclusion>
           <groupId>com.fasterxml.jackson.core</groupId>
@@ -275,6 +275,10 @@
           <groupId>org.apache.commons</groupId>
           <artifactId>commons-lang3</artifactId>
         </exclusion>
+        <exclusion>
+          <groupId>com.google.guava</groupId>
+          <artifactId>guava</artifactId>
+        </exclusion>
       </exclusions>
     </dependency>
 

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/32517c9d/zeppelin-server/src/test/java/org/apache/zeppelin/WebDriverManager.java
----------------------------------------------------------------------
diff --git 
a/zeppelin-server/src/test/java/org/apache/zeppelin/WebDriverManager.java 
b/zeppelin-server/src/test/java/org/apache/zeppelin/WebDriverManager.java
index da34e72..1405cb2 100644
--- a/zeppelin-server/src/test/java/org/apache/zeppelin/WebDriverManager.java
+++ b/zeppelin-server/src/test/java/org/apache/zeppelin/WebDriverManager.java
@@ -141,6 +141,7 @@ public class WebDriverManager {
       fail();
     }
 
+    driver.manage().window().maximize();
     return driver;
   }
 

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/32517c9d/zeppelin-server/src/test/java/org/apache/zeppelin/integration/InterpreterModeActionsIT.java
----------------------------------------------------------------------
diff --git 
a/zeppelin-server/src/test/java/org/apache/zeppelin/integration/InterpreterModeActionsIT.java
 
b/zeppelin-server/src/test/java/org/apache/zeppelin/integration/InterpreterModeActionsIT.java
index 9bfeae0..7f8765f 100644
--- 
a/zeppelin-server/src/test/java/org/apache/zeppelin/integration/InterpreterModeActionsIT.java
+++ 
b/zeppelin-server/src/test/java/org/apache/zeppelin/integration/InterpreterModeActionsIT.java
@@ -72,7 +72,7 @@ public class InterpreterModeActionsIT extends 
AbstractZeppelinIT {
   static String interpreterOptionPath = "";
   static String originalInterpreterOption = "";
 
-  static String cmdPsPython = "ps aux | grep 'zeppelin_python-' | grep -v 
'grep' | wc -l";
+  static String cmdPsPython = "ps aux | grep 'zeppelin_ipython' | grep -v 
'grep' | wc -l";
   static String cmdPsInterpreter = "ps aux | grep 
'zeppelin/interpreter/python/*' |" +
       " sed -E '/grep|local-repo/d' | wc -l";
 
@@ -145,19 +145,19 @@ public class InterpreterModeActionsIT extends 
AbstractZeppelinIT {
   }
 
   private void logoutUser(String userName) throws URISyntaxException {
-    pollingWait(By.xpath("//div[contains(@class, 
'navbar-collapse')]//li[contains(.,'" +
-        userName + "')]"), MAX_BROWSER_TIMEOUT_SEC).click();
-    pollingWait(By.xpath("//div[contains(@class, 
'navbar-collapse')]//li[contains(.,'" +
-        userName + "')]//a[@ng-click='navbar.logout()']"), 
MAX_BROWSER_TIMEOUT_SEC).click();
-
-    By locator = By.xpath("//*[@id='loginModal']//div[contains(@class, 
'modal-header')]/button");
-    WebElement element = (new WebDriverWait(driver, MAX_BROWSER_TIMEOUT_SEC))
-        .until(ExpectedConditions.visibilityOfElementLocated(locator));
-    if (element.isDisplayed()) {
+    ZeppelinITUtils.sleep(500, false);
+    driver.findElement(By.xpath("//div[contains(@class, 
'navbar-collapse')]//li[contains(.,'" +
+        userName + "')]")).click();
+    ZeppelinITUtils.sleep(500, false);
+    driver.findElement(By.xpath("//div[contains(@class, 
'navbar-collapse')]//li[contains(.,'" +
+        userName + "')]//a[@ng-click='navbar.logout()']")).click();
+    ZeppelinITUtils.sleep(2000, false);
+    if 
(driver.findElement(By.xpath("//*[@id='loginModal']//div[contains(@class, 
'modal-header')]/button"))
+        .isDisplayed()) {
       driver.findElement(By.xpath("//*[@id='loginModal']//div[contains(@class, 
'modal-header')]/button")).click();
     }
     driver.get(new URI(driver.getCurrentUrl()).resolve("/#/").toString());
-    ZeppelinITUtils.sleep(1000, false);
+    ZeppelinITUtils.sleep(500, false);
   }
 
   private void setPythonParagraph(int num, String text) {
@@ -199,7 +199,6 @@ public class InterpreterModeActionsIT extends 
AbstractZeppelinIT {
           
"//div[@class='modal-dialog']//div[@class='bootstrap-dialog-footer-buttons']//button[contains(.,
 'OK')]"));
       clickAndWait(By.xpath("//a[@class='navbar-brand 
navbar-title'][contains(@href, '#/')]"));
       interpreterModeActionsIT.logoutUser("admin");
-
       //step 2: (user1) login, create a new note, run two paragraph with 
'python', check result, check process, logout
       //paragraph: Check if the result is 'user1' in the second paragraph
       //System: Check if the number of python interpreter process is '1'

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/32517c9d/zeppelin-server/src/test/java/org/apache/zeppelin/integration/SparkParagraphIT.java
----------------------------------------------------------------------
diff --git 
a/zeppelin-server/src/test/java/org/apache/zeppelin/integration/SparkParagraphIT.java
 
b/zeppelin-server/src/test/java/org/apache/zeppelin/integration/SparkParagraphIT.java
index 9b651c1..8afdb9b 100644
--- 
a/zeppelin-server/src/test/java/org/apache/zeppelin/integration/SparkParagraphIT.java
+++ 
b/zeppelin-server/src/test/java/org/apache/zeppelin/integration/SparkParagraphIT.java
@@ -138,7 +138,7 @@ public class SparkParagraphIT extends AbstractZeppelinIT {
       WebElement paragraph1Result = driver.findElement(By.xpath(
           getParagraphXPath(1) + "//div[contains(@id,\"_text\")]"));
       collector.checkThat("Paragraph from SparkParagraphIT of testPySpark 
result: ",
-          paragraph1Result.getText().toString(), CoreMatchers.equalTo("test 
loop 0\ntest loop 1\ntest loop 2")
+          paragraph1Result.getText().toString(), 
CoreMatchers.containsString("test loop 0\ntest loop 1\ntest loop 2")
       );
 
       // the last statement's evaluation result is printed

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/32517c9d/zeppelin-server/src/test/java/org/apache/zeppelin/rest/AbstractTestRestApi.java
----------------------------------------------------------------------
diff --git 
a/zeppelin-server/src/test/java/org/apache/zeppelin/rest/AbstractTestRestApi.java
 
b/zeppelin-server/src/test/java/org/apache/zeppelin/rest/AbstractTestRestApi.java
index ae0911c..a7907db 100644
--- 
a/zeppelin-server/src/test/java/org/apache/zeppelin/rest/AbstractTestRestApi.java
+++ 
b/zeppelin-server/src/test/java/org/apache/zeppelin/rest/AbstractTestRestApi.java
@@ -211,6 +211,7 @@ public abstract class AbstractTestRestApi {
         // set spark home for pyspark
         sparkProperties.put("spark.home",
             new InterpreterProperty("spark.home", getSparkHome(), 
InterpreterPropertyType.TEXTAREA.getValue()));
+        sparkProperties.put("zeppelin.spark.useIPython",  new 
InterpreterProperty("zeppelin.spark.useIPython", "false", 
InterpreterPropertyType.TEXTAREA.getValue()));
 
         sparkIntpSetting.setProperties(sparkProperties);
         pySpark = true;
@@ -233,6 +234,8 @@ public abstract class AbstractTestRestApi {
               new InterpreterProperty("spark.home", sparkHome, 
InterpreterPropertyType.TEXTAREA.getValue()));
           sparkProperties.put("zeppelin.spark.useHiveContext",
               new InterpreterProperty("zeppelin.spark.useHiveContext", false, 
InterpreterPropertyType.CHECKBOX.getValue()));
+          sparkProperties.put("zeppelin.spark.useIPython",  new 
InterpreterProperty("zeppelin.spark.useIPython", "false", 
InterpreterPropertyType.TEXTAREA.getValue()));
+
           pySpark = true;
           sparkR = true;
         }

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/32517c9d/zeppelin-server/src/test/java/org/apache/zeppelin/rest/ZeppelinSparkClusterTest.java
----------------------------------------------------------------------
diff --git 
a/zeppelin-server/src/test/java/org/apache/zeppelin/rest/ZeppelinSparkClusterTest.java
 
b/zeppelin-server/src/test/java/org/apache/zeppelin/rest/ZeppelinSparkClusterTest.java
index e1700b2..3e46449 100644
--- 
a/zeppelin-server/src/test/java/org/apache/zeppelin/rest/ZeppelinSparkClusterTest.java
+++ 
b/zeppelin-server/src/test/java/org/apache/zeppelin/rest/ZeppelinSparkClusterTest.java
@@ -271,7 +271,8 @@ public class ZeppelinSparkClusterTest extends 
AbstractTestRestApi {
                 note.run(p.getId());
                 waitForFinish(p);
                 assertEquals(Status.FINISHED, p.getStatus());
-                assertEquals("[Row(len=u'3')]\n", 
p.getResult().message().get(0).getData());
+                
assertTrue("[Row(len=u'3')]\n".equals(p.getResult().message().get(0).getData()) 
||
+                    
"[Row(len='3')]\n".equals(p.getResult().message().get(0).getData()));
 
                 // test exception
                 p = note.addNewParagraph(AuthenticationInfo.ANONYMOUS);
@@ -321,7 +322,8 @@ public class ZeppelinSparkClusterTest extends 
AbstractTestRestApi {
                 note.run(p.getId());
                 waitForFinish(p);
                 assertEquals(Status.FINISHED, p.getStatus());
-                assertEquals("[Row(len=u'3')]\n", 
p.getResult().message().get(0).getData());
+                
assertTrue("[Row(len=u'3')]\n".equals(p.getResult().message().get(0).getData()) 
||
+                    
"[Row(len='3')]\n".equals(p.getResult().message().get(0).getData()));
             }
         }
         ZeppelinServer.notebook.removeNote(note.getId(), anonymous);

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/32517c9d/zeppelin-zengine/pom.xml
----------------------------------------------------------------------
diff --git a/zeppelin-zengine/pom.xml b/zeppelin-zengine/pom.xml
index 337b710..b3d5c63 100644
--- a/zeppelin-zengine/pom.xml
+++ b/zeppelin-zengine/pom.xml
@@ -57,7 +57,7 @@
     <dependency>
       <groupId>${project.groupId}</groupId>
       <artifactId>zeppelin-interpreter</artifactId>
-      <version>${project.version}</version>
+      <version>0.8.0-SNAPSHOT</version>
     </dependency>
 
     <dependency>
@@ -171,6 +171,7 @@
     <dependency>
       <groupId>com.google.guava</groupId>
       <artifactId>guava</artifactId>
+      <version>15.0</version>
     </dependency>
 
     <dependency>
@@ -262,6 +263,12 @@
       <artifactId>truth</artifactId>
       <version>${google.truth.version}</version>
       <scope>test</scope>
+      <exclusions>
+        <exclusion>
+          <groupId>com.google.guava</groupId>
+          <artifactId>guava</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
 
     <dependency>
@@ -365,6 +372,10 @@
           <groupId>xerces</groupId>
           <artifactId>xercesImpl</artifactId>
         </exclusion>
+        <exclusion>
+          <groupId>com.google.guava</groupId>
+          <artifactId>guava</artifactId>
+        </exclusion>
       </exclusions>
     </dependency>
   </dependencies>

Reply via email to