Repository: zeppelin
Updated Branches:
  refs/heads/master 1caa6aa2f -> 6ed3d88a2


ZEPPELIN-3519. zeppelin.spark.printREPLOutput doesn't work for new 
SparkInterpreter

### What is this PR for?
Straightforward fix to make `zeppelin.spark.printREPLOutput` work in 
`NewSparkInterpreter`

### What type of PR is it?
[Bug Fix]

### Todos
* [ ] - Task

### What is the Jira issue?
* https://issues.apache.org/jira/browse/ZEPPELIN-3519

### How should this be tested?
* CI pass

### Screenshots (if appropriate)

### Questions:
* Do the license files need an update? No
* Is there breaking changes for older versions? No
* Does this need documentation? No

Author: Jeff Zhang <zjf...@apache.org>

Closes #3022 from zjffdu/ZEPPELIN-3519 and squashes the following commits:

9d3bbe578 [Jeff Zhang] ZEPPELIN-3519. zeppelin.spark.printREPLOutput doesn't 
work for new SparkInterpreter


Project: http://git-wip-us.apache.org/repos/asf/zeppelin/repo
Commit: http://git-wip-us.apache.org/repos/asf/zeppelin/commit/6ed3d88a
Tree: http://git-wip-us.apache.org/repos/asf/zeppelin/tree/6ed3d88a
Diff: http://git-wip-us.apache.org/repos/asf/zeppelin/diff/6ed3d88a

Branch: refs/heads/master
Commit: 6ed3d88a2a43e33c6bbd0931c92c13304d7ab0de
Parents: 1caa6aa
Author: Jeff Zhang <zjf...@apache.org>
Authored: Fri Jun 1 12:54:16 2018 +0800
Committer: Jeff Zhang <zjf...@apache.org>
Committed: Fri Jun 15 17:16:10 2018 +0800

----------------------------------------------------------------------
 .../zeppelin/spark/NewSparkInterpreter.java     |  7 +++---
 .../zeppelin/spark/NewSparkInterpreterTest.java | 26 ++++++++++++++++++++
 .../spark/SparkScala210Interpreter.scala        |  9 ++++---
 .../spark/SparkScala211Interpreter.scala        | 11 ++++++---
 .../spark/BaseSparkScalaInterpreter.scala       |  3 ++-
 5 files changed, 46 insertions(+), 10 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/zeppelin/blob/6ed3d88a/spark/interpreter/src/main/java/org/apache/zeppelin/spark/NewSparkInterpreter.java
----------------------------------------------------------------------
diff --git 
a/spark/interpreter/src/main/java/org/apache/zeppelin/spark/NewSparkInterpreter.java
 
b/spark/interpreter/src/main/java/org/apache/zeppelin/spark/NewSparkInterpreter.java
index b60e38b..c532074 100644
--- 
a/spark/interpreter/src/main/java/org/apache/zeppelin/spark/NewSparkInterpreter.java
+++ 
b/spark/interpreter/src/main/java/org/apache/zeppelin/spark/NewSparkInterpreter.java
@@ -95,9 +95,10 @@ public class NewSparkInterpreter extends 
AbstractSparkInterpreter {
 
       String innerIntpClassName = innerInterpreterClassMap.get(scalaVersion);
       Class clazz = Class.forName(innerIntpClassName);
-      this.innerInterpreter =
-          (BaseSparkScalaInterpreter) clazz.getConstructor(SparkConf.class, 
List.class)
-              .newInstance(conf, getDependencyFiles());
+      this.innerInterpreter = (BaseSparkScalaInterpreter)
+          clazz.getConstructor(SparkConf.class, List.class, Boolean.class)
+              .newInstance(conf, getDependencyFiles(),
+                  
Boolean.parseBoolean(getProperty("zeppelin.spark.printREPLOutput", "true")));
       this.innerInterpreter.open();
 
       sc = this.innerInterpreter.sc();

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/6ed3d88a/spark/interpreter/src/test/java/org/apache/zeppelin/spark/NewSparkInterpreterTest.java
----------------------------------------------------------------------
diff --git 
a/spark/interpreter/src/test/java/org/apache/zeppelin/spark/NewSparkInterpreterTest.java
 
b/spark/interpreter/src/test/java/org/apache/zeppelin/spark/NewSparkInterpreterTest.java
index 84bdc43..a694f08 100644
--- 
a/spark/interpreter/src/test/java/org/apache/zeppelin/spark/NewSparkInterpreterTest.java
+++ 
b/spark/interpreter/src/test/java/org/apache/zeppelin/spark/NewSparkInterpreterTest.java
@@ -390,6 +390,32 @@ public class NewSparkInterpreterTest {
     assertEquals(InterpreterResult.Code.SUCCESS, result.code());
   }
 
+  @Test
+  public void testDisableReplOutput() throws InterpreterException {
+    Properties properties = new Properties();
+    properties.setProperty("spark.master", "local");
+    properties.setProperty("spark.app.name", "test");
+    properties.setProperty("zeppelin.spark.maxResult", "100");
+    properties.setProperty("zeppelin.spark.test", "true");
+    properties.setProperty("zeppelin.spark.useNew", "true");
+    properties.setProperty("zeppelin.spark.printREPLOutput", "false");
+
+    interpreter = new SparkInterpreter(properties);
+    assertTrue(interpreter.getDelegation() instanceof NewSparkInterpreter);
+    interpreter.setInterpreterGroup(mock(InterpreterGroup.class));
+    interpreter.open();
+
+    InterpreterResult result = interpreter.interpret("val a=\"hello world\"", 
getInterpreterContext());
+    assertEquals(InterpreterResult.Code.SUCCESS, result.code());
+    // no output for define new variable
+    assertEquals("", output);
+
+    result = interpreter.interpret("print(a)", getInterpreterContext());
+    assertEquals(InterpreterResult.Code.SUCCESS, result.code());
+    // output from print statement will still be displayed
+    assertEquals("hello world", output);
+  }
+
   @After
   public void tearDown() throws InterpreterException {
     if (this.interpreter != null) {

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/6ed3d88a/spark/scala-2.10/src/main/scala/org/apache/zeppelin/spark/SparkScala210Interpreter.scala
----------------------------------------------------------------------
diff --git 
a/spark/scala-2.10/src/main/scala/org/apache/zeppelin/spark/SparkScala210Interpreter.scala
 
b/spark/scala-2.10/src/main/scala/org/apache/zeppelin/spark/SparkScala210Interpreter.scala
index 9d371ba..a9943fb 100644
--- 
a/spark/scala-2.10/src/main/scala/org/apache/zeppelin/spark/SparkScala210Interpreter.scala
+++ 
b/spark/scala-2.10/src/main/scala/org/apache/zeppelin/spark/SparkScala210Interpreter.scala
@@ -35,8 +35,9 @@ import scala.tools.nsc.interpreter._
   * SparkInterpreter for scala-2.10
   */
 class SparkScala210Interpreter(override val conf: SparkConf,
-                               override val depFiles: java.util.List[String])
-  extends BaseSparkScalaInterpreter(conf, depFiles) {
+                               override val depFiles: java.util.List[String],
+                               override val printReplOutput: java.lang.Boolean)
+  extends BaseSparkScalaInterpreter(conf, depFiles, printReplOutput) {
 
   lazy override val LOGGER: Logger = LoggerFactory.getLogger(getClass)
 
@@ -66,7 +67,9 @@ class SparkScala210Interpreter(override val conf: SparkConf,
     settings.embeddedDefaults(Thread.currentThread().getContextClassLoader())
     settings.usejavacp.value = true
     settings.classpath.value = getUserJars.mkString(File.pathSeparator)
-    Console.setOut(interpreterOutput)
+    if (printReplOutput) {
+      Console.setOut(interpreterOutput)
+    }
     sparkILoop = new SparkILoop()
 
     setDeclaredField(sparkILoop, "settings", settings)

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/6ed3d88a/spark/scala-2.11/src/main/scala/org/apache/zeppelin/spark/SparkScala211Interpreter.scala
----------------------------------------------------------------------
diff --git 
a/spark/scala-2.11/src/main/scala/org/apache/zeppelin/spark/SparkScala211Interpreter.scala
 
b/spark/scala-2.11/src/main/scala/org/apache/zeppelin/spark/SparkScala211Interpreter.scala
index e145260..7ddb3fb 100644
--- 
a/spark/scala-2.11/src/main/scala/org/apache/zeppelin/spark/SparkScala211Interpreter.scala
+++ 
b/spark/scala-2.11/src/main/scala/org/apache/zeppelin/spark/SparkScala211Interpreter.scala
@@ -35,8 +35,9 @@ import scala.tools.nsc.interpreter._
   * SparkInterpreter for scala-2.11
   */
 class SparkScala211Interpreter(override val conf: SparkConf,
-                               override val depFiles: java.util.List[String])
-  extends BaseSparkScalaInterpreter(conf, depFiles) {
+                               override val depFiles: java.util.List[String],
+                               override val printReplOutput: java.lang.Boolean)
+  extends BaseSparkScalaInterpreter(conf, depFiles, printReplOutput) {
 
   lazy override val LOGGER: Logger = LoggerFactory.getLogger(getClass)
 
@@ -66,7 +67,11 @@ class SparkScala211Interpreter(override val conf: SparkConf,
     settings.usejavacp.value = true
     settings.classpath.value = getUserJars.mkString(File.pathSeparator)
 
-    val replOut = new JPrintWriter(interpreterOutput, true)
+    val replOut = if (printReplOutput) {
+      new JPrintWriter(interpreterOutput, true)
+    } else {
+      new JPrintWriter(Console.out, true)
+    }
     sparkILoop = new ILoop(None, replOut)
     sparkILoop.settings = settings
     sparkILoop.createInterpreter()

http://git-wip-us.apache.org/repos/asf/zeppelin/blob/6ed3d88a/spark/spark-scala-parent/src/main/scala/org/apache/zeppelin/spark/BaseSparkScalaInterpreter.scala
----------------------------------------------------------------------
diff --git 
a/spark/spark-scala-parent/src/main/scala/org/apache/zeppelin/spark/BaseSparkScalaInterpreter.scala
 
b/spark/spark-scala-parent/src/main/scala/org/apache/zeppelin/spark/BaseSparkScalaInterpreter.scala
index 883beb0..7f9674c 100644
--- 
a/spark/spark-scala-parent/src/main/scala/org/apache/zeppelin/spark/BaseSparkScalaInterpreter.scala
+++ 
b/spark/spark-scala-parent/src/main/scala/org/apache/zeppelin/spark/BaseSparkScalaInterpreter.scala
@@ -38,7 +38,8 @@ import scala.util.control.NonFatal
   * @param depFiles
   */
 abstract class BaseSparkScalaInterpreter(val conf: SparkConf,
-                                         val depFiles: java.util.List[String]) 
{
+                                         val depFiles: java.util.List[String],
+                                         val printReplOutput: 
java.lang.Boolean) {
 
   protected lazy val LOGGER: Logger = LoggerFactory.getLogger(getClass)
 

Reply via email to