Repository: zeppelin
Updated Branches:
  refs/heads/master 6fc3057d5 -> 767a50b41


ZEPPELIN-3531. Don't look for py4j in NewSparkInterpreter

### What is this PR for?
Remove setupConfForPySpark from NewSparkInterpreter. It is not necessary, and it 
causes an NPE in yarn-cluster mode when the node that launches the Spark 
interpreter does not have Spark installed.
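
For context: the removed method iterates over the result of `File.listFiles()`, 
which returns null (rather than an empty array) when the directory does not 
exist, so on a node without a Spark install the enhanced for loop dereferences 
null. A minimal sketch of that failure mode (the path below is hypothetical):

```java
import java.io.File;

public class ListFilesNpeSketch {
  public static void main(String[] args) {
    // Hypothetical path standing in for $SPARK_HOME/python/lib on a
    // worker node where Spark is not installed.
    File pysparkFolder = new File("/opt/spark/python/lib");

    // File.listFiles() returns null, not an empty array, when the
    // directory does not exist, so this loop throws a
    // NullPointerException instead of iterating zero times.
    for (File file : pysparkFolder.listFiles()) {
      System.out.println(file.getAbsolutePath());
    }
  }
}
```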

### What type of PR is it?
[Bug Fix]

### Todos
* [ ] - Task

### What is the Jira issue?
* https://issues.apache.org/jira/browse/ZEPPELIN-3531

### How should this be tested?
* CI pass & Manually tested in a 3 node cluster

### Screenshots (if appropriate)

### Questions:
* Do the license files need an update? No
* Are there breaking changes for older versions? No
* Does this need documentation? No

Author: Jeff Zhang <zjf...@apache.org>

Closes #3008 from zjffdu/ZEPPELIN-3531 and squashes the following commits:

e8b2969e9 [Jeff Zhang] ZEPPELIN-3531. Don't look for py4j in NewSparkInterpreter


Project: http://git-wip-us.apache.org/repos/asf/zeppelin/repo
Commit: http://git-wip-us.apache.org/repos/asf/zeppelin/commit/767a50b4
Tree: http://git-wip-us.apache.org/repos/asf/zeppelin/tree/767a50b4
Diff: http://git-wip-us.apache.org/repos/asf/zeppelin/diff/767a50b4

Branch: refs/heads/master
Commit: 767a50b4164ef323ef18160485d226c013c7c00d
Parents: 6fc3057
Author: Jeff Zhang <zjf...@apache.org>
Authored: Tue Jun 5 21:38:17 2018 +0800
Committer: Jeff Zhang <zjf...@apache.org>
Committed: Thu Jun 7 16:19:14 2018 +0800

----------------------------------------------------------------------
 .../zeppelin/spark/NewSparkInterpreter.java     | 43 --------------------
 1 file changed, 43 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/zeppelin/blob/767a50b4/spark/interpreter/src/main/java/org/apache/zeppelin/spark/NewSparkInterpreter.java
----------------------------------------------------------------------
diff --git a/spark/interpreter/src/main/java/org/apache/zeppelin/spark/NewSparkInterpreter.java b/spark/interpreter/src/main/java/org/apache/zeppelin/spark/NewSparkInterpreter.java
index b77e1a7..fcfee27 100644
--- a/spark/interpreter/src/main/java/org/apache/zeppelin/spark/NewSparkInterpreter.java
+++ b/spark/interpreter/src/main/java/org/apache/zeppelin/spark/NewSparkInterpreter.java
@@ -81,7 +81,6 @@ public class NewSparkInterpreter extends AbstractSparkInterpreter {
     try {
       String scalaVersion = extractScalaVersion();
       LOGGER.info("Using Scala Version: " + scalaVersion);
-      setupConfForPySpark();
       SparkConf conf = new SparkConf();
       for (Map.Entry<Object, Object> entry : getProperties().entrySet()) {
         if (!StringUtils.isBlank(entry.getValue().toString())) {
@@ -126,48 +125,6 @@ public class NewSparkInterpreter extends AbstractSparkInterpreter {
     }
   }
 
-  private void setupConfForPySpark() {
-    String sparkHome = getProperty("SPARK_HOME");
-    File pysparkFolder = null;
-    if (sparkHome == null) {
-      String zeppelinHome =
-          new DefaultInterpreterProperty("ZEPPELIN_HOME", "zeppelin.home", "../../")
-              .getValue().toString();
-      pysparkFolder = new File(zeppelinHome,
-          "interpreter" + File.separator + "spark" + File.separator + 
"pyspark");
-    } else {
-      pysparkFolder = new File(sparkHome, "python" + File.separator + "lib");
-    }
-
-    ArrayList<String> pysparkPackages = new ArrayList<>();
-    for (File file : pysparkFolder.listFiles()) {
-      if (file.getName().equals("pyspark.zip")) {
-        pysparkPackages.add(file.getAbsolutePath());
-      }
-      if (file.getName().startsWith("py4j-")) {
-        pysparkPackages.add(file.getAbsolutePath());
-      }
-    }
-
-    if (pysparkPackages.size() != 2) {
-      throw new RuntimeException("Not correct number of pyspark packages: " +
-          StringUtils.join(pysparkPackages, ","));
-    }
-    // Distribute two libraries(pyspark.zip and py4j-*.zip) to workers
-    System.setProperty("spark.files", 
mergeProperty(System.getProperty("spark.files", ""),
-        StringUtils.join(pysparkPackages, ",")));
-    System.setProperty("spark.submit.pyFiles", mergeProperty(
-        System.getProperty("spark.submit.pyFiles", ""), StringUtils.join(pysparkPackages, ",")));
-
-  }
-
-  private String mergeProperty(String originalValue, String appendedValue) {
-    if (StringUtils.isBlank(originalValue)) {
-      return appendedValue;
-    }
-    return originalValue + "," + appendedValue;
-  }
-
   @Override
   public void close() {
     LOGGER.info("Close SparkInterpreter");
