This is an automated email from the ASF dual-hosted git repository.

zjffdu pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/zeppelin.git


The following commit(s) were added to refs/heads/master by this push:
     new b896e6f  [security] Fix spark interpreter command injection
b896e6f is described below

commit b896e6f566b5be43c1a3b00cc04574afbd1c3f4c
Author: Jeff Zhang <zjf...@apache.org>
AuthorDate: Tue Jul 30 17:05:35 2019 +0800

    [security] Fix spark interpreter command injection
    
    (cherry picked from commit af423636c05e6f9738bcacc20c1c1406f7165ba7)
---
 .../interpreter/launcher/InterpreterLauncher.java  | 13 ++++++
 .../launcher/InterpreterLauncherTest.java          | 31 +++++++++++++
 .../launcher/SparkInterpreterLauncher.java         | 14 +-----
 .../launcher/SparkInterpreterLauncherTest.java     | 52 +++++++++++-----------
 4 files changed, 73 insertions(+), 37 deletions(-)

diff --git 
a/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/launcher/InterpreterLauncher.java
 
b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/launcher/InterpreterLauncher.java
index fe105c6..8e8bf53 100644
--- 
a/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/launcher/InterpreterLauncher.java
+++ 
b/zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/launcher/InterpreterLauncher.java
@@ -28,6 +28,8 @@ import java.util.Properties;
  */
 public abstract class InterpreterLauncher {
 
+  private static String SPECIAL_CHARACTER="{}()<>&*'|=?;[]$-#~!.\"%/\\:+,`";
+
   protected ZeppelinConfiguration zConf;
   protected Properties properties;
   protected RecoveryStorage recoveryStorage;
@@ -52,5 +54,16 @@ public abstract class InterpreterLauncher {
     return connectTimeout;
   }
 
+  public static String escapeSpecialCharacter(String command) {
+    StringBuilder builder = new StringBuilder();
+    for (char c : command.toCharArray()) {
+      if (SPECIAL_CHARACTER.indexOf(c) != -1) {
+        builder.append("\\");
+      }
+      builder.append(c);
+    }
+    return builder.toString();
+  }
+
   public abstract InterpreterClient launch(InterpreterLaunchContext context) 
throws IOException;
 }
diff --git 
a/zeppelin-interpreter/src/test/java/org/apache/zeppelin/interpreter/launcher/InterpreterLauncherTest.java
 
b/zeppelin-interpreter/src/test/java/org/apache/zeppelin/interpreter/launcher/InterpreterLauncherTest.java
new file mode 100644
index 0000000..7c348a6
--- /dev/null
+++ 
b/zeppelin-interpreter/src/test/java/org/apache/zeppelin/interpreter/launcher/InterpreterLauncherTest.java
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.zeppelin.interpreter.launcher;
+
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+
+public class InterpreterLauncherTest {
+
+  @Test
+  public void testEscapeSpecialCharacters() {
+    String cmd = "{}.";
+    assertEquals("\\{\\}\\.", InterpreterLauncher.escapeSpecialCharacter(cmd));
+  }
+}
diff --git 
a/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/launcher/SparkInterpreterLauncher.java
 
b/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/launcher/SparkInterpreterLauncher.java
index d02d82d..c3428bd 100644
--- 
a/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/launcher/SparkInterpreterLauncher.java
+++ 
b/zeppelin-zengine/src/main/java/org/apache/zeppelin/interpreter/launcher/SparkInterpreterLauncher.java
@@ -67,7 +67,7 @@ public class SparkInterpreterLauncher extends 
StandardInterpreterLauncher {
         env.put(key, properties.getProperty(key));
       }
       if (isSparkConf(key, properties.getProperty(key))) {
-        sparkProperties.setProperty(key, 
toShellFormat(properties.getProperty(key)));
+        sparkProperties.setProperty(key, properties.getProperty(key));
       }
     }
 
@@ -137,7 +137,7 @@ public class SparkInterpreterLauncher extends 
StandardInterpreterLauncher {
       sparkConfBuilder.append(" --proxy-user " + context.getUserName());
     }
 
-    env.put("ZEPPELIN_SPARK_CONF", sparkConfBuilder.toString());
+    env.put("ZEPPELIN_SPARK_CONF", 
escapeSpecialCharacter(sparkConfBuilder.toString()));
 
     // set these env in the order of
     // 1. interpreter-setting
@@ -335,14 +335,4 @@ public class SparkInterpreterLauncher extends 
StandardInterpreterLauncher {
     return getSparkMaster(properties).startsWith("yarn");
   }
 
-  private String toShellFormat(String value) {
-    if (value.contains("'") && value.contains("\"")) {
-      throw new RuntimeException("Spark property value could not contain both 
\" and '");
-    } else if (value.contains("'")) {
-      return "\"" + value + "\"";
-    } else {
-      return "'" + value + "'";
-    }
-  }
-
 }
diff --git 
a/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/launcher/SparkInterpreterLauncherTest.java
 
b/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/launcher/SparkInterpreterLauncherTest.java
index 8500de6..d973713 100644
--- 
a/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/launcher/SparkInterpreterLauncherTest.java
+++ 
b/zeppelin-zengine/src/test/java/org/apache/zeppelin/interpreter/launcher/SparkInterpreterLauncherTest.java
@@ -104,7 +104,8 @@ public class SparkInterpreterLauncherTest {
     assertEquals(zConf.getInterpreterRemoteRunnerPath(), 
interpreterProcess.getInterpreterRunner());
     assertTrue(interpreterProcess.getEnv().size() >= 2);
     assertEquals(sparkHome, interpreterProcess.getEnv().get("SPARK_HOME"));
-    assertEquals(" --master local[*] --conf spark.files='file_1' --conf 
spark.jars='jar_1'", interpreterProcess.getEnv().get("ZEPPELIN_SPARK_CONF"));
+    assertEquals(InterpreterLauncher.escapeSpecialCharacter(" --master 
local[*] --conf spark.files=file_1 --conf spark.jars=jar_1"),
+            interpreterProcess.getEnv().get("ZEPPELIN_SPARK_CONF"));
   }
 
   @Test
@@ -130,12 +131,12 @@ public class SparkInterpreterLauncherTest {
     assertTrue(interpreterProcess.getEnv().size() >= 2);
     assertEquals(sparkHome, interpreterProcess.getEnv().get("SPARK_HOME"));
 
-    String sparkJars = "'jar_1'";
+    String sparkJars = "jar_1";
     String sparkrZip = sparkHome + "/R/lib/sparkr.zip#sparkr";
-    String sparkFiles = "'file_1'";
-    assertEquals(" --master yarn-client --conf spark.yarn.dist.archives=" + 
sparkrZip +
+    String sparkFiles = "file_1";
+    assertEquals(InterpreterLauncher.escapeSpecialCharacter(" --master 
yarn-client --conf spark.yarn.dist.archives=" + sparkrZip +
                     " --conf spark.files=" + sparkFiles + " --conf 
spark.jars=" + sparkJars +
-                    " --conf spark.yarn.isPython=true",
+                    " --conf spark.yarn.isPython=true"),
             interpreterProcess.getEnv().get("ZEPPELIN_SPARK_CONF"));
   }
 
@@ -163,13 +164,13 @@ public class SparkInterpreterLauncherTest {
     assertTrue(interpreterProcess.getEnv().size() >= 2);
     assertEquals(sparkHome, interpreterProcess.getEnv().get("SPARK_HOME"));
 
-    String sparkJars = "'jar_1'";
+    String sparkJars = "jar_1";
     String sparkrZip = sparkHome + "/R/lib/sparkr.zip#sparkr";
-    String sparkFiles = "'file_1'";
-    assertEquals(" --master yarn --conf spark.yarn.dist.archives=" + sparkrZip 
+
+    String sparkFiles = "file_1";
+    assertEquals(InterpreterLauncher.escapeSpecialCharacter(" --master yarn 
--conf spark.yarn.dist.archives=" + sparkrZip +
                     " --conf spark.files=" + sparkFiles + " --conf 
spark.jars=" + sparkJars +
-                    " --conf spark.submit.deployMode='client'" +
-                    " --conf spark.yarn.isPython=true",
+                    " --conf spark.submit.deployMode=client" +
+                    " --conf spark.yarn.isPython=true"),
             interpreterProcess.getEnv().get("ZEPPELIN_SPARK_CONF"));
   }
 
@@ -197,15 +198,15 @@ public class SparkInterpreterLauncherTest {
     assertEquals(sparkHome, interpreterProcess.getEnv().get("SPARK_HOME"));
 
     assertEquals("true", 
interpreterProcess.getEnv().get("ZEPPELIN_SPARK_YARN_CLUSTER"));
-    String sparkJars = "'jar_1'," +
+    String sparkJars = "jar_1," +
             zeppelinHome + "/interpreter/spark/scala-2.11/spark-scala-2.11-" + 
Util.getVersion() + ".jar";
     String sparkrZip = sparkHome + "/R/lib/sparkr.zip#sparkr";
-    String sparkFiles = "'file_1'," + zeppelinHome + 
"/conf/log4j_yarn_cluster.properties";
-    assertEquals(" --master yarn-cluster --conf spark.yarn.dist.archives=" + 
sparkrZip +
+    String sparkFiles = "file_1," + zeppelinHome + 
"/conf/log4j_yarn_cluster.properties";
+    assertEquals(InterpreterLauncher.escapeSpecialCharacter(" --master 
yarn-cluster --conf spark.yarn.dist.archives=" + sparkrZip +
                     " --conf spark.yarn.maxAppAttempts=1" +
                     " --conf spark.files=" + sparkFiles + " --conf 
spark.jars=" + sparkJars +
                     " --conf spark.yarn.isPython=true" +
-                    " --conf spark.yarn.submit.waitAppCompletion=false",
+                    " --conf spark.yarn.submit.waitAppCompletion=false"),
             interpreterProcess.getEnv().get("ZEPPELIN_SPARK_CONF"));
   }
 
@@ -239,16 +240,16 @@ public class SparkInterpreterLauncherTest {
     assertTrue(interpreterProcess.getEnv().size() >= 3);
     assertEquals(sparkHome, interpreterProcess.getEnv().get("SPARK_HOME"));
     assertEquals("true", 
interpreterProcess.getEnv().get("ZEPPELIN_SPARK_YARN_CLUSTER"));
-    String sparkJars = "'jar_1'," +
+    String sparkJars = "jar_1," +
             Paths.get(localRepoPath.toAbsolutePath().toString(), 
"test.jar").toString() + "," +
             zeppelinHome + "/interpreter/spark/scala-2.11/spark-scala-2.11-" + 
Util.getVersion() + ".jar";
     String sparkrZip = sparkHome + "/R/lib/sparkr.zip#sparkr";
-    String sparkFiles = "'file_1'," + zeppelinHome + 
"/conf/log4j_yarn_cluster.properties";
-    assertEquals(" --master yarn --conf spark.yarn.dist.archives=" + sparkrZip 
+
+    String sparkFiles = "file_1," + zeppelinHome + 
"/conf/log4j_yarn_cluster.properties";
+    assertEquals(InterpreterLauncher.escapeSpecialCharacter(" --master yarn 
--conf spark.yarn.dist.archives=" + sparkrZip +
             " --conf spark.yarn.maxAppAttempts=1" +
             " --conf spark.files=" + sparkFiles + " --conf spark.jars=" + 
sparkJars +
-            " --conf spark.submit.deployMode='cluster' --conf 
spark.yarn.isPython=true" +
-            " --conf spark.yarn.submit.waitAppCompletion=false --proxy-user 
user1",
+            " --conf spark.submit.deployMode=cluster --conf 
spark.yarn.isPython=true" +
+            " --conf spark.yarn.submit.waitAppCompletion=false --proxy-user 
user1"),
             interpreterProcess.getEnv().get("ZEPPELIN_SPARK_CONF"));
     Files.deleteIfExists(Paths.get(localRepoPath.toAbsolutePath().toString(), 
"test.jar"));
     FileUtils.deleteDirectory(localRepoPath.toFile());
@@ -263,7 +264,7 @@ public class SparkInterpreterLauncherTest {
     properties.setProperty("property_1", "value_1");
     properties.setProperty("master", "yarn");
     properties.setProperty("spark.submit.deployMode", "cluster");
-    properties.setProperty("spark.files", "file_1");
+    properties.setProperty("spark.files", "{}");
     properties.setProperty("spark.jars", "jar_1");
 
     InterpreterOption option = new InterpreterOption();
@@ -284,15 +285,16 @@ public class SparkInterpreterLauncherTest {
     assertEquals(sparkHome, interpreterProcess.getEnv().get("SPARK_HOME"));
     assertEquals("true", 
interpreterProcess.getEnv().get("ZEPPELIN_SPARK_YARN_CLUSTER"));
 
-    String sparkJars = "'jar_1'," +
+    String sparkJars = "jar_1," +
             zeppelinHome + "/interpreter/spark/scala-2.11/spark-scala-2.11-" + 
Util.getVersion() + ".jar";
     String sparkrZip = sparkHome + "/R/lib/sparkr.zip#sparkr";
-    String sparkFiles = "'file_1'," + zeppelinHome + 
"/conf/log4j_yarn_cluster.properties";
-    assertEquals(" --master yarn --conf spark.yarn.dist.archives=" + sparkrZip 
+
+    // escape special characters
+    String sparkFiles = "{}," + zeppelinHome + 
"/conf/log4j_yarn_cluster.properties";
+    assertEquals(InterpreterLauncher.escapeSpecialCharacter(" --master yarn 
--conf spark.yarn.dist.archives=" + sparkrZip +
                     " --conf spark.yarn.maxAppAttempts=1" +
                     " --conf spark.files=" + sparkFiles + " --conf 
spark.jars=" + sparkJars +
-                    " --conf spark.submit.deployMode='cluster' --conf 
spark.yarn.isPython=true" +
-                    " --conf spark.yarn.submit.waitAppCompletion=false 
--proxy-user user1",
+                    " --conf spark.submit.deployMode=cluster --conf 
spark.yarn.isPython=true" +
+                    " --conf spark.yarn.submit.waitAppCompletion=false 
--proxy-user user1"),
             interpreterProcess.getEnv().get("ZEPPELIN_SPARK_CONF"));
     FileUtils.deleteDirectory(localRepoPath.toFile());
   }

Reply via email to the mailing list.