Repository: spark
Updated Branches:
  refs/heads/master 32da87dfa -> e1d72f2c0


[SPARK-25264][K8S] Fix comma-delineated arguments passed into PythonRunner and RRunner

## What changes were proposed in this pull request?

Fixes the issue brought up in 
https://github.com/GoogleCloudPlatform/spark-on-k8s-operator/issues/273, where 
the application arguments were being joined with commas, which is incorrect with 
respect to how PythonRunner and RRunner consume them; they expect space-separated 
arguments.
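
For illustration only (this sketch is not part of the patch; the object name and argument values are made up), the difference between the two delimiters when the arguments are later split back apart on whitespace:

```scala
object ArgDelimiterSketch {
  def main(args: Array[String]): Unit = {
    val appArgs = Seq("5", "7", "9")

    // Before this patch: joined with ",", the three arguments collapse into a
    // single token once the value is split on whitespace downstream.
    val commaJoined = appArgs.mkString(",")   // "5,7,9"

    // After this patch: joined with " ", the original argument boundaries are
    // preserved through a whitespace split.
    val spaceJoined = appArgs.mkString(" ")   // "5 7 9"

    println(s"comma: $commaJoined")
    println(s"space: $spaceJoined")
  }
}
```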

## How was this patch tested?

Modified the existing unit tests to pass multiple arguments and assert on the space-delimited value.
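
In rough terms (a simplified sketch, not the suite code itself; the real tests construct a full KubernetesConf as shown in the diff below), the updated assertions now exercise multiple arguments:

```scala
// Simplified shape of the updated checks in PythonDriverFeatureStepSuite and
// RDriverFeatureStepSuite: three separate arguments must come back space-joined.
val appArgs = Seq("5", "7", "9")
assert(appArgs.mkString(" ") === "5 7 9")
```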

Author: Ilan Filonenko <i...@cornell.edu>

Closes #22257 from ifilonenko/SPARK-25264.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/e1d72f2c
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/e1d72f2c
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/e1d72f2c

Branch: refs/heads/master
Commit: e1d72f2c07ecd6f1880299e9373daa21cb032017
Parents: 32da87d
Author: Ilan Filonenko <i...@cornell.edu>
Authored: Fri Aug 31 15:46:45 2018 -0700
Committer: mcheah <mch...@palantir.com>
Committed: Fri Aug 31 15:46:45 2018 -0700

----------------------------------------------------------------------
 .../deploy/k8s/features/bindings/PythonDriverFeatureStep.scala   | 3 ++-
 .../spark/deploy/k8s/features/bindings/RDriverFeatureStep.scala  | 3 ++-
 .../k8s/features/bindings/PythonDriverFeatureStepSuite.scala     | 4 ++--
 .../deploy/k8s/features/bindings/RDriverFeatureStepSuite.scala   | 4 ++--
 4 files changed, 8 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/e1d72f2c/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/features/bindings/PythonDriverFeatureStep.scala
----------------------------------------------------------------------
diff --git a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/features/bindings/PythonDriverFeatureStep.scala b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/features/bindings/PythonDriverFeatureStep.scala
index c20bcac..406944a 100644
--- a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/features/bindings/PythonDriverFeatureStep.scala
+++ b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/features/bindings/PythonDriverFeatureStep.scala
@@ -30,11 +30,12 @@ private[spark] class PythonDriverFeatureStep(
   override def configurePod(pod: SparkPod): SparkPod = {
     val roleConf = kubernetesConf.roleSpecificConf
     require(roleConf.mainAppResource.isDefined, "PySpark Main Resource must be defined")
+    // Delineation is done by " " because that is input into PythonRunner
     val maybePythonArgs = Option(roleConf.appArgs).filter(_.nonEmpty).map(
       pyArgs =>
         new EnvVarBuilder()
           .withName(ENV_PYSPARK_ARGS)
-          .withValue(pyArgs.mkString(","))
+          .withValue(pyArgs.mkString(" "))
           .build())
     val maybePythonFiles = kubernetesConf.pyFiles().map(
      // Delineation by ":" is to append the PySpark Files to the PYTHONPATH

http://git-wip-us.apache.org/repos/asf/spark/blob/e1d72f2c/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/features/bindings/RDriverFeatureStep.scala
----------------------------------------------------------------------
diff --git a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/features/bindings/RDriverFeatureStep.scala b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/features/bindings/RDriverFeatureStep.scala
index b33b86e..11b09b3 100644
--- a/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/features/bindings/RDriverFeatureStep.scala
+++ b/resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/features/bindings/RDriverFeatureStep.scala
@@ -30,11 +30,12 @@ private[spark] class RDriverFeatureStep(
   override def configurePod(pod: SparkPod): SparkPod = {
     val roleConf = kubernetesConf.roleSpecificConf
     require(roleConf.mainAppResource.isDefined, "R Main Resource must be defined")
+    // Delineation is done by " " because that is input into RRunner
     val maybeRArgs = Option(roleConf.appArgs).filter(_.nonEmpty).map(
       rArgs =>
         new EnvVarBuilder()
           .withName(ENV_R_ARGS)
-          .withValue(rArgs.mkString(","))
+          .withValue(rArgs.mkString(" "))
           .build())
     val envSeq =
       Seq(new EnvVarBuilder()

http://git-wip-us.apache.org/repos/asf/spark/blob/e1d72f2c/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/features/bindings/PythonDriverFeatureStepSuite.scala
----------------------------------------------------------------------
diff --git a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/features/bindings/PythonDriverFeatureStepSuite.scala b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/features/bindings/PythonDriverFeatureStepSuite.scala
index a5dac68..c14af1d 100644
--- a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/features/bindings/PythonDriverFeatureStepSuite.scala
+++ b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/features/bindings/PythonDriverFeatureStepSuite.scala
@@ -44,7 +44,7 @@ class PythonDriverFeatureStepSuite extends SparkFunSuite {
         Some(PythonMainAppResource("local:///main.py")),
         "test-app",
         "python-runner",
-        Seq("5 7")),
+        Seq("5", "7", "9")),
       appResourceNamePrefix = "",
       appId = "",
       roleLabels = Map.empty,
@@ -66,7 +66,7 @@ class PythonDriverFeatureStepSuite extends SparkFunSuite {
       .toMap
     assert(envs(ENV_PYSPARK_PRIMARY) === expectedMainResource)
     assert(envs(ENV_PYSPARK_FILES) === expectedPySparkFiles)
-    assert(envs(ENV_PYSPARK_ARGS) === "5 7")
+    assert(envs(ENV_PYSPARK_ARGS) === "5 7 9")
     assert(envs(ENV_PYSPARK_MAJOR_PYTHON_VERSION) === "2")
   }
   test("Python Step testing empty pyfiles") {

http://git-wip-us.apache.org/repos/asf/spark/blob/e1d72f2c/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/features/bindings/RDriverFeatureStepSuite.scala
----------------------------------------------------------------------
diff --git a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/features/bindings/RDriverFeatureStepSuite.scala b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/features/bindings/RDriverFeatureStepSuite.scala
index 8fdf91e..ace0faa 100644
--- a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/features/bindings/RDriverFeatureStepSuite.scala
+++ b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/deploy/k8s/features/bindings/RDriverFeatureStepSuite.scala
@@ -38,7 +38,7 @@ class RDriverFeatureStepSuite extends SparkFunSuite {
         Some(RMainAppResource(mainResource)),
         "test-app",
         "r-runner",
-        Seq("5 7")),
+        Seq("5", "7", "9")),
       appResourceNamePrefix = "",
       appId = "",
       roleLabels = Map.empty,
@@ -58,6 +58,6 @@ class RDriverFeatureStepSuite extends SparkFunSuite {
       .map(env => (env.getName, env.getValue))
       .toMap
     assert(envs(ENV_R_PRIMARY) === expectedMainResource)
-    assert(envs(ENV_R_ARGS) === "5 7")
+    assert(envs(ENV_R_ARGS) === "5 7 9")
   }
 }


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
