Repository: spark
Updated Branches:
  refs/heads/master 2d2f607bf -> f4fd7432f


[SPARK-16125][YARN] Fix YarnClusterSuite not testing yarn-cluster mode correctly

## What changes were proposed in this pull request?

Since SPARK-13220 (Deprecate "yarn-client" and "yarn-cluster"), the master is set 
to plain "yarn" and the deploy mode is carried separately in 
`spark.submit.deployMode`, so YarnClusterSuite's comparison of `spark.master` 
against the literal "yarn-cluster" never matched and the suite stopped testing 
yarn-cluster mode correctly.
This pull request fixes it.
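
For context, a minimal sketch of the configuration change behind the bug, 
assuming only the documented `spark.master` and `spark.submit.deployMode` keys 
(illustrative, not code from this patch):

```scala
import org.apache.spark.SparkConf

// Deprecated style: the deploy mode was encoded in the master string.
val oldStyle = new SparkConf().set("spark.master", "yarn-cluster")

// Current style: master is just "yarn"; the mode travels in its own key.
// Any code still comparing spark.master to "yarn-cluster" silently never
// matches, which is exactly how the suite lost its cluster-mode coverage.
val newStyle = new SparkConf()
  .set("spark.master", "yarn")
  .set("spark.submit.deployMode", "cluster")
```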

## How was this patch tested?
Unit test


Author: peng.zhang <peng.zh...@xiaomi.com>

Closes #13836 from renozhang/SPARK-16125-test-yarn-cluster-mode.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/f4fd7432
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/f4fd7432
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/f4fd7432

Branch: refs/heads/master
Commit: f4fd7432fb9cf7b197ccada1378c4f2a6d427522
Parents: 2d2f607
Author: peng.zhang <peng.zh...@xiaomi.com>
Authored: Fri Jun 24 08:28:32 2016 +0100
Committer: Sean Owen <so...@cloudera.com>
Committed: Fri Jun 24 08:28:32 2016 +0100

----------------------------------------------------------------------
 core/src/test/scala/org/apache/spark/util/UtilsSuite.scala       | 3 ++-
 python/pyspark/context.py                                        | 4 ----
 .../src/main/scala/org/apache/spark/repl/SparkILoop.scala        | 2 --
 .../scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala    | 2 +-
 4 files changed, 3 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/f4fd7432/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala b/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
index e3a8e83..df279b5 100644
--- a/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
@@ -754,7 +754,8 @@ class UtilsSuite extends SparkFunSuite with ResetSystemProperties with Logging {
 
   test("isDynamicAllocationEnabled") {
     val conf = new SparkConf()
-    conf.set("spark.master", "yarn-client")
+    conf.set("spark.master", "yarn")
+    conf.set("spark.submit.deployMode", "client")
     assert(Utils.isDynamicAllocationEnabled(conf) === false)
     assert(Utils.isDynamicAllocationEnabled(
       conf.set("spark.dynamicAllocation.enabled", "false")) === false)

http://git-wip-us.apache.org/repos/asf/spark/blob/f4fd7432/python/pyspark/context.py
----------------------------------------------------------------------
diff --git a/python/pyspark/context.py b/python/pyspark/context.py
index aec0215..7217a99 100644
--- a/python/pyspark/context.py
+++ b/python/pyspark/context.py
@@ -155,10 +155,6 @@ class SparkContext(object):
         self.appName = self._conf.get("spark.app.name")
         self.sparkHome = self._conf.get("spark.home", None)
 
-        # Let YARN know it's a pyspark app, so it distributes needed libraries.
-        if self.master == "yarn-client":
-            self._conf.set("spark.yarn.isPython", "true")
-
         for (k, v) in self._conf.getAll():
             if k.startswith("spark.executorEnv."):
                 varName = k[len("spark.executorEnv."):]

http://git-wip-us.apache.org/repos/asf/spark/blob/f4fd7432/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala
----------------------------------------------------------------------
diff --git a/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala
index 8fcab38..e871004 100644
--- a/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala
+++ b/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala
@@ -943,8 +943,6 @@ class SparkILoop(
       })
 
   private def process(settings: Settings): Boolean = savingContextLoader {
-    if (getMaster() == "yarn-client") System.setProperty("SPARK_YARN_MODE", "true")
-
     this.settings = settings
     createInterpreter()
 

http://git-wip-us.apache.org/repos/asf/spark/blob/f4fd7432/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
----------------------------------------------------------------------
diff --git a/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala b/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
index 4ce33e0..6b20dea 100644
--- a/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
+++ b/yarn/src/test/scala/org/apache/spark/deploy/yarn/YarnClusterSuite.scala
@@ -312,7 +312,7 @@ private object YarnClusterDriver extends Logging with Matchers {
 
    // If we are running in yarn-cluster mode, verify that driver logs links and present and are
     // in the expected format.
-    if (conf.get("spark.master") == "yarn-cluster") {
+    if (conf.get("spark.submit.deployMode") == "cluster") {
       assert(listener.driverLogs.nonEmpty)
       val driverLogs = listener.driverLogs.get
       assert(driverLogs.size === 2)
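
This hunk is the core fix: `spark.master` is "yarn" in both modes now, so 
cluster mode must be detected through the deploy-mode key. A minimal sketch of 
that detection, with `runsDriverInCluster` as a hypothetical helper (the suite 
inlines the comparison):

```scala
import org.apache.spark.SparkConf

// Hypothetical helper for illustration: in cluster mode the driver runs
// inside the YARN ApplicationMaster, and that fact is no longer visible in
// spark.master, only in spark.submit.deployMode.
def runsDriverInCluster(conf: SparkConf): Boolean =
  conf.get("spark.submit.deployMode", "client") == "cluster"

val conf = new SparkConf().set("spark.master", "yarn")
assert(!runsDriverInCluster(conf))  // no key set: defaults to client mode
conf.set("spark.submit.deployMode", "cluster")
assert(runsDriverInCluster(conf))   // cluster mode now detectable
```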

