Repository: spark
Updated Branches:
  refs/heads/master d81a71357 -> 99fe8993f


[SPARK-12994][CORE] It is not necessary to create ExecutorAllocationManager in local mode

Author: Jeff Zhang <zjf...@apache.org>

Closes #10914 from zjffdu/SPARK-12994.
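
In short: with this change a SparkContext running against a local master no
longer creates an ExecutorAllocationManager, since there are no executors to
scale. A rough sketch of the resulting behavior (not part of the patch; it
assumes spark-core on the classpath, and restates the private[spark] helpers
added to Utils in the diff below):

    import org.apache.spark.SparkConf

    // Mirrors the logic of Utils.isLocalMaster / Utils.isDynamicAllocationEnabled
    // from the diff below, restated here because those helpers are private[spark].
    def isLocalMaster(conf: SparkConf): Boolean = {
      val master = conf.get("spark.master", "")
      master == "local" || master.startsWith("local[")
    }

    def isDynamicAllocationEnabled(conf: SparkConf): Boolean = {
      val numExecutors = conf.getInt("spark.executor.instances", 0)
      val enabled = conf.getBoolean("spark.dynamicAllocation.enabled", false)
      numExecutors == 0 && enabled &&
        (!isLocalMaster(conf) || conf.getBoolean("spark.dynamicAllocation.testing", false))
    }

    val conf = new SparkConf(loadDefaults = false)
      .set("spark.master", "local[4]")
      .set("spark.dynamicAllocation.enabled", "true")
    assert(!isDynamicAllocationEnabled(conf))  // local mode: allocation manager skipped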


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/99fe8993
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/99fe8993
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/99fe8993

Branch: refs/heads/master
Commit: 99fe8993f51d3c72cd95eb0825b090dd4d4cd2cd
Parents: d81a713
Author: Jeff Zhang <zjf...@apache.org>
Authored: Mon Feb 29 12:08:37 2016 +0000
Committer: Sean Owen <srowen@percolateur.local>
Committed: Mon Feb 29 12:08:37 2016 +0000

----------------------------------------------------------------------
 .../scala/org/apache/spark/SparkContext.scala    |  6 +-----
 .../main/scala/org/apache/spark/util/Utils.scala | 19 +++++++++++++++++--
 .../scala/org/apache/spark/util/UtilsSuite.scala |  3 +++
 3 files changed, 21 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/99fe8993/core/src/main/scala/org/apache/spark/SparkContext.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index a1fa266..0e8b735 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -244,7 +244,7 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli
   private[spark] def eventLogDir: Option[URI] = _eventLogDir
   private[spark] def eventLogCodec: Option[String] = _eventLogCodec
 
-  def isLocal: Boolean = (master == "local" || master.startsWith("local["))
+  def isLocal: Boolean = Utils.isLocalMaster(_conf)
 
   /**
    * @return true if context is stopped or in the midst of stopping.
@@ -526,10 +526,6 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli
 
     // Optionally scale number of executors dynamically based on workload. Exposed for testing.
     val dynamicAllocationEnabled = Utils.isDynamicAllocationEnabled(_conf)
-    if (!dynamicAllocationEnabled && _conf.getBoolean("spark.dynamicAllocation.enabled", false)) {
-      logWarning("Dynamic Allocation and num executors both set, thus dynamic allocation disabled.")
-    }
-
     _executorAllocationManager =
       if (dynamicAllocationEnabled) {
         Some(new ExecutorAllocationManager(this, listenerBus, _conf))
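
For context: isLocal now delegates to the shared Utils helper, and the
duplicated warning above is dropped because the same warning moves into
Utils.isDynamicAllocationEnabled (next hunk), so every caller gets it. A quick
illustration of which master URLs the check treats as local (a sketch, not part
of the patch; the inlined expression is equivalent to the private
Utils.isLocalMaster):

    Seq("local", "local[2]", "local[*]", "yarn-client", "spark://host:7077").foreach { master =>
      val isLocal = master == "local" || master.startsWith("local[")
      println(s"$master -> $isLocal")  // only the three local* forms print true
    }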

http://git-wip-us.apache.org/repos/asf/spark/blob/99fe8993/core/src/main/scala/org/apache/spark/util/Utils.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/util/Utils.scala b/core/src/main/scala/org/apache/spark/util/Utils.scala
index e0c9bf0..6103a10 100644
--- a/core/src/main/scala/org/apache/spark/util/Utils.scala
+++ b/core/src/main/scala/org/apache/spark/util/Utils.scala
@@ -2195,6 +2195,16 @@ private[spark] object Utils extends Logging {
     isInDirectory(parent, child.getParentFile)
   }
 
+
+  /**
+   *
+   * @return whether it is local mode
+   */
+  def isLocalMaster(conf: SparkConf): Boolean = {
+    val master = conf.get("spark.master", "")
+    master == "local" || master.startsWith("local[")
+  }
+
   /**
    * Return whether dynamic allocation is enabled in the given conf
   * Dynamic allocation and explicitly setting the number of executors are inherently
@@ -2202,8 +2212,13 @@ private[spark] object Utils extends Logging {
    * the latter should override the former (SPARK-9092).
    */
   def isDynamicAllocationEnabled(conf: SparkConf): Boolean = {
-    conf.getBoolean("spark.dynamicAllocation.enabled", false) &&
-      conf.getInt("spark.executor.instances", 0) == 0
+    val numExecutor = conf.getInt("spark.executor.instances", 0)
+    val dynamicAllocationEnabled = conf.getBoolean("spark.dynamicAllocation.enabled", false)
+    if (numExecutor != 0 && dynamicAllocationEnabled) {
+      logWarning("Dynamic Allocation and num executors both set, thus dynamic allocation disabled.")
+    }
+    numExecutor == 0 && dynamicAllocationEnabled &&
+      (!isLocalMaster(conf) || conf.getBoolean("spark.dynamicAllocation.testing", false))
   }
 
   def tryWithResource[R <: Closeable, T](createResource: => R)(f: R => T): T = {
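
Note the escape hatch: spark.dynamicAllocation.testing re-enables dynamic
allocation under a local master, which is what lets the test below exercise
this code path. A sketch of the semantics (assumes code compiled inside the
org.apache.spark package, since Utils is private[spark]):

    import org.apache.spark.SparkConf
    import org.apache.spark.util.Utils

    val conf = new SparkConf(loadDefaults = false)
      .set("spark.master", "local")
      .set("spark.dynamicAllocation.enabled", "true")
    assert(!Utils.isDynamicAllocationEnabled(conf))  // local master: disabled
    conf.set("spark.dynamicAllocation.testing", "true")
    assert(Utils.isDynamicAllocationEnabled(conf))   // testing flag overrides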

http://git-wip-us.apache.org/repos/asf/spark/blob/99fe8993/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala b/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
index 7c6778b..412c0ac 100644
--- a/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/UtilsSuite.scala
@@ -722,6 +722,7 @@ class UtilsSuite extends SparkFunSuite with ResetSystemProperties with Logging {
 
   test("isDynamicAllocationEnabled") {
     val conf = new SparkConf()
+    conf.set("spark.master", "yarn-client")
     assert(Utils.isDynamicAllocationEnabled(conf) === false)
     assert(Utils.isDynamicAllocationEnabled(
       conf.set("spark.dynamicAllocation.enabled", "false")) === false)
@@ -731,6 +732,8 @@ class UtilsSuite extends SparkFunSuite with ResetSystemProperties with Logging {
       conf.set("spark.executor.instances", "1")) === false)
     assert(Utils.isDynamicAllocationEnabled(
       conf.set("spark.executor.instances", "0")) === true)
+    assert(Utils.isDynamicAllocationEnabled(conf.set("spark.master", "local")) === false)
+    assert(Utils.isDynamicAllocationEnabled(conf.set("spark.dynamicAllocation.testing", "true")))
   }
 
   test("encodeFileNameToURIRawPath") {

