Repository: spark
Updated Branches:
  refs/heads/master 80c6d35a3 -> 4a2b15f0a


[SPARK-24241][SUBMIT] Do not fail fast when dynamic resource allocation enabled 
with 0 executor

## What changes were proposed in this pull request?
```
~/spark-2.3.0-bin-hadoop2.7$ bin/spark-sql --num-executors 0 --conf 
spark.dynamicAllocation.enabled=true
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option PermSize=1024m; 
support was removed in 8.0
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=1024m; 
support was removed in 8.0
Error: Number of executors must be a positive number
Run with --help for usage help or --verbose for debug output
```

Actually, before this change we could already start up with a minimum executor number of 0 when dynamic allocation was enabled, so spark-submit should not fail fast on `--num-executors 0` in that case.

## How was this patch tested?

unit test added

Author: Kent Yao <yaooq...@hotmail.com>

Closes #21290 from yaooqinn/SPARK-24241.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/4a2b15f0
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/4a2b15f0
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/4a2b15f0

Branch: refs/heads/master
Commit: 4a2b15f0af400c71b7f20b2048f38a8b74d43dfa
Parents: 80c6d35
Author: Kent Yao <yaooq...@hotmail.com>
Authored: Tue May 15 16:04:17 2018 +0800
Committer: jerryshao <ss...@hortonworks.com>
Committed: Tue May 15 16:04:17 2018 +0800

----------------------------------------------------------------------
 .../spark/deploy/SparkSubmitArguments.scala     |  7 +++++--
 .../apache/spark/deploy/SparkSubmitSuite.scala  | 20 ++++++++++++++++++++
 2 files changed, 25 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/4a2b15f0/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
----------------------------------------------------------------------
diff --git 
a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala 
b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
index 0733fdb..fed4e0a 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
@@ -36,7 +36,6 @@ import org.apache.spark.launcher.SparkSubmitArgumentsParser
 import org.apache.spark.network.util.JavaUtils
 import org.apache.spark.util.Utils
 
-
 /**
  * Parses and encapsulates arguments from the spark-submit script.
  * The env argument is used for testing.
@@ -76,6 +75,7 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], 
env: Map[String, S
   var proxyUser: String = null
   var principal: String = null
   var keytab: String = null
+  private var dynamicAllocationEnabled: Boolean = false
 
   // Standalone cluster mode only
   var supervise: Boolean = false
@@ -198,6 +198,8 @@ private[deploy] class SparkSubmitArguments(args: 
Seq[String], env: Map[String, S
     queue = 
Option(queue).orElse(sparkProperties.get("spark.yarn.queue")).orNull
     keytab = 
Option(keytab).orElse(sparkProperties.get("spark.yarn.keytab")).orNull
     principal = 
Option(principal).orElse(sparkProperties.get("spark.yarn.principal")).orNull
+    dynamicAllocationEnabled =
+      
sparkProperties.get("spark.dynamicAllocation.enabled").exists("true".equalsIgnoreCase)
 
     // Try to set main class from JAR if no --class argument is given
     if (mainClass == null && !isPython && !isR && primaryResource != null) {
@@ -274,7 +276,8 @@ private[deploy] class SparkSubmitArguments(args: 
Seq[String], env: Map[String, S
     if (totalExecutorCores != null && 
Try(totalExecutorCores.toInt).getOrElse(-1) <= 0) {
       error("Total executor cores must be a positive number")
     }
-    if (numExecutors != null && Try(numExecutors.toInt).getOrElse(-1) <= 0) {
+    if (!dynamicAllocationEnabled &&
+      numExecutors != null && Try(numExecutors.toInt).getOrElse(-1) <= 0) {
       error("Number of executors must be a positive number")
     }
     if (pyFiles != null && !isPython) {

http://git-wip-us.apache.org/repos/asf/spark/blob/4a2b15f0/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala 
b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
index 7451e07..4328695 100644
--- a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitSuite.scala
@@ -180,6 +180,26 @@ class SparkSubmitSuite
     appArgs.toString should include ("thequeue")
   }
 
+  test("SPARK-24241: do not fail fast if executor num is 0 when dynamic 
allocation is enabled") {
+    val clArgs1 = Seq(
+      "--name", "myApp",
+      "--class", "Foo",
+      "--num-executors", "0",
+      "--conf", "spark.dynamicAllocation.enabled=true",
+      "thejar.jar")
+    new SparkSubmitArguments(clArgs1)
+
+    val clArgs2 = Seq(
+      "--name", "myApp",
+      "--class", "Foo",
+      "--num-executors", "0",
+      "--conf", "spark.dynamicAllocation.enabled=false",
+      "thejar.jar")
+
+    val e = intercept[SparkException](new SparkSubmitArguments(clArgs2))
+    assert(e.getMessage.contains("Number of executors must be a positive 
number"))
+  }
+
   test("specify deploy mode through configuration") {
     val clArgs = Seq(
       "--master", "yarn",


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to