This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 3363c2af3f6a [MINOR][CORE] Validate spark.deploy.defaultCores value during config setting
3363c2af3f6a is described below

commit 3363c2af3f6a59363135451d251f25e328a4fddf
Author: Dongjoon Hyun <dh...@apple.com>
AuthorDate: Sat Nov 4 00:23:33 2023 -0700

    [MINOR][CORE] Validate spark.deploy.defaultCores value during config setting
    
    ### What changes were proposed in this pull request?
    
    This aims to move the `spark.deploy.defaultCores` validation logic to config-setting time.
    
    ### Why are the changes needed?
    
    To ensure the value is in the valid range by checking it early, before `Master` initialization logic runs.
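
    As a minimal sketch of the pattern (illustrative names only; Spark's real
    `ConfigBuilder`/`ConfigEntry` machinery in `org.apache.spark.internal.config`
    is richer), a validation predicate that travels with the entry rejects an
    out-of-range value the moment the entry is resolved:

    ```scala
    // Hypothetical, simplified model of a validated config entry.
    final case class IntEntry(
        key: String,
        default: Int,
        check: Int => Boolean,
        errorMsg: String) {

      // Resolve the entry from raw string settings, validating eagerly: an
      // out-of-range value fails here, at resolution time, instead of inside
      // whichever component happens to read it first.
      def readFrom(settings: Map[String, String]): Int = {
        val value = settings.get(key).map(_.toInt).getOrElse(default)
        require(check(value), s"'$value' in $key is invalid. $errorMsg")
        value
      }
    }

    object EarlyValidation extends App {
      val defaultCores = IntEntry(
        "spark.deploy.defaultCores", Int.MaxValue, _ > 0,
        "spark.deploy.defaultCores must be positive.")

      println(defaultCores.readFrom(Map("spark.deploy.defaultCores" -> "8"))) // 8
      // Throws IllegalArgumentException before any component logic runs:
      defaultCores.readFrom(Map("spark.deploy.defaultCores" -> "0"))
    }
    ```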
    
    ### Does this PR introduce _any_ user-facing change?
    
    No. `Spark Master` fails to start with an invalid value in both cases, *before* and *after* this change.
    
    ### How was this patch tested?
    
    Manual review.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No.
    
    Closes #43655 from dongjoon-hyun/spark.deploy.defaultCores.
    
    Authored-by: Dongjoon Hyun <dh...@apple.com>
    Signed-off-by: Dongjoon Hyun <dh...@apple.com>
---
 core/src/main/scala/org/apache/spark/deploy/master/Master.scala   | 5 +----
 core/src/main/scala/org/apache/spark/internal/config/Deploy.scala | 1 +
 2 files changed, 2 insertions(+), 4 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/deploy/master/Master.scala b/core/src/main/scala/org/apache/spark/deploy/master/Master.scala
index 63d981c5fde8..e63d72ebb40d 100644
--- a/core/src/main/scala/org/apache/spark/deploy/master/Master.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/Master.scala
@@ -24,7 +24,7 @@ import java.util.concurrent.{ScheduledFuture, TimeUnit}
 import scala.collection.mutable.{ArrayBuffer, HashMap, HashSet}
 import scala.util.Random
 
-import org.apache.spark.{SecurityManager, SparkConf, SparkException}
+import org.apache.spark.{SecurityManager, SparkConf}
 import org.apache.spark.deploy.{ApplicationDescription, DriverDescription, ExecutorState}
 import org.apache.spark.deploy.DeployMessages._
 import org.apache.spark.deploy.master.DriverState.DriverState
@@ -119,9 +119,6 @@ private[deploy] class Master(
   // Default maxCores for applications that don't specify it (i.e. pass Int.MaxValue)
   private val defaultCores = conf.get(DEFAULT_CORES)
   val reverseProxy = conf.get(UI_REVERSE_PROXY)
-  if (defaultCores < 1) {
-    throw new SparkException(s"${DEFAULT_CORES.key} must be positive")
-  }
   val historyServerUrl = conf.get(MASTER_UI_HISTORY_SERVER_URL)
 
   // Alternative application submission gateway that is stable across Spark versions
diff --git a/core/src/main/scala/org/apache/spark/internal/config/Deploy.scala b/core/src/main/scala/org/apache/spark/internal/config/Deploy.scala
index 906ec0fc9973..7b35e92022ae 100644
--- a/core/src/main/scala/org/apache/spark/internal/config/Deploy.scala
+++ b/core/src/main/scala/org/apache/spark/internal/config/Deploy.scala
@@ -73,6 +73,7 @@ private[spark] object Deploy {
   val DEFAULT_CORES = ConfigBuilder("spark.deploy.defaultCores")
     .version("0.9.0")
     .intConf
+    .checkValue(_ > 0, "spark.deploy.defaultCores must be positive.")
     .createWithDefault(Int.MaxValue)
 
   val MAX_DRIVERS = ConfigBuilder("spark.deploy.maxDrivers")
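
To see what the one-line `checkValue` addition changes, a hypothetical check
(the object name and placement are illustrative, not part of this patch;
`Deploy` is `private[spark]`, so the snippet must compile under the
`org.apache.spark` package tree):

```scala
package org.apache.spark.deploy

import org.apache.spark.SparkConf
import org.apache.spark.internal.config.Deploy.DEFAULT_CORES

object DefaultCoresCheck {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().set(DEFAULT_CORES.key, "0")
    // Before this patch: conf.get returned 0 and Master's own check then
    // threw SparkException("spark.deploy.defaultCores must be positive").
    // After this patch: checkValue rejects the value here, with an
    // IllegalArgumentException, before Master initialization ever runs.
    conf.get(DEFAULT_CORES)
  }
}
```

Moving the check into the entry also means every reader of `DEFAULT_CORES`
gets the same guarantee, not just `Master`.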

