This is an automated email from the ASF dual-hosted git repository.

gurwls223 pushed a commit to branch branch-3.3
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-3.3 by this push:
     new 94a043d6629 [SPARK-45670][CORE][3.3] SparkSubmit does not support `--total-executor-cores` when deploying on K8s
94a043d6629 is described below

commit 94a043d6629af2c1ddd439e84b2685ae2827a852
Author: Cheng Pan <cheng...@apache.org>
AuthorDate: Fri Oct 27 15:24:28 2023 +0900

    [SPARK-45670][CORE][3.3] SparkSubmit does not support `--total-executor-cores` when deploying on K8s
    
    This is the cherry-pick of https://github.com/apache/spark/pull/43536 for branch-3.3.
    
    ### What changes were proposed in this pull request?
    
    Remove Kubernetes from the list of cluster managers that support `--total-executor-cores` in SparkSubmit.
    
    ### Why are the changes needed?
    
    `--total-executor-cores` does not take effect in Spark on K8s; [the comments on the original PR](https://github.com/apache/spark/pull/19717#discussion_r154568773) also confirm this.
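
    For context, a minimal sketch of the configuration involved (the keys are real Spark settings; the master URL and numeric values below are made-up placeholders): `--total-executor-cores N` maps to `spark.cores.max`, which only the standalone and Mesos schedulers honor, while on K8s the executor footprint is driven by `spark.executor.instances` and `spark.executor.cores`.

    ```scala
    import org.apache.spark.SparkConf

    // Illustrative only: how executor sizing is expressed on K8s.
    val conf = new SparkConf()
      .setMaster("k8s://https://example.com:6443") // placeholder API server URL
      .set("spark.executor.instances", "5")        // number of executors on K8s
      .set("spark.executor.cores", "4")            // cores per executor on K8s
    // .set("spark.cores.max", "20")                // what --total-executor-cores sets; has no effect on K8s
    ```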
    
    ### Does this PR introduce _any_ user-facing change?
    
    Yes, the output of `spark-submit --help` changed:
    
    ```patch
    ...
    -  Spark standalone, Mesos and Kubernetes only:
    +  Spark standalone and Mesos only:
        --total-executor-cores NUM  Total cores for all executors.
    ...
    ```

    ### How was this patch tested?
    
    Passed GitHub Actions (GA) and code review.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No
    
    Closes #43548 from pan3793/SPARK-45670-3.3.
    
    Authored-by: Cheng Pan <cheng...@apache.org>
    Signed-off-by: Hyukjin Kwon <gurwls...@apache.org>
---
 core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala          | 2 +-
 core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
index d0f7805efea..93e15c9d7de 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
@@ -647,7 +647,7 @@ private[spark] class SparkSubmit extends Logging {
         confKey = EXECUTOR_CORES.key),
       OptionAssigner(args.executorMemory, STANDALONE | MESOS | YARN | KUBERNETES, ALL_DEPLOY_MODES,
         confKey = EXECUTOR_MEMORY.key),
-      OptionAssigner(args.totalExecutorCores, STANDALONE | MESOS | KUBERNETES, ALL_DEPLOY_MODES,
+      OptionAssigner(args.totalExecutorCores, STANDALONE | MESOS, ALL_DEPLOY_MODES,
         confKey = CORES_MAX.key),
       OptionAssigner(args.files, LOCAL | STANDALONE | MESOS | KUBERNETES, ALL_DEPLOY_MODES,
         confKey = FILES.key),
diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
index 9a5123f218a..a9184b7642c 100644
--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
@@ -550,7 +550,7 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
         |  --kill SUBMISSION_ID        If given, kills the driver specified.
         |  --status SUBMISSION_ID      If given, requests the status of the driver specified.
         |
-        | Spark standalone, Mesos and Kubernetes only:
+        | Spark standalone and Mesos only:
         |  --total-executor-cores NUM  Total cores for all executors.
         |
         | Spark standalone, YARN and Kubernetes only:


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
