This is an automated email from the ASF dual-hosted git repository.

mridulm80 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 2d7dc7c  [SPARK-36705][FOLLOW-UP] Fix unnecessary logWarning when PUSH_BASED_SHUFFLE_ENABLED is set to false
2d7dc7c is described below

commit 2d7dc7c7ce6d524a232f37927ca179f162ad9971
Author: Minchu Yang <miny...@minyang-mn3.linkedin.biz>
AuthorDate: Mon Sep 13 23:23:33 2021 -0500

    [SPARK-36705][FOLLOW-UP] Fix unnecessary logWarning when PUSH_BASED_SHUFFLE_ENABLED is set to false
    
    ### What changes were proposed in this pull request?
    
    Only log the warning when `PUSH_BASED_SHUFFLE_ENABLED` is set to true and `canDoPushBasedShuffle` is false.
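    
    For context, a minimal sketch (not part of this patch) of a configuration that would satisfy the `canDoPushBasedShuffle` check outside of testing. The keys below are assumed to be the user-facing names behind the config constants referenced in `Utils.isPushBasedShuffleEnabled`:
    
    ```scala
    import org.apache.spark.SparkConf
    
    // Sketch only: a configuration under which push-based shuffle could be enabled.
    val conf = new SparkConf()
      .setMaster("yarn")                              // SparkLauncher.SPARK_MASTER must be "yarn"
      .set("spark.shuffle.push.enabled", "true")      // PUSH_BASED_SHUFFLE_ENABLED
      .set("spark.shuffle.service.enabled", "true")   // SHUFFLE_SERVICE_ENABLED (external shuffle service)
      .set("spark.io.encryption.enabled", "false")    // IO_ENCRYPTION_ENABLED must be off (SPARK-36744)
      .set("spark.serializer",
        "org.apache.spark.serializer.KryoSerializer") // serializer supporting relocation of serialized objects
    ```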
    
    ### Why are the changes needed?
    
    Currently, the warning is still logged even when `PUSH_BASED_SHUFFLE_ENABLED` is set to false, which is unnecessary.
    
    ### Does this PR introduce _any_ user-facing change?
    
    No.
    
    ### How was this patch tested?
    
    Passed existing UT.
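    
    As an additional illustration (hypothetical, not part of this patch), a test-style sketch of the fixed behavior: with the flag off, the method returns false immediately and never reaches the warning. It assumes it runs from within the `org.apache.spark` package, since `Utils` is `private[spark]`, and that `spark.shuffle.push.enabled` is the key behind `PUSH_BASED_SHUFFLE_ENABLED`:
    
    ```scala
    import org.apache.spark.SparkConf
    import org.apache.spark.util.Utils
    
    // Flag off: expect an early false, with no "Push-based shuffle can only be enabled..." warning logged.
    val conf = new SparkConf().set("spark.shuffle.push.enabled", "false")
    assert(!Utils.isPushBasedShuffleEnabled(conf))
    ```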
    
    Closes #33984 from rmcyang/SPARK-36705-follow-up.
    
    Authored-by: Minchu Yang <miny...@minyang-mn3.linkedin.biz>
    Signed-off-by: Mridul Muralidharan <mridul<at>gmail.com>
---
 .../main/scala/org/apache/spark/util/Utils.scala   | 38 ++++++++++++----------
 1 file changed, 21 insertions(+), 17 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/util/Utils.scala b/core/src/main/scala/org/apache/spark/util/Utils.scala
index bbff56c..f894b83 100644
--- a/core/src/main/scala/org/apache/spark/util/Utils.scala
+++ b/core/src/main/scala/org/apache/spark/util/Utils.scala
@@ -2604,23 +2604,27 @@ private[spark] object Utils extends Logging {
    *   - serializer(such as KryoSerializer) supports relocation of serialized objects
    */
   def isPushBasedShuffleEnabled(conf: SparkConf): Boolean = {
-    val serializer = Utils.classForName(conf.get(SERIALIZER)).getConstructor(classOf[SparkConf])
-      .newInstance(conf).asInstanceOf[Serializer]
-    val canDoPushBasedShuffle =
-      conf.get(PUSH_BASED_SHUFFLE_ENABLED) &&
-        (conf.get(IS_TESTING).getOrElse(false) ||
-          (conf.get(SHUFFLE_SERVICE_ENABLED) &&
-            conf.get(SparkLauncher.SPARK_MASTER, null) == "yarn" &&
-            // TODO: [SPARK-36744] needs to support IO encryption for push-based shuffle
-            !conf.get(IO_ENCRYPTION_ENABLED) &&
-            serializer.supportsRelocationOfSerializedObjects))
-
-    if (!canDoPushBasedShuffle) {
-      logWarning("Push-based shuffle can only be enabled when the application 
is submitted" +
-        "to run in YARN mode, with external shuffle service enabled, IO 
encryption disabled, and" +
-        "relocation of serialized objects supported.")
-    }
-    canDoPushBasedShuffle
+    val pushBasedShuffleEnabled = conf.get(PUSH_BASED_SHUFFLE_ENABLED)
+    if (pushBasedShuffleEnabled) {
+      val serializer = Utils.classForName(conf.get(SERIALIZER)).getConstructor(classOf[SparkConf])
+        .newInstance(conf).asInstanceOf[Serializer]
+      val canDoPushBasedShuffle = conf.get(IS_TESTING).getOrElse(false) ||
+        (conf.get(SHUFFLE_SERVICE_ENABLED) &&
+          conf.get(SparkLauncher.SPARK_MASTER, null) == "yarn" &&
+          // TODO: [SPARK-36744] needs to support IO encryption for push-based shuffle
+          !conf.get(IO_ENCRYPTION_ENABLED) &&
+          serializer.supportsRelocationOfSerializedObjects)
+
+      if (!canDoPushBasedShuffle) {
+        logWarning("Push-based shuffle can only be enabled when the 
application is submitted " +
+          "to run in YARN mode, with external shuffle service enabled, IO 
encryption disabled, " +
+          "and relocation of serialized objects supported.")
+      }
+
+      canDoPushBasedShuffle
+    } else {
+      false
+    }
   }
 
   /**
