This is an automated email from the ASF dual-hosted git repository.

gurwls223 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 0042b676c232 [SPARK-47965][CORE] Avoid orNull in TypedConfigBuilder and OptionalConfigEntry
0042b676c232 is described below

commit 0042b676c23220a73f2672aa42d5306d3878bd05
Author: Hyukjin Kwon <gurwls...@apache.org>
AuthorDate: Wed Apr 24 19:37:04 2024 +0900

    [SPARK-47965][CORE] Avoid orNull in TypedConfigBuilder and OptionalConfigEntry
    
    ### What changes were proposed in this pull request?
    
    This PR proposes to avoid `orNull` in `TypedConfigBuilder`. Keys and values cannot be set to `null` anyway; see `RuntimeConfig` and `SparkConf`.
    
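    A minimal sketch of the converter change (illustration only, not part of the patch):

    ```scala
    // Old default string converter: routed through Option, so a null value
    // was passed through as null instead of throwing.
    def oldConverter[T]: T => String = (v: T) => Option(v).map(_.toString).orNull

    // New default string converter: a plain toString, since createWithDefault
    // now rejects null up front with assert(default != null, "Use createOptional.").
    def newConverter[T]: T => String = (v: T) => v.toString
    ```
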
    It also uses `ConfigEntry.UNDEFINED` in `OptionalConfigEntry` instead of `null`.
    
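    A similar sketch for the `OptionalConfigEntry` side, assuming `ConfigEntry.UNDEFINED` is the `"<undefined>"` placeholder string:

    ```scala
    val UNDEFINED = "<undefined>" // assumed value of ConfigEntry.UNDEFINED

    // An unset optional entry now renders as an explicit placeholder, not null.
    def stringConverter(v: Option[String]): String = v.getOrElse(UNDEFINED)

    // stringConverter(None)        == "<undefined>"  (previously null)
    // stringConverter(Some("abc")) == "abc"
    ```
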
    ### Why are the changes needed?
    
    For code cleanup.
    
    ### Does this PR introduce _any_ user-facing change?
    
    No.
    
    ### How was this patch tested?
    
    The CI in this PR should verify the changes.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No.
    
    Closes #46197 from HyukjinKwon/SPARK-47965.
    
    Authored-by: Hyukjin Kwon <gurwls...@apache.org>
    Signed-off-by: Hyukjin Kwon <gurwls...@apache.org>
---
 .../org/apache/spark/internal/config/ConfigBuilder.scala   |  3 ++-
 .../org/apache/spark/internal/config/ConfigEntry.scala     |  2 +-
 .../apache/spark/internal/config/ConfigEntrySuite.scala    | 14 +-------------
 .../main/scala/org/apache/spark/deploy/yarn/Client.scala   |  4 ++--
 .../main/scala/org/apache/spark/deploy/yarn/config.scala   |  4 ++--
 .../test/scala/org/apache/spark/sql/SetCommandSuite.scala  | 12 ------------
 .../org/apache/spark/sql/internal/SQLConfEntrySuite.scala  |  2 +-
 7 files changed, 9 insertions(+), 32 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/internal/config/ConfigBuilder.scala b/core/src/main/scala/org/apache/spark/internal/config/ConfigBuilder.scala
index 1f19e9444d38..f50cc0f88842 100644
--- a/core/src/main/scala/org/apache/spark/internal/config/ConfigBuilder.scala
+++ b/core/src/main/scala/org/apache/spark/internal/config/ConfigBuilder.scala
@@ -94,7 +94,7 @@ private[spark] class TypedConfigBuilder[T](
   import ConfigHelpers._
 
   def this(parent: ConfigBuilder, converter: String => T) = {
-    this(parent, converter, Option(_).map(_.toString).orNull)
+    this(parent, converter, { v: T => v.toString })
   }
 
   /** Apply a transformation to the user-provided values of the config entry. */
@@ -157,6 +157,7 @@ private[spark] class TypedConfigBuilder[T](
 
   /** Creates a [[ConfigEntry]] that has a default value. */
   def createWithDefault(default: T): ConfigEntry[T] = {
+    assert(default != null, "Use createOptional.")
     // Treat "String" as a special case, so that both createWithDefault and createWithDefaultString
     // behave the same w.r.t. variable expansion of default values.
     default match {
diff --git a/core/src/main/scala/org/apache/spark/internal/config/ConfigEntry.scala b/core/src/main/scala/org/apache/spark/internal/config/ConfigEntry.scala
index a295ef06a637..c07f2528ee70 100644
--- a/core/src/main/scala/org/apache/spark/internal/config/ConfigEntry.scala
+++ b/core/src/main/scala/org/apache/spark/internal/config/ConfigEntry.scala
@@ -227,7 +227,7 @@ private[spark] class OptionalConfigEntry[T](
     prependSeparator,
     alternatives,
     s => Some(rawValueConverter(s)),
-    v => v.map(rawStringConverter).orNull,
+    v => v.map(rawStringConverter).getOrElse(ConfigEntry.UNDEFINED),
     doc,
     isPublic,
     version
diff --git a/core/src/test/scala/org/apache/spark/internal/config/ConfigEntrySuite.scala b/core/src/test/scala/org/apache/spark/internal/config/ConfigEntrySuite.scala
index 38063c47ec96..ae9973508405 100644
--- a/core/src/test/scala/org/apache/spark/internal/config/ConfigEntrySuite.scala
+++ b/core/src/test/scala/org/apache/spark/internal/config/ConfigEntrySuite.scala
@@ -196,12 +196,6 @@ class ConfigEntrySuite extends SparkFunSuite {
     assert(conversionError.getMessage === s"${conversionTest.key} should be double, but was abc")
   }
 
-  test("default value handling is null-safe") {
-    val conf = new SparkConf()
-    val stringConf = ConfigBuilder(testKey("string")).stringConf.createWithDefault(null)
-    assert(conf.get(stringConf) === null)
-  }
-
   test("variable expansion of spark config entries") {
     val env = Map("ENV1" -> "env1")
     val conf = new SparkConfWithEnv(env)
@@ -220,7 +214,7 @@ class ConfigEntrySuite extends SparkFunSuite {
 
     val refConf = ConfigBuilder(testKey("configReferenceTest"))
       .stringConf
-      .createWithDefault(null)
+      .createWithDefault("")
 
     def ref(entry: ConfigEntry[_]): String = "${" + entry.key + "}"
 
@@ -250,12 +244,6 @@ class ConfigEntrySuite extends SparkFunSuite {
     // Make sure SparkConf's env override works.
     conf.set(refConf, "${env:ENV1}")
     assert(conf.get(refConf) === env("ENV1"))
-
-    // Conf with null default value is not expanded.
-    val nullConf = ConfigBuilder(testKey("nullString"))
-      .stringConf
-      .createWithDefault(null)
-    testEntryRef(nullConf, ref(nullConf))
   }
 
   test("conf entry : default function") {
diff --git a/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala b/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala
index 214a79559755..8d6a014d16bf 100644
--- a/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala
+++ b/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/Client.scala
@@ -1683,8 +1683,8 @@ private[spark] object Client extends Logging {
   def getClusterPath(conf: SparkConf, path: String): String = {
     val localPath = conf.get(GATEWAY_ROOT_PATH)
     val clusterPath = conf.get(REPLACEMENT_ROOT_PATH)
-    if (localPath != null && clusterPath != null) {
-      path.replace(localPath, clusterPath)
+    if (localPath.isDefined && clusterPath.isDefined) {
+      path.replace(localPath.get, clusterPath.get)
     } else {
       path
     }
diff --git a/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/config.scala b/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/config.scala
index c11961008019..51e5e0bfb908 100644
--- a/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/config.scala
+++ b/resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/config.scala
@@ -121,14 +121,14 @@ package object config extends Logging {
       "with the corresponding path in cluster machines.")
     .version("1.5.0")
     .stringConf
-    .createWithDefault(null)
+    .createOptional
 
   private[spark] val REPLACEMENT_ROOT_PATH = ConfigBuilder("spark.yarn.config.replacementPath")
     .doc(s"Path to use as a replacement for ${GATEWAY_ROOT_PATH.key} when launching processes " +
       "in the YARN cluster.")
     .version("1.5.0")
     .stringConf
-    .createWithDefault(null)
+    .createOptional
 
   private[spark] val QUEUE_NAME = ConfigBuilder("spark.yarn.queue")
     .version("1.0.0")
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SetCommandSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SetCommandSuite.scala
index 2c803ceffe95..a8b359f308a2 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SetCommandSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SetCommandSuite.scala
@@ -83,18 +83,6 @@ class SetCommandSuite extends QueryTest with SharedSparkSession with ResetSystem
     spark.sessionState.conf.clear()
   }
 
-  test("SPARK-19218 `SET -v` should not fail with null value configuration") {
-    import SQLConf._
-    val confEntry = buildConf("spark.test").doc("doc").stringConf.createWithDefault(null)
-
-    try {
-      val result = sql("SET -v").collect()
-      assert(result === result.sortBy(_.getString(0)))
-    } finally {
-      SQLConf.unregister(confEntry)
-    }
-  }
-
   test("SET commands with illegal or inappropriate argument") {
     spark.sessionState.conf.clear()
     // Set negative mapred.reduce.tasks for automatically determining
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfEntrySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfEntrySuite.scala
index cd6894ee4371..26011af37bf4 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfEntrySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfEntrySuite.scala
@@ -107,7 +107,7 @@ class SQLConfEntrySuite extends SparkFunSuite {
 
   test("stringConf") {
     val key = "spark.sql.SQLConfEntrySuite.string"
-    val confEntry = buildConf(key).stringConf.createWithDefault(null)
+    val confEntry = buildConf(key).stringConf.createWithDefault("")
     assert(conf.getConf(confEntry, "abc") === "abc")
 
     conf.setConf(confEntry, "abcd")

