This is an automated email from the ASF dual-hosted git repository.

srowen pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new ec4d0ced0b9 [SPARK-40163][SQL] feat: SparkSession.config(Map)
ec4d0ced0b9 is described below

commit ec4d0ced0b912df16adff24196ac3c533588307f
Author: seunggabi <seungg...@naver.com>
AuthorDate: Sun Aug 21 14:28:49 2022 -0500

    [SPARK-40163][SQL] feat: SparkSession.config(Map)
    
    > https://issues.apache.org/jira/browse/SPARK-40163
    
    ### What changes were proposed in this pull request?
    - Adds `config(Map[String, Any])` and `config(java.util.Map[String, Any])` overloads to `SparkSession.Builder` so a whole map of options can be set in one call.
    - as-is (caller code before the change):
    ```kotlin
        private fun config(builder: SparkSession.Builder): SparkSession.Builder {
            val map = YamlUtils.read(this::class.java, "spark", Extension.YAML)
    
            var b = builder
            map.keys.forEach {
                val k = it
                val v = map[k]
    
                b = when (v) {
                    is Long -> b.config(k, v)
                    is String -> b.config(k, v)
                    is Double -> b.config(k, v)
                    is Boolean -> b.config(k, v)
                    else -> b
                }
            }
    
            return b
        }
    }
    ```
    - to-be (caller code after the change):
    ```kotlin
        private fun config(builder: SparkSession.Builder): SparkSession.Builder {
            val map = YamlUtils.read(this::class.java, "spark", Extension.YAML)
    
            return builder.config(map)
        }
    }
    ```
    
    ### Why are the changes needed?
    - Every supported value type (string, boolean, long, double) is converted to a string via `toString` when it is stored in the builder's options.
    - Callers can therefore pass a single map instead of one `config(key, value)` call per entry, which keeps the calling code simple (see the sketch below).
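    
    As a rough usage sketch from the Scala side (not part of this commit; the `spark.example.*` keys and app name are made up for illustration), the new overload accepts mixed value types and stores each one as its `toString` form:
    ```scala
    import org.apache.spark.sql.SparkSession
    
    // Hypothetical option map; the keys are illustrative only.
    val options: Map[String, Any] = Map(
      "spark.example.string" -> "value",
      "spark.example.boolean" -> true,  // stored as "true"
      "spark.example.long" -> 1L,       // stored as "1"
      "spark.example.double" -> 0.5     // stored as "0.5"
    )
    
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("config-map-sketch")
      .config(options)                  // one call instead of one config(k, v) per entry
      .getOrCreate()
    
    // All values come back from the runtime conf as strings.
    assert(spark.conf.get("spark.example.long") == "1")
    ```
    The Java-friendly `config(java.util.Map[String, Any])` overload (exercised by the new `JavaSparkSessionSuite`) converts the map to a Scala `Map` and delegates to the same method.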
    
    ### Does this PR introduce _any_ user-facing change?
    Yes. It adds public `config(Map[String, Any])` and `config(java.util.Map[String, Any])` overloads to `SparkSession.Builder`.
    
    ### How was this patch tested?
    - Added a test in `SparkSessionBuilderSuite` and a new `JavaSparkSessionSuite` covering the Scala and Java overloads.
    
    Closes #37478 from seunggabi/feat/spark-session-config-by-object.
    
    Authored-by: seunggabi <seungg...@naver.com>
    Signed-off-by: Sean Owen <sro...@gmail.com>
---
 .../scala/org/apache/spark/sql/SparkSession.scala  | 25 ++++++++++
 .../apache/spark/sql/JavaSparkSessionSuite.java    | 56 ++++++++++++++++++++++
 .../spark/sql/SparkSessionBuilderSuite.scala       | 18 +++++++
 3 files changed, 99 insertions(+)

diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
index 5b212c3d595..4f5c89a796a 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
@@ -859,6 +859,31 @@ object SparkSession extends Logging {
       this
     }
 
+    /**
+     * Sets a config option. Options set using this method are automatically propagated to
+     * both `SparkConf` and SparkSession's own configuration.
+     *
+     * @since 3.4.0
+     */
+    def config(map: Map[String, Any]): Builder = synchronized {
+      map.foreach {
+        kv: (String, Any) => {
+          options += kv._1 -> kv._2.toString
+        }
+      }
+      this
+    }
+
+    /**
+     * Sets a config option. Options set using this method are automatically propagated to
+     * both `SparkConf` and SparkSession's own configuration.
+     *
+     * @since 3.4.0
+     */
+    def config(map: java.util.Map[String, Any]): Builder = synchronized {
+      config(map.asScala.toMap)
+    }
+
     /**
      * Sets a list of config options based on the given `SparkConf`.
      *
diff --git a/sql/core/src/test/java/test/org/apache/spark/sql/JavaSparkSessionSuite.java b/sql/core/src/test/java/test/org/apache/spark/sql/JavaSparkSessionSuite.java
new file mode 100644
index 00000000000..00f744f4d86
--- /dev/null
+++ b/sql/core/src/test/java/test/org/apache/spark/sql/JavaSparkSessionSuite.java
@@ -0,0 +1,56 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package test.org.apache.spark.sql;
+
+import org.apache.spark.sql.*;
+import org.junit.After;
+import org.junit.Test;
+
+import java.util.HashMap;
+import java.util.Map;
+
+public class JavaSparkSessionSuite {
+  private SparkSession spark;
+
+  @After
+  public void tearDown() {
+    spark.stop();
+    spark = null;
+  }
+
+  @Test
+  public void config() {
+    // SPARK-40163: SparkSession.config(Map)
+    Map<String, Object> map = new HashMap<String, Object>() {{
+      put("string", "");
+      put("boolean", true);
+      put("double", 0.0);
+      put("long", 0L);
+    }};
+
+    spark = SparkSession.builder()
+      .master("local[*]")
+      .appName("testing")
+      .config(map)
+      .getOrCreate();
+
+    for (Map.Entry<String, Object> e : map.entrySet()) {
+      assert(spark.conf().get(e.getKey()).equals(e.getValue().toString()));
+    }
+  }
+}
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionBuilderSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionBuilderSuite.scala
index 7789d168792..54e83ac1fcb 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionBuilderSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionBuilderSuite.scala
@@ -569,4 +569,22 @@ class SparkSessionBuilderSuite extends SparkFunSuite with Eventually {
       !logs.exists(_.contains("spark.sql.ansi.enabled\"")),
       s"'spark.sql.ansi.enabled' existed in:\n${logs.mkString("\n")}")
   }
+
+  test("SPARK-40163: SparkSession.config(Map)") {
+    val map: Map[String, Any] = Map(
+      "string" -> "",
+      "boolean" -> true,
+      "double" -> 0.0,
+      "long" -> 0L
+    )
+
+    val session = SparkSession.builder()
+      .master("local")
+      .config(map)
+      .getOrCreate()
+
+    for (e <- map) {
+      assert(session.conf.get(e._1) == e._2.toString)
+    }
+  }
 }

