Repository: spark
Updated Branches:
  refs/heads/master 990efad1c -> 1ba967b25


[SPARK-21588][SQL] SQLContext.getConf(key, null) should return null

## What changes were proposed in this pull request?

Calling SQLContext.getConf(key, null) for a key that is not set in the conf and has 
no default value defined throws an NPE. This happens only when the conf entry has a 
value converter.
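
A minimal repro sketch (not part of this patch; the object name and local master 
setting are illustrative). `spark.sql.shuffle.partitions` is a registered SQLConf 
entry with an Int value converter, so before this change `getConfString` applied the 
converter to the null default and threw a NullPointerException:

```scala
import org.apache.spark.sql.SparkSession

object Spark21588Repro {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[1]")
      .appName("SPARK-21588 repro")
      .getOrCreate()

    // "spark.sql.shuffle.partitions" has an Int value converter; before this patch
    // the call below hit entry.valueConverter(null) and threw a NullPointerException
    // instead of returning the configured value (or null when the key is unset).
    val partitions = spark.conf.get("spark.sql.shuffle.partitions", null)
    println(partitions)

    spark.stop()
  }
}
```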

Added a null check on defaultValue inside SQLConf.getConfString to avoid calling 
entry.valueConverter(defaultValue) with a null argument.
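
After the change, the converter is only exercised for a real (non-null, 
non-"<undefined>") default. A sketch of the expected results, mirroring the added 
test and assuming an active SparkSession named `spark`:

```scala
// Post-patch behavior (mirrors the new SQLConfSuite test case):
spark.conf.set("spark.sql.shuffle.partitions", "1")

assert("1" == spark.conf.get("spark.sql.shuffle.partitions", null))            // no NPE
assert("1" == spark.conf.get("spark.sql.shuffle.partitions", "<undefined>"))   // sentinel default skips verification
assert(null == spark.conf.get("spark.sql.nonexistent", null))                  // unknown key, null default
assert("<undefined>" == spark.conf.get("spark.sql.nonexistent", "<undefined>"))
```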

## How was this patch tested?
Added a unit test.

Author: vinodkc <vinod.kc...@gmail.com>

Closes #18852 from vinodkc/br_Fix_SPARK-21588.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/1ba967b2
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/1ba967b2
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/1ba967b2

Branch: refs/heads/master
Commit: 1ba967b25e6d88be2db7a4e100ac3ead03a2ade9
Parents: 990efad
Author: vinodkc <vinod.kc...@gmail.com>
Authored: Sat Aug 5 23:04:39 2017 -0700
Committer: gatorsmile <gatorsm...@gmail.com>
Committed: Sat Aug 5 23:04:39 2017 -0700

----------------------------------------------------------------------
 .../scala/org/apache/spark/sql/internal/SQLConf.scala    | 10 ++++++----
 .../org/apache/spark/sql/internal/SQLConfSuite.scala     | 11 +++++++++++
 2 files changed, 17 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/1ba967b2/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
index a819cdd..ecb941c 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
@@ -1228,10 +1228,12 @@ class SQLConf extends Serializable with Logging {
    * not set yet, return `defaultValue`.
    */
   def getConfString(key: String, defaultValue: String): String = {
-    val entry = sqlConfEntries.get(key)
-    if (entry != null && defaultValue != "<undefined>") {
-      // Only verify configs in the SQLConf object
-      entry.valueConverter(defaultValue)
+    if (defaultValue != null && defaultValue != "<undefined>") {
+      val entry = sqlConfEntries.get(key)
+      if (entry != null) {
+        // Only verify configs in the SQLConf object
+        entry.valueConverter(defaultValue)
+      }
     }
     Option(settings.get(key)).getOrElse(defaultValue)
   }

http://git-wip-us.apache.org/repos/asf/spark/blob/1ba967b2/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala
index a283ff9..948f179 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala
@@ -270,4 +270,15 @@ class SQLConfSuite extends QueryTest with SharedSQLContext {
     val e2 = intercept[AnalysisException](spark.conf.unset(SCHEMA_STRING_LENGTH_THRESHOLD.key))
     assert(e2.message.contains("Cannot modify the value of a static config"))
   }
+
+  test("SPARK-21588 SQLContext.getConf(key, null) should return null") {
+    withSQLConf(SQLConf.SHUFFLE_PARTITIONS.key -> "1") {
+      assert("1" == spark.conf.get(SQLConf.SHUFFLE_PARTITIONS.key, null))
+      assert("1" == spark.conf.get(SQLConf.SHUFFLE_PARTITIONS.key, 
"<undefined>"))
+    }
+
+    assert(spark.conf.getOption("spark.sql.nonexistent").isEmpty)
+    assert(null == spark.conf.get("spark.sql.nonexistent", null))
+    assert("<undefined>" == spark.conf.get("spark.sql.nonexistent", 
"<undefined>"))
+  }
 }

