Repository: spark
Updated Branches:
  refs/heads/master 415d0a859 -> 676803963


[SPARK-12926][SQL] SQLContext to display warning message when non-sql configs 
are being set

Users unknowingly try to set core Spark configs in SQLContext but later realise 
that it didn't work. E.g. sqlContext.sql("SET 
spark.shuffle.memoryFraction=0.4"). This PR adds a warning message when such 
operations are done.

Author: Tejas Patil <tej...@fb.com>

Closes #10849 from tejasapatil/SPARK-12926.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/67680396
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/67680396
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/67680396

Branch: refs/heads/master
Commit: 676803963fcc08aa988aa6f14be3751314e006ca
Parents: 415d0a8
Author: Tejas Patil <tej...@fb.com>
Authored: Thu Jan 28 13:45:28 2016 -0800
Committer: Michael Armbrust <mich...@databricks.com>
Committed: Thu Jan 28 13:45:28 2016 -0800

----------------------------------------------------------------------
 .../src/main/scala/org/apache/spark/sql/SQLConf.scala | 14 +++++++++++---
 1 file changed, 11 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/67680396/sql/core/src/main/scala/org/apache/spark/sql/SQLConf.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SQLConf.scala 
b/sql/core/src/main/scala/org/apache/spark/sql/SQLConf.scala
index c9ba670..eb9da0b 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/SQLConf.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/SQLConf.scala
@@ -24,6 +24,7 @@ import scala.collection.JavaConverters._
 
 import org.apache.parquet.hadoop.ParquetOutputCommitter
 
+import org.apache.spark.Logging
 import org.apache.spark.sql.catalyst.CatalystConf
 import org.apache.spark.sql.catalyst.parser.ParserConf
 import org.apache.spark.util.Utils
@@ -519,7 +520,7 @@ private[spark] object SQLConf {
  *
  * SQLConf is thread-safe (internally synchronized, so safe to be used in 
multiple threads).
  */
-private[sql] class SQLConf extends Serializable with CatalystConf with 
ParserConf {
+private[sql] class SQLConf extends Serializable with CatalystConf with 
ParserConf with Logging {
   import SQLConf._
 
   /** Only low degree of contention is expected for conf, thus NOT using 
ConcurrentHashMap. */
@@ -628,7 +629,7 @@ private[sql] class SQLConf extends Serializable with 
CatalystConf with ParserCon
       // Only verify configs in the SQLConf object
       entry.valueConverter(value)
     }
-    settings.put(key, value)
+    setConfWithCheck(key, value)
   }
 
   /** Set the given Spark SQL configuration property. */
@@ -636,7 +637,7 @@ private[sql] class SQLConf extends Serializable with 
CatalystConf with ParserCon
     require(entry != null, "entry cannot be null")
     require(value != null, s"value cannot be null for key: ${entry.key}")
     require(sqlConfEntries.get(entry.key) == entry, s"$entry is not 
registered")
-    settings.put(entry.key, entry.stringConverter(value))
+    setConfWithCheck(entry.key, entry.stringConverter(value))
   }
 
   /** Return the value of Spark SQL configuration property for the given key. 
*/
@@ -699,6 +700,13 @@ private[sql] class SQLConf extends Serializable with 
CatalystConf with ParserCon
     }.toSeq
   }
 
+  private def setConfWithCheck(key: String, value: String): Unit = {
+    if (key.startsWith("spark.") && !key.startsWith("spark.sql.")) {
+      logWarning(s"Attempt to set non-Spark SQL config in SQLConf: key = $key, 
value = $value")
+    }
+    settings.put(key, value)
+  }
+
   private[spark] def unsetConf(key: String): Unit = {
     settings.remove(key)
   }


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to