Repository: spark
Updated Branches:
  refs/heads/master 61b80d552 -> 28ad0f7b0


[SPARK-15681][CORE] allow lowercase or mixed case log level string when calling sc.setLogLevel

## What changes were proposed in this pull request?
Currently the `SparkContext` API `setLogLevel(level: String)` cannot handle a lowercase or mixed-case input string, even though `org.apache.log4j.Level.toLevel` accepts lowercase or mixed case.

This PR allows case-insensitive user input for the log level.
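
For illustration only, a minimal sketch of the behavior this change enables (the app name and `local` master are placeholder values, not part of the patch):

```scala
import org.apache.spark.{SparkConf, SparkContext}

val sc = new SparkContext(new SparkConf().setAppName("demo").setMaster("local"))

sc.setLogLevel("debug")   // previously rejected; now mapped to log4j DEBUG
sc.setLogLevel("INfo")    // mixed case is accepted as well
// sc.setLogLevel("bogus") // still invalid: require() throws IllegalArgumentException

sc.stop()
```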

## How was this patch tested?
A unit test case was added.

Author: Xin Wu <xi...@us.ibm.com>

Closes #13422 from xwu0226/reset_loglevel.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/28ad0f7b
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/28ad0f7b
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/28ad0f7b

Branch: refs/heads/master
Commit: 28ad0f7b0dc7bf24fac251c4f131aca74ba1c1d2
Parents: 61b80d5
Author: Xin Wu <xi...@us.ibm.com>
Authored: Fri Jun 3 14:26:48 2016 -0700
Committer: Marcelo Vanzin <van...@cloudera.com>
Committed: Fri Jun 3 14:26:48 2016 -0700

----------------------------------------------------------------------
 .../main/scala/org/apache/spark/SparkContext.scala  | 16 +++++++++-------
 .../scala/org/apache/spark/SparkContextSuite.scala  | 15 +++++++++++++++
 2 files changed, 24 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/28ad0f7b/core/src/main/scala/org/apache/spark/SparkContext.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index 5aba2a8..33b11ed 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -20,7 +20,7 @@ package org.apache.spark
 import java.io._
 import java.lang.reflect.Constructor
 import java.net.URI
-import java.util.{Arrays, Properties, ServiceLoader, UUID}
+import java.util.{Arrays, Locale, Properties, ServiceLoader, UUID}
 import java.util.concurrent.ConcurrentMap
 import java.util.concurrent.atomic.{AtomicBoolean, AtomicInteger, AtomicReference}
 
@@ -356,12 +356,12 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli
    * Valid log levels include: ALL, DEBUG, ERROR, FATAL, INFO, OFF, TRACE, WARN
    */
   def setLogLevel(logLevel: String) {
-    val validLevels = Seq("ALL", "DEBUG", "ERROR", "FATAL", "INFO", "OFF", "TRACE", "WARN")
-    if (!validLevels.contains(logLevel)) {
-      throw new IllegalArgumentException(
-        s"Supplied level $logLevel did not match one of: 
${validLevels.mkString(",")}")
-    }
-    Utils.setLogLevel(org.apache.log4j.Level.toLevel(logLevel))
+    // let's allow lowercase or mixed case too
+    val upperCased = logLevel.toUpperCase(Locale.ENGLISH)
+    require(SparkContext.VALID_LOG_LEVELS.contains(upperCased),
+      s"Supplied level $logLevel did not match one of:" +
+        s" ${SparkContext.VALID_LOG_LEVELS.mkString(",")}")
+    Utils.setLogLevel(org.apache.log4j.Level.toLevel(upperCased))
   }
 
   try {
@@ -2179,6 +2179,8 @@ class SparkContext(config: SparkConf) extends Logging with ExecutorAllocationCli
  * various Spark features.
  */
 object SparkContext extends Logging {
+  private val VALID_LOG_LEVELS =
+    Set("ALL", "DEBUG", "ERROR", "FATAL", "INFO", "OFF", "TRACE", "WARN")
 
   /**
   * Lock that guards access to global variables that track SparkContext construction.

http://git-wip-us.apache.org/repos/asf/spark/blob/28ad0f7b/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/SparkContextSuite.scala b/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
index ae66513..4fa3cab 100644
--- a/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
@@ -363,4 +363,19 @@ class SparkContextSuite extends SparkFunSuite with LocalSparkContext {
     sc.stop()
     assert(result == null)
   }
+
+  test("log level case-insensitive and reset log level") {
+    sc = new SparkContext(new SparkConf().setAppName("test").setMaster("local"))
+    val originalLevel = org.apache.log4j.Logger.getRootLogger().getLevel
+    try {
+      sc.setLogLevel("debug")
+      assert(org.apache.log4j.Logger.getRootLogger().getLevel === org.apache.log4j.Level.DEBUG)
+      sc.setLogLevel("INfo")
+      assert(org.apache.log4j.Logger.getRootLogger().getLevel === org.apache.log4j.Level.INFO)
+    } finally {
+      sc.setLogLevel(originalLevel.toString)
+      assert(org.apache.log4j.Logger.getRootLogger().getLevel === originalLevel)
+      sc.stop()
+    }
+  }
 }

