Repository: spark
Updated Branches:
  refs/heads/master 857ecff1d -> 044971eca


[SPARK-16131] initialize internal logger lazily in the Scala-preferred way

## What changes were proposed in this pull request?

Initialize the logger instance lazily in the Scala-preferred way

## How was this patch tested?

By running `./build/mvn clean test` locally

Author: Prajwal Tuladhar <p...@infynyxx.com>

Closes #13842 from infynyxx/spark_internal_logger.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/044971ec
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/044971ec
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/044971ec

Branch: refs/heads/master
Commit: 044971eca0ff3c2ce62afa665dbd3072d52cbbec
Parents: 857ecff
Author: Prajwal Tuladhar <p...@infynyxx.com>
Authored: Wed Jun 22 16:30:10 2016 -0700
Committer: Shixiong Zhu <shixi...@databricks.com>
Committed: Wed Jun 22 16:30:10 2016 -0700

----------------------------------------------------------------------
 .../scala/org/apache/spark/internal/Logging.scala     | 14 ++++----------
 .../cluster/CoarseGrainedSchedulerBackend.scala       |  2 --
 2 files changed, 4 insertions(+), 12 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/044971ec/core/src/main/scala/org/apache/spark/internal/Logging.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/internal/Logging.scala 
b/core/src/main/scala/org/apache/spark/internal/Logging.scala
index 66a0cfe..c51050c 100644
--- a/core/src/main/scala/org/apache/spark/internal/Logging.scala
+++ b/core/src/main/scala/org/apache/spark/internal/Logging.scala
@@ -32,7 +32,10 @@ private[spark] trait Logging {
 
   // Make the log field transient so that objects with Logging can
   // be serialized and used on another machine
-  @transient private var log_ : Logger = null
+  @transient lazy val log: Logger = {
+    initializeLogIfNecessary(false)
+    LoggerFactory.getLogger(logName)
+  }
 
   // Method to get the logger name for this object
   protected def logName = {
@@ -40,15 +43,6 @@ private[spark] trait Logging {
     this.getClass.getName.stripSuffix("$")
   }
 
-  // Method to get or create the logger for this object
-  protected def log: Logger = {
-    if (log_ == null) {
-      initializeLogIfNecessary(false)
-      log_ = LoggerFactory.getLogger(logName)
-    }
-    log_
-  }
-
   // Log methods that take only a String
   protected def logInfo(msg: => String) {
     if (log.isInfoEnabled) log.info(msg)

http://git-wip-us.apache.org/repos/asf/spark/blob/044971ec/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedSchedulerBackend.scala
----------------------------------------------------------------------
diff --git 
a/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedSchedulerBackend.scala
 
b/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedSchedulerBackend.scala
index 967c4d5..8259923 100644
--- 
a/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedSchedulerBackend.scala
+++ 
b/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedSchedulerBackend.scala
@@ -100,8 +100,6 @@ class CoarseGrainedSchedulerBackend(scheduler: 
TaskSchedulerImpl, val rpcEnv: Rp
     // instance across threads
     private val ser = SparkEnv.get.closureSerializer.newInstance()
 
-    override protected def log = CoarseGrainedSchedulerBackend.this.log
-
     protected val addressToExecutorId = new HashMap[RpcAddress, String]
 
     private val reviveThread =


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to