spark git commit: [SPARK-16131] initialize internal logger lazily in Scala preferred way

2016-06-22 Thread zsxwing
Repository: spark
Updated Branches:
  refs/heads/branch-2.0 1d3c56e77 -> e2eb8e002


[SPARK-16131] initialize internal logger lazily in Scala preferred way

## What changes were proposed in this pull request?

Initialize the logger instance lazily, in the Scala-preferred way

## How was this patch tested?

By running `./build/mvn clean test` locally

Author: Prajwal Tuladhar 

Closes #13842 from infynyxx/spark_internal_logger.

(cherry picked from commit 044971eca0ff3c2ce62afa665dbd3072d52cbbec)
Signed-off-by: Shixiong Zhu 


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/e2eb8e00
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/e2eb8e00
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/e2eb8e00

Branch: refs/heads/branch-2.0
Commit: e2eb8e002acb19fd266d2237baec31f74aa02ef8
Parents: 1d3c56e
Author: Prajwal Tuladhar 
Authored: Wed Jun 22 16:30:10 2016 -0700
Committer: Shixiong Zhu 
Committed: Wed Jun 22 16:30:18 2016 -0700

--
 .../scala/org/apache/spark/internal/Logging.scala | 14 --
 .../cluster/CoarseGrainedSchedulerBackend.scala   |  2 --
 2 files changed, 4 insertions(+), 12 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/spark/blob/e2eb8e00/core/src/main/scala/org/apache/spark/internal/Logging.scala
--
diff --git a/core/src/main/scala/org/apache/spark/internal/Logging.scala 
b/core/src/main/scala/org/apache/spark/internal/Logging.scala
index 66a0cfe..c51050c 100644
--- a/core/src/main/scala/org/apache/spark/internal/Logging.scala
+++ b/core/src/main/scala/org/apache/spark/internal/Logging.scala
@@ -32,7 +32,10 @@ private[spark] trait Logging {
 
   // Make the log field transient so that objects with Logging can
   // be serialized and used on another machine
-  @transient private var log_ : Logger = null
+  @transient lazy val log: Logger = {
+    initializeLogIfNecessary(false)
+    LoggerFactory.getLogger(logName)
+  }
 
   // Method to get the logger name for this object
   protected def logName = {
@@ -40,15 +43,6 @@ private[spark] trait Logging {
 this.getClass.getName.stripSuffix("$")
   }
 
-  // Method to get or create the logger for this object
-  protected def log: Logger = {
-    if (log_ == null) {
-      initializeLogIfNecessary(false)
-      log_ = LoggerFactory.getLogger(logName)
-    }
-    log_
-  }
-
   // Log methods that take only a String
   protected def logInfo(msg: => String) {
 if (log.isInfoEnabled) log.info(msg)

http://git-wip-us.apache.org/repos/asf/spark/blob/e2eb8e00/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedSchedulerBackend.scala
--
diff --git 
a/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedSchedulerBackend.scala
 
b/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedSchedulerBackend.scala
index 967c4d5..8259923 100644
--- 
a/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedSchedulerBackend.scala
+++ 
b/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedSchedulerBackend.scala
@@ -100,8 +100,6 @@ class CoarseGrainedSchedulerBackend(scheduler: 
TaskSchedulerImpl, val rpcEnv: Rp
 // instance across threads
 private val ser = SparkEnv.get.closureSerializer.newInstance()
 
-override protected def log = CoarseGrainedSchedulerBackend.this.log
-
 protected val addressToExecutorId = new HashMap[RpcAddress, String]
 
 private val reviveThread =


-
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org



spark git commit: [SPARK-16131] initialize internal logger lazily in Scala preferred way

2016-06-22 Thread zsxwing
Repository: spark
Updated Branches:
  refs/heads/master 857ecff1d -> 044971eca


[SPARK-16131] initialize internal logger lazily in Scala preferred way

## What changes were proposed in this pull request?

Initialize the logger instance lazily, in the Scala-preferred way

## How was this patch tested?

By running `./build/mvn clean test` locally

Author: Prajwal Tuladhar 

Closes #13842 from infynyxx/spark_internal_logger.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/044971ec
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/044971ec
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/044971ec

Branch: refs/heads/master
Commit: 044971eca0ff3c2ce62afa665dbd3072d52cbbec
Parents: 857ecff
Author: Prajwal Tuladhar 
Authored: Wed Jun 22 16:30:10 2016 -0700
Committer: Shixiong Zhu 
Committed: Wed Jun 22 16:30:10 2016 -0700

--
 .../scala/org/apache/spark/internal/Logging.scala | 14 --
 .../cluster/CoarseGrainedSchedulerBackend.scala   |  2 --
 2 files changed, 4 insertions(+), 12 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/spark/blob/044971ec/core/src/main/scala/org/apache/spark/internal/Logging.scala
--
diff --git a/core/src/main/scala/org/apache/spark/internal/Logging.scala 
b/core/src/main/scala/org/apache/spark/internal/Logging.scala
index 66a0cfe..c51050c 100644
--- a/core/src/main/scala/org/apache/spark/internal/Logging.scala
+++ b/core/src/main/scala/org/apache/spark/internal/Logging.scala
@@ -32,7 +32,10 @@ private[spark] trait Logging {
 
   // Make the log field transient so that objects with Logging can
   // be serialized and used on another machine
-  @transient private var log_ : Logger = null
+  @transient lazy val log: Logger = {
+    initializeLogIfNecessary(false)
+    LoggerFactory.getLogger(logName)
+  }
 
   // Method to get the logger name for this object
   protected def logName = {
@@ -40,15 +43,6 @@ private[spark] trait Logging {
 this.getClass.getName.stripSuffix("$")
   }
 
-  // Method to get or create the logger for this object
-  protected def log: Logger = {
-    if (log_ == null) {
-      initializeLogIfNecessary(false)
-      log_ = LoggerFactory.getLogger(logName)
-    }
-    log_
-  }
-
   // Log methods that take only a String
   protected def logInfo(msg: => String) {
 if (log.isInfoEnabled) log.info(msg)

http://git-wip-us.apache.org/repos/asf/spark/blob/044971ec/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedSchedulerBackend.scala
--
diff --git 
a/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedSchedulerBackend.scala
 
b/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedSchedulerBackend.scala
index 967c4d5..8259923 100644
--- 
a/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedSchedulerBackend.scala
+++ 
b/core/src/main/scala/org/apache/spark/scheduler/cluster/CoarseGrainedSchedulerBackend.scala
@@ -100,8 +100,6 @@ class CoarseGrainedSchedulerBackend(scheduler: 
TaskSchedulerImpl, val rpcEnv: Rp
 // instance across threads
 private val ser = SparkEnv.get.closureSerializer.newInstance()
 
-override protected def log = CoarseGrainedSchedulerBackend.this.log
-
 protected val addressToExecutorId = new HashMap[RpcAddress, String]
 
 private val reviveThread =


-
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org