Repository: spark
Updated Branches:
  refs/heads/branch-1.5 9b6216171 -> d78f1bc45


[SPARK-12049][CORE] User JVM shutdown hook can cause deadlock at shutdown

Avoid potential deadlock with a user app's shutdown hook thread by more 
narrowly synchronizing access to 'hooks'

Author: Sean Owen <so...@cloudera.com>

Closes #10042 from srowen/SPARK-12049.

(cherry picked from commit 96bf468c7860be317c20ccacf259910968d2dc83)
Signed-off-by: Marcelo Vanzin <van...@cloudera.com>


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/d78f1bc4
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/d78f1bc4
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/d78f1bc4

Branch: refs/heads/branch-1.5
Commit: d78f1bc45ff04637761bcfcc565ce40fb587a5a9
Parents: 9b62161
Author: Sean Owen <so...@cloudera.com>
Authored: Mon Nov 30 17:33:09 2015 -0800
Committer: Marcelo Vanzin <van...@cloudera.com>
Committed: Mon Nov 30 17:33:32 2015 -0800

----------------------------------------------------------------------
 .../apache/spark/util/ShutdownHookManager.scala | 33 ++++++++++----------
 1 file changed, 16 insertions(+), 17 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/d78f1bc4/core/src/main/scala/org/apache/spark/util/ShutdownHookManager.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/util/ShutdownHookManager.scala b/core/src/main/scala/org/apache/spark/util/ShutdownHookManager.scala
index 091dc03..fddeceb 100644
--- a/core/src/main/scala/org/apache/spark/util/ShutdownHookManager.scala
+++ b/core/src/main/scala/org/apache/spark/util/ShutdownHookManager.scala
@@ -206,7 +206,7 @@ private[spark] object ShutdownHookManager extends Logging {
 private [util] class SparkShutdownHookManager {
 
   private val hooks = new PriorityQueue[SparkShutdownHook]()
-  private var shuttingDown = false
+  @volatile private var shuttingDown = false
 
   /**
   * Install a hook to run at shutdown and run all registered hooks in order. Hadoop 1.x does not
@@ -230,28 +230,27 @@ private [util] class SparkShutdownHookManager {
     }
   }
 
-  def runAll(): Unit = synchronized {
+  def runAll(): Unit = {
     shuttingDown = true
-    while (!hooks.isEmpty()) {
-      Try(Utils.logUncaughtExceptions(hooks.poll().run()))
+    var nextHook: SparkShutdownHook = null
+    while ({ nextHook = hooks.synchronized { hooks.poll() }; nextHook != null }) {
+      Try(Utils.logUncaughtExceptions(nextHook.run()))
     }
   }
 
-  def add(priority: Int, hook: () => Unit): AnyRef = synchronized {
-    checkState()
-    val hookRef = new SparkShutdownHook(priority, hook)
-    hooks.add(hookRef)
-    hookRef
-  }
-
-  def remove(ref: AnyRef): Boolean = synchronized {
-    hooks.remove(ref)
+  def add(priority: Int, hook: () => Unit): AnyRef = {
+    hooks.synchronized {
+      if (shuttingDown) {
+        throw new IllegalStateException("Shutdown hooks cannot be modified during shutdown.")
+      }
+      val hookRef = new SparkShutdownHook(priority, hook)
+      hooks.add(hookRef)
+      hookRef
+    }
   }
 
-  private def checkState(): Unit = {
-    if (shuttingDown) {
-      throw new IllegalStateException("Shutdown hooks cannot be modified 
during shutdown.")
-    }
+  def remove(ref: AnyRef): Boolean = {
+    hooks.synchronized { hooks.remove(ref) }
   }
 
 }


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to