Repository: spark
Updated Branches:
  refs/heads/master 837055059 -> 6179a9483


SPARK-4783 [CORE] System.exit() calls in SparkContext disrupt applications 
embedding Spark

Avoid `System.exit(1)` in `TaskSchedulerImpl` and convert to `SparkException`; 
ensure scheduler calls `sc.stop()` even when this exception is thrown.
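
For applications embedding Spark, the practical effect is that a cluster-scheduler error no longer terminates the host JVM: failed jobs surface as a SparkException thrown from the Spark action, which the host process can catch and handle. A minimal sketch of that pattern (the object name, app name, and master URL below are hypothetical, not part of this change):

import org.apache.spark.{SparkConf, SparkContext, SparkException}

// Hypothetical embedding application; names and master URL are illustrative only.
object EmbeddingApp {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("embedding-app").setMaster("spark://master:7077")
    val sc = new SparkContext(conf)
    try {
      // A failed job now surfaces here as a SparkException rather than
      // killing the whole JVM via System.exit(1).
      val sum = sc.parallelize(1 to 100).reduce(_ + _)
      println(s"sum = $sum")
    } catch {
      case e: SparkException =>
        println(s"Spark job failed, but the embedding application keeps running: ${e.getMessage}")
    } finally {
      sc.stop()
    }
  }
}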

CC mateiz aarondav as those who may have last touched this code.

Author: Sean Owen <so...@cloudera.com>

Closes #5492 from srowen/SPARK-4783 and squashes the following commits:

60dc682 [Sean Owen] Avoid System.exit(1) in TaskSchedulerImpl and convert to 
SparkException; ensure scheduler calls sc.stop() even when this exception is 
thrown


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/6179a948
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/6179a948
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/6179a948

Branch: refs/heads/master
Commit: 6179a948371897cecb7322ebda366c2de8ecaedd
Parents: 8370550
Author: Sean Owen <so...@cloudera.com>
Authored: Thu Apr 16 10:45:32 2015 +0100
Committer: Sean Owen <so...@cloudera.com>
Committed: Thu Apr 16 10:45:32 2015 +0100

----------------------------------------------------------------------
 .../org/apache/spark/scheduler/TaskSchedulerImpl.scala      | 5 ++---
 .../scheduler/cluster/SparkDeploySchedulerBackend.scala     | 9 ++++++---
 2 files changed, 8 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/6179a948/core/src/main/scala/org/apache/spark/scheduler/TaskSchedulerImpl.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/scheduler/TaskSchedulerImpl.scala b/core/src/main/scala/org/apache/spark/scheduler/TaskSchedulerImpl.scala
index 2362cc7..ecc8bf1 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/TaskSchedulerImpl.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/TaskSchedulerImpl.scala
@@ -394,7 +394,7 @@ private[spark] class TaskSchedulerImpl(
 
   def error(message: String) {
     synchronized {
-      if (activeTaskSets.size > 0) {
+      if (activeTaskSets.nonEmpty) {
         // Have each task set throw a SparkException with the error
         for ((taskSetId, manager) <- activeTaskSets) {
           try {
@@ -407,8 +407,7 @@ private[spark] class TaskSchedulerImpl(
        // No task sets are active but we still got an error. Just exit since this
         // must mean the error is during registration.
         // It might be good to do something smarter here in the future.
-        logError("Exiting due to error from cluster scheduler: " + message)
-        System.exit(1)
+        throw new SparkException(s"Exiting due to error from cluster scheduler: $message")
       }
     }
   }

http://git-wip-us.apache.org/repos/asf/spark/blob/6179a948/core/src/main/scala/org/apache/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala b/core/src/main/scala/org/apache/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala
index ed5b7c1..ccf1dc5 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/cluster/SparkDeploySchedulerBackend.scala
@@ -118,9 +118,12 @@ private[spark] class SparkDeploySchedulerBackend(
     notifyContext()
     if (!stopping) {
       logError("Application has been killed. Reason: " + reason)
-      scheduler.error(reason)
-      // Ensure the application terminates, as we can no longer run jobs.
-      sc.stop()
+      try {
+        scheduler.error(reason)
+      } finally {
+        // Ensure the application terminates, as we can no longer run jobs.
+        sc.stop()
+      }
     }
   }
 


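A note on the try/finally added in SparkDeploySchedulerBackend.dead(): because scheduler.error(reason) can now throw a SparkException instead of exiting, the finally block is what guarantees sc.stop() still runs and the context shuts down cleanly. A small standalone sketch of that guarantee, with hypothetical helpers standing in for scheduler.error and sc.stop:

import org.apache.spark.SparkException

object ErrorThenStop {
  // Hypothetical stand-ins for scheduler.error(reason) and sc.stop().
  def reportError(reason: String): Unit =
    throw new SparkException(s"Exiting due to error from cluster scheduler: $reason")
  def stopContext(): Unit = println("sc.stop() still runs")

  def main(args: Array[String]): Unit = {
    try {
      try {
        reportError("Master removed our application")
      } finally {
        // Runs even though reportError threw, mirroring the change above.
        stopContext()
      }
    } catch {
      case e: SparkException => println(s"caught: ${e.getMessage}")
    }
  }
}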