Repository: spark
Updated Branches:
  refs/heads/master 90787de86 -> cdf9e9753


[SPARK-14505][CORE] Fix bug: when creating two SparkContext objects in the same
JVM, the first one cannot run any tasks!

After creating two SparkContext objects in the same JVM (the second one cannot
be created successfully),
using the first one to run a job will throw an exception like the one below:

![image](https://cloud.githubusercontent.com/assets/7162889/14402832/0c8da2a6-fe73-11e5-8aba-68ee3ddaf605.png)

Author: Allen <yufan_1...@163.com>

Closes #12273 from the-sea/context-create-bug.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/cdf9e975
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/cdf9e975
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/cdf9e975

Branch: refs/heads/master
Commit: cdf9e9753df4e7f2fa4e972d1bfded4e22943c27
Parents: 90787de
Author: Allen <yufan_1...@163.com>
Authored: Sun May 1 15:39:14 2016 +0100
Committer: Sean Owen <so...@cloudera.com>
Committed: Sun May 1 15:39:14 2016 +0100

----------------------------------------------------------------------
 .../scala/org/apache/spark/SparkContext.scala   | 27 +++++++++-----------
 .../org/apache/spark/SparkContextSuite.scala    |  4 +++
 2 files changed, 16 insertions(+), 15 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/cdf9e975/core/src/main/scala/org/apache/spark/SparkContext.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala 
b/core/src/main/scala/org/apache/spark/SparkContext.scala
index ed4408c..2cb3ed0 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -2216,21 +2216,7 @@ object SparkContext extends Logging {
       sc: SparkContext,
       allowMultipleContexts: Boolean): Unit = {
     SPARK_CONTEXT_CONSTRUCTOR_LOCK.synchronized {
-      contextBeingConstructed.foreach { otherContext =>
-        if (otherContext ne sc) {  // checks for reference equality
-          // Since otherContext might point to a partially-constructed 
context, guard against
-          // its creationSite field being null:
-          val otherContextCreationSite =
-            
Option(otherContext.creationSite).map(_.longForm).getOrElse("unknown location")
-          val warnMsg = "Another SparkContext is being constructed (or threw 
an exception in its" +
-            " constructor).  This may indicate an error, since only one 
SparkContext may be" +
-            " running in this JVM (see SPARK-2243)." +
-            s" The other SparkContext was created 
at:\n$otherContextCreationSite"
-          logWarning(warnMsg)
-        }
-
-        if (activeContext.get() != null) {
-          val ctx = activeContext.get()
+      Option(activeContext.get()).filter(_ ne sc).foreach { ctx =>
           val errMsg = "Only one SparkContext may be running in this JVM (see 
SPARK-2243)." +
             " To ignore this error, set spark.driver.allowMultipleContexts = 
true. " +
             s"The currently running SparkContext was created 
at:\n${ctx.creationSite.longForm}"
@@ -2241,6 +2227,17 @@ object SparkContext extends Logging {
             throw exception
           }
         }
+
+      contextBeingConstructed.filter(_ ne sc).foreach { otherContext =>
+        // Since otherContext might point to a partially-constructed context, 
guard against
+        // its creationSite field being null:
+        val otherContextCreationSite =
+          Option(otherContext.creationSite).map(_.longForm).getOrElse("unknown 
location")
+        val warnMsg = "Another SparkContext is being constructed (or threw an 
exception in its" +
+          " constructor).  This may indicate an error, since only one 
SparkContext may be" +
+          " running in this JVM (see SPARK-2243)." +
+          s" The other SparkContext was created at:\n$otherContextCreationSite"
+        logWarning(warnMsg)
       }
     }
   }

http://git-wip-us.apache.org/repos/asf/spark/blob/cdf9e975/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/SparkContextSuite.scala 
b/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
index 841fd02..a759f36 100644
--- a/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
@@ -39,8 +39,12 @@ class SparkContextSuite extends SparkFunSuite with 
LocalSparkContext {
     val conf = new SparkConf().setAppName("test").setMaster("local")
       .set("spark.driver.allowMultipleContexts", "false")
     sc = new SparkContext(conf)
+    val envBefore = SparkEnv.get
     // A SparkContext is already running, so we shouldn't be able to create a 
second one
     intercept[SparkException] { new SparkContext(conf) }
+    val envAfter = SparkEnv.get
+    // SparkEnv and other context variables should be the same
+    assert(envBefore == envAfter)
     // After stopping the running context, we should be able to create a new 
one
     resetSparkContext()
     sc = new SparkContext(conf)


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to