Repository: spark
Updated Branches:
  refs/heads/master 767d288b6 -> f0d3b58d9


[SPARK-11290][STREAMING][TEST-MAVEN] Fix the test for maven build

We should not create a SparkContext in the constructor of
`TrackStateRDDSuite`. This is a follow-up PR to #9256 that fixes the test
for the Maven build.

Author: Shixiong Zhu <shixi...@databricks.com>

Closes #9668 from zsxwing/hotfix.
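
For context, a minimal, self-contained sketch of the pattern this patch
applies is shown below. The suite name `ExampleRDDSuite` and the test body
are illustrative, not part of the change; `SparkFunSuite` is Spark's own
test base class. The SparkContext is created lazily in `beforeAll()`
instead of in the suite's constructor (presumably because the Maven build
instantiates all suites before running them), and `afterAll()` guards the
`stop()` call so it does not fail if `beforeAll()` never ran.

import org.scalatest.BeforeAndAfterAll

import org.apache.spark.{SparkConf, SparkContext, SparkFunSuite}

class ExampleRDDSuite extends SparkFunSuite with BeforeAndAfterAll {

  // Do not create the SparkContext here: if all suite classes are
  // instantiated before any test runs, a context created in the
  // constructor can clash with contexts from other suites.
  private var sc: SparkContext = null

  override def beforeAll(): Unit = {
    sc = new SparkContext(
      new SparkConf().setMaster("local").setAppName("ExampleRDDSuite"))
  }

  override def afterAll(): Unit = {
    // Guard against beforeAll() having failed or never run.
    if (sc != null) {
      sc.stop()
    }
  }

  test("count elements of a simple RDD") {  // illustrative test body only
    assert(sc.parallelize(1 to 10).count() === 10L)
  }
}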


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/f0d3b58d
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/f0d3b58d
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/f0d3b58d

Branch: refs/heads/master
Commit: f0d3b58d91f43697397cdd7a7e7f38cbb7daaa31
Parents: 767d288
Author: Shixiong Zhu <shixi...@databricks.com>
Authored: Thu Nov 12 14:52:03 2015 -0800
Committer: Tathagata Das <tathagata.das1...@gmail.com>
Committed: Thu Nov 12 14:52:03 2015 -0800

----------------------------------------------------------------------
 .../apache/spark/streaming/rdd/TrackStateRDDSuite.scala | 12 +++++++++---
 1 file changed, 9 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/f0d3b58d/streaming/src/test/scala/org/apache/spark/streaming/rdd/TrackStateRDDSuite.scala
----------------------------------------------------------------------
diff --git a/streaming/src/test/scala/org/apache/spark/streaming/rdd/TrackStateRDDSuite.scala b/streaming/src/test/scala/org/apache/spark/streaming/rdd/TrackStateRDDSuite.scala
index fc5f266..f396b76 100644
--- a/streaming/src/test/scala/org/apache/spark/streaming/rdd/TrackStateRDDSuite.scala
+++ b/streaming/src/test/scala/org/apache/spark/streaming/rdd/TrackStateRDDSuite.scala
@@ -28,11 +28,17 @@ import org.apache.spark.{HashPartitioner, SparkConf, SparkContext, SparkFunSuite
 
 class TrackStateRDDSuite extends SparkFunSuite with BeforeAndAfterAll {
 
-  private var sc = new SparkContext(
-    new SparkConf().setMaster("local").setAppName("TrackStateRDDSuite"))
+  private var sc: SparkContext = null
+
+  override def beforeAll(): Unit = {
+    sc = new SparkContext(
+      new SparkConf().setMaster("local").setAppName("TrackStateRDDSuite"))
+  }
 
   override def afterAll(): Unit = {
-    sc.stop()
+    if (sc != null) {
+      sc.stop()
+    }
   }
 
   test("creation from pair RDD") {

