Repository: spark
Updated Branches:
  refs/heads/branch-2.0 d494a483a -> 879e8fd09


[SPARK-15878][CORE][TEST] fix cleanup in EventLoggingListenerSuite and ReplayListenerSuite

## What changes were proposed in this pull request?

These tests weren't properly using `LocalSparkContext`, so they weren't cleaning up correctly when tests failed.
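
For context, `LocalSparkContext` is the Spark test trait that owns a shared `sc` field and stops it after each test, so a failed assertion can't leave a live `SparkContext` behind to interfere with later suites. The sketch below is a minimal approximation of that pattern; the trait body and the `LocalSparkContextSketch` name are illustrative assumptions, not the exact Spark source.

```scala
import org.apache.spark.SparkContext
import org.scalatest.{BeforeAndAfterEach, Suite}

// Illustrative sketch (simplified assumption, not the verbatim Spark trait):
// tests assign to the shared `sc` var, and the trait stops it after every
// test, whether the test passed or failed.
trait LocalSparkContextSketch extends BeforeAndAfterEach { self: Suite =>

  @transient var sc: SparkContext = _

  override def afterEach(): Unit = {
    try {
      // Stop the context even if the test body threw, so a later test
      // does not collide with a leftover driver.
      if (sc != null) {
        sc.stop()
      }
      sc = null
    } finally {
      super.afterEach()
    }
  }
}
```

With the trait mixed in, the diffs below simply assign to the inherited `sc` var instead of declaring a local `val sc`, which is why the one-line changes are enough to get proper cleanup.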

## How was this patch tested?

Jenkins.

Author: Imran Rashid <iras...@cloudera.com>

Closes #13602 from squito/SPARK-15878_cleanup_replaylistener.

(cherry picked from commit 8cc22b0085475a188f229536b4f83988ae889a8e)
Signed-off-by: Sean Owen <so...@cloudera.com>


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/879e8fd0
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/879e8fd0
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/879e8fd0

Branch: refs/heads/branch-2.0
Commit: 879e8fd09477fc78d66c9da9e0e117a513b0b046
Parents: d494a48
Author: Imran Rashid <iras...@cloudera.com>
Authored: Sun Jun 12 12:54:57 2016 +0100
Committer: Sean Owen <so...@cloudera.com>
Committed: Sun Jun 12 12:55:17 2016 +0100

----------------------------------------------------------------------
 .../org/apache/spark/scheduler/EventLoggingListenerSuite.scala | 2 +-
 .../scala/org/apache/spark/scheduler/ReplayListenerSuite.scala | 6 +++---
 2 files changed, 4 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/879e8fd0/core/src/test/scala/org/apache/spark/scheduler/EventLoggingListenerSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/scheduler/EventLoggingListenerSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/EventLoggingListenerSuite.scala
index 176d893..c4c80b5 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/EventLoggingListenerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/EventLoggingListenerSuite.scala
@@ -181,7 +181,7 @@ class EventLoggingListenerSuite extends SparkFunSuite with LocalSparkContext wit
     // into SPARK-6688.
     val conf = getLoggingConf(testDirPath, compressionCodec)
       .set("spark.hadoop.fs.defaultFS", "unsupported://example.com")
-    val sc = new SparkContext("local-cluster[2,2,1024]", "test", conf)
+    sc = new SparkContext("local-cluster[2,2,1024]", "test", conf)
     assert(sc.eventLogger.isDefined)
     val eventLogger = sc.eventLogger.get
     val eventLogPath = eventLogger.logPath

http://git-wip-us.apache.org/repos/asf/spark/blob/879e8fd0/core/src/test/scala/org/apache/spark/scheduler/ReplayListenerSuite.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/org/apache/spark/scheduler/ReplayListenerSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/ReplayListenerSuite.scala
index 35215c1..1732aca 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/ReplayListenerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/ReplayListenerSuite.scala
@@ -23,7 +23,7 @@ import java.net.URI
 import org.json4s.jackson.JsonMethods._
 import org.scalatest.BeforeAndAfter
 
-import org.apache.spark.{SparkConf, SparkContext, SparkFunSuite}
+import org.apache.spark.{LocalSparkContext, SparkConf, SparkContext, SparkFunSuite}
 import org.apache.spark.deploy.SparkHadoopUtil
 import org.apache.spark.io.CompressionCodec
 import org.apache.spark.util.{JsonProtocol, JsonProtocolSuite, Utils}
@@ -31,7 +31,7 @@ import org.apache.spark.util.{JsonProtocol, JsonProtocolSuite, Utils}
 /**
  * Test whether ReplayListenerBus replays events from logs correctly.
  */
-class ReplayListenerSuite extends SparkFunSuite with BeforeAndAfter {
+class ReplayListenerSuite extends SparkFunSuite with BeforeAndAfter with LocalSparkContext {
   private val fileSystem = Utils.getHadoopFileSystem("/",
     SparkHadoopUtil.get.newConfiguration(new SparkConf()))
   private var testDir: File = _
@@ -101,7 +101,7 @@ class ReplayListenerSuite extends SparkFunSuite with BeforeAndAfter {
     fileSystem.mkdirs(logDirPath)
 
     val conf = EventLoggingListenerSuite.getLoggingConf(logDirPath, codecName)
-    val sc = new SparkContext("local-cluster[2,1,1024]", "Test replay", conf)
+    sc = new SparkContext("local-cluster[2,1,1024]", "Test replay", conf)
 
     // Run a few jobs
     sc.parallelize(1 to 100, 1).count()

