Github user holdenk commented on a diff in the pull request:

    https://github.com/apache/spark/pull/22337#discussion_r216114084
  
    --- Diff: external/kafka-0-10/src/test/scala/org/apache/spark/streaming/kafka010/KafkaRDDSuite.scala ---
    @@ -44,20 +44,25 @@ class KafkaRDDSuite extends SparkFunSuite with BeforeAndAfterAll {
       private var sc: SparkContext = _
     
       override def beforeAll {
    +    super.beforeAll()
         sc = new SparkContext(sparkConf)
         kafkaTestUtils = new KafkaTestUtils
         kafkaTestUtils.setup()
       }
     
       override def afterAll {
    -    if (sc != null) {
    -      sc.stop
    -      sc = null
    -    }
    -
    -    if (kafkaTestUtils != null) {
    -      kafkaTestUtils.teardown()
    -      kafkaTestUtils = null
    +    try {
    +      if (sc != null) {
    +        sc.stop
    +        sc = null
    +      }
    +
    +      if (kafkaTestUtils != null) {
    +        kafkaTestUtils.teardown()
    +        kafkaTestUtils = null
    +      }
    +    } finally {
    +      super.afterAll()
    --- End diff --
    
    I'm confused by your comment; it looks like we try to stop the Spark
    context first. Are you suggesting that we want to tear down the Kafka
    test utils even if the SparkContext's `stop` fails?

