Github user tdas commented on a diff in the pull request:

    https://github.com/apache/spark/pull/21385#discussion_r190069654
  
    --- Diff: 
sql/core/src/test/scala/org/apache/spark/sql/streaming/continuous/shuffle/ContinuousShuffleReadSuite.scala
 ---
    @@ -181,4 +211,80 @@ class ContinuousShuffleReadSuite extends StreamTest {
           readRowThread.join()
         }
       }
    +
    +  test("epoch only ends when all writers send markers") {
    +    val rdd = new ContinuousShuffleReadRDD(sparkContext, numPartitions = 
1, numShuffleWriters = 3)
    +    val endpoint = 
rdd.partitions(0).asInstanceOf[ContinuousShuffleReadPartition].endpoint
    +    send(
    +      endpoint,
    +      ReceiverRow(0, unsafeRow("writer0-row0")),
    +      ReceiverRow(1, unsafeRow("writer1-row0")),
    +      ReceiverRow(2, unsafeRow("writer2-row0")),
    +      ReceiverEpochMarker(0),
    +      ReceiverEpochMarker(2)
    +    )
    +
    +    val epoch = rdd.compute(rdd.partitions(0), ctx)
    +    val rows = (0 until 3).map(_ => epoch.next()).toSet
    +    assert(rows.map(_.getUTF8String(0).toString) ==
    +      Set("writer0-row0", "writer1-row0", "writer2-row0"))
    +
    +    // After checking the right rows, block until we get an epoch marker 
indicating there's no next.
    +    // (Also fail the assertion if for some reason we get a row.)
    +    val readEpochMarkerThread = new Thread {
    +      override def run(): Unit = {
    +        assert(!epoch.hasNext)
    +      }
    +    }
    +
    +    readEpochMarkerThread.start()
    +
    --- End diff --
    
    nit: remove empty line.


---

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org
For additional commands, e-mail: reviews-help@spark.apache.org

Reply via email to