Github user zsxwing commented on a diff in the pull request:

    https://github.com/apache/spark/pull/12049#discussion_r58116999
  
    --- Diff: sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamSuite.scala ---
    @@ -81,4 +85,62 @@ class StreamSuite extends StreamTest with SharedSQLContext {
           AddData(inputData, 1, 2, 3, 4),
           CheckAnswer(2, 4))
       }
    +
    +  test("DataFrame reuse") {
    +    def assertDF(df: DataFrame) {
    +      withTempDir { outputDir =>
    +        withTempDir { checkpointDir =>
    +          val query = df.write.format("parquet")
    +            .option("checkpointLocation", checkpointDir.getAbsolutePath)
    +            .startStream(outputDir.getAbsolutePath)
    +          try {
    +            eventually(timeout(streamingTimeout)) {
    +            val outputDf = sqlContext.read.parquet(outputDir.getAbsolutePath).as[Long]
    +              checkDataset[Long](outputDf, (0L to 10L).toArray: _*)
    +            }
    +          } finally {
    +            query.stop()
    +          }
    +        }
    +      }
    +    }
    +
    +    val df = sqlContext.read.format(classOf[FakeDefaultSource].getName).stream()
    +    assertDF(df)
    +    assertDF(df)
    +    assertDF(df)
    --- End diff --
    
    > I was saying that when I ran it locally it passed (without the other fixes in this PR) when there were only two.
    
    That seems weird. I just tested this test against the master branch (without any of the fixes in this PR), and it did fail when there were only two.
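
    For reference, here is a minimal sketch of what I ran (it reuses the assertDF helper and FakeDefaultSource from the diff above, and assumes the stream()/startStream() API on this branch), with only two starts of the same DataFrame:

        // The test from the diff, with assertDF invoked only twice instead of
        // three times; for me this already fails on master without this PR's fixes.
        val df = sqlContext.read.format(classOf[FakeDefaultSource].getName).stream()
        assertDF(df)  // first query started from df
        assertDF(df)  // second query started from the same df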

