Github user jiangxb1987 commented on a diff in the pull request:

    https://github.com/apache/spark/pull/20022#discussion_r158584088
  
    --- Diff: sql/core/src/test/scala/org/apache/spark/sql/DataFrameWindowFunctionsSuite.scala ---
    @@ -518,9 +519,46 @@ class DataFrameWindowFunctionsSuite extends QueryTest with SharedSQLContext {
          Seq(Row(3, "1", null, 3.0, 4.0, 3.0), Row(5, "1", false, 4.0, 5.0, 5.0)))
       }
     
    +  test("Window spill with less than the inMemoryThreshold") {
    +    val df = Seq((1, "1"), (2, "2"), (1, "3"), (2, "4")).toDF("key", "value")
    +    val window = Window.partitionBy($"key").orderBy($"value")
    +
    +    withSQLConf(SQLConf.WINDOW_EXEC_BUFFER_IN_MEMORY_THRESHOLD.key -> "2",
    +      SQLConf.WINDOW_EXEC_BUFFER_SPILL_THRESHOLD.key -> "2") {
    +      assertNotSpilled(sparkContext, "select") {
    +        df.select($"key", sum("value").over(window)).collect()
    +      }
    +    }
    +  }
    +
    +  test("Window spill with more than the inMemoryThreshold but less than 
the spillThreshold") {
    +    val df = Seq((1, "1"), (2, "2"), (1, "3"), (2, "4")).toDF("key", "value")
    +    val window = Window.partitionBy($"key").orderBy($"value")
    +
    +    withSQLConf(SQLConf.WINDOW_EXEC_BUFFER_IN_MEMORY_THRESHOLD.key -> "1",
    +      SQLConf.WINDOW_EXEC_BUFFER_SPILL_THRESHOLD.key -> "2") {
    +      assertNotSpilled(sparkContext, "select") {
    +        df.select($"key", sum("value").over(window)).collect()
    +      }
    +    }
    +  }
    +
    +  test("Window spill with more than the inMemoryThreshold and 
spillThreshold") {
    +    val df = Seq((1, "1"), (2, "2"), (1, "3"), (2, "4")).toDF("key", "value")
    +    val window = Window.partitionBy($"key").orderBy($"value")
    +
    +    withSQLConf(SQLConf.WINDOW_EXEC_BUFFER_IN_MEMORY_THRESHOLD.key -> "1",
    +      SQLConf.WINDOW_EXEC_BUFFER_SPILL_THRESHOLD.key -> "1") {
    +      assertSpilled(sparkContext, "select") {
    +        df.select($"key", sum("value").over(window)).collect()
    +      }
    +    }
    +  }
    +
       test("SPARK-21258: complex object in combination with spilling") {
         // Make sure we trigger the spilling path.
    -    withSQLConf(SQLConf.WINDOW_EXEC_BUFFER_SPILL_THRESHOLD.key -> "17") {
    +    withSQLConf(SQLConf.WINDOW_EXEC_BUFFER_IN_MEMORY_THRESHOLD.key -> "0",
    --- End diff ---
    
    Yeah, I mean, how about setting it to 1 instead of 0?
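    
    For reference, a minimal sketch of what that suggestion could look like (assuming the rest of the SPARK-21258 test body stays as in the diff, and that keeping the spill threshold at the original "17" still forces the spilling path):
    
        // Hedged sketch of the suggested config: in-memory threshold of 1
        // instead of 0, so at least one row is buffered in memory before the
        // buffer falls back to the spillable array, while the low spill
        // threshold is assumed to still trigger spilling for this test.
        withSQLConf(SQLConf.WINDOW_EXEC_BUFFER_IN_MEMORY_THRESHOLD.key -> "1",
          SQLConf.WINDOW_EXEC_BUFFER_SPILL_THRESHOLD.key -> "17") {
          // ... existing SPARK-21258 test body ...
        }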


