Github user yucai commented on a diff in the pull request:

    https://github.com/apache/spark/pull/20851#discussion_r175684322
  
    --- Diff: sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFilterSuite.scala ---
    @@ -313,6 +316,36 @@ class ParquetFilterSuite extends QueryTest with ParquetTest with SharedSQLContext
         }
       }
     
    +  test("filter pushdown - date") {
    +    implicit class IntToDate(int: Int) {
    --- End diff ---
    
    How about doing it this way? It is adapted from ORC's date filter pushdown test.
    ```
      test("filter pushdown - date") {
        val dates = Seq("2017-08-18", "2017-08-19", "2017-08-20", "2017-08-21").map { day =>
          Date.valueOf(day)
        }
        withOrcDataFrame(dates.map(Tuple1(_))) { implicit df =>
          checkFilterPredicate('_1.isNull, PredicateLeaf.Operator.IS_NULL)

          checkFilterPredicate('_1 === dates(0), PredicateLeaf.Operator.EQUALS)
          checkFilterPredicate('_1 <=> dates(0), PredicateLeaf.Operator.NULL_SAFE_EQUALS)

          checkFilterPredicate('_1 < dates(1), PredicateLeaf.Operator.LESS_THAN)
          checkFilterPredicate('_1 > dates(2), PredicateLeaf.Operator.LESS_THAN_EQUALS)
          checkFilterPredicate('_1 <= dates(0), PredicateLeaf.Operator.LESS_THAN_EQUALS)
          checkFilterPredicate('_1 >= dates(3), PredicateLeaf.Operator.LESS_THAN)

          checkFilterPredicate(Literal(dates(0)) === '_1, PredicateLeaf.Operator.EQUALS)
          checkFilterPredicate(Literal(dates(0)) <=> '_1, PredicateLeaf.Operator.NULL_SAFE_EQUALS)
          checkFilterPredicate(Literal(dates(1)) > '_1, PredicateLeaf.Operator.LESS_THAN)
          checkFilterPredicate(Literal(dates(2)) < '_1, PredicateLeaf.Operator.LESS_THAN_EQUALS)
          checkFilterPredicate(Literal(dates(0)) >= '_1, PredicateLeaf.Operator.LESS_THAN_EQUALS)
          checkFilterPredicate(Literal(dates(3)) <= '_1, PredicateLeaf.Operator.LESS_THAN)
        }
      }
    ```
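    
    For the Parquet side of this PR, the same pattern might look roughly like the sketch below. This is only a sketch, not tested against the PR's changes: it assumes this suite's existing `withParquetDataFrame`/`checkFilterPredicate` helpers, `java.sql.Date` and the Parquet `Operators.{Eq, NotEq, Lt, LtEq, Gt, GtEq}` classes imported at the top of the file, and the date pushdown support this PR adds.
    ```
      test("filter pushdown - date") {
        // Sketch only: relies on the date pushdown introduced by this PR.
        val dates = Seq("2017-08-18", "2017-08-19", "2017-08-20", "2017-08-21").map(Date.valueOf)
        withParquetDataFrame(dates.map(d => Tuple1(Option(d)))) { implicit df =>
          checkFilterPredicate('_1.isNull, classOf[Eq[_]], Seq.empty[Row])
          checkFilterPredicate('_1.isNotNull, classOf[NotEq[_]], dates.map(Row.apply(_)))

          checkFilterPredicate('_1 === dates(0), classOf[Eq[_]], dates(0))
          checkFilterPredicate('_1 <=> dates(0), classOf[Eq[_]], dates(0))
          checkFilterPredicate('_1 =!= dates(0), classOf[NotEq[_]], dates.drop(1).map(Row.apply(_)))

          checkFilterPredicate('_1 < dates(1), classOf[Lt[_]], dates(0))
          checkFilterPredicate('_1 > dates(2), classOf[Gt[_]], dates(3))
          checkFilterPredicate('_1 <= dates(0), classOf[LtEq[_]], dates(0))
          checkFilterPredicate('_1 >= dates(3), classOf[GtEq[_]], dates(3))
        }
      }
    ```
    Unlike the ORC helper, `checkFilterPredicate` in this suite also verifies the rows returned after pushdown, so an expected value (or expected rows) is passed alongside the expected Parquet filter class.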


---
