Github user maropu commented on a diff in the pull request:

    https://github.com/apache/spark/pull/21834#discussion_r204960461
  
    --- Diff: sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala ---
    @@ -1341,6 +1352,70 @@ class JDBCSuite extends QueryTest
         checkAnswer(
           sql("select name, theid from queryOption"),
           Row("fred", 1) :: Nil)
    +  }
    +
    +  test("SPARK-22814 support date/timestamp types in partitionColumn") {
    +    val expectedResult = Seq(
    +      ("2018-07-06", "2018-07-06 05:50:00.0"),
    +      ("2018-07-06", "2018-07-06 08:10:08.0"),
    +      ("2018-07-08", "2018-07-08 13:32:01.0"),
    +      ("2018-07-12", "2018-07-12 09:51:15.0")
    +    ).map { case (date, timestamp) =>
    +      Row(Date.valueOf(date), Timestamp.valueOf(timestamp))
    +    }
    +
    +    // DateType partition column
    +    val df1 = spark.read.format("jdbc")
    +      .option("url", urlWithUserAndPass)
    +      .option("dbtable", "TEST.DATETIME")
    +      .option("partitionColumn", "d")
    +      .option("lowerBound", "2018-07-06")
    +      .option("upperBound", "2018-07-20")
    +      .option("numPartitions", 3)
    +      .load()
     
    +    df1.logicalPlan match {
    +      case LogicalRelation(JDBCRelation(_, parts, _), _, _, _) =>
    +        val whereClauses = parts.map(_.asInstanceOf[JDBCPartition].whereClause).toSet
    +        assert(whereClauses === Set(
    +          """"D" < '2018-07-10' or "D" is null""",
    +          """"D" >= '2018-07-10' AND "D" < '2018-07-14'""",
    +          """"D" >= '2018-07-14'"""))
    +    }
    +    checkAnswer(df1, expectedResult)
    +
    +    // TimestampType partition column
    +    val df2 = spark.read.format("jdbc")
    +      .option("url", urlWithUserAndPass)
    +      .option("dbtable", "TEST.DATETIME")
    +      .option("partitionColumn", "t")
    +      .option("lowerBound", "2018-07-04 03:30:00.0")
    +      .option("upperBound", "2018-07-27 14:11:05.0")
    +      .option("numPartitions", 2)
    +      .load()
    +
    +    df2.logicalPlan match {
    +      case LogicalRelation(JDBCRelation(_, parts, _), _, _, _) =>
    +        val whereClauses = parts.map(_.asInstanceOf[JDBCPartition].whereClause).toSet
    +        assert(whereClauses === Set(
    +          """"T" < '2018-07-15 20:50:32.5' or "T" is null""",
    --- End diff --
    
    ok
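    
    For reference, the asserted split points follow from a simple stride
    computation. A sketch of the timestamp case (the millisecond arithmetic
    here is my assumption about how the patch derives the boundary, not a
    quote of its implementation):
    
        import java.sql.Timestamp
    
        // stride = (upperBound - lowerBound) / numPartitions, in milliseconds
        val lower = Timestamp.valueOf("2018-07-04 03:30:00.0").getTime
        val upper = Timestamp.valueOf("2018-07-27 14:11:05.0").getTime
        val stride = (upper - lower) / 2            // 1012832500 ms
        println(new Timestamp(lower + stride))      // 2018-07-15 20:50:32.5
    
    The date case appears to use a whole-day stride: (2018-07-20 -
    2018-07-06) / 3 partitions = 4 days, which matches the 2018-07-10 and
    2018-07-14 boundaries in the first assert.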

