[ https://issues.apache.org/jira/browse/SPARK-12258?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

Ian updated SPARK-12258:
------------------------
    Description: 
{code}
  test("Timestamp UDF and Null value") {
    hiveContext.runSqlHive("CREATE TABLE ts_test (ts TIMESTAMP) STORED AS 
TEXTFILE")
    hiveContext.runSqlHive("INSERT INTO TABLE ts_test VALUES(Null)")
    hiveContext.udf.register("dummy",
      (ts: Timestamp) => ts
    )
    val result = hiveContext.sql("SELECT dummy(ts) FROM 
ts_test").collect().mkString("\n")
    assertResult("[null]")(result)
  }
{code}
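
For context on the value in the issue title: Spark represents timestamps internally as microseconds since the Unix epoch, and an internal value of -1 microsecond prints as exactly '1969-12-31 15:59:59.999999' when the JVM default time zone is UTC-8. The sketch below only illustrates that arithmetic; the America/Los_Angeles time zone and the leaked -1L value are assumptions, not facts taken from the report.

{code}
import java.sql.Timestamp
import java.util.TimeZone

// Illustration only: an internal value of -1 microsecond renders as the
// timestamp quoted in the title when the JVM default zone is UTC-8.
// Both the zone and the -1L value are assumptions, not taken from the report.
object NullSentinelSketch {
  def main(args: Array[String]): Unit = {
    TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"))

    val micros = -1L                         // hypothetical internal value for the NULL row
    var seconds = micros / 1000000L
    var fraction = micros % 1000000L
    if (fraction < 0) {                      // Timestamp.setNanos rejects negative values
      fraction += 1000000L
      seconds -= 1
    }
    val ts = new Timestamp(seconds * 1000L)  // whole seconds, expressed in milliseconds
    ts.setNanos(fraction.toInt * 1000)       // fractional seconds, expressed in nanoseconds

    println(ts)                              // prints 1969-12-31 15:59:59.999999
  }
}
{code}

If that reading is correct, the NULL row is being handed to the Hive UDF wrapper as a concrete internal value rather than as a genuine null.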


> Hive Timestamp UDF is bound to '1969-12-31 15:59:59.999999' for null value
> --------------------------------------------------------------------------
>
>                 Key: SPARK-12258
>                 URL: https://issues.apache.org/jira/browse/SPARK-12258
>             Project: Spark
>          Issue Type: Bug
>    Affects Versions: 1.6.0
>            Reporter: Ian
>
> {code}
>   test("Timestamp UDF and Null value") {
>     hiveContext.runSqlHive("CREATE TABLE ts_test (ts TIMESTAMP) STORED AS 
> TEXTFILE")
>     hiveContext.runSqlHive("INSERT INTO TABLE ts_test VALUES(Null)")
>     hiveContext.udf.register("dummy",
>       (ts: Timestamp) => ts
>     )
>     val result = hiveContext.sql("SELECT dummy(ts) FROM 
> ts_test").collect().mkString("\n")
>     assertResult("[null]")(result)
>   }
> {code}


