Repository: spark
Updated Branches:
  refs/heads/master 244010624 -> 07414afac


[SPARK-11537] [SQL] fix negative hours/minutes/seconds

Currently, if the Timestamp is before the epoch (1970-01-01), the hours,
minutes and seconds come out negative (and, because integer division
truncates toward zero, they are also rounded up instead of down).
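
The root cause is that Java/Scala integer division and remainder keep the
sign of the dividend, so a negative microsecond value yields negative field
values. A minimal illustrative Scala sketch (not part of this patch; the
time-zone offset is ignored):

  // 1969-12-31 23:00:00 UTC, one hour before the epoch, in microseconds.
  val microsBeforeEpoch = -1L * 60 * 60 * 1000 * 1000
  // Pre-fix style of arithmetic: the dividend's sign leaks into the result.
  ((microsBeforeEpoch / 1000 / 1000 / 3600) % 24).toInt  // -1 instead of the expected 23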

Author: Davies Liu <dav...@databricks.com>

Closes #9502 from davies/neg_hour.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/07414afa
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/07414afa
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/07414afa

Branch: refs/heads/master
Commit: 07414afac9a100ede1dee5f3d45a657802c8bd2a
Parents: 2440106
Author: Davies Liu <dav...@databricks.com>
Authored: Thu Nov 5 17:02:22 2015 -0800
Committer: Davies Liu <davies....@gmail.com>
Committed: Thu Nov 5 17:02:22 2015 -0800

----------------------------------------------------------------------
 .../spark/sql/catalyst/util/DateTimeUtils.scala | 23 +++++++++++++-------
 .../sql/catalyst/util/DateTimeUtilsSuite.scala  | 13 +++++++++++
 2 files changed, 28 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/07414afa/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
index 781ed16..f5fff90 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
@@ -393,28 +393,35 @@ object DateTimeUtils {
   }
 
   /**
+   * Returns the microseconds since year zero (-17999) from microseconds since epoch.
+   */
+  def absoluteMicroSecond(microsec: SQLTimestamp): SQLTimestamp = {
+    microsec + toYearZero * MICROS_PER_DAY
+  }
+
+  /**
    * Returns the hour value of a given timestamp value. The timestamp is expressed in microseconds.
    */
-  def getHours(timestamp: SQLTimestamp): Int = {
-    val localTs = (timestamp / 1000) + defaultTimeZone.getOffset(timestamp / 1000)
-    ((localTs / 1000 / 3600) % 24).toInt
+  def getHours(microsec: SQLTimestamp): Int = {
+    val localTs = absoluteMicroSecond(microsec) + defaultTimeZone.getOffset(microsec / 1000) * 1000L
+    ((localTs / MICROS_PER_SECOND / 3600) % 24).toInt
   }
 
   /**
    * Returns the minute value of a given timestamp value. The timestamp is expressed in
    * microseconds.
    */
-  def getMinutes(timestamp: SQLTimestamp): Int = {
-    val localTs = (timestamp / 1000) + defaultTimeZone.getOffset(timestamp / 1000)
-    ((localTs / 1000 / 60) % 60).toInt
+  def getMinutes(microsec: SQLTimestamp): Int = {
+    val localTs = absoluteMicroSecond(microsec) + defaultTimeZone.getOffset(microsec / 1000) * 1000L
+    ((localTs / MICROS_PER_SECOND / 60) % 60).toInt
   }
 
   /**
    * Returns the second value of a given timestamp value. The timestamp is expressed in
    * microseconds.
    */
-  def getSeconds(timestamp: SQLTimestamp): Int = {
-    ((timestamp / 1000 / 1000) % 60).toInt
+  def getSeconds(microsec: SQLTimestamp): Int = {
+    ((absoluteMicroSecond(microsec) / MICROS_PER_SECOND) % 60).toInt
   }
 
   private[this] def isLeapYear(year: Int): Boolean = {
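
The key change above is absoluteMicroSecond: shifting by toYearZero days (a
large positive constant in DateTimeUtils, the day count back to year -17999)
before dividing keeps the dividend non-negative for timestamps within the
supported range, so the remainder can no longer go negative. A rough
standalone check (UTC only; toYearZero assumed here to be 7293527 days):

  val MICROS_PER_SECOND = 1000L * 1000L
  val MICROS_PER_DAY = MICROS_PER_SECOND * 60 * 60 * 24
  val toYearZero = 7293527L  // assumed value of the DateTimeUtils constant

  def absoluteMicroSecond(microsec: Long): Long =
    microsec + toYearZero * MICROS_PER_DAY

  // One hour before the epoch: previously -1, now the expected 23.
  ((absoluteMicroSecond(-3600L * MICROS_PER_SECOND) / MICROS_PER_SECOND / 3600) % 24).toInt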

http://git-wip-us.apache.org/repos/asf/spark/blob/07414afa/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
index 4633594..64d15e6 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
@@ -358,6 +358,19 @@ class DateTimeUtilsSuite extends SparkFunSuite {
     assert(getSeconds(c.getTimeInMillis * 1000) === 9)
   }
 
+  test("hours / minutes / seconds") {
+    Seq(Timestamp.valueOf("2015-06-11 10:12:35.789"),
+      Timestamp.valueOf("2015-06-11 20:13:40.789"),
+      Timestamp.valueOf("1900-06-11 12:14:50.789"),
+      Timestamp.valueOf("1700-02-28 12:14:50.123456")).foreach { t =>
+      val us = fromJavaTimestamp(t)
+      assert(toJavaTimestamp(us) === t)
+      assert(getHours(us) === t.getHours)
+      assert(getMinutes(us) === t.getMinutes)
+      assert(getSeconds(us) === t.getSeconds)
+    }
+  }
+
   test("get day in year") {
     val c = Calendar.getInstance()
     c.set(2015, 2, 18, 0, 0, 0)

