spark git commit: [SPARK-11537] [SQL] fix negative hours/minutes/seconds

2015-11-05 Thread davies
Repository: spark
Updated Branches:
  refs/heads/branch-1.6 37c59f0ba -> 83b957a35


[SPARK-11537] [SQL] fix negative hours/minutes/seconds

Currently, if the Timestamp is before epoch (1970/01/01), the hours, minutes 
and seconds will be negative (also rounding up).

Author: Davies Liu 

Closes #9502 from davies/neg_hour.

(cherry picked from commit 07414afac9a100ede1dee5f3d45a657802c8bd2a)
Signed-off-by: Davies Liu 


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/83b957a3
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/83b957a3
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/83b957a3

Branch: refs/heads/branch-1.6
Commit: 83b957a35bd4cd260a48af8cd602a2a67d08b4b9
Parents: 37c59f0
Author: Davies Liu 
Authored: Thu Nov 5 17:02:22 2015 -0800
Committer: Davies Liu 
Committed: Thu Nov 5 17:02:31 2015 -0800

--
 .../spark/sql/catalyst/util/DateTimeUtils.scala | 23 +---
 .../sql/catalyst/util/DateTimeUtilsSuite.scala  | 13 +++
 2 files changed, 28 insertions(+), 8 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/spark/blob/83b957a3/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
--
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
index 781ed16..f5fff90 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
@@ -393,28 +393,35 @@ object DateTimeUtils {
   }
 
   /**
+   * Returns the microseconds since year zero (-17999) from microseconds since 
epoch.
+   */
+  def absoluteMicroSecond(microsec: SQLTimestamp): SQLTimestamp = {
+microsec + toYearZero * MICROS_PER_DAY
+  }
+
+  /**
* Returns the hour value of a given timestamp value. The timestamp is 
expressed in microseconds.
*/
-  def getHours(timestamp: SQLTimestamp): Int = {
-val localTs = (timestamp / 1000) + defaultTimeZone.getOffset(timestamp / 
1000)
-((localTs / 1000 / 3600) % 24).toInt
+  def getHours(microsec: SQLTimestamp): Int = {
+val localTs = absoluteMicroSecond(microsec) + 
defaultTimeZone.getOffset(microsec / 1000) * 1000L
+((localTs / MICROS_PER_SECOND / 3600) % 24).toInt
   }
 
   /**
* Returns the minute value of a given timestamp value. The timestamp is 
expressed in
* microseconds.
*/
-  def getMinutes(timestamp: SQLTimestamp): Int = {
-val localTs = (timestamp / 1000) + defaultTimeZone.getOffset(timestamp / 
1000)
-((localTs / 1000 / 60) % 60).toInt
+  def getMinutes(microsec: SQLTimestamp): Int = {
+val localTs = absoluteMicroSecond(microsec) + 
defaultTimeZone.getOffset(microsec / 1000) * 1000L
+((localTs / MICROS_PER_SECOND / 60) % 60).toInt
   }
 
   /**
* Returns the second value of a given timestamp value. The timestamp is 
expressed in
* microseconds.
*/
-  def getSeconds(timestamp: SQLTimestamp): Int = {
-((timestamp / 1000 / 1000) % 60).toInt
+  def getSeconds(microsec: SQLTimestamp): Int = {
+((absoluteMicroSecond(microsec) / MICROS_PER_SECOND) % 60).toInt
   }
 
   private[this] def isLeapYear(year: Int): Boolean = {

http://git-wip-us.apache.org/repos/asf/spark/blob/83b957a3/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
--
diff --git 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
index 4633594..64d15e6 100644
--- 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
+++ 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
@@ -358,6 +358,19 @@ class DateTimeUtilsSuite extends SparkFunSuite {
 assert(getSeconds(c.getTimeInMillis * 1000) === 9)
   }
 
+  test("hours / miniute / seconds") {
+Seq(Timestamp.valueOf("2015-06-11 10:12:35.789"),
+  Timestamp.valueOf("2015-06-11 20:13:40.789"),
+  Timestamp.valueOf("1900-06-11 12:14:50.789"),
+  Timestamp.valueOf("1700-02-28 12:14:50.123456")).foreach { t =>
+  val us = fromJavaTimestamp(t)
+  assert(toJavaTimestamp(us) === t)
+  assert(getHours(us) === t.getHours)
+  assert(getMinutes(us) === t.getMinutes)
+  assert(getSeconds(us) === t.getSeconds)
+}
+  }
+
   test("get day in year") {
 val c = Calendar.getInstance()
 c.set(2015, 2, 18, 0, 0, 0)


-
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

spark git commit: [SPARK-11537] [SQL] fix negative hours/minutes/seconds

2015-11-05 Thread davies
Repository: spark
Updated Branches:
  refs/heads/branch-1.5 dac83094f -> b8b1fbfc8


[SPARK-11537] [SQL] fix negative hours/minutes/seconds

Currently, if the Timestamp is before epoch (1970/01/01), the hours, minutes 
and seconds will be negative (also rounding up).

Author: Davies Liu 

Closes #9502 from davies/neg_hour.

(cherry picked from commit 07414afac9a100ede1dee5f3d45a657802c8bd2a)
Signed-off-by: Davies Liu 


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/b8b1fbfc
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/b8b1fbfc
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/b8b1fbfc

Branch: refs/heads/branch-1.5
Commit: b8b1fbfc8514c69059d619e3377ac3d9afac356c
Parents: dac8309
Author: Davies Liu 
Authored: Thu Nov 5 17:02:22 2015 -0800
Committer: Davies Liu 
Committed: Thu Nov 5 17:02:46 2015 -0800

--
 .../spark/sql/catalyst/util/DateTimeUtils.scala | 23 +---
 .../sql/catalyst/util/DateTimeUtilsSuite.scala  | 13 +++
 2 files changed, 28 insertions(+), 8 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/spark/blob/b8b1fbfc/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
--
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
index 687ca00..e9f0689 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
@@ -400,28 +400,35 @@ object DateTimeUtils {
   }
 
   /**
+   * Returns the microseconds since year zero (-17999) from microseconds since 
epoch.
+   */
+  def absoluteMicroSecond(microsec: SQLTimestamp): SQLTimestamp = {
+microsec + toYearZero * MICROS_PER_DAY
+  }
+
+  /**
* Returns the hour value of a given timestamp value. The timestamp is 
expressed in microseconds.
*/
-  def getHours(timestamp: SQLTimestamp): Int = {
-val localTs = (timestamp / 1000) + defaultTimeZone.getOffset(timestamp / 
1000)
-((localTs / 1000 / 3600) % 24).toInt
+  def getHours(microsec: SQLTimestamp): Int = {
+val localTs = absoluteMicroSecond(microsec) + 
defaultTimeZone.getOffset(microsec / 1000) * 1000L
+((localTs / MICROS_PER_SECOND / 3600) % 24).toInt
   }
 
   /**
* Returns the minute value of a given timestamp value. The timestamp is 
expressed in
* microseconds.
*/
-  def getMinutes(timestamp: SQLTimestamp): Int = {
-val localTs = (timestamp / 1000) + defaultTimeZone.getOffset(timestamp / 
1000)
-((localTs / 1000 / 60) % 60).toInt
+  def getMinutes(microsec: SQLTimestamp): Int = {
+val localTs = absoluteMicroSecond(microsec) + 
defaultTimeZone.getOffset(microsec / 1000) * 1000L
+((localTs / MICROS_PER_SECOND / 60) % 60).toInt
   }
 
   /**
* Returns the second value of a given timestamp value. The timestamp is 
expressed in
* microseconds.
*/
-  def getSeconds(timestamp: SQLTimestamp): Int = {
-((timestamp / 1000 / 1000) % 60).toInt
+  def getSeconds(microsec: SQLTimestamp): Int = {
+((absoluteMicroSecond(microsec) / MICROS_PER_SECOND) % 60).toInt
   }
 
   private[this] def isLeapYear(year: Int): Boolean = {

http://git-wip-us.apache.org/repos/asf/spark/blob/b8b1fbfc/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
--
diff --git 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
index 6b9a11f..d6d860f 100644
--- 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
+++ 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
@@ -326,6 +326,19 @@ class DateTimeUtilsSuite extends SparkFunSuite {
 assert(getSeconds(c.getTimeInMillis * 1000) === 9)
   }
 
+  test("hours / miniute / seconds") {
+Seq(Timestamp.valueOf("2015-06-11 10:12:35.789"),
+  Timestamp.valueOf("2015-06-11 20:13:40.789"),
+  Timestamp.valueOf("1900-06-11 12:14:50.789"),
+  Timestamp.valueOf("1700-02-28 12:14:50.123456")).foreach { t =>
+  val us = fromJavaTimestamp(t)
+  assert(toJavaTimestamp(us) === t)
+  assert(getHours(us) === t.getHours)
+  assert(getMinutes(us) === t.getMinutes)
+  assert(getSeconds(us) === t.getSeconds)
+}
+  }
+
   test("get day in year") {
 val c = Calendar.getInstance()
 c.set(2015, 2, 18, 0, 0, 0)


-
To unsubscribe, 

spark git commit: [SPARK-11537] [SQL] fix negative hours/minutes/seconds

2015-11-05 Thread davies
Repository: spark
Updated Branches:
  refs/heads/master 244010624 -> 07414afac


[SPARK-11537] [SQL] fix negative hours/minutes/seconds

Currently, if the Timestamp is before epoch (1970/01/01), the hours, minutes 
and seconds will be negative (also rounding up).

Author: Davies Liu 

Closes #9502 from davies/neg_hour.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/07414afa
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/07414afa
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/07414afa

Branch: refs/heads/master
Commit: 07414afac9a100ede1dee5f3d45a657802c8bd2a
Parents: 2440106
Author: Davies Liu 
Authored: Thu Nov 5 17:02:22 2015 -0800
Committer: Davies Liu 
Committed: Thu Nov 5 17:02:22 2015 -0800

--
 .../spark/sql/catalyst/util/DateTimeUtils.scala | 23 +---
 .../sql/catalyst/util/DateTimeUtilsSuite.scala  | 13 +++
 2 files changed, 28 insertions(+), 8 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/spark/blob/07414afa/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
--
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
index 781ed16..f5fff90 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
@@ -393,28 +393,35 @@ object DateTimeUtils {
   }
 
   /**
+   * Returns the microseconds since year zero (-17999) from microseconds since 
epoch.
+   */
+  def absoluteMicroSecond(microsec: SQLTimestamp): SQLTimestamp = {
+microsec + toYearZero * MICROS_PER_DAY
+  }
+
+  /**
* Returns the hour value of a given timestamp value. The timestamp is 
expressed in microseconds.
*/
-  def getHours(timestamp: SQLTimestamp): Int = {
-val localTs = (timestamp / 1000) + defaultTimeZone.getOffset(timestamp / 
1000)
-((localTs / 1000 / 3600) % 24).toInt
+  def getHours(microsec: SQLTimestamp): Int = {
+val localTs = absoluteMicroSecond(microsec) + 
defaultTimeZone.getOffset(microsec / 1000) * 1000L
+((localTs / MICROS_PER_SECOND / 3600) % 24).toInt
   }
 
   /**
* Returns the minute value of a given timestamp value. The timestamp is 
expressed in
* microseconds.
*/
-  def getMinutes(timestamp: SQLTimestamp): Int = {
-val localTs = (timestamp / 1000) + defaultTimeZone.getOffset(timestamp / 
1000)
-((localTs / 1000 / 60) % 60).toInt
+  def getMinutes(microsec: SQLTimestamp): Int = {
+val localTs = absoluteMicroSecond(microsec) + 
defaultTimeZone.getOffset(microsec / 1000) * 1000L
+((localTs / MICROS_PER_SECOND / 60) % 60).toInt
   }
 
   /**
* Returns the second value of a given timestamp value. The timestamp is 
expressed in
* microseconds.
*/
-  def getSeconds(timestamp: SQLTimestamp): Int = {
-((timestamp / 1000 / 1000) % 60).toInt
+  def getSeconds(microsec: SQLTimestamp): Int = {
+((absoluteMicroSecond(microsec) / MICROS_PER_SECOND) % 60).toInt
   }
 
   private[this] def isLeapYear(year: Int): Boolean = {

http://git-wip-us.apache.org/repos/asf/spark/blob/07414afa/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
--
diff --git 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
index 4633594..64d15e6 100644
--- 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
+++ 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
@@ -358,6 +358,19 @@ class DateTimeUtilsSuite extends SparkFunSuite {
 assert(getSeconds(c.getTimeInMillis * 1000) === 9)
   }
 
+  test("hours / miniute / seconds") {
+Seq(Timestamp.valueOf("2015-06-11 10:12:35.789"),
+  Timestamp.valueOf("2015-06-11 20:13:40.789"),
+  Timestamp.valueOf("1900-06-11 12:14:50.789"),
+  Timestamp.valueOf("1700-02-28 12:14:50.123456")).foreach { t =>
+  val us = fromJavaTimestamp(t)
+  assert(toJavaTimestamp(us) === t)
+  assert(getHours(us) === t.getHours)
+  assert(getMinutes(us) === t.getMinutes)
+  assert(getSeconds(us) === t.getSeconds)
+}
+  }
+
   test("get day in year") {
 val c = Calendar.getInstance()
 c.set(2015, 2, 18, 0, 0, 0)


-
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org