spark git commit: [SPARK-11752] [SQL] fix timezone problem for DateTimeUtils.getSeconds

2015-11-16 Thread davies
Repository: spark
Updated Branches:
  refs/heads/branch-1.5 b767ceeb2 -> bf79a171e


[SPARK-11752] [SQL] fix timezone problem for DateTimeUtils.getSeconds

code snippet to reproduce it:
```
TimeZone.setDefault(TimeZone.getTimeZone("Asia/Shanghai"))
val t = Timestamp.valueOf("1900-06-11 12:14:50.789")
val us = fromJavaTimestamp(t)
assert(getSeconds(us) === t.getSeconds)
```

it would be good to add a regression test for it, but the reproducing code needs 
to change the default timezone, and even if we change it back, the `lazy val 
defaultTimeZone` in `DateTimeUtils` is fixed.

Author: Wenchen Fan 

Closes #9728 from cloud-fan/seconds.

(cherry picked from commit 06f1fdba6d1425afddfc1d45a20dbe9bede15e7a)
Signed-off-by: Davies Liu 


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/bf79a171
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/bf79a171
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/bf79a171

Branch: refs/heads/branch-1.5
Commit: bf79a171e93c63d48dcc2fe066440f6ed61c9cb1
Parents: b767cee
Author: Wenchen Fan 
Authored: Mon Nov 16 08:58:40 2015 -0800
Committer: Davies Liu 
Committed: Mon Nov 16 09:00:29 2015 -0800

--
 .../spark/sql/catalyst/util/DateTimeUtils.scala   | 14 --
 .../spark/sql/catalyst/util/DateTimeUtilsSuite.scala  |  2 +-
 2 files changed, 9 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/spark/blob/bf79a171/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
--
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
index e9f0689..c6a2780 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
@@ -402,16 +402,19 @@ object DateTimeUtils {
   /**
* Returns the microseconds since year zero (-17999) from microseconds since 
epoch.
*/
-  def absoluteMicroSecond(microsec: SQLTimestamp): SQLTimestamp = {
+  private def absoluteMicroSecond(microsec: SQLTimestamp): SQLTimestamp = {
 microsec + toYearZero * MICROS_PER_DAY
   }
 
+  private def localTimestamp(microsec: SQLTimestamp): SQLTimestamp = {
+absoluteMicroSecond(microsec) + defaultTimeZone.getOffset(microsec / 1000) 
* 1000L
+  }
+
   /**
* Returns the hour value of a given timestamp value. The timestamp is 
expressed in microseconds.
*/
   def getHours(microsec: SQLTimestamp): Int = {
-val localTs = absoluteMicroSecond(microsec) + 
defaultTimeZone.getOffset(microsec / 1000) * 1000L
-((localTs / MICROS_PER_SECOND / 3600) % 24).toInt
+((localTimestamp(microsec) / MICROS_PER_SECOND / 3600) % 24).toInt
   }
 
   /**
@@ -419,8 +422,7 @@ object DateTimeUtils {
* microseconds.
*/
   def getMinutes(microsec: SQLTimestamp): Int = {
-val localTs = absoluteMicroSecond(microsec) + 
defaultTimeZone.getOffset(microsec / 1000) * 1000L
-((localTs / MICROS_PER_SECOND / 60) % 60).toInt
+((localTimestamp(microsec) / MICROS_PER_SECOND / 60) % 60).toInt
   }
 
   /**
@@ -428,7 +430,7 @@ object DateTimeUtils {
* microseconds.
*/
   def getSeconds(microsec: SQLTimestamp): Int = {
-((absoluteMicroSecond(microsec) / MICROS_PER_SECOND) % 60).toInt
+((localTimestamp(microsec) / MICROS_PER_SECOND) % 60).toInt
   }
 
   private[this] def isLeapYear(year: Int): Boolean = {

http://git-wip-us.apache.org/repos/asf/spark/blob/bf79a171/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
--
diff --git 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
index d6d860f..b35d400 100644
--- 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
+++ 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
@@ -326,7 +326,7 @@ class DateTimeUtilsSuite extends SparkFunSuite {
 assert(getSeconds(c.getTimeInMillis * 1000) === 9)
   }
 
-  test("hours / miniute / seconds") {
+  test("hours / minutes / seconds") {
 Seq(Timestamp.valueOf("2015-06-11 10:12:35.789"),
   Timestamp.valueOf("2015-06-11 20:13:40.789"),
   Timestamp.valueOf("1900-06-11 12:14:50.789"),


-
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org



spark git commit: [SPARK-11752] [SQL] fix timezone problem for DateTimeUtils.getSeconds

2015-11-16 Thread davies
Repository: spark
Updated Branches:
  refs/heads/master 0e79604ae -> 06f1fdba6


[SPARK-11752] [SQL] fix timezone problem for DateTimeUtils.getSeconds

code snippet to reproduce it:
```
TimeZone.setDefault(TimeZone.getTimeZone("Asia/Shanghai"))
val t = Timestamp.valueOf("1900-06-11 12:14:50.789")
val us = fromJavaTimestamp(t)
assert(getSeconds(us) === t.getSeconds)
```

it would be good to add a regression test for it, but the reproducing code needs 
to change the default timezone, and even if we change it back, the `lazy val 
defaultTimeZone` in `DateTimeUtils` is fixed.

Author: Wenchen Fan 

Closes #9728 from cloud-fan/seconds.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/06f1fdba
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/06f1fdba
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/06f1fdba

Branch: refs/heads/master
Commit: 06f1fdba6d1425afddfc1d45a20dbe9bede15e7a
Parents: 0e79604
Author: Wenchen Fan 
Authored: Mon Nov 16 08:58:40 2015 -0800
Committer: Davies Liu 
Committed: Mon Nov 16 08:58:40 2015 -0800

--
 .../spark/sql/catalyst/util/DateTimeUtils.scala   | 14 --
 .../spark/sql/catalyst/util/DateTimeUtilsSuite.scala  |  2 +-
 2 files changed, 9 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/spark/blob/06f1fdba/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
--
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
index deff8a5..8fb3f41 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
@@ -395,16 +395,19 @@ object DateTimeUtils {
   /**
* Returns the microseconds since year zero (-17999) from microseconds since 
epoch.
*/
-  def absoluteMicroSecond(microsec: SQLTimestamp): SQLTimestamp = {
+  private def absoluteMicroSecond(microsec: SQLTimestamp): SQLTimestamp = {
 microsec + toYearZero * MICROS_PER_DAY
   }
 
+  private def localTimestamp(microsec: SQLTimestamp): SQLTimestamp = {
+absoluteMicroSecond(microsec) + defaultTimeZone.getOffset(microsec / 1000) 
* 1000L
+  }
+
   /**
* Returns the hour value of a given timestamp value. The timestamp is 
expressed in microseconds.
*/
   def getHours(microsec: SQLTimestamp): Int = {
-val localTs = absoluteMicroSecond(microsec) + 
defaultTimeZone.getOffset(microsec / 1000) * 1000L
-((localTs / MICROS_PER_SECOND / 3600) % 24).toInt
+((localTimestamp(microsec) / MICROS_PER_SECOND / 3600) % 24).toInt
   }
 
   /**
@@ -412,8 +415,7 @@ object DateTimeUtils {
* microseconds.
*/
   def getMinutes(microsec: SQLTimestamp): Int = {
-val localTs = absoluteMicroSecond(microsec) + 
defaultTimeZone.getOffset(microsec / 1000) * 1000L
-((localTs / MICROS_PER_SECOND / 60) % 60).toInt
+((localTimestamp(microsec) / MICROS_PER_SECOND / 60) % 60).toInt
   }
 
   /**
@@ -421,7 +423,7 @@ object DateTimeUtils {
* microseconds.
*/
   def getSeconds(microsec: SQLTimestamp): Int = {
-((absoluteMicroSecond(microsec) / MICROS_PER_SECOND) % 60).toInt
+((localTimestamp(microsec) / MICROS_PER_SECOND) % 60).toInt
   }
 
   private[this] def isLeapYear(year: Int): Boolean = {

http://git-wip-us.apache.org/repos/asf/spark/blob/06f1fdba/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
--
diff --git 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
index 64d15e6..60d4542 100644
--- 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
+++ 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
@@ -358,7 +358,7 @@ class DateTimeUtilsSuite extends SparkFunSuite {
 assert(getSeconds(c.getTimeInMillis * 1000) === 9)
   }
 
-  test("hours / miniute / seconds") {
+  test("hours / minutes / seconds") {
 Seq(Timestamp.valueOf("2015-06-11 10:12:35.789"),
   Timestamp.valueOf("2015-06-11 20:13:40.789"),
   Timestamp.valueOf("1900-06-11 12:14:50.789"),


-
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org



spark git commit: [SPARK-11752] [SQL] fix timezone problem for DateTimeUtils.getSeconds

2015-11-16 Thread davies
Repository: spark
Updated Branches:
  refs/heads/branch-1.6 c37ed52ec -> 949c9b7c6


[SPARK-11752] [SQL] fix timezone problem for DateTimeUtils.getSeconds

code snippet to reproduce it:
```
TimeZone.setDefault(TimeZone.getTimeZone("Asia/Shanghai"))
val t = Timestamp.valueOf("1900-06-11 12:14:50.789")
val us = fromJavaTimestamp(t)
assert(getSeconds(us) === t.getSeconds)
```

it would be good to add a regression test for it, but the reproducing code needs 
to change the default timezone, and even if we change it back, the `lazy val 
defaultTimeZone` in `DateTimeUtils` is fixed.

Author: Wenchen Fan 

Closes #9728 from cloud-fan/seconds.

(cherry picked from commit 06f1fdba6d1425afddfc1d45a20dbe9bede15e7a)
Signed-off-by: Davies Liu 


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/949c9b7c
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/949c9b7c
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/949c9b7c

Branch: refs/heads/branch-1.6
Commit: 949c9b7c660c37bbe543adb260380afb1258089e
Parents: c37ed52
Author: Wenchen Fan 
Authored: Mon Nov 16 08:58:40 2015 -0800
Committer: Davies Liu 
Committed: Mon Nov 16 08:58:50 2015 -0800

--
 .../spark/sql/catalyst/util/DateTimeUtils.scala   | 14 --
 .../spark/sql/catalyst/util/DateTimeUtilsSuite.scala  |  2 +-
 2 files changed, 9 insertions(+), 7 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/spark/blob/949c9b7c/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
--
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
index deff8a5..8fb3f41 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
@@ -395,16 +395,19 @@ object DateTimeUtils {
   /**
* Returns the microseconds since year zero (-17999) from microseconds since 
epoch.
*/
-  def absoluteMicroSecond(microsec: SQLTimestamp): SQLTimestamp = {
+  private def absoluteMicroSecond(microsec: SQLTimestamp): SQLTimestamp = {
 microsec + toYearZero * MICROS_PER_DAY
   }
 
+  private def localTimestamp(microsec: SQLTimestamp): SQLTimestamp = {
+absoluteMicroSecond(microsec) + defaultTimeZone.getOffset(microsec / 1000) 
* 1000L
+  }
+
   /**
* Returns the hour value of a given timestamp value. The timestamp is 
expressed in microseconds.
*/
   def getHours(microsec: SQLTimestamp): Int = {
-val localTs = absoluteMicroSecond(microsec) + 
defaultTimeZone.getOffset(microsec / 1000) * 1000L
-((localTs / MICROS_PER_SECOND / 3600) % 24).toInt
+((localTimestamp(microsec) / MICROS_PER_SECOND / 3600) % 24).toInt
   }
 
   /**
@@ -412,8 +415,7 @@ object DateTimeUtils {
* microseconds.
*/
   def getMinutes(microsec: SQLTimestamp): Int = {
-val localTs = absoluteMicroSecond(microsec) + 
defaultTimeZone.getOffset(microsec / 1000) * 1000L
-((localTs / MICROS_PER_SECOND / 60) % 60).toInt
+((localTimestamp(microsec) / MICROS_PER_SECOND / 60) % 60).toInt
   }
 
   /**
@@ -421,7 +423,7 @@ object DateTimeUtils {
* microseconds.
*/
   def getSeconds(microsec: SQLTimestamp): Int = {
-((absoluteMicroSecond(microsec) / MICROS_PER_SECOND) % 60).toInt
+((localTimestamp(microsec) / MICROS_PER_SECOND) % 60).toInt
   }
 
   private[this] def isLeapYear(year: Int): Boolean = {

http://git-wip-us.apache.org/repos/asf/spark/blob/949c9b7c/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
--
diff --git 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
index 64d15e6..60d4542 100644
--- 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
+++ 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
@@ -358,7 +358,7 @@ class DateTimeUtilsSuite extends SparkFunSuite {
 assert(getSeconds(c.getTimeInMillis * 1000) === 9)
   }
 
-  test("hours / miniute / seconds") {
+  test("hours / minutes / seconds") {
 Seq(Timestamp.valueOf("2015-06-11 10:12:35.789"),
   Timestamp.valueOf("2015-06-11 20:13:40.789"),
   Timestamp.valueOf("1900-06-11 12:14:50.789"),


-
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org