This is an automated email from the ASF dual-hosted git repository.

gurwls223 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new e3411a8  [SPARK-26720][SQL] Remove DateTimeUtils methods based on system default time zone
e3411a8 is described below

commit e3411a82c3296571e45e6d61fca7d9d5acc0aac9
Author: Maxim Gekk <max.g...@gmail.com>
AuthorDate: Fri Jan 25 17:06:22 2019 +0800

    [SPARK-26720][SQL] Remove DateTimeUtils methods based on system default time zone
    
    ## What changes were proposed in this pull request?
    
    In the PR, I propose to remove the following methods from `DateTimeUtils` (the overloads that fall back to the system default time zone):
    - `timestampAddInterval` and `stringToTimestamp` - used only in test suites
    - `truncTimestamp`, `getSeconds`, `getMinutes`, `getHours` - those methods assume the system default time zone and are not used elsewhere in Spark. A caller-side sketch of the replacement pattern follows below.
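    
    For illustration only (not part of this commit): after the removal, the surviving overloads take an explicit `java.util.TimeZone`, so callers such as the test suites now pass `DateTimeUtils.defaultTimeZone()` (or any other zone) themselves. A minimal sketch of that pattern is below; the object and helper names are hypothetical, and it assumes the post-change `spark-catalyst` and `spark-unsafe` artifacts on the classpath.
    
    ```scala
    import java.util.TimeZone
    
    import org.apache.spark.sql.catalyst.util.DateTimeUtils
    import org.apache.spark.unsafe.types.UTF8String
    
    object ExplicitTimeZoneCaller {
      // Parse a timestamp string in a caller-chosen zone; there is no longer an
      // overload that silently falls back to the JVM default time zone.
      def parseTs(s: String, tz: TimeZone): Option[Long] =
        DateTimeUtils.stringToTimestamp(UTF8String.fromString(s), tz)
    
      def main(args: Array[String]): Unit = {
        // Before this commit a caller could write
        //   DateTimeUtils.stringToTimestamp(UTF8String.fromString("2015-03-05T09:32:05.359"))
        // and get defaultTimeZone() behind the scenes; now the zone is explicit.
        val inUtc = parseTs("2015-03-05T09:32:05.359", TimeZone.getTimeZone("UTC"))
        val inPst = parseTs("2015-03-05T09:32:05.359", TimeZone.getTimeZone("PST"))
        println(inUtc) // Some(microseconds since epoch for that wall-clock time in UTC)
        println(inPst) // Some(a different value: the same wall-clock time in PST)
      }
    }
    ```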
    
    ## How was this patch tested?
    
    This was tested by `DateTimeUtilsSuite` and `UnsafeArraySuite`.
    
    Closes #23643 from MaxGekk/unused-date-time-utils.
    
    Authored-by: Maxim Gekk <max.g...@gmail.com>
    Signed-off-by: Hyukjin Kwon <gurwls...@apache.org>
---
 .../spark/sql/catalyst/util/DateTimeUtils.scala    | 43 --------------------
 .../sql/catalyst/util/DateTimeUtilsSuite.scala     | 46 +++++++++++-----------
 .../spark/sql/catalyst/util/UnsafeArraySuite.scala |  5 ++-
 3 files changed, 27 insertions(+), 67 deletions(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
index 8676479..911750e 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
@@ -219,10 +219,6 @@ object DateTimeUtils {
    * `T[h]h:[m]m:[s]s.[ms][ms][ms][us][us][us]-[h]h:[m]m`
    * `T[h]h:[m]m:[s]s.[ms][ms][ms][us][us][us]+[h]h:[m]m`
    */
-  def stringToTimestamp(s: UTF8String): Option[SQLTimestamp] = {
-    stringToTimestamp(s, defaultTimeZone())
-  }
-
   def stringToTimestamp(s: UTF8String, timeZone: TimeZone): Option[SQLTimestamp] = {
     if (s == null) {
       return None
@@ -453,10 +449,6 @@ object DateTimeUtils {
     microsec + toYearZero * MICROS_PER_DAY
   }
 
-  private def localTimestamp(microsec: SQLTimestamp): SQLTimestamp = {
-    localTimestamp(microsec, defaultTimeZone())
-  }
-
   private def localTimestamp(microsec: SQLTimestamp, timeZone: TimeZone): SQLTimestamp = {
     absoluteMicroSecond(microsec) + timeZone.getOffset(microsec / 1000) * 1000L
   }
@@ -464,13 +456,6 @@ object DateTimeUtils {
   /**
   * Returns the hour value of a given timestamp value. The timestamp is expressed in microseconds.
    */
-  def getHours(microsec: SQLTimestamp): Int = {
-    ((localTimestamp(microsec) / MICROS_PER_SECOND / 3600) % 24).toInt
-  }
-
-  /**
-   * Returns the hour value of a given timestamp value. The timestamp is expressed in microseconds.
-   */
   def getHours(microsec: SQLTimestamp, timeZone: TimeZone): Int = {
     ((localTimestamp(microsec, timeZone) / MICROS_PER_SECOND / 3600) % 24).toInt
   }
@@ -479,14 +464,6 @@ object DateTimeUtils {
   * Returns the minute value of a given timestamp value. The timestamp is expressed in
    * microseconds.
    */
-  def getMinutes(microsec: SQLTimestamp): Int = {
-    ((localTimestamp(microsec) / MICROS_PER_SECOND / 60) % 60).toInt
-  }
-
-  /**
-   * Returns the minute value of a given timestamp value. The timestamp is expressed in
-   * microseconds.
-   */
   def getMinutes(microsec: SQLTimestamp, timeZone: TimeZone): Int = {
     ((localTimestamp(microsec, timeZone) / MICROS_PER_SECOND / 60) % 60).toInt
   }
@@ -495,14 +472,6 @@ object DateTimeUtils {
   * Returns the second value of a given timestamp value. The timestamp is expressed in
    * microseconds.
    */
-  def getSeconds(microsec: SQLTimestamp): Int = {
-    ((localTimestamp(microsec) / MICROS_PER_SECOND) % 60).toInt
-  }
-
-  /**
-   * Returns the second value of a given timestamp value. The timestamp is expressed in
-   * microseconds.
-   */
   def getSeconds(microsec: SQLTimestamp, timeZone: TimeZone): Int = {
     ((localTimestamp(microsec, timeZone) / MICROS_PER_SECOND) % 60).toInt
   }
@@ -616,14 +585,6 @@ object DateTimeUtils {
    * Add timestamp and full interval.
   * Returns a timestamp value, expressed in microseconds since 1.1.1970 00:00:00.
    */
-  def timestampAddInterval(start: SQLTimestamp, months: Int, microseconds: Long): SQLTimestamp = {
-    timestampAddInterval(start, months, microseconds, defaultTimeZone())
-  }
-
-  /**
-   * Add timestamp and full interval.
-   * Returns a timestamp value, expressed in microseconds since 1.1.1970 00:00:00.
-   */
   def timestampAddInterval(
       start: SQLTimestamp,
       months: Int,
@@ -802,10 +763,6 @@ object DateTimeUtils {
     truncated * MICROS_PER_MILLIS
   }
 
-  def truncTimestamp(d: SQLTimestamp, level: Int): SQLTimestamp = {
-    truncTimestamp(d, level, defaultTimeZone())
-  }
-
   /**
   * Returns the truncate level, could be TRUNC_YEAR, TRUNC_MONTH, TRUNC_TO_DAY, TRUNC_TO_HOUR,
   * TRUNC_TO_MINUTE, TRUNC_TO_SECOND, TRUNC_TO_WEEK, TRUNC_TO_QUARTER or TRUNC_INVALID,
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
index ef34150..0c78e8f 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
@@ -30,6 +30,7 @@ import org.apache.spark.unsafe.types.UTF8String
 class DateTimeUtilsSuite extends SparkFunSuite {
 
   val TimeZonePST = TimeZone.getTimeZone("PST")
+  private def defaultTz = DateTimeUtils.defaultTimeZone()
 
   private[this] def getInUTCDays(localDate: LocalDate): Int = {
     val epochSeconds = localDate.atStartOfDay(TimeZoneUTC.toZoneId).toEpochSecond
@@ -39,7 +40,8 @@ class DateTimeUtilsSuite extends SparkFunSuite {
   test("nanoseconds truncation") {
     val tf = TimestampFormatter(DateTimeUtils.defaultTimeZone())
     def checkStringToTimestamp(originalTime: String, expectedParsedTime: String) {
-      val parsedTimestampOp = DateTimeUtils.stringToTimestamp(UTF8String.fromString(originalTime))
+      val parsedTimestampOp = DateTimeUtils.stringToTimestamp(
+        UTF8String.fromString(originalTime), defaultTz)
       assert(parsedTimestampOp.isDefined, "timestamp with nanoseconds was not parsed correctly")
       assert(DateTimeUtils.timestampToString(tf, parsedTimestampOp.get) === expectedParsedTime)
     }
@@ -328,11 +330,11 @@ class DateTimeUtilsSuite extends SparkFunSuite {
 
     // Test stringToTimestamp
     assert(stringToTimestamp(
-      UTF8String.fromString("2015-02-29 00:00:00")).isEmpty)
+      UTF8String.fromString("2015-02-29 00:00:00"), defaultTz).isEmpty)
     assert(stringToTimestamp(
-      UTF8String.fromString("2015-04-31 00:00:00")).isEmpty)
-    assert(stringToTimestamp(UTF8String.fromString("2015-02-29")).isEmpty)
-    assert(stringToTimestamp(UTF8String.fromString("2015-04-31")).isEmpty)
+      UTF8String.fromString("2015-04-31 00:00:00"), defaultTz).isEmpty)
+    assert(stringToTimestamp(UTF8String.fromString("2015-02-29"), defaultTz).isEmpty)
+    assert(stringToTimestamp(UTF8String.fromString("2015-04-31"), defaultTz).isEmpty)
   }
 
   test("hours") {
@@ -441,7 +443,7 @@ class DateTimeUtilsSuite extends SparkFunSuite {
     c2.set(2000, 1, 29, 10, 30, 0)
     c2.set(Calendar.MILLISECOND, 123)
     val ts2 = c2.getTimeInMillis * 1000L
-    assert(timestampAddInterval(ts1, 36, 123000) === ts2)
+    assert(timestampAddInterval(ts1, 36, 123000, defaultTimeZone()) === ts2)
 
     val c3 = Calendar.getInstance(TimeZonePST)
     c3.set(1997, 1, 27, 16, 0, 0)
@@ -558,20 +560,20 @@ class DateTimeUtilsSuite extends SparkFunSuite {
       val truncated =
         DateTimeUtils.truncTimestamp(inputTS, level, timezone)
       val expectedTS =
-        DateTimeUtils.stringToTimestamp(UTF8String.fromString(expected))
+        DateTimeUtils.stringToTimestamp(UTF8String.fromString(expected), defaultTz)
       assert(truncated === expectedTS.get)
     }
 
     val defaultInputTS =
-      DateTimeUtils.stringToTimestamp(UTF8String.fromString("2015-03-05T09:32:05.359"))
+      DateTimeUtils.stringToTimestamp(UTF8String.fromString("2015-03-05T09:32:05.359"), defaultTz)
     val defaultInputTS1 =
-      DateTimeUtils.stringToTimestamp(UTF8String.fromString("2015-03-31T20:32:05.359"))
+      DateTimeUtils.stringToTimestamp(UTF8String.fromString("2015-03-31T20:32:05.359"), defaultTz)
     val defaultInputTS2 =
-      DateTimeUtils.stringToTimestamp(UTF8String.fromString("2015-04-01T02:32:05.359"))
+      DateTimeUtils.stringToTimestamp(UTF8String.fromString("2015-04-01T02:32:05.359"), defaultTz)
     val defaultInputTS3 =
-      DateTimeUtils.stringToTimestamp(UTF8String.fromString("2015-03-30T02:32:05.359"))
+      DateTimeUtils.stringToTimestamp(UTF8String.fromString("2015-03-30T02:32:05.359"), defaultTz)
     val defaultInputTS4 =
-      DateTimeUtils.stringToTimestamp(UTF8String.fromString("2015-03-29T02:32:05.359"))
+      DateTimeUtils.stringToTimestamp(UTF8String.fromString("2015-03-29T02:32:05.359"), defaultTz)
 
     testTrunc(DateTimeUtils.TRUNC_TO_YEAR, "2015-01-01T00:00:00", defaultInputTS.get)
     testTrunc(DateTimeUtils.TRUNC_TO_MONTH, "2015-03-01T00:00:00", defaultInputTS.get)
@@ -590,16 +592,16 @@ class DateTimeUtilsSuite extends SparkFunSuite {
 
     for (tz <- DateTimeTestUtils.ALL_TIMEZONES) {
       DateTimeTestUtils.withDefaultTimeZone(tz) {
-        val inputTS =
-          DateTimeUtils.stringToTimestamp(UTF8String.fromString("2015-03-05T09:32:05.359"))
-        val inputTS1 =
-          DateTimeUtils.stringToTimestamp(UTF8String.fromString("2015-03-31T20:32:05.359"))
-        val inputTS2 =
-          DateTimeUtils.stringToTimestamp(UTF8String.fromString("2015-04-01T02:32:05.359"))
-        val inputTS3 =
-          DateTimeUtils.stringToTimestamp(UTF8String.fromString("2015-03-30T02:32:05.359"))
-        val inputTS4 =
-          DateTimeUtils.stringToTimestamp(UTF8String.fromString("2015-03-29T02:32:05.359"))
+        val inputTS = DateTimeUtils.stringToTimestamp(
+          UTF8String.fromString("2015-03-05T09:32:05.359"), defaultTz)
+        val inputTS1 = DateTimeUtils.stringToTimestamp(
+          UTF8String.fromString("2015-03-31T20:32:05.359"), defaultTz)
+        val inputTS2 = DateTimeUtils.stringToTimestamp(
+          UTF8String.fromString("2015-04-01T02:32:05.359"), defaultTz)
+        val inputTS3 = DateTimeUtils.stringToTimestamp(
+          UTF8String.fromString("2015-03-30T02:32:05.359"), defaultTz)
+        val inputTS4 = DateTimeUtils.stringToTimestamp(
+          UTF8String.fromString("2015-03-29T02:32:05.359"), defaultTz)
 
         testTrunc(DateTimeUtils.TRUNC_TO_YEAR, "2015-01-01T00:00:00", inputTS.get, tz)
         testTrunc(DateTimeUtils.TRUNC_TO_MONTH, "2015-03-01T00:00:00", inputTS.get, tz)
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/UnsafeArraySuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/UnsafeArraySuite.scala
index 755c889..9d1eaa1 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/UnsafeArraySuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/UnsafeArraySuite.scala
@@ -36,9 +36,10 @@ class UnsafeArraySuite extends SparkFunSuite {
   val dateArray = Array(
     DateTimeUtils.stringToDate(UTF8String.fromString("1970-1-1")).get,
     DateTimeUtils.stringToDate(UTF8String.fromString("2016-7-26")).get)
+  private def defaultTz = DateTimeUtils.defaultTimeZone()
   val timestampArray = Array(
-    DateTimeUtils.stringToTimestamp(UTF8String.fromString("1970-1-1 00:00:00")).get,
-    DateTimeUtils.stringToTimestamp(UTF8String.fromString("2016-7-26 00:00:00")).get)
+    DateTimeUtils.stringToTimestamp(UTF8String.fromString("1970-1-1 00:00:00"), defaultTz).get,
+    DateTimeUtils.stringToTimestamp(UTF8String.fromString("2016-7-26 00:00:00"), defaultTz).get)
   val decimalArray4_1 = Array(
     BigDecimal("123.4").setScale(1, BigDecimal.RoundingMode.FLOOR),
     BigDecimal("567.8").setScale(1, BigDecimal.RoundingMode.FLOOR))

