This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch branch-3.2
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-3.2 by this push:
     new 9103c1f  [SPARK-36055][SQL] Assign pretty SQL string to TimestampNTZ 
literals
9103c1f is described below

commit 9103c1fe2332a60424077ca9ecffb24afa440c55
Author: Gengliang Wang <gengli...@apache.org>
AuthorDate: Thu Jul 8 21:42:50 2021 +0300

    [SPARK-36055][SQL] Assign pretty SQL string to TimestampNTZ literals
    
    ### What changes were proposed in this pull request?
    
    Currently the TimestampNTZ literals show only the long value instead of the
timestamp string in their SQL string and toString results.
    Before changes (with default timestamp type as TIMESTAMP_NTZ)
    ```
    -- !query
    select timestamp '2019-01-01\t'
    -- !query schema
    struct<1546300800000000:timestamp_ntz>
    ```
    
    After changes:
    ```
    -- !query
    select timestamp '2019-01-01\t'
    -- !query schema
    struct<TIMESTAMP_NTZ '2019-01-01 00:00:00':timestamp_ntz>
    ```
    ### Why are the changes needed?
    
    Make the schema of TimestampNTZ literals readable.
    
    ### Does this PR introduce _any_ user-facing change?
    
    No
    
    ### How was this patch tested?
    
    Unit test
    
    Closes #33269 from gengliangwang/ntzLiteralString.
    
    Authored-by: Gengliang Wang <gengli...@apache.org>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
    (cherry picked from commit ee945e99cc1d3979a2c24077a9ae786ce50bbe81)
    Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 .../spark/sql/catalyst/expressions/literals.scala      |  6 +++++-
 .../catalyst/expressions/LiteralExpressionSuite.scala  |  9 +++++++++
 .../sql-tests/results/timestampNTZ/datetime.sql.out    | 18 +++++++++---------
 3 files changed, 23 insertions(+), 10 deletions(-)

diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala
index a2270eb..ee40909 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala
@@ -28,7 +28,7 @@ import java.lang.{Short => JavaShort}
 import java.math.{BigDecimal => JavaBigDecimal}
 import java.nio.charset.StandardCharsets
 import java.sql.{Date, Timestamp}
-import java.time.{Duration, Instant, LocalDate, LocalDateTime, Period}
+import java.time.{Duration, Instant, LocalDate, LocalDateTime, Period, 
ZoneOffset}
 import java.util
 import java.util.Objects
 import javax.xml.bind.DatatypeConverter
@@ -352,6 +352,8 @@ case class Literal (value: Any, dataType: DataType) extends 
LeafExpression {
           DateFormatter().format(value.asInstanceOf[Int])
         case TimestampType =>
           
TimestampFormatter.getFractionFormatter(timeZoneId).format(value.asInstanceOf[Long])
+        case TimestampNTZType =>
+          
TimestampFormatter.getFractionFormatter(ZoneOffset.UTC).format(value.asInstanceOf[Long])
         case DayTimeIntervalType(startField, endField) =>
           toDayTimeIntervalString(value.asInstanceOf[Long], ANSI_STYLE, 
startField, endField)
         case YearMonthIntervalType(startField, endField) =>
@@ -473,6 +475,8 @@ case class Literal (value: Any, dataType: DataType) extends 
LeafExpression {
       s"DATE '$toString'"
     case (v: Long, TimestampType) =>
       s"TIMESTAMP '$toString'"
+    case (v: Long, TimestampNTZType) =>
+      s"TIMESTAMP_NTZ '$toString'"
     case (i: CalendarInterval, CalendarIntervalType) =>
       s"INTERVAL '${i.toString}'"
     case (v: Array[Byte], BinaryType) => 
s"X'${DatatypeConverter.printHexBinary(v)}'"
diff --git 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/LiteralExpressionSuite.scala
 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/LiteralExpressionSuite.scala
index 50b7263..4081e13 100644
--- 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/LiteralExpressionSuite.scala
+++ 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/LiteralExpressionSuite.scala
@@ -362,6 +362,15 @@ class LiteralExpressionSuite extends SparkFunSuite with 
ExpressionEvalHelper {
     }
   }
 
+  test("SPARK-36055: TimestampNTZ toString") {
+    assert(Literal.default(TimestampNTZType).toString === "1970-01-01 
00:00:00")
+    withTimeZones(sessionTimeZone = "GMT+01:00", systemTimeZone = "GMT-08:00") 
{
+      val timestamp = LocalDateTime.of(2021, 2, 3, 16, 50, 3, 456000000)
+      val literalStr = Literal.create(timestamp).toString
+      assert(literalStr === "2021-02-03 16:50:03.456")
+    }
+  }
+
   test("SPARK-35664: construct literals from java.time.LocalDateTime") {
     Seq(
       LocalDateTime.of(1, 1, 1, 0, 0, 0, 0),
diff --git 
a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/datetime.sql.out 
b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/datetime.sql.out
index 131ad01..19e1a7d 100644
--- 
a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/datetime.sql.out
+++ 
b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/datetime.sql.out
@@ -259,7 +259,7 @@ struct<DATE '2019-01-01':date>
 -- !query
 select timestamp '2019-01-01\t'
 -- !query schema
-struct<1546300800000000:timestamp_ntz>
+struct<TIMESTAMP_NTZ '2019-01-01 00:00:00':timestamp_ntz>
 -- !query output
 2019-01-01 00:00:00
 
@@ -295,7 +295,7 @@ select timestamp '2019-01-01中文'
 -- !query
 select timestamp'2011-11-11 11:11:11' + interval '2' day
 -- !query schema
-struct<1321009871000000 + INTERVAL '2 days':timestamp_ntz>
+struct<TIMESTAMP_NTZ '2011-11-11 11:11:11' + INTERVAL '2 days':timestamp_ntz>
 -- !query output
 2011-11-13 11:11:11
 
@@ -303,7 +303,7 @@ struct<1321009871000000 + INTERVAL '2 days':timestamp_ntz>
 -- !query
 select timestamp'2011-11-11 11:11:11' - interval '2' day
 -- !query schema
-struct<1321009871000000 - INTERVAL '2 days':timestamp_ntz>
+struct<TIMESTAMP_NTZ '2011-11-11 11:11:11' - INTERVAL '2 days':timestamp_ntz>
 -- !query output
 2011-11-09 11:11:11
 
@@ -360,7 +360,7 @@ cannot resolve '1 + (- INTERVAL '2 seconds')' due to data 
type mismatch: argumen
 -- !query
 select date'2020-01-01' - timestamp'2019-10-06 10:11:12.345678'
 -- !query schema
-struct<(DATE '2020-01-01' - 1570356672345678):interval day to second>
+struct<(DATE '2020-01-01' - TIMESTAMP_NTZ '2019-10-06 
10:11:12.345678'):interval day to second>
 -- !query output
 86 13:48:47.654322000
 
@@ -368,7 +368,7 @@ struct<(DATE '2020-01-01' - 1570356672345678):interval day 
to second>
 -- !query
 select timestamp'2019-10-06 10:11:12.345678' - date'2020-01-01'
 -- !query schema
-struct<(1570356672345678 - DATE '2020-01-01'):interval day to second>
+struct<(TIMESTAMP_NTZ '2019-10-06 10:11:12.345678' - DATE 
'2020-01-01'):interval day to second>
 -- !query output
 -86 13:48:47.654322000
 
@@ -376,7 +376,7 @@ struct<(1570356672345678 - DATE '2020-01-01'):interval day 
to second>
 -- !query
 select timestamp'2019-10-06 10:11:12.345678' - null
 -- !query schema
-struct<(1570356672345678 - NULL):interval day to second>
+struct<(TIMESTAMP_NTZ '2019-10-06 10:11:12.345678' - NULL):interval day to 
second>
 -- !query output
 NULL
 
@@ -384,7 +384,7 @@ NULL
 -- !query
 select null - timestamp'2019-10-06 10:11:12.345678'
 -- !query schema
-struct<(NULL - 1570356672345678):interval day to second>
+struct<(NULL - TIMESTAMP_NTZ '2019-10-06 10:11:12.345678'):interval day to 
second>
 -- !query output
 NULL
 
@@ -535,7 +535,7 @@ select date_add(timestamp'2011-11-11', 1)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'date_add(1320969600000000, 1)' due to data type mismatch: 
argument 1 requires date type, however, '1320969600000000' is of timestamp_ntz 
type.; line 1 pos 7
+cannot resolve 'date_add(TIMESTAMP_NTZ '2011-11-11 00:00:00', 1)' due to data 
type mismatch: argument 1 requires date type, however, 'TIMESTAMP_NTZ 
'2011-11-11 00:00:00'' is of timestamp_ntz type.; line 1 pos 7
 
 
 -- !query
@@ -569,7 +569,7 @@ select date_sub(timestamp'2011-11-11', 1)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-cannot resolve 'date_sub(1320969600000000, 1)' due to data type mismatch: 
argument 1 requires date type, however, '1320969600000000' is of timestamp_ntz 
type.; line 1 pos 7
+cannot resolve 'date_sub(TIMESTAMP_NTZ '2011-11-11 00:00:00', 1)' due to data 
type mismatch: argument 1 requires date type, however, 'TIMESTAMP_NTZ 
'2011-11-11 00:00:00'' is of timestamp_ntz type.; line 1 pos 7
 
 
 -- !query

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to