This is an automated email from the ASF dual-hosted git repository. maxgekk pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push: new d21ff13 [SPARK-35716][SQL] Support casting of timestamp without time zone to date type d21ff13 is described below commit d21ff1318f614fc207d9cd3c485e4337faa8e878 Author: Gengliang Wang <gengli...@apache.org> AuthorDate: Thu Jun 10 23:37:02 2021 +0300 [SPARK-35716][SQL] Support casting of timestamp without time zone to date type ### What changes were proposed in this pull request? Extend the Cast expression and support TimestampWithoutTZType in casting to DateType. ### Why are the changes needed? To conform to the ANSI SQL standard, which requires support for such casting. ### Does this PR introduce _any_ user-facing change? No, the new timestamp type is not released yet. ### How was this patch tested? Unit test Closes #32869 from gengliangwang/castToDate. Authored-by: Gengliang Wang <gengli...@apache.org> Signed-off-by: Max Gekk <max.g...@gmail.com> --- .../org/apache/spark/sql/catalyst/expressions/Cast.scala | 9 +++++++++ .../org/apache/spark/sql/catalyst/expressions/CastSuite.scala | 11 +++++++++-- 2 files changed, 18 insertions(+), 2 deletions(-) diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala index 8de19ba..fba17d3 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala @@ -72,6 +72,7 @@ object Cast { case (StringType, DateType) => true case (TimestampType, DateType) => true + case (TimestampWithoutTZType, DateType) => true case (StringType, CalendarIntervalType) => true case (StringType, DayTimeIntervalType) => true @@ -534,6 +535,8 @@ abstract class CastBase extends UnaryExpression with TimeZoneAwareExpression wit // throw valid precision more than seconds, according to Hive. // Timestamp.nanos is in 0 to 999,999,999, no more than a second. 
buildCast[Long](_, t => microsToDays(t, zoneId)) + case TimestampWithoutTZType => + buildCast[Long](_, t => microsToDays(t, ZoneOffset.UTC)) } // IntervalConverter @@ -1204,6 +1207,11 @@ abstract class CastBase extends UnaryExpression with TimeZoneAwareExpression wit (c, evPrim, evNull) => code"""$evPrim = org.apache.spark.sql.catalyst.util.DateTimeUtils.microsToDays($c, $zid);""" + case TimestampWithoutTZType => + (c, evPrim, evNull) => + // scalastyle:off line.size.limit + code"$evPrim = org.apache.spark.sql.catalyst.util.DateTimeUtils.microsToDays($c, java.time.ZoneOffset.UTC);" + // scalastyle:on line.size.limit case _ => (c, evPrim, evNull) => code"$evNull = true;" } @@ -1953,6 +1961,7 @@ object AnsiCast { case (StringType, DateType) => true case (TimestampType, DateType) => true + case (TimestampWithoutTZType, DateType) => true case (_: NumericType, _: NumericType) => true case (StringType, _: NumericType) => true diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuite.scala index a4e4257..51a7740 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuite.scala @@ -18,7 +18,7 @@ package org.apache.spark.sql.catalyst.expressions import java.sql.{Date, Timestamp} -import java.time.{DateTimeException, Duration, LocalDateTime, Period} +import java.time.{DateTimeException, Duration, LocalDate, LocalDateTime, Period} import java.time.temporal.ChronoUnit import java.util.{Calendar, TimeZone} @@ -1256,10 +1256,17 @@ abstract class AnsiCastSuiteBase extends CastSuiteBase { test("SPARK-35711: cast timestamp without time zone to timestamp with local time zone") { specialTs.foreach { s => - val dt = LocalDateTime.parse(s.replace(" ", "T")) + val dt = LocalDateTime.parse(s) checkEvaluation(cast(dt, TimestampType), 
DateTimeUtils.localDateTimeToMicros(dt)) } } + + test("SPARK-35716: cast timestamp without time zone to date type") { + specialTs.foreach { s => + val dt = LocalDateTime.parse(s) + checkEvaluation(cast(dt, DateType), LocalDate.parse(s.split("T")(0))) + } + } } /** --------------------------------------------------------------------- To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org For additional commands, e-mail: commits-h...@spark.apache.org