This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
     new ee2bf788ea3  [SPARK-40008][SQL][FOLLOWUP] Fix codegen of casting integrals to ANSI intervals
ee2bf788ea3 is described below

commit ee2bf788ea3fa3970076e58a31713426f8680cc4
Author: Max Gekk <max.g...@gmail.com>
AuthorDate: Wed Aug 10 19:45:53 2022 +0500

    [SPARK-40008][SQL][FOLLOWUP] Fix codegen of casting integrals to ANSI intervals

    ### What changes were proposed in this pull request?
    This is a follow-up of https://github.com/apache/spark/pull/37442 that fixes the codegen of casting integrals to ANSI intervals with multiple units. The PR removes the assert:
    ```scala
    assert(it.startField == it.endField)
    ```

    ### Why are the changes needed?
    To make the codegen consistent with the interpreted code, and to fix the failing assertion.

    ### Does this PR introduce _any_ user-facing change?
    Yes. Before:
    ```sql
    > select cast(dt as interval hour to second) from values(100Y) as t(dt);
    java.lang.AssertionError
    assertion failed
    ```

    After:
    ```sql
    > select cast(dt as interval hour to second) from values(100Y) as t(dt);
    0 00:01:40.000000000
    ```

    ### How was this patch tested?
    By running the new tests:
    ```
    $ build/sbt "sql/testOnly org.apache.spark.sql.SQLQueryTestSuite -- -z cast.sql"
    ```

    Closes #37464 from MaxGekk/cast-integral-to-interval-followup.

    Authored-by: Max Gekk <max.g...@gmail.com>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 .../org/apache/spark/sql/catalyst/expressions/Cast.scala |  2 --
 sql/core/src/test/resources/sql-tests/inputs/cast.sql    |  2 ++
 .../test/resources/sql-tests/results/ansi/cast.sql.out   | 16 ++++++++++++++++
 .../src/test/resources/sql-tests/results/cast.sql.out    | 16 ++++++++++++++++
 4 files changed, 34 insertions(+), 2 deletions(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
index 25e1889109e..8cd49b2374a 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
@@ -1803,7 +1803,6 @@ case class Cast(
             $evPrim = $util.durationToMicros($util.microsToDuration($c), (byte)${it.endField});
           """
       case x: IntegralType =>
-        assert(it.startField == it.endField)
         val util = IntervalUtils.getClass.getCanonicalName.stripSuffix("$")
         if (x == LongType) {
           (c, evPrim, _) =>
@@ -1834,7 +1833,6 @@ case class Cast(
             $evPrim = $util.periodToMonths($util.monthsToPeriod($c), (byte)${it.endField});
           """
       case x: IntegralType =>
-        assert(it.startField == it.endField)
         val util = IntervalUtils.getClass.getCanonicalName.stripSuffix("$")
         if (x == LongType) {
           (c, evPrim, _) =>
diff --git a/sql/core/src/test/resources/sql-tests/inputs/cast.sql b/sql/core/src/test/resources/sql-tests/inputs/cast.sql
index 34102a12507..fb92825c001 100644
--- a/sql/core/src/test/resources/sql-tests/inputs/cast.sql
+++ b/sql/core/src/test/resources/sql-tests/inputs/cast.sql
@@ -120,9 +120,11 @@ select cast(interval '1000000' second as smallint);
 
 -- cast integrals to ANSI intervals
 select cast(1Y as interval year);
 select cast(-122S as interval year to month);
+select cast(ym as interval year to month) from values(-122S) as t(ym);
 select cast(1000 as interval month);
 select cast(-10L as interval second);
 select cast(100Y as interval hour to second);
+select cast(dt as interval hour to second) from values(100Y) as t(dt);
 select cast(-1000S as interval day to second);
 select cast(10 as interval day);
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out
index c4b454b135c..373e8c7b362 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out
@@ -856,6 +856,14 @@ struct<CAST(-122 AS INTERVAL YEAR TO MONTH):interval year to month>
 -10-2
 
 
+-- !query
+select cast(ym as interval year to month) from values(-122S) as t(ym)
+-- !query schema
+struct<ym:interval year to month>
+-- !query output
+-10-2
+
+
 -- !query
 select cast(1000 as interval month)
 -- !query schema
@@ -880,6 +888,14 @@ struct<CAST(100 AS INTERVAL HOUR TO SECOND):interval hour to second>
 0 00:01:40.000000000
 
 
+-- !query
+select cast(dt as interval hour to second) from values(100Y) as t(dt)
+-- !query schema
+struct<dt:interval hour to second>
+-- !query output
+0 00:01:40.000000000
+
+
 -- !query
 select cast(-1000S as interval day to second)
 -- !query schema
diff --git a/sql/core/src/test/resources/sql-tests/results/cast.sql.out b/sql/core/src/test/resources/sql-tests/results/cast.sql.out
index 2b976914bfe..5e216045112 100644
--- a/sql/core/src/test/resources/sql-tests/results/cast.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/cast.sql.out
@@ -684,6 +684,14 @@ struct<CAST(-122 AS INTERVAL YEAR TO MONTH):interval year to month>
 -10-2
 
 
+-- !query
+select cast(ym as interval year to month) from values(-122S) as t(ym)
+-- !query schema
+struct<ym:interval year to month>
+-- !query output
+-10-2
+
+
 -- !query
 select cast(1000 as interval month)
 -- !query schema
@@ -708,6 +716,14 @@ struct<CAST(100 AS INTERVAL HOUR TO SECOND):interval hour to second>
 0 00:01:40.000000000
 
 
+-- !query
+select cast(dt as interval hour to second) from values(100Y) as t(dt)
+-- !query schema
+struct<dt:interval hour to second>
+-- !query output
+0 00:01:40.000000000
+
+
 -- !query
 select cast(-1000S as interval day to second)
 -- !query schema
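
For readers who want the underlying arithmetic, here is a minimal, self-contained Scala sketch of the semantics the fixed codegen has to reproduce: an integral source value is interpreted in the unit of the target interval's end field (SECOND for HOUR TO SECOND), which is why `cast(100Y as interval hour to second)` yields `0 00:01:40.000000000`. The object and helper names below are illustrative only, not Spark internals.

```scala
import java.time.Duration
import java.time.temporal.ChronoUnit

// Illustrative sketch only (not Spark code): an integral cast to a multi-unit
// day-time interval is interpreted in the unit of the interval's END field.
// For INTERVAL HOUR TO SECOND the end field is SECOND, so 100 means 100 seconds.
object IntegralToDayTimeIntervalSketch {
  private val MicrosPerSecond = 1000000L

  // Hypothetical helper: turn an integral value (taken as seconds, the assumed
  // end field) into the microseconds that back a day-time interval value.
  def secondsToIntervalMicros(v: Long): Long = Math.multiplyExact(v, MicrosPerSecond)

  def main(args: Array[String]): Unit = {
    val micros = secondsToIntervalMicros(100L) // cast(100Y as interval hour to second)
    val duration = Duration.of(micros, ChronoUnit.MICROS)
    println(duration) // PT1M40S, i.e. 0 00:01:40 in Spark's day-time interval rendering
  }
}
```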
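A quick way to exercise the fixed path end to end, assuming a running `SparkSession` bound to `spark` (for example in `spark-shell`). Casting a column rather than a foldable literal keeps the cast out of constant folding, which appears to be why the new tests read the value from a VALUES clause:

```scala
// Assumes a SparkSession is available as `spark` (e.g. in spark-shell).
// Casting a column, not a literal, exercises the generated code path that
// previously failed with java.lang.AssertionError.
val df = spark.sql(
  "select cast(dt as interval hour to second) from values (100Y) as t(dt)")
df.show(false) // prints 0 00:01:40.000000000
```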