This is an automated email from the ASF dual-hosted git repository. maxgekk pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push: new e8d6992 [SPARK-35153][SQL] Make textual representation of ANSI interval operators more readable e8d6992 is described below commit e8d6992b664cf668ed5ad12f891b2341f0095c55 Author: Max Gekk <max.g...@gmail.com> AuthorDate: Tue Apr 20 23:13:40 2021 +0300 [SPARK-35153][SQL] Make textual representation of ANSI interval operators more readable ### What changes were proposed in this pull request? In the PR, I propose to override the `sql` and `toString` methods of the expressions that implement operators over ANSI intervals (`YearMonthIntervalType`/`DayTimeIntervalType`), and replace internal expression class names by operators like `*`, `/` and `-`. ### Why are the changes needed? Proposed methods should make the textual representation of such operators more readable, and potentially parsable by the Spark SQL parser. ### Does this PR introduce _any_ user-facing change? Yes. This can influence column names. ### How was this patch tested? By running existing test suites for interval and datetime expressions, and re-generating the `*.sql` tests: ``` $ build/sbt "sql/testOnly *SQLQueryTestSuite -- -z interval.sql" $ build/sbt "sql/testOnly *SQLQueryTestSuite -- -z datetime.sql" ``` Closes #32262 from MaxGekk/interval-operator-sql. 
Authored-by: Max Gekk <max.g...@gmail.com> Signed-off-by: Max Gekk <max.g...@gmail.com> --- .../catalyst/expressions/datetimeExpressions.scala | 6 ++++++ .../catalyst/expressions/intervalExpressions.scala | 4 ++++ .../sql-tests/results/ansi/datetime.sql.out | 12 +++++------ .../sql-tests/results/ansi/interval.sql.out | 4 ++-- .../sql-tests/results/datetime-legacy.sql.out | 12 +++++------ .../resources/sql-tests/results/datetime.sql.out | 12 +++++------ .../resources/sql-tests/results/interval.sql.out | 4 ++-- .../typeCoercion/native/decimalPrecision.sql.out | 24 +++++++++++----------- .../typeCoercion/native/promoteStrings.sql.out | 6 +++--- 9 files changed, 47 insertions(+), 37 deletions(-) diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala index ba9d458..010b9b0 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala @@ -2556,6 +2556,9 @@ case class SubtractTimestamps( s"new org.apache.spark.unsafe.types.CalendarInterval(0, 0, $end - $start)") } + override def toString: String = s"($left - $right)" + override def sql: String = s"(${left.sql} - ${right.sql})" + override protected def withNewChildrenInternal( newLeft: Expression, newRight: Expression): SubtractTimestamps = copy(left = newLeft, right = newRight) @@ -2611,6 +2614,9 @@ case class SubtractDates( }) } + override def toString: String = s"($left - $right)" + override def sql: String = s"(${left.sql} - ${right.sql})" + override protected def withNewChildrenInternal( newLeft: Expression, newRight: Expression): SubtractDates = copy(left = newLeft, right = newRight) diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/intervalExpressions.scala 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/intervalExpressions.scala index 4311b38..b34bcaf 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/intervalExpressions.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/intervalExpressions.scala @@ -337,6 +337,7 @@ case class MultiplyYMInterval( } override def toString: String = s"($left * $right)" + override def sql: String = s"(${left.sql} * ${right.sql})" override protected def withNewChildrenInternal( newLeft: Expression, newRight: Expression): MultiplyYMInterval = @@ -383,6 +384,7 @@ case class MultiplyDTInterval( } override def toString: String = s"($left * $right)" + override def sql: String = s"(${left.sql} * ${right.sql})" override protected def withNewChildrenInternal( newLeft: Expression, newRight: Expression): MultiplyDTInterval = @@ -441,6 +443,7 @@ case class DivideYMInterval( } override def toString: String = s"($left / $right)" + override def sql: String = s"(${left.sql} / ${right.sql})" override protected def withNewChildrenInternal( newLeft: Expression, newRight: Expression): DivideYMInterval = @@ -488,6 +491,7 @@ case class DivideDTInterval( } override def toString: String = s"($left / $right)" + override def sql: String = s"(${left.sql} / ${right.sql})" override protected def withNewChildrenInternal( newLeft: Expression, newRight: Expression): DivideDTInterval = diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/datetime.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/datetime.sql.out index 4455b8a..c4dc693 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/datetime.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ansi/datetime.sql.out @@ -359,7 +359,7 @@ cannot resolve '1 + (- INTERVAL '2 seconds')' due to data type mismatch: argumen -- !query select date'2020-01-01' - timestamp'2019-10-06 10:11:12.345678' -- !query schema -struct<subtracttimestamps(DATE 
'2020-01-01', TIMESTAMP '2019-10-06 10:11:12.345678'):day-time interval> +struct<(DATE '2020-01-01' - TIMESTAMP '2019-10-06 10:11:12.345678'):day-time interval> -- !query output 86 13:48:47.654322000 @@ -367,7 +367,7 @@ struct<subtracttimestamps(DATE '2020-01-01', TIMESTAMP '2019-10-06 10:11:12.3456 -- !query select timestamp'2019-10-06 10:11:12.345678' - date'2020-01-01' -- !query schema -struct<subtracttimestamps(TIMESTAMP '2019-10-06 10:11:12.345678', DATE '2020-01-01'):day-time interval> +struct<(TIMESTAMP '2019-10-06 10:11:12.345678' - DATE '2020-01-01'):day-time interval> -- !query output -86 13:48:47.654322000 @@ -375,7 +375,7 @@ struct<subtracttimestamps(TIMESTAMP '2019-10-06 10:11:12.345678', DATE '2020-01- -- !query select timestamp'2019-10-06 10:11:12.345678' - null -- !query schema -struct<subtracttimestamps(TIMESTAMP '2019-10-06 10:11:12.345678', NULL):day-time interval> +struct<(TIMESTAMP '2019-10-06 10:11:12.345678' - NULL):day-time interval> -- !query output NULL @@ -383,7 +383,7 @@ NULL -- !query select null - timestamp'2019-10-06 10:11:12.345678' -- !query schema -struct<subtracttimestamps(NULL, TIMESTAMP '2019-10-06 10:11:12.345678'):day-time interval> +struct<(NULL - TIMESTAMP '2019-10-06 10:11:12.345678'):day-time interval> -- !query output NULL @@ -625,7 +625,7 @@ cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), v.str)' due to data type mi -- !query select null - date '2019-10-06' -- !query schema -struct<subtractdates(NULL, DATE '2019-10-06'):day-time interval> +struct<(NULL - DATE '2019-10-06'):day-time interval> -- !query output NULL @@ -633,7 +633,7 @@ NULL -- !query select date '2001-10-01' - date '2001-09-28' -- !query schema -struct<subtractdates(DATE '2001-10-01', DATE '2001-09-28'):day-time interval> +struct<(DATE '2001-10-01' - DATE '2001-09-28'):day-time interval> -- !query output 3 00:00:00.000000000 diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out 
b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out index d5ace9f..781a7d7 100644 --- a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out @@ -5,7 +5,7 @@ -- !query select 3 * (timestamp'2019-10-15 10:11:12.001002' - date'2019-10-15') -- !query schema -struct<multiplydtinterval(subtracttimestamps(TIMESTAMP '2019-10-15 10:11:12.001002', DATE '2019-10-15'), 3):day-time interval> +struct<((TIMESTAMP '2019-10-15 10:11:12.001002' - DATE '2019-10-15') * 3):day-time interval> -- !query output 1 06:33:36.003006000 @@ -21,7 +21,7 @@ struct<multiply_interval(INTERVAL '4 months 14 days 0.000003 seconds', 1.5):inte -- !query select (timestamp'2019-10-15' - timestamp'2019-10-14') / 1.5 -- !query schema -struct<dividedtinterval(subtracttimestamps(TIMESTAMP '2019-10-15 00:00:00', TIMESTAMP '2019-10-14 00:00:00'), 1.5):day-time interval> +struct<((TIMESTAMP '2019-10-15 00:00:00' - TIMESTAMP '2019-10-14 00:00:00') / 1.5):day-time interval> -- !query output 0 16:00:00.000000000 diff --git a/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out b/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out index ad312b2..b0d86bb 100644 --- a/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out @@ -336,7 +336,7 @@ cannot resolve '1 + (- INTERVAL '2 seconds')' due to data type mismatch: argumen -- !query select date'2020-01-01' - timestamp'2019-10-06 10:11:12.345678' -- !query schema -struct<subtracttimestamps(DATE '2020-01-01', TIMESTAMP '2019-10-06 10:11:12.345678'):day-time interval> +struct<(DATE '2020-01-01' - TIMESTAMP '2019-10-06 10:11:12.345678'):day-time interval> -- !query output 86 13:48:47.654322000 @@ -344,7 +344,7 @@ struct<subtracttimestamps(DATE '2020-01-01', TIMESTAMP '2019-10-06 10:11:12.3456 -- !query select timestamp'2019-10-06 
10:11:12.345678' - date'2020-01-01' -- !query schema -struct<subtracttimestamps(TIMESTAMP '2019-10-06 10:11:12.345678', DATE '2020-01-01'):day-time interval> +struct<(TIMESTAMP '2019-10-06 10:11:12.345678' - DATE '2020-01-01'):day-time interval> -- !query output -86 13:48:47.654322000 @@ -352,7 +352,7 @@ struct<subtracttimestamps(TIMESTAMP '2019-10-06 10:11:12.345678', DATE '2020-01- -- !query select timestamp'2019-10-06 10:11:12.345678' - null -- !query schema -struct<subtracttimestamps(TIMESTAMP '2019-10-06 10:11:12.345678', NULL):day-time interval> +struct<(TIMESTAMP '2019-10-06 10:11:12.345678' - NULL):day-time interval> -- !query output NULL @@ -360,7 +360,7 @@ NULL -- !query select null - timestamp'2019-10-06 10:11:12.345678' -- !query schema -struct<subtracttimestamps(NULL, TIMESTAMP '2019-10-06 10:11:12.345678'):day-time interval> +struct<(NULL - TIMESTAMP '2019-10-06 10:11:12.345678'):day-time interval> -- !query output NULL @@ -602,7 +602,7 @@ cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), v.str)' due to data type mi -- !query select null - date '2019-10-06' -- !query schema -struct<subtractdates(NULL, DATE '2019-10-06'):day-time interval> +struct<(NULL - DATE '2019-10-06'):day-time interval> -- !query output NULL @@ -610,7 +610,7 @@ NULL -- !query select date '2001-10-01' - date '2001-09-28' -- !query schema -struct<subtractdates(DATE '2001-10-01', DATE '2001-09-28'):day-time interval> +struct<(DATE '2001-10-01' - DATE '2001-09-28'):day-time interval> -- !query output 3 00:00:00.000000000 diff --git a/sql/core/src/test/resources/sql-tests/results/datetime.sql.out b/sql/core/src/test/resources/sql-tests/results/datetime.sql.out index e93f0c8..34632c7 100755 --- a/sql/core/src/test/resources/sql-tests/results/datetime.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/datetime.sql.out @@ -336,7 +336,7 @@ cannot resolve '1 + (- INTERVAL '2 seconds')' due to data type mismatch: argumen -- !query select date'2020-01-01' - timestamp'2019-10-06 
10:11:12.345678' -- !query schema -struct<subtracttimestamps(DATE '2020-01-01', TIMESTAMP '2019-10-06 10:11:12.345678'):day-time interval> +struct<(DATE '2020-01-01' - TIMESTAMP '2019-10-06 10:11:12.345678'):day-time interval> -- !query output 86 13:48:47.654322000 @@ -344,7 +344,7 @@ struct<subtracttimestamps(DATE '2020-01-01', TIMESTAMP '2019-10-06 10:11:12.3456 -- !query select timestamp'2019-10-06 10:11:12.345678' - date'2020-01-01' -- !query schema -struct<subtracttimestamps(TIMESTAMP '2019-10-06 10:11:12.345678', DATE '2020-01-01'):day-time interval> +struct<(TIMESTAMP '2019-10-06 10:11:12.345678' - DATE '2020-01-01'):day-time interval> -- !query output -86 13:48:47.654322000 @@ -352,7 +352,7 @@ struct<subtracttimestamps(TIMESTAMP '2019-10-06 10:11:12.345678', DATE '2020-01- -- !query select timestamp'2019-10-06 10:11:12.345678' - null -- !query schema -struct<subtracttimestamps(TIMESTAMP '2019-10-06 10:11:12.345678', NULL):day-time interval> +struct<(TIMESTAMP '2019-10-06 10:11:12.345678' - NULL):day-time interval> -- !query output NULL @@ -360,7 +360,7 @@ NULL -- !query select null - timestamp'2019-10-06 10:11:12.345678' -- !query schema -struct<subtracttimestamps(NULL, TIMESTAMP '2019-10-06 10:11:12.345678'):day-time interval> +struct<(NULL - TIMESTAMP '2019-10-06 10:11:12.345678'):day-time interval> -- !query output NULL @@ -602,7 +602,7 @@ cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), v.str)' due to data type mi -- !query select null - date '2019-10-06' -- !query schema -struct<subtractdates(NULL, DATE '2019-10-06'):day-time interval> +struct<(NULL - DATE '2019-10-06'):day-time interval> -- !query output NULL @@ -610,7 +610,7 @@ NULL -- !query select date '2001-10-01' - date '2001-09-28' -- !query schema -struct<subtractdates(DATE '2001-10-01', DATE '2001-09-28'):day-time interval> +struct<(DATE '2001-10-01' - DATE '2001-09-28'):day-time interval> -- !query output 3 00:00:00.000000000 diff --git 
a/sql/core/src/test/resources/sql-tests/results/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/interval.sql.out index 4855252..d525d80 100644 --- a/sql/core/src/test/resources/sql-tests/results/interval.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/interval.sql.out @@ -5,7 +5,7 @@ -- !query select 3 * (timestamp'2019-10-15 10:11:12.001002' - date'2019-10-15') -- !query schema -struct<multiplydtinterval(subtracttimestamps(TIMESTAMP '2019-10-15 10:11:12.001002', DATE '2019-10-15'), 3):day-time interval> +struct<((TIMESTAMP '2019-10-15 10:11:12.001002' - DATE '2019-10-15') * 3):day-time interval> -- !query output 1 06:33:36.003006000 @@ -21,7 +21,7 @@ struct<multiply_interval(INTERVAL '4 months 14 days 0.000003 seconds', 1.5):inte -- !query select (timestamp'2019-10-15' - timestamp'2019-10-14') / 1.5 -- !query schema -struct<dividedtinterval(subtracttimestamps(TIMESTAMP '2019-10-15 00:00:00', TIMESTAMP '2019-10-14 00:00:00'), 1.5):day-time interval> +struct<((TIMESTAMP '2019-10-15 00:00:00' - TIMESTAMP '2019-10-14 00:00:00') / 1.5):day-time interval> -- !query output 0 16:00:00.000000000 diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/decimalPrecision.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/decimalPrecision.sql.out index 36a890b..58c34aa 100644 --- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/decimalPrecision.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/decimalPrecision.sql.out @@ -1008,7 +1008,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) - cast(1 as decimal(3, 0)) FRO struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'subtracttimestamps(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP), CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: argument 2 requires timestamp type, however, 'CAST(1 AS DECIMAL(3,0))' is of decimal(3,0) type.; line 1 pos 7 +cannot resolve 
'(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) - CAST(1 AS DECIMAL(3,0)))' due to data type mismatch: argument 2 requires timestamp type, however, 'CAST(1 AS DECIMAL(3,0))' is of decimal(3,0) type.; line 1 pos 7 -- !query @@ -1017,7 +1017,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) - cast(1 as decimal(5, 0)) FRO struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'subtracttimestamps(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP), CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: argument 2 requires timestamp type, however, 'CAST(1 AS DECIMAL(5,0))' is of decimal(5,0) type.; line 1 pos 7 +cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) - CAST(1 AS DECIMAL(5,0)))' due to data type mismatch: argument 2 requires timestamp type, however, 'CAST(1 AS DECIMAL(5,0))' is of decimal(5,0) type.; line 1 pos 7 -- !query @@ -1026,7 +1026,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) - cast(1 as decimal(10, 0)) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'subtracttimestamps(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP), CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: argument 2 requires timestamp type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7 +cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) - CAST(1 AS DECIMAL(10,0)))' due to data type mismatch: argument 2 requires timestamp type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7 -- !query @@ -1035,7 +1035,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) - cast(1 as decimal(20, 0)) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'subtracttimestamps(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP), CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: argument 2 requires timestamp type, however, 'CAST(1 AS DECIMAL(20,0))' is of decimal(20,0) type.; line 1 pos 7 +cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) - 
CAST(1 AS DECIMAL(20,0)))' due to data type mismatch: argument 2 requires timestamp type, however, 'CAST(1 AS DECIMAL(20,0))' is of decimal(20,0) type.; line 1 pos 7 -- !query @@ -1408,7 +1408,7 @@ SELECT cast(1 as decimal(3, 0)) - cast('2017-12-11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'subtracttimestamps(CAST(1 AS DECIMAL(3,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS DECIMAL(3,0))' is of decimal(3,0) type.; line 1 pos 7 +cannot resolve '(CAST(1 AS DECIMAL(3,0)) - CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS DECIMAL(3,0))' is of decimal(3,0) type.; line 1 pos 7 -- !query @@ -1417,7 +1417,7 @@ SELECT cast(1 as decimal(5, 0)) - cast('2017-12-11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'subtracttimestamps(CAST(1 AS DECIMAL(5,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS DECIMAL(5,0))' is of decimal(5,0) type.; line 1 pos 7 +cannot resolve '(CAST(1 AS DECIMAL(5,0)) - CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS DECIMAL(5,0))' is of decimal(5,0) type.; line 1 pos 7 -- !query @@ -1426,7 +1426,7 @@ SELECT cast(1 as decimal(10, 0)) - cast('2017-12-11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'subtracttimestamps(CAST(1 AS DECIMAL(10,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7 +cannot resolve '(CAST(1 AS DECIMAL(10,0)) - CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: 
argument 1 requires timestamp type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7 -- !query @@ -1435,7 +1435,7 @@ SELECT cast(1 as decimal(20, 0)) - cast('2017-12-11 09:30:00.0' as timestamp) FR struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'subtracttimestamps(CAST(1 AS DECIMAL(20,0)), CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS DECIMAL(20,0))' is of decimal(20,0) type.; line 1 pos 7 +cannot resolve '(CAST(1 AS DECIMAL(20,0)) - CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: argument 1 requires timestamp type, however, 'CAST(1 AS DECIMAL(20,0))' is of decimal(20,0) type.; line 1 pos 7 -- !query @@ -1444,7 +1444,7 @@ SELECT cast(1 as decimal(3, 0)) - cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'subtractdates(CAST(1 AS DECIMAL(3,0)), CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: argument 1 requires date type, however, 'CAST(1 AS DECIMAL(3,0))' is of decimal(3,0) type.; line 1 pos 7 +cannot resolve '(CAST(1 AS DECIMAL(3,0)) - CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: argument 1 requires date type, however, 'CAST(1 AS DECIMAL(3,0))' is of decimal(3,0) type.; line 1 pos 7 -- !query @@ -1453,7 +1453,7 @@ SELECT cast(1 as decimal(5, 0)) - cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'subtractdates(CAST(1 AS DECIMAL(5,0)), CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: argument 1 requires date type, however, 'CAST(1 AS DECIMAL(5,0))' is of decimal(5,0) type.; line 1 pos 7 +cannot resolve '(CAST(1 AS DECIMAL(5,0)) - CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: argument 1 requires date type, however, 'CAST(1 AS DECIMAL(5,0))' is of decimal(5,0) type.; line 1 pos 7 -- 
!query @@ -1462,7 +1462,7 @@ SELECT cast(1 as decimal(10, 0)) - cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'subtractdates(CAST(1 AS DECIMAL(10,0)), CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: argument 1 requires date type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7 +cannot resolve '(CAST(1 AS DECIMAL(10,0)) - CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: argument 1 requires date type, however, 'CAST(1 AS DECIMAL(10,0))' is of decimal(10,0) type.; line 1 pos 7 -- !query @@ -1471,7 +1471,7 @@ SELECT cast(1 as decimal(20, 0)) - cast('2017-12-11 09:30:00' as date) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'subtractdates(CAST(1 AS DECIMAL(20,0)), CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: argument 1 requires date type, however, 'CAST(1 AS DECIMAL(20,0))' is of decimal(20,0) type.; line 1 pos 7 +cannot resolve '(CAST(1 AS DECIMAL(20,0)) - CAST('2017-12-11 09:30:00' AS DATE))' due to data type mismatch: argument 1 requires date type, however, 'CAST(1 AS DECIMAL(20,0))' is of decimal(20,0) type.; line 1 pos 7 -- !query diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/promoteStrings.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/promoteStrings.sql.out index 56c45fa..9c701ef 100644 --- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/promoteStrings.sql.out +++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/promoteStrings.sql.out @@ -198,13 +198,13 @@ SELECT '1' - cast('2017-12-11 09:30:00.0' as timestamp) FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'subtracttimestamps('1', CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: argument 1 requires timestamp type, however, ''1'' is of string type.; 
line 1 pos 7 +cannot resolve '('1' - CAST('2017-12-11 09:30:00.0' AS TIMESTAMP))' due to data type mismatch: argument 1 requires timestamp type, however, ''1'' is of string type.; line 1 pos 7 -- !query SELECT '1' - cast('2017-12-11 09:30:00' as date) FROM t -- !query schema -struct<subtractdates(1, CAST(2017-12-11 09:30:00 AS DATE)):day-time interval> +struct<(1 - CAST(2017-12-11 09:30:00 AS DATE)):day-time interval> -- !query output NULL @@ -781,7 +781,7 @@ SELECT cast('2017-12-11 09:30:00.0' as timestamp) - '1' FROM t struct<> -- !query output org.apache.spark.sql.AnalysisException -cannot resolve 'subtracttimestamps(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP), '1')' due to data type mismatch: argument 2 requires timestamp type, however, ''1'' is of string type.; line 1 pos 7 +cannot resolve '(CAST('2017-12-11 09:30:00.0' AS TIMESTAMP) - '1')' due to data type mismatch: argument 2 requires timestamp type, however, ''1'' is of string type.; line 1 pos 7 -- !query --------------------------------------------------------------------- To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org For additional commands, e-mail: commits-h...@spark.apache.org