This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch branch-3.2
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-3.2 by this push:
     new 74bfbcd  [SPARK-36021][SQL] Parse interval literals should support more than 2 digits
74bfbcd is described below

commit 74bfbcd6430b1a5d274c660224851ce1562813e4
Author: Angerszhuuuu <angers....@gmail.com>
AuthorDate: Wed Jul 7 20:31:29 2021 +0300

    [SPARK-36021][SQL] Parse interval literals should support more than 2 digits
    
    ### What changes were proposed in this pull request?
    For case
    ```
    spark-sql> select interval '123456:12' minute to second;
    Error in query:
    requirement failed: Interval string must match day-time format of 
'^(?<sign>[+|-])?(?<minute>\d{1,2}):(?<second>(\d{1,2})(\.(\d{1,9}))?)$': 
123456:12, set spark.sql.legacy.fromDayTimeString.enabled to true to restore 
the behavior before Spark 3.0.(line 1, pos 16)
    
    == SQL ==
    select interval '123456:12' minute to second
    ----------------^^^
    ```
    
    We should support hour/minute/second fields with more than 2 digits when parsing interval literal strings (see the sketch below).
    
    ### Why are the changes needed?
    Keep consistency: leading hour/minute/second fields should accept more than 2 digits, as the day field already does.
    
    ### Does this PR introduce _any_ user-facing change?
    No
    
    ### How was this patch tested?
    Added UT and new SQL boundary tests in interval.sql.
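
    The boundary values used by the new queries in interval.sql below ('106751991 04', '2562047788:00', '153722867280:54.775807', ...) follow from the microsecond range of a Long; a small, self-contained sketch of the derivation (the MICROS_PER_* constants are restated locally, mirroring the new MAX_* bounds added to IntervalUtils):
    ```
    object IntervalBoundsSketch {
      val MICROS_PER_SECOND: Long = 1000000L
      val MICROS_PER_MINUTE: Long = 60L * MICROS_PER_SECOND
      val MICROS_PER_HOUR: Long = 60L * MICROS_PER_MINUTE
      val MICROS_PER_DAY: Long = 24L * MICROS_PER_HOUR

      def main(args: Array[String]): Unit = {
        println(Long.MaxValue / MICROS_PER_DAY)     // 106751991     -> MAX_DAY
        println(Long.MaxValue / MICROS_PER_HOUR)    // 2562047788    -> MAX_HOUR
        println(Long.MaxValue / MICROS_PER_MINUTE)  // 153722867280  -> MAX_MINUTE
        println(Long.MaxValue / MICROS_PER_SECOND)  // 9223372036854 -> MAX_SECOND
      }
    }
    ```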
    
    Closes #33231 from AngersZhuuuu/SPARK-36021.
    
    Authored-by: Angerszhuuuu <angers....@gmail.com>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
    (cherry picked from commit ea3333a200e586043e29e2f3566b95b6943b811f)
    Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 .../spark/sql/catalyst/util/IntervalUtils.scala    | 144 +++++----------
 .../sql/catalyst/util/IntervalUtilsSuite.scala     |  25 +--
 .../test/resources/sql-tests/inputs/interval.sql   |  21 +++
 .../sql-tests/results/ansi/interval.sql.out        | 198 ++++++++++++++++++++-
 .../resources/sql-tests/results/interval.sql.out   | 198 ++++++++++++++++++++-
 .../sql-tests/results/postgreSQL/interval.sql.out  |  20 +--
 6 files changed, 467 insertions(+), 139 deletions(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala
index ad87f2a..24bcad8 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala
@@ -57,6 +57,12 @@ object IntervalUtils {
   }
   import IntervalUnit._
 
+  private val MAX_DAY = Long.MaxValue / MICROS_PER_DAY
+  private val MAX_HOUR = Long.MaxValue / MICROS_PER_HOUR
+  private val MAX_MINUTE = Long.MaxValue / MICROS_PER_MINUTE
+  private val MAX_SECOND = Long.MaxValue / MICROS_PER_SECOND
+  private val MIN_SECOND = Long.MinValue / MICROS_PER_SECOND
+
   def getYears(months: Int): Int = months / MONTHS_PER_YEAR
 
   def getYears(interval: CalendarInterval): Int = getYears(interval.months)
@@ -213,19 +219,25 @@ object IntervalUtils {
   }
 
   /**
-   * Parse YearMonth string in form: [+|-]YYYY-MM
+   * Parse year-month interval in form: [+|-]YYYY-MM
    *
    * adapted from HiveIntervalYearMonth.valueOf
    */
   def fromYearMonthString(input: String): CalendarInterval = {
+    fromYearMonthString(input, YM.YEAR, YM.MONTH)
+  }
+
+  /**
+   * Parse year-month interval in form: [+|-]YYYY-MM
+   *
+   * adapted from HiveIntervalYearMonth.valueOf
+   * Below interval conversion patterns are supported:
+   * - YEAR TO (YEAR|MONTH)
+   */
+  def fromYearMonthString(input: String, startField: Byte, endField: Byte): CalendarInterval = {
     require(input != null, "Interval year-month string must be not null")
-    input.trim match {
-      case yearMonthRegex(sign, yearStr, monthStr) =>
-        new CalendarInterval(toYMInterval(yearStr, monthStr, finalSign(sign)), 0, 0)
-      case _ =>
-        throw new IllegalArgumentException(
-          s"Interval string does not match year-month format of 'y-m': $input")
-    }
+    val months = castStringToYMInterval(UTF8String.fromString(input), startField, endField)
+    new CalendarInterval(months, 0, 0)
   }
 
   private def safeToInterval[T](interval: String)(f: => T): T = {
@@ -397,7 +409,7 @@ object IntervalUtils {
       secondStr: String,
       sign: Int): Long = {
     var micros = 0L
-    val days = toLongWithRange(DAY, dayStr, 0, Int.MaxValue).toInt
+    val days = toLongWithRange(DAY, dayStr, 0, MAX_DAY).toInt
     micros = Math.addExact(micros, sign * days * MICROS_PER_DAY)
     val hours = toLongWithRange(HOUR, hourStr, 0, 23)
     micros = Math.addExact(micros, sign * hours * MICROS_PER_HOUR)
@@ -413,7 +425,7 @@ object IntervalUtils {
       secondStr: String,
       sign: Int): Long = {
     var micros = 0L
-    val hours = toLongWithRange(HOUR, hourStr, 0, 2562047788L)
+    val hours = toLongWithRange(HOUR, hourStr, 0, MAX_HOUR)
     micros = Math.addExact(micros, sign * hours * MICROS_PER_HOUR)
     val minutes = toLongWithRange(MINUTE, minuteStr, 0, 59)
     micros = Math.addExact(micros, sign * minutes * MICROS_PER_MINUTE)
@@ -426,14 +438,22 @@ object IntervalUtils {
       secondStr: String,
       sign: Int): Long = {
     var micros = 0L
-    val minutes = toLongWithRange(MINUTE, minuteStr, 0, 153722867280L)
+    val minutes = toLongWithRange(MINUTE, minuteStr, 0, MAX_MINUTE)
     micros = Math.addExact(micros, sign * minutes * MICROS_PER_MINUTE)
     micros = Math.addExact(micros, sign * parseSecondNano(secondStr))
     micros
   }
 
+  def castDayTimeStringToInterval(
+      input: String,
+      startField: Byte,
+      endField: Byte): CalendarInterval = {
+    val micros = castStringToDTInterval(UTF8String.fromString(input), startField, endField)
+    new CalendarInterval(0, (micros / MICROS_PER_DAY).toInt, micros % MICROS_PER_DAY)
+  }
+
   /**
-   * Parse dayTime string in form: [-]d HH:mm:ss.nnnnnnnnn and [-]HH:mm:ss.nnnnnnnnn
+   * Parse day-time interval in form: [-]d HH:mm:ss.nnnnnnnnn and [-]HH:mm:ss.nnnnnnnnn
    *
    * adapted from HiveIntervalDayTime.valueOf
    */
@@ -442,19 +462,21 @@ object IntervalUtils {
   }
 
   /**
-   * Parse dayTime string in form: [-]d HH:mm:ss.nnnnnnnnn and [-]HH:mm:ss.nnnnnnnnn
+   * Parse day-time interval in form: [-]d HH:mm:ss.nnnnnnnnn and [-]HH:mm:ss.nnnnnnnnn
    *
    * adapted from HiveIntervalDayTime.valueOf.
    * Below interval conversion patterns are supported:
-   * - DAY TO (HOUR|MINUTE|SECOND)
-   * - HOUR TO (MINUTE|SECOND)
-   * - MINUTE TO SECOND
+   * - DAY TO (DAY|HOUR|MINUTE|SECOND)
+   * - HOUR TO (HOUR|MINUTE|SECOND)
+   * - MINUTE TO (MINUTE|SECOND)
    */
   def fromDayTimeString(input: String, from: IntervalUnit, to: IntervalUnit): CalendarInterval = {
+    require(input != null, "Interval day-time string must be not null")
     if (SQLConf.get.getConf(SQLConf.LEGACY_FROM_DAYTIME_STRING)) {
       parseDayTimeLegacy(input, from, to)
     } else {
-      parseDayTime(input, from, to)
+      castDayTimeStringToInterval(
+        input, DT.stringToField(from.toString), DT.stringToField(to.toString))
     }
   }
 
@@ -480,7 +502,6 @@ object IntervalUtils {
       input: String,
       from: IntervalUnit,
       to: IntervalUnit): CalendarInterval = {
-    require(input != null, "Interval day-time string must be not null")
     assert(input.length == input.trim.length)
     val m = dayTimePatternLegacy.pattern.matcher(input)
     require(m.matches, s"Interval string must match day-time format of 'd h:m:s.n': $input, " +
@@ -535,79 +556,6 @@ object IntervalUtils {
     }
   }
 
-  private val signRe = "(?<sign>[+|-])"
-  private val dayRe = "(?<day>\\d+)"
-  private val hourRe = "(?<hour>\\d{1,2})"
-  private val minuteRe = "(?<minute>\\d{1,2})"
-  private val secondRe = "(?<second>(\\d{1,2})(\\.(\\d{1,9}))?)"
-
-  private val dayTimePattern = Map(
-    (MINUTE, SECOND) -> s"^$signRe?$minuteRe:$secondRe$$".r,
-    (HOUR, MINUTE) -> s"^$signRe?$hourRe:$minuteRe$$".r,
-    (HOUR, SECOND) -> s"^$signRe?$hourRe:$minuteRe:$secondRe$$".r,
-    (DAY, HOUR) -> s"^$signRe?$dayRe $hourRe$$".r,
-    (DAY, MINUTE) -> s"^$signRe?$dayRe $hourRe:$minuteRe$$".r,
-    (DAY, SECOND) -> s"^$signRe?$dayRe $hourRe:$minuteRe:$secondRe$$".r
-  )
-
-  private def unitsRange(start: IntervalUnit, end: IntervalUnit): Seq[IntervalUnit] = {
-    (start.id to end.id).map(IntervalUnit(_))
-  }
-
-  /**
-   * Parses an input string in the day-time format defined by the `from` and `to` bounds.
-   * It supports the following formats:
-   * - [+|-]D+ H[H]:m[m]:s[s][.SSSSSSSSS] for DAY TO SECOND
-   * - [+|-]D+ H[H]:m[m] for DAY TO MINUTE
-   * - [+|-]D+ H[H] for DAY TO HOUR
-   * - [+|-]H[H]:m[m]s[s][.SSSSSSSSS] for HOUR TO SECOND
-   * - [+|-]H[H]:m[m] for HOUR TO MINUTE
-   * - [+|-]m[m]:s[s][.SSSSSSSSS] for MINUTE TO SECOND
-   *
-   * Note: the seconds fraction is truncated to microseconds.
-   *
-   * @param input The input string to parse.
-   * @param from The interval unit from which the input string begins.
-   * @param to The interval unit at where the input string ends.
-   * @return an instance of `CalendarInterval` if the input string was parsed successfully
-   *         otherwise throws an exception.
-   * @throws IllegalArgumentException The input string has incorrect format and cannot be parsed.
-   * @throws ArithmeticException An interval unit value is out of valid range or the resulted
-   *                             interval fields `days` or `microseconds` are out of the valid
-   *                             ranges.
-   */
-  private def parseDayTime(
-      input: String,
-      from: IntervalUnit,
-      to: IntervalUnit): CalendarInterval = {
-    require(input != null, "Interval day-time string must be not null")
-    val regexp = dayTimePattern.get(from -> to)
-    require(regexp.isDefined, s"Cannot support (interval '$input' $from to $to) expression")
-    val pattern = regexp.get.pattern
-    val m = pattern.matcher(input.trim)
-    require(m.matches, s"Interval string must match day-time format of '$pattern': $input, " +
-      s"$fallbackNotice")
-    var micros: Long = 0L
-    var days: Int = 0
-    unitsRange(to, from).foreach {
-      case unit @ DAY =>
-        days = toLongWithRange(unit, m.group(unit.toString), 0, Int.MaxValue).toInt
-      case unit @ HOUR =>
-        val parsed = toLongWithRange(unit, m.group(unit.toString), 0, 23)
-        micros = Math.addExact(micros, parsed * MICROS_PER_HOUR)
-      case unit @ MINUTE =>
-        val parsed = toLongWithRange(unit, m.group(unit.toString), 0, 59)
-        micros = Math.addExact(micros, parsed * MICROS_PER_MINUTE)
-      case unit @ SECOND =>
-        micros = Math.addExact(micros, parseSecondNano(m.group(unit.toString)))
-      case _ =>
-        throw new IllegalArgumentException(
-          s"Cannot support (interval '$input' $from to $to) expression")
-    }
-    val sign = if (m.group("sign") != null && m.group("sign") == "-") -1 else 1
-    new CalendarInterval(0, sign * days, sign * micros)
-  }
-
   // Parses a string with nanoseconds, truncates the result and returns microseconds
   private def parseNanos(nanosStr: String, isNegative: Boolean): Long = {
     if (nanosStr != null) {
@@ -628,11 +576,7 @@ object IntervalUtils {
    */
   private def parseSecondNano(secondNano: String): Long = {
     def parseSeconds(secondsStr: String): Long = {
-      toLongWithRange(
-        SECOND,
-        secondsStr,
-        Long.MinValue / MICROS_PER_SECOND,
-        Long.MaxValue / MICROS_PER_SECOND) * MICROS_PER_SECOND
+      toLongWithRange(SECOND, secondsStr, MIN_SECOND, MAX_SECOND) * MICROS_PER_SECOND
     }
 
     secondNano.split("\\.") match {
@@ -1254,10 +1198,10 @@ object IntervalUtils {
         val minIntervalString = style match {
           case ANSI_STYLE =>
             val firstStr = startField match {
-              case DT.DAY => "-106751991"
-              case DT.HOUR => "-2562047788"
-              case DT.MINUTE => "-153722867280"
-              case DT.SECOND => "-9223372036854.775808"
+              case DT.DAY => s"-$MAX_DAY"
+              case DT.HOUR => s"-$MAX_HOUR"
+              case DT.MINUTE => s"-$MAX_MINUTE"
+              case DT.SECOND => s"-$MAX_SECOND.775808"
             }
             val followingStr = if (startField == endField) {
               ""
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/IntervalUtilsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/IntervalUtilsSuite.scala
index 6e56a62..93e3ead 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/IntervalUtilsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/IntervalUtilsSuite.scala
@@ -327,41 +327,44 @@ class IntervalUtilsSuite extends SparkFunSuite with SQLHelper {
     check("12:40", HOUR, MINUTE, "12 hours 40 minutes")
     check("+12:40", HOUR, MINUTE, "12 hours 40 minutes")
     check("-12:40", HOUR, MINUTE, "-12 hours -40 minutes")
-    checkFail("5 12:40", HOUR, MINUTE, "must match day-time format")
+    checkFail("5 12:40", HOUR, MINUTE, "Interval string does not match 
day-time format")
 
     check("12:40:30.999999999", HOUR, SECOND, "12 hours 40 minutes 30.999999 
seconds")
     check("+12:40:30.123456789", HOUR, SECOND, "12 hours 40 minutes 30.123456 
seconds")
     check("-12:40:30.123456789", HOUR, SECOND, "-12 hours -40 minutes 
-30.123456 seconds")
-    checkFail("5 12:40:30", HOUR, SECOND, "must match day-time format")
-    checkFail("12:40:30.0123456789", HOUR, SECOND, "must match day-time 
format")
+    checkFail("5 12:40:30", HOUR, SECOND, "Interval string does not match 
day-time format")
+    checkFail("12:40:30.0123456789", HOUR, SECOND,
+      "Interval string does not match day-time format")
 
     check("40:30.123456789", MINUTE, SECOND, "40 minutes 30.123456 seconds")
     check("+40:30.123456789", MINUTE, SECOND, "40 minutes 30.123456 seconds")
     check("-40:30.123456789", MINUTE, SECOND, "-40 minutes -30.123456 seconds")
-    checkFail("12:40:30", MINUTE, SECOND, "must match day-time format")
+    checkFail("12:40:30", MINUTE, SECOND, "Interval string does not match 
day-time format")
 
     check("5 12", DAY, HOUR, "5 days 12 hours")
     check("+5 12", DAY, HOUR, "5 days 12 hours")
     check("-5 12", DAY, HOUR, "-5 days -12 hours")
-    checkFail("5 12:30", DAY, HOUR, "must match day-time format")
+    checkFail("5 12:30", DAY, HOUR, "Interval string does not match day-time 
format")
 
     check("5 12:40", DAY, MINUTE, "5 days 12 hours 40 minutes")
     check("+5 12:40", DAY, MINUTE, "5 days 12 hours 40 minutes")
     check("-5 12:40", DAY, MINUTE, "-5 days -12 hours -40 minutes")
-    checkFail("5 12", DAY, MINUTE, "must match day-time format")
+    checkFail("5 12", DAY, MINUTE, "Interval string does not match day-time 
format")
 
     check("5 12:40:30.123", DAY, SECOND, "5 days 12 hours 40 minutes 30.123 
seconds")
     check("+5 12:40:30.123456", DAY, SECOND, "5 days 12 hours 40 minutes 
30.123456 seconds")
     check("-5 12:40:30.123456789", DAY, SECOND, "-5 days -12 hours -40 minutes 
-30.123456 seconds")
-    checkFail("5 12", DAY, SECOND, "must match day-time format")
+    checkFail("5 12", DAY, SECOND, "Interval string does not match day-time 
format")
 
     checkFail("5 30:12:20", DAY, SECOND, "hour 30 outside range")
-    checkFail("5 30-12", DAY, SECOND, "must match day-time format")
-    checkFail("5 1:12:20", HOUR, MICROSECOND, "Cannot support (interval")
-
+    checkFail("5 30-12", DAY, SECOND, "Interval string does not match day-time 
format")
+    withClue("Expected to throw an exception for the invalid input") {
+      val e = intercept[NoSuchElementException](fromDayTimeString("5 1:12:20", HOUR, MICROSECOND))
+      assert(e.getMessage.contains("key not found: microsecond"))
+    }
     // whitespaces
     check("\t +5 12:40\t ", DAY, MINUTE, "5 days 12 hours 40 minutes")
-    checkFail("+5\t 12:40", DAY, MINUTE, "must match day-time format")
+    checkFail("+5\t 12:40", DAY, MINUTE, "Interval string does not match 
day-time format")
 
   }
 
diff --git a/sql/core/src/test/resources/sql-tests/inputs/interval.sql b/sql/core/src/test/resources/sql-tests/inputs/interval.sql
index fbcbf1c..2c054bd 100644
--- a/sql/core/src/test/resources/sql-tests/inputs/interval.sql
+++ b/sql/core/src/test/resources/sql-tests/inputs/interval.sql
@@ -253,3 +253,24 @@ SELECT (INTERVAL '-106751991 04:00:54.775808' DAY TO SECOND) / -1;
 SELECT (INTERVAL '-106751991 04:00:54.775808' DAY TO SECOND) / -1L;
 SELECT (INTERVAL '-106751991 04:00:54.775808' DAY TO SECOND) / -1.0;
 SELECT (INTERVAL '-106751991 04:00:54.775808' DAY TO SECOND) / -1.0D;
+
+SELECT INTERVAL '106751991 04' DAY TO HOUR;
+SELECT INTERVAL '106751991 04:00' DAY TO MINUTE;
+SELECT INTERVAL '106751991 04:00:54.775807' DAY TO SECOND;
+SELECT INTERVAL '2562047788:00' HOUR TO MINUTE;
+SELECT INTERVAL '2562047788:00:54.775807' HOUR TO SECOND;
+SELECT INTERVAL '153722867280:54.775807' MINUTE TO SECOND;
+SELECT INTERVAL '-106751991 04' DAY TO HOUR;
+SELECT INTERVAL '-106751991 04:00' DAY TO MINUTE;
+SELECT INTERVAL '-106751991 04:00:54.775808' DAY TO SECOND;
+SELECT INTERVAL '-2562047788:00' HOUR TO MINUTE;
+SELECT INTERVAL '-2562047788:00:54.775808' HOUR TO SECOND;
+SELECT INTERVAL '-153722867280:54.775808' MINUTE TO SECOND;
+
+SELECT INTERVAL '106751992 04' DAY TO HOUR;
+SELECT INTERVAL '-106751992 04' DAY TO HOUR;
+SELECT INTERVAL '2562047789:00' HOUR TO MINUTE;
+SELECT INTERVAL '-2562047789:00' HOUR TO MINUTE;
+SELECT INTERVAL '153722867281:54.775808' MINUTE TO SECOND;
+SELECT INTERVAL '-153722867281:54.775808' MINUTE TO SECOND;
+
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
index 42219d2..2468071 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
@@ -1,5 +1,5 @@
 -- Automatically generated by SQLQueryTestSuite
--- Number of queries: 147
+-- Number of queries: 165
 
 
 -- !query
@@ -546,7 +546,7 @@ struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
 
-requirement failed: Interval string must match day-time format of 
'^(?<sign>[+|-])?(?<day>\d+) (?<hour>\d{1,2})$': 20 15:40:32.99899999, set 
spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior 
before Spark 3.0.(line 1, pos 16)
+Interval string does not match day-time format of `[+|-]d h`, `INTERVAL 
[+|-]'[+|-]d h' DAY TO HOUR` when cast to interval day to hour: 20 
15:40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to 
restore the behavior before Spark 3.0.(line 1, pos 16)
 
 == SQL ==
 select interval '20 15:40:32.99899999' day to hour
@@ -560,7 +560,7 @@ struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
 
-requirement failed: Interval string must match day-time format of 
'^(?<sign>[+|-])?(?<day>\d+) (?<hour>\d{1,2}):(?<minute>\d{1,2})$': 20 
15:40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to 
restore the behavior before Spark 3.0.(line 1, pos 16)
+Interval string does not match day-time format of `[+|-]d h:m`, `INTERVAL 
[+|-]'[+|-]d h:m' DAY TO MINUTE` when cast to interval day to minute: 20 
15:40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to 
restore the behavior before Spark 3.0.(line 1, pos 16)
 
 == SQL ==
 select interval '20 15:40:32.99899999' day to minute
@@ -574,7 +574,7 @@ struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
 
-requirement failed: Interval string must match day-time format of 
'^(?<sign>[+|-])?(?<hour>\d{1,2}):(?<minute>\d{1,2})$': 15:40:32.99899999, set 
spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior 
before Spark 3.0.(line 1, pos 16)
+Interval string does not match day-time format of `[+|-]h:m`, `INTERVAL 
[+|-]'[+|-]h:m' HOUR TO MINUTE` when cast to interval hour to minute: 
15:40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to 
restore the behavior before Spark 3.0.(line 1, pos 16)
 
 == SQL ==
 select interval '15:40:32.99899999' hour to minute
@@ -588,7 +588,7 @@ struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
 
-requirement failed: Interval string must match day-time format of 
'^(?<sign>[+|-])?(?<hour>\d{1,2}):(?<minute>\d{1,2}):(?<second>(\d{1,2})(\.(\d{1,9}))?)$':
 15:40.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to 
restore the behavior before Spark 3.0.(line 1, pos 16)
+Interval string does not match day-time format of `[+|-]h:m:s.n`, `INTERVAL 
[+|-]'[+|-]h:m:s.n' HOUR TO SECOND` when cast to interval hour to second: 
15:40.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to 
restore the behavior before Spark 3.0.(line 1, pos 16)
 
 == SQL ==
 select interval '15:40.99899999' hour to second
@@ -602,7 +602,7 @@ struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
 
-requirement failed: Interval string must match day-time format of 
'^(?<sign>[+|-])?(?<hour>\d{1,2}):(?<minute>\d{1,2}):(?<second>(\d{1,2})(\.(\d{1,9}))?)$':
 15:40, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the 
behavior before Spark 3.0.(line 1, pos 16)
+Interval string does not match day-time format of `[+|-]h:m:s.n`, `INTERVAL 
[+|-]'[+|-]h:m:s.n' HOUR TO SECOND` when cast to interval hour to second: 
15:40, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the 
behavior before Spark 3.0.(line 1, pos 16)
 
 == SQL ==
 select interval '15:40' hour to second
@@ -616,7 +616,7 @@ struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
 
-requirement failed: Interval string must match day-time format of 
'^(?<sign>[+|-])?(?<minute>\d{1,2}):(?<second>(\d{1,2})(\.(\d{1,9}))?)$': 20 
40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to 
restore the behavior before Spark 3.0.(line 1, pos 16)
+Interval string does not match day-time format of `[+|-]m:s.n`, `INTERVAL 
[+|-]'[+|-]m:s.n' MINUTE TO SECOND` when cast to interval minute to second: 20 
40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to 
restore the behavior before Spark 3.0.(line 1, pos 16)
 
 == SQL ==
 select interval '20 40:32.99899999' minute to second
@@ -1093,7 +1093,7 @@ struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
 
-Interval string does not match year-month format of 'y-m': -   2-2     (line 
1, pos 16)
+Interval string does not match year-month format of `[+|-]d h`, `INTERVAL 
[+|-]'[+|-]d h' DAY TO HOUR` when cast to interval year to month: -  2-2     
(line 1, pos 16)
 
 == SQL ==
 select interval '-\t2-2\t' year to month
@@ -1115,7 +1115,7 @@ struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
 
-requirement failed: Interval string must match day-time format of 
'^(?<sign>[+|-])?(?<day>\d+) 
(?<hour>\d{1,2}):(?<minute>\d{1,2}):(?<second>(\d{1,2})(\.(\d{1,9}))?)$': 
+Interval string does not match day-time format of `[+|-]d h:m:s.n`, `INTERVAL 
[+|-]'[+|-]d h:m:s.n' DAY TO SECOND` when cast to interval day to second: 
 -      10       12:34:46.789   , set 
spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior 
before Spark 3.0.(line 1, pos 16)
 
 == SQL ==
@@ -1470,3 +1470,183 @@ struct<>
 -- !query output
 java.lang.ArithmeticException
 not in range
+
+
+-- !query
+SELECT INTERVAL '106751991 04' DAY TO HOUR
+-- !query schema
+struct<INTERVAL '106751991 04' DAY TO HOUR:interval day to hour>
+-- !query output
+106751991 04:00:00.000000000
+
+
+-- !query
+SELECT INTERVAL '106751991 04:00' DAY TO MINUTE
+-- !query schema
+struct<INTERVAL '106751991 04:00' DAY TO MINUTE:interval day to minute>
+-- !query output
+106751991 04:00:00.000000000
+
+
+-- !query
+SELECT INTERVAL '106751991 04:00:54.775807' DAY TO SECOND
+-- !query schema
+struct<INTERVAL '106751991 04:00:54.775807' DAY TO SECOND:interval day to 
second>
+-- !query output
+106751991 04:00:54.775807000
+
+
+-- !query
+SELECT INTERVAL '2562047788:00' HOUR TO MINUTE
+-- !query schema
+struct<INTERVAL '2562047788:00' HOUR TO MINUTE:interval hour to minute>
+-- !query output
+106751991 04:00:00.000000000
+
+
+-- !query
+SELECT INTERVAL '2562047788:00:54.775807' HOUR TO SECOND
+-- !query schema
+struct<INTERVAL '2562047788:00:54.775807' HOUR TO SECOND:interval hour to 
second>
+-- !query output
+106751991 04:00:54.775807000
+
+
+-- !query
+SELECT INTERVAL '153722867280:54.775807' MINUTE TO SECOND
+-- !query schema
+struct<INTERVAL '153722867280:54.775807' MINUTE TO SECOND:interval minute to 
second>
+-- !query output
+106751991 04:00:54.775807000
+
+
+-- !query
+SELECT INTERVAL '-106751991 04' DAY TO HOUR
+-- !query schema
+struct<INTERVAL '-106751991 04' DAY TO HOUR:interval day to hour>
+-- !query output
+-106751991 04:00:00.000000000
+
+
+-- !query
+SELECT INTERVAL '-106751991 04:00' DAY TO MINUTE
+-- !query schema
+struct<INTERVAL '-106751991 04:00' DAY TO MINUTE:interval day to minute>
+-- !query output
+-106751991 04:00:00.000000000
+
+
+-- !query
+SELECT INTERVAL '-106751991 04:00:54.775808' DAY TO SECOND
+-- !query schema
+struct<INTERVAL '-106751991 04:00:54.775808' DAY TO SECOND:interval day to 
second>
+-- !query output
+-106751991 04:00:54.775808000
+
+
+-- !query
+SELECT INTERVAL '-2562047788:00' HOUR TO MINUTE
+-- !query schema
+struct<INTERVAL '-2562047788:00' HOUR TO MINUTE:interval hour to minute>
+-- !query output
+-106751991 04:00:00.000000000
+
+
+-- !query
+SELECT INTERVAL '-2562047788:00:54.775808' HOUR TO SECOND
+-- !query schema
+struct<INTERVAL '-2562047788:00:54.775808' HOUR TO SECOND:interval hour to 
second>
+-- !query output
+-106751991 04:00:54.775808000
+
+
+-- !query
+SELECT INTERVAL '-153722867280:54.775808' MINUTE TO SECOND
+-- !query schema
+struct<INTERVAL '-153722867280:54.775808' MINUTE TO SECOND:interval minute to 
second>
+-- !query output
+-106751991 04:00:54.775808000
+
+
+-- !query
+SELECT INTERVAL '106751992 04' DAY TO HOUR
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+
+requirement failed: day 106751992 outside range [0, 106751991](line 1, pos 16)
+
+== SQL ==
+SELECT INTERVAL '106751992 04' DAY TO HOUR
+----------------^^^
+
+
+-- !query
+SELECT INTERVAL '-106751992 04' DAY TO HOUR
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+
+requirement failed: day 106751992 outside range [0, 106751991](line 1, pos 16)
+
+== SQL ==
+SELECT INTERVAL '-106751992 04' DAY TO HOUR
+----------------^^^
+
+
+-- !query
+SELECT INTERVAL '2562047789:00' HOUR TO MINUTE
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+
+requirement failed: hour 2562047789 outside range [0, 2562047788](line 1, pos 
16)
+
+== SQL ==
+SELECT INTERVAL '2562047789:00' HOUR TO MINUTE
+----------------^^^
+
+
+-- !query
+SELECT INTERVAL '-2562047789:00' HOUR TO MINUTE
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+
+requirement failed: hour 2562047789 outside range [0, 2562047788](line 1, pos 
16)
+
+== SQL ==
+SELECT INTERVAL '-2562047789:00' HOUR TO MINUTE
+----------------^^^
+
+
+-- !query
+SELECT INTERVAL '153722867281:54.775808' MINUTE TO SECOND
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+
+requirement failed: minute 153722867281 outside range [0, 153722867280](line 
1, pos 16)
+
+== SQL ==
+SELECT INTERVAL '153722867281:54.775808' MINUTE TO SECOND
+----------------^^^
+
+
+-- !query
+SELECT INTERVAL '-153722867281:54.775808' MINUTE TO SECOND
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+
+requirement failed: minute 153722867281 outside range [0, 153722867280](line 
1, pos 16)
+
+== SQL ==
+SELECT INTERVAL '-153722867281:54.775808' MINUTE TO SECOND
+----------------^^^
diff --git a/sql/core/src/test/resources/sql-tests/results/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/interval.sql.out
index c6b54d7..f39c79b 100644
--- a/sql/core/src/test/resources/sql-tests/results/interval.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/interval.sql.out
@@ -1,5 +1,5 @@
 -- Automatically generated by SQLQueryTestSuite
--- Number of queries: 147
+-- Number of queries: 165
 
 
 -- !query
@@ -540,7 +540,7 @@ struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
 
-requirement failed: Interval string must match day-time format of 
'^(?<sign>[+|-])?(?<day>\d+) (?<hour>\d{1,2})$': 20 15:40:32.99899999, set 
spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior 
before Spark 3.0.(line 1, pos 16)
+Interval string does not match day-time format of `[+|-]d h`, `INTERVAL 
[+|-]'[+|-]d h' DAY TO HOUR` when cast to interval day to hour: 20 
15:40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to 
restore the behavior before Spark 3.0.(line 1, pos 16)
 
 == SQL ==
 select interval '20 15:40:32.99899999' day to hour
@@ -554,7 +554,7 @@ struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
 
-requirement failed: Interval string must match day-time format of 
'^(?<sign>[+|-])?(?<day>\d+) (?<hour>\d{1,2}):(?<minute>\d{1,2})$': 20 
15:40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to 
restore the behavior before Spark 3.0.(line 1, pos 16)
+Interval string does not match day-time format of `[+|-]d h:m`, `INTERVAL 
[+|-]'[+|-]d h:m' DAY TO MINUTE` when cast to interval day to minute: 20 
15:40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to 
restore the behavior before Spark 3.0.(line 1, pos 16)
 
 == SQL ==
 select interval '20 15:40:32.99899999' day to minute
@@ -568,7 +568,7 @@ struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
 
-requirement failed: Interval string must match day-time format of 
'^(?<sign>[+|-])?(?<hour>\d{1,2}):(?<minute>\d{1,2})$': 15:40:32.99899999, set 
spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior 
before Spark 3.0.(line 1, pos 16)
+Interval string does not match day-time format of `[+|-]h:m`, `INTERVAL 
[+|-]'[+|-]h:m' HOUR TO MINUTE` when cast to interval hour to minute: 
15:40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to 
restore the behavior before Spark 3.0.(line 1, pos 16)
 
 == SQL ==
 select interval '15:40:32.99899999' hour to minute
@@ -582,7 +582,7 @@ struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
 
-requirement failed: Interval string must match day-time format of 
'^(?<sign>[+|-])?(?<hour>\d{1,2}):(?<minute>\d{1,2}):(?<second>(\d{1,2})(\.(\d{1,9}))?)$':
 15:40.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to 
restore the behavior before Spark 3.0.(line 1, pos 16)
+Interval string does not match day-time format of `[+|-]h:m:s.n`, `INTERVAL 
[+|-]'[+|-]h:m:s.n' HOUR TO SECOND` when cast to interval hour to second: 
15:40.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to 
restore the behavior before Spark 3.0.(line 1, pos 16)
 
 == SQL ==
 select interval '15:40.99899999' hour to second
@@ -596,7 +596,7 @@ struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
 
-requirement failed: Interval string must match day-time format of 
'^(?<sign>[+|-])?(?<hour>\d{1,2}):(?<minute>\d{1,2}):(?<second>(\d{1,2})(\.(\d{1,9}))?)$':
 15:40, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the 
behavior before Spark 3.0.(line 1, pos 16)
+Interval string does not match day-time format of `[+|-]h:m:s.n`, `INTERVAL 
[+|-]'[+|-]h:m:s.n' HOUR TO SECOND` when cast to interval hour to second: 
15:40, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the 
behavior before Spark 3.0.(line 1, pos 16)
 
 == SQL ==
 select interval '15:40' hour to second
@@ -610,7 +610,7 @@ struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
 
-requirement failed: Interval string must match day-time format of 
'^(?<sign>[+|-])?(?<minute>\d{1,2}):(?<second>(\d{1,2})(\.(\d{1,9}))?)$': 20 
40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to 
restore the behavior before Spark 3.0.(line 1, pos 16)
+Interval string does not match day-time format of `[+|-]m:s.n`, `INTERVAL 
[+|-]'[+|-]m:s.n' MINUTE TO SECOND` when cast to interval minute to second: 20 
40:32.99899999, set spark.sql.legacy.fromDayTimeString.enabled to true to 
restore the behavior before Spark 3.0.(line 1, pos 16)
 
 == SQL ==
 select interval '20 40:32.99899999' minute to second
@@ -1087,7 +1087,7 @@ struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
 
-Interval string does not match year-month format of 'y-m': -   2-2     (line 
1, pos 16)
+Interval string does not match year-month format of `[+|-]d h`, `INTERVAL 
[+|-]'[+|-]d h' DAY TO HOUR` when cast to interval year to month: -  2-2     
(line 1, pos 16)
 
 == SQL ==
 select interval '-\t2-2\t' year to month
@@ -1109,7 +1109,7 @@ struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
 
-requirement failed: Interval string must match day-time format of 
'^(?<sign>[+|-])?(?<day>\d+) 
(?<hour>\d{1,2}):(?<minute>\d{1,2}):(?<second>(\d{1,2})(\.(\d{1,9}))?)$': 
+Interval string does not match day-time format of `[+|-]d h:m:s.n`, `INTERVAL 
[+|-]'[+|-]d h:m:s.n' DAY TO SECOND` when cast to interval day to second: 
 -      10       12:34:46.789   , set 
spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior 
before Spark 3.0.(line 1, pos 16)
 
 == SQL ==
@@ -1459,3 +1459,183 @@ struct<>
 -- !query output
 java.lang.ArithmeticException
 not in range
+
+
+-- !query
+SELECT INTERVAL '106751991 04' DAY TO HOUR
+-- !query schema
+struct<INTERVAL '106751991 04' DAY TO HOUR:interval day to hour>
+-- !query output
+106751991 04:00:00.000000000
+
+
+-- !query
+SELECT INTERVAL '106751991 04:00' DAY TO MINUTE
+-- !query schema
+struct<INTERVAL '106751991 04:00' DAY TO MINUTE:interval day to minute>
+-- !query output
+106751991 04:00:00.000000000
+
+
+-- !query
+SELECT INTERVAL '106751991 04:00:54.775807' DAY TO SECOND
+-- !query schema
+struct<INTERVAL '106751991 04:00:54.775807' DAY TO SECOND:interval day to 
second>
+-- !query output
+106751991 04:00:54.775807000
+
+
+-- !query
+SELECT INTERVAL '2562047788:00' HOUR TO MINUTE
+-- !query schema
+struct<INTERVAL '2562047788:00' HOUR TO MINUTE:interval hour to minute>
+-- !query output
+106751991 04:00:00.000000000
+
+
+-- !query
+SELECT INTERVAL '2562047788:00:54.775807' HOUR TO SECOND
+-- !query schema
+struct<INTERVAL '2562047788:00:54.775807' HOUR TO SECOND:interval hour to 
second>
+-- !query output
+106751991 04:00:54.775807000
+
+
+-- !query
+SELECT INTERVAL '153722867280:54.775807' MINUTE TO SECOND
+-- !query schema
+struct<INTERVAL '153722867280:54.775807' MINUTE TO SECOND:interval minute to 
second>
+-- !query output
+106751991 04:00:54.775807000
+
+
+-- !query
+SELECT INTERVAL '-106751991 04' DAY TO HOUR
+-- !query schema
+struct<INTERVAL '-106751991 04' DAY TO HOUR:interval day to hour>
+-- !query output
+-106751991 04:00:00.000000000
+
+
+-- !query
+SELECT INTERVAL '-106751991 04:00' DAY TO MINUTE
+-- !query schema
+struct<INTERVAL '-106751991 04:00' DAY TO MINUTE:interval day to minute>
+-- !query output
+-106751991 04:00:00.000000000
+
+
+-- !query
+SELECT INTERVAL '-106751991 04:00:54.775808' DAY TO SECOND
+-- !query schema
+struct<INTERVAL '-106751991 04:00:54.775808' DAY TO SECOND:interval day to 
second>
+-- !query output
+-106751991 04:00:54.775808000
+
+
+-- !query
+SELECT INTERVAL '-2562047788:00' HOUR TO MINUTE
+-- !query schema
+struct<INTERVAL '-2562047788:00' HOUR TO MINUTE:interval hour to minute>
+-- !query output
+-106751991 04:00:00.000000000
+
+
+-- !query
+SELECT INTERVAL '-2562047788:00:54.775808' HOUR TO SECOND
+-- !query schema
+struct<INTERVAL '-2562047788:00:54.775808' HOUR TO SECOND:interval hour to 
second>
+-- !query output
+-106751991 04:00:54.775808000
+
+
+-- !query
+SELECT INTERVAL '-153722867280:54.775808' MINUTE TO SECOND
+-- !query schema
+struct<INTERVAL '-153722867280:54.775808' MINUTE TO SECOND:interval minute to 
second>
+-- !query output
+-106751991 04:00:54.775808000
+
+
+-- !query
+SELECT INTERVAL '106751992 04' DAY TO HOUR
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+
+requirement failed: day 106751992 outside range [0, 106751991](line 1, pos 16)
+
+== SQL ==
+SELECT INTERVAL '106751992 04' DAY TO HOUR
+----------------^^^
+
+
+-- !query
+SELECT INTERVAL '-106751992 04' DAY TO HOUR
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+
+requirement failed: day 106751992 outside range [0, 106751991](line 1, pos 16)
+
+== SQL ==
+SELECT INTERVAL '-106751992 04' DAY TO HOUR
+----------------^^^
+
+
+-- !query
+SELECT INTERVAL '2562047789:00' HOUR TO MINUTE
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+
+requirement failed: hour 2562047789 outside range [0, 2562047788](line 1, pos 
16)
+
+== SQL ==
+SELECT INTERVAL '2562047789:00' HOUR TO MINUTE
+----------------^^^
+
+
+-- !query
+SELECT INTERVAL '-2562047789:00' HOUR TO MINUTE
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+
+requirement failed: hour 2562047789 outside range [0, 2562047788](line 1, pos 
16)
+
+== SQL ==
+SELECT INTERVAL '-2562047789:00' HOUR TO MINUTE
+----------------^^^
+
+
+-- !query
+SELECT INTERVAL '153722867281:54.775808' MINUTE TO SECOND
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+
+requirement failed: minute 153722867281 outside range [0, 153722867280](line 
1, pos 16)
+
+== SQL ==
+SELECT INTERVAL '153722867281:54.775808' MINUTE TO SECOND
+----------------^^^
+
+
+-- !query
+SELECT INTERVAL '-153722867281:54.775808' MINUTE TO SECOND
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+
+requirement failed: minute 153722867281 outside range [0, 153722867280](line 
1, pos 16)
+
+== SQL ==
+SELECT INTERVAL '-153722867281:54.775808' MINUTE TO SECOND
+----------------^^^
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/interval.sql.out
index 62d4741..3f02398 100644
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/interval.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/interval.sql.out
@@ -105,7 +105,7 @@ struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
 
-requirement failed: Interval string must match day-time format of 
'^(?<sign>[+|-])?(?<day>\d+) (?<hour>\d{1,2})$': 1 2:03, set 
spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior 
before Spark 3.0.(line 1, pos 16)
+Interval string does not match day-time format of `[+|-]d h`, `INTERVAL 
[+|-]'[+|-]d h' DAY TO HOUR` when cast to interval day to hour: 1 2:03, set 
spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior 
before Spark 3.0.(line 1, pos 16)
 
 == SQL ==
 SELECT interval '1 2:03' day to hour
@@ -119,7 +119,7 @@ struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
 
-requirement failed: Interval string must match day-time format of 
'^(?<sign>[+|-])?(?<day>\d+) (?<hour>\d{1,2})$': 1 2:03:04, set 
spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior 
before Spark 3.0.(line 1, pos 16)
+Interval string does not match day-time format of `[+|-]d h`, `INTERVAL 
[+|-]'[+|-]d h' DAY TO HOUR` when cast to interval day to hour: 1 2:03:04, set 
spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior 
before Spark 3.0.(line 1, pos 16)
 
 == SQL ==
 SELECT interval '1 2:03:04' day to hour
@@ -141,7 +141,7 @@ struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
 
-requirement failed: Interval string must match day-time format of 
'^(?<sign>[+|-])?(?<day>\d+) (?<hour>\d{1,2}):(?<minute>\d{1,2})$': 1 2:03:04, 
set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior 
before Spark 3.0.(line 1, pos 16)
+Interval string does not match day-time format of `[+|-]d h:m`, `INTERVAL 
[+|-]'[+|-]d h:m' DAY TO MINUTE` when cast to interval day to minute: 1 
2:03:04, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the 
behavior before Spark 3.0.(line 1, pos 16)
 
 == SQL ==
 SELECT interval '1 2:03:04' day to minute
@@ -155,7 +155,7 @@ struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
 
-requirement failed: Interval string must match day-time format of 
'^(?<sign>[+|-])?(?<day>\d+) 
(?<hour>\d{1,2}):(?<minute>\d{1,2}):(?<second>(\d{1,2})(\.(\d{1,9}))?)$': 1 
2:03, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the 
behavior before Spark 3.0.(line 1, pos 16)
+Interval string does not match day-time format of `[+|-]d h:m:s.n`, `INTERVAL 
[+|-]'[+|-]d h:m:s.n' DAY TO SECOND` when cast to interval day to second: 1 
2:03, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the 
behavior before Spark 3.0.(line 1, pos 16)
 
 == SQL ==
 SELECT interval '1 2:03' day to second
@@ -177,7 +177,7 @@ struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
 
-requirement failed: Interval string must match day-time format of 
'^(?<sign>[+|-])?(?<hour>\d{1,2}):(?<minute>\d{1,2})$': 1 2:03, set 
spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior 
before Spark 3.0.(line 1, pos 16)
+Interval string does not match day-time format of `[+|-]h:m`, `INTERVAL 
[+|-]'[+|-]h:m' HOUR TO MINUTE` when cast to interval hour to minute: 1 2:03, 
set spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior 
before Spark 3.0.(line 1, pos 16)
 
 == SQL ==
 SELECT interval '1 2:03' hour to minute
@@ -191,7 +191,7 @@ struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
 
-requirement failed: Interval string must match day-time format of 
'^(?<sign>[+|-])?(?<hour>\d{1,2}):(?<minute>\d{1,2})$': 1 2:03:04, set 
spark.sql.legacy.fromDayTimeString.enabled to true to restore the behavior 
before Spark 3.0.(line 1, pos 16)
+Interval string does not match day-time format of `[+|-]h:m`, `INTERVAL 
[+|-]'[+|-]h:m' HOUR TO MINUTE` when cast to interval hour to minute: 1 
2:03:04, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the 
behavior before Spark 3.0.(line 1, pos 16)
 
 == SQL ==
 SELECT interval '1 2:03:04' hour to minute
@@ -205,7 +205,7 @@ struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
 
-requirement failed: Interval string must match day-time format of 
'^(?<sign>[+|-])?(?<hour>\d{1,2}):(?<minute>\d{1,2}):(?<second>(\d{1,2})(\.(\d{1,9}))?)$':
 1 2:03, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the 
behavior before Spark 3.0.(line 1, pos 16)
+Interval string does not match day-time format of `[+|-]h:m:s.n`, `INTERVAL 
[+|-]'[+|-]h:m:s.n' HOUR TO SECOND` when cast to interval hour to second: 1 
2:03, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the 
behavior before Spark 3.0.(line 1, pos 16)
 
 == SQL ==
 SELECT interval '1 2:03' hour to second
@@ -219,7 +219,7 @@ struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
 
-requirement failed: Interval string must match day-time format of 
'^(?<sign>[+|-])?(?<hour>\d{1,2}):(?<minute>\d{1,2}):(?<second>(\d{1,2})(\.(\d{1,9}))?)$':
 1 2:03:04, set spark.sql.legacy.fromDayTimeString.enabled to true to restore 
the behavior before Spark 3.0.(line 1, pos 16)
+Interval string does not match day-time format of `[+|-]h:m:s.n`, `INTERVAL 
[+|-]'[+|-]h:m:s.n' HOUR TO SECOND` when cast to interval hour to second: 1 
2:03:04, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the 
behavior before Spark 3.0.(line 1, pos 16)
 
 == SQL ==
 SELECT interval '1 2:03:04' hour to second
@@ -233,7 +233,7 @@ struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
 
-requirement failed: Interval string must match day-time format of 
'^(?<sign>[+|-])?(?<minute>\d{1,2}):(?<second>(\d{1,2})(\.(\d{1,9}))?)$': 1 
2:03, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the 
behavior before Spark 3.0.(line 1, pos 16)
+Interval string does not match day-time format of `[+|-]m:s.n`, `INTERVAL 
[+|-]'[+|-]m:s.n' MINUTE TO SECOND` when cast to interval minute to second: 1 
2:03, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the 
behavior before Spark 3.0.(line 1, pos 16)
 
 == SQL ==
 SELECT interval '1 2:03' minute to second
@@ -247,7 +247,7 @@ struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
 
-requirement failed: Interval string must match day-time format of 
'^(?<sign>[+|-])?(?<minute>\d{1,2}):(?<second>(\d{1,2})(\.(\d{1,9}))?)$': 1 
2:03:04, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the 
behavior before Spark 3.0.(line 1, pos 16)
+Interval string does not match day-time format of `[+|-]m:s.n`, `INTERVAL 
[+|-]'[+|-]m:s.n' MINUTE TO SECOND` when cast to interval minute to second: 1 
2:03:04, set spark.sql.legacy.fromDayTimeString.enabled to true to restore the 
behavior before Spark 3.0.(line 1, pos 16)
 
 == SQL ==
 SELECT interval '1 2:03:04' minute to second

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
