This is an automated email from the ASF dual-hosted git repository.

gengliang pushed a commit to branch branch-3.3
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-3.3 by this push:
     new a3ea6b4e00d [SPARK-38908][SQL] Provide query context in runtime error of Casting from String to Number/Date/Timestamp/Boolean
a3ea6b4e00d is described below

commit a3ea6b4e00df1a6a5712db5dd228819044c09dc9
Author: Gengliang Wang <gengli...@apache.org>
AuthorDate: Fri Apr 15 19:20:31 2022 +0800

    [SPARK-38908][SQL] Provide query context in runtime error of Casting from String to Number/Date/Timestamp/Boolean
    
    ### What changes were proposed in this pull request?
    
    Provide the SQL query context in runtime errors raised when casting from String to Number/Date/Timestamp/Boolean.
    Casting Double/Float to Timestamp shares the same error method as casting String to Timestamp, so this PR also provides the query context in that error.
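    
    For illustration, here is the core pattern the diff below applies (a simplified sketch, not the full change): each ANSI cast helper gains an `errorContext` parameter, and the `Cast` expression passes `origin.context`, which carries the SQL text and position of the failing expression.
    
    ```scala
    // Simplified from DateTimeUtils in the diff below: the error context is
    // threaded through to the error builder, which appends it to the message.
    def stringToDateAnsi(s: UTF8String, errorContext: String = ""): Int =
      stringToDate(s).getOrElse {
        throw QueryExecutionErrors.cannotCastToDateTimeError(s, DateType, errorContext)
      }
    ```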
    
    ### Why are the changes needed?
    
    Provide the SQL query context of runtime errors to users, so that they can better understand where the error occurred.
    
    ### Does this PR introduce _any_ user-facing change?
    
    Yes, this PR improves the runtime error message of casting from String to Number/Date/Timestamp/Boolean by including the SQL query context.
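    
    For example (mirroring the updated golden files below; assumes a SparkSession `spark` with ANSI mode enabled):
    
    ```scala
    // A failing string-to-int cast now reports the originating query fragment:
    spark.conf.set("spark.sql.ansi.enabled", "true")
    spark.sql("SELECT CAST('1.23' AS int)").collect()
    // java.lang.NumberFormatException: invalid input syntax for type numeric:
    // '1.23'. To return NULL instead, use 'try_cast'. If necessary set
    // spark.sql.ansi.enabled to false to bypass this error.
    // == SQL(line 1, position 7) ==
    // SELECT CAST('1.23' AS int)
    //        ^^^^^^^^^^^^^^^^^^^
    ```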
    
    ### How was this patch tested?
    
    UT
    
    Closes #36206 from gengliangwang/castStringContext.
    
    Authored-by: Gengliang Wang <gengli...@apache.org>
    Signed-off-by: Gengliang Wang <gengli...@apache.org>
    (cherry picked from commit 49fa2e0720d3ca681d817981cbc2c7b811de2706)
    Signed-off-by: Gengliang Wang <gengli...@apache.org>
---
 core/src/main/resources/error/error-classes.json   |   2 +-
 project/MimaExcludes.scala                         |   6 +-
 .../spark/sql/catalyst/expressions/Cast.scala      |  74 ++++++----
 .../spark/sql/catalyst/util/DateTimeUtils.scala    |  16 +-
 .../spark/sql/catalyst/util/UTF8StringUtils.scala  |  16 +-
 .../spark/sql/errors/QueryExecutionErrors.scala    |  22 ++-
 .../scala/org/apache/spark/sql/types/Decimal.scala |   4 +-
 .../src/test/resources/sql-tests/inputs/cast.sql   |  11 ++
 .../resources/sql-tests/results/ansi/cast.sql.out  | 164 ++++++++++++++++++++-
 .../resources/sql-tests/results/ansi/date.sql.out  |   9 ++
 .../results/ansi/datetime-parsing-invalid.sql.out  |   6 +
 .../sql-tests/results/ansi/interval.sql.out        |  30 ++++
 .../results/ansi/string-functions.sql.out          |  14 +-
 .../test/resources/sql-tests/results/cast.sql.out  |  74 +++++++++-
 .../sql-tests/results/postgreSQL/boolean.sql.out   |  51 ++++++-
 .../sql-tests/results/postgreSQL/float4.sql.out    |  12 ++
 .../sql-tests/results/postgreSQL/float8.sql.out    |  12 ++
 .../sql-tests/results/postgreSQL/text.sql.out      |   6 +
 .../results/postgreSQL/window_part2.sql.out        |   5 +
 .../results/postgreSQL/window_part3.sql.out        |   6 +-
 .../results/postgreSQL/window_part4.sql.out        |   6 +-
 21 files changed, 484 insertions(+), 62 deletions(-)

diff --git a/core/src/main/resources/error/error-classes.json b/core/src/main/resources/error/error-classes.json
index 855d3c5cd6e..a0fa042fd48 100644
--- a/core/src/main/resources/error/error-classes.json
+++ b/core/src/main/resources/error/error-classes.json
@@ -106,7 +106,7 @@
     "sqlState" : "22023"
   },
   "INVALID_INPUT_SYNTAX_FOR_NUMERIC_TYPE" : {
-    "message" : [ "invalid input syntax for type numeric: %s. To return NULL instead, use 'try_cast'. If necessary set %s to false to bypass this error." ],
+    "message" : [ "invalid input syntax for type numeric: %s. To return NULL instead, use 'try_cast'. If necessary set %s to false to bypass this error.%s" ],
     "sqlState" : "42000"
   },
   "INVALID_JSON_SCHEMA_MAPTYPE" : {
diff --git a/project/MimaExcludes.scala b/project/MimaExcludes.scala
index d832d68c999..8f3bd43ec65 100644
--- a/project/MimaExcludes.scala
+++ b/project/MimaExcludes.scala
@@ -64,7 +64,11 @@ object MimaExcludes {
     ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.connector.read.partitioning.ClusteredDistribution"),
     ProblemFilters.exclude[MissingClassProblem]("org.apache.spark.sql.connector.read.partitioning.Distribution"),
     ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.sql.connector.read.partitioning.Partitioning.*"),
-    ProblemFilters.exclude[ReversedMissingMethodProblem]("org.apache.spark.sql.connector.read.partitioning.Partitioning.*")
+    ProblemFilters.exclude[ReversedMissingMethodProblem]("org.apache.spark.sql.connector.read.partitioning.Partitioning.*"),
+
+    // [SPARK-38908][SQL] Provide query context in runtime error of Casting from String to
+    // Number/Date/Timestamp/Boolean
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.sql.types.Decimal.fromStringANSI")
   )
 
   // Exclude rules for 3.2.x from 3.1.1
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
index 03ecaecca06..e522c211cb2 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
@@ -467,7 +467,7 @@ abstract class CastBase extends UnaryExpression with TimeZoneAwareExpression wit
           false
         } else {
           if (ansiEnabled) {
-            throw QueryExecutionErrors.invalidInputSyntaxForBooleanError(s)
+            throw QueryExecutionErrors.invalidInputSyntaxForBooleanError(s, origin.context)
           } else {
             null
           }
@@ -499,7 +499,7 @@ abstract class CastBase extends UnaryExpression with TimeZoneAwareExpression wit
     case StringType =>
       buildCast[UTF8String](_, utfs => {
         if (ansiEnabled) {
-          DateTimeUtils.stringToTimestampAnsi(utfs, zoneId)
+          DateTimeUtils.stringToTimestampAnsi(utfs, zoneId, origin.context)
         } else {
           DateTimeUtils.stringToTimestamp(utfs, zoneId).orNull
         }
@@ -524,14 +524,14 @@ abstract class CastBase extends UnaryExpression with TimeZoneAwareExpression wit
     // TimestampWritable.doubleToTimestamp
     case DoubleType =>
       if (ansiEnabled) {
-        buildCast[Double](_, d => doubleToTimestampAnsi(d))
+        buildCast[Double](_, d => doubleToTimestampAnsi(d, origin.context))
       } else {
         buildCast[Double](_, d => doubleToTimestamp(d))
       }
     // TimestampWritable.floatToTimestamp
     case FloatType =>
       if (ansiEnabled) {
-        buildCast[Float](_, f => doubleToTimestampAnsi(f.toDouble))
+        buildCast[Float](_, f => doubleToTimestampAnsi(f.toDouble, origin.context))
       } else {
         buildCast[Float](_, f => doubleToTimestamp(f.toDouble))
       }
@@ -541,7 +541,7 @@ abstract class CastBase extends UnaryExpression with TimeZoneAwareExpression wit
     case StringType =>
       buildCast[UTF8String](_, utfs => {
         if (ansiEnabled) {
-          DateTimeUtils.stringToTimestampWithoutTimeZoneAnsi(utfs)
+          DateTimeUtils.stringToTimestampWithoutTimeZoneAnsi(utfs, origin.context)
         } else {
           DateTimeUtils.stringToTimestampWithoutTimeZone(utfs).orNull
         }
@@ -574,7 +574,7 @@ abstract class CastBase extends UnaryExpression with TimeZoneAwareExpression wit
   private[this] def castToDate(from: DataType): Any => Any = from match {
     case StringType =>
       if (ansiEnabled) {
-        buildCast[UTF8String](_, s => DateTimeUtils.stringToDateAnsi(s))
+        buildCast[UTF8String](_, s => DateTimeUtils.stringToDateAnsi(s, origin.context))
       } else {
         buildCast[UTF8String](_, s => DateTimeUtils.stringToDate(s).orNull)
       }
@@ -631,7 +631,7 @@ abstract class CastBase extends UnaryExpression with TimeZoneAwareExpression wit
   // LongConverter
   private[this] def castToLong(from: DataType): Any => Any = from match {
     case StringType if ansiEnabled =>
-      buildCast[UTF8String](_, UTF8StringUtils.toLongExact)
+      buildCast[UTF8String](_, v => UTF8StringUtils.toLongExact(v, origin.context))
     case StringType =>
       val result = new LongWrapper()
       buildCast[UTF8String](_, s => if (s.toLong(result)) result.value else null)
@@ -654,7 +654,7 @@ abstract class CastBase extends UnaryExpression with TimeZoneAwareExpression wit
   // IntConverter
   private[this] def castToInt(from: DataType): Any => Any = from match {
     case StringType if ansiEnabled =>
-      buildCast[UTF8String](_, UTF8StringUtils.toIntExact)
+      buildCast[UTF8String](_, v => UTF8StringUtils.toIntExact(v, origin.context))
     case StringType =>
       val result = new IntWrapper()
       buildCast[UTF8String](_, s => if (s.toInt(result)) result.value else null)
@@ -686,7 +686,7 @@ abstract class CastBase extends UnaryExpression with TimeZoneAwareExpression wit
   // ShortConverter
   private[this] def castToShort(from: DataType): Any => Any = from match {
     case StringType if ansiEnabled =>
-      buildCast[UTF8String](_, UTF8StringUtils.toShortExact)
+      buildCast[UTF8String](_, v => UTF8StringUtils.toShortExact(v, origin.context))
     case StringType =>
       val result = new IntWrapper()
       buildCast[UTF8String](_, s => if (s.toShort(result)) {
@@ -733,7 +733,7 @@ abstract class CastBase extends UnaryExpression with TimeZoneAwareExpression wit
   // ByteConverter
   private[this] def castToByte(from: DataType): Any => Any = from match {
     case StringType if ansiEnabled =>
-      buildCast[UTF8String](_, UTF8StringUtils.toByteExact)
+      buildCast[UTF8String](_, v => UTF8StringUtils.toByteExact(v, origin.context))
     case StringType =>
       val result = new IntWrapper()
       buildCast[UTF8String](_, s => if (s.toByte(result)) {
@@ -815,7 +815,8 @@ abstract class CastBase extends UnaryExpression with TimeZoneAwareExpression wit
         if (d == null) null else changePrecision(d, target)
       })
     case StringType if ansiEnabled =>
-      buildCast[UTF8String](_, s => changePrecision(Decimal.fromStringANSI(s), target))
+      buildCast[UTF8String](_,
+        s => changePrecision(Decimal.fromStringANSI(s, origin.context), target))
     case BooleanType =>
       buildCast[Boolean](_, b => toPrecision(if (b) Decimal.ONE else Decimal.ZERO, target))
     case DateType =>
@@ -844,7 +845,7 @@ abstract class CastBase extends UnaryExpression with TimeZoneAwareExpression wit
           case _: NumberFormatException =>
             val d = Cast.processFloatingPointSpecialLiterals(doubleStr, false)
             if(ansiEnabled && d == null) {
-              throw QueryExecutionErrors.invalidInputSyntaxForNumericError(s)
+              throw QueryExecutionErrors.invalidInputSyntaxForNumericError(s, origin.context)
             } else {
               d
             }
@@ -869,7 +870,7 @@ abstract class CastBase extends UnaryExpression with TimeZoneAwareExpression wit
           case _: NumberFormatException =>
             val f = Cast.processFloatingPointSpecialLiterals(floatStr, true)
             if (ansiEnabled && f == null) {
-              throw QueryExecutionErrors.invalidInputSyntaxForNumericError(s)
+              throw QueryExecutionErrors.invalidInputSyntaxForNumericError(s, origin.context)
             } else {
               f
             }
@@ -1016,7 +1017,7 @@ abstract class CastBase extends UnaryExpression with TimeZoneAwareExpression wit
     case CalendarIntervalType => castToIntervalCode(from)
     case it: DayTimeIntervalType => castToDayTimeIntervalCode(from, it)
     case it: YearMonthIntervalType => castToYearMonthIntervalCode(from, it)
-    case BooleanType => castToBooleanCode(from)
+    case BooleanType => castToBooleanCode(from, ctx)
     case ByteType => castToByteCode(from, ctx)
     case ShortType => castToShortCode(from, ctx)
     case IntegerType => castToIntCode(from, ctx)
@@ -1295,8 +1296,9 @@ abstract class CastBase extends UnaryExpression with TimeZoneAwareExpression wit
         val intOpt = ctx.freshVariable("intOpt", classOf[Option[Integer]])
         (c, evPrim, evNull) =>
           if (ansiEnabled) {
+            val errorContext = ctx.addReferenceObj("errCtx", origin.context)
             code"""
-              $evPrim = org.apache.spark.sql.catalyst.util.DateTimeUtils.stringToDateAnsi($c);
+              $evPrim = $dateTimeUtilsCls.stringToDateAnsi($c, $errorContext);
             """
           } else {
             code"""
@@ -1373,9 +1375,10 @@ abstract class CastBase extends UnaryExpression with TimeZoneAwareExpression wit
               }
           """
       case StringType if ansiEnabled =>
+        val errorContext = ctx.addReferenceObj("errCtx", origin.context)
         (c, evPrim, evNull) =>
           code"""
-              Decimal $tmp = Decimal.fromStringANSI($c);
+              Decimal $tmp = Decimal.fromStringANSI($c, $errorContext);
              ${changePrecision(tmp, target, evPrim, evNull, canNullSafeCast, ctx)}
           """
       case BooleanType =>
@@ -1432,9 +1435,9 @@ abstract class CastBase extends UnaryExpression with TimeZoneAwareExpression wit
       val longOpt = ctx.freshVariable("longOpt", classOf[Option[Long]])
       (c, evPrim, evNull) =>
         if (ansiEnabled) {
+          val errorContext = ctx.addReferenceObj("errCtx", origin.context)
           code"""
-            $evPrim =
-              org.apache.spark.sql.catalyst.util.DateTimeUtils.stringToTimestampAnsi($c, $zid);
+            $evPrim = $dateTimeUtilsCls.stringToTimestampAnsi($c, $zid, $errorContext);
            """
         } else {
           code"""
@@ -1471,7 +1474,8 @@ abstract class CastBase extends UnaryExpression with TimeZoneAwareExpression wit
     case DoubleType =>
       (c, evPrim, evNull) =>
         if (ansiEnabled) {
-          code"$evPrim = $dateTimeUtilsCls.doubleToTimestampAnsi($c);"
+          val errorContext = ctx.addReferenceObj("errCtx", origin.context)
+          code"$evPrim = $dateTimeUtilsCls.doubleToTimestampAnsi($c, $errorContext);"
         } else {
           code"""
             if (Double.isNaN($c) || Double.isInfinite($c)) {
@@ -1484,7 +1488,8 @@ abstract class CastBase extends UnaryExpression with TimeZoneAwareExpression wit
     case FloatType =>
       (c, evPrim, evNull) =>
         if (ansiEnabled) {
-          code"$evPrim = $dateTimeUtilsCls.doubleToTimestampAnsi((double)$c);"
+          val errorContext = ctx.addReferenceObj("errCtx", origin.context)
+          code"$evPrim = $dateTimeUtilsCls.doubleToTimestampAnsi((double)$c, $errorContext);"
         } else {
           code"""
             if (Float.isNaN($c) || Float.isInfinite($c)) {
@@ -1503,9 +1508,9 @@ abstract class CastBase extends UnaryExpression with TimeZoneAwareExpression wit
       val longOpt = ctx.freshVariable("longOpt", classOf[Option[Long]])
       (c, evPrim, evNull) =>
         if (ansiEnabled) {
+          val errorContext = ctx.addReferenceObj("errCtx", origin.context)
           code"""
-            $evPrim =
-              $dateTimeUtilsCls.stringToTimestampWithoutTimeZoneAnsi($c);
+            $evPrim = $dateTimeUtilsCls.stringToTimestampWithoutTimeZoneAnsi($c, $errorContext);
            """
         } else {
           code"""
@@ -1613,12 +1618,15 @@ abstract class CastBase extends UnaryExpression with TimeZoneAwareExpression wit
   private[this] def timestampToDoubleCode(ts: ExprValue): Block =
     code"$ts / (double)$MICROS_PER_SECOND"
 
-  private[this] def castToBooleanCode(from: DataType): CastFunction = from match {
+  private[this] def castToBooleanCode(
+      from: DataType,
+      ctx: CodegenContext): CastFunction = from match {
     case StringType =>
      val stringUtils = inline"${StringUtils.getClass.getName.stripSuffix("$")}"
       (c, evPrim, evNull) =>
         val castFailureCode = if (ansiEnabled) {
-          s"throw QueryExecutionErrors.invalidInputSyntaxForBooleanError($c);"
+          val errorContext = ctx.addReferenceObj("errCtx", origin.context)
+          s"throw QueryExecutionErrors.invalidInputSyntaxForBooleanError($c, $errorContext);"
         } else {
           s"$evNull = true;"
         }
@@ -1746,7 +1754,8 @@ abstract class CastBase extends UnaryExpression with TimeZoneAwareExpression wit
   private[this] def castToByteCode(from: DataType, ctx: CodegenContext): CastFunction = from match {
     case StringType if ansiEnabled =>
       val stringUtils = UTF8StringUtils.getClass.getCanonicalName.stripSuffix("$")
-      (c, evPrim, evNull) => code"$evPrim = $stringUtils.toByteExact($c);"
+      val errorContext = ctx.addReferenceObj("errCtx", origin.context)
+      (c, evPrim, evNull) => code"$evPrim = $stringUtils.toByteExact($c, $errorContext);"
     case StringType =>
       val wrapper = ctx.freshVariable("intWrapper", classOf[UTF8String.IntWrapper])
       (c, evPrim, evNull) =>
@@ -1782,7 +1791,8 @@ abstract class CastBase extends UnaryExpression with TimeZoneAwareExpression wit
       ctx: CodegenContext): CastFunction = from match {
     case StringType if ansiEnabled =>
       val stringUtils = UTF8StringUtils.getClass.getCanonicalName.stripSuffix("$")
-      (c, evPrim, evNull) => code"$evPrim = $stringUtils.toShortExact($c);"
+      val errorContext = ctx.addReferenceObj("errCtx", origin.context)
+      (c, evPrim, evNull) => code"$evPrim = $stringUtils.toShortExact($c, $errorContext);"
     case StringType =>
       val wrapper = ctx.freshVariable("intWrapper", classOf[UTF8String.IntWrapper])
       (c, evPrim, evNull) =>
@@ -1816,7 +1826,8 @@ abstract class CastBase extends UnaryExpression with TimeZoneAwareExpression wit
   private[this] def castToIntCode(from: DataType, ctx: CodegenContext): CastFunction = from match {
     case StringType if ansiEnabled =>
       val stringUtils = UTF8StringUtils.getClass.getCanonicalName.stripSuffix("$")
-      (c, evPrim, evNull) => code"$evPrim = $stringUtils.toIntExact($c);"
+      val errorContext = ctx.addReferenceObj("errCtx", origin.context)
+      (c, evPrim, evNull) => code"$evPrim = $stringUtils.toIntExact($c, $errorContext);"
     case StringType =>
       val wrapper = ctx.freshVariable("intWrapper", classOf[UTF8String.IntWrapper])
       (c, evPrim, evNull) =>
@@ -1850,7 +1861,8 @@ abstract class CastBase extends UnaryExpression with TimeZoneAwareExpression wit
   private[this] def castToLongCode(from: DataType, ctx: CodegenContext): CastFunction = from match {
     case StringType if ansiEnabled =>
       val stringUtils = UTF8StringUtils.getClass.getCanonicalName.stripSuffix("$")
-      (c, evPrim, evNull) => code"$evPrim = $stringUtils.toLongExact($c);"
+      val errorContext = ctx.addReferenceObj("errCtx", origin.context)
+      (c, evPrim, evNull) => code"$evPrim = $stringUtils.toLongExact($c, $errorContext);"
     case StringType =>
       val wrapper = ctx.freshVariable("longWrapper", classOf[UTF8String.LongWrapper])
       (c, evPrim, evNull) =>
@@ -1886,7 +1898,8 @@ abstract class CastBase extends UnaryExpression with TimeZoneAwareExpression wit
         val floatStr = ctx.freshVariable("floatStr", StringType)
         (c, evPrim, evNull) =>
           val handleNull = if (ansiEnabled) {
-            s"throw QueryExecutionErrors.invalidInputSyntaxForNumericError($c);"
+            val errorContext = ctx.addReferenceObj("errCtx", origin.context)
+            s"throw QueryExecutionErrors.invalidInputSyntaxForNumericError($c, $errorContext);"
           } else {
             s"$evNull = true;"
           }
@@ -1922,7 +1935,8 @@ abstract class CastBase extends UnaryExpression with TimeZoneAwareExpression wit
         val doubleStr = ctx.freshVariable("doubleStr", StringType)
         (c, evPrim, evNull) =>
           val handleNull = if (ansiEnabled) {
-            s"throw QueryExecutionErrors.invalidInputSyntaxForNumericError($c);"
+            val errorContext = ctx.addReferenceObj("errCtx", origin.context)
+            s"throw QueryExecutionErrors.invalidInputSyntaxForNumericError($c, $errorContext);"
           } else {
             s"$evNull = true;"
           }
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
index 65da5e9cb42..97ad3e3c10a 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
@@ -448,15 +448,15 @@ object DateTimeUtils {
     }
   }
 
-  def stringToTimestampAnsi(s: UTF8String, timeZoneId: ZoneId): Long = {
+  def stringToTimestampAnsi(s: UTF8String, timeZoneId: ZoneId, errorContext: String = ""): Long = {
     stringToTimestamp(s, timeZoneId).getOrElse {
-      throw QueryExecutionErrors.cannotCastToDateTimeError(s, TimestampType)
+      throw QueryExecutionErrors.cannotCastToDateTimeError(s, TimestampType, errorContext)
     }
   }
 
-  def doubleToTimestampAnsi(d: Double): Long = {
+  def doubleToTimestampAnsi(d: Double, errorContext: String): Long = {
     if (d.isNaN || d.isInfinite) {
-      throw QueryExecutionErrors.cannotCastToDateTimeError(d, TimestampType)
+      throw QueryExecutionErrors.cannotCastToDateTimeError(d, TimestampType, errorContext)
     } else {
       DoubleExactNumeric.toLong(d * MICROS_PER_SECOND)
     }
@@ -503,9 +503,9 @@ object DateTimeUtils {
     stringToTimestampWithoutTimeZone(s, true)
   }
 
-  def stringToTimestampWithoutTimeZoneAnsi(s: UTF8String): Long = {
+  def stringToTimestampWithoutTimeZoneAnsi(s: UTF8String, errorContext: String): Long = {
     stringToTimestampWithoutTimeZone(s, true).getOrElse {
-      throw QueryExecutionErrors.cannotCastToDateTimeError(s, TimestampNTZType)
+      throw QueryExecutionErrors.cannotCastToDateTimeError(s, TimestampNTZType, errorContext)
     }
   }
 
@@ -621,9 +621,9 @@ object DateTimeUtils {
     }
   }
 
-  def stringToDateAnsi(s: UTF8String): Int = {
+  def stringToDateAnsi(s: UTF8String, errorContext: String = ""): Int = {
     stringToDate(s).getOrElse {
-      throw QueryExecutionErrors.cannotCastToDateTimeError(s, DateType)
+      throw QueryExecutionErrors.cannotCastToDateTimeError(s, DateType, errorContext)
     }
   }
 
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/UTF8StringUtils.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/UTF8StringUtils.scala
index 7fb564d1bd3..9589cf3774e 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/UTF8StringUtils.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/UTF8StringUtils.scala
@@ -25,20 +25,24 @@ import org.apache.spark.unsafe.types.UTF8String
  */
 object UTF8StringUtils {
 
-  def toLongExact(s: UTF8String): Long = withException(s.toLongExact)
+  def toLongExact(s: UTF8String, errorContext: String): Long =
+    withException(s.toLongExact, errorContext)
 
-  def toIntExact(s: UTF8String): Int = withException(s.toIntExact)
+  def toIntExact(s: UTF8String, errorContext: String): Int =
+    withException(s.toIntExact, errorContext)
 
-  def toShortExact(s: UTF8String): Short = withException(s.toShortExact)
+  def toShortExact(s: UTF8String, errorContext: String): Short =
+    withException(s.toShortExact, errorContext)
 
-  def toByteExact(s: UTF8String): Byte = withException(s.toByteExact)
+  def toByteExact(s: UTF8String, errorContext: String): Byte =
+    withException(s.toByteExact, errorContext)
 
-  private def withException[A](f: => A): A = {
+  private def withException[A](f: => A, errorContext: String): A = {
     try {
       f
     } catch {
       case e: NumberFormatException =>
-        throw QueryExecutionErrors.invalidInputSyntaxForNumericError(e)
+        throw QueryExecutionErrors.invalidInputSyntaxForNumericError(e, errorContext)
     }
   }
 }
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
index 3a89147c4b5..fec4788c333 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -104,14 +104,18 @@ object QueryExecutionErrors extends QueryErrorsBase {
         decimalPrecision.toString, decimalScale.toString, SQLConf.ANSI_ENABLED.key, context))
   }
 
-  def invalidInputSyntaxForNumericError(e: NumberFormatException): NumberFormatException = {
+  def invalidInputSyntaxForNumericError(
+      e: NumberFormatException,
+      errorContext: String): NumberFormatException = {
     new NumberFormatException(s"${e.getMessage}. To return NULL instead, use 'try_cast'. " +
-      s"If necessary set ${SQLConf.ANSI_ENABLED.key} to false to bypass this error.")
+      s"If necessary set ${SQLConf.ANSI_ENABLED.key} to false to bypass this error." + errorContext)
   }
 
-  def invalidInputSyntaxForNumericError(s: UTF8String): NumberFormatException = {
+  def invalidInputSyntaxForNumericError(
+      s: UTF8String,
+      errorContext: String): NumberFormatException = {
     new SparkNumberFormatException(errorClass = "INVALID_INPUT_SYNTAX_FOR_NUMERIC_TYPE",
-      messageParameters = Array(toSQLValue(s, StringType), SQLConf.ANSI_ENABLED.key))
+      messageParameters = Array(toSQLValue(s, StringType), SQLConf.ANSI_ENABLED.key, errorContext))
   }
 
   def cannotCastFromNullTypeError(to: DataType): Throwable = {
@@ -1044,9 +1048,9 @@ object QueryExecutionErrors extends QueryErrorsBase {
       e)
   }
 
-  def cannotCastToDateTimeError(value: Any, to: DataType): Throwable = {
+  def cannotCastToDateTimeError(value: Any, to: DataType, errorContext: String): Throwable = {
     new DateTimeException(s"Cannot cast $value to $to. To return NULL instead, use 'try_cast'. " +
-      s"If necessary set ${SQLConf.ANSI_ENABLED.key} to false to bypass this error.")
+      s"If necessary set ${SQLConf.ANSI_ENABLED.key} to false to bypass this error." + errorContext)
   }
 
   def registeringStreamingQueryListenerError(e: Exception): Throwable = {
@@ -1180,10 +1184,12 @@ object QueryExecutionErrors extends QueryErrorsBase {
       "SQLUserDefinedType nor registered with UDTRegistration.}")
   }
 
-  def invalidInputSyntaxForBooleanError(s: UTF8String): UnsupportedOperationException = {
+  def invalidInputSyntaxForBooleanError(
+      s: UTF8String,
+      errorContext: String): UnsupportedOperationException = {
     new UnsupportedOperationException(s"invalid input syntax for type boolean: $s. " +
       s"To return NULL instead, use 'try_cast'. If necessary set ${SQLConf.ANSI_ENABLED.key} " +
-      "to false to bypass this error.")
+      "to false to bypass this error." + errorContext)
   }
 
   def unsupportedOperandTypeForSizeFunctionError(dataType: DataType): Throwable = {
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala
index 39c7e6ba580..ac6ac33451c 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala
@@ -613,7 +613,7 @@ object Decimal {
     }
   }
 
-  def fromStringANSI(str: UTF8String): Decimal = {
+  def fromStringANSI(str: UTF8String, errorContext: String = ""): Decimal = {
     try {
       val bigDecimal = stringToJavaBigDecimal(str)
       // We fast fail because constructing a very large JavaBigDecimal to Decimal is very slow.
@@ -626,7 +626,7 @@ object Decimal {
       }
     } catch {
       case _: NumberFormatException =>
-        throw QueryExecutionErrors.invalidInputSyntaxForNumericError(str)
+        throw QueryExecutionErrors.invalidInputSyntaxForNumericError(str, errorContext)
     }
   }
 
diff --git a/sql/core/src/test/resources/sql-tests/inputs/cast.sql b/sql/core/src/test/resources/sql-tests/inputs/cast.sql
index 39095cb8ce0..e391c31690f 100644
--- a/sql/core/src/test/resources/sql-tests/inputs/cast.sql
+++ b/sql/core/src/test/resources/sql-tests/inputs/cast.sql
@@ -87,3 +87,14 @@ select cast('\t\n xyz \t\r' as boolean);
 
 select cast('23.45' as decimal(4, 2));
 select cast('123.45' as decimal(4, 2));
+select cast('xyz' as decimal(4, 2));
+
+select cast('2022-01-01' as date);
+select cast('a' as date);
+select cast('2022-01-01 00:00:00' as timestamp);
+select cast('a' as timestamp);
+select cast('2022-01-01 00:00:00' as timestamp_ntz);
+select cast('a' as timestamp_ntz);
+
+select cast(cast('inf' as double) as timestamp);
+select cast(cast('inf' as float) as timestamp);
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out
index 6b705274dc8..3de9c1f743d 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out
@@ -1,5 +1,5 @@
 -- Automatically generated by SQLQueryTestSuite
--- Number of queries: 57
+-- Number of queries: 66
 
 
 -- !query
@@ -9,6 +9,9 @@ struct<>
 -- !query output
 java.lang.NumberFormatException
invalid input syntax for type numeric: '1.23'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 7) ==
+SELECT CAST('1.23' AS int)
+       ^^^^^^^^^^^^^^^^^^^
 
 
 -- !query
@@ -18,6 +21,9 @@ struct<>
 -- !query output
 java.lang.NumberFormatException
invalid input syntax for type numeric: '1.23'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 7) ==
+SELECT CAST('1.23' AS long)
+       ^^^^^^^^^^^^^^^^^^^^
 
 
 -- !query
@@ -27,6 +33,9 @@ struct<>
 -- !query output
 java.lang.NumberFormatException
invalid input syntax for type numeric: '-4.56'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 7) ==
+SELECT CAST('-4.56' AS int)
+       ^^^^^^^^^^^^^^^^^^^^
 
 
 -- !query
@@ -36,6 +45,9 @@ struct<>
 -- !query output
 java.lang.NumberFormatException
invalid input syntax for type numeric: '-4.56'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 7) ==
+SELECT CAST('-4.56' AS long)
+       ^^^^^^^^^^^^^^^^^^^^^
 
 
 -- !query
@@ -45,6 +57,9 @@ struct<>
 -- !query output
 java.lang.NumberFormatException
invalid input syntax for type numeric: 'abc'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 7) ==
+SELECT CAST('abc' AS int)
+       ^^^^^^^^^^^^^^^^^^
 
 
 -- !query
@@ -54,6 +69,9 @@ struct<>
 -- !query output
 java.lang.NumberFormatException
invalid input syntax for type numeric: 'abc'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 7) ==
+SELECT CAST('abc' AS long)
+       ^^^^^^^^^^^^^^^^^^^
 
 
 -- !query
@@ -63,6 +81,9 @@ struct<>
 -- !query output
 java.lang.NumberFormatException
invalid input syntax for type numeric: '1234567890123'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 7) ==
+SELECT CAST('1234567890123' AS int)
+       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 
 -- !query
@@ -72,6 +93,9 @@ struct<>
 -- !query output
 java.lang.NumberFormatException
invalid input syntax for type numeric: '12345678901234567890123'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 7) ==
+SELECT CAST('12345678901234567890123' AS long)
+       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 
 -- !query
@@ -81,6 +105,9 @@ struct<>
 -- !query output
 java.lang.NumberFormatException
invalid input syntax for type numeric: ''. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 7) ==
+SELECT CAST('' AS int)
+       ^^^^^^^^^^^^^^^
 
 
 -- !query
@@ -90,6 +117,9 @@ struct<>
 -- !query output
 java.lang.NumberFormatException
invalid input syntax for type numeric: ''. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 7) ==
+SELECT CAST('' AS long)
+       ^^^^^^^^^^^^^^^^
 
 
 -- !query
@@ -115,6 +145,9 @@ struct<>
 -- !query output
 java.lang.NumberFormatException
invalid input syntax for type numeric: '123.a'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 7) ==
+SELECT CAST('123.a' AS int)
+       ^^^^^^^^^^^^^^^^^^^^
 
 
 -- !query
@@ -124,6 +157,9 @@ struct<>
 -- !query output
 java.lang.NumberFormatException
invalid input syntax for type numeric: '123.a'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 7) ==
+SELECT CAST('123.a' AS long)
+       ^^^^^^^^^^^^^^^^^^^^^
 
 
 -- !query
@@ -141,6 +177,9 @@ struct<>
 -- !query output
 java.lang.NumberFormatException
invalid input syntax for type numeric: '-2147483649'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 7) ==
+SELECT CAST('-2147483649' AS int)
+       ^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 
 -- !query
@@ -158,6 +197,9 @@ struct<>
 -- !query output
 java.lang.NumberFormatException
invalid input syntax for type numeric: '2147483648'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 7) ==
+SELECT CAST('2147483648' AS int)
+       ^^^^^^^^^^^^^^^^^^^^^^^^^
 
 
 -- !query
@@ -175,6 +217,9 @@ struct<>
 -- !query output
 java.lang.NumberFormatException
invalid input syntax for type numeric: '-9223372036854775809'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 7) ==
+SELECT CAST('-9223372036854775809' AS long)
+       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 
 -- !query
@@ -192,6 +237,9 @@ struct<>
 -- !query output
 java.lang.NumberFormatException
invalid input syntax for type numeric: '9223372036854775808'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 7) ==
+SELECT CAST('9223372036854775808' AS long)
+       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 
 -- !query
@@ -448,6 +496,9 @@ struct<>
 -- !query output
 java.lang.NumberFormatException
invalid input syntax for type numeric: '1中文'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 7) ==
+select cast('1中文' as tinyint)
+       ^^^^^^^^^^^^^^^^^^^^^^
 
 
 -- !query
@@ -457,6 +508,9 @@ struct<>
 -- !query output
 java.lang.NumberFormatException
invalid input syntax for type numeric: '1中文'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 7) ==
+select cast('1中文' as smallint)
+       ^^^^^^^^^^^^^^^^^^^^^^^
 
 
 -- !query
@@ -466,6 +520,9 @@ struct<>
 -- !query output
 java.lang.NumberFormatException
invalid input syntax for type numeric: '1中文'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 7) ==
+select cast('1中文' as INT)
+       ^^^^^^^^^^^^^^^^^^
 
 
 -- !query
@@ -475,6 +532,9 @@ struct<>
 -- !query output
 java.lang.NumberFormatException
invalid input syntax for type numeric: '中文1'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 7) ==
+select cast('中文1' as bigint)
+       ^^^^^^^^^^^^^^^^^^^^^
 
 
 -- !query
@@ -484,6 +544,9 @@ struct<>
 -- !query output
 java.lang.NumberFormatException
invalid input syntax for type numeric: '1中文'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 7) ==
+select cast('1中文' as bigint)
+       ^^^^^^^^^^^^^^^^^^^^^
 
 
 -- !query
@@ -512,6 +575,9 @@ struct<>
 java.lang.UnsupportedOperationException
 invalid input syntax for type boolean:         
  xyz   
. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 7) ==
+select cast('\t\n xyz \t\r' as boolean)
+       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 
 -- !query
@@ -532,3 +598,99 @@ Decimal(expanded,123.45,5,2}) cannot be represented as Decimal(4, 2). If necessa
 == SQL(line 1, position 7) ==
 select cast('123.45' as decimal(4, 2))
        ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+
+-- !query
+select cast('xyz' as decimal(4, 2))
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.SparkNumberFormatException
+invalid input syntax for type numeric: 'xyz'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 7) ==
+select cast('xyz' as decimal(4, 2))
+       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+
+-- !query
+select cast('2022-01-01' as date)
+-- !query schema
+struct<CAST(2022-01-01 AS DATE):date>
+-- !query output
+2022-01-01
+
+
+-- !query
+select cast('a' as date)
+-- !query schema
+struct<>
+-- !query output
+java.time.DateTimeException
+Cannot cast a to DateType. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 7) ==
+select cast('a' as date)
+       ^^^^^^^^^^^^^^^^^
+
+
+-- !query
+select cast('2022-01-01 00:00:00' as timestamp)
+-- !query schema
+struct<CAST(2022-01-01 00:00:00 AS TIMESTAMP):timestamp>
+-- !query output
+2022-01-01 00:00:00
+
+
+-- !query
+select cast('a' as timestamp)
+-- !query schema
+struct<>
+-- !query output
+java.time.DateTimeException
+Cannot cast a to TimestampType. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 7) ==
+select cast('a' as timestamp)
+       ^^^^^^^^^^^^^^^^^^^^^^
+
+
+-- !query
+select cast('2022-01-01 00:00:00' as timestamp_ntz)
+-- !query schema
+struct<CAST(2022-01-01 00:00:00 AS TIMESTAMP_NTZ):timestamp_ntz>
+-- !query output
+2022-01-01 00:00:00
+
+
+-- !query
+select cast('a' as timestamp_ntz)
+-- !query schema
+struct<>
+-- !query output
+java.time.DateTimeException
+Cannot cast a to TimestampNTZType. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 7) ==
+select cast('a' as timestamp_ntz)
+       ^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+
+-- !query
+select cast(cast('inf' as double) as timestamp)
+-- !query schema
+struct<>
+-- !query output
+java.time.DateTimeException
+Cannot cast Infinity to TimestampType. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 7) ==
+select cast(cast('inf' as double) as timestamp)
+       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+
+-- !query
+select cast(cast('inf' as float) as timestamp)
+-- !query schema
+struct<>
+-- !query output
+java.time.DateTimeException
+Cannot cast Infinity to TimestampType. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 7) ==
+select cast(cast('inf' as float) as timestamp)
+       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out
index c7058cd7e3b..d9777b53d21 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out
@@ -233,6 +233,9 @@ struct<>
 -- !query output
 java.time.DateTimeException
Cannot cast xx to DateType. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 7) ==
+select next_day("xx", "Mon")
+       ^^^^^^^^^^^^^^^^^^^^^
 
 
 -- !query
@@ -325,6 +328,9 @@ struct<>
 -- !query output
 java.lang.NumberFormatException
invalid input syntax for type numeric: '1.2'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 7) ==
+select date_add('2011-11-11', '1.2')
+       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 
 -- !query
@@ -433,6 +439,9 @@ struct<>
 -- !query output
 java.lang.NumberFormatException
invalid input syntax for type numeric: '1.2'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 7) ==
+select date_sub(date'2011-11-11', '1.2')
+       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/datetime-parsing-invalid.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/datetime-parsing-invalid.sql.out
index 59761d5ac53..57e39bbfe3a 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/datetime-parsing-invalid.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/datetime-parsing-invalid.sql.out
@@ -243,6 +243,9 @@ struct<>
 -- !query output
 java.time.DateTimeException
Cannot cast Unparseable to TimestampType. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 7) ==
+select cast("Unparseable" as timestamp)
+       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 
 -- !query
@@ -252,3 +255,6 @@ struct<>
 -- !query output
 java.time.DateTimeException
Cannot cast Unparseable to DateType. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 7) ==
+select cast("Unparseable" as date)
+       ^^^^^^^^^^^^^^^^^^^^^^^^^^^
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
index 8f88727f66f..3b8d95bca0a 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
@@ -123,6 +123,9 @@ struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
invalid input syntax for type numeric: 'a'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 7) ==
+select interval 2 second * 'a'
+       ^^^^^^^^^^^^^^^^^^^^^^^
 
 
 -- !query
@@ -132,6 +135,9 @@ struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
invalid input syntax for type numeric: 'a'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 7) ==
+select interval 2 second / 'a'
+       ^^^^^^^^^^^^^^^^^^^^^^^
 
 
 -- !query
@@ -141,6 +147,9 @@ struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
invalid input syntax for type numeric: 'a'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 7) ==
+select interval 2 year * 'a'
+       ^^^^^^^^^^^^^^^^^^^^^
 
 
 -- !query
@@ -150,6 +159,9 @@ struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
invalid input syntax for type numeric: 'a'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 7) ==
+select interval 2 year / 'a'
+       ^^^^^^^^^^^^^^^^^^^^^
 
 
 -- !query
@@ -175,6 +187,9 @@ struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
invalid input syntax for type numeric: 'a'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 7) ==
+select 'a' * interval 2 second
+       ^^^^^^^^^^^^^^^^^^^^^^^
 
 
 -- !query
@@ -184,6 +199,9 @@ struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
invalid input syntax for type numeric: 'a'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 7) ==
+select 'a' * interval 2 year
+       ^^^^^^^^^^^^^^^^^^^^^
 
 
 -- !query
@@ -1499,6 +1517,9 @@ struct<>
 -- !query output
 java.time.DateTimeException
Cannot cast 4 11:11 to TimestampType. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 7) ==
+select '4 11:11' - interval '4 22:12' day to minute
+       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 
 -- !query
@@ -1508,6 +1529,9 @@ struct<>
 -- !query output
 java.time.DateTimeException
Cannot cast 4 12:12:12 to TimestampType. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 7) ==
+select '4 12:12:12' + interval '4 22:12' day to minute
+       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 
 -- !query
@@ -1543,6 +1567,9 @@ struct<>
 -- !query output
 java.time.DateTimeException
Cannot cast 1 to TimestampType. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 7) ==
+select str - interval '4 22:12' day to minute from interval_view
+       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 
 -- !query
@@ -1552,6 +1579,9 @@ struct<>
 -- !query output
 java.time.DateTimeException
 Cannot cast 1 to TimestampType. To return NULL instead, use 'try_cast'. If 
necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 7) ==
+select str + interval '4 22:12' day to minute from interval_view
+       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/string-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/string-functions.sql.out
index c65384673c2..7d07282ab67 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/string-functions.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/string-functions.sql.out
@@ -1,5 +1,5 @@
 -- Automatically generated by SQLQueryTestSuite
--- Number of queries: 142
+-- Number of queries: 143
 
 
 -- !query
@@ -83,6 +83,9 @@ struct<>
 -- !query output
 java.lang.NumberFormatException
invalid input syntax for type numeric: 'a'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 42) ==
+...t("abcd", -2), left("abcd", 0), left("abcd", 'a')
+                                   ^^^^^^^^^^^^^^^^^
 
 
 -- !query
@@ -108,6 +111,9 @@ struct<>
 -- !query output
 java.lang.NumberFormatException
invalid input syntax for type numeric: 'a'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 44) ==
+...("abcd", -2), right("abcd", 0), right("abcd", 'a')
+                                   ^^^^^^^^^^^^^^^^^^
 
 
 -- !query
@@ -414,6 +420,9 @@ struct<>
 -- !query output
 java.lang.NumberFormatException
invalid input syntax for type numeric: 'invalid_length'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 7) ==
+SELECT lpad('hi', 'invalid_length')
+       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 
 -- !query
@@ -423,6 +432,9 @@ struct<>
 -- !query output
 java.lang.NumberFormatException
invalid input syntax for type numeric: 'invalid_length'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 7) ==
+SELECT rpad('hi', 'invalid_length')
+       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/cast.sql.out b/sql/core/src/test/resources/sql-tests/results/cast.sql.out
index 79a1e28a143..9ed02e3bed2 100644
--- a/sql/core/src/test/resources/sql-tests/results/cast.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/cast.sql.out
@@ -1,5 +1,5 @@
 -- Automatically generated by SQLQueryTestSuite
--- Number of queries: 57
+-- Number of queries: 66
 
 
 -- !query
@@ -474,3 +474,75 @@ select cast('123.45' as decimal(4, 2))
 struct<CAST(123.45 AS DECIMAL(4,2)):decimal(4,2)>
 -- !query output
 NULL
+
+
+-- !query
+select cast('xyz' as decimal(4, 2))
+-- !query schema
+struct<CAST(xyz AS DECIMAL(4,2)):decimal(4,2)>
+-- !query output
+NULL
+
+
+-- !query
+select cast('2022-01-01' as date)
+-- !query schema
+struct<CAST(2022-01-01 AS DATE):date>
+-- !query output
+2022-01-01
+
+
+-- !query
+select cast('a' as date)
+-- !query schema
+struct<CAST(a AS DATE):date>
+-- !query output
+NULL
+
+
+-- !query
+select cast('2022-01-01 00:00:00' as timestamp)
+-- !query schema
+struct<CAST(2022-01-01 00:00:00 AS TIMESTAMP):timestamp>
+-- !query output
+2022-01-01 00:00:00
+
+
+-- !query
+select cast('a' as timestamp)
+-- !query schema
+struct<CAST(a AS TIMESTAMP):timestamp>
+-- !query output
+NULL
+
+
+-- !query
+select cast('2022-01-01 00:00:00' as timestamp_ntz)
+-- !query schema
+struct<CAST(2022-01-01 00:00:00 AS TIMESTAMP_NTZ):timestamp_ntz>
+-- !query output
+2022-01-01 00:00:00
+
+
+-- !query
+select cast('a' as timestamp_ntz)
+-- !query schema
+struct<CAST(a AS TIMESTAMP_NTZ):timestamp_ntz>
+-- !query output
+NULL
+
+
+-- !query
+select cast(cast('inf' as double) as timestamp)
+-- !query schema
+struct<CAST(CAST(inf AS DOUBLE) AS TIMESTAMP):timestamp>
+-- !query output
+NULL
+
+
+-- !query
+select cast(cast('inf' as float) as timestamp)
+-- !query schema
+struct<CAST(CAST(inf AS FLOAT) AS TIMESTAMP):timestamp>
+-- !query output
+NULL
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/boolean.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/boolean.sql.out
index 4aba60b0220..166bea4a722 100644
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/boolean.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/boolean.sql.out
@@ -57,6 +57,9 @@ struct<>
 -- !query output
 java.lang.UnsupportedOperationException
invalid input syntax for type boolean: test. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 7) ==
+SELECT boolean('test') AS error
+       ^^^^^^^^^^^^^^^
 
 
 -- !query
@@ -74,6 +77,9 @@ struct<>
 -- !query output
 java.lang.UnsupportedOperationException
invalid input syntax for type boolean: foo. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 7) ==
+SELECT boolean('foo') AS error
+       ^^^^^^^^^^^^^^
 
 
 -- !query
@@ -99,6 +105,9 @@ struct<>
 -- !query output
 java.lang.UnsupportedOperationException
invalid input syntax for type boolean: yeah. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 7) ==
+SELECT boolean('yeah') AS error
+       ^^^^^^^^^^^^^^^
 
 
 -- !query
@@ -124,6 +133,9 @@ struct<>
 -- !query output
 java.lang.UnsupportedOperationException
invalid input syntax for type boolean: nay. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 7) ==
+SELECT boolean('nay') AS error
+       ^^^^^^^^^^^^^^
 
 
 -- !query
@@ -133,6 +145,9 @@ struct<>
 -- !query output
 java.lang.UnsupportedOperationException
invalid input syntax for type boolean: on. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 7) ==
+SELECT boolean('on') AS true
+       ^^^^^^^^^^^^^
 
 
 -- !query
@@ -142,6 +157,9 @@ struct<>
 -- !query output
 java.lang.UnsupportedOperationException
invalid input syntax for type boolean: off. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 7) ==
+SELECT boolean('off') AS `false`
+       ^^^^^^^^^^^^^^
 
 
 -- !query
@@ -151,6 +169,9 @@ struct<>
 -- !query output
 java.lang.UnsupportedOperationException
invalid input syntax for type boolean: of. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 7) ==
+SELECT boolean('of') AS `false`
+       ^^^^^^^^^^^^^
 
 
 -- !query
@@ -160,6 +181,9 @@ struct<>
 -- !query output
 java.lang.UnsupportedOperationException
invalid input syntax for type boolean: o. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 7) ==
+SELECT boolean('o') AS error
+       ^^^^^^^^^^^^
 
 
 -- !query
@@ -169,6 +193,9 @@ struct<>
 -- !query output
 java.lang.UnsupportedOperationException
invalid input syntax for type boolean: on_. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 7) ==
+SELECT boolean('on_') AS error
+       ^^^^^^^^^^^^^^
 
 
 -- !query
@@ -178,6 +205,9 @@ struct<>
 -- !query output
 java.lang.UnsupportedOperationException
invalid input syntax for type boolean: off_. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 7) ==
+SELECT boolean('off_') AS error
+       ^^^^^^^^^^^^^^^
 
 
 -- !query
@@ -195,6 +225,9 @@ struct<>
 -- !query output
 java.lang.UnsupportedOperationException
invalid input syntax for type boolean: 11. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 7) ==
+SELECT boolean('11') AS error
+       ^^^^^^^^^^^^^
 
 
 -- !query
@@ -212,6 +245,9 @@ struct<>
 -- !query output
 java.lang.UnsupportedOperationException
invalid input syntax for type boolean: 000. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 7) ==
+SELECT boolean('000') AS error
+       ^^^^^^^^^^^^^^
 
 
 -- !query
@@ -221,6 +257,9 @@ struct<>
 -- !query output
 java.lang.UnsupportedOperationException
invalid input syntax for type boolean: . To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 7) ==
+SELECT boolean('') AS error
+       ^^^^^^^^^^^
 
 
 -- !query
@@ -327,6 +366,9 @@ struct<>
 -- !query output
 java.lang.UnsupportedOperationException
invalid input syntax for type boolean:   tru e . To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 7) ==
+SELECT boolean(string('  tru e ')) AS invalid
+       ^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 
 -- !query
@@ -336,6 +378,9 @@ struct<>
 -- !query output
 java.lang.UnsupportedOperationException
invalid input syntax for type boolean: . To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 7) ==
+SELECT boolean(string('')) AS invalid
+       ^^^^^^^^^^^^^^^^^^^
 
 
 -- !query
@@ -479,7 +524,11 @@ INSERT INTO BOOLTBL2
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-failed to evaluate expression CAST('XXX' AS BOOLEAN): invalid input syntax for type boolean: XXX. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.; line 2 pos 3
+failed to evaluate expression CAST('XXX' AS BOOLEAN): invalid input syntax for type boolean: XXX. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 2, position 11) ==
+   VALUES (boolean('XXX'))
+           ^^^^^^^^^^^^^^
+; line 2 pos 3
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/float4.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/float4.sql.out
index eccfdbae757..39636e02159 100644
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/float4.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/float4.sql.out
@@ -97,6 +97,9 @@ struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
 invalid input syntax for type numeric: 'N A N'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 7) ==
+SELECT float('N A N')
+       ^^^^^^^^^^^^^^
 
 
 -- !query
@@ -106,6 +109,9 @@ struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
 invalid input syntax for type numeric: 'NaN x'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 7) ==
+SELECT float('NaN x')
+       ^^^^^^^^^^^^^^
 
 
 -- !query
@@ -115,6 +121,9 @@ struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
 invalid input syntax for type numeric: ' INFINITY    x'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 7) ==
+SELECT float(' INFINITY    x')
+       ^^^^^^^^^^^^^^^^^^^^^^^
 
 
 -- !query
@@ -148,6 +157,9 @@ struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
 invalid input syntax for type numeric: 'nan'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 13) ==
+SELECT float(decimal('nan'))
+             ^^^^^^^^^^^^^^
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/float8.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/float8.sql.out
index d143e1f1c59..b2f61306c7c 100644
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/float8.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/float8.sql.out
@@ -129,6 +129,9 @@ struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
 invalid input syntax for type numeric: 'N A N'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 7) ==
+SELECT double('N A N')
+       ^^^^^^^^^^^^^^^
 
 
 -- !query
@@ -138,6 +141,9 @@ struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
 invalid input syntax for type numeric: 'NaN x'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 7) ==
+SELECT double('NaN x')
+       ^^^^^^^^^^^^^^^
 
 
 -- !query
@@ -147,6 +153,9 @@ struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
 invalid input syntax for type numeric: ' INFINITY    x'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 7) ==
+SELECT double(' INFINITY    x')
+       ^^^^^^^^^^^^^^^^^^^^^^^^
 
 
 -- !query
@@ -180,6 +189,9 @@ struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
 invalid input syntax for type numeric: 'nan'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 14) ==
+SELECT double(decimal('nan'))
+              ^^^^^^^^^^^^^^
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/text.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/text.sql.out
index 9f9f212c731..836370935f6 100755
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/text.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/text.sql.out
@@ -66,6 +66,9 @@ struct<>
 -- !query output
 java.lang.NumberFormatException
 invalid input syntax for type numeric: 'four: 2'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 7) ==
+select string('four: ') || 2+2
+       ^^^^^^^^^^^^^^^^^^^^^^^
 
 
 -- !query
@@ -75,6 +78,9 @@ struct<>
 -- !query output
 java.lang.NumberFormatException
 invalid input syntax for type numeric: 'four: 2'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 1, position 7) ==
+select 'four: ' || 2+2
+       ^^^^^^^^^^^^^^^
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part2.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part2.sql.out
index 158196e7c82..1d48d7c7b92 100644
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part2.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part2.sql.out
@@ -463,6 +463,11 @@ struct<>
 -- !query output
 java.lang.NumberFormatException
 invalid input syntax for type numeric: 'NaN'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 3, position 12) ==
+window w as (order by f_numeric range between
+            ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+             1.1 preceding and 'NaN' following)
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part3.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part3.sql.out
index 680c5707a45..c799d65985d 100644
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part3.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part3.sql.out
@@ -72,7 +72,11 @@ insert into datetimes values
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-failed to evaluate expression CAST('11:00 BST' AS TIMESTAMP): Cannot cast 11:00 BST to TimestampType. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.; line 1 pos 22
+failed to evaluate expression CAST('11:00 BST' AS TIMESTAMP): Cannot cast 11:00 BST to TimestampType. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 2, position 23) ==
+(1, timestamp '11:00', cast ('11:00 BST' as timestamp), cast ('1 year' as timestamp), ...
+                       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+; line 1 pos 22
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part4.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part4.sql.out
index c937d663771..87beeacc0bc 100644
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part4.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part4.sql.out
@@ -501,4 +501,8 @@ FROM (VALUES(1,1),(2,2),(3,(cast('nan' as int))),(4,3),(5,4)) t(a,b)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-failed to evaluate expression CAST('nan' AS INT): invalid input syntax for type numeric: 'nan'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.; line 3 pos 6
+failed to evaluate expression CAST('nan' AS INT): invalid input syntax for type numeric: 'nan'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+== SQL(line 3, position 28) ==
+FROM (VALUES(1,1),(2,2),(3,(cast('nan' as int))),(4,3),(5,4)) t(a,b)
+                            ^^^^^^^^^^^^^^^^^^
+; line 3 pos 6
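
For reviewers who want to reproduce the new "== SQL ==" context block
locally, here is a minimal sketch of a spark-sql session. It assumes a
build that includes this patch with ANSI mode enabled; the expected
context lines are copied from the golden-file diffs above.

    -- ANSI mode must be on for the cast to raise instead of returning NULL.
    SET spark.sql.ansi.enabled=true;

    -- Fails with the UnsupportedOperationException shown in the diffs,
    -- now followed by the query context pointing at the failing cast:
    SELECT boolean('o') AS error;
    -- == SQL(line 1, position 7) ==
    -- SELECT boolean('o') AS error
    --        ^^^^^^^^^^^^

    -- As the error message suggests, try_cast returns NULL instead:
    SELECT try_cast('o' AS boolean) AS no_error;

The context block is most valuable for multi-line statements, e.g. the
window_part2.sql case above, where the failing 'NaN' frame bound sits on
line 3 of the statement rather than at its start.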


