This is an automated email from the ASF dual-hosted git repository.

gurwls223 pushed a commit to branch branch-3.3
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-3.3 by this push:
     new 5dfa24d154a [SPARK-38996][SQL][3.3] Use double quotes for types in error messages
5dfa24d154a is described below

commit 5dfa24d154a222f25431040d4434bccb1af8fdb0
Author: Max Gekk <max.g...@gmail.com>
AuthorDate: Mon Apr 25 11:53:27 2022 +0900

    [SPARK-38996][SQL][3.3] Use double quotes for types in error messages
    
    ### What changes were proposed in this pull request?
    
    This PR is a backport of https://github.com/apache/spark/pull/36324
    
    In the PR, I propose to modify the method `QueryErrorsBase.toSQLType()` to use double quotes for types in error messages.
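    
    A minimal sketch of the change (the method body is taken from this PR's diff below; the example value is illustrative):
    
    ```scala
    import org.apache.spark.sql.types.{DataType, IntegerType}
    
    // After this change, the SQL name of the type is wrapped in double quotes:
    def toSQLType(t: DataType): String = "\"" + t.sql + "\""
    
    toSQLType(IntegerType)  // returns "INT" including the quotes; previously: INT
    ```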
    
    ### Why are the changes needed?
    1. To highlight types and make them more visible for users.
    2. To be able to easily parse types from error text (see the sketch after this list).
    3. To be consistent with other outputs, such as identifiers and SQL statements, where Spark uses quotes or ticks.
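    
    For point 2, a minimal sketch of such parsing (hypothetical code, not part of this PR; the message text follows the new quoted format):
    
    ```scala
    // Hypothetical: with quoted types, a simple regex can pull the type out of a message.
    val msg = """Invalid input syntax for type "DECIMAL(4,2)": 'xyz'"""
    val typeInMsg = """type "([^"]+)"""".r
    typeInMsg.findFirstMatchIn(msg).map(_.group(1))  // Some(DECIMAL(4,2))
    ```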
    
    ### Does this PR introduce _any_ user-facing change?
    Yes, the PR changes user-facing errors.
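    
    For example, before and after this change (taken from the updated cast.sql.out golden file below):
    
    ```
    -- before
    Invalid input syntax for type INT: '1.23'
    -- after
    Invalid input syntax for type "INT": '1.23'
    ```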
    
    ### How was this patch tested?
    By running the modified test suites:
    ```
    $ build/sbt "test:testOnly *QueryParsingErrorsSuite"
    $ build/sbt "test:testOnly *QueryCompilationErrorsSuite"
    $ build/sbt "test:testOnly *QueryExecutionErrorsSuite"
    $ build/sbt "testOnly *CastSuite"
    $ build/sbt "testOnly *AnsiCastSuiteWithAnsiModeOn"
    $ build/sbt "testOnly *EncoderResolutionSuite"
    $ build/sbt "test:testOnly *DatasetSuite"
    $ build/sbt "test:testOnly *InsertSuite"
    ```
    
    Authored-by: Max Gekk <max.gekk@gmail.com>
    Signed-off-by: Max Gekk <max.gekk@gmail.com>
    (cherry picked from commit 5e494d3de70c6e46f33addd751a227e6f9d5703f)
    Signed-off-by: Max Gekk <max.gekk@gmail.com>
    
    Closes #36329 from MaxGekk/wrap-types-in-error-classes-3.3.
    
    Authored-by: Max Gekk <max.g...@gmail.com>
    Signed-off-by: Hyukjin Kwon <gurwls...@apache.org>
---
 .../apache/spark/sql/errors/QueryErrorsBase.scala  |  2 +-
 .../catalyst/encoders/EncoderResolutionSuite.scala |  8 +--
 .../catalyst/expressions/AnsiCastSuiteBase.scala   | 36 ++++++------
 .../spark/sql/catalyst/expressions/CastSuite.scala | 66 +++++++++++-----------
 .../sql/catalyst/util/DateFormatterSuite.scala     |  2 +-
 .../catalyst/util/TimestampFormatterSuite.scala    |  2 +-
 .../org/apache/spark/sql/types/DecimalSuite.scala  |  2 +-
 .../resources/sql-tests/results/ansi/cast.sql.out  | 66 +++++++++++-----------
 .../resources/sql-tests/results/ansi/date.sql.out  |  6 +-
 .../results/ansi/datetime-parsing-invalid.sql.out  |  4 +-
 .../sql-tests/results/ansi/interval.sql.out        | 20 +++----
 .../results/ansi/string-functions.sql.out          |  8 +--
 .../sql-tests/results/postgreSQL/float4.sql.out    | 14 ++---
 .../sql-tests/results/postgreSQL/float8.sql.out    | 10 ++--
 .../sql-tests/results/postgreSQL/int8.sql.out      |  8 +--
 .../sql-tests/results/postgreSQL/text.sql.out      |  4 +-
 .../results/postgreSQL/window_part2.sql.out        |  2 +-
 .../results/postgreSQL/window_part3.sql.out        |  2 +-
 .../results/postgreSQL/window_part4.sql.out        |  2 +-
 .../results/timestampNTZ/timestamp-ansi.sql.out    |  2 +-
 .../scala/org/apache/spark/sql/DatasetSuite.scala  |  2 +-
 .../org/apache/spark/sql/SQLInsertTestSuite.scala  |  2 +-
 .../sql/errors/QueryCompilationErrorsSuite.scala   |  4 +-
 .../sql/errors/QueryExecutionErrorsSuite.scala     |  9 +--
 .../org/apache/spark/sql/sources/InsertSuite.scala |  8 +--
 25 files changed, 146 insertions(+), 145 deletions(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryErrorsBase.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryErrorsBase.scala
index 7daf8ae7325..4400bedfd5d 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryErrorsBase.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryErrorsBase.scala
@@ -62,6 +62,6 @@ trait QueryErrorsBase {
   }
 
   def toSQLType(t: DataType): String = {
-    t.sql
+    "\"" + t.sql + "\""
   }
 }
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/EncoderResolutionSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/EncoderResolutionSuite.scala
index a96196669b3..34b8cba6e90 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/EncoderResolutionSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/EncoderResolutionSuite.scala
@@ -88,7 +88,7 @@ class EncoderResolutionSuite extends PlanTest {
     val attrs = Seq('arr.array(StringType))
     assert(intercept[AnalysisException](encoder.resolveAndBind(attrs)).message ==
       s"""
-         |Cannot up cast array element from STRING to BIGINT.
+         |Cannot up cast array element from "STRING" to "BIGINT".
          |The type path of the target object is:
          |- array element class: "scala.Long"
          |- field (class: "scala.Array", name: "arr")
@@ -211,7 +211,7 @@ class EncoderResolutionSuite extends PlanTest {
      val attrs = Seq(attr)
      assert(intercept[AnalysisException](encoder.resolveAndBind(attrs)).message ==
         s"""
-           |Cannot up cast a from ${attr.dataType.sql} to STRING.
+           |Cannot up cast a from "${attr.dataType.sql}" to "STRING".
            |The type path of the target object is:
            |- root class: "java.lang.String"
           |You can either add an explicit cast to the input data or choose a higher precision type
@@ -225,7 +225,7 @@ class EncoderResolutionSuite extends PlanTest {
     }.message
     assert(msg1 ==
       s"""
-         |Cannot up cast b from BIGINT to INT.
+         |Cannot up cast b from "BIGINT" to "INT".
          |The type path of the target object is:
          |- field (class: "scala.Int", name: "b")
          |- root class: "org.apache.spark.sql.catalyst.encoders.StringIntClass"
@@ -238,7 +238,7 @@ class EncoderResolutionSuite extends PlanTest {
     }.message
     assert(msg2 ==
       s"""
-         |Cannot up cast b.`b` from DECIMAL(38,18) to BIGINT.
+         |Cannot up cast b.`b` from "DECIMAL(38,18)" to "BIGINT".
          |The type path of the target object is:
          |- field (class: "scala.Long", name: "b")
         |- field (class: "org.apache.spark.sql.catalyst.encoders.StringLongClass", name: "b")
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/AnsiCastSuiteBase.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/AnsiCastSuiteBase.scala
index 9be144efd77..75327dcfe5a 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/AnsiCastSuiteBase.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/AnsiCastSuiteBase.scala
@@ -176,29 +176,29 @@ abstract class AnsiCastSuiteBase extends CastSuiteBase {
     // cast to IntegerType
     Seq(IntegerType, ShortType, ByteType, LongType).foreach { dataType =>
       checkExceptionInExpression[NumberFormatException](cast("string", 
dataType),
-        s"Invalid input syntax for type ${dataType.sql}: 'string'")
+        s"""Invalid input syntax for type "${dataType.sql}": 'string'""")
       checkExceptionInExpression[NumberFormatException](cast("123-string", 
dataType),
-        s"Invalid input syntax for type ${dataType.sql}: '123-string'")
+        s"""Invalid input syntax for type "${dataType.sql}": '123-string'""")
       checkExceptionInExpression[NumberFormatException](cast("2020-07-19", 
dataType),
-        s"Invalid input syntax for type ${dataType.sql}: '2020-07-19'")
+        s"""Invalid input syntax for type "${dataType.sql}": '2020-07-19'""")
       checkExceptionInExpression[NumberFormatException](cast("1.23", dataType),
-        s"Invalid input syntax for type ${dataType.sql}: '1.23'")
+        s"""Invalid input syntax for type "${dataType.sql}": '1.23'""")
     }
 
     Seq(DoubleType, FloatType, DecimalType.USER_DEFAULT).foreach { dataType =>
       checkExceptionInExpression[NumberFormatException](cast("string", 
dataType),
-        s"Invalid input syntax for type ${dataType.sql}: 'string'")
+        s"""Invalid input syntax for type "${dataType.sql}": 'string'""")
       checkExceptionInExpression[NumberFormatException](cast("123.000.00", 
dataType),
-        s"Invalid input syntax for type ${dataType.sql}: '123.000.00'")
+        s"""Invalid input syntax for type "${dataType.sql}": '123.000.00'""")
       checkExceptionInExpression[NumberFormatException](cast("abc.com", 
dataType),
-        s"Invalid input syntax for type ${dataType.sql}: 'abc.com'")
+        s"""Invalid input syntax for type "${dataType.sql}": 'abc.com'""")
     }
   }
 
   protected def checkCastToNumericError(l: Literal, to: DataType,
       expectedDataTypeInErrorMsg: DataType, tryCastResult: Any): Unit = {
     checkExceptionInExpression[NumberFormatException](
-      cast(l, to), s"Invalid input syntax for type 
${expectedDataTypeInErrorMsg.sql}: 'true'")
+      cast(l, to), s"""Invalid input syntax for type 
"${expectedDataTypeInErrorMsg.sql}": 'true'""")
   }
 
   test("cast from invalid string array to numeric array should throw 
NumberFormatException") {
@@ -245,7 +245,7 @@ abstract class AnsiCastSuiteBase extends CastSuiteBase {
 
     checkExceptionInExpression[NumberFormatException](
       cast("abcd", DecimalType(38, 1)),
-      s"Invalid input syntax for type ${DecimalType(38, 1).sql}: 'abcd'")
+      s"""Invalid input syntax for type "${DecimalType(38, 1).sql}": 'abcd'""")
   }
 
  protected def checkCastToBooleanError(l: Literal, to: DataType, tryCastResult: Any): Unit = {
@@ -260,7 +260,7 @@ abstract class AnsiCastSuiteBase extends CastSuiteBase {
 
   protected def checkCastToTimestampError(l: Literal, to: DataType): Unit = {
     checkExceptionInExpression[DateTimeException](
-      cast(l, to), s"Invalid input syntax for type TIMESTAMP: 
${toSQLValue(l)}")
+      cast(l, to), s"""Invalid input syntax for type "TIMESTAMP": 
${toSQLValue(l)}""")
   }
 
   test("cast from timestamp II") {
@@ -281,7 +281,7 @@ abstract class AnsiCastSuiteBase extends CastSuiteBase {
       assert(negativeTs.getTime < 0)
       Seq(ByteType, ShortType, IntegerType).foreach { dt =>
         checkExceptionInExpression[SparkArithmeticException](
-          cast(negativeTs, dt), s"to ${dt.sql} causes overflow")
+          cast(negativeTs, dt), s"""to "${dt.sql}" causes overflow""")
       }
     }
   }
@@ -292,7 +292,7 @@ abstract class AnsiCastSuiteBase extends CastSuiteBase {
       assert(negativeTs.getTime < 0)
       Seq(ByteType, ShortType, IntegerType).foreach { dt =>
         checkExceptionInExpression[SparkArithmeticException](
-          cast(negativeTs, dt), s"to ${dt.sql} causes overflow")
+          cast(negativeTs, dt), s"""to "${dt.sql}" causes overflow""")
       }
       val expectedSecs = Math.floorDiv(negativeTs.getTime, MILLIS_PER_SECOND)
       checkEvaluation(cast(negativeTs, LongType), expectedSecs)
@@ -371,7 +371,7 @@ abstract class AnsiCastSuiteBase extends CastSuiteBase {
       assert(ret.resolved == !isTryCast)
       if (!isTryCast) {
         checkExceptionInExpression[NumberFormatException](
-          ret, s"Invalid input syntax for type ${IntegerType.sql}")
+          ret, s"""Invalid input syntax for type "${IntegerType.sql}"""")
       }
     }
 
@@ -389,7 +389,7 @@ abstract class AnsiCastSuiteBase extends CastSuiteBase {
       assert(ret.resolved == !isTryCast)
       if (!isTryCast) {
         checkExceptionInExpression[NumberFormatException](
-          ret, s"Invalid input syntax for type ${IntegerType.sql}")
+          ret, s"""Invalid input syntax for type "${IntegerType.sql}"""")
       }
     }
   }
@@ -514,7 +514,7 @@ abstract class AnsiCastSuiteBase extends CastSuiteBase {
     assert(ret.resolved === !isTryCast)
     if (!isTryCast) {
       checkExceptionInExpression[NumberFormatException](
-        ret, s"Invalid input syntax for type ${IntegerType.sql}")
+        ret, s"""Invalid input syntax for type "${IntegerType.sql}"""")
     }
   }
 
@@ -523,7 +523,7 @@ abstract class AnsiCastSuiteBase extends CastSuiteBase {
       def checkCastWithParseError(str: String): Unit = {
         checkExceptionInExpression[DateTimeException](
           cast(Literal(str), TimestampType, Option(zid.getId)),
-          s"Invalid input syntax for type TIMESTAMP: '$str'")
+          s"""Invalid input syntax for type "TIMESTAMP": '$str'""")
       }
 
       checkCastWithParseError("123")
@@ -544,7 +544,7 @@ abstract class AnsiCastSuiteBase extends CastSuiteBase {
       def checkCastWithParseError(str: String): Unit = {
         checkExceptionInExpression[DateTimeException](
           cast(Literal(str), DateType, Option(zid.getId)),
-          s"Invalid input syntax for type DATE: '$str'")
+          s"""Invalid input syntax for type "DATE": '$str'""")
       }
 
       checkCastWithParseError("2015-13-18")
@@ -572,7 +572,7 @@ abstract class AnsiCastSuiteBase extends CastSuiteBase {
       "2021-06-17 00:00:00ABC").foreach { invalidInput =>
       checkExceptionInExpression[DateTimeException](
         cast(invalidInput, TimestampNTZType),
-        s"Invalid input syntax for type TIMESTAMP_NTZ: '$invalidInput'")
+        s"""Invalid input syntax for type "TIMESTAMP_NTZ": '$invalidInput'""")
     }
   }
 }
diff --git 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuite.scala
 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuite.scala
index fe53dd0e581..f78a1d30d72 100644
--- 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuite.scala
+++ 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuite.scala
@@ -592,15 +592,15 @@ class CastSuite extends CastSuiteBase {
       val e1 = intercept[ArithmeticException] {
         Cast(Literal(Byte.MaxValue + 1), ByteType).eval()
       }.getMessage
-      assert(e1.contains("Casting 128 to TINYINT causes overflow"))
+      assert(e1.contains("Casting 128 to \"TINYINT\" causes overflow"))
       val e2 = intercept[ArithmeticException] {
         Cast(Literal(Short.MaxValue + 1), ShortType).eval()
       }.getMessage
-      assert(e2.contains("Casting 32768 to SMALLINT causes overflow"))
+      assert(e2.contains("Casting 32768 to \"SMALLINT\" causes overflow"))
       val e3 = intercept[ArithmeticException] {
         Cast(Literal(Int.MaxValue + 1L), IntegerType).eval()
       }.getMessage
-      assert(e3.contains("Casting 2147483648L to INT causes overflow"))
+      assert(e3.contains("Casting 2147483648L to \"INT\" causes overflow"))
     }
   }
 
@@ -642,15 +642,15 @@ class CastSuite extends CastSuiteBase {
           checkEvaluation(cast(v2, LongType), 25L)
         case MINUTE =>
           checkExceptionInExpression[ArithmeticException](cast(v2, ByteType),
-            s"Casting $v2 to TINYINT causes overflow")
+            s"""Casting $v2 to "TINYINT" causes overflow""")
          checkEvaluation(cast(v2, ShortType), (MINUTES_PER_HOUR * 25 + 1).toShort)
          checkEvaluation(cast(v2, IntegerType), (MINUTES_PER_HOUR * 25 + 1).toInt)
           checkEvaluation(cast(v2, LongType), MINUTES_PER_HOUR * 25 + 1)
         case SECOND =>
           checkExceptionInExpression[ArithmeticException](cast(v2, ByteType),
-            s"Casting $v2 to TINYINT causes overflow")
+            s"""Casting $v2 to "TINYINT" causes overflow""")
           checkExceptionInExpression[ArithmeticException](cast(v2, ShortType),
-            s"Casting $v2 to SMALLINT causes overflow")
+            s"""Casting $v2 to "SMALLINT" causes overflow""")
           checkEvaluation(cast(v2, IntegerType), num.toInt)
           checkEvaluation(cast(v2, LongType), num)
       }
@@ -659,34 +659,34 @@ class CastSuite extends CastSuiteBase {
       dt.endField match {
         case DAY =>
           checkExceptionInExpression[ArithmeticException](cast(v3, ByteType),
-            s"Casting $v3 to TINYINT causes overflow")
+            s"""Casting $v3 to "TINYINT" causes overflow""")
           checkExceptionInExpression[ArithmeticException](cast(v3, ShortType),
-            s"Casting $v3 to SMALLINT causes overflow")
+            s"""Casting $v3 to "SMALLINT" causes overflow""")
          checkEvaluation(cast(v3, IntegerType), (Long.MaxValue / MICROS_PER_DAY).toInt)
           checkEvaluation(cast(v3, LongType), Long.MaxValue / MICROS_PER_DAY)
         case HOUR =>
           checkExceptionInExpression[ArithmeticException](cast(v3, ByteType),
-            s"Casting $v3 to TINYINT causes overflow")
+            s"""Casting $v3 to "TINYINT" causes overflow""")
           checkExceptionInExpression[ArithmeticException](cast(v3, ShortType),
-            s"Casting $v3 to SMALLINT causes overflow")
+            s"""Casting $v3 to "SMALLINT" causes overflow""")
          checkExceptionInExpression[ArithmeticException](cast(v3, IntegerType),
-            s"Casting $v3 to INT causes overflow")
+            s"""Casting $v3 to "INT" causes overflow""")
           checkEvaluation(cast(v3, LongType), Long.MaxValue / MICROS_PER_HOUR)
         case MINUTE =>
           checkExceptionInExpression[ArithmeticException](cast(v3, ByteType),
-            s"Casting $v3 to TINYINT causes overflow")
+            s"""Casting $v3 to "TINYINT" causes overflow""")
           checkExceptionInExpression[ArithmeticException](cast(v3, ShortType),
-            s"Casting $v3 to SMALLINT causes overflow")
+            s"""Casting $v3 to "SMALLINT" causes overflow""")
          checkExceptionInExpression[ArithmeticException](cast(v3, IntegerType),
-            s"Casting $v3 to INT causes overflow")
+            s"""Casting $v3 to "INT" causes overflow""")
          checkEvaluation(cast(v3, LongType), Long.MaxValue / MICROS_PER_MINUTE)
         case SECOND =>
           checkExceptionInExpression[ArithmeticException](cast(v3, ByteType),
-            s"Casting $v3 to TINYINT causes overflow")
+            s"""Casting $v3 to "TINYINT" causes overflow""")
           checkExceptionInExpression[ArithmeticException](cast(v3, ShortType),
-            s"Casting $v3 to SMALLINT causes overflow")
+            s"""Casting $v3 to "SMALLINT" causes overflow""")
          checkExceptionInExpression[ArithmeticException](cast(v3, IntegerType),
-            s"Casting $v3 to INT causes overflow")
+            s"""Casting $v3 to "INT" causes overflow""")
          checkEvaluation(cast(v3, LongType), Long.MaxValue / MICROS_PER_SECOND)
       }
 
@@ -694,34 +694,34 @@ class CastSuite extends CastSuiteBase {
       dt.endField match {
         case DAY =>
           checkExceptionInExpression[ArithmeticException](cast(v4, ByteType),
-            s"Casting $v4 to TINYINT causes overflow")
+            s"""Casting $v4 to "TINYINT" causes overflow""")
           checkExceptionInExpression[ArithmeticException](cast(v4, ShortType),
-            s"Casting $v4 to SMALLINT causes overflow")
+            s"""Casting $v4 to "SMALLINT" causes overflow""")
          checkEvaluation(cast(v4, IntegerType), (Long.MinValue / MICROS_PER_DAY).toInt)
           checkEvaluation(cast(v4, LongType), Long.MinValue / MICROS_PER_DAY)
         case HOUR =>
           checkExceptionInExpression[ArithmeticException](cast(v4, ByteType),
-            s"Casting $v4 to TINYINT causes overflow")
+            s"""Casting $v4 to "TINYINT" causes overflow""")
           checkExceptionInExpression[ArithmeticException](cast(v4, ShortType),
-            s"Casting $v4 to SMALLINT causes overflow")
+            s"""Casting $v4 to "SMALLINT" causes overflow""")
          checkExceptionInExpression[ArithmeticException](cast(v4, IntegerType),
-            s"Casting $v4 to INT causes overflow")
+            s"""Casting $v4 to "INT" causes overflow""")
           checkEvaluation(cast(v4, LongType), Long.MinValue / MICROS_PER_HOUR)
         case MINUTE =>
           checkExceptionInExpression[ArithmeticException](cast(v4, ByteType),
-            s"Casting $v4 to TINYINT causes overflow")
+            s"""Casting $v4 to "TINYINT" causes overflow""")
           checkExceptionInExpression[ArithmeticException](cast(v4, ShortType),
-            s"Casting $v4 to SMALLINT causes overflow")
+            s"""Casting $v4 to "SMALLINT" causes overflow""")
          checkExceptionInExpression[ArithmeticException](cast(v4, IntegerType),
-            s"Casting $v4 to INT causes overflow")
+            s"""Casting $v4 to "INT" causes overflow""")
          checkEvaluation(cast(v4, LongType), Long.MinValue / MICROS_PER_MINUTE)
         case SECOND =>
           checkExceptionInExpression[ArithmeticException](cast(v4, ByteType),
-            s"Casting $v4 to TINYINT causes overflow")
+            s"""Casting $v4 to "TINYINT" causes overflow""")
           checkExceptionInExpression[ArithmeticException](cast(v4, ShortType),
-            s"Casting $v4 to SMALLINT causes overflow")
+            s"""Casting $v4 to "SMALLINT" causes overflow""")
          checkExceptionInExpression[ArithmeticException](cast(v4, IntegerType),
-            s"Casting $v4 to INT causes overflow")
+            s"""Casting $v4 to "INT" causes overflow""")
          checkEvaluation(cast(v4, LongType), Long.MinValue / MICROS_PER_SECOND)
       }
     }
@@ -777,7 +777,7 @@ class CastSuite extends CastSuiteBase {
     ).foreach {
       case (v, toType) =>
         checkExceptionInExpression[ArithmeticException](cast(v, toType),
-          s"Casting $v to ${toType.sql} causes overflow")
+          s"""Casting $v to "${toType.sql}" causes overflow""")
     }
 
     Seq(
@@ -792,7 +792,7 @@ class CastSuite extends CastSuiteBase {
     ).foreach {
       case (v, toType) =>
         checkExceptionInExpression[ArithmeticException](cast(v, toType),
-          s"Casting ${v}L to ${toType.sql} causes overflow")
+          s"""Casting ${v}L to "${toType.sql}" causes overflow""")
     }
   }
 
@@ -829,7 +829,7 @@ class CastSuite extends CastSuiteBase {
       case (v, dt, toType) =>
         val value = Literal.create(v, dt)
         checkExceptionInExpression[ArithmeticException](cast(value, toType),
-          s"Casting $value to ${toType.sql} causes overflow")
+          s"""Casting $value to "${toType.sql}" causes overflow""")
     }
 
     Seq(
@@ -887,7 +887,7 @@ class CastSuite extends CastSuiteBase {
     ).foreach {
       case (v, toType) =>
         checkExceptionInExpression[ArithmeticException](cast(v, toType),
-          s"Casting $v to ${toType.sql} causes overflow")
+          s"""Casting $v to "${toType.sql}" causes overflow""")
     }
 
     Seq(
@@ -898,7 +898,7 @@ class CastSuite extends CastSuiteBase {
     ).foreach {
       case (v, toType) =>
         checkExceptionInExpression[ArithmeticException](cast(v, toType),
-          s"Casting ${v}L to ${toType.sql} causes overflow")
+          s"""Casting ${v}L to "${toType.sql}" causes overflow""")
     }
   }
 }
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateFormatterSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateFormatterSuite.scala
index 71351f6263f..cae89b64e06 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateFormatterSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateFormatterSuite.scala
@@ -208,6 +208,6 @@ class DateFormatterSuite extends DatetimeFormatterSuite {
     val errMsg = intercept[DateTimeException] {
       formatter.parse("x123")
     }.getMessage
-    assert(errMsg.contains("Invalid input syntax for type DATE: 'x123'"))
+    assert(errMsg.contains("""Invalid input syntax for type "DATE": 'x123'"""))
   }
 }
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/TimestampFormatterSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/TimestampFormatterSuite.scala
index 204fe93e2d1..c812f8b9b73 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/TimestampFormatterSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/TimestampFormatterSuite.scala
@@ -453,7 +453,7 @@ class TimestampFormatterSuite extends DatetimeFormatterSuite {
       val errMsg = intercept[DateTimeException] {
         formatter.parse("x123")
       }.getMessage
-      assert(errMsg.contains("Invalid input syntax for type TIMESTAMP: 
'x123'"))
+      assert(errMsg.contains("""Invalid input syntax for type "TIMESTAMP": 
'x123'"""))
     }
   }
 }
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DecimalSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DecimalSuite.scala
index 77b07ce533e..0e9222a0196 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DecimalSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DecimalSuite.scala
@@ -285,7 +285,7 @@ class DecimalSuite extends SparkFunSuite with PrivateMethodTester with SQLHelper
     assert(Decimal.fromString(UTF8String.fromString("str")) === null)
    val e = intercept[NumberFormatException](Decimal.fromStringANSI(UTF8String.fromString("str")))
     assert(e.getMessage.contains("Invalid input syntax for type " +
-      s"${DecimalType.USER_DEFAULT.sql}: 'str'"))
+      s""""${DecimalType.USER_DEFAULT.sql}": 'str'"""))
   }
 
   test("SPARK-35841: Casting string to decimal type doesn't work " +
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out
index a2cb4ca1125..ec4770b9508 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/cast.sql.out
@@ -8,7 +8,7 @@ SELECT CAST('1.23' AS int)
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-Invalid input syntax for type INT: '1.23'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type "INT": '1.23'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 == SQL(line 1, position 7) ==
 SELECT CAST('1.23' AS int)
        ^^^^^^^^^^^^^^^^^^^
@@ -20,7 +20,7 @@ SELECT CAST('1.23' AS long)
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-Invalid input syntax for type BIGINT: '1.23'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type "BIGINT": '1.23'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 == SQL(line 1, position 7) ==
 SELECT CAST('1.23' AS long)
        ^^^^^^^^^^^^^^^^^^^^
@@ -32,7 +32,7 @@ SELECT CAST('-4.56' AS int)
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-Invalid input syntax for type INT: '-4.56'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type "INT": '-4.56'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 == SQL(line 1, position 7) ==
 SELECT CAST('-4.56' AS int)
        ^^^^^^^^^^^^^^^^^^^^
@@ -44,7 +44,7 @@ SELECT CAST('-4.56' AS long)
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-Invalid input syntax for type BIGINT: '-4.56'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type "BIGINT": '-4.56'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 == SQL(line 1, position 7) ==
 SELECT CAST('-4.56' AS long)
        ^^^^^^^^^^^^^^^^^^^^^
@@ -56,7 +56,7 @@ SELECT CAST('abc' AS int)
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-Invalid input syntax for type INT: 'abc'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type "INT": 'abc'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 == SQL(line 1, position 7) ==
 SELECT CAST('abc' AS int)
        ^^^^^^^^^^^^^^^^^^
@@ -68,7 +68,7 @@ SELECT CAST('abc' AS long)
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-Invalid input syntax for type BIGINT: 'abc'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type "BIGINT": 'abc'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 == SQL(line 1, position 7) ==
 SELECT CAST('abc' AS long)
        ^^^^^^^^^^^^^^^^^^^
@@ -80,7 +80,7 @@ SELECT CAST('abc' AS float)
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-Invalid input syntax for type FLOAT: 'abc'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type "FLOAT": 'abc'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 == SQL(line 1, position 7) ==
 SELECT CAST('abc' AS float)
        ^^^^^^^^^^^^^^^^^^^^
@@ -92,7 +92,7 @@ SELECT CAST('abc' AS double)
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-Invalid input syntax for type DOUBLE: 'abc'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type "DOUBLE": 'abc'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 == SQL(line 1, position 7) ==
 SELECT CAST('abc' AS double)
        ^^^^^^^^^^^^^^^^^^^^^
@@ -104,7 +104,7 @@ SELECT CAST('1234567890123' AS int)
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-Invalid input syntax for type INT: '1234567890123'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type "INT": '1234567890123'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 == SQL(line 1, position 7) ==
 SELECT CAST('1234567890123' AS int)
        ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -116,7 +116,7 @@ SELECT CAST('12345678901234567890123' AS long)
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-Invalid input syntax for type BIGINT: '12345678901234567890123'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type "BIGINT": '12345678901234567890123'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 == SQL(line 1, position 7) ==
 SELECT CAST('12345678901234567890123' AS long)
        ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -128,7 +128,7 @@ SELECT CAST('' AS int)
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-Invalid input syntax for type INT: ''. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type "INT": ''. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 == SQL(line 1, position 7) ==
 SELECT CAST('' AS int)
        ^^^^^^^^^^^^^^^
@@ -140,7 +140,7 @@ SELECT CAST('' AS long)
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-Invalid input syntax for type BIGINT: ''. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type "BIGINT": ''. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 == SQL(line 1, position 7) ==
 SELECT CAST('' AS long)
        ^^^^^^^^^^^^^^^^
@@ -152,7 +152,7 @@ SELECT CAST('' AS float)
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-Invalid input syntax for type FLOAT: ''. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type "FLOAT": ''. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 == SQL(line 1, position 7) ==
 SELECT CAST('' AS float)
        ^^^^^^^^^^^^^^^^^
@@ -164,7 +164,7 @@ SELECT CAST('' AS double)
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-Invalid input syntax for type DOUBLE: ''. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type "DOUBLE": ''. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 == SQL(line 1, position 7) ==
 SELECT CAST('' AS double)
        ^^^^^^^^^^^^^^^^^^
@@ -192,7 +192,7 @@ SELECT CAST('123.a' AS int)
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-Invalid input syntax for type INT: '123.a'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type "INT": '123.a'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 == SQL(line 1, position 7) ==
 SELECT CAST('123.a' AS int)
        ^^^^^^^^^^^^^^^^^^^^
@@ -204,7 +204,7 @@ SELECT CAST('123.a' AS long)
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-Invalid input syntax for type BIGINT: '123.a'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type "BIGINT": '123.a'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 == SQL(line 1, position 7) ==
 SELECT CAST('123.a' AS long)
        ^^^^^^^^^^^^^^^^^^^^^
@@ -216,7 +216,7 @@ SELECT CAST('123.a' AS float)
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-Invalid input syntax for type FLOAT: '123.a'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type "FLOAT": '123.a'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 == SQL(line 1, position 7) ==
 SELECT CAST('123.a' AS float)
        ^^^^^^^^^^^^^^^^^^^^^^
@@ -228,7 +228,7 @@ SELECT CAST('123.a' AS double)
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-Invalid input syntax for type DOUBLE: '123.a'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type "DOUBLE": '123.a'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 == SQL(line 1, position 7) ==
 SELECT CAST('123.a' AS double)
        ^^^^^^^^^^^^^^^^^^^^^^^
@@ -248,7 +248,7 @@ SELECT CAST('-2147483649' AS int)
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-Invalid input syntax for type INT: '-2147483649'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type "INT": '-2147483649'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 == SQL(line 1, position 7) ==
 SELECT CAST('-2147483649' AS int)
        ^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -268,7 +268,7 @@ SELECT CAST('2147483648' AS int)
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-Invalid input syntax for type INT: '2147483648'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type "INT": '2147483648'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 == SQL(line 1, position 7) ==
 SELECT CAST('2147483648' AS int)
        ^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -288,7 +288,7 @@ SELECT CAST('-9223372036854775809' AS long)
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-Invalid input syntax for type BIGINT: '-9223372036854775809'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type "BIGINT": '-9223372036854775809'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 == SQL(line 1, position 7) ==
 SELECT CAST('-9223372036854775809' AS long)
        ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -308,7 +308,7 @@ SELECT CAST('9223372036854775808' AS long)
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-Invalid input syntax for type BIGINT: '9223372036854775808'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type "BIGINT": '9223372036854775808'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 == SQL(line 1, position 7) ==
 SELECT CAST('9223372036854775808' AS long)
        ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -567,7 +567,7 @@ select cast('1中文' as tinyint)
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-Invalid input syntax for type TINYINT: '1中文'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type "TINYINT": '1中文'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 == SQL(line 1, position 7) ==
 select cast('1中文' as tinyint)
        ^^^^^^^^^^^^^^^^^^^^^^
@@ -579,7 +579,7 @@ select cast('1中文' as smallint)
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-Invalid input syntax for type SMALLINT: '1中文'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type "SMALLINT": '1中文'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 == SQL(line 1, position 7) ==
 select cast('1中文' as smallint)
        ^^^^^^^^^^^^^^^^^^^^^^^
@@ -591,7 +591,7 @@ select cast('1中文' as INT)
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-Invalid input syntax for type INT: '1中文'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type "INT": '1中文'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 == SQL(line 1, position 7) ==
 select cast('1中文' as INT)
        ^^^^^^^^^^^^^^^^^^
@@ -603,7 +603,7 @@ select cast('中文1' as bigint)
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-Invalid input syntax for type BIGINT: '中文1'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type "BIGINT": '中文1'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 == SQL(line 1, position 7) ==
 select cast('中文1' as bigint)
        ^^^^^^^^^^^^^^^^^^^^^
@@ -615,7 +615,7 @@ select cast('1中文' as bigint)
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-Invalid input syntax for type BIGINT: '1中文'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type "BIGINT": '1中文'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 == SQL(line 1, position 7) ==
 select cast('1中文' as bigint)
        ^^^^^^^^^^^^^^^^^^^^^
@@ -678,7 +678,7 @@ select cast('xyz' as decimal(4, 2))
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-Invalid input syntax for type DECIMAL(4,2): 'xyz'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type "DECIMAL(4,2)": 'xyz'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 == SQL(line 1, position 7) ==
 select cast('xyz' as decimal(4, 2))
        ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -698,7 +698,7 @@ select cast('a' as date)
 struct<>
 -- !query output
 java.time.DateTimeException
-Invalid input syntax for type DATE: 'a'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type "DATE": 'a'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 == SQL(line 1, position 7) ==
 select cast('a' as date)
        ^^^^^^^^^^^^^^^^^
@@ -718,7 +718,7 @@ select cast('a' as timestamp)
 struct<>
 -- !query output
 java.time.DateTimeException
-Invalid input syntax for type TIMESTAMP: 'a'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type "TIMESTAMP": 'a'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 == SQL(line 1, position 7) ==
 select cast('a' as timestamp)
        ^^^^^^^^^^^^^^^^^^^^^^
@@ -738,7 +738,7 @@ select cast('a' as timestamp_ntz)
 struct<>
 -- !query output
 java.time.DateTimeException
-Invalid input syntax for type TIMESTAMP_NTZ: 'a'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type "TIMESTAMP_NTZ": 'a'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 == SQL(line 1, position 7) ==
 select cast('a' as timestamp_ntz)
        ^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -750,7 +750,7 @@ select cast(cast('inf' as double) as timestamp)
 struct<>
 -- !query output
 java.time.DateTimeException
-Invalid input syntax for type TIMESTAMP: Infinity. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type "TIMESTAMP": Infinity. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 == SQL(line 1, position 7) ==
 select cast(cast('inf' as double) as timestamp)
        ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -762,7 +762,7 @@ select cast(cast('inf' as float) as timestamp)
 struct<>
 -- !query output
 java.time.DateTimeException
-Invalid input syntax for type TIMESTAMP: Infinity. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type "TIMESTAMP": Infinity. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 == SQL(line 1, position 7) ==
 select cast(cast('inf' as float) as timestamp)
        ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out
index fa65b4dd071..d13585c7fd1 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out
@@ -232,7 +232,7 @@ select next_day("xx", "Mon")
 struct<>
 -- !query output
 java.time.DateTimeException
-Invalid input syntax for type DATE: 'xx'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type "DATE": 'xx'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 == SQL(line 1, position 7) ==
 select next_day("xx", "Mon")
        ^^^^^^^^^^^^^^^^^^^^^
@@ -327,7 +327,7 @@ select date_add('2011-11-11', '1.2')
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-Invalid input syntax for type INT: '1.2'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type "INT": '1.2'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 == SQL(line 1, position 7) ==
 select date_add('2011-11-11', '1.2')
        ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -438,7 +438,7 @@ select date_sub(date'2011-11-11', '1.2')
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-Invalid input syntax for type INT: '1.2'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type "INT": '1.2'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 == SQL(line 1, position 7) ==
 select date_sub(date'2011-11-11', '1.2')
        ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/datetime-parsing-invalid.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/datetime-parsing-invalid.sql.out
index e30b592020d..d19b94df3fe 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/datetime-parsing-invalid.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/datetime-parsing-invalid.sql.out
@@ -242,7 +242,7 @@ select cast("Unparseable" as timestamp)
 struct<>
 -- !query output
 java.time.DateTimeException
-Invalid input syntax for type TIMESTAMP: 'Unparseable'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type "TIMESTAMP": 'Unparseable'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 == SQL(line 1, position 7) ==
 select cast("Unparseable" as timestamp)
        ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -254,7 +254,7 @@ select cast("Unparseable" as date)
 struct<>
 -- !query output
 java.time.DateTimeException
-Invalid input syntax for type DATE: 'Unparseable'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type "DATE": 'Unparseable'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 == SQL(line 1, position 7) ==
 select cast("Unparseable" as date)
        ^^^^^^^^^^^^^^^^^^^^^^^^^^^
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
index d7975dfb58a..30b14eceb70 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
@@ -122,7 +122,7 @@ select interval 2 second * 'a'
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-Invalid input syntax for type DOUBLE: 'a'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type "DOUBLE": 'a'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 == SQL(line 1, position 7) ==
 select interval 2 second * 'a'
        ^^^^^^^^^^^^^^^^^^^^^^^
@@ -134,7 +134,7 @@ select interval 2 second / 'a'
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-Invalid input syntax for type DOUBLE: 'a'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type "DOUBLE": 'a'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 == SQL(line 1, position 7) ==
 select interval 2 second / 'a'
        ^^^^^^^^^^^^^^^^^^^^^^^
@@ -146,7 +146,7 @@ select interval 2 year * 'a'
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-Invalid input syntax for type DOUBLE: 'a'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type "DOUBLE": 'a'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 == SQL(line 1, position 7) ==
 select interval 2 year * 'a'
        ^^^^^^^^^^^^^^^^^^^^^
@@ -158,7 +158,7 @@ select interval 2 year / 'a'
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-Invalid input syntax for type DOUBLE: 'a'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type "DOUBLE": 'a'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 == SQL(line 1, position 7) ==
 select interval 2 year / 'a'
        ^^^^^^^^^^^^^^^^^^^^^
@@ -186,7 +186,7 @@ select 'a' * interval 2 second
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-Invalid input syntax for type DOUBLE: 'a'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type "DOUBLE": 'a'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 == SQL(line 1, position 7) ==
 select 'a' * interval 2 second
        ^^^^^^^^^^^^^^^^^^^^^^^
@@ -198,7 +198,7 @@ select 'a' * interval 2 year
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-Invalid input syntax for type DOUBLE: 'a'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type "DOUBLE": 'a'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 == SQL(line 1, position 7) ==
 select 'a' * interval 2 year
        ^^^^^^^^^^^^^^^^^^^^^
@@ -1516,7 +1516,7 @@ select '4 11:11' - interval '4 22:12' day to minute
 struct<>
 -- !query output
 java.time.DateTimeException
-Invalid input syntax for type TIMESTAMP: '4 11:11'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type "TIMESTAMP": '4 11:11'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 == SQL(line 1, position 7) ==
 select '4 11:11' - interval '4 22:12' day to minute
        ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -1528,7 +1528,7 @@ select '4 12:12:12' + interval '4 22:12' day to minute
 struct<>
 -- !query output
 java.time.DateTimeException
-Invalid input syntax for type TIMESTAMP: '4 12:12:12'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type "TIMESTAMP": '4 12:12:12'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 == SQL(line 1, position 7) ==
 select '4 12:12:12' + interval '4 22:12' day to minute
        ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -1566,7 +1566,7 @@ select str - interval '4 22:12' day to minute from interval_view
 struct<>
 -- !query output
 java.time.DateTimeException
-Invalid input syntax for type TIMESTAMP: '1'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type "TIMESTAMP": '1'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 == SQL(line 1, position 7) ==
 select str - interval '4 22:12' day to minute from interval_view
        ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -1578,7 +1578,7 @@ select str + interval '4 22:12' day to minute from interval_view
 struct<>
 -- !query output
 java.time.DateTimeException
-Invalid input syntax for type TIMESTAMP: '1'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type "TIMESTAMP": '1'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 == SQL(line 1, position 7) ==
 select str + interval '4 22:12' day to minute from interval_view
        ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/string-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/string-functions.sql.out
index 083471b15d4..814655ba68a 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/string-functions.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/string-functions.sql.out
@@ -82,7 +82,7 @@ select left("abcd", -2), left("abcd", 0), left("abcd", 'a')
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-Invalid input syntax for type INT: 'a'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type "INT": 'a'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 == SQL(line 1, position 42) ==
 ...t("abcd", -2), left("abcd", 0), left("abcd", 'a')
                                    ^^^^^^^^^^^^^^^^^
@@ -110,7 +110,7 @@ select right("abcd", -2), right("abcd", 0), right("abcd", 'a')
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-Invalid input syntax for type INT: 'a'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type "INT": 'a'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 == SQL(line 1, position 44) ==
 ...("abcd", -2), right("abcd", 0), right("abcd", 'a')
                                    ^^^^^^^^^^^^^^^^^^
@@ -419,7 +419,7 @@ SELECT lpad('hi', 'invalid_length')
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-Invalid input syntax for type INT: 'invalid_length'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type "INT": 'invalid_length'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 == SQL(line 1, position 7) ==
 SELECT lpad('hi', 'invalid_length')
        ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -431,7 +431,7 @@ SELECT rpad('hi', 'invalid_length')
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-Invalid input syntax for type INT: 'invalid_length'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type "INT": 'invalid_length'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 == SQL(line 1, position 7) ==
 SELECT rpad('hi', 'invalid_length')
        ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/float4.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/float4.sql.out
index b63d2d1307e..106a139c3b6 100644
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/float4.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/float4.sql.out
@@ -96,7 +96,7 @@ SELECT float('N A N')
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-Invalid input syntax for type FLOAT: 'N A N'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type "FLOAT": 'N A N'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 == SQL(line 1, position 7) ==
 SELECT float('N A N')
        ^^^^^^^^^^^^^^
@@ -108,7 +108,7 @@ SELECT float('NaN x')
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-Invalid input syntax for type FLOAT: 'NaN x'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type "FLOAT": 'NaN x'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 == SQL(line 1, position 7) ==
 SELECT float('NaN x')
        ^^^^^^^^^^^^^^
@@ -120,7 +120,7 @@ SELECT float(' INFINITY    x')
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-Invalid input syntax for type FLOAT: ' INFINITY    x'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type "FLOAT": ' INFINITY    x'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 == SQL(line 1, position 7) ==
 SELECT float(' INFINITY    x')
        ^^^^^^^^^^^^^^^^^^^^^^^
@@ -156,7 +156,7 @@ SELECT float(decimal('nan'))
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-Invalid input syntax for type DECIMAL(10,0): 'nan'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type "DECIMAL(10,0)": 'nan'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 == SQL(line 1, position 13) ==
 SELECT float(decimal('nan'))
              ^^^^^^^^^^^^^^
@@ -340,7 +340,7 @@ SELECT int(float('2147483647'))
 struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
-Casting 2.14748365E9 to INT causes overflow. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Casting 2.14748365E9 to "INT" causes overflow. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
@@ -357,7 +357,7 @@ SELECT int(float('-2147483900'))
 struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
-Casting -2.1474839E9 to INT causes overflow. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Casting -2.1474839E9 to "INT" causes overflow. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
@@ -390,7 +390,7 @@ SELECT bigint(float('-9223380000000000000'))
 struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
-Casting -9.22338E18 to BIGINT causes overflow. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Casting -9.22338E18 to "BIGINT" causes overflow. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/float8.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/float8.sql.out
index b0582c09523..57f97e3c242 100644
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/float8.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/float8.sql.out
@@ -128,7 +128,7 @@ SELECT double('N A N')
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-Invalid input syntax for type DOUBLE: 'N A N'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type "DOUBLE": 'N A N'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 == SQL(line 1, position 7) ==
 SELECT double('N A N')
        ^^^^^^^^^^^^^^^
@@ -140,7 +140,7 @@ SELECT double('NaN x')
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-Invalid input syntax for type DOUBLE: 'NaN x'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type "DOUBLE": 'NaN x'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 == SQL(line 1, position 7) ==
 SELECT double('NaN x')
        ^^^^^^^^^^^^^^^
@@ -152,7 +152,7 @@ SELECT double(' INFINITY    x')
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-Invalid input syntax for type DOUBLE: ' INFINITY    x'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type "DOUBLE": ' INFINITY    x'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 == SQL(line 1, position 7) ==
 SELECT double(' INFINITY    x')
        ^^^^^^^^^^^^^^^^^^^^^^^^
@@ -188,7 +188,7 @@ SELECT double(decimal('nan'))
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-Invalid input syntax for type DECIMAL(10,0): 'nan'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type "DECIMAL(10,0)": 'nan'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 == SQL(line 1, position 14) ==
 SELECT double(decimal('nan'))
               ^^^^^^^^^^^^^^
@@ -845,7 +845,7 @@ SELECT bigint(double('-9223372036854780000'))
 struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
-Casting -9.22337203685478E18D to BIGINT causes overflow. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Casting -9.22337203685478E18D to "BIGINT" causes overflow. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/int8.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/int8.sql.out
index be1fce4b41e..7939887b4e2 100755
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/int8.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/int8.sql.out
@@ -619,7 +619,7 @@ SELECT CAST(q1 AS int) FROM int8_tbl WHERE q2 <> 456
 struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
-Casting 4567890123456789L to INT causes overflow. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Casting 4567890123456789L to "INT" causes overflow. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
@@ -636,7 +636,7 @@ SELECT CAST(q1 AS smallint) FROM int8_tbl WHERE q2 <> 456
 struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
-Casting 4567890123456789L to SMALLINT causes overflow. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Casting 4567890123456789L to "SMALLINT" causes overflow. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
@@ -673,7 +673,7 @@ SELECT CAST(double('922337203685477580700.0') AS bigint)
 struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
-Casting 9.223372036854776E20D to BIGINT causes overflow. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Casting 9.223372036854776E20D to "BIGINT" causes overflow. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
@@ -745,7 +745,7 @@ SELECT string(int(shiftleft(bigint(-1), 63))+1)
 struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
-Casting -9223372036854775808L to INT causes overflow. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Casting -9223372036854775808L to "INT" causes overflow. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/text.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/text.sql.out
index cff6bf28040..b080e2885a9 100755
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/text.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/text.sql.out
@@ -65,7 +65,7 @@ select string('four: ') || 2+2
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-Invalid input syntax for type BIGINT: 'four: 2'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type "BIGINT": 'four: 2'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 == SQL(line 1, position 7) ==
 select string('four: ') || 2+2
        ^^^^^^^^^^^^^^^^^^^^^^^
@@ -77,7 +77,7 @@ select 'four: ' || 2+2
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-Invalid input syntax for type BIGINT: 'four: 2'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type "BIGINT": 'four: 2'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 == SQL(line 1, position 7) ==
 select 'four: ' || 2+2
        ^^^^^^^^^^^^^^^
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part2.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part2.sql.out
index c48d92a9900..ab3b9b5dc09 100644
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part2.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part2.sql.out
@@ -462,7 +462,7 @@ window w as (order by f_numeric range between
 struct<>
 -- !query output
 org.apache.spark.SparkNumberFormatException
-Invalid input syntax for type INT: 'NaN'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type "INT": 'NaN'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 == SQL(line 3, position 12) ==
 window w as (order by f_numeric range between
             ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part3.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part3.sql.out
index b5281d4c605..1d44bb22a82 100644
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part3.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part3.sql.out
@@ -72,7 +72,7 @@ insert into datetimes values
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-failed to evaluate expression CAST('11:00 BST' AS TIMESTAMP): Invalid input syntax for type TIMESTAMP: '11:00 BST'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+failed to evaluate expression CAST('11:00 BST' AS TIMESTAMP): Invalid input syntax for type "TIMESTAMP": '11:00 BST'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 == SQL(line 2, position 23) ==
 (1, timestamp '11:00', cast ('11:00 BST' as timestamp), cast ('1 year' as timestamp), ...
                        ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part4.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part4.sql.out
index 6beb6fd5958..2c4aee689ed 100644
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part4.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/window_part4.sql.out
@@ -501,7 +501,7 @@ FROM (VALUES(1,1),(2,2),(3,(cast('nan' as int))),(4,3),(5,4)) t(a,b)
 struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
-failed to evaluate expression CAST('nan' AS INT): Invalid input syntax for type INT: 'nan'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+failed to evaluate expression CAST('nan' AS INT): Invalid input syntax for type "INT": 'nan'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 == SQL(line 3, position 28) ==
 FROM (VALUES(1,1),(2,2),(3,(cast('nan' as int))),(4,3),(5,4)) t(a,b)
                             ^^^^^^^^^^^^^^^^^^
diff --git a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
index c09a7a1811c..10e1cb7eeed 100644
--- a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
@@ -332,7 +332,7 @@ select to_timestamp(1)
 struct<>
 -- !query output
 java.time.DateTimeException
-Invalid input syntax for type TIMESTAMP_NTZ: '1'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
+Invalid input syntax for type "TIMESTAMP_NTZ": '1'. To return NULL instead, use 'try_cast'. If necessary set spark.sql.ansi.enabled to false to bypass this error.
 
 
 -- !query
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DatasetSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DatasetSuite.scala
index f3dccc224a7..8586c5f3266 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DatasetSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DatasetSuite.scala
@@ -1951,7 +1951,7 @@ class DatasetSuite extends QueryTest
         .map(b => b - 1)
         .collect()
     }
-    assert(thrownException.message.contains("Cannot up cast id from BIGINT to TINYINT"))
+    assert(thrownException.message.contains("""Cannot up cast id from "BIGINT" to "TINYINT""""))
   }
 
   test("SPARK-26690: checkpoints should be executed with an execution id") {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLInsertTestSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLInsertTestSuite.scala
index 4023e6cbed0..0f7cd0c5581 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLInsertTestSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLInsertTestSuite.scala
@@ -302,7 +302,7 @@ trait SQLInsertTestSuite extends QueryTest with SQLTestUtils {
             val errorMsg = intercept[NumberFormatException] {
               sql("insert into t partition(a='ansi') values('ansi')")
             }.getMessage
-            assert(errorMsg.contains("Invalid input syntax for type INT: 'ansi'"))
+            assert(errorMsg.contains("""Invalid input syntax for type "INT": 'ansi'"""))
           } else {
             sql("insert into t partition(a='ansi') values('ansi')")
             checkAnswer(sql("select * from t"), Row("ansi", null) :: Nil)
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala
index 6a7da405fcc..3c9b55a21ea 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala
@@ -37,7 +37,7 @@ class QueryCompilationErrorsSuite extends QueryTest with SharedSparkSession {
     }.message
     assert(msg1 ===
       s"""
-         |Cannot up cast b from BIGINT to INT.
+         |Cannot up cast b from "BIGINT" to "INT".
          |The type path of the target object is:
          |- field (class: "scala.Int", name: "b")
          |- root class: "org.apache.spark.sql.errors.StringIntClass"
@@ -51,7 +51,7 @@ class QueryCompilationErrorsSuite extends QueryTest with SharedSparkSession {
     }.message
     assert(msg2 ===
       s"""
-         |Cannot up cast b.`b` from DECIMAL(38,18) to BIGINT.
+         |Cannot up cast b.`b` from "DECIMAL(38,18)" to "BIGINT".
          |The type path of the target object is:
          |- field (class: "scala.Long", name: "b")
          |- field (class: "org.apache.spark.sql.errors.StringLongClass", name: "b")
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
index 8b5699dd2b1..323b6d375db 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
@@ -142,7 +142,7 @@ class QueryExecutionErrorsSuite extends QueryTest
         .collect()
     }
     assert(e2.getMessage === "The feature is not supported: pivoting by the value" +
-      """ '[dotnet,Dummies]' of the column data type STRUCT<col1: STRING, training: STRING>.""")
+      """ '[dotnet,Dummies]' of the column data type "STRUCT<col1: STRING, training: STRING>".""")
   }
 
   test("UNSUPPORTED_FEATURE: unsupported pivot operations") {
@@ -236,7 +236,8 @@ class QueryExecutionErrorsSuite extends QueryTest
 
     assert(e.getErrorClass === "UNSUPPORTED_OPERATION")
     assert(e.getMessage === "The operation is not supported: " +
-      "TIMESTAMP must supply timeZoneId parameter while converting to the 
arrow timestamp type.")
+      "\"TIMESTAMP\" must supply timeZoneId parameter " +
+      "while converting to the arrow timestamp type.")
   }
 
   test("UNSUPPORTED_OPERATION - SPARK-36346: can't read Timestamp as 
TimestampNTZ") {
@@ -249,7 +250,7 @@ class QueryExecutionErrorsSuite extends QueryTest
 
         assert(e.getErrorClass === "UNSUPPORTED_OPERATION")
         assert(e.getMessage === "The operation is not supported: " +
-          "Unable to convert TIMESTAMP of Orc to data type TIMESTAMP_NTZ.")
+          "Unable to convert \"TIMESTAMP\" of Orc to data type 
\"TIMESTAMP_NTZ\".")
       }
     }
   }
@@ -264,7 +265,7 @@ class QueryExecutionErrorsSuite extends QueryTest
 
         assert(e.getErrorClass === "UNSUPPORTED_OPERATION")
         assert(e.getMessage === "The operation is not supported: " +
-          "Unable to convert TIMESTAMP_NTZ of Orc to data type TIMESTAMP.")
+          "Unable to convert \"TIMESTAMP_NTZ\" of Orc to data type 
\"TIMESTAMP\".")
       }
     }
   }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/sources/InsertSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/sources/InsertSuite.scala
index 52d03e6f956..9e0f63cd8ef 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/sources/InsertSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/sources/InsertSuite.scala
@@ -713,13 +713,13 @@ class InsertSuite extends DataSourceTest with SharedSparkSession {
         var msg = intercept[SparkException] {
           sql(s"insert into t values($outOfRangeValue1)")
         }.getCause.getMessage
-        assert(msg.contains(s"Casting ${outOfRangeValue1}L to INT causes overflow"))
+        assert(msg.contains(s"""Casting ${outOfRangeValue1}L to "INT" causes overflow"""))
 
         val outOfRangeValue2 = (Int.MinValue - 1L).toString
         msg = intercept[SparkException] {
           sql(s"insert into t values($outOfRangeValue2)")
         }.getCause.getMessage
-        assert(msg.contains(s"Casting ${outOfRangeValue2}L to INT causes overflow"))
+        assert(msg.contains(s"""Casting ${outOfRangeValue2}L to "INT" causes overflow"""))
       }
     }
   }
@@ -733,13 +733,13 @@ class InsertSuite extends DataSourceTest with SharedSparkSession {
         var msg = intercept[SparkException] {
           sql(s"insert into t values(${outOfRangeValue1}D)")
         }.getCause.getMessage
-        assert(msg.contains(s"Casting ${outOfRangeValue1}D to BIGINT causes overflow"))
+        assert(msg.contains(s"""Casting ${outOfRangeValue1}D to "BIGINT" causes overflow"""))
 
         val outOfRangeValue2 = Math.nextDown(Long.MinValue)
         msg = intercept[SparkException] {
           sql(s"insert into t values(${outOfRangeValue2}D)")
         }.getCause.getMessage
-        assert(msg.contains(s"Casting ${outOfRangeValue2}D to BIGINT causes overflow"))
+        assert(msg.contains(s"""Casting ${outOfRangeValue2}D to "BIGINT" causes overflow"""))
       }
     }
   }


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
