This is an automated email from the ASF dual-hosted git repository.

gengliang pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new c34baebb36d4 [SPARK-47719][SQL] Change spark.sql.legacy.timeParserPolicy default to CORRECTED
c34baebb36d4 is described below

commit c34baebb36d4e4c8895085b3114da8dc07165469
Author: Serge Rielau <se...@rielau.com>
AuthorDate: Fri Apr 5 11:35:38 2024 -0700

    [SPARK-47719][SQL] Change spark.sql.legacy.timeParserPolicy default to CORRECTED
    
    ### What changes were proposed in this pull request?
    
    We changed the time parser policy in Spark 3.0.0.
    The config has since defaulted to raising an exception if there is a potential conflict between the legacy and the new policy.
    Spark 4.0.0 is a good time to switch the default to the new policy.
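
    For example, here is a minimal spark-shell sketch of the behavior under the new default (it mirrors the golden-file updates in this patch; illustrative, not exhaustive):

    ```scala
    // ANSI mode off: an ambiguous pattern/value now parses to NULL
    // instead of throwing SparkUpgradeException.
    spark.sql("SET spark.sql.ansi.enabled=false")
    spark.sql("SELECT to_timestamp('1', 'yy')").show()   // -> NULL

    // ANSI mode on: the same query raises CANNOT_PARSE_TIMESTAMP
    // (SparkDateTimeException) instead of the old
    // INCONSISTENT_BEHAVIOR_CROSS_VERSION upgrade error.
    spark.sql("SET spark.sql.ansi.enabled=true")
    // spark.sql("SELECT to_timestamp('1', 'yy')").show()   // throws

    // The pre-4.0 check can be restored explicitly:
    spark.sql("SET spark.sql.legacy.timeParserPolicy=EXCEPTION")
    ```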
    
    ### Why are the changes needed?
    
    Move the product forward and retire legacy behavior over time.
    
    ### Does this PR introduce _any_ user-facing change?
    
    Yes. The default value of `spark.sql.legacy.timeParserPolicy` changes from `EXCEPTION` to `CORRECTED`; the new behavior is documented in the migration guide entry added by this patch.
    
    ### How was this patch tested?
    
    Ran existing unit tests and verified the changes; suites that still assert the legacy EXCEPTION behavior now pin it explicitly, as sketched below.
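
    The recurring pattern in this patch, for suites that still assert the old upgrade error, is to pin the legacy policy for the scope of the test. A sketch under that assumption (identifiers mirror the touched suites):

    ```scala
    // Pin the policy back to EXCEPTION so the upgrade error is still raised
    // and can be asserted; under the new CORRECTED default it would not be.
    withSQLConf(SQLConf.LEGACY_TIME_PARSER_POLICY.key -> "EXCEPTION") {
      intercept[SparkUpgradeException] {
        TimestampFormatter("yy", getZoneId("UTC"), isParsing = true).parse("1")
      }
    }
    ```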
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No
    
    Closes #45859 from srielau/SPARK-47719-parser-policy-default-to-corrected.
    
    Lead-authored-by: Serge Rielau <se...@rielau.com>
    Co-authored-by: Wenchen Fan <cloud0...@gmail.com>
    Signed-off-by: Gengliang Wang <gengli...@apache.org>
---
 .../org/apache/spark/sql/ClientE2ETestSuite.scala  |  4 +-
 docs/sql-migration-guide.md                        |  2 +
 .../sql/tests/connect/test_connect_session.py      |  1 +
 .../org/apache/spark/sql/internal/SqlApiConf.scala |  2 +-
 .../org/apache/spark/sql/internal/SQLConf.scala    |  6 +-
 .../sql/catalyst/util/DateFormatterSuite.scala     |  2 +-
 .../sql/catalyst/util/DatetimeFormatterSuite.scala | 59 +++++++--------
 .../catalyst/util/TimestampFormatterSuite.scala    | 36 +++++-----
 .../results/ansi/datetime-parsing-invalid.sql.out  | 72 +++++++++----------
 .../results/datetime-parsing-invalid.sql.out       | 84 ++++------------------
 .../sql-tests/results/json-functions.sql.out       | 24 ++-----
 .../sql-tests/results/xml-functions.sql.out        | 24 ++-----
 12 files changed, 115 insertions(+), 201 deletions(-)

diff --git a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/ClientE2ETestSuite.scala b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/ClientE2ETestSuite.scala
index f2f1571452c0..95ee69d2a47d 100644
--- a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/ClientE2ETestSuite.scala
+++ b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/ClientE2ETestSuite.scala
@@ -74,7 +74,9 @@ class ClientE2ETestSuite extends RemoteSparkSession with SQLHelper with PrivateM
 
   for (enrichErrorEnabled <- Seq(false, true)) {
     test(s"cause exception - ${enrichErrorEnabled}") {
-      withSQLConf("spark.sql.connect.enrichError.enabled" -> enrichErrorEnabled.toString) {
+      withSQLConf(
+        "spark.sql.connect.enrichError.enabled" -> enrichErrorEnabled.toString,
+        "spark.sql.legacy.timeParserPolicy" -> "EXCEPTION") {
         val ex = intercept[SparkUpgradeException] {
           spark
             .sql("""
diff --git a/docs/sql-migration-guide.md b/docs/sql-migration-guide.md
index 13d6702c4cf9..019728a45f40 100644
--- a/docs/sql-migration-guide.md
+++ b/docs/sql-migration-guide.md
@@ -46,6 +46,8 @@ license: |
 - Since Spark 4.0, MySQL JDBC datasource will read FLOAT as FloatType, while in Spark 3.5 and previous, it was read as DoubleType. To restore the previous behavior, you can cast the column to the old type.
 - Since Spark 4.0, MySQL JDBC datasource will read BIT(n > 1) as BinaryType, while in Spark 3.5 and previous, read as LongType. To restore the previous behavior, set `spark.sql.legacy.mysql.bitArrayMapping.enabled` to `true`.
 - Since Spark 4.0, MySQL JDBC datasource will write ShortType as SMALLINT, while in Spark 3.5 and previous, write as INTEGER. To restore the previous behavior, you can replace the column with IntegerType whenever before writing.
+- Since Spark 4.0, the default value for `spark.sql.legacy.ctePrecedencePolicy` has been changed from `EXCEPTION` to `CORRECTED`. Instead of raising an error, inner CTE definitions take precedence over outer definitions.
+- Since Spark 4.0, the default value for `spark.sql.legacy.timeParserPolicy` has been changed from `EXCEPTION` to `CORRECTED`. Instead of raising an `INCONSISTENT_BEHAVIOR_CROSS_VERSION` error, `CANNOT_PARSE_TIMESTAMP` will be raised if ANSI mode is enabled, and `NULL` will be returned if ANSI mode is disabled. See [Datetime Patterns for Formatting and Parsing](sql-ref-datetime-pattern.html).
 
 ## Upgrading from Spark SQL 3.5.1 to 3.5.2
 
diff --git a/python/pyspark/sql/tests/connect/test_connect_session.py b/python/pyspark/sql/tests/connect/test_connect_session.py
index b73a56340984..186580046ef0 100644
--- a/python/pyspark/sql/tests/connect/test_connect_session.py
+++ b/python/pyspark/sql/tests/connect/test_connect_session.py
@@ -130,6 +130,7 @@ class SparkConnectSessionTests(ReusedConnectTestCase):
             {
                 "spark.sql.connect.enrichError.enabled": True,
                 "spark.sql.pyspark.jvmStacktrace.enabled": False,
+                "spark.sql.legacy.timeParserPolicy": "EXCEPTION",
             }
         ):
            with self.sql_conf({"spark.sql.connect.serverStacktrace.enabled": False}):
diff --git a/sql/api/src/main/scala/org/apache/spark/sql/internal/SqlApiConf.scala b/sql/api/src/main/scala/org/apache/spark/sql/internal/SqlApiConf.scala
index 99f7f2a11f2e..813e8228a511 100644
--- a/sql/api/src/main/scala/org/apache/spark/sql/internal/SqlApiConf.scala
+++ b/sql/api/src/main/scala/org/apache/spark/sql/internal/SqlApiConf.scala
@@ -79,6 +79,6 @@ private[sql] object DefaultSqlApiConf extends SqlApiConf {
   override def charVarcharAsString: Boolean = false
   override def datetimeJava8ApiEnabled: Boolean = false
   override def sessionLocalTimeZone: String = TimeZone.getDefault.getID
-  override def legacyTimeParserPolicy: LegacyBehaviorPolicy.Value = LegacyBehaviorPolicy.EXCEPTION
+  override def legacyTimeParserPolicy: LegacyBehaviorPolicy.Value = LegacyBehaviorPolicy.CORRECTED
   override def defaultStringType: StringType = StringType
 }
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
index 71256c6c65fc..9f07722528e8 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
@@ -4027,13 +4027,13 @@ object SQLConf {
     .doc("When LEGACY, java.text.SimpleDateFormat is used for formatting and 
parsing " +
       "dates/timestamps in a locale-sensitive manner, which is the approach 
before Spark 3.0. " +
       "When set to CORRECTED, classes from java.time.* packages are used for 
the same purpose. " +
-      "The default value is EXCEPTION, RuntimeException is thrown when we will 
get different " +
-      "results.")
+      "When set to EXCEPTION, RuntimeException is thrown when we will get 
different " +
+      "results. The default is CORRECTED.")
     .version("3.0.0")
     .stringConf
     .transform(_.toUpperCase(Locale.ROOT))
     .checkValues(LegacyBehaviorPolicy.values.map(_.toString))
-    .createWithDefault(LegacyBehaviorPolicy.EXCEPTION.toString)
+    .createWithDefault(LegacyBehaviorPolicy.CORRECTED.toString)
 
   val LEGACY_ARRAY_EXISTS_FOLLOWS_THREE_VALUED_LOGIC =
     buildConf("spark.sql.legacy.followThreeValuedLogicInArrayExists")
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateFormatterSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateFormatterSuite.scala
index 20b1ade81f59..110a63981ba2 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateFormatterSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateFormatterSuite.scala
@@ -185,7 +185,7 @@ class DateFormatterSuite extends DatetimeFormatterSuite {
     val formatter = DateFormatter("MM-dd")
    // The date parser in 2.4 accepts 1970-02-29 and turn it into 1970-03-01, so we should get a
     // SparkUpgradeException here.
-    intercept[SparkUpgradeException](formatter.parse("02-29"))
+    intercept[DateTimeException](formatter.parse("02-29"))
   }
 
   test("SPARK-36418: default parsing w/o pattern") {
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DatetimeFormatterSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DatetimeFormatterSuite.scala
index 63717a125d2e..a60602065d74 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DatetimeFormatterSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DatetimeFormatterSuite.scala
@@ -24,6 +24,7 @@ import org.scalatest.matchers.must.Matchers
import org.apache.spark.{SparkFunSuite, SparkIllegalArgumentException, SparkUpgradeException}
 import org.apache.spark.sql.catalyst.plans.SQLHelper
 import org.apache.spark.sql.catalyst.util.DateTimeTestUtils.{date, UTC}
+import org.apache.spark.sql.internal.SQLConf
 
trait DatetimeFormatterSuite extends SparkFunSuite with SQLHelper with Matchers {
   import DateTimeFormatterHelper._
@@ -99,34 +100,36 @@ trait DatetimeFormatterSuite extends SparkFunSuite with SQLHelper with Matchers
   }
 
   test("SPARK-31939: Fix Parsing day of year when year field pattern is 
missing") {
-    // resolved to queryable LocaleDate or fail directly
-    assertEqual("yyyy-dd-DD", "2020-29-60", date(2020, 2, 29))
-    assertError("yyyy-dd-DD", "2020-02-60",
-      "Field DayOfMonth 29 differs from DayOfMonth 2 derived from 2020-02-29")
-    assertEqual("yyyy-MM-DD", "2020-02-60", date(2020, 2, 29))
-    assertError("yyyy-MM-DD", "2020-03-60",
-      "Field MonthOfYear 2 differs from MonthOfYear 3 derived from 2020-02-29")
-    assertEqual("yyyy-MM-dd-DD", "2020-02-29-60", date(2020, 2, 29))
-    assertError("yyyy-MM-dd-DD", "2020-03-01-60",
-      "Field DayOfYear 61 differs from DayOfYear 60 derived from 2020-03-01")
-    assertEqual("yyyy-DDD", "2020-366", date(2020, 12, 31))
-    assertError("yyyy-DDD", "2019-366",
-      "Invalid date 'DayOfYear 366' as '2019' is not a leap year")
+    withSQLConf(SQLConf.LEGACY_TIME_PARSER_POLICY.key -> "EXCEPTION") {
+      // resolved to queryable LocaleDate or fail directly
+      assertEqual("yyyy-dd-DD", "2020-29-60", date(2020, 2, 29))
+      assertError("yyyy-dd-DD", "2020-02-60",
+        "Field DayOfMonth 29 differs from DayOfMonth 2 derived from 
2020-02-29")
+      assertEqual("yyyy-MM-DD", "2020-02-60", date(2020, 2, 29))
+      assertError("yyyy-MM-DD", "2020-03-60",
+        "Field MonthOfYear 2 differs from MonthOfYear 3 derived from 
2020-02-29")
+      assertEqual("yyyy-MM-dd-DD", "2020-02-29-60", date(2020, 2, 29))
+      assertError("yyyy-MM-dd-DD", "2020-03-01-60",
+        "Field DayOfYear 61 differs from DayOfYear 60 derived from 2020-03-01")
+      assertEqual("yyyy-DDD", "2020-366", date(2020, 12, 31))
+      assertError("yyyy-DDD", "2019-366",
+        "Invalid date 'DayOfYear 366' as '2019' is not a leap year")
 
-    // unresolved and need to check manually(SPARK-31939 fixed)
-    assertEqual("DDD", "365", date(1970, 12, 31))
-    assertError("DDD", "366",
-      "Invalid date 'DayOfYear 366' as '1970' is not a leap year")
-    assertEqual("MM-DD", "03-60", date(1970, 3))
-    assertError("MM-DD", "02-60",
-      "Field MonthOfYear 2 differs from MonthOfYear 3 derived from 1970-03-01")
-    assertEqual("MM-dd-DD", "02-28-59", date(1970, 2, 28))
-    assertError("MM-dd-DD", "02-28-60",
-      "Field MonthOfYear 2 differs from MonthOfYear 3 derived from 1970-03-01")
-    assertError("MM-dd-DD", "02-28-58",
-      "Field DayOfMonth 28 differs from DayOfMonth 27 derived from 1970-02-27")
-    assertEqual("dd-DD", "28-59", date(1970, 2, 28))
-    assertError("dd-DD", "27-59",
-      "Field DayOfMonth 27 differs from DayOfMonth 28 derived from 1970-02-28")
+      // unresolved and need to check manually(SPARK-31939 fixed)
+      assertEqual("DDD", "365", date(1970, 12, 31))
+      assertError("DDD", "366",
+        "Invalid date 'DayOfYear 366' as '1970' is not a leap year")
+      assertEqual("MM-DD", "03-60", date(1970, 3))
+      assertError("MM-DD", "02-60",
+        "Field MonthOfYear 2 differs from MonthOfYear 3 derived from 
1970-03-01")
+      assertEqual("MM-dd-DD", "02-28-59", date(1970, 2, 28))
+      assertError("MM-dd-DD", "02-28-60",
+        "Field MonthOfYear 2 differs from MonthOfYear 3 derived from 
1970-03-01")
+      assertError("MM-dd-DD", "02-28-58",
+        "Field DayOfMonth 28 differs from DayOfMonth 27 derived from 
1970-02-27")
+      assertEqual("dd-DD", "28-59", date(1970, 2, 28))
+      assertError("dd-DD", "27-59",
+        "Field DayOfMonth 27 differs from DayOfMonth 28 derived from 
1970-02-28")
+    }
   }
 }
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/TimestampFormatterSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/TimestampFormatterSuite.scala
index 2bc801fbb288..7eac6e0feef6 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/TimestampFormatterSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/TimestampFormatterSuite.scala
@@ -36,23 +36,25 @@ class TimestampFormatterSuite extends DatetimeFormatterSuite {
   override protected def useDateFormatter: Boolean = false
 
   test("parsing timestamps using time zones") {
-    val localDate = "2018-12-02T10:11:12.001234"
-    val expectedMicros = Map(
-      "UTC" -> 1543745472001234L,
-      PST.getId -> 1543774272001234L,
-      CET.getId -> 1543741872001234L,
-      "Africa/Dakar" -> 1543745472001234L,
-      "America/Los_Angeles" -> 1543774272001234L,
-      "Asia/Urumqi" -> 1543723872001234L,
-      "Asia/Hong_Kong" -> 1543716672001234L,
-      "Europe/Brussels" -> 1543741872001234L)
-    outstandingTimezonesIds.foreach { zoneId =>
-      val formatter = TimestampFormatter(
-        "yyyy-MM-dd'T'HH:mm:ss.SSSSSS",
-        getZoneId(zoneId),
-        isParsing = true)
-      val microsSinceEpoch = formatter.parse(localDate)
-      assert(microsSinceEpoch === expectedMicros(zoneId))
+    withSQLConf(SQLConf.LEGACY_TIME_PARSER_POLICY.key -> "EXCEPTION") {
+      val localDate = "2018-12-02T10:11:12.001234"
+      val expectedMicros = Map(
+        "UTC" -> 1543745472001234L,
+        PST.getId -> 1543774272001234L,
+        CET.getId -> 1543741872001234L,
+        "Africa/Dakar" -> 1543745472001234L,
+        "America/Los_Angeles" -> 1543774272001234L,
+        "Asia/Urumqi" -> 1543723872001234L,
+        "Asia/Hong_Kong" -> 1543716672001234L,
+        "Europe/Brussels" -> 1543741872001234L)
+      outstandingTimezonesIds.foreach { zoneId =>
+        val formatter = TimestampFormatter(
+          "yyyy-MM-dd'T'HH:mm:ss.SSSSSS",
+          getZoneId(zoneId),
+          isParsing = true)
+        val microsSinceEpoch = formatter.parse(localDate)
+        assert(microsSinceEpoch === expectedMicros(zoneId))
+      }
     }
   }
 
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/datetime-parsing-invalid.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/datetime-parsing-invalid.sql.out
index 51530c29f79a..514a0c6ae7d3 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/datetime-parsing-invalid.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/datetime-parsing-invalid.sql.out
@@ -13,13 +13,13 @@ select to_timestamp('1', 'yy')
 -- !query schema
 struct<>
 -- !query output
-org.apache.spark.SparkUpgradeException
+org.apache.spark.SparkDateTimeException
 {
-  "errorClass" : 
"INCONSISTENT_BEHAVIOR_CROSS_VERSION.PARSE_DATETIME_BY_NEW_PARSER",
-  "sqlState" : "42K0B",
+  "errorClass" : "CANNOT_PARSE_TIMESTAMP",
+  "sqlState" : "22007",
   "messageParameters" : {
-    "config" : "\"spark.sql.legacy.timeParserPolicy\"",
-    "datetime" : "'1'"
+    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
+    "message" : "Text '1' could not be parsed at index 0"
   }
 }
 
@@ -45,13 +45,13 @@ select to_timestamp('123', 'yy')
 -- !query schema
 struct<>
 -- !query output
-org.apache.spark.SparkUpgradeException
+org.apache.spark.SparkDateTimeException
 {
-  "errorClass" : 
"INCONSISTENT_BEHAVIOR_CROSS_VERSION.PARSE_DATETIME_BY_NEW_PARSER",
-  "sqlState" : "42K0B",
+  "errorClass" : "CANNOT_PARSE_TIMESTAMP",
+  "sqlState" : "22007",
   "messageParameters" : {
-    "config" : "\"spark.sql.legacy.timeParserPolicy\"",
-    "datetime" : "'123'"
+    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
+    "message" : "Text '123' could not be parsed, unparsed text found at index 
2"
   }
 }
 
@@ -61,13 +61,13 @@ select to_timestamp('1', 'yyy')
 -- !query schema
 struct<>
 -- !query output
-org.apache.spark.SparkUpgradeException
+org.apache.spark.SparkDateTimeException
 {
-  "errorClass" : 
"INCONSISTENT_BEHAVIOR_CROSS_VERSION.PARSE_DATETIME_BY_NEW_PARSER",
-  "sqlState" : "42K0B",
+  "errorClass" : "CANNOT_PARSE_TIMESTAMP",
+  "sqlState" : "22007",
   "messageParameters" : {
-    "config" : "\"spark.sql.legacy.timeParserPolicy\"",
-    "datetime" : "'1'"
+    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
+    "message" : "Text '1' could not be parsed at index 0"
   }
 }
 
@@ -110,13 +110,13 @@ select to_timestamp('9', 'DD')
 -- !query schema
 struct<>
 -- !query output
-org.apache.spark.SparkUpgradeException
+org.apache.spark.SparkDateTimeException
 {
-  "errorClass" : 
"INCONSISTENT_BEHAVIOR_CROSS_VERSION.PARSE_DATETIME_BY_NEW_PARSER",
-  "sqlState" : "42K0B",
+  "errorClass" : "CANNOT_PARSE_TIMESTAMP",
+  "sqlState" : "22007",
   "messageParameters" : {
-    "config" : "\"spark.sql.legacy.timeParserPolicy\"",
-    "datetime" : "'9'"
+    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
+    "message" : "Text '9' could not be parsed at index 0"
   }
 }
 
@@ -142,13 +142,13 @@ select to_timestamp('9', 'DDD')
 -- !query schema
 struct<>
 -- !query output
-org.apache.spark.SparkUpgradeException
+org.apache.spark.SparkDateTimeException
 {
-  "errorClass" : 
"INCONSISTENT_BEHAVIOR_CROSS_VERSION.PARSE_DATETIME_BY_NEW_PARSER",
-  "sqlState" : "42K0B",
+  "errorClass" : "CANNOT_PARSE_TIMESTAMP",
+  "sqlState" : "22007",
   "messageParameters" : {
-    "config" : "\"spark.sql.legacy.timeParserPolicy\"",
-    "datetime" : "'9'"
+    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
+    "message" : "Text '9' could not be parsed at index 0"
   }
 }
 
@@ -158,13 +158,13 @@ select to_timestamp('99', 'DDD')
 -- !query schema
 struct<>
 -- !query output
-org.apache.spark.SparkUpgradeException
+org.apache.spark.SparkDateTimeException
 {
-  "errorClass" : 
"INCONSISTENT_BEHAVIOR_CROSS_VERSION.PARSE_DATETIME_BY_NEW_PARSER",
-  "sqlState" : "42K0B",
+  "errorClass" : "CANNOT_PARSE_TIMESTAMP",
+  "sqlState" : "22007",
   "messageParameters" : {
-    "config" : "\"spark.sql.legacy.timeParserPolicy\"",
-    "datetime" : "'99'"
+    "ansiConfig" : "\"spark.sql.ansi.enabled\"",
+    "message" : "Text '99' could not be parsed at index 0"
   }
 }
 
@@ -284,17 +284,9 @@ org.apache.spark.SparkDateTimeException
 -- !query
 select from_csv('2018-366', 'date Date', map('dateFormat', 'yyyy-DDD'))
 -- !query schema
-struct<>
+struct<from_csv(2018-366):struct<date:date>>
 -- !query output
-org.apache.spark.SparkUpgradeException
-{
-  "errorClass" : 
"INCONSISTENT_BEHAVIOR_CROSS_VERSION.PARSE_DATETIME_BY_NEW_PARSER",
-  "sqlState" : "42K0B",
-  "messageParameters" : {
-    "config" : "\"spark.sql.legacy.timeParserPolicy\"",
-    "datetime" : "'2018-366'"
-  }
-}
+{"date":null}
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/datetime-parsing-invalid.sql.out b/sql/core/src/test/resources/sql-tests/results/datetime-parsing-invalid.sql.out
index 6deb3e0fe54e..fffbb2a4e017 100644
--- a/sql/core/src/test/resources/sql-tests/results/datetime-parsing-invalid.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/datetime-parsing-invalid.sql.out
@@ -11,17 +11,9 @@ long overflow
 -- !query
 select to_timestamp('1', 'yy')
 -- !query schema
-struct<>
+struct<to_timestamp(1, yy):timestamp>
 -- !query output
-org.apache.spark.SparkUpgradeException
-{
-  "errorClass" : 
"INCONSISTENT_BEHAVIOR_CROSS_VERSION.PARSE_DATETIME_BY_NEW_PARSER",
-  "sqlState" : "42K0B",
-  "messageParameters" : {
-    "config" : "\"spark.sql.legacy.timeParserPolicy\"",
-    "datetime" : "'1'"
-  }
-}
+NULL
 
 
 -- !query
@@ -35,33 +27,17 @@ NULL
 -- !query
 select to_timestamp('123', 'yy')
 -- !query schema
-struct<>
+struct<to_timestamp(123, yy):timestamp>
 -- !query output
-org.apache.spark.SparkUpgradeException
-{
-  "errorClass" : 
"INCONSISTENT_BEHAVIOR_CROSS_VERSION.PARSE_DATETIME_BY_NEW_PARSER",
-  "sqlState" : "42K0B",
-  "messageParameters" : {
-    "config" : "\"spark.sql.legacy.timeParserPolicy\"",
-    "datetime" : "'123'"
-  }
-}
+NULL
 
 
 -- !query
 select to_timestamp('1', 'yyy')
 -- !query schema
-struct<>
+struct<to_timestamp(1, yyy):timestamp>
 -- !query output
-org.apache.spark.SparkUpgradeException
-{
-  "errorClass" : 
"INCONSISTENT_BEHAVIOR_CROSS_VERSION.PARSE_DATETIME_BY_NEW_PARSER",
-  "sqlState" : "42K0B",
-  "messageParameters" : {
-    "config" : "\"spark.sql.legacy.timeParserPolicy\"",
-    "datetime" : "'1'"
-  }
-}
+NULL
 
 
 -- !query
@@ -92,17 +68,9 @@ NULL
 -- !query
 select to_timestamp('9', 'DD')
 -- !query schema
-struct<>
+struct<to_timestamp(9, DD):timestamp>
 -- !query output
-org.apache.spark.SparkUpgradeException
-{
-  "errorClass" : 
"INCONSISTENT_BEHAVIOR_CROSS_VERSION.PARSE_DATETIME_BY_NEW_PARSER",
-  "sqlState" : "42K0B",
-  "messageParameters" : {
-    "config" : "\"spark.sql.legacy.timeParserPolicy\"",
-    "datetime" : "'9'"
-  }
-}
+NULL
 
 
 -- !query
@@ -116,33 +84,17 @@ NULL
 -- !query
 select to_timestamp('9', 'DDD')
 -- !query schema
-struct<>
+struct<to_timestamp(9, DDD):timestamp>
 -- !query output
-org.apache.spark.SparkUpgradeException
-{
-  "errorClass" : 
"INCONSISTENT_BEHAVIOR_CROSS_VERSION.PARSE_DATETIME_BY_NEW_PARSER",
-  "sqlState" : "42K0B",
-  "messageParameters" : {
-    "config" : "\"spark.sql.legacy.timeParserPolicy\"",
-    "datetime" : "'9'"
-  }
-}
+NULL
 
 
 -- !query
 select to_timestamp('99', 'DDD')
 -- !query schema
-struct<>
+struct<to_timestamp(99, DDD):timestamp>
 -- !query output
-org.apache.spark.SparkUpgradeException
-{
-  "errorClass" : 
"INCONSISTENT_BEHAVIOR_CROSS_VERSION.PARSE_DATETIME_BY_NEW_PARSER",
-  "sqlState" : "42K0B",
-  "messageParameters" : {
-    "config" : "\"spark.sql.legacy.timeParserPolicy\"",
-    "datetime" : "'99'"
-  }
-}
+NULL
 
 
 -- !query
@@ -204,17 +156,9 @@ NULL
 -- !query
 select from_csv('2018-366', 'date Date', map('dateFormat', 'yyyy-DDD'))
 -- !query schema
-struct<>
+struct<from_csv(2018-366):struct<date:date>>
 -- !query output
-org.apache.spark.SparkUpgradeException
-{
-  "errorClass" : 
"INCONSISTENT_BEHAVIOR_CROSS_VERSION.PARSE_DATETIME_BY_NEW_PARSER",
-  "sqlState" : "42K0B",
-  "messageParameters" : {
-    "config" : "\"spark.sql.legacy.timeParserPolicy\"",
-    "datetime" : "'2018-366'"
-  }
-}
+{"date":null}
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/json-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/json-functions.sql.out
index 2232e32077e2..06b8ed88ff88 100644
--- a/sql/core/src/test/resources/sql-tests/results/json-functions.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/json-functions.sql.out
@@ -425,17 +425,9 @@ select from_json(
   'd date',
   map('dateFormat', 'MM-dd'))
 -- !query schema
-struct<>
+struct<from_json({"d": "02-29"}):struct<d:date>>
 -- !query output
-org.apache.spark.SparkUpgradeException
-{
-  "errorClass" : 
"INCONSISTENT_BEHAVIOR_CROSS_VERSION.PARSE_DATETIME_BY_NEW_PARSER",
-  "sqlState" : "42K0B",
-  "messageParameters" : {
-    "config" : "\"spark.sql.legacy.timeParserPolicy\"",
-    "datetime" : "'02-29'"
-  }
-}
+{"d":null}
 
 
 -- !query
@@ -444,17 +436,9 @@ select from_json(
   't timestamp',
   map('timestampFormat', 'MM-dd'))
 -- !query schema
-struct<>
+struct<from_json({"t": "02-29"}):struct<t:timestamp>>
 -- !query output
-org.apache.spark.SparkUpgradeException
-{
-  "errorClass" : 
"INCONSISTENT_BEHAVIOR_CROSS_VERSION.PARSE_DATETIME_BY_NEW_PARSER",
-  "sqlState" : "42K0B",
-  "messageParameters" : {
-    "config" : "\"spark.sql.legacy.timeParserPolicy\"",
-    "datetime" : "'02-29'"
-  }
-}
+{"t":null}
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/xml-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/xml-functions.sql.out
index b03b7d2e70c7..60531b2bfd50 100644
--- a/sql/core/src/test/resources/sql-tests/results/xml-functions.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/xml-functions.sql.out
@@ -357,17 +357,9 @@ select from_xml(
   'd date',
   map('dateFormat', 'MM-dd'))
 -- !query schema
-struct<>
+struct<from_xml(<p><d>02-29</d></p>):struct<d:date>>
 -- !query output
-org.apache.spark.SparkUpgradeException
-{
-  "errorClass" : 
"INCONSISTENT_BEHAVIOR_CROSS_VERSION.PARSE_DATETIME_BY_NEW_PARSER",
-  "sqlState" : "42K0B",
-  "messageParameters" : {
-    "config" : "\"spark.sql.legacy.timeParserPolicy\"",
-    "datetime" : "'02-29'"
-  }
-}
+{"d":null}
 
 
 -- !query
@@ -376,17 +368,9 @@ select from_xml(
   't timestamp',
   map('timestampFormat', 'MM-dd'))
 -- !query schema
-struct<>
+struct<from_xml(<p><t>02-29</t></p>):struct<t:timestamp>>
 -- !query output
-org.apache.spark.SparkUpgradeException
-{
-  "errorClass" : 
"INCONSISTENT_BEHAVIOR_CROSS_VERSION.PARSE_DATETIME_BY_NEW_PARSER",
-  "sqlState" : "42K0B",
-  "messageParameters" : {
-    "config" : "\"spark.sql.legacy.timeParserPolicy\"",
-    "datetime" : "'02-29'"
-  }
-}
+{"t":null}
 
 
 -- !query

