This is an automated email from the ASF dual-hosted git repository. zabetak pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/hive.git
The following commit(s) were added to refs/heads/master by this push: new 1c126d94744 HIVE-27772: UNIX_TIMESTAMP should return NULL when date fields are out of bounds (Simhadri Govindappa reviewed by Stamatis Zampetakis) 1c126d94744 is described below commit 1c126d947448ffc9784a1465306e018ba183a014 Author: SimhadriG <simhadri...@gmail.com> AuthorDate: Thu Oct 5 14:31:12 2023 +0530 HIVE-27772: UNIX_TIMESTAMP should return NULL when date fields are out of bounds (Simhadri Govindappa reviewed by Stamatis Zampetakis) In the case of invalid dates, such as '2001-02-31' (day field exceeds valid range for the given month), the UNIX_TIMESTAMP function behaves unexpectedly. Instead of returning NULL (as it happens in other systems like Spark, MySQL, etc.), it provides a value corresponding to another valid date based on some resolution rules (e.g., Feb 28th or March 1st). The resolution rules and results depend on the underlying formatter implementation used by UNIX_TIMESTAMP. By default, the DATETIME formatter uses the SMART resolution style and the SIMPLE formatter the LENIENT. Both of these styles are able to resolve "invalid" bounds to valid dates. In order to prevent seemingly "invalid" dates from being parsed correctly we have to use the STRICT resolution style. However, we cannot simply switch the formatters to always use the STRICT resolution because that would break existing applications relying on the existing resolution rules. To address the problem reported here and retain the previous behaviour we opted to make the resolution style configurable by adding a new property. The new property only affects the DATETIME formatter; the SIMPLE formatter is almost deprecated so we don't add new features to it. 
Close apache/hive#4777 --- .../java/org/apache/hadoop/hive/conf/HiveConf.java | 4 + .../hadoop/hive/conf/TestHiveConfVarsValidate.java | 9 ++ .../ql/udf/generic/InstantDateTimeFormatter.java | 6 +- .../hive/ql/udf/generic/InstantFormatter.java | 16 ++- ...ericUDFToUnixTimestampEvaluateStringString.java | 30 +++-- ...nericUDFToUnixTimestampEvaluateStringString.csv | 140 +++++++++++++-------- 6 files changed, 135 insertions(+), 70 deletions(-) diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java index 5e9c8425ddf..290cd8a4efa 100644 --- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java +++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java @@ -3860,6 +3860,10 @@ public class HiveConf extends Configuration { "is discouraged. It suffers from known bugs that are unlikely to be fixed in subsequent versions of the product." + "Furthermore, using SIMPLE formatter may lead to strange behavior, and unexpected results when combined " + "with SQL functions/operators that are using the new DATETIME formatter."), + HIVE_DATETIME_RESOLVER_STYLE("hive.datetime.formatter.resolver.style", "SMART", + new StringSet("SMART", "STRICT", "LENIENT"), + "The style used by the hive.datetime.formatter (only applicable to DATETIME) to resolve dates and times." 
+ + "The possible values are STRICT, SMART, and LENIENT and their behavior follows the java.time.format.ResolverStyle API."), // HiveServer2 specific configs HIVE_SERVER2_CLEAR_DANGLING_SCRATCH_DIR("hive.server2.clear.dangling.scratchdir", false, "Clear dangling scratch dir periodically in HS2"), diff --git a/common/src/test/org/apache/hadoop/hive/conf/TestHiveConfVarsValidate.java b/common/src/test/org/apache/hadoop/hive/conf/TestHiveConfVarsValidate.java index 7ac44588c08..42736ddb3d6 100644 --- a/common/src/test/org/apache/hadoop/hive/conf/TestHiveConfVarsValidate.java +++ b/common/src/test/org/apache/hadoop/hive/conf/TestHiveConfVarsValidate.java @@ -26,6 +26,7 @@ import java.util.List; import static org.apache.hadoop.hive.conf.HiveConf.ConfVars; import static org.apache.hadoop.hive.conf.HiveConf.ConfVars.HIVE_DATETIME_FORMATTER; +import static org.apache.hadoop.hive.conf.HiveConf.ConfVars.HIVE_DATETIME_RESOLVER_STYLE; import static org.apache.hadoop.hive.conf.HiveConf.ConfVars.HIVE_EXPLAIN_NODE_VISIT_LIMIT; import static org.junit.Assert.assertEquals; @@ -62,6 +63,14 @@ public class TestHiveConfVarsValidate { list.add(new Object[] { HIVE_DATETIME_FORMATTER, "simple", null }); list.add(new Object[] { HIVE_DATETIME_FORMATTER, "dateTime", null }); list.add(new Object[] { HIVE_DATETIME_FORMATTER, "OTHER", "Invalid value.. expects one of [datetime, simple]" }); + list.add(new Object[] { HIVE_DATETIME_RESOLVER_STYLE, "SMART", null}); + list.add(new Object[] { HIVE_DATETIME_RESOLVER_STYLE, "STRICT", null}); + list.add(new Object[] { HIVE_DATETIME_RESOLVER_STYLE, "LENIENT", null}); + list.add(new Object[] { HIVE_DATETIME_RESOLVER_STYLE, "smart", null}); + list.add(new Object[] { HIVE_DATETIME_RESOLVER_STYLE, "strict", null}); + list.add(new Object[] { HIVE_DATETIME_RESOLVER_STYLE, "lenient", null}); + list.add(new Object[] { HIVE_DATETIME_RESOLVER_STYLE, "OTHER", "Invalid value.. 
expects one of [smart, strict, " + + "lenient]" }); return list; } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/InstantDateTimeFormatter.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/InstantDateTimeFormatter.java index 67ca27e5773..d2f5e160d3c 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/InstantDateTimeFormatter.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/InstantDateTimeFormatter.java @@ -27,13 +27,15 @@ import java.time.ZoneId; import java.time.ZonedDateTime; import java.time.format.DateTimeFormatter; import java.time.format.DateTimeFormatterBuilder; +import java.time.format.ResolverStyle; import java.util.Objects; final class InstantDateTimeFormatter extends InstantFormatterCache<DateTimeFormatter> { - InstantDateTimeFormatter(final ZoneId zoneId) { + InstantDateTimeFormatter(final ZoneId zoneId, ResolverStyle resolverStyle) { super(zoneId, - s -> new DateTimeFormatterBuilder().parseCaseInsensitive().appendPattern(s).toFormatter().withZone(zoneId)); + s -> new DateTimeFormatterBuilder().parseCaseInsensitive() + .appendPattern(s).toFormatter().withResolverStyle(resolverStyle).withZone(zoneId)); } @Override diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/InstantFormatter.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/InstantFormatter.java index 382a10089dd..9828bef07b7 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/InstantFormatter.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/InstantFormatter.java @@ -22,6 +22,7 @@ import org.apache.hadoop.hive.conf.HiveConf; import java.time.Instant; import java.time.ZoneId; +import java.time.format.ResolverStyle; /** * Formatter for parsing and printing {@link Instant} objects. 
@@ -47,7 +48,7 @@ public interface InstantFormatter { */ SIMPLE { @Override - InstantFormatter newFormatter(ZoneId zone) { + InstantFormatter newFormatter(ZoneId zone, ResolverStyle resolverStyle) { return new InstantSimpleDateFormatter(zone); } }, @@ -56,16 +57,17 @@ public interface InstantFormatter { */ DATETIME { @Override - InstantFormatter newFormatter(ZoneId zone) { - return new InstantDateTimeFormatter(zone); + InstantFormatter newFormatter(ZoneId zone, ResolverStyle resolverStyle) { + return new InstantDateTimeFormatter(zone, resolverStyle); } }; /** * Creates a new formatter with the specified zone id. - * @param zone - the zone id + * @param zone - the zone id + * @param resolverStyle - The style is used to control how the input is resolved. * @return a new formatter with the specified zone id. */ - abstract InstantFormatter newFormatter(ZoneId zone); + abstract InstantFormatter newFormatter(ZoneId zone, ResolverStyle resolverStyle); } /** @@ -77,7 +79,9 @@ public interface InstantFormatter { static InstantFormatter ofConfiguration(Configuration conf) { ZoneId zoneId = TimestampTZUtil.parseTimeZone(HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_LOCAL_TIME_ZONE)); Type type = Type.valueOf(HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_DATETIME_FORMATTER).toUpperCase()); - return type.newFormatter(zoneId); + ResolverStyle resolverStyle = ResolverStyle.valueOf(HiveConf.getVar(conf, + HiveConf.ConfVars.HIVE_DATETIME_RESOLVER_STYLE).toUpperCase()); + return type.newFormatter(zoneId, resolverStyle); } /** diff --git a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFToUnixTimestampEvaluateStringString.java b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFToUnixTimestampEvaluateStringString.java index e51b3910458..bda6a171b16 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFToUnixTimestampEvaluateStringString.java +++ 
b/ql/src/test/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFToUnixTimestampEvaluateStringString.java @@ -51,24 +51,27 @@ import static org.junit.Assert.assertEquals; @RunWith(Parameterized.class) public class TestGenericUDFToUnixTimestampEvaluateStringString { private final GenericUDFToUnixTimeStamp udf = new GenericUDFToUnixTimeStamp(); + private final GenericUDFUnixTimeStamp udfUnixTimeStamp = new GenericUDFUnixTimeStamp(); private final ObjectInspector[] argInspectors = new ObjectInspector[2]; private final String value; private final String pattern; private final String zone; private final String formatter; + private final String resolverStyle; private final LongWritable expectedResult; public TestGenericUDFToUnixTimestampEvaluateStringString(String value, String pattern, String zone, String formatter, - String expectedResult) { + String resolverStyle, String expectedResult) { this.value = value; this.pattern = pattern; this.zone = zone; this.formatter = formatter; + this.resolverStyle = resolverStyle; this.expectedResult = expectedResult.equals("null") ? 
null : new LongWritable(Long.parseLong(expectedResult)); Arrays.fill(argInspectors, PrimitiveObjectInspectorFactory.writableStringObjectInspector); } - @Parameterized.Parameters(name = "('{0}','{1}'), zone={2}, parserLegacy={3}") + @Parameterized.Parameters(name = "('{0}','{1}'), zone={2}, parserLegacy={3}, resolverStyle={4}") public static Collection<String[]> readInputs() throws IOException, CsvException { CSVParser parser = new CSVParserBuilder().withSeparator(';').withIgnoreQuotations(true).build(); try (CSVReader reader = new CSVReaderBuilder(new InputStreamReader( @@ -79,19 +82,30 @@ public class TestGenericUDFToUnixTimestampEvaluateStringString { } @Test - public void testEvaluate() throws HiveException, InterruptedException { + public void testEvaluateToUnixTimeStamp() throws HiveException, InterruptedException { + testEvaluateWithUDF(udf); + } + + @Test + public void testEvaluateUnixTimeStamp() throws HiveException, InterruptedException { + testEvaluateWithUDF(udfUnixTimeStamp); + } + + private void testEvaluateWithUDF(GenericUDF udfToTest) throws HiveException, InterruptedException { HiveConf conf = new HiveConf(); conf.setVar(HiveConf.ConfVars.HIVE_DATETIME_FORMATTER, formatter); conf.setVar(HiveConf.ConfVars.HIVE_LOCAL_TIME_ZONE, zone); + conf.setVar(HiveConf.ConfVars.HIVE_DATETIME_RESOLVER_STYLE, resolverStyle); SessionState state = SessionState.start(conf); - udf.initialize(argInspectors); - LongWritable result = (LongWritable) udf.evaluate( + udfToTest.initialize(argInspectors); + LongWritable result = (LongWritable) udfToTest.evaluate( new DeferredObject[] { new DeferredJavaObject(new Text(value)), new DeferredJavaObject(new Text(pattern)) }); - assertEquals(udfDisplayWithInputs(), expectedResult, result); + assertEquals(udfDisplayWithInputs(udfToTest), expectedResult, result); SessionState.endStart(state); } - private String udfDisplayWithInputs() { - return udf.getDisplayString(new String[] { value, pattern }) + " sessionZone=" + zone + ", 
formatter=" + formatter; + private String udfDisplayWithInputs(GenericUDF udf) { + return udf.getDisplayString(new String[] { value, pattern }) + " sessionZone=" + zone + ", formatter=" + formatter + + ", resolver Style=" + resolverStyle; } } diff --git a/ql/src/test/resources/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFToUnixTimestampEvaluateStringString.csv b/ql/src/test/resources/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFToUnixTimestampEvaluateStringString.csv index ff4ee0725df..b3e34017245 100644 --- a/ql/src/test/resources/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFToUnixTimestampEvaluateStringString.csv +++ b/ql/src/test/resources/org/apache/hadoop/hive/ql/udf/generic/TestGenericUDFToUnixTimestampEvaluateStringString.csv @@ -1,54 +1,86 @@ -1970-01-01 00:00:00;yyyy-MM-dd HH:mm:ss;Etc/GMT;DATETIME;0 -1970-01-01 00:00:00;yyyy-MM-dd HH:mm:ss;Etc/GMT;SIMPLE;0 -1970-01-01 00:00:00;yyyy-MM-dd HH:mm:ss;Atlantic/Azores;DATETIME;3600 -1970-01-01 00:00:00;yyyy-MM-dd HH:mm:ss;Atlantic/Azores;SIMPLE;3600 -1970-01-01 00:00:00;yyyy-MM-dd HH:mm:ss;Europe/Paris;DATETIME;-3600 -1970-01-01 00:00:00;yyyy-MM-dd HH:mm:ss;Europe/Paris;SIMPLE;-3600 -1970-01-01 00:00:00 GMT;yyyy-MM-dd HH:mm:ss z;Etc/GMT;DATETIME;0 -1970-01-01 00:00:00 GMT;yyyy-MM-dd HH:mm:ss z;Etc/GMT;SIMPLE;0 -1970-01-01 00:00:00 GMT-01:00;yyyy-MM-dd HH:mm:ss z;Etc/GMT;DATETIME;3600 -1970-01-01 00:00:00 GMT-01:00;yyyy-MM-dd HH:mm:ss z;Etc/GMT;SIMPLE;3600 -1970-01-01 00:00:00 GMT+01:00;yyyy-MM-dd HH:mm:ss z;Etc/GMT;DATETIME;-3600 -1970-01-01 00:00:00 GMT+01:00;yyyy-MM-dd HH:mm:ss z;Etc/GMT;SIMPLE;-3600 -1970-01-01 00:00:00 GMT-01:00;yyyy-MM-dd HH:mm:ss z;Europe/Paris;DATETIME;3600 -1970-01-01 00:00:00 GMT-01:00;yyyy-MM-dd HH:mm:ss z;Europe/Paris;SIMPLE;3600 -1800-01-01 00:00:00;yyyy-MM-dd HH:mm:ss;Etc/GMT;DATETIME;-5364662400 -1800-01-01 00:00:00;yyyy-MM-dd HH:mm:ss;Etc/GMT;SIMPLE;-5364662400 -1800-01-01 00:00:00;yyyy-MM-dd HH:mm:ss;Asia/Kolkata;DATETIME;-5364683608 -1800-01-01 
00:00:00;yyyy-MM-dd HH:mm:ss;Asia/Kolkata;SIMPLE;-5364682200 -Jul 9 2023;MMM dd yyyy;Etc/GMT;DATETIME;null -Jul 9 2023;MMM dd yyyy;Etc/GMT;SIMPLE;1688860800 -Jul 09 2023;MMM dd yyyy;Etc/GMT;DATETIME;1688860800 -Jul 09 2023;MMM dd yyyy;Etc/GMT;SIMPLE;1688860800 -Jul 21 2023;MMM dd yyyy;Etc/GMT;DATETIME;1689897600 -Jul 21 2023;MMM dd yyyy;Etc/GMT;SIMPLE;1689897600 -2023-07-21;YYYY-MM-DD;Etc/GMT;DATETIME;null -2023-07-21;YYYY-MM-DD;Etc/GMT;SIMPLE;1672531200 -Jul 21 2023 09:13;MMM dd yyyy HH:mm;Etc/GMT;DATETIME;1689930780 -Jul 21 2023 09:13;MMM dd yyyy HH:mm;Etc/GMT;SIMPLE;1689930780 -Jul 21 2023 9:13;MMM dd yyyy HH:mm;Etc/GMT;DATETIME;null -Jul 21 2023 9:13;MMM dd yyyy HH:mm;Etc/GMT;SIMPLE;1689930780 -2023-07-21 09:13;yyyy-MM-dd HH:mm;Etc/GMT;DATETIME;1689930780 -2023-07-21 09:13;yyyy-MM-dd HH:mm;Etc/GMT;SIMPLE;1689930780 -2023-07-21 9:13;yyyy-MM-dd HH:mm;Etc/GMT;DATETIME;null -2023-07-21 9:13;yyyy-MM-dd HH:mm;Etc/GMT;SIMPLE;1689930780 -2023-07-21 9:13PM;yyyy-MM-dd h:mma;Etc/GMT;DATETIME;1689973980 -2023-07-21 9:13PM;yyyy-MM-dd h:mma;Etc/GMT;SIMPLE;1689973980 -2023-07-21 09:13AM;yyyy-MM-dd HH:mmAA;Etc/GMT;DATETIME;null -2023-07-21 09:13AM;yyyy-MM-dd HH:mmAA;Etc/GMT;SIMPLE;null -2023-07-21 09:13AM;yyyy-MM-dd HH:mmaa;Etc/GMT;DATETIME;null -2023-07-21 09:13AM;yyyy-MM-dd HH:mmaa;Etc/GMT;SIMPLE;1689930780 -2023-07-21 09:13AM;yyyy-MM-dd HH:mma;Etc/GMT;DATETIME;1689930780 -2023-07-21 09:13AM;yyyy-MM-dd HH:mma;Etc/GMT;SIMPLE;1689930780 -2023-07-21 09:13PM;yyyy-MM-dd HH:mma;Etc/GMT;DATETIME;null -2023-07-21 09:13PM;yyyy-MM-dd HH:mma;Etc/GMT;SIMPLE;1689930780 -2023-07-21 09:13PM;yyyy-MM-dd hh:mmaa;Etc/GMT;DATETIME;null -2023-07-21 09:13PM;yyyy-MM-dd hh:mmaa;Etc/GMT;SIMPLE;1689973980 -2023-07-21 09:13PM;yyyy-MM-dd hh:mma;Etc/GMT;DATETIME;1689973980 -2023-07-21 09:13PM;yyyy-MM-dd hh:mma;Etc/GMT;SIMPLE;1689973980 -2023-07-21 09:13:10;yyyy-MM-dd HH:mm:ss;Etc/GMT;DATETIME;1689930790 -2023-07-21 09:13:10;yyyy-MM-dd HH:mm:ss;Etc/GMT;SIMPLE;1689930790 -2023-07-21 
09:13:10.123;yyyy-MM-dd HH:mm:ss.sss;Etc/GMT;DATETIME;null -2023-07-21 09:13:10.123;yyyy-MM-dd HH:mm:ss.sss;Etc/GMT;SIMPLE;1689930903 -2023-07-21 09:13:10.123;yyyy-MM-dd HH:mm:ss.SSS;Etc/GMT;DATETIME;1689930790 -2023-07-21 09:13:10.123;yyyy-MM-dd HH:mm:ss.SSS;Etc/GMT;DATETIME;1689930790 +1970-01-01 00:00:00;yyyy-MM-dd HH:mm:ss;Etc/GMT;DATETIME;SMART;0 +1970-01-01 00:00:00;yyyy-MM-dd HH:mm:ss;Etc/GMT;SIMPLE;LENIENT;0 +1970-01-01 00:00:00;yyyy-MM-dd HH:mm:ss;Atlantic/Azores;DATETIME;SMART;3600 +1970-01-01 00:00:00;yyyy-MM-dd HH:mm:ss;Atlantic/Azores;SIMPLE;LENIENT;3600 +1970-01-01 00:00:00;yyyy-MM-dd HH:mm:ss;Europe/Paris;DATETIME;SMART;-3600 +1970-01-01 00:00:00;yyyy-MM-dd HH:mm:ss;Europe/Paris;SIMPLE;LENIENT;-3600 +1970-01-01 00:00:00 GMT;yyyy-MM-dd HH:mm:ss z;Etc/GMT;DATETIME;SMART;0 +1970-01-01 00:00:00 GMT;yyyy-MM-dd HH:mm:ss z;Etc/GMT;SIMPLE;LENIENT;0 +1970-01-01 00:00:00 GMT-01:00;yyyy-MM-dd HH:mm:ss z;Etc/GMT;DATETIME;SMART;3600 +1970-01-01 00:00:00 GMT-01:00;yyyy-MM-dd HH:mm:ss z;Etc/GMT;SIMPLE;LENIENT;3600 +1970-01-01 00:00:00 GMT+01:00;yyyy-MM-dd HH:mm:ss z;Etc/GMT;DATETIME;SMART;-3600 +1970-01-01 00:00:00 GMT+01:00;yyyy-MM-dd HH:mm:ss z;Etc/GMT;SIMPLE;LENIENT;-3600 +1970-01-01 00:00:00 GMT-01:00;yyyy-MM-dd HH:mm:ss z;Europe/Paris;DATETIME;SMART;3600 +1970-01-01 00:00:00 GMT-01:00;yyyy-MM-dd HH:mm:ss z;Europe/Paris;SIMPLE;LENIENT;3600 +1800-01-01 00:00:00;yyyy-MM-dd HH:mm:ss;Etc/GMT;DATETIME;SMART;-5364662400 +1800-01-01 00:00:00;yyyy-MM-dd HH:mm:ss;Etc/GMT;SIMPLE;LENIENT;-5364662400 +1800-01-01 00:00:00;yyyy-MM-dd HH:mm:ss;Asia/Kolkata;DATETIME;SMART;-5364683608 +1800-01-01 00:00:00;yyyy-MM-dd HH:mm:ss;Asia/Kolkata;SIMPLE;LENIENT;-5364682200 +Jul 9 2023;MMM dd yyyy;Etc/GMT;DATETIME;SMART;null +Jul 9 2023;MMM dd yyyy;Etc/GMT;SIMPLE;LENIENT;1688860800 +Jul 09 2023;MMM dd yyyy;Etc/GMT;DATETIME;SMART;1688860800 +Jul 09 2023;MMM dd yyyy;Etc/GMT;SIMPLE;LENIENT;1688860800 +Jul 21 2023;MMM dd yyyy;Etc/GMT;DATETIME;SMART;1689897600 +Jul 21 2023;MMM dd 
yyyy;Etc/GMT;SIMPLE;LENIENT;1689897600 +2023-07-21;YYYY-MM-DD;Etc/GMT;DATETIME;SMART;null +2023-07-21;YYYY-MM-DD;Etc/GMT;SIMPLE;LENIENT;1672531200 +Jul 21 2023 09:13;MMM dd yyyy HH:mm;Etc/GMT;DATETIME;SMART;1689930780 +Jul 21 2023 09:13;MMM dd yyyy HH:mm;Etc/GMT;SIMPLE;LENIENT;1689930780 +Jul 21 2023 9:13;MMM dd yyyy HH:mm;Etc/GMT;DATETIME;SMART;null +Jul 21 2023 9:13;MMM dd yyyy HH:mm;Etc/GMT;SIMPLE;LENIENT;1689930780 +2023-07-21 09:13;yyyy-MM-dd HH:mm;Etc/GMT;DATETIME;SMART;1689930780 +2023-07-21 09:13;yyyy-MM-dd HH:mm;Etc/GMT;SIMPLE;LENIENT;1689930780 +2023-07-21 9:13;yyyy-MM-dd HH:mm;Etc/GMT;DATETIME;SMART;null +2023-07-21 9:13;yyyy-MM-dd HH:mm;Etc/GMT;SIMPLE;LENIENT;1689930780 +2023-07-21 9:13PM;yyyy-MM-dd h:mma;Etc/GMT;DATETIME;SMART;1689973980 +2023-07-21 9:13PM;yyyy-MM-dd h:mma;Etc/GMT;SIMPLE;LENIENT;1689973980 +2023-07-21 09:13AM;yyyy-MM-dd HH:mmAA;Etc/GMT;DATETIME;SMART;null +2023-07-21 09:13AM;yyyy-MM-dd HH:mmAA;Etc/GMT;SIMPLE;LENIENT;null +2023-07-21 09:13AM;yyyy-MM-dd HH:mmaa;Etc/GMT;DATETIME;SMART;null +2023-07-21 09:13AM;yyyy-MM-dd HH:mmaa;Etc/GMT;SIMPLE;LENIENT;1689930780 +2023-07-21 09:13AM;yyyy-MM-dd HH:mma;Etc/GMT;DATETIME;SMART;1689930780 +2023-07-21 09:13AM;yyyy-MM-dd HH:mma;Etc/GMT;SIMPLE;LENIENT;1689930780 +2023-07-21 09:13PM;yyyy-MM-dd HH:mma;Etc/GMT;DATETIME;SMART;null +2023-07-21 09:13PM;yyyy-MM-dd HH:mma;Etc/GMT;SIMPLE;LENIENT;1689930780 +2023-07-21 09:13PM;yyyy-MM-dd hh:mmaa;Etc/GMT;DATETIME;SMART;null +2023-07-21 09:13PM;yyyy-MM-dd hh:mmaa;Etc/GMT;SIMPLE;LENIENT;1689973980 +2023-07-21 09:13PM;yyyy-MM-dd hh:mma;Etc/GMT;DATETIME;SMART;1689973980 +2023-07-21 09:13PM;yyyy-MM-dd hh:mma;Etc/GMT;SIMPLE;LENIENT;1689973980 +2023-07-21 09:13:10;yyyy-MM-dd HH:mm:ss;Etc/GMT;DATETIME;SMART;1689930790 +2023-07-21 09:13:10;yyyy-MM-dd HH:mm:ss;Etc/GMT;SIMPLE;LENIENT;1689930790 +2023-07-21 09:13:10.123;yyyy-MM-dd HH:mm:ss.sss;Etc/GMT;DATETIME;SMART;null +2023-07-21 09:13:10.123;yyyy-MM-dd HH:mm:ss.sss;Etc/GMT;SIMPLE;LENIENT;1689930903 +2023-07-21 
09:13:10.123;yyyy-MM-dd HH:mm:ss.SSS;Etc/GMT;DATETIME;SMART;1689930790 +2023-07-21 09:13:10.123;yyyy-MM-dd HH:mm:ss.SSS;Etc/GMT;DATETIME;SMART;1689930790 +1970-01-01 00:00:00;yyyy-MM-dd HH:mm:ss;Etc/GMT;DATETIME;STRICT;null +1970-01-01 00:00:00;uuuu-MM-dd HH:mm:ss;Etc/GMT;DATETIME;STRICT;0 +1970-01-01 00:00:00;uuuu-MM-dd HH:mm:ss;Atlantic/Azores;DATETIME;STRICT;3600 +1970-01-01 00:00:00;uuuu-MM-dd HH:mm:ss;Europe/Paris;DATETIME;STRICT;-3600 +1970-01-01 00:00:00 GMT;uuuu-MM-dd HH:mm:ss z;Etc/GMT;DATETIME;STRICT;0 +1970-01-01 00:00:00 GMT+01:00;uuuu-MM-dd HH:mm:ss z;Etc/GMT;DATETIME;STRICT;-3600 +1970-01-01 00:00:00 GMT-01:00;uuuu-MM-dd HH:mm:ss z;Europe/Paris;DATETIME;STRICT;3600 +1800-01-01 00:00:00;uuuu-MM-dd HH:mm:ss;Etc/GMT;DATETIME;STRICT;-5364662400 +Jul 9 2023;MMM dd yyyy;Etc/GMT;SIMPLE;LENIENT;1688860800 +Jul 9 2023;MMM dd yyyy;Etc/GMT;DATETIME;SMART;null +Jul 9 2023;MMM dd uuuu;Etc/GMT;DATETIME;STRICT;null +Jul 09 2023;MMM dd uuuu;Etc/GMT;DATETIME;STRICT;1688860800 +Jul 21 2023;MMM dd uuuu;Etc/GMT;DATETIME;STRICT;1689897600 +2001-02-28 00:00:00;yyyy-MM-dd HH:mm:ss;Etc/GMT;SIMPLE;LENIENT;983318400 +2001-02-28 00:00:00;yyyy-MM-dd HH:mm:ss;Etc/GMT;DATETIME;SMART;983318400 +2001-02-28 00:00:00;uuuu-MM-dd HH:mm:ss;Etc/GMT;DATETIME;STRICT;983318400 +2001-02-29;yyyy-MM-dd;Etc/GMT;SIMPLE;LENIENT;983404800 +2001-02-29;yyyy-MM-dd;Etc/GMT;DATETIME;LENIENT;983404800 +2001-02-29;yyyy-MM-dd;Etc/GMT;DATETIME;SMART;983318400 +2001-02-29;uuuu-MM-dd;Etc/GMT;DATETIME;STRICT;null +2001-02-31;yyyy-MM-dd;Etc/GMT;SIMPLE;LENIENT;983577600 +2001-02-31;yyyy-MM-dd;Etc/GMT;DATETIME;LENIENT;983577600 +2001-02-31;yyyy-MM-dd;Etc/GMT;DATETIME;SMART;983318400 +2001-02-31;uuuu-MM-dd;Etc/GMT;DATETIME;STRICT;null +Apr 31 2001;MMM dd yyyy;Etc/GMT;SIMPLE;LENIENT;988675200 +Apr 31 2001;MMM dd uuuu;Etc/GMT;DATETIME;LENIENT;988675200 +Apr 31 2001;MMM dd uuuu;Etc/GMT;DATETIME;SMART;988588800 +Apr 31 2001;MMM dd uuuu;Etc/GMT;DATETIME;STRICT;null +2001-06-31 00:00:00;yyyy-MM-dd 
HH:mm:ss;Etc/GMT;SIMPLE;LENIENT;993945600 +2001-06-31 00:00:00;uuuu-MM-dd HH:mm:ss;Etc/GMT;DATETIME;LENIENT;993945600 +2001-06-31 00:00:00;uuuu-MM-dd HH:mm:ss;Etc/GMT;DATETIME;SMART;993859200 +2001-06-31 00:00:00;uuuu-MM-dd HH:mm:ss;Etc/GMT;DATETIME;STRICT;null