This is an automated email from the ASF dual-hosted git repository. kgyrtkirk pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/hive.git
The following commit(s) were added to refs/heads/master by this push: new f002c10 HIVE-24157: Strict mode to fail on CAST timestamp <-> numeric (#1497) (Zoltan Haindrich reviewed by Jesus Camacho Rodriguez) f002c10 is described below commit f002c103da25ea9826abff90aa84c0a1967c4a50 Author: Zoltan Haindrich <k...@rxd.hu> AuthorDate: Fri Oct 2 15:02:11 2020 +0200 HIVE-24157: Strict mode to fail on CAST timestamp <-> numeric (#1497) (Zoltan Haindrich reviewed by Jesus Camacho Rodriguez) --- .../java/org/apache/hadoop/hive/conf/HiveConf.java | 2 + data/conf/hive-site.xml | 6 ++ data/conf/llap/hive-site.xml | 5 ++ .../ql/udf/TimestampCastRestrictorResolver.java | 71 ++++++++++++++++++++++ .../apache/hadoop/hive/ql/udf/UDFToBoolean.java | 6 ++ .../org/apache/hadoop/hive/ql/udf/UDFToByte.java | 6 ++ .../org/apache/hadoop/hive/ql/udf/UDFToDouble.java | 6 ++ .../org/apache/hadoop/hive/ql/udf/UDFToFloat.java | 5 ++ .../apache/hadoop/hive/ql/udf/UDFToInteger.java | 6 ++ .../org/apache/hadoop/hive/ql/udf/UDFToLong.java | 5 ++ .../org/apache/hadoop/hive/ql/udf/UDFToShort.java | 6 ++ .../hive/ql/udf/generic/GenericUDFBaseCompare.java | 33 ++++++++++ .../hive/ql/udf/generic/GenericUDFTimestamp.java | 15 ++++- .../clientnegative/strict_numeric_to_timestamp.q | 2 + .../clientnegative/strict_numeric_to_timestamp2.q | 3 + .../clientnegative/strict_timestamp_to_numeric.q | 2 + .../clientnegative/strict_timestamp_to_numeric2.q | 3 + .../clientnegative/strict_timestamp_to_numeric3.q | 3 + .../clientnegative/strict_timestamp_to_numeric4.q | 3 + .../strict_numeric_to_timestamp.q.out | 1 + .../strict_numeric_to_timestamp2.q.out | 9 +++ .../strict_timestamp_to_numeric.q.out | 1 + .../strict_timestamp_to_numeric2.q.out | 9 +++ .../strict_timestamp_to_numeric3.q.out | 9 +++ .../strict_timestamp_to_numeric4.q.out | 9 +++ 25 files changed, 223 insertions(+), 3 deletions(-) diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java 
b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java index 1e6e5ca..b0488db 100644 --- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java +++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java @@ -1774,6 +1774,8 @@ public class HiveConf extends Configuration { HIVE_STRICT_CHECKS_BUCKETING("hive.strict.checks.bucketing", true, "Enabling strict bucketing checks disallows the following:\n" + " Load into bucketed tables."), + HIVE_STRICT_TIMESTAMP_CONVERSION("hive.strict.timestamp.conversion", true, + "Restricts unsafe timestamp <-> numeric conversions (both directions)"), HIVE_LOAD_DATA_OWNER("hive.load.data.owner", "", "Set the owner of files loaded using load data in managed tables."), diff --git a/data/conf/hive-site.xml b/data/conf/hive-site.xml index 53a38c8..f0f71f6 100644 --- a/data/conf/hive-site.xml +++ b/data/conf/hive-site.xml @@ -381,4 +381,10 @@ <name>hive.scheduled.queries.executor.enabled</name> <value>false</value> </property> + +<property> + <name>hive.strict.timestamp.conversion</name> + <value>false</value> +</property> + </configuration> diff --git a/data/conf/llap/hive-site.xml b/data/conf/llap/hive-site.xml index fe4337b..7bc3791 100644 --- a/data/conf/llap/hive-site.xml +++ b/data/conf/llap/hive-site.xml @@ -402,4 +402,9 @@ <description>Using property defined in HiveConf.ConfVars to test System property overriding</description> </property> +<property> + <name>hive.strict.timestamp.conversion</name> + <value>false</value> +</property> + </configuration> diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/TimestampCastRestrictorResolver.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/TimestampCastRestrictorResolver.java new file mode 100644 index 0000000..edb0736 --- /dev/null +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/TimestampCastRestrictorResolver.java @@ -0,0 +1,71 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements.
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.udf; + +import java.lang.reflect.Method; +import java.util.List; + +import org.apache.hadoop.hive.conf.HiveConf.ConfVars; +import org.apache.hadoop.hive.ql.exec.UDF; +import org.apache.hadoop.hive.ql.exec.UDFArgumentException; +import org.apache.hadoop.hive.ql.exec.UDFMethodResolver; +import org.apache.hadoop.hive.ql.session.SessionState; +import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping; +import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo; +import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; + +/** + * Restricts casting timestamp/date types to numeric values. + * + * This Resolver is used in {@link UDF} implementations to enforce strict conversion rules. 
+ */ +public class TimestampCastRestrictorResolver implements UDFMethodResolver { + + private UDFMethodResolver parentResolver; + private boolean strictTsConversion; + + public TimestampCastRestrictorResolver(UDFMethodResolver parentResolver) { + this.parentResolver = parentResolver; + SessionState ss = SessionState.get(); + if (ss != null && ss.getConf().getBoolVar(ConfVars.HIVE_STRICT_TIMESTAMP_CONVERSION)) { + strictTsConversion = true; + } + } + + @Override + public Method getEvalMethod(List<TypeInfo> argClasses) throws UDFArgumentException { + if (strictTsConversion) { + TypeInfo arg = argClasses.get(0); + if (arg instanceof PrimitiveTypeInfo) { + PrimitiveTypeInfo primitiveTypeInfo = (PrimitiveTypeInfo) arg; + PrimitiveCategory category = primitiveTypeInfo.getPrimitiveCategory(); + PrimitiveGrouping group = PrimitiveObjectInspectorUtils.getPrimitiveGrouping(category); + if (group == PrimitiveGrouping.DATE_GROUP) { + throw new UDFArgumentException( + "Casting DATE/TIMESTAMP types to NUMERIC is prohibited (" + ConfVars.HIVE_STRICT_TIMESTAMP_CONVERSION + + ")"); + } + } + } + return parentResolver.getEvalMethod(argClasses); + } + +} diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java index 0957fe0..a2c1e07 100755 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToBoolean.java @@ -22,6 +22,7 @@ package org.apache.hadoop.hive.ql.udf; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDF; +import org.apache.hadoop.hive.ql.exec.UDFMethodResolver; import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions; import org.apache.hadoop.hive.ql.exec.vector.expressions.CastDecimalToBoolean; import org.apache.hadoop.hive.ql.exec.vector.expressions.CastStringToBoolean; @@ -75,6 +76,11 @@ public class UDFToBoolean extends UDF { 
public UDFToBoolean() { } + @Override + public UDFMethodResolver getResolver() { + return new TimestampCastRestrictorResolver(super.getResolver()); + } + /** * Convert a void to boolean. This is called for CAST(... AS BOOLEAN) * diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java index b335293..ee71590 100755 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToByte.java @@ -20,6 +20,7 @@ package org.apache.hadoop.hive.ql.udf; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDF; +import org.apache.hadoop.hive.ql.exec.UDFMethodResolver; import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions; import org.apache.hadoop.hive.ql.exec.vector.expressions.CastDecimalToLong; import org.apache.hadoop.hive.ql.exec.vector.expressions.CastStringToLong; @@ -68,6 +69,11 @@ public class UDFToByte extends UDF { public UDFToByte() { } + @Override + public UDFMethodResolver getResolver() { + return new TimestampCastRestrictorResolver(super.getResolver()); + } + /** * Convert from void to a byte. This is called for CAST(... 
AS TINYINT) * diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDouble.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDouble.java index 6f820c6..2320e4a 100755 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDouble.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToDouble.java @@ -20,6 +20,7 @@ package org.apache.hadoop.hive.ql.udf; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDF; +import org.apache.hadoop.hive.ql.exec.UDFMethodResolver; import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions; import org.apache.hadoop.hive.ql.exec.vector.expressions.CastDecimalToDouble; import org.apache.hadoop.hive.ql.exec.vector.expressions.CastStringToDouble; @@ -66,6 +67,11 @@ public class UDFToDouble extends UDF { public UDFToDouble() { } + @Override + public UDFMethodResolver getResolver() { + return new TimestampCastRestrictorResolver(super.getResolver()); + } + /** * Convert from void to a double. This is called for CAST(... AS DOUBLE) * diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToFloat.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToFloat.java index c7cabb2..e5df0b8 100755 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToFloat.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToFloat.java @@ -20,6 +20,7 @@ package org.apache.hadoop.hive.ql.udf; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDF; +import org.apache.hadoop.hive.ql.exec.UDFMethodResolver; import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions; import org.apache.hadoop.hive.ql.exec.vector.expressions.CastDecimalToFloat; import org.apache.hadoop.hive.ql.exec.vector.expressions.CastStringToFloat; @@ -67,6 +68,10 @@ public class UDFToFloat extends UDF { public UDFToFloat() { } + @Override + public UDFMethodResolver getResolver() { + return new TimestampCastRestrictorResolver(super.getResolver()); + } /** * Convert from void to a float. 
This is called for CAST(... AS FLOAT) * diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToInteger.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToInteger.java index 936f77d..af86db0 100755 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToInteger.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToInteger.java @@ -20,6 +20,7 @@ package org.apache.hadoop.hive.ql.udf; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDF; +import org.apache.hadoop.hive.ql.exec.UDFMethodResolver; import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions; import org.apache.hadoop.hive.ql.exec.vector.expressions.CastDecimalToLong; import org.apache.hadoop.hive.ql.exec.vector.expressions.gen.CastDoubleToLong; @@ -70,6 +71,11 @@ public class UDFToInteger extends UDF { public UDFToInteger() { } + @Override + public UDFMethodResolver getResolver() { + return new TimestampCastRestrictorResolver(super.getResolver()); + } + /** * Convert from void to an integer. This is called for CAST(... 
AS INT) * diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToLong.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToLong.java index 36d1c4d..eb465d9 100755 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToLong.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToLong.java @@ -20,6 +20,7 @@ package org.apache.hadoop.hive.ql.udf; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDF; +import org.apache.hadoop.hive.ql.exec.UDFMethodResolver; import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions; import org.apache.hadoop.hive.ql.exec.vector.expressions.CastDecimalToLong; import org.apache.hadoop.hive.ql.exec.vector.expressions.CastStringToLong; @@ -68,6 +69,10 @@ public class UDFToLong extends UDF { public UDFToLong() { } + @Override + public UDFMethodResolver getResolver() { + return new TimestampCastRestrictorResolver(super.getResolver()); + } /** * Convert from void to a long. This is called for CAST(... AS BIGINT) * diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToShort.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToShort.java index 583b626..2ba2e01 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToShort.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/UDFToShort.java @@ -20,6 +20,7 @@ package org.apache.hadoop.hive.ql.udf; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDF; +import org.apache.hadoop.hive.ql.exec.UDFMethodResolver; import org.apache.hadoop.hive.ql.exec.vector.VectorizedExpressions; import org.apache.hadoop.hive.ql.exec.vector.expressions.CastDecimalToLong; import org.apache.hadoop.hive.ql.exec.vector.expressions.CastStringToLong; @@ -68,6 +69,11 @@ public class UDFToShort extends UDF { public UDFToShort() { } + @Override + public UDFMethodResolver getResolver() { + return new TimestampCastRestrictorResolver(super.getResolver()); + } + /** * Convert from void to a short. This is called for CAST(... 
AS SMALLINT) * diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseCompare.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseCompare.java index 97f4d1e..f8713ac 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseCompare.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseCompare.java @@ -18,22 +18,28 @@ package org.apache.hadoop.hive.ql.udf.generic; +import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.FunctionRegistry; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException; import org.apache.hadoop.hive.ql.metadata.HiveException; +import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hadoop.hive.ql.udf.generic.GenericUDFUtils.ReturnObjectInspectorResolver; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters.Converter; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; +import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory; import org.apache.hadoop.hive.serde2.objectinspector.primitive.BooleanObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.ByteObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.IntObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; +import 
org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping; import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; @@ -145,12 +151,39 @@ public abstract class GenericUDFBaseCompare extends GenericUDFBaseBinary { converter0 = ObjectInspectorConverters.getConverter(arguments[0], compareOI); converter1 = ObjectInspectorConverters.getConverter(arguments[1], compareOI); + + checkConversionAllowed(arguments[0], compareOI); + checkConversionAllowed(arguments[1], compareOI); } } return PrimitiveObjectInspectorFactory.writableBooleanObjectInspector; } + protected void checkConversionAllowed(ObjectInspector argOI, ObjectInspector compareOI) + throws UDFArgumentException { + if (primitiveGroupOf(argOI) != PrimitiveGrouping.DATE_GROUP) { + return; + } + SessionState ss = SessionState.get(); + if (ss != null && ss.getConf().getBoolVar(ConfVars.HIVE_STRICT_TIMESTAMP_CONVERSION)) { + if (primitiveGroupOf(compareOI) == PrimitiveGrouping.NUMERIC_GROUP) { + throw new UDFArgumentException( + "Casting DATE/TIMESTAMP to NUMERIC is prohibited (" + ConfVars.HIVE_STRICT_TIMESTAMP_CONVERSION + ")"); + } + } + } + + protected PrimitiveGrouping primitiveGroupOf(ObjectInspector oi) { + if (oi instanceof PrimitiveObjectInspector) { + PrimitiveCategory category = ((PrimitiveObjectInspector) oi).getPrimitiveCategory(); + PrimitiveGrouping group = PrimitiveObjectInspectorUtils.getPrimitiveGrouping(category); + return group; + } else { + return null; + } + } + public Integer compare(DeferredObject[] arguments) throws HiveException { Object o0,o1; o0 = arguments[0].get(); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTimestamp.java 
b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTimestamp.java index 70f57b7..ba4afde 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTimestamp.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFTimestamp.java @@ -17,8 +17,6 @@ */ package org.apache.hadoop.hive.ql.udf.generic; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.UDFArgumentException; @@ -31,11 +29,13 @@ import org.apache.hadoop.hive.ql.exec.vector.expressions.CastLongToTimestamp; import org.apache.hadoop.hive.ql.exec.vector.expressions.CastStringToTimestamp; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.session.SessionState; -import org.apache.hadoop.hive.ql.session.SessionState.LogHelper; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; +import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorConverter.TimestampConverter; import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils; +import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping; /** * @@ -84,6 +84,15 @@ public class GenericUDFTimestamp extends GenericUDF { "The function TIMESTAMP takes only primitive types"); } + if (ss != null && ss.getConf().getBoolVar(ConfVars.HIVE_STRICT_TIMESTAMP_CONVERSION)) { + PrimitiveCategory category = argumentOI.getPrimitiveCategory(); + PrimitiveGrouping group = PrimitiveObjectInspectorUtils.getPrimitiveGrouping(category); + if (group == 
PrimitiveGrouping.NUMERIC_GROUP) { + throw new UDFArgumentException( + "Casting NUMERIC types to TIMESTAMP is prohibited (" + ConfVars.HIVE_STRICT_TIMESTAMP_CONVERSION + ")"); + } + } + tc = new TimestampConverter(argumentOI, PrimitiveObjectInspectorFactory.writableTimestampObjectInspector); tc.setIntToTimestampInSeconds(intToTimestampInSeconds); diff --git a/ql/src/test/queries/clientnegative/strict_numeric_to_timestamp.q b/ql/src/test/queries/clientnegative/strict_numeric_to_timestamp.q new file mode 100644 index 0000000..da8ba71 --- /dev/null +++ b/ql/src/test/queries/clientnegative/strict_numeric_to_timestamp.q @@ -0,0 +1,2 @@ +set hive.strict.timestamp.conversion=true; +select cast(123 as timestamp); diff --git a/ql/src/test/queries/clientnegative/strict_numeric_to_timestamp2.q b/ql/src/test/queries/clientnegative/strict_numeric_to_timestamp2.q new file mode 100644 index 0000000..002412f --- /dev/null +++ b/ql/src/test/queries/clientnegative/strict_numeric_to_timestamp2.q @@ -0,0 +1,3 @@ +set hive.strict.timestamp.conversion=true; +create table t (a integer); +select cast(a as timestamp) from t; diff --git a/ql/src/test/queries/clientnegative/strict_timestamp_to_numeric.q b/ql/src/test/queries/clientnegative/strict_timestamp_to_numeric.q new file mode 100644 index 0000000..23d4420 --- /dev/null +++ b/ql/src/test/queries/clientnegative/strict_timestamp_to_numeric.q @@ -0,0 +1,2 @@ +set hive.strict.timestamp.conversion=true; +select cast(cast('2011-11-11' as timestamp) as integer); diff --git a/ql/src/test/queries/clientnegative/strict_timestamp_to_numeric2.q b/ql/src/test/queries/clientnegative/strict_timestamp_to_numeric2.q new file mode 100644 index 0000000..a4566ff --- /dev/null +++ b/ql/src/test/queries/clientnegative/strict_timestamp_to_numeric2.q @@ -0,0 +1,3 @@ +set hive.strict.timestamp.conversion=true; +create table t(a timestamp); +select cast(a as integer) from t; diff --git a/ql/src/test/queries/clientnegative/strict_timestamp_to_numeric3.q 
b/ql/src/test/queries/clientnegative/strict_timestamp_to_numeric3.q new file mode 100644 index 0000000..4638135 --- /dev/null +++ b/ql/src/test/queries/clientnegative/strict_timestamp_to_numeric3.q @@ -0,0 +1,3 @@ +set hive.strict.timestamp.conversion=true; +create table t(a struct<t:timestamp>); +select cast(a.t as integer) from t; diff --git a/ql/src/test/queries/clientnegative/strict_timestamp_to_numeric4.q b/ql/src/test/queries/clientnegative/strict_timestamp_to_numeric4.q new file mode 100644 index 0000000..dacdd8a --- /dev/null +++ b/ql/src/test/queries/clientnegative/strict_timestamp_to_numeric4.q @@ -0,0 +1,3 @@ +set hive.strict.timestamp.conversion=true; +create table t(a timestamp); +select 1 from t where a=1000; diff --git a/ql/src/test/results/clientnegative/strict_numeric_to_timestamp.q.out b/ql/src/test/results/clientnegative/strict_numeric_to_timestamp.q.out new file mode 100644 index 0000000..a4c04bf --- /dev/null +++ b/ql/src/test/results/clientnegative/strict_numeric_to_timestamp.q.out @@ -0,0 +1 @@ +FAILED: SemanticException Line 0:-1 Wrong arguments '123': Casting NUMERIC types to TIMESTAMP is prohibited (hive.strict.timestamp.conversion) diff --git a/ql/src/test/results/clientnegative/strict_numeric_to_timestamp2.q.out b/ql/src/test/results/clientnegative/strict_numeric_to_timestamp2.q.out new file mode 100644 index 0000000..199764c --- /dev/null +++ b/ql/src/test/results/clientnegative/strict_numeric_to_timestamp2.q.out @@ -0,0 +1,9 @@ +PREHOOK: query: create table t (a integer) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@t +POSTHOOK: query: create table t (a integer) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@t +FAILED: SemanticException Line 0:-1 Wrong arguments 'a': Casting NUMERIC types to TIMESTAMP is prohibited (hive.strict.timestamp.conversion) diff --git a/ql/src/test/results/clientnegative/strict_timestamp_to_numeric.q.out 
b/ql/src/test/results/clientnegative/strict_timestamp_to_numeric.q.out new file mode 100644 index 0000000..fc7191a --- /dev/null +++ b/ql/src/test/results/clientnegative/strict_timestamp_to_numeric.q.out @@ -0,0 +1 @@ +FAILED: SemanticException Line 0:-1 Wrong arguments ''2011-11-11'': Casting DATE/TIMESTAMP types to NUMERIC is prohibited (hive.strict.timestamp.conversion) diff --git a/ql/src/test/results/clientnegative/strict_timestamp_to_numeric2.q.out b/ql/src/test/results/clientnegative/strict_timestamp_to_numeric2.q.out new file mode 100644 index 0000000..0f8b46f --- /dev/null +++ b/ql/src/test/results/clientnegative/strict_timestamp_to_numeric2.q.out @@ -0,0 +1,9 @@ +PREHOOK: query: create table t(a timestamp) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@t +POSTHOOK: query: create table t(a timestamp) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@t +FAILED: SemanticException Line 0:-1 Wrong arguments 'a': Casting DATE/TIMESTAMP types to NUMERIC is prohibited (hive.strict.timestamp.conversion) diff --git a/ql/src/test/results/clientnegative/strict_timestamp_to_numeric3.q.out b/ql/src/test/results/clientnegative/strict_timestamp_to_numeric3.q.out new file mode 100644 index 0000000..e8d0852 --- /dev/null +++ b/ql/src/test/results/clientnegative/strict_timestamp_to_numeric3.q.out @@ -0,0 +1,9 @@ +PREHOOK: query: create table t(a struct<t:timestamp>) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@t +POSTHOOK: query: create table t(a struct<t:timestamp>) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@t +FAILED: SemanticException Line 0:-1 Wrong arguments 't': Casting DATE/TIMESTAMP types to NUMERIC is prohibited (hive.strict.timestamp.conversion) diff --git a/ql/src/test/results/clientnegative/strict_timestamp_to_numeric4.q.out 
b/ql/src/test/results/clientnegative/strict_timestamp_to_numeric4.q.out new file mode 100644 index 0000000..5ca64e6 --- /dev/null +++ b/ql/src/test/results/clientnegative/strict_timestamp_to_numeric4.q.out @@ -0,0 +1,9 @@ +PREHOOK: query: create table t(a timestamp) +PREHOOK: type: CREATETABLE +PREHOOK: Output: database:default +PREHOOK: Output: default@t +POSTHOOK: query: create table t(a timestamp) +POSTHOOK: type: CREATETABLE +POSTHOOK: Output: database:default +POSTHOOK: Output: default@t +FAILED: SemanticException [Error 10014]: Line 2:22 Wrong arguments '1000': Casting DATE/TIMESTAMP to NUMERIC is prohibited (hive.strict.timestamp.conversion)