Repository: hive
Updated Branches:
  refs/heads/master 2c848ef5d -> 25aaf7db0


http://git-wip-us.apache.org/repos/asf/hive/blob/25aaf7db/ql/src/test/results/clientpositive/spark/vector_decimal_aggregate.q.out
----------------------------------------------------------------------
diff --git 
a/ql/src/test/results/clientpositive/spark/vector_decimal_aggregate.q.out 
b/ql/src/test/results/clientpositive/spark/vector_decimal_aggregate.q.out
index d37a27e..c5d0214 100644
--- a/ql/src/test/results/clientpositive/spark/vector_decimal_aggregate.q.out
+++ b/ql/src/test/results/clientpositive/spark/vector_decimal_aggregate.q.out
@@ -457,7 +457,7 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: decimal_vgby_small
-                  Statistics: Num rows: 12289 Data size: 346472 Basic stats: 
COMPLETE Column stats: NONE
+                  Statistics: Num rows: 12289 Data size: 346462 Basic stats: 
COMPLETE Column stats: NONE
                   TableScan Vectorization:
                       native: true
                       vectorizationSchemaColumns: [0:cdouble:double, 
1:cdecimal1:decimal(11,5)/DECIMAL_64, 2:cdecimal2:decimal(16,0)/DECIMAL_64, 
3:cint:int, 4:ROW__ID:struct<writeid:bigint,bucketid:int,rowid:bigint>]
@@ -468,7 +468,7 @@ STAGE PLANS:
                         className: VectorSelectOperator
                         native: true
                         projectedOutputColumnNums: [1, 2, 3]
-                    Statistics: Num rows: 12289 Data size: 346472 Basic stats: 
COMPLETE Column stats: NONE
+                    Statistics: Num rows: 12289 Data size: 346462 Basic stats: 
COMPLETE Column stats: NONE
                     Group By Operator
                       aggregations: count(cdecimal1), max(cdecimal1), 
min(cdecimal1), sum(cdecimal1), count(cdecimal2), max(cdecimal2), 
min(cdecimal2), sum(cdecimal2), count()
                       Group By Vectorization:
@@ -482,7 +482,7 @@ STAGE PLANS:
                       keys: cint (type: int)
                       mode: hash
                       outputColumnNames: _col0, _col1, _col2, _col3, _col4, 
_col5, _col6, _col7, _col8, _col9
-                      Statistics: Num rows: 12289 Data size: 346472 Basic 
stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 12289 Data size: 346462 Basic 
stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
                         key expressions: _col0 (type: int)
                         sort order: +
@@ -493,7 +493,7 @@ STAGE PLANS:
                             native: true
                             nativeConditionsMet: 
hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine 
spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS 
true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
                             valueColumnNums: [1, 2, 3, 4, 5, 6, 7, 8, 9]
-                        Statistics: Num rows: 12289 Data size: 346472 Basic 
stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 12289 Data size: 346462 Basic 
stats: COMPLETE Column stats: NONE
                         value expressions: _col1 (type: bigint), _col2 (type: 
decimal(11,5)), _col3 (type: decimal(11,5)), _col4 (type: decimal(21,5)), _col5 
(type: bigint), _col6 (type: decimal(16,0)), _col7 (type: decimal(16,0)), _col8 
(type: decimal(26,0)), _col9 (type: bigint)
             Execution mode: vectorized
             Map Vectorization:
@@ -540,14 +540,14 @@ STAGE PLANS:
                 keys: KEY._col0 (type: int)
                 mode: mergepartial
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, 
_col6, _col7, _col8, _col9
-                Statistics: Num rows: 6144 Data size: 173221 Basic stats: 
COMPLETE Column stats: NONE
+                Statistics: Num rows: 6144 Data size: 173216 Basic stats: 
COMPLETE Column stats: NONE
                 Filter Operator
                   Filter Vectorization:
                       className: VectorFilterOperator
                       native: true
                       predicateExpression: FilterLongColGreaterLongScalar(col 
9:bigint, val 1)
                   predicate: (_col9 > 1L) (type: boolean)
-                  Statistics: Num rows: 2048 Data size: 57740 Basic stats: 
COMPLETE Column stats: NONE
+                  Statistics: Num rows: 2048 Data size: 57738 Basic stats: 
COMPLETE Column stats: NONE
                   Select Operator
                     expressions: _col0 (type: int), _col1 (type: bigint), 
_col2 (type: decimal(11,5)), _col3 (type: decimal(11,5)), _col4 (type: 
decimal(21,5)), _col5 (type: bigint), _col6 (type: decimal(16,0)), _col7 (type: 
decimal(16,0)), _col8 (type: decimal(26,0))
                     outputColumnNames: _col0, _col1, _col2, _col3, _col4, 
_col5, _col6, _col7, _col8
@@ -555,13 +555,13 @@ STAGE PLANS:
                         className: VectorSelectOperator
                         native: true
                         projectedOutputColumnNums: [0, 1, 2, 3, 4, 5, 6, 7, 8]
-                    Statistics: Num rows: 2048 Data size: 57740 Basic stats: 
COMPLETE Column stats: NONE
+                    Statistics: Num rows: 2048 Data size: 57738 Basic stats: 
COMPLETE Column stats: NONE
                     File Output Operator
                       compressed: false
                       File Sink Vectorization:
                           className: VectorFileSinkOperator
                           native: false
-                      Statistics: Num rows: 2048 Data size: 57740 Basic stats: 
COMPLETE Column stats: NONE
+                      Statistics: Num rows: 2048 Data size: 57738 Basic stats: 
COMPLETE Column stats: NONE
                       table:
                           input format: 
org.apache.hadoop.mapred.SequenceFileInputFormat
                           output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -653,7 +653,7 @@ STAGE PLANS:
             Map Operator Tree:
                 TableScan
                   alias: decimal_vgby_small
-                  Statistics: Num rows: 12289 Data size: 346472 Basic stats: 
COMPLETE Column stats: NONE
+                  Statistics: Num rows: 12289 Data size: 346462 Basic stats: 
COMPLETE Column stats: NONE
                   TableScan Vectorization:
                       native: true
                       vectorizationSchemaColumns: [0:cdouble:double, 
1:cdecimal1:decimal(11,5)/DECIMAL_64, 2:cdecimal2:decimal(16,0)/DECIMAL_64, 
3:cint:int, 4:ROW__ID:struct<writeid:bigint,bucketid:int,rowid:bigint>]
@@ -665,7 +665,7 @@ STAGE PLANS:
                         native: true
                         projectedOutputColumnNums: [3, 1, 2, 6, 9, 7, 12]
                         selectExpressions: CastDecimalToDouble(col 
5:decimal(11,5))(children: ConvertDecimal64ToDecimal(col 
1:decimal(11,5)/DECIMAL_64) -> 5:decimal(11,5)) -> 6:double, 
DoubleColMultiplyDoubleColumn(col 7:double, col 8:double)(children: 
CastDecimalToDouble(col 5:decimal(11,5))(children: 
ConvertDecimal64ToDecimal(col 1:decimal(11,5)/DECIMAL_64) -> 5:decimal(11,5)) 
-> 7:double, CastDecimalToDouble(col 5:decimal(11,5))(children: 
ConvertDecimal64ToDecimal(col 1:decimal(11,5)/DECIMAL_64) -> 5:decimal(11,5)) 
-> 8:double) -> 9:double, CastDecimalToDouble(col 10:decimal(16,0))(children: 
ConvertDecimal64ToDecimal(col 2:decimal(16,0)/DECIMAL_64) -> 10:decimal(16,0)) 
-> 7:double, DoubleColMultiplyDoubleColumn(col 8:double, col 
11:double)(children: CastDecimalToDouble(col 10:decimal(16,0))(children: 
ConvertDecimal64ToDecimal(col 2:decimal(16,0)/DECIMAL_64) -> 10:decimal(16,0)) 
-> 8:double, CastDecimalToDouble(col 10:decimal(16,0))(children: 
ConvertDecimal64ToDecimal(col 2:decimal(16,0)/DECIMAL_64) -> 10:decimal(16,0)) -> 11:double) -> 12:double
-                    Statistics: Num rows: 12289 Data size: 346472 Basic stats: 
COMPLETE Column stats: NONE
+                    Statistics: Num rows: 12289 Data size: 346462 Basic stats: 
COMPLETE Column stats: NONE
                     Group By Operator
                       aggregations: count(_col1), max(_col1), min(_col1), 
sum(_col1), sum(_col4), sum(_col3), count(_col2), max(_col2), min(_col2), 
sum(_col2), sum(_col6), sum(_col5), count()
                       Group By Vectorization:
@@ -679,7 +679,7 @@ STAGE PLANS:
                       keys: _col0 (type: int)
                       mode: hash
                       outputColumnNames: _col0, _col1, _col2, _col3, _col4, 
_col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13
-                      Statistics: Num rows: 12289 Data size: 346472 Basic 
stats: COMPLETE Column stats: NONE
+                      Statistics: Num rows: 12289 Data size: 346462 Basic 
stats: COMPLETE Column stats: NONE
                       Reduce Output Operator
                         key expressions: _col0 (type: int)
                         sort order: +
@@ -690,7 +690,7 @@ STAGE PLANS:
                             native: true
                             nativeConditionsMet: 
hive.vectorized.execution.reducesink.new.enabled IS true, hive.execution.engine 
spark IN [tez, spark] IS true, No PTF TopN IS true, No DISTINCT columns IS 
true, BinarySortableSerDe for keys IS true, LazyBinarySerDe for values IS true
                             valueColumnNums: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 
11, 12, 13]
-                        Statistics: Num rows: 12289 Data size: 346472 Basic 
stats: COMPLETE Column stats: NONE
+                        Statistics: Num rows: 12289 Data size: 346462 Basic 
stats: COMPLETE Column stats: NONE
                         value expressions: _col1 (type: bigint), _col2 (type: 
decimal(11,5)), _col3 (type: decimal(11,5)), _col4 (type: decimal(21,5)), _col5 
(type: double), _col6 (type: double), _col7 (type: bigint), _col8 (type: 
decimal(16,0)), _col9 (type: decimal(16,0)), _col10 (type: decimal(26,0)), 
_col11 (type: double), _col12 (type: double), _col13 (type: bigint)
             Execution mode: vectorized
             Map Vectorization:
@@ -737,14 +737,14 @@ STAGE PLANS:
                 keys: KEY._col0 (type: int)
                 mode: mergepartial
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, 
_col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13
-                Statistics: Num rows: 6144 Data size: 173221 Basic stats: 
COMPLETE Column stats: NONE
+                Statistics: Num rows: 6144 Data size: 173216 Basic stats: 
COMPLETE Column stats: NONE
                 Filter Operator
                   Filter Vectorization:
                       className: VectorFilterOperator
                       native: true
                       predicateExpression: FilterLongColGreaterLongScalar(col 
13:bigint, val 1)
                   predicate: (_col13 > 1L) (type: boolean)
-                  Statistics: Num rows: 2048 Data size: 57740 Basic stats: 
COMPLETE Column stats: NONE
+                  Statistics: Num rows: 2048 Data size: 57738 Basic stats: 
COMPLETE Column stats: NONE
                   Select Operator
                     expressions: _col0 (type: int), _col1 (type: bigint), 
_col2 (type: decimal(11,5)), _col3 (type: decimal(11,5)), _col4 (type: 
decimal(21,5)), (CAST( _col4 AS decimal(15,9)) / _col1) (type: decimal(35,29)), 
power(((_col5 - ((_col6 * _col6) / _col1)) / _col1), 0.5) (type: double), 
power(((_col5 - ((_col6 * _col6) / _col1)) / CASE WHEN ((_col1 = 1L)) THEN 
(null) ELSE ((_col1 - 1)) END), 0.5) (type: double), _col7 (type: bigint), 
_col8 (type: decimal(16,0)), _col9 (type: decimal(16,0)), _col10 (type: 
decimal(26,0)), CAST( (CAST( _col10 AS decimal(20,4)) / _col7) AS 
decimal(20,4)) (type: decimal(20,4)), power(((_col11 - ((_col12 * _col12) / 
_col7)) / _col7), 0.5) (type: double), power(((_col11 - ((_col12 * _col12) / 
_col7)) / CASE WHEN ((_col7 = 1L)) THEN (null) ELSE ((_col7 - 1)) END), 0.5) 
(type: double)
                     outputColumnNames: _col0, _col1, _col2, _col3, _col4, 
_col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14
@@ -753,13 +753,13 @@ STAGE PLANS:
                         native: true
                         projectedOutputColumnNums: [0, 1, 2, 3, 4, 16, 17, 18, 
7, 8, 9, 10, 23, 19, 25]
                         selectExpressions: DecimalColDivideDecimalColumn(col 
14:decimal(15,9), col 15:decimal(19,0))(children: CastDecimalToDecimal(col 
4:decimal(21,5)) -> 14:decimal(15,9), CastLongToDecimal(col 1:bigint) -> 
15:decimal(19,0)) -> 16:decimal(35,29), FuncPowerDoubleToDouble(col 
18:double)(children: DoubleColDivideLongColumn(col 17:double, col 
1:bigint)(children: DoubleColSubtractDoubleColumn(col 5:double, col 
18:double)(children: DoubleColDivideLongColumn(col 17:double, col 
1:bigint)(children: DoubleColMultiplyDoubleColumn(col 6:double, col 6:double) 
-> 17:double) -> 18:double) -> 17:double) -> 18:double) -> 17:double, 
FuncPowerDoubleToDouble(col 19:double)(children: DoubleColDivideLongColumn(col 
18:double, col 22:bigint)(children: DoubleColSubtractDoubleColumn(col 5:double, 
col 19:double)(children: DoubleColDivideLongColumn(col 18:double, col 
1:bigint)(children: DoubleColMultiplyDoubleColumn(col 6:double, col 6:double) 
-> 18:double) -> 19:double) -> 18:double, IfExprNullCondExpr(col 20:boolean, null, col 21:bigint)(children:
LongColEqualLongScalar(col 1:bigint, val 1) -> 20:boolean, 
LongColSubtractLongScalar(col 1:bigint, val 1) -> 21:bigint) -> 22:bigint) -> 
19:double) -> 18:double, CastDecimalToDecimal(col 24:decimal(38,22))(children: 
DecimalColDivideDecimalColumn(col 23:decimal(20,4), col 
15:decimal(19,0))(children: CastDecimalToDecimal(col 10:decimal(26,0)) -> 
23:decimal(20,4), CastLongToDecimal(col 7:bigint) -> 15:decimal(19,0)) -> 
24:decimal(38,22)) -> 23:decimal(20,4), FuncPowerDoubleToDouble(col 
25:double)(children: DoubleColDivideLongColumn(col 19:double, col 
7:bigint)(children: DoubleColSubtractDoubleColumn(col 11:double, col 
25:double)(children: DoubleColDivideLongColumn(col 19:double, col 
7:bigint)(children: DoubleColMultiplyDoubleColumn(col 12:double, col 12:double) 
-> 19:double) -> 25:double) -> 19:double) -> 25:double) -> 19:double, 
FuncPowerDoubleToDouble(col 26:double)(children: DoubleColDivideLongColumn(col 
25:double, col 28:bigint)(children: DoubleColSubtractDoubleColumn(col 11:double, col
26:double)(children: DoubleColDivideLongColumn(col 25:double, col 
7:bigint)(children: DoubleColMultiplyDoubleColumn(col 12:double, col 12:double) 
-> 25:double) -> 26:double) -> 25:double, IfExprNullCondExpr(col 22:boolean, 
null, col 27:bigint)(children: LongColEqualLongScalar(col 7:bigint, val 1) -> 
22:boolean, LongColSubtractLongScalar(col 7:bigint, val 1) -> 27:bigint) -> 
28:bigint) -> 26:double) -> 25:double
-                    Statistics: Num rows: 2048 Data size: 57740 Basic stats: 
COMPLETE Column stats: NONE
+                    Statistics: Num rows: 2048 Data size: 57738 Basic stats: 
COMPLETE Column stats: NONE
                     File Output Operator
                       compressed: false
                       File Sink Vectorization:
                           className: VectorFileSinkOperator
                           native: false
-                      Statistics: Num rows: 2048 Data size: 57740 Basic stats: 
COMPLETE Column stats: NONE
+                      Statistics: Num rows: 2048 Data size: 57738 Basic stats: 
COMPLETE Column stats: NONE
                       table:
                           input format: 
org.apache.hadoop.mapred.SequenceFileInputFormat
                           output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -796,7 +796,7 @@ POSTHOOK: Input: default@decimal_vgby_small
 626923679      1024    9723.40270      -9778.95135     10541.05247     
10.29399655273437500000000000000        5742.091453325365       
5744.897264122335       1024    11646   -11712  12641   12.3447 
6877.306686989158       6880.6672084147185
 6981   2       -515.62107      -515.62107      -1031.24214     
-515.62107000000000000000000000000      0.0     0.0     3       6984454 -618    
6983218 2327739.3333    3292794.518850853       4032833.1995089175
 762    1       1531.21941      1531.21941      1531.21941      
1531.21941000000000000000000000000      0.0     NULL    2       6984454 1834    
6986288 3493144.0000    3491310.0       4937457.95244881
-NULL   3072    9318.43514      -4298.15135     5018444.11392   
1633.60811000000000000000000000000      5695.4830839098695      
5696.410309489299       3072    11161   -5148   6010880 1956.6667       
6821.647911041892       6822.758476439734
+NULL   3072    9318.43514      -4298.15135     5018444.11392   NULL    
5695.4830839098695      5696.410309489299       3072    11161   -5148   6010880 
1956.6667       6821.647911041892       6822.758476439734
 PREHOOK: query: SELECT SUM(HASH(*))
 FROM (SELECT cint,
     COUNT(cdecimal1), MAX(cdecimal1), MIN(cdecimal1), SUM(cdecimal1), 
AVG(cdecimal1), STDDEV_POP(cdecimal1), STDDEV_SAMP(cdecimal1),
@@ -815,4 +815,4 @@ FROM (SELECT cint,
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@decimal_vgby_small
 #### A masked pattern was here ####
-96966670826
+96673467876
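
Note on the result change above: the plan computes AVG(cdecimal1) as (CAST( _col4 AS 
decimal(15,9)) / _col1). For the NULL grouping key, SUM(cdecimal1) is 5018444.11392, 
which needs 7 integer digits while decimal(15,9) allows only 15 - 9 = 6, so the cast 
overflows; the updated output is consistent with that cast now yielding NULL on 
overflow, which also shifts the final SUM(HASH(*)) value. A minimal sketch of the 
overflow-to-NULL rule, using a simplified helper rather than Hive's actual cast code:

    import java.math.BigDecimal;
    import java.math.RoundingMode;

    public class DecimalOverflowSketch {
        // Cast v to decimal(precision, scale); return null (SQL NULL) when the
        // scaled value carries more total digits than the target precision.
        static BigDecimal castToDecimal(BigDecimal v, int precision, int scale) {
            BigDecimal scaled = v.setScale(scale, RoundingMode.HALF_UP);
            return scaled.precision() > precision ? null : scaled;
        }

        public static void main(String[] args) {
            // SUM(cdecimal1) for the NULL group: 16 digits at scale 9, overflows decimal(15,9)
            System.out.println(castToDecimal(new BigDecimal("5018444.11392"), 15, 9)); // null
            // The sum for group 6981 fits: 13 digits at scale 9
            System.out.println(castToDecimal(new BigDecimal("-1031.24214"), 15, 9));   // -1031.242140000
        }
    }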

http://git-wip-us.apache.org/repos/asf/hive/blob/25aaf7db/ql/src/test/results/clientpositive/timestamp_1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/timestamp_1.q.out 
b/ql/src/test/results/clientpositive/timestamp_1.q.out
index d3ca5cf..fab69ec 100644
--- a/ql/src/test/results/clientpositive/timestamp_1.q.out
+++ b/ql/src/test/results/clientpositive/timestamp_1.q.out
@@ -46,7 +46,7 @@ POSTHOOK: query: select cast(t as tinyint) from timestamp_1 
limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
-77
+NULL
 PREHOOK: query: select cast(t as smallint) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
@@ -55,7 +55,7 @@ POSTHOOK: query: select cast(t as smallint) from timestamp_1 
limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
--4787
+NULL
 PREHOOK: query: select cast(t as int) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
@@ -129,7 +129,7 @@ POSTHOOK: query: select cast(t as tinyint) from timestamp_1 
limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
-77
+NULL
 PREHOOK: query: select cast(t as smallint) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
@@ -138,7 +138,7 @@ POSTHOOK: query: select cast(t as smallint) from 
timestamp_1 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
--4787
+NULL
 PREHOOK: query: select cast(t as int) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
@@ -212,7 +212,7 @@ POSTHOOK: query: select cast(t as tinyint) from timestamp_1 
limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
-77
+NULL
 PREHOOK: query: select cast(t as smallint) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
@@ -221,7 +221,7 @@ POSTHOOK: query: select cast(t as smallint) from 
timestamp_1 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
--4787
+NULL
 PREHOOK: query: select cast(t as int) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
@@ -295,7 +295,7 @@ POSTHOOK: query: select cast(t as tinyint) from timestamp_1 
limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
-77
+NULL
 PREHOOK: query: select cast(t as smallint) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
@@ -304,7 +304,7 @@ POSTHOOK: query: select cast(t as smallint) from 
timestamp_1 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
--4787
+NULL
 PREHOOK: query: select cast(t as int) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
@@ -378,7 +378,7 @@ POSTHOOK: query: select cast(t as tinyint) from timestamp_1 
limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
-77
+NULL
 PREHOOK: query: select cast(t as smallint) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
@@ -387,7 +387,7 @@ POSTHOOK: query: select cast(t as smallint) from 
timestamp_1 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
--4787
+NULL
 PREHOOK: query: select cast(t as int) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
@@ -461,7 +461,7 @@ POSTHOOK: query: select cast(t as tinyint) from timestamp_1 
limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
-77
+NULL
 PREHOOK: query: select cast(t as smallint) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
@@ -470,7 +470,7 @@ POSTHOOK: query: select cast(t as smallint) from 
timestamp_1 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_1
 #### A masked pattern was here ####
--4787
+NULL
 PREHOOK: query: select cast(t as int) from timestamp_1 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_1
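
Note on the changes above (the same change repeats for timestamp_2 and timestamp_3 
below): casting a timestamp to an integer type too narrow for its epoch-second value 
now returns NULL instead of a bit-truncated number. Assuming the test row is 
2011-01-01 01:01:01 US/Pacific, i.e. epoch second 1293872461, truncation reproduces 
both old values: (byte) 1293872461 == 77 and (short) 1293872461 == -4787. A 
hypothetical sketch of the new rule (not the actual Hive cast code):

    public class TimestampCastSketch {
        // New behavior: out-of-range narrowing casts yield SQL NULL.
        static Byte castToTinyint(long epochSeconds) {
            return (epochSeconds < Byte.MIN_VALUE || epochSeconds > Byte.MAX_VALUE)
                ? null : (byte) epochSeconds;
        }

        static Short castToSmallint(long epochSeconds) {
            return (epochSeconds < Short.MIN_VALUE || epochSeconds > Short.MAX_VALUE)
                ? null : (short) epochSeconds;
        }

        public static void main(String[] args) {
            long t = 1293872461L;                  // assumed epoch second of the test timestamp
            System.out.println((byte) t);          // 77    (old, wrapped result)
            System.out.println((short) t);         // -4787 (old, wrapped result)
            System.out.println(castToTinyint(t));  // null  (new result)
            System.out.println(castToSmallint(t)); // null  (new result)
        }
    }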

http://git-wip-us.apache.org/repos/asf/hive/blob/25aaf7db/ql/src/test/results/clientpositive/timestamp_2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/timestamp_2.q.out 
b/ql/src/test/results/clientpositive/timestamp_2.q.out
index f9bfb09..9a05dfe 100644
--- a/ql/src/test/results/clientpositive/timestamp_2.q.out
+++ b/ql/src/test/results/clientpositive/timestamp_2.q.out
@@ -46,7 +46,7 @@ POSTHOOK: query: select cast(t as tinyint) from timestamp_2 
limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
-77
+NULL
 PREHOOK: query: select cast(t as smallint) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2
@@ -55,7 +55,7 @@ POSTHOOK: query: select cast(t as smallint) from timestamp_2 
limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
--4787
+NULL
 PREHOOK: query: select cast(t as int) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2
@@ -129,7 +129,7 @@ POSTHOOK: query: select cast(t as tinyint) from timestamp_2 
limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
-77
+NULL
 PREHOOK: query: select cast(t as smallint) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2
@@ -138,7 +138,7 @@ POSTHOOK: query: select cast(t as smallint) from 
timestamp_2 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
--4787
+NULL
 PREHOOK: query: select cast(t as int) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2
@@ -212,7 +212,7 @@ POSTHOOK: query: select cast(t as tinyint) from timestamp_2 
limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
-77
+NULL
 PREHOOK: query: select cast(t as smallint) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2
@@ -221,7 +221,7 @@ POSTHOOK: query: select cast(t as smallint) from 
timestamp_2 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
--4787
+NULL
 PREHOOK: query: select cast(t as int) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2
@@ -295,7 +295,7 @@ POSTHOOK: query: select cast(t as tinyint) from timestamp_2 
limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
-77
+NULL
 PREHOOK: query: select cast(t as smallint) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2
@@ -304,7 +304,7 @@ POSTHOOK: query: select cast(t as smallint) from 
timestamp_2 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
--4787
+NULL
 PREHOOK: query: select cast(t as int) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2
@@ -378,7 +378,7 @@ POSTHOOK: query: select cast(t as tinyint) from timestamp_2 
limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
-77
+NULL
 PREHOOK: query: select cast(t as smallint) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2
@@ -387,7 +387,7 @@ POSTHOOK: query: select cast(t as smallint) from 
timestamp_2 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
--4787
+NULL
 PREHOOK: query: select cast(t as int) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2
@@ -461,7 +461,7 @@ POSTHOOK: query: select cast(t as tinyint) from timestamp_2 
limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
-77
+NULL
 PREHOOK: query: select cast(t as smallint) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2
@@ -470,7 +470,7 @@ POSTHOOK: query: select cast(t as smallint) from 
timestamp_2 limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_2
 #### A masked pattern was here ####
--4787
+NULL
 PREHOOK: query: select cast(t as int) from timestamp_2 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_2

http://git-wip-us.apache.org/repos/asf/hive/blob/25aaf7db/ql/src/test/results/clientpositive/timestamp_3.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/timestamp_3.q.out 
b/ql/src/test/results/clientpositive/timestamp_3.q.out
index 0664abf..6d59269 100644
--- a/ql/src/test/results/clientpositive/timestamp_3.q.out
+++ b/ql/src/test/results/clientpositive/timestamp_3.q.out
@@ -46,7 +46,7 @@ POSTHOOK: query: select cast(t as tinyint) from timestamp_3 
limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_3
 #### A masked pattern was here ####
-48
+NULL
 PREHOOK: query: select cast(t as smallint) from timestamp_3 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_3
@@ -55,7 +55,7 @@ POSTHOOK: query: select cast(t as smallint) from timestamp_3 
limit 1
 POSTHOOK: type: QUERY
 POSTHOOK: Input: default@timestamp_3
 #### A masked pattern was here ####
--31184
+NULL
 PREHOOK: query: select cast(t as int) from timestamp_3 limit 1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@timestamp_3

http://git-wip-us.apache.org/repos/asf/hive/blob/25aaf7db/ql/src/test/results/clientpositive/vector_decimal_aggregate.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/vector_decimal_aggregate.q.out 
b/ql/src/test/results/clientpositive/vector_decimal_aggregate.q.out
index 16c80f0..04c534e 100644
--- a/ql/src/test/results/clientpositive/vector_decimal_aggregate.q.out
+++ b/ql/src/test/results/clientpositive/vector_decimal_aggregate.q.out
@@ -379,7 +379,7 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: decimal_vgby_small
-            Statistics: Num rows: 12289 Data size: 346472 Basic stats: 
COMPLETE Column stats: NONE
+            Statistics: Num rows: 12289 Data size: 346462 Basic stats: 
COMPLETE Column stats: NONE
             TableScan Vectorization:
                 native: true
                 vectorizationSchemaColumns: [0:cdouble:double, 
1:cdecimal1:decimal(11,5)/DECIMAL_64, 2:cdecimal2:decimal(16,0)/DECIMAL_64, 
3:cint:int, 4:ROW__ID:struct<writeid:bigint,bucketid:int,rowid:bigint>]
@@ -390,7 +390,7 @@ STAGE PLANS:
                   className: VectorSelectOperator
                   native: true
                   projectedOutputColumnNums: [1, 2, 3]
-              Statistics: Num rows: 12289 Data size: 346472 Basic stats: 
COMPLETE Column stats: NONE
+              Statistics: Num rows: 12289 Data size: 346462 Basic stats: 
COMPLETE Column stats: NONE
               Group By Operator
                 aggregations: count(cdecimal1), max(cdecimal1), 
min(cdecimal1), sum(cdecimal1), count(cdecimal2), max(cdecimal2), 
min(cdecimal2), sum(cdecimal2), count()
                 Group By Vectorization:
@@ -404,7 +404,7 @@ STAGE PLANS:
                 keys: cint (type: int)
                 mode: hash
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, 
_col6, _col7, _col8, _col9
-                Statistics: Num rows: 12289 Data size: 346472 Basic stats: 
COMPLETE Column stats: NONE
+                Statistics: Num rows: 12289 Data size: 346462 Basic stats: 
COMPLETE Column stats: NONE
                 Reduce Output Operator
                   key expressions: _col0 (type: int)
                   sort order: +
@@ -414,7 +414,7 @@ STAGE PLANS:
                       native: false
                       nativeConditionsMet: 
hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, 
No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, 
LazyBinarySerDe for values IS true
                       nativeConditionsNotMet: hive.execution.engine mr IN 
[tez, spark] IS false
-                  Statistics: Num rows: 12289 Data size: 346472 Basic stats: 
COMPLETE Column stats: NONE
+                  Statistics: Num rows: 12289 Data size: 346462 Basic stats: 
COMPLETE Column stats: NONE
                   value expressions: _col1 (type: bigint), _col2 (type: 
decimal(11,5)), _col3 (type: decimal(11,5)), _col4 (type: decimal(21,5)), _col5 
(type: bigint), _col6 (type: decimal(16,0)), _col7 (type: decimal(16,0)), _col8 
(type: decimal(26,0)), _col9 (type: bigint)
       Execution mode: vectorized
       Map Vectorization:
@@ -442,17 +442,17 @@ STAGE PLANS:
           keys: KEY._col0 (type: int)
           mode: mergepartial
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, 
_col7, _col8, _col9
-          Statistics: Num rows: 6144 Data size: 173221 Basic stats: COMPLETE 
Column stats: NONE
+          Statistics: Num rows: 6144 Data size: 173216 Basic stats: COMPLETE 
Column stats: NONE
           Filter Operator
             predicate: (_col9 > 1L) (type: boolean)
-            Statistics: Num rows: 2048 Data size: 57740 Basic stats: COMPLETE 
Column stats: NONE
+            Statistics: Num rows: 2048 Data size: 57738 Basic stats: COMPLETE 
Column stats: NONE
             Select Operator
               expressions: _col0 (type: int), _col1 (type: bigint), _col2 
(type: decimal(11,5)), _col3 (type: decimal(11,5)), _col4 (type: 
decimal(21,5)), _col5 (type: bigint), _col6 (type: decimal(16,0)), _col7 (type: 
decimal(16,0)), _col8 (type: decimal(26,0))
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, 
_col6, _col7, _col8
-              Statistics: Num rows: 2048 Data size: 57740 Basic stats: 
COMPLETE Column stats: NONE
+              Statistics: Num rows: 2048 Data size: 57738 Basic stats: 
COMPLETE Column stats: NONE
               File Output Operator
                 compressed: false
-                Statistics: Num rows: 2048 Data size: 57740 Basic stats: 
COMPLETE Column stats: NONE
+                Statistics: Num rows: 2048 Data size: 57738 Basic stats: 
COMPLETE Column stats: NONE
                 table:
                     input format: 
org.apache.hadoop.mapred.SequenceFileInputFormat
                     output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
@@ -539,7 +539,7 @@ STAGE PLANS:
       Map Operator Tree:
           TableScan
             alias: decimal_vgby_small
-            Statistics: Num rows: 12289 Data size: 346472 Basic stats: 
COMPLETE Column stats: NONE
+            Statistics: Num rows: 12289 Data size: 346462 Basic stats: 
COMPLETE Column stats: NONE
             TableScan Vectorization:
                 native: true
                 vectorizationSchemaColumns: [0:cdouble:double, 
1:cdecimal1:decimal(11,5)/DECIMAL_64, 2:cdecimal2:decimal(16,0)/DECIMAL_64, 
3:cint:int, 4:ROW__ID:struct<writeid:bigint,bucketid:int,rowid:bigint>]
@@ -551,7 +551,7 @@ STAGE PLANS:
                   native: true
                   projectedOutputColumnNums: [3, 1, 2, 6, 9, 7, 12]
                   selectExpressions: CastDecimalToDouble(col 
5:decimal(11,5))(children: ConvertDecimal64ToDecimal(col 
1:decimal(11,5)/DECIMAL_64) -> 5:decimal(11,5)) -> 6:double, 
DoubleColMultiplyDoubleColumn(col 7:double, col 8:double)(children: 
CastDecimalToDouble(col 5:decimal(11,5))(children: 
ConvertDecimal64ToDecimal(col 1:decimal(11,5)/DECIMAL_64) -> 5:decimal(11,5)) 
-> 7:double, CastDecimalToDouble(col 5:decimal(11,5))(children: 
ConvertDecimal64ToDecimal(col 1:decimal(11,5)/DECIMAL_64) -> 5:decimal(11,5)) 
-> 8:double) -> 9:double, CastDecimalToDouble(col 10:decimal(16,0))(children: 
ConvertDecimal64ToDecimal(col 2:decimal(16,0)/DECIMAL_64) -> 10:decimal(16,0)) 
-> 7:double, DoubleColMultiplyDoubleColumn(col 8:double, col 
11:double)(children: CastDecimalToDouble(col 10:decimal(16,0))(children: 
ConvertDecimal64ToDecimal(col 2:decimal(16,0)/DECIMAL_64) -> 10:decimal(16,0)) 
-> 8:double, CastDecimalToDouble(col 10:decimal(16,0))(children: 
ConvertDecimal64ToDecimal(col 2:decimal(16,0)/DECIMAL_64) -> 10:decimal(16,0)) -> 11:double) -> 12:double
-              Statistics: Num rows: 12289 Data size: 346472 Basic stats: 
COMPLETE Column stats: NONE
+              Statistics: Num rows: 12289 Data size: 346462 Basic stats: 
COMPLETE Column stats: NONE
               Group By Operator
                 aggregations: count(_col1), max(_col1), min(_col1), 
sum(_col1), sum(_col4), sum(_col3), count(_col2), max(_col2), min(_col2), 
sum(_col2), sum(_col6), sum(_col5), count()
                 Group By Vectorization:
@@ -565,7 +565,7 @@ STAGE PLANS:
                 keys: _col0 (type: int)
                 mode: hash
                 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, 
_col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13
-                Statistics: Num rows: 12289 Data size: 346472 Basic stats: 
COMPLETE Column stats: NONE
+                Statistics: Num rows: 12289 Data size: 346462 Basic stats: 
COMPLETE Column stats: NONE
                 Reduce Output Operator
                   key expressions: _col0 (type: int)
                   sort order: +
@@ -575,7 +575,7 @@ STAGE PLANS:
                       native: false
                       nativeConditionsMet: 
hive.vectorized.execution.reducesink.new.enabled IS true, No PTF TopN IS true, 
No DISTINCT columns IS true, BinarySortableSerDe for keys IS true, 
LazyBinarySerDe for values IS true
                       nativeConditionsNotMet: hive.execution.engine mr IN 
[tez, spark] IS false
-                  Statistics: Num rows: 12289 Data size: 346472 Basic stats: 
COMPLETE Column stats: NONE
+                  Statistics: Num rows: 12289 Data size: 346462 Basic stats: 
COMPLETE Column stats: NONE
                   value expressions: _col1 (type: bigint), _col2 (type: 
decimal(11,5)), _col3 (type: decimal(11,5)), _col4 (type: decimal(21,5)), _col5 
(type: double), _col6 (type: double), _col7 (type: bigint), _col8 (type: 
decimal(16,0)), _col9 (type: decimal(16,0)), _col10 (type: decimal(26,0)), 
_col11 (type: double), _col12 (type: double), _col13 (type: bigint)
       Execution mode: vectorized
       Map Vectorization:
@@ -603,17 +603,17 @@ STAGE PLANS:
           keys: KEY._col0 (type: int)
           mode: mergepartial
           outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, 
_col7, _col8, _col9, _col10, _col11, _col12, _col13
-          Statistics: Num rows: 6144 Data size: 173221 Basic stats: COMPLETE 
Column stats: NONE
+          Statistics: Num rows: 6144 Data size: 173216 Basic stats: COMPLETE 
Column stats: NONE
           Filter Operator
             predicate: (_col13 > 1L) (type: boolean)
-            Statistics: Num rows: 2048 Data size: 57740 Basic stats: COMPLETE 
Column stats: NONE
+            Statistics: Num rows: 2048 Data size: 57738 Basic stats: COMPLETE 
Column stats: NONE
             Select Operator
               expressions: _col0 (type: int), _col1 (type: bigint), _col2 
(type: decimal(11,5)), _col3 (type: decimal(11,5)), _col4 (type: 
decimal(21,5)), (CAST( _col4 AS decimal(15,9)) / _col1) (type: decimal(35,29)), 
power(((_col5 - ((_col6 * _col6) / _col1)) / _col1), 0.5) (type: double), 
power(((_col5 - ((_col6 * _col6) / _col1)) / CASE WHEN ((_col1 = 1L)) THEN 
(null) ELSE ((_col1 - 1)) END), 0.5) (type: double), _col7 (type: bigint), 
_col8 (type: decimal(16,0)), _col9 (type: decimal(16,0)), _col10 (type: 
decimal(26,0)), CAST( (CAST( _col10 AS decimal(20,4)) / _col7) AS 
decimal(20,4)) (type: decimal(20,4)), power(((_col11 - ((_col12 * _col12) / 
_col7)) / _col7), 0.5) (type: double), power(((_col11 - ((_col12 * _col12) / 
_col7)) / CASE WHEN ((_col7 = 1L)) THEN (null) ELSE ((_col7 - 1)) END), 0.5) 
(type: double)
               outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, 
_col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14
-              Statistics: Num rows: 2048 Data size: 57740 Basic stats: 
COMPLETE Column stats: NONE
+              Statistics: Num rows: 2048 Data size: 57738 Basic stats: 
COMPLETE Column stats: NONE
               File Output Operator
                 compressed: false
-                Statistics: Num rows: 2048 Data size: 57740 Basic stats: 
COMPLETE Column stats: NONE
+                Statistics: Num rows: 2048 Data size: 57738 Basic stats: 
COMPLETE Column stats: NONE
                 table:
                     input format: 
org.apache.hadoop.mapred.SequenceFileInputFormat
                     output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat

http://git-wip-us.apache.org/repos/asf/hive/blob/25aaf7db/vector-code-gen/src/org/apache/hadoop/hive/tools/GenVectorCode.java
----------------------------------------------------------------------
diff --git 
a/vector-code-gen/src/org/apache/hadoop/hive/tools/GenVectorCode.java 
b/vector-code-gen/src/org/apache/hadoop/hive/tools/GenVectorCode.java
index b5220a0..45fa739 100644
--- a/vector-code-gen/src/org/apache/hadoop/hive/tools/GenVectorCode.java
+++ b/vector-code-gen/src/org/apache/hadoop/hive/tools/GenVectorCode.java
@@ -1035,8 +1035,6 @@ public class GenVectorCode extends Task {
         "", "", ""},
       {"ColumnUnaryFunc", "CastLongToBooleanVia", "long", "long", 
"MathExpr.toBool", "",
         "", "", ""},
-      {"ColumnUnaryFunc", "CastDateToBooleanVia", "long", "long", 
"MathExpr.toBool", "",
-            "", "", "date"},
 
       // Boolean to long is done with an IdentityExpression
       // Boolean to double is done with standard Long to Double cast
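
Note on the removal above: GenVectorCode expands each row of this descriptor table 
into a generated vectorized expression class, so deleting the row stops a 
CastDateToBooleanVia class from being generated and drops the dedicated vectorized 
date-to-boolean cast. A sketch of how such a row reads, with field meanings inferred 
from the neighboring entries (the annotations are interpretation, not Hive 
documentation):

    String[] removedRow = {
        "ColumnUnaryFunc",        // template to expand
        "CastDateToBooleanVia",   // generated class-name prefix
        "long", "long",           // operand/result vector types (booleans and dates live in long vectors)
        "MathExpr.toBool",        // function applied to each element
        "", "", "",               // unused template slots in this row
        "date"                    // qualifier marking the operand as a date
    };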
