hive git commit: HIVE-18817 - ArrayIndexOutOfBounds exception during read of ACID table. (Eugene Koifman, Jason Dere, Prasanth Jayachandran, reviewed by Jason Dere)

2018-03-02 Thread ekoifman
Repository: hive
Updated Branches:
  refs/heads/master fffbec065 -> 61e21d6c3


HIVE-18817 - ArrayIndexOutOfBounds exception during read of ACID table. (Eugene Koifman, Jason Dere, Prasanth Jayachandran, reviewed by Jason Dere)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/61e21d6c
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/61e21d6c
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/61e21d6c

Branch: refs/heads/master
Commit: 61e21d6c3039087b83609577325175b1f603b50f
Parents: fffbec0
Author: Eugene Koifman 
Authored: Fri Mar 2 09:11:01 2018 -0800
Committer: Eugene Koifman 
Committed: Fri Mar 2 09:11:01 2018 -0800

--
 .../hadoop/hive/ql/io/orc/OrcRecordUpdater.java |  17 +++
 .../hive/ql/io/orc/TestInputOutputFormat.java   | 123 ++-
 .../results/clientpositive/acid_nullscan.q.out  |   8 +-
 .../clientpositive/autoColumnStats_4.q.out  |   4 +-
 .../llap/acid_bucket_pruning.q.out  |   4 +-
 .../test/results/clientpositive/row__id.q.out   |  18 +--
 6 files changed, 155 insertions(+), 19 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/61e21d6c/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRecordUpdater.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRecordUpdater.java 
b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRecordUpdater.java
index 970af0e..d850062 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRecordUpdater.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcRecordUpdater.java
@@ -47,6 +47,7 @@ import org.apache.hadoop.io.LongWritable;
 import org.apache.orc.OrcConf;
 import org.apache.orc.impl.AcidStats;
 import org.apache.orc.impl.OrcAcidUtils;
+import org.apache.orc.impl.WriterImpl;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -559,6 +560,17 @@ public class OrcRecordUpdater implements RecordUpdater {
 int lastBucket;
 long lastRowId;
 AcidStats acidStats = new AcidStats();
+/**
+ *  {@link #preStripeWrite(OrcFile.WriterContext)} is normally called by 
the
+ *  {@link org.apache.orc.MemoryManager} except on close().
+ *  {@link org.apache.orc.impl.WriterImpl#close()} calls preFooterWrite() 
before it calls
+ *  {@link WriterImpl#flushStripe()} which causes the {@link 
#ACID_KEY_INDEX_NAME} index to
+ *  have the last entry missing.  It should be also fixed in ORC but that 
requires upgrading
+ *  the ORC jars to have effect.
+ *
+ *  This is used to decide if we need to make preStripeWrite() call here.
+ */
+private long numKeysCurrentStripe = 0;
 
 KeyIndexBuilder(String name) {
   this.builderName = name;
@@ -572,11 +584,15 @@ public class OrcRecordUpdater implements RecordUpdater {
   lastKey.append(',');
   lastKey.append(lastRowId);
   lastKey.append(';');
+  numKeysCurrentStripe = 0;
 }
 
 @Override
 public void preFooterWrite(OrcFile.WriterContext context
) throws IOException {
+  if(numKeysCurrentStripe > 0) {
+preStripeWrite(context);
+  }
   context.getWriter().addUserMetadata(ACID_KEY_INDEX_NAME,
   UTF8.encode(lastKey.toString()));
   context.getWriter().addUserMetadata(OrcAcidUtils.ACID_STATS,
@@ -600,6 +616,7 @@ public class OrcRecordUpdater implements RecordUpdater {
   lastTransaction = transaction;
   lastBucket = bucket;
   lastRowId = rowId;
+  numKeysCurrentStripe++;
 }
   }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/61e21d6c/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java
--
diff --git 
a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java 
b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java
index 0ac29fa..073b072 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java
@@ -48,6 +48,7 @@ import org.apache.commons.codec.binary.Base64;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.*;
 import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.hive.common.ValidTxnList;
 import org.apache.hadoop.hive.common.ValidWriteIdList;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -68,6 +69,7 @@ import 
org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatchCtx;
 import org.apache.hadoop.hive.ql.io.AcidInputFormat;
 import org.apache.hadoop.hive.ql.io.AcidOutputFormat;
 import org.apache.hadoop.hive.ql.io.AcidUtils;
+import org.apache.hadoop.hive.ql.io.BucketCodec;
 

hive git commit: HIVE-18826: fix TestEncryptedHDFSCliDriver.testCliDriver[encryption_move_tbl] (Jesus Camacho Rodriguez, reviewed by Zoltan Haindrich)

2018-03-02 Thread jcamacho
Repository: hive
Updated Branches:
  refs/heads/master 61e21d6c3 -> fc8a45bb0


HIVE-18826: fix TestEncryptedHDFSCliDriver.testCliDriver[encryption_move_tbl] (Jesus Camacho Rodriguez, reviewed by Zoltan Haindrich)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/fc8a45bb
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/fc8a45bb
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/fc8a45bb

Branch: refs/heads/master
Commit: fc8a45bb0ab736a7a9f5c7ea921832484b9287f9
Parents: 61e21d6
Author: Jesus Camacho Rodriguez 
Authored: Fri Mar 2 09:04:35 2018 -0800
Committer: Jesus Camacho Rodriguez 
Committed: Fri Mar 2 09:56:21 2018 -0800

--
 .../src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java   | 8 +---
 1 file changed, 5 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/fc8a45bb/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
--
diff --git a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java 
b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
index 45602a2..255bd5f 100644
--- a/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
+++ b/itests/util/src/main/java/org/apache/hadoop/hive/ql/QTestUtil.java
@@ -1149,15 +1149,17 @@ public class QTestUtil {
   createRemoteDirs();
 }
 
-// Create views registry
-HiveMaterializedViewsRegistry.get().init();
-
 testWarehouse = conf.getVar(HiveConf.ConfVars.METASTOREWAREHOUSE);
 String execEngine = conf.get("hive.execution.engine");
 conf.set("hive.execution.engine", "mr");
 SessionState.start(conf);
 conf.set("hive.execution.engine", execEngine);
 db = Hive.get(conf);
+// Create views registry
+String registryImpl = 
db.getConf().get("hive.server2.materializedviews.registry.impl");
+db.getConf().set("hive.server2.materializedviews.registry.impl", "DUMMY");
+HiveMaterializedViewsRegistry.get().init(db);
+db.getConf().set("hive.server2.materializedviews.registry.impl", 
registryImpl);
 drv = DriverFactory.newDriver(conf);
 pd = new ParseDriver();
 sem = new SemanticAnalyzer(queryState);



[01/26] hive git commit: HIVE-18797 : ExprConstNodeDesc's getExprString should put appropriate qualifier with literals (Vineet Garg, reviewed by Ashutosh Chauhan)

2018-03-02 Thread vgarg
Repository: hive
Updated Branches:
  refs/heads/master fc8a45bb0 -> 17441e485


http://git-wip-us.apache.org/repos/asf/hive/blob/17441e48/ql/src/test/results/clientpositive/vectorization_7.q.out
--
diff --git a/ql/src/test/results/clientpositive/vectorization_7.q.out 
b/ql/src/test/results/clientpositive/vectorization_7.q.out
index 51d2b45..fcf7eec 100644
--- a/ql/src/test/results/clientpositive/vectorization_7.q.out
+++ b/ql/src/test/results/clientpositive/vectorization_7.q.out
@@ -75,7 +75,7 @@ STAGE PLANS:
   className: VectorFilterOperator
   native: true
   predicateExpression: FilterExprAndExpr(children: 
FilterLongColNotEqualLongScalar(col 0:tinyint, val 0), 
FilterExprOrExpr(children: FilterDoubleColLessEqualDoubleScalar(col 13:double, 
val 0.0)(children: CastTimestampToDouble(col 8:timestamp) -> 13:double), 
FilterLongColEqualLongColumn(col 0:int, col 2:int)(children: col 0:tinyint), 
FilterStringColLikeStringScalar(col 7:string, pattern ss)), 
FilterExprOrExpr(children: FilterDoubleScalarLessDoubleColumn(val 98.0, col 
5:double), FilterExprAndExpr(children: FilterDoubleColGreaterDoubleScalar(col 
13:double, val -15.0)(children: CastTimestampToDouble(col 9:timestamp) -> 
13:double), FilterDoubleScalarGreaterEqualDoubleColumn(val 3569.0, col 
5:double
-  predicate: (((98.0 < cdouble) or ((UDFToDouble(ctimestamp2) 
> -15.0) and (3569.0 >= cdouble))) and ((UDFToDouble(ctimestamp1) <= 0.0) or 
(UDFToInteger(ctinyint) = cint) or (cstring2 like 'ss')) and (ctinyint <> 0)) 
(type: boolean)
+  predicate: (((98.0D < cdouble) or ((UDFToDouble(ctimestamp2) 
> -15.0D) and (3569.0D >= cdouble))) and ((UDFToDouble(ctimestamp1) <= 0.0D) or 
(UDFToInteger(ctinyint) = cint) or (cstring2 like 'ss')) and (ctinyint <> 0Y)) 
(type: boolean)
   Statistics: Num rows: 5461 Data size: 1174134 Basic stats: 
COMPLETE Column stats: NONE
   Select Operator
 expressions: cboolean1 (type: boolean), cbigint (type: 
bigint), csmallint (type: smallint), ctinyint (type: tinyint), ctimestamp1 
(type: timestamp), cstring1 (type: string), (cbigint + cbigint) (type: bigint), 
(UDFToInteger(csmallint) % -257) (type: int), (- csmallint) (type: smallint), 
(- ctinyint) (type: tinyint), (UDFToInteger((- ctinyint)) + 17) (type: int), 
(cbigint * UDFToLong((- csmallint))) (type: bigint), (cint % 
UDFToInteger(csmallint)) (type: int), (- ctinyint) (type: tinyint), ((- 
ctinyint) % ctinyint) (type: tinyint)
@@ -295,7 +295,7 @@ STAGE PLANS:
   className: VectorFilterOperator
   native: true
   predicateExpression: FilterExprAndExpr(children: 
FilterLongColNotEqualLongScalar(col 0:tinyint, val 0), 
FilterExprOrExpr(children: FilterDoubleColLessEqualDoubleScalar(col 13:double, 
val 0.0)(children: CastTimestampToDouble(col 8:timestamp) -> 13:double), 
FilterLongColEqualLongColumn(col 0:int, col 2:int)(children: col 0:tinyint), 
FilterStringColLikeStringScalar(col 7:string, pattern ss)), 
FilterExprOrExpr(children: FilterDoubleScalarLessDoubleColumn(val 98.0, col 
5:double), FilterExprAndExpr(children: FilterDoubleColGreaterDoubleScalar(col 
13:double, val 7.6855)(children: CastTimestampToDouble(col 
9:timestamp) -> 13:double), FilterDoubleScalarGreaterEqualDoubleColumn(val 
3569.0, col 5:double
-  predicate: (((98.0 < cdouble) or ((UDFToDouble(ctimestamp2) 
> 7.6855) and (3569.0 >= cdouble))) and ((UDFToDouble(ctimestamp1) 
<= 0.0) or (UDFToInteger(ctinyint) = cint) or (cstring2 like 'ss')) and 
(ctinyint <> 0)) (type: boolean)
+  predicate: (((98.0D < cdouble) or ((UDFToDouble(ctimestamp2) 
> 7.6855D) and (3569.0D >= cdouble))) and 
((UDFToDouble(ctimestamp1) <= 0.0D) or (UDFToInteger(ctinyint) = cint) or 
(cstring2 like 'ss')) and (ctinyint <> 0Y)) (type: boolean)
   Statistics: Num rows: 5461 Data size: 1174134 Basic stats: 
COMPLETE Column stats: NONE
   Select Operator
 expressions: cboolean1 (type: boolean), cbigint (type: 
bigint), csmallint (type: smallint), ctinyint (type: tinyint), ctimestamp1 
(type: timestamp), cstring1 (type: string), (cbigint + cbigint) (type: bigint), 
(UDFToInteger(csmallint) % -257) (type: int), (- csmallint) (type: smallint), 
(- ctinyint) (type: tinyint), (UDFToInteger((- ctinyint)) + 17) (type: int), 
(cbigint * UDFToLong((- csmallint))) (type: bigint), (cint % 
UDFToInteger(csmallint)) (type: int), (- ctinyint) (type: tinyint), ((- 
ctinyint) % ctinyint) (type: tinyint)

http://git-wip-us.apache.org/repos/asf/hive/blob/17441e48/ql/src/test/results/clientpositive/vectorization_8.q.out
--
diff --git a/ql/src/test/results/clientpositive/vectorization_8.q.out 
b/ql/src/test/results/clientpositive/vectorization_8.q.out

[23/26] hive git commit: HIVE-18797 : ExprConstNodeDesc's getExprString should put appropriate qualifier with literals (Vineet Garg, reviewed by Ashutosh Chauhan)

2018-03-02 Thread vgarg
http://git-wip-us.apache.org/repos/asf/hive/blob/17441e48/ql/src/test/results/clientpositive/having2.q.out
--
diff --git a/ql/src/test/results/clientpositive/having2.q.out 
b/ql/src/test/results/clientpositive/having2.q.out
index 67f8af8..12fae67 100644
--- a/ql/src/test/results/clientpositive/having2.q.out
+++ b/ql/src/test/results/clientpositive/having2.q.out
@@ -155,7 +155,7 @@ STAGE PLANS:
   outputColumnNames: _col0, _col1, _col2, _col3
   Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column 
stats: NONE
   Filter Operator
-predicate: ((_col1 <= 4074689.00041) and (_col3 <= 822)) 
(type: boolean)
+predicate: ((_col1 <= 4074689.00041D) and (_col3 <= 822L)) 
(type: boolean)
 Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column 
stats: NONE
 Select Operator
   expressions: _col0 (type: string), _col1 (type: double), _col2 
(type: double)
@@ -222,7 +222,7 @@ STAGE PLANS:
   outputColumnNames: _col0, _col1, _col2, _col3
   Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column 
stats: NONE
   Filter Operator
-predicate: ((_col1 <= 4074689.00041) and (_col3 <= 822)) 
(type: boolean)
+predicate: ((_col1 <= 4074689.00041D) and (_col3 <= 822L)) 
(type: boolean)
 Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column 
stats: NONE
 Select Operator
   expressions: _col0 (type: string), _col1 (type: double), _col2 
(type: double)
@@ -341,7 +341,7 @@ STAGE PLANS:
   outputColumnNames: _col0, _col1, _col2, _col3
   Statistics: Num rows: 275 Data size: 2921 Basic stats: PARTIAL 
Column stats: NONE
   Filter Operator
-predicate: ((_col1 <= 4074689.00041) and (_col2 <= 822.0) and 
(_col3 > 4)) (type: boolean)
+predicate: ((_col1 <= 4074689.00041D) and (_col2 <= 822.0D) 
and (_col3 > 4L)) (type: boolean)
 Statistics: Num rows: 10 Data size: 106 Basic stats: PARTIAL 
Column stats: NONE
 Select Operator
   expressions: _col0 (type: string)
@@ -464,7 +464,7 @@ STAGE PLANS:
 outputColumnNames: _col1, _col2, _col3, _col4
 Statistics: Num rows: 275 Data size: 2921 Basic stats: PARTIAL 
Column stats: NONE
 Filter Operator
-  predicate: ((_col2 <= 4074689.00041) and (_col3 <= 822.0) 
and (_col4 > 4)) (type: boolean)
+  predicate: ((_col2 <= 4074689.00041D) and (_col3 <= 822.0D) 
and (_col4 > 4L)) (type: boolean)
   Statistics: Num rows: 10 Data size: 106 Basic stats: PARTIAL 
Column stats: NONE
   Select Operator
 expressions: _col1 (type: string)
@@ -587,7 +587,7 @@ STAGE PLANS:
 outputColumnNames: _col1, _col2, _col3, _col4
 Statistics: Num rows: 275 Data size: 2921 Basic stats: PARTIAL 
Column stats: NONE
 Filter Operator
-  predicate: ((_col2 <= 4074689.00041) and (_col3 <= 822.0) 
and (_col4 > 4)) (type: boolean)
+  predicate: ((_col2 <= 4074689.00041D) and (_col3 <= 822.0D) 
and (_col4 > 4L)) (type: boolean)
   Statistics: Num rows: 10 Data size: 106 Basic stats: PARTIAL 
Column stats: NONE
   Select Operator
 expressions: _col1 (type: string), _col1 (type: string)

http://git-wip-us.apache.org/repos/asf/hive/blob/17441e48/ql/src/test/results/clientpositive/implicit_cast1.q.out
--
diff --git a/ql/src/test/results/clientpositive/implicit_cast1.q.out 
b/ql/src/test/results/clientpositive/implicit_cast1.q.out
index 6e1706a..1e62fe3 100644
--- a/ql/src/test/results/clientpositive/implicit_cast1.q.out
+++ b/ql/src/test/results/clientpositive/implicit_cast1.q.out
@@ -28,7 +28,7 @@ STAGE PLANS:
 alias: implicit_test1
 Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column 
stats: NONE
 Filter Operator
-  predicate: (a <> 0) (type: boolean)
+  predicate: (a <> 0L) (type: boolean)
   Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column 
stats: NONE
   Select Operator
 expressions: a (type: bigint), b (type: string)

http://git-wip-us.apache.org/repos/asf/hive/blob/17441e48/ql/src/test/results/clientpositive/infer_bucket_sort_dyn_part.q.out
--
diff --git 
a/ql/src/test/results/clientpositive/infer_bucket_sort_dyn_part.q.out 
b/ql/src/test/results/clientpositive/infer_bucket_sort_dyn_part.q.out
index 80e1455..b1250d3 100644
--- a/ql/src/test/results/clientpositive/infer_bucket_sort_dyn_part.q.out
+++ b/ql/src/test/results/clientpositive/infer_bucket_sort_dyn_part.q.out
@@ -452,7 +452,7 @

[20/26] hive git commit: HIVE-18797 : ExprConstNodeDesc's getExprString should put appropriate qualifier with literals (Vineet Garg, reviewed by Ashutosh Chauhan)

2018-03-02 Thread vgarg
http://git-wip-us.apache.org/repos/asf/hive/blob/17441e48/ql/src/test/results/clientpositive/llap/multi_count_distinct_null.q.out
--
diff --git 
a/ql/src/test/results/clientpositive/llap/multi_count_distinct_null.q.out 
b/ql/src/test/results/clientpositive/llap/multi_count_distinct_null.q.out
index 66bf74f..c210c4c 100644
--- a/ql/src/test/results/clientpositive/llap/multi_count_distinct_null.q.out
+++ b/ql/src/test/results/clientpositive/llap/multi_count_distinct_null.q.out
@@ -48,7 +48,7 @@ STAGE PLANS:
 outputColumnNames: _col0, _col1, _col2
 Statistics: Num rows: 12 Data size: 1023 Basic stats: 
COMPLETE Column stats: COMPLETE
 Group By Operator
-  keys: _col0 (type: int), _col1 (type: varchar(10)), 
_col2 (type: int), 0 (type: bigint)
+  keys: _col0 (type: int), _col1 (type: varchar(10)), 
_col2 (type: int), 0L (type: bigint)
   mode: hash
   outputColumnNames: _col0, _col1, _col2, _col3
   Statistics: Num rows: 18 Data size: 1628 Basic stats: 
COMPLETE Column stats: COMPLETE
@@ -68,7 +68,7 @@ STAGE PLANS:
 outputColumnNames: _col0, _col1, _col2, _col3
 Statistics: Num rows: 18 Data size: 1628 Basic stats: COMPLETE 
Column stats: COMPLETE
 Select Operator
-  expressions: CASE WHEN (((_col3 = 3) and _col0 is not null)) 
THEN (1) ELSE (null) END (type: int), CASE WHEN (((_col3 = 5) and _col1 is not 
null)) THEN (1) ELSE (null) END (type: int), CASE WHEN (((_col3 = 6) and _col2 
is not null)) THEN (1) ELSE (null) END (type: int)
+  expressions: CASE WHEN (((_col3 = 3L) and _col0 is not 
null)) THEN (1) ELSE (null) END (type: int), CASE WHEN (((_col3 = 5L) and _col1 
is not null)) THEN (1) ELSE (null) END (type: int), CASE WHEN (((_col3 = 6L) 
and _col2 is not null)) THEN (1) ELSE (null) END (type: int)
   outputColumnNames: _col0, _col1, _col2
   Statistics: Num rows: 18 Data size: 1628 Basic stats: 
COMPLETE Column stats: COMPLETE
   Group By Operator
@@ -189,7 +189,7 @@ STAGE PLANS:
 outputColumnNames: _col0, _col1, _col2
 Statistics: Num rows: 12 Data size: 1023 Basic stats: 
COMPLETE Column stats: COMPLETE
 Group By Operator
-  keys: _col0 (type: varchar(10)), _col1 (type: int), 
_col2 (type: int), 0 (type: bigint)
+  keys: _col0 (type: varchar(10)), _col1 (type: int), 
_col2 (type: int), 0L (type: bigint)
   mode: hash
   outputColumnNames: _col0, _col1, _col2, _col3
   Statistics: Num rows: 30 Data size: 2654 Basic stats: 
COMPLETE Column stats: COMPLETE
@@ -209,7 +209,7 @@ STAGE PLANS:
 outputColumnNames: _col0, _col1, _col2, _col3
 Statistics: Num rows: 30 Data size: 2654 Basic stats: COMPLETE 
Column stats: COMPLETE
 Select Operator
-  expressions: CASE WHEN (((_col3 = 3) and _col0 is not null)) 
THEN (1) ELSE (null) END (type: int), CASE WHEN (((_col3 = 5) and _col1 is not 
null)) THEN (1) ELSE (null) END (type: int), CASE WHEN (((_col3 = 6) and _col2 
is not null)) THEN (1) ELSE (null) END (type: int), CASE WHEN ((_col3 = 4)) 
THEN (1) ELSE (null) END (type: int), CASE WHEN ((_col3 = 0)) THEN (1) ELSE 
(null) END (type: int)
+  expressions: CASE WHEN (((_col3 = 3L) and _col0 is not 
null)) THEN (1) ELSE (null) END (type: int), CASE WHEN (((_col3 = 5L) and _col1 
is not null)) THEN (1) ELSE (null) END (type: int), CASE WHEN (((_col3 = 6L) 
and _col2 is not null)) THEN (1) ELSE (null) END (type: int), CASE WHEN ((_col3 
= 4L)) THEN (1) ELSE (null) END (type: int), CASE WHEN ((_col3 = 0L)) THEN (1) 
ELSE (null) END (type: int)
   outputColumnNames: _col0, _col1, _col2, _col3, _col4
   Statistics: Num rows: 30 Data size: 2654 Basic stats: 
COMPLETE Column stats: COMPLETE
   Group By Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/17441e48/ql/src/test/results/clientpositive/llap/offset_limit_ppd_optimizer.q.out
--
diff --git 
a/ql/src/test/results/clientpositive/llap/offset_limit_ppd_optimizer.q.out 
b/ql/src/test/results/clientpositive/llap/offset_limit_ppd_optimizer.q.out
index ec3c286..cce6bc3 100644
--- a/ql/src/test/results/clientpositive/llap/offset_limit_ppd_optimizer.q.out
+++ b/ql/src/test/results/clientpositive/llap/offset_limit_ppd_optimizer.q.out
@@ -198,7 +198,7 @@ STAGE PLANS:
   alias: src
   Statistics: Num rows: 500 Data size: 89000 Basic stats: 
COMPLETE Column stats: COMPLETE
   Select Operator
-expressi

[02/26] hive git commit: HIVE-18797 : ExprConstNodeDesc's getExprString should put appropriate qualifier with literals (Vineet Garg, reviewed by Ashutosh Chauhan)

2018-03-02 Thread vgarg
http://git-wip-us.apache.org/repos/asf/hive/blob/17441e48/ql/src/test/results/clientpositive/vector_decimal_aggregate.q.out
--
diff --git a/ql/src/test/results/clientpositive/vector_decimal_aggregate.q.out 
b/ql/src/test/results/clientpositive/vector_decimal_aggregate.q.out
index 2f7ce31..8244871 100644
--- a/ql/src/test/results/clientpositive/vector_decimal_aggregate.q.out
+++ b/ql/src/test/results/clientpositive/vector_decimal_aggregate.q.out
@@ -127,7 +127,7 @@ STAGE PLANS:
   outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, 
_col7, _col8, _col9
   Statistics: Num rows: 6144 Data size: 1082441 Basic stats: COMPLETE 
Column stats: NONE
   Filter Operator
-predicate: (_col9 > 1) (type: boolean)
+predicate: (_col9 > 1L) (type: boolean)
 Statistics: Num rows: 2048 Data size: 360813 Basic stats: COMPLETE 
Column stats: NONE
 Select Operator
   expressions: _col0 (type: int), _col1 (type: bigint), _col2 
(type: decimal(20,10)), _col3 (type: decimal(20,10)), _col4 (type: 
decimal(30,10)), _col5 (type: bigint), _col6 (type: decimal(23,14)), _col7 
(type: decimal(23,14)), _col8 (type: decimal(33,14))
@@ -268,7 +268,7 @@ STAGE PLANS:
   outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, 
_col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15
   Statistics: Num rows: 6144 Data size: 1082441 Basic stats: COMPLETE 
Column stats: NONE
   Filter Operator
-predicate: (_col15 > 1) (type: boolean)
+predicate: (_col15 > 1L) (type: boolean)
 Statistics: Num rows: 2048 Data size: 360813 Basic stats: COMPLETE 
Column stats: NONE
 Select Operator
   expressions: _col0 (type: int), _col1 (type: bigint), _col2 
(type: decimal(20,10)), _col3 (type: decimal(20,10)), _col4 (type: 
decimal(30,10)), _col5 (type: decimal(24,14)), _col6 (type: double), _col7 
(type: double), _col8 (type: bigint), _col9 (type: decimal(23,14)), _col10 
(type: decimal(23,14)), _col11 (type: decimal(33,14)), _col12 (type: 
decimal(27,18)), _col13 (type: double), _col14 (type: double)
@@ -443,7 +443,7 @@ STAGE PLANS:
   outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, 
_col7, _col8, _col9
   Statistics: Num rows: 6144 Data size: 173221 Basic stats: COMPLETE 
Column stats: NONE
   Filter Operator
-predicate: (_col9 > 1) (type: boolean)
+predicate: (_col9 > 1L) (type: boolean)
 Statistics: Num rows: 2048 Data size: 57740 Basic stats: COMPLETE 
Column stats: NONE
 Select Operator
   expressions: _col0 (type: int), _col1 (type: bigint), _col2 
(type: decimal(11,5)), _col3 (type: decimal(11,5)), _col4 (type: 
decimal(21,5)), _col5 (type: bigint), _col6 (type: decimal(16,0)), _col7 (type: 
decimal(16,0)), _col8 (type: decimal(26,0))
@@ -603,7 +603,7 @@ STAGE PLANS:
   outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, 
_col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15
   Statistics: Num rows: 6144 Data size: 173221 Basic stats: COMPLETE 
Column stats: NONE
   Filter Operator
-predicate: (_col15 > 1) (type: boolean)
+predicate: (_col15 > 1L) (type: boolean)
 Statistics: Num rows: 2048 Data size: 57740 Basic stats: COMPLETE 
Column stats: NONE
 Select Operator
   expressions: _col0 (type: int), _col1 (type: bigint), _col2 
(type: decimal(11,5)), _col3 (type: decimal(11,5)), _col4 (type: 
decimal(21,5)), _col5 (type: decimal(15,9)), _col6 (type: double), _col7 (type: 
double), _col8 (type: bigint), _col9 (type: decimal(16,0)), _col10 (type: 
decimal(16,0)), _col11 (type: decimal(26,0)), _col12 (type: decimal(20,4)), 
_col13 (type: double), _col14 (type: double)

http://git-wip-us.apache.org/repos/asf/hive/blob/17441e48/ql/src/test/results/clientpositive/vector_decimal_math_funcs.q.out
--
diff --git a/ql/src/test/results/clientpositive/vector_decimal_math_funcs.q.out 
b/ql/src/test/results/clientpositive/vector_decimal_math_funcs.q.out
index 0ee65eb..b8581e4 100644
--- a/ql/src/test/results/clientpositive/vector_decimal_math_funcs.q.out
+++ b/ql/src/test/results/clientpositive/vector_decimal_math_funcs.q.out
@@ -121,10 +121,10 @@ STAGE PLANS:
   className: VectorFilterOperator
   native: true
   predicateExpression: FilterExprAndExpr(children: 
FilterLongColEqualLongScalar(col 5:bigint, val 0)(children: 
LongColModuloLongScalar(col 0:bigint, val 500) -> 5:bigint), 
FilterDoubleColGreaterEqualDoubleScalar(col 7:double, val -1.0)(children: 
FuncSinDoubleToDouble(col 6:double)(children: CastDecimalToDouble(col 
2:decimal(20,10)) -> 6:double) -> 7:double))
- 

[06/26] hive git commit: HIVE-18797 : ExprConstNodeDesc's getExprString should put appropriate qualifier with literals (Vineet Garg, reviewed by Ashutosh Chauhan)

2018-03-02 Thread vgarg
http://git-wip-us.apache.org/repos/asf/hive/blob/17441e48/ql/src/test/results/clientpositive/spark/subquery_notin.q.out
--
diff --git a/ql/src/test/results/clientpositive/spark/subquery_notin.q.out 
b/ql/src/test/results/clientpositive/spark/subquery_notin.q.out
index e2f26a9..82a1304 100644
--- a/ql/src/test/results/clientpositive/spark/subquery_notin.q.out
+++ b/ql/src/test/results/clientpositive/spark/subquery_notin.q.out
@@ -104,7 +104,7 @@ STAGE PLANS:
 outputColumnNames: _col0, _col1, _col2, _col3, _col5
 Statistics: Num rows: 550 Data size: 15193 Basic stats: 
COMPLETE Column stats: NONE
 Filter Operator
-  predicate: ((_col2 = 0) or (_col5 is null and _col0 is not 
null and (_col3 >= _col2))) (type: boolean)
+  predicate: ((_col2 = 0L) or (_col5 is null and _col0 is not 
null and (_col3 >= _col2))) (type: boolean)
   Statistics: Num rows: 366 Data size: 10110 Basic stats: 
COMPLETE Column stats: NONE
   Select Operator
 expressions: _col0 (type: string), _col1 (type: string)
@@ -377,7 +377,7 @@ STAGE PLANS:
 outputColumnNames: _col0, _col1, _col2, _col4, _col5, _col8
 Statistics: Num rows: 30 Data size: 3807 Basic stats: COMPLETE 
Column stats: NONE
 Filter Operator
-  predicate: (not CASE WHEN ((_col4 = 0)) THEN (false) WHEN 
(_col4 is null) THEN (false) WHEN (_col8 is not null) THEN (true) WHEN (_col0 
is null) THEN (null) WHEN ((_col5 < _col4)) THEN (true) ELSE (false) END) 
(type: boolean)
+  predicate: (not CASE WHEN ((_col4 = 0L)) THEN (false) WHEN 
(_col4 is null) THEN (false) WHEN (_col8 is not null) THEN (true) WHEN (_col0 
is null) THEN (null) WHEN ((_col5 < _col4)) THEN (true) ELSE (false) END) 
(type: boolean)
   Statistics: Num rows: 15 Data size: 1903 Basic stats: 
COMPLETE Column stats: NONE
   Select Operator
 expressions: _col1 (type: string), _col0 (type: string), 
_col2 (type: int)
@@ -645,7 +645,7 @@ STAGE PLANS:
 outputColumnNames: _col0, _col1, _col2, _col3, _col5
 Statistics: Num rows: 28 Data size: 5892 Basic stats: COMPLETE 
Column stats: NONE
 Filter Operator
-  predicate: ((_col2 = 0) or (_col5 is null and _col1 is not 
null and (_col3 >= _col2))) (type: boolean)
+  predicate: ((_col2 = 0L) or (_col5 is null and _col1 is not 
null and (_col3 >= _col2))) (type: boolean)
   Statistics: Num rows: 18 Data size: 3787 Basic stats: 
COMPLETE Column stats: NONE
   Select Operator
 expressions: _col0 (type: string), _col1 (type: int)
@@ -1036,7 +1036,7 @@ STAGE PLANS:
 outputColumnNames: _col0, _col1, _col2, _col6, _col7, _col10
 Statistics: Num rows: 9 Data size: 1345 Basic stats: COMPLETE 
Column stats: NONE
 Filter Operator
-  predicate: (not CASE WHEN ((_col6 = 0)) THEN (false) WHEN 
(_col6 is null) THEN (false) WHEN (_col10 is not null) THEN (true) WHEN (_col2 
is null) THEN (null) WHEN ((_col7 < _col6)) THEN (true) ELSE (false) END) 
(type: boolean)
+  predicate: (not CASE WHEN ((_col6 = 0L)) THEN (false) WHEN 
(_col6 is null) THEN (false) WHEN (_col10 is not null) THEN (true) WHEN (_col2 
is null) THEN (null) WHEN ((_col7 < _col6)) THEN (true) ELSE (false) END) 
(type: boolean)
   Statistics: Num rows: 5 Data size: 747 Basic stats: COMPLETE 
Column stats: NONE
   Select Operator
 expressions: _col1 (type: string), _col0 (type: string), 
_col2 (type: int)
@@ -1513,7 +1513,7 @@ STAGE PLANS:
 outputColumnNames: _col0, _col1, _col2, _col4
 Statistics: Num rows: 182 Data size: 5043 Basic stats: 
COMPLETE Column stats: NONE
 Filter Operator
-  predicate: ((_col1 = 0) or (_col4 is null and _col0 is not 
null and (_col2 >= _col1))) (type: boolean)
+  predicate: ((_col1 = 0L) or (_col4 is null and _col0 is not 
null and (_col2 >= _col1))) (type: boolean)
   Statistics: Num rows: 121 Data size: 3352 Basic stats: 
COMPLETE Column stats: NONE
   Select Operator
 expressions: _col0 (type: string)
@@ -1685,7 +1685,7 @@ STAGE PLANS:
 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, 
_col6, _col7, _col8, _col10, _col11, _col14
 Statistics: Num rows: 30 Data size: 3807 Basic stats: COMPLETE 
Column stats: NONE
 Filter Operator
-  predicate: (not CASE WHEN ((_col10 = 0)) THEN (false) WHEN 
(_col10 is null) THEN (false) WHEN (_col14 is not null) THEN (true) WHEN (_col5 
is null) THEN (null) WHEN ((_col11

[19/26] hive git commit: HIVE-18797 : ExprConstNodeDesc's getExprString should put appropriate qualifier with literals (Vineet Garg, reviewed by Ashutosh Chauhan)

2018-03-02 Thread vgarg
http://git-wip-us.apache.org/repos/asf/hive/blob/17441e48/ql/src/test/results/clientpositive/llap/subquery_views.q.out
--
diff --git a/ql/src/test/results/clientpositive/llap/subquery_views.q.out 
b/ql/src/test/results/clientpositive/llap/subquery_views.q.out
index 01a86d1..01f1252 100644
--- a/ql/src/test/results/clientpositive/llap/subquery_views.q.out
+++ b/ql/src/test/results/clientpositive/llap/subquery_views.q.out
@@ -234,7 +234,7 @@ STAGE PLANS:
 outputColumnNames: _col0, _col1, _col4, _col5, _col8
 Statistics: Num rows: 87 Data size: 17226 Basic stats: 
COMPLETE Column stats: COMPLETE
 Filter Operator
-  predicate: CASE WHEN ((_col4 = 0)) THEN (true) WHEN (_col4 
is null) THEN (true) WHEN (_col8 is not null) THEN (false) WHEN (_col0 is null) 
THEN (null) WHEN ((_col5 < _col4)) THEN (false) ELSE (true) END (type: boolean)
+  predicate: CASE WHEN ((_col4 = 0L)) THEN (true) WHEN (_col4 
is null) THEN (true) WHEN (_col8 is not null) THEN (false) WHEN (_col0 is null) 
THEN (null) WHEN ((_col5 < _col4)) THEN (false) ELSE (true) END (type: boolean)
   Statistics: Num rows: 43 Data size: 8514 Basic stats: 
COMPLETE Column stats: COMPLETE
   Select Operator
 expressions: _col0 (type: string), _col1 (type: string)
@@ -334,7 +334,7 @@ STAGE PLANS:
 outputColumnNames: _col0, _col4, _col5, _col8
 Statistics: Num rows: 87 Data size: 9309 Basic stats: COMPLETE 
Column stats: COMPLETE
 Filter Operator
-  predicate: CASE WHEN ((_col4 = 0)) THEN (true) WHEN (_col4 
is null) THEN (true) WHEN (_col8 is not null) THEN (false) WHEN (_col0 is null) 
THEN (null) WHEN ((_col5 < _col4)) THEN (false) ELSE (true) END (type: boolean)
+  predicate: CASE WHEN ((_col4 = 0L)) THEN (true) WHEN (_col4 
is null) THEN (true) WHEN (_col8 is not null) THEN (false) WHEN (_col0 is null) 
THEN (null) WHEN ((_col5 < _col4)) THEN (false) ELSE (true) END (type: boolean)
   Statistics: Num rows: 43 Data size: 4601 Basic stats: 
COMPLETE Column stats: COMPLETE
   Select Operator
 expressions: _col0 (type: string)

http://git-wip-us.apache.org/repos/asf/hive/blob/17441e48/ql/src/test/results/clientpositive/llap/temp_table.q.out
--
diff --git a/ql/src/test/results/clientpositive/llap/temp_table.q.out 
b/ql/src/test/results/clientpositive/llap/temp_table.q.out
index b72fcfe..b04c643 100644
--- a/ql/src/test/results/clientpositive/llap/temp_table.q.out
+++ b/ql/src/test/results/clientpositive/llap/temp_table.q.out
@@ -20,7 +20,7 @@ STAGE PLANS:
   alias: src
   Statistics: Num rows: 500 Data size: 89000 Basic stats: 
COMPLETE Column stats: COMPLETE
   Filter Operator
-predicate: ((UDFToDouble(key) % 2.0) = 0.0) (type: boolean)
+predicate: ((UDFToDouble(key) % 2.0D) = 0.0D) (type: 
boolean)
 Statistics: Num rows: 250 Data size: 44500 Basic stats: 
COMPLETE Column stats: COMPLETE
 Select Operator
   expressions: key (type: string), value (type: string)
@@ -93,7 +93,7 @@ STAGE PLANS:
   alias: src
   Statistics: Num rows: 500 Data size: 89000 Basic stats: 
COMPLETE Column stats: COMPLETE
   Filter Operator
-predicate: ((UDFToDouble(key) % 2.0) = 1.0) (type: boolean)
+predicate: ((UDFToDouble(key) % 2.0D) = 1.0D) (type: 
boolean)
 Statistics: Num rows: 250 Data size: 44500 Basic stats: 
COMPLETE Column stats: COMPLETE
 Select Operator
   expressions: key (type: string), value (type: string)

http://git-wip-us.apache.org/repos/asf/hive/blob/17441e48/ql/src/test/results/clientpositive/llap/tez_dynpart_hashjoin_2.q.out
--
diff --git 
a/ql/src/test/results/clientpositive/llap/tez_dynpart_hashjoin_2.q.out 
b/ql/src/test/results/clientpositive/llap/tez_dynpart_hashjoin_2.q.out
index 642bda2..c471cd6 100644
--- a/ql/src/test/results/clientpositive/llap/tez_dynpart_hashjoin_2.q.out
+++ b/ql/src/test/results/clientpositive/llap/tez_dynpart_hashjoin_2.q.out
@@ -41,7 +41,7 @@ STAGE PLANS:
   alias: a
   Statistics: Num rows: 12288 Data size: 3093170 Basic stats: 
COMPLETE Column stats: COMPLETE
   Filter Operator
-predicate: (csmallint < 100) (type: boolean)
+predicate: (csmallint < 100S) (type: boolean)
 Statistics: Num rows: 4096 Data size: 1031250 Basic stats: 
COMPLETE Column stats: COMPLETE

[08/26] hive git commit: HIVE-18797 : ExprConstNodeDesc's getExprString should put appropriate qualifier with literals (Vineet Garg, reviewed by Ashutosh Chauhan)

2018-03-02 Thread vgarg
http://git-wip-us.apache.org/repos/asf/hive/blob/17441e48/ql/src/test/results/clientpositive/spark/spark_dynamic_partition_pruning.q.out
--
diff --git 
a/ql/src/test/results/clientpositive/spark/spark_dynamic_partition_pruning.q.out
 
b/ql/src/test/results/clientpositive/spark/spark_dynamic_partition_pruning.q.out
index 79766b0..9ebb1c5 100644
--- 
a/ql/src/test/results/clientpositive/spark/spark_dynamic_partition_pruning.q.out
+++ 
b/ql/src/test/results/clientpositive/spark/spark_dynamic_partition_pruning.q.out
@@ -1024,10 +1024,10 @@ STAGE PLANS:
 Map Operator Tree:
 TableScan
   alias: srcpart_hour
-  filterExpr: ((UDFToDouble(hour) = 11.0) and hr is not null) 
(type: boolean)
+  filterExpr: ((UDFToDouble(hour) = 11.0D) and hr is not null) 
(type: boolean)
   Statistics: Num rows: 2 Data size: 10 Basic stats: COMPLETE 
Column stats: NONE
   Filter Operator
-predicate: ((UDFToDouble(hour) = 11.0) and hr is not null) 
(type: boolean)
+predicate: ((UDFToDouble(hour) = 11.0D) and hr is not 
null) (type: boolean)
 Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE 
Column stats: NONE
 Select Operator
   expressions: hr (type: string)
@@ -1093,10 +1093,10 @@ STAGE PLANS:
 Map Operator Tree:
 TableScan
   alias: srcpart_hour
-  filterExpr: ((UDFToDouble(hour) = 11.0) and hr is not null) 
(type: boolean)
+  filterExpr: ((UDFToDouble(hour) = 11.0D) and hr is not null) 
(type: boolean)
   Statistics: Num rows: 2 Data size: 10 Basic stats: COMPLETE 
Column stats: NONE
   Filter Operator
-predicate: ((UDFToDouble(hour) = 11.0) and hr is not null) 
(type: boolean)
+predicate: ((UDFToDouble(hour) = 11.0D) and hr is not 
null) (type: boolean)
 Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE 
Column stats: NONE
 Select Operator
   expressions: hr (type: string)
@@ -1241,10 +1241,10 @@ STAGE PLANS:
 Map Operator Tree:
 TableScan
   alias: srcpart_hour
-  filterExpr: ((UDFToDouble(hour) = 11.0) and hr is not null) 
(type: boolean)
+  filterExpr: ((UDFToDouble(hour) = 11.0D) and hr is not null) 
(type: boolean)
   Statistics: Num rows: 2 Data size: 10 Basic stats: COMPLETE 
Column stats: NONE
   Filter Operator
-predicate: ((UDFToDouble(hour) = 11.0) and hr is not null) 
(type: boolean)
+predicate: ((UDFToDouble(hour) = 11.0D) and hr is not 
null) (type: boolean)
 Statistics: Num rows: 1 Data size: 5 Basic stats: COMPLETE 
Column stats: NONE
 Select Operator
   expressions: hr (type: string)
@@ -1359,10 +1359,10 @@ STAGE PLANS:
 Map Operator Tree:
 TableScan
   alias: srcpart_date_hour
-  filterExpr: ((date = '2008-04-08') and (UDFToDouble(hour) = 
11.0) and ds is not null and hr is not null) (type: boolean)
+  filterExpr: ((date = '2008-04-08') and (UDFToDouble(hour) = 
11.0D) and ds is not null and hr is not null) (type: boolean)
   Statistics: Num rows: 4 Data size: 108 Basic stats: COMPLETE 
Column stats: NONE
   Filter Operator
-predicate: ((UDFToDouble(hour) = 11.0) and (date = 
'2008-04-08') and ds is not null and hr is not null) (type: boolean)
+predicate: ((UDFToDouble(hour) = 11.0D) and (date = 
'2008-04-08') and ds is not null and hr is not null) (type: boolean)
 Statistics: Num rows: 1 Data size: 27 Basic stats: 
COMPLETE Column stats: NONE
 Select Operator
   expressions: ds (type: string), hr (type: string)
@@ -1422,10 +1422,10 @@ STAGE PLANS:
 Map Operator Tree:
 TableScan
   alias: srcpart_date_hour
-  filterExpr: ((date = '2008-04-08') and (UDFToDouble(hour) = 
11.0) and ds is not null and hr is not null) (type: boolean)
+  filterExpr: ((date = '2008-04-08') and (UDFToDouble(hour) = 
11.0D) and ds is not null and hr is not null) (type: boolean)
   Statistics: Num rows: 4 Data size: 108 Basic stats: COMPLETE 
Column stats: NONE
   Filter Operator
-predicate: ((UDFToDouble(hour) = 11.0) and (date = 
'2008-04-08') and ds is not null and hr is not null) (type: boolean)
+predicate: ((UDFToDouble(hour) = 11.0D) and (date = 
'2008-04-08') and ds is not null and hr i

[17/26] hive git commit: HIVE-18797 : ExprConstNodeDesc's getExprString should put appropriate qualifier with literals (Vineet Garg, reviewed by Ashutosh Chauhan)

2018-03-02 Thread vgarg
http://git-wip-us.apache.org/repos/asf/hive/blob/17441e48/ql/src/test/results/clientpositive/llap/vector_interval_2.q.out
--
diff --git a/ql/src/test/results/clientpositive/llap/vector_interval_2.q.out 
b/ql/src/test/results/clientpositive/llap/vector_interval_2.q.out
index 7548686..2ee7502 100644
--- a/ql/src/test/results/clientpositive/llap/vector_interval_2.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_interval_2.q.out
@@ -130,7 +130,7 @@ STAGE PLANS:
   TableScan Vectorization:
   native: true
   Select Operator
-expressions: str1 (type: string), (CAST( str1 AS INTERVAL 
YEAR TO MONTH) = CAST( str1 AS INTERVAL YEAR TO MONTH)) (type: boolean), (CAST( 
str1 AS INTERVAL YEAR TO MONTH) <= CAST( str1 AS INTERVAL YEAR TO MONTH)) 
(type: boolean), (CAST( str1 AS INTERVAL YEAR TO MONTH) <= CAST( str2 AS 
INTERVAL YEAR TO MONTH)) (type: boolean), (CAST( str1 AS INTERVAL YEAR TO 
MONTH) < CAST( str2 AS INTERVAL YEAR TO MONTH)) (type: boolean), (CAST( str1 AS 
INTERVAL YEAR TO MONTH) >= CAST( str1 AS INTERVAL YEAR TO MONTH)) (type: 
boolean), (CAST( str2 AS INTERVAL YEAR TO MONTH) >= CAST( str1 AS INTERVAL YEAR 
TO MONTH)) (type: boolean), (CAST( str2 AS INTERVAL YEAR TO MONTH) > CAST( str1 
AS INTERVAL YEAR TO MONTH)) (type: boolean), (CAST( str1 AS INTERVAL YEAR TO 
MONTH) <> CAST( str2 AS INTERVAL YEAR TO MONTH)) (type: boolean), (CAST( str1 
AS INTERVAL YEAR TO MONTH) = 1-2) (type: boolean), (CAST( str1 AS INTERVAL YEAR 
TO MONTH) <= 1-2) (type: boolean), (CAST( str1 AS INTERVAL YEAR TO MON
 TH) <= 1-3) (type: boolean), (CAST( str1 AS INTERVAL YEAR TO MONTH) < 1-3) 
(type: boolean), (CAST( str1 AS INTERVAL YEAR TO MONTH) >= 1-2) (type: 
boolean), (CAST( str2 AS INTERVAL YEAR TO MONTH) >= 1-2) (type: boolean), 
(CAST( str2 AS INTERVAL YEAR TO MONTH) > 1-2) (type: boolean), (CAST( str1 AS 
INTERVAL YEAR TO MONTH) <> 1-3) (type: boolean), (1-2 = CAST( str1 AS INTERVAL 
YEAR TO MONTH)) (type: boolean), (1-2 <= CAST( str1 AS INTERVAL YEAR TO MONTH)) 
(type: boolean), (1-2 <= CAST( str2 AS INTERVAL YEAR TO MONTH)) (type: 
boolean), (1-2 < CAST( str2 AS INTERVAL YEAR TO MONTH)) (type: boolean), (1-2 
>= CAST( str1 AS INTERVAL YEAR TO MONTH)) (type: boolean), (1-3 >= CAST( str1 
AS INTERVAL YEAR TO MONTH)) (type: boolean), (1-3 > CAST( str1 AS INTERVAL YEAR 
TO MONTH)) (type: boolean), (1-2 <> CAST( str2 AS INTERVAL YEAR TO MONTH)) 
(type: boolean)
+expressions: str1 (type: string), (CAST( str1 AS INTERVAL 
YEAR TO MONTH) = CAST( str1 AS INTERVAL YEAR TO MONTH)) (type: boolean), (CAST( 
str1 AS INTERVAL YEAR TO MONTH) <= CAST( str1 AS INTERVAL YEAR TO MONTH)) 
(type: boolean), (CAST( str1 AS INTERVAL YEAR TO MONTH) <= CAST( str2 AS 
INTERVAL YEAR TO MONTH)) (type: boolean), (CAST( str1 AS INTERVAL YEAR TO 
MONTH) < CAST( str2 AS INTERVAL YEAR TO MONTH)) (type: boolean), (CAST( str1 AS 
INTERVAL YEAR TO MONTH) >= CAST( str1 AS INTERVAL YEAR TO MONTH)) (type: 
boolean), (CAST( str2 AS INTERVAL YEAR TO MONTH) >= CAST( str1 AS INTERVAL YEAR 
TO MONTH)) (type: boolean), (CAST( str2 AS INTERVAL YEAR TO MONTH) > CAST( str1 
AS INTERVAL YEAR TO MONTH)) (type: boolean), (CAST( str1 AS INTERVAL YEAR TO 
MONTH) <> CAST( str2 AS INTERVAL YEAR TO MONTH)) (type: boolean), (CAST( str1 
AS INTERVAL YEAR TO MONTH) = INTERVAL'1-2') (type: boolean), (CAST( str1 AS 
INTERVAL YEAR TO MONTH) <= INTERVAL'1-2') (type: boolean), (CAST( str1 AS 
 INTERVAL YEAR TO MONTH) <= INTERVAL'1-3') (type: boolean), (CAST( str1 AS 
INTERVAL YEAR TO MONTH) < INTERVAL'1-3') (type: boolean), (CAST( str1 AS 
INTERVAL YEAR TO MONTH) >= INTERVAL'1-2') (type: boolean), (CAST( str2 AS 
INTERVAL YEAR TO MONTH) >= INTERVAL'1-2') (type: boolean), (CAST( str2 AS 
INTERVAL YEAR TO MONTH) > INTERVAL'1-2') (type: boolean), (CAST( str1 AS 
INTERVAL YEAR TO MONTH) <> INTERVAL'1-3') (type: boolean), (INTERVAL'1-2' = 
CAST( str1 AS INTERVAL YEAR TO MONTH)) (type: boolean), (INTERVAL'1-2' <= CAST( 
str1 AS INTERVAL YEAR TO MONTH)) (type: boolean), (INTERVAL'1-2' <= CAST( str2 
AS INTERVAL YEAR TO MONTH)) (type: boolean), (INTERVAL'1-2' < CAST( str2 AS 
INTERVAL YEAR TO MONTH)) (type: boolean), (INTERVAL'1-2' >= CAST( str1 AS 
INTERVAL YEAR TO MONTH)) (type: boolean), (INTERVAL'1-3' >= CAST( str1 AS 
INTERVAL YEAR TO MONTH)) (type: boolean), (INTERVAL'1-3' > CAST( str1 AS 
INTERVAL YEAR TO MONTH)) (type: boolean), (INTERVAL'1-2' <> CAST( str2 AS 
INTERVAL YEAR TO MONTH)
 ) (type: boolean)
 outputColumnNames: _col0, _col1, _col2, _col3, _col4, 
_col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, 
_col15, _col16, _col17, _col18, _col19, _col20, _col21, _col22, _col23, _col24
 Select Vectorization:
 className: VectorSelectOperator
@@ -336,7 +336,7 @@ STAGE PLANS:
   TableScan Vectorization:

[22/26] hive git commit: HIVE-18797 : ExprConstNodeDesc's getExprString should put appropriate qualifier with literals (Vineet Garg, reviewed by Ashutosh Chauhan)

2018-03-02 Thread vgarg
http://git-wip-us.apache.org/repos/asf/hive/blob/17441e48/ql/src/test/results/clientpositive/llap/acid_vectorization_original.q.out
--
diff --git 
a/ql/src/test/results/clientpositive/llap/acid_vectorization_original.q.out 
b/ql/src/test/results/clientpositive/llap/acid_vectorization_original.q.out
index 68801f0..11a99db 100644
--- a/ql/src/test/results/clientpositive/llap/acid_vectorization_original.q.out
+++ b/ql/src/test/results/clientpositive/llap/acid_vectorization_original.q.out
@@ -428,10 +428,10 @@ STAGE PLANS:
 Map Operator Tree:
 TableScan
   alias: over10k_orc_bucketed
-  filterExpr: ((b = 4294967363) and (t < 100)) (type: boolean)
+  filterExpr: ((b = 4294967363L) and (t < 100Y)) (type: 
boolean)
   Statistics: Num rows: 2098 Data size: 41920 Basic stats: 
COMPLETE Column stats: COMPLETE
   Filter Operator
-predicate: ((b = 4294967363) and (t < 100)) (type: boolean)
+predicate: ((b = 4294967363L) and (t < 100Y)) (type: 
boolean)
 Statistics: Num rows: 2 Data size: 40 Basic stats: 
COMPLETE Column stats: COMPLETE
 Select Operator
   expressions: t (type: tinyint), si (type: smallint), i 
(type: int)
@@ -498,10 +498,10 @@ STAGE PLANS:
 Map Operator Tree:
 TableScan
   alias: over10k_orc_bucketed
-  filterExpr: ((b = 4294967363) and (t < 100)) (type: boolean)
+  filterExpr: ((b = 4294967363L) and (t < 100Y)) (type: 
boolean)
   Statistics: Num rows: 2098 Data size: 41920 Basic stats: 
COMPLETE Column stats: COMPLETE
   Filter Operator
-predicate: ((b = 4294967363) and (t < 100)) (type: boolean)
+predicate: ((b = 4294967363L) and (t < 100Y)) (type: 
boolean)
 Statistics: Num rows: 2 Data size: 40 Basic stats: 
COMPLETE Column stats: COMPLETE
 Select Operator
   expressions: ROW__ID (type: 
struct), t (type: tinyint), si 
(type: smallint), i (type: int)
@@ -571,10 +571,10 @@ STAGE PLANS:
 Map Operator Tree:
 TableScan
   alias: over10k_orc_bucketed
-  filterExpr: ((b = 4294967363) and (t < 100)) (type: boolean)
+  filterExpr: ((b = 4294967363L) and (t < 100Y)) (type: 
boolean)
   Statistics: Num rows: 2098 Data size: 706986 Basic stats: 
COMPLETE Column stats: COMPLETE
   Filter Operator
-predicate: ((b = 4294967363) and (t < 100)) (type: boolean)
+predicate: ((b = 4294967363L) and (t < 100Y)) (type: 
boolean)
 Statistics: Num rows: 2 Data size: 674 Basic stats: 
COMPLETE Column stats: COMPLETE
 Select Operator
   expressions: ROW__ID (type: 
struct), t (type: tinyint), si 
(type: smallint), f (type: float), d (type: double), bo (type: boolean), s 
(type: string), ts (type: timestamp), dec (type: decimal(4,2)), bin (type: 
binary)
@@ -592,7 +592,7 @@ STAGE PLANS:
 Execution mode: vectorized, llap
 Reduce Operator Tree:
   Select Operator
-expressions: KEY.reducesinkkey0 (type: 
struct), VALUE._col0 (type: 
tinyint), VALUE._col1 (type: smallint), 0 (type: int), 4294967363 (type: 
bigint), VALUE._col3 (type: float), VALUE._col4 (type: double), VALUE._col5 
(type: boolean), VALUE._col6 (type: string), VALUE._col7 (type: timestamp), 
VALUE._col8 (type: decimal(4,2)), VALUE._col9 (type: binary)
+expressions: KEY.reducesinkkey0 (type: 
struct), VALUE._col0 (type: 
tinyint), VALUE._col1 (type: smallint), 0 (type: int), 4294967363L (type: 
bigint), VALUE._col3 (type: float), VALUE._col4 (type: double), VALUE._col5 
(type: boolean), VALUE._col6 (type: string), VALUE._col7 (type: timestamp), 
VALUE._col8 (type: decimal(4,2)), VALUE._col9 (type: binary)
 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, 
_col6, _col7, _col8, _col9, _col10, _col11
 Statistics: Num rows: 2 Data size: 834 Basic stats: COMPLETE 
Column stats: COMPLETE
 File Output Operator
@@ -694,7 +694,7 @@ STAGE PLANS:
 outputColumnNames: _col0, _col1
 Statistics: Num rows: 1049 Data size: 88116 Basic stats: 
COMPLETE Column stats: COMPLETE
 Filter Operator
-  predicate: (_col1 > 1) (type: boolean)
+  predicate: (_col1 > 1L) (type: boolean)
   Statistics: Num rows: 349 Data size: 29316 Basic stats: 
COMPLETE Column stats: COMPLETE
   File Output Operator
 compressed: false

http://git-wip-us.apache.org/repos/asf

[09/26] hive git commit: HIVE-18797 : ExprConstNodeDesc's getExprString should put appropriate qualifier with literals (Vineet Garg, reviewed by Ashutosh Chauhan)

2018-03-02 Thread vgarg
http://git-wip-us.apache.org/repos/asf/hive/blob/17441e48/ql/src/test/results/clientpositive/spark/parquet_vectorization_1.q.out
--
diff --git 
a/ql/src/test/results/clientpositive/spark/parquet_vectorization_1.q.out 
b/ql/src/test/results/clientpositive/spark/parquet_vectorization_1.q.out
index 34b273c..0d201f6 100644
--- a/ql/src/test/results/clientpositive/spark/parquet_vectorization_1.q.out
+++ b/ql/src/test/results/clientpositive/spark/parquet_vectorization_1.q.out
@@ -129,7 +129,7 @@ STAGE PLANS:
 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
 Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE 
Column stats: NONE
 Select Operator
-  expressions: _col0 (type: double), (_col0 / -26.28) (type: 
double), _col1 (type: double), (-1.389 + _col1) (type: double), (_col1 * 
(-1.389 + _col1)) (type: double), _col2 (type: tinyint), (- (_col1 * (-1.389 + 
_col1))) (type: double), _col3 (type: int), (CAST( _col3 AS decimal(10,0)) * 
79.553) (type: decimal(16,3)), _col4 (type: double), (10.175 % (- (_col1 * 
(-1.389 + _col1 (type: double), _col5 (type: bigint), (-563 % _col3) (type: 
int)
+  expressions: _col0 (type: double), (_col0 / -26.28D) (type: 
double), _col1 (type: double), (-1.389D + _col1) (type: double), (_col1 * 
(-1.389D + _col1)) (type: double), _col2 (type: tinyint), (- (_col1 * (-1.389D 
+ _col1))) (type: double), _col3 (type: int), (CAST( _col3 AS decimal(10,0)) * 
79.553) (type: decimal(16,3)), _col4 (type: double), (10.175D % (- (_col1 * 
(-1.389D + _col1 (type: double), _col5 (type: bigint), (-563 % _col3) 
(type: int)
   outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, 
_col6, _col7, _col8, _col9, _col10, _col11, _col12
   Statistics: Num rows: 1 Data size: 184 Basic stats: COMPLETE 
Column stats: NONE
   File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/17441e48/ql/src/test/results/clientpositive/spark/parquet_vectorization_10.q.out
--
diff --git 
a/ql/src/test/results/clientpositive/spark/parquet_vectorization_10.q.out 
b/ql/src/test/results/clientpositive/spark/parquet_vectorization_10.q.out
index 25ff960..c21e77f 100644
--- a/ql/src/test/results/clientpositive/spark/parquet_vectorization_10.q.out
+++ b/ql/src/test/results/clientpositive/spark/parquet_vectorization_10.q.out
@@ -72,10 +72,10 @@ STAGE PLANS:
 className: VectorFilterOperator
 native: true
 predicateExpression: FilterExprOrExpr(children: 
FilterStringGroupColLessEqualStringScalar(col 7:string, val 10), 
FilterExprAndExpr(children: FilterDoubleColGreaterDoubleColumn(col 13:double, 
col 5:double)(children: CastLongToDouble(col 0:tinyint) -> 13:double), 
FilterDecimalScalarGreaterEqualDecimalColumn(val -5638.15, col 
14:decimal(6,2))(children: CastLongToDecimal(col 0:tinyint) -> 
14:decimal(6,2))), FilterExprAndExpr(children: 
FilterDoubleColGreaterDoubleScalar(col 5:double, val 6981.0), 
FilterExprOrExpr(children: FilterDecimalColEqualDecimalScalar(col 
15:decimal(11,4), val 9763215.5639)(children: CastLongToDecimal(col 1:smallint) 
-> 15:decimal(11,4)), FilterStringColLikeStringScalar(col 6:string, pattern 
%a
-predicate: (((UDFToDouble(ctinyint) > cdouble) and 
(-5638.15 >= CAST( ctinyint AS decimal(6,2 or ((cdouble > 6981.0) and 
((CAST( csmallint AS decimal(11,4)) = 9763215.5639) or (cstring1 like '%a'))) 
or (cstring2 <= '10')) (type: boolean)
+predicate: (((UDFToDouble(ctinyint) > cdouble) and 
(-5638.15 >= CAST( ctinyint AS decimal(6,2 or ((cdouble > 6981.0D) and 
((CAST( csmallint AS decimal(11,4)) = 9763215.5639) or (cstring1 like '%a'))) 
or (cstring2 <= '10')) (type: boolean)
 Statistics: Num rows: 9557 Data size: 114684 Basic stats: 
COMPLETE Column stats: NONE
 Select Operator
-  expressions: cdouble (type: double), ctimestamp1 (type: 
timestamp), ctinyint (type: tinyint), cboolean1 (type: boolean), cstring1 
(type: string), (- cdouble) (type: double), (cdouble + UDFToDouble(csmallint)) 
(type: double), ((cdouble + UDFToDouble(csmallint)) % 33.0) (type: double), (- 
cdouble) (type: double), (UDFToDouble(ctinyint) % cdouble) (type: double), 
(UDFToShort(ctinyint) % csmallint) (type: smallint), (- cdouble) (type: 
double), (cbigint * UDFToLong((UDFToShort(ctinyint) % csmallint))) (type: 
bigint), (9763215.5639 - (cdouble + UDFToDouble(csmallint))) (type: double), (- 
(- cdouble)) (type: double)
+  expressions: cdouble (type: double), ctimestamp1 (type: 
timestamp), ctinyint (type: tinyint), cboolean1 (type: boolean), cstring1 
(type: string), (- cdouble) (type: double), (cdouble + UDFToDouble(csma

[25/26] hive git commit: HIVE-18797 : ExprConstNodeDesc's getExprString should put appropriate qualifier with literals (Vineet Garg, reviewed by Ashutosh Chauhan)

2018-03-02 Thread vgarg
http://git-wip-us.apache.org/repos/asf/hive/blob/17441e48/ql/src/test/results/clientpositive/auto_join4.q.out
--
diff --git a/ql/src/test/results/clientpositive/auto_join4.q.out 
b/ql/src/test/results/clientpositive/auto_join4.q.out
index 4b08f93..b5efad6 100644
--- a/ql/src/test/results/clientpositive/auto_join4.q.out
+++ b/ql/src/test/results/clientpositive/auto_join4.q.out
@@ -56,7 +56,7 @@ STAGE PLANS:
 alias: src2
 Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE 
Column stats: NONE
 Filter Operator
-  predicate: ((UDFToDouble(key) < 20.0) and (UDFToDouble(key) > 
15.0)) (type: boolean)
+  predicate: ((UDFToDouble(key) < 20.0D) and (UDFToDouble(key) > 
15.0D)) (type: boolean)
   Statistics: Num rows: 55 Data size: 584 Basic stats: COMPLETE 
Column stats: NONE
   Select Operator
 expressions: key (type: string), value (type: string)
@@ -74,7 +74,7 @@ STAGE PLANS:
 alias: src1
 Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE 
Column stats: NONE
 Filter Operator
-  predicate: ((UDFToDouble(key) < 20.0) and (UDFToDouble(key) > 
10.0)) (type: boolean)
+  predicate: ((UDFToDouble(key) < 20.0D) and (UDFToDouble(key) > 
10.0D)) (type: boolean)
   Statistics: Num rows: 55 Data size: 584 Basic stats: COMPLETE 
Column stats: NONE
   Select Operator
 expressions: key (type: string), value (type: string)

http://git-wip-us.apache.org/repos/asf/hive/blob/17441e48/ql/src/test/results/clientpositive/auto_join5.q.out
--
diff --git a/ql/src/test/results/clientpositive/auto_join5.q.out 
b/ql/src/test/results/clientpositive/auto_join5.q.out
index 03262fe..f91cf7a 100644
--- a/ql/src/test/results/clientpositive/auto_join5.q.out
+++ b/ql/src/test/results/clientpositive/auto_join5.q.out
@@ -56,7 +56,7 @@ STAGE PLANS:
 alias: src1
 Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE 
Column stats: NONE
 Filter Operator
-  predicate: ((UDFToDouble(key) < 20.0) and (UDFToDouble(key) > 
15.0)) (type: boolean)
+  predicate: ((UDFToDouble(key) < 20.0D) and (UDFToDouble(key) > 
15.0D)) (type: boolean)
   Statistics: Num rows: 55 Data size: 584 Basic stats: COMPLETE 
Column stats: NONE
   Select Operator
 expressions: key (type: string), value (type: string)
@@ -74,7 +74,7 @@ STAGE PLANS:
 alias: src2
 Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE 
Column stats: NONE
 Filter Operator
-  predicate: ((UDFToDouble(key) < 25.0) and (UDFToDouble(key) > 
15.0)) (type: boolean)
+  predicate: ((UDFToDouble(key) < 25.0D) and (UDFToDouble(key) > 
15.0D)) (type: boolean)
   Statistics: Num rows: 55 Data size: 584 Basic stats: COMPLETE 
Column stats: NONE
   Select Operator
 expressions: key (type: string), value (type: string)

http://git-wip-us.apache.org/repos/asf/hive/blob/17441e48/ql/src/test/results/clientpositive/auto_join6.q.out
--
diff --git a/ql/src/test/results/clientpositive/auto_join6.q.out 
b/ql/src/test/results/clientpositive/auto_join6.q.out
index d8c58d4..166ecda 100644
--- a/ql/src/test/results/clientpositive/auto_join6.q.out
+++ b/ql/src/test/results/clientpositive/auto_join6.q.out
@@ -50,7 +50,7 @@ STAGE PLANS:
 alias: src1
 Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE 
Column stats: NONE
 Filter Operator
-  predicate: ((UDFToDouble(key) < 20.0) and (UDFToDouble(key) > 
10.0)) (type: boolean)
+  predicate: ((UDFToDouble(key) < 20.0D) and (UDFToDouble(key) > 
10.0D)) (type: boolean)
   Statistics: Num rows: 55 Data size: 584 Basic stats: COMPLETE 
Column stats: NONE
   Select Operator
 expressions: key (type: string), value (type: string)
@@ -66,7 +66,7 @@ STAGE PLANS:
 alias: src2
 Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE 
Column stats: NONE
 Filter Operator
-  predicate: ((UDFToDouble(key) < 25.0) and (UDFToDouble(key) > 
15.0)) (type: boolean)
+  predicate: ((UDFToDouble(key) < 25.0D) and (UDFToDouble(key) > 
15.0D)) (type: boolean)
   Statistics: Num rows: 55 Data size: 584 Basic stats: COMPLETE 
Column stats: NONE
   Select Operator
 expressions: key (type: string), value (type: string)

http://git-wip-us.apache.org/repos/asf/hive/blob/17441e48/ql/src/test/results/clientpositive/auto_join7.q.out
---

[18/26] hive git commit: HIVE-18797 : ExprConstNodeDesc's getExprString should put appropriate qualifier with literals (Vineet Garg, reviewed by Ashutosh Chauhan)

2018-03-02 Thread vgarg
http://git-wip-us.apache.org/repos/asf/hive/blob/17441e48/ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id2.q.out
--
diff --git 
a/ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id2.q.out 
b/ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id2.q.out
index 74f6289..058006c 100644
--- a/ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id2.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_groupby_grouping_id2.q.out
@@ -77,7 +77,7 @@ STAGE PLANS:
   native: false
   vectorProcessingMode: HASH
   projectedOutputColumnNums: [0]
-  keys: _col0 (type: int), _col1 (type: int), 0 (type: 
bigint)
+  keys: _col0 (type: int), _col1 (type: int), 0L (type: 
bigint)
   mode: hash
   outputColumnNames: _col0, _col1, _col2, _col3
   Statistics: Num rows: 18 Data size: 144 Basic stats: 
COMPLETE Column stats: NONE
@@ -279,7 +279,7 @@ STAGE PLANS:
   native: false
   vectorProcessingMode: HASH
   projectedOutputColumnNums: [0]
-  keys: _col0 (type: int), _col1 (type: int), 0 (type: 
bigint)
+  keys: _col0 (type: int), _col1 (type: int), 0L (type: 
bigint)
   mode: hash
   outputColumnNames: _col0, _col1, _col2, _col3
   Statistics: Num rows: 18 Data size: 144 Basic stats: 
COMPLETE Column stats: NONE
@@ -491,7 +491,7 @@ STAGE PLANS:
   native: false
   vectorProcessingMode: HASH
   projectedOutputColumnNums: []
-  keys: _col0 (type: int), _col1 (type: int), 0 (type: 
bigint)
+  keys: _col0 (type: int), _col1 (type: int), 0L (type: 
bigint)
   mode: hash
   outputColumnNames: _col0, _col1, _col2
   Statistics: Num rows: 18 Data size: 144 Basic stats: 
COMPLETE Column stats: NONE
@@ -798,7 +798,7 @@ STAGE PLANS:
   native: false
   vectorProcessingMode: HASH
   projectedOutputColumnNums: []
-  keys: _col0 (type: int), _col1 (type: int), 0 (type: 
bigint)
+  keys: _col0 (type: int), _col1 (type: int), 0L (type: 
bigint)
   mode: hash
   outputColumnNames: _col0, _col1, _col2
   Statistics: Num rows: 18 Data size: 144 Basic stats: 
COMPLETE Column stats: NONE
@@ -1101,7 +1101,7 @@ STAGE PLANS:
   native: false
   vectorProcessingMode: HASH
   projectedOutputColumnNums: []
-  keys: _col0 (type: int), _col1 (type: int), 0 (type: 
bigint)
+  keys: _col0 (type: int), _col1 (type: int), 0L (type: 
bigint)
   mode: hash
   outputColumnNames: _col0, _col1, _col2
   Statistics: Num rows: 18 Data size: 144 Basic stats: 
COMPLETE Column stats: NONE
@@ -1429,7 +1429,7 @@ STAGE PLANS:
   native: false
   vectorProcessingMode: HASH
   projectedOutputColumnNums: []
-  keys: _col0 (type: int), _col1 (type: int), 0 (type: 
bigint)
+  keys: _col0 (type: int), _col1 (type: int), 0L (type: 
bigint)
   mode: hash
   outputColumnNames: _col0, _col1, _col2
   Statistics: Num rows: 18 Data size: 144 Basic stats: 
COMPLETE Column stats: NONE
@@ -1750,7 +1750,7 @@ STAGE PLANS:
   native: false
   vectorProcessingMode: HASH
   projectedOutputColumnNums: [0]
-  keys: _col0 (type: int), _col1 (type: int), 0 (type: 
bigint)
+  keys: _col0 (type: int), _col1 (type: int), 0L (type: 
bigint)
   mode: hash
   outputColumnNames: _col0, _col1, _col2, _col3
   Statistics: Num rows: 18 Data size: 144 Basic stats: 
COMPLETE Column stats: NONE
@@ -1916,7 +1916,7 @@ STAGE PLANS:
   native: false
   vectorProcessingMode: HASH
   projectedOutputColumnNums: []
-  keys: _col0 (type: int), _col1 (type: int), 0 (type: 
bigint)
+  keys: _col0 (type: int), _col1 (type: int), 0L (type: 
bigint)
   mode: hash
   outputColumnNames: _col0, _col1, _col2
   Statistics: Num rows: 18 Data size: 1

[13/26] hive git commit: HIVE-18797 : ExprConstNodeDesc's getExprString should put appropriate qualifier with literals (Vineet Garg, reviewed by Ashutosh Chauhan)

2018-03-02 Thread vgarg
http://git-wip-us.apache.org/repos/asf/hive/blob/17441e48/ql/src/test/results/clientpositive/parquet_vectorization_13.q.out
--
diff --git a/ql/src/test/results/clientpositive/parquet_vectorization_13.q.out 
b/ql/src/test/results/clientpositive/parquet_vectorization_13.q.out
index 55b6afc..fa77db1 100644
--- a/ql/src/test/results/clientpositive/parquet_vectorization_13.q.out
+++ b/ql/src/test/results/clientpositive/parquet_vectorization_13.q.out
@@ -88,7 +88,7 @@ STAGE PLANS:
   className: VectorFilterOperator
   native: true
   predicateExpression: FilterExprOrExpr(children: 
FilterExprAndExpr(children: FilterDoubleColLessDoubleScalar(col 4:float, val 
3569.0), FilterDoubleScalarGreaterEqualDoubleColumn(val 10.175, col 5:double), 
FilterLongColNotEqualLongScalar(col 10:boolean, val 1)), 
FilterExprAndExpr(children: FilterDoubleColGreaterDoubleScalar(col 13:double, 
val 11.0)(children: CastTimestampToDouble(col 8:timestamp) -> 13:double), 
FilterDoubleColNotEqualDoubleScalar(col 13:double, val 12.0)(children: 
CastTimestampToDouble(col 9:timestamp) -> 13:double), 
FilterDecimalColLessDecimalScalar(col 14:decimal(11,4), val 
9763215.5639)(children: CastLongToDecimal(col 0:tinyint) -> 14:decimal(11,4
-  predicate: (((UDFToDouble(ctimestamp1) > 11.0) and 
(UDFToDouble(ctimestamp2) <> 12.0) and (CAST( ctinyint AS decimal(11,4)) < 
9763215.5639)) or ((cfloat < 3569) and (10.175 >= cdouble) and (cboolean1 <> 
1))) (type: boolean)
+  predicate: (((UDFToDouble(ctimestamp1) > 11.0D) and 
(UDFToDouble(ctimestamp2) <> 12.0D) and (CAST( ctinyint AS decimal(11,4)) < 
9763215.5639)) or ((cfloat < 3569) and (10.175D >= cdouble) and (cboolean1 <> 
1))) (type: boolean)
   Statistics: Num rows: 2730 Data size: 32760 Basic stats: 
COMPLETE Column stats: NONE
   Select Operator
 expressions: ctinyint (type: tinyint), cfloat (type: float), 
cstring1 (type: string), ctimestamp1 (type: timestamp), cboolean1 (type: 
boolean)
@@ -151,7 +151,7 @@ STAGE PLANS:
   outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, _col6, 
_col7, _col8, _col9, _col10
   Statistics: Num rows: 1365 Data size: 16380 Basic stats: COMPLETE 
Column stats: NONE
   Select Operator
-expressions: _col0 (type: boolean), _col1 (type: tinyint), _col2 
(type: timestamp), _col3 (type: float), _col4 (type: string), (- _col1) (type: 
tinyint), _col5 (type: tinyint), ((- _col1) + _col5) (type: tinyint), _col6 
(type: double), (_col6 * UDFToDouble(((- _col1) + _col5))) (type: double), (- 
_col6) (type: double), (79.553 * _col3) (type: float), _col7 (type: double), (- 
_col6) (type: double), _col8 (type: double), (CAST( ((- _col1) + _col5) AS 
decimal(3,0)) - 10.175) (type: decimal(7,3)), (- (- _col6)) (type: double), 
(-26.28 / (- (- _col6))) (type: double), _col9 (type: float), ((_col6 * 
UDFToDouble(((- _col1) + _col5))) / UDFToDouble(_col1)) (type: double), _col10 
(type: tinyint)
+expressions: _col0 (type: boolean), _col1 (type: tinyint), _col2 
(type: timestamp), _col3 (type: float), _col4 (type: string), (- _col1) (type: 
tinyint), _col5 (type: tinyint), ((- _col1) + _col5) (type: tinyint), _col6 
(type: double), (_col6 * UDFToDouble(((- _col1) + _col5))) (type: double), (- 
_col6) (type: double), (79.553 * _col3) (type: float), _col7 (type: double), (- 
_col6) (type: double), _col8 (type: double), (CAST( ((- _col1) + _col5) AS 
decimal(3,0)) - 10.175) (type: decimal(7,3)), (- (- _col6)) (type: double), 
(-26.28D / (- (- _col6))) (type: double), _col9 (type: float), ((_col6 * 
UDFToDouble(((- _col1) + _col5))) / UDFToDouble(_col1)) (type: double), _col10 
(type: tinyint)
 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, 
_col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, _col15, 
_col16, _col17, _col18, _col19, _col20
 Statistics: Num rows: 1365 Data size: 16380 Basic stats: COMPLETE 
Column stats: NONE
 File Output Operator
@@ -417,7 +417,7 @@ STAGE PLANS:
   className: VectorFilterOperator
   native: true
   predicateExpression: FilterExprOrExpr(children: 
FilterExprAndExpr(children: FilterDoubleColLessDoubleScalar(col 4:float, val 
3569.0), FilterDoubleScalarGreaterEqualDoubleColumn(val 10.175, col 5:double), 
FilterLongColNotEqualLongScalar(col 10:boolean, val 1)), 
FilterExprAndExpr(children: FilterDoubleColGreaterDoubleScalar(col 13:double, 
val -1.388)(children: CastTimestampToDouble(col 8:timestamp) -> 13:double), 
FilterDoubleColNotEqualDoubleScalar(col 13:double, val 
-1.3359)(children: CastTimestampToDouble(col 9:timestamp) -> 
13:double), FilterDecimalColLessDecimalScalar(col 14:decimal(11,4), val 
9763215.5639)(children: CastLongToDecimal(col 0:tinyint) -> 14:decimal(11,4
-  p

[26/26] hive git commit: HIVE-18797 : ExprConstNodeDesc's getExprString should put appropriate qualifier with literals (Vineet Garg, reviewed by Ashutosh Chauhan)

2018-03-02 Thread vgarg
HIVE-18797 : ExprConstNodeDesc's getExprString should put appropriate qualifier 
with literals (Vineet Garg, reviewed by Ashutosh Chauhan)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/17441e48
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/17441e48
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/17441e48

Branch: refs/heads/master
Commit: 17441e48574bd5a920c8da31c99e6cb2e5c5dcaa
Parents: fc8a45b
Author: Vineet Garg 
Authored: Fri Mar 2 10:46:08 2018 -0800
Committer: Vineet Garg 
Committed: Fri Mar 2 10:46:08 2018 -0800

--
 .../results/positive/accumulo_queries.q.out |   2 +-
 .../test/results/clientpositive/dboutput.q.out  |   2 +-
 .../clientpositive/udaf_example_avg.q.out   |   2 +-
 .../clientpositive/udaf_example_max.q.out   |   2 +-
 .../clientpositive/udaf_example_max_n.q.out |   2 +-
 .../clientpositive/udaf_example_min.q.out   |   2 +-
 .../clientpositive/udaf_example_min_n.q.out |   2 +-
 .../clientpositive/udf_example_add.q.out|   2 +-
 .../src/test/results/positive/hbase_ddl.q.out   |   2 +-
 .../test/results/positive/hbase_queries.q.out   |   2 +-
 .../test/results/positive/hbase_timestamp.q.out |   8 +-
 .../hive/ql/plan/ExprNodeConstantDesc.java  |  25 +++
 .../clientpositive/allcolref_in_udf.q.out   |  10 +-
 .../annotate_stats_deep_filters.q.out   |   4 +-
 .../clientpositive/annotate_stats_groupby.q.out |  32 ++--
 .../annotate_stats_groupby2.q.out   |   6 +-
 .../clientpositive/annotate_stats_select.q.out  |   8 +-
 .../results/clientpositive/auto_join11.q.out|   4 +-
 .../results/clientpositive/auto_join12.q.out|   6 +-
 .../results/clientpositive/auto_join13.q.out|   6 +-
 .../results/clientpositive/auto_join14.q.out|   4 +-
 .../results/clientpositive/auto_join16.q.out|   4 +-
 .../results/clientpositive/auto_join27.q.out|   6 +-
 .../results/clientpositive/auto_join33.q.out|  12 +-
 .../results/clientpositive/auto_join4.q.out |   4 +-
 .../results/clientpositive/auto_join5.q.out |   4 +-
 .../results/clientpositive/auto_join6.q.out |   4 +-
 .../results/clientpositive/auto_join7.q.out |   6 +-
 .../results/clientpositive/auto_join8.q.out |   4 +-
 .../auto_join_without_localtask.q.out   |  12 +-
 ql/src/test/results/clientpositive/cast1.q.out  |   4 +-
 .../clientpositive/cast_on_constant.q.out   |  16 +-
 .../test/results/clientpositive/cbo_const.q.out |   8 +-
 .../clientpositive/cbo_rp_outer_join_ppr.q.out  |   8 +-
 .../cbo_rp_udaf_percentile_approx_23.q.out  |   2 +-
 .../clientpositive/columnstats_partlvl.q.out|  50 +++---
 .../clientpositive/columnstats_partlvl_dp.q.out |  20 +--
 .../test/results/clientpositive/comments.q.out  |   4 +-
 .../clientpositive/constant_prop_3.q.out|   2 +-
 .../clientpositive/constantfolding.q.out|   6 +-
 .../results/clientpositive/constprog2.q.out |   2 +-
 .../results/clientpositive/constprog_type.q.out |   2 +-
 .../clientpositive/correlationoptimizer10.q.out |  24 +--
 .../clientpositive/correlationoptimizer8.q.out  |  30 ++--
 .../results/clientpositive/create_view.q.out|   2 +-
 .../clientpositive/cross_join_merge.q.out   |   2 +-
 .../results/clientpositive/ctas_colname.q.out   |   8 +-
 ql/src/test/results/clientpositive/cte_5.q.out  |   2 +-
 .../results/clientpositive/decimal_udf.q.out|  16 +-
 .../results/clientpositive/decimal_udf2.q.out   |   4 +-
 .../druid/druidmini_dynamic_partition.q.out |   6 +-
 .../results/clientpositive/druid_basic2.q.out   |   8 +-
 .../results/clientpositive/druid_basic3.q.out   |   6 +-
 .../clientpositive/druid_intervals.q.out|   4 +-
 .../clientpositive/druid_timeseries.q.out   |  12 +-
 .../results/clientpositive/druid_topn.q.out |   4 +-
 .../results/clientpositive/except_all.q.out |  26 +--
 .../extrapolate_part_stats_date.q.out   |   2 +-
 .../clientpositive/filter_cond_pushdown.q.out   |   6 +-
 .../clientpositive/fold_eq_with_case_when.q.out |   4 +-
 .../clientpositive/fouter_join_ppr.q.out|  16 +-
 .../clientpositive/fp_literal_arithmetic.q.out  |   4 +-
 .../test/results/clientpositive/gby_star.q.out  |   8 +-
 .../results/clientpositive/groupby_cube1.q.out  |  16 +-
 .../clientpositive/groupby_cube_multi_gby.q.out |   4 +-
 .../clientpositive/groupby_grouping_id3.q.out   |  18 +-
 .../clientpositive/groupby_grouping_sets1.q.out |  10 +-
 .../clientpositive/groupby_grouping_sets2.q.out |   8 +-
 .../clientpositive/groupby_grouping_sets3.q.out |   6 +-
 .../clientpositive/groupby_grouping_sets4.q.out |  24 +--
 .../clientpositive/groupby_grouping_sets5.q.out |   6 +-
 .../clientpositive/groupby_grouping_sets6.q.out |   8 +-
 .../groupby_grouping_sets_grouping.q.out|  48 +++---
 .../groupby_grouping_sets_limit.q.out   |   8 +-
 ...

[15/26] hive git commit: HIVE-18797 : ExprConstNodeDesc's getExprString should put appropriate qualifier with literals (Vineet Garg, reviewed by Ashutosh Chauhan)

2018-03-02 Thread vgarg
http://git-wip-us.apache.org/repos/asf/hive/blob/17441e48/ql/src/test/results/clientpositive/llap/vectorization_short_regress.q.out
--
diff --git 
a/ql/src/test/results/clientpositive/llap/vectorization_short_regress.q.out 
b/ql/src/test/results/clientpositive/llap/vectorization_short_regress.q.out
index bd5e284..993bfd5 100644
--- a/ql/src/test/results/clientpositive/llap/vectorization_short_regress.q.out
+++ b/ql/src/test/results/clientpositive/llap/vectorization_short_regress.q.out
@@ -100,7 +100,7 @@ STAGE PLANS:
 className: VectorFilterOperator
 native: true
 predicateExpression: FilterExprOrExpr(children: 
FilterLongScalarEqualLongColumn(val 762, col 3:bigint), 
FilterExprAndExpr(children: FilterDoubleColLessDoubleColumn(col 13:float, col 
4:float)(children: CastLongToFloatViaLongToDouble(col 1:smallint) -> 13:float), 
FilterDoubleColGreaterDoubleScalar(col 13:double, val -5.0)(children: 
CastTimestampToDouble(col 9:timestamp) -> 13:double), 
FilterDoubleColNotEqualDoubleColumn(col 5:double, col 13:double)(children: 
CastLongToDouble(col 2:int) -> 13:double)), 
FilterStringGroupColEqualStringScalar(col 6:string, val a), 
FilterExprAndExpr(children: FilterDecimalColLessEqualDecimalScalar(col 
14:decimal(22,3), val -1.389)(children: CastLongToDecimal(col 3:bigint) -> 
14:decimal(22,3)), FilterStringGroupColNotEqualStringScalar(col 7:string, val 
a), FilterDecimalScalarNotEqualDecimalColumn(val 79.553, col 
15:decimal(13,3))(children: CastLongToDecimal(col 2:int) -> 15:decimal(13,3)), 
FilterLongColNotEqualLongColumn(col 11:boole
 an, col 10:boolean)))
-predicate: (((CAST( cbigint AS decimal(22,3)) <= -1.389) 
and (cstring2 <> 'a') and (79.553 <> CAST( cint AS decimal(13,3))) and 
(cboolean2 <> cboolean1)) or ((UDFToFloat(csmallint) < cfloat) and 
(UDFToDouble(ctimestamp2) > -5.0) and (cdouble <> UDFToDouble(cint))) or (762 = 
cbigint) or (cstring1 = 'a')) (type: boolean)
+predicate: (((CAST( cbigint AS decimal(22,3)) <= -1.389) 
and (cstring2 <> 'a') and (79.553 <> CAST( cint AS decimal(13,3))) and 
(cboolean2 <> cboolean1)) or ((UDFToFloat(csmallint) < cfloat) and 
(UDFToDouble(ctimestamp2) > -5.0D) and (cdouble <> UDFToDouble(cint))) or (762L 
= cbigint) or (cstring1 = 'a')) (type: boolean)
 Statistics: Num rows: 5465 Data size: 1157230 Basic stats: 
COMPLETE Column stats: COMPLETE
 Select Operator
   expressions: ctinyint (type: tinyint), csmallint (type: 
smallint), cint (type: int), cfloat (type: float), cdouble (type: double)
@@ -155,7 +155,7 @@ STAGE PLANS:
 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, 
_col6, _col7, _col8
 Statistics: Num rows: 1 Data size: 68 Basic stats: COMPLETE 
Column stats: COMPLETE
 Select Operator
-  expressions: _col0 (type: double), (_col0 + -3728.0) (type: 
double), (- (_col0 + -3728.0)) (type: double), (- (- (_col0 + -3728.0))) (type: 
double), ((- (- (_col0 + -3728.0))) * (_col0 + -3728.0)) (type: double), _col1 
(type: double), (- _col0) (type: double), _col2 (type: double), (((- (- (_col0 
+ -3728.0))) * (_col0 + -3728.0)) * (- (- (_col0 + -3728.0 (type: double), 
_col3 (type: double), (- _col2) (type: double), (_col2 - (- (- (_col0 + 
-3728.0 (type: double), ((_col2 - (- (- (_col0 + -3728.0 * _col2) 
(type: double), _col4 (type: double), _col5 (type: double), (10.175 - _col4) 
(type: double), (- (10.175 - _col4)) (type: double), ((- _col2) / -563.0) 
(type: double), _col6 (type: double), (- ((- _col2) / -563.0)) (type: double), 
(_col0 / _col1) (type: double), _col7 (type: tinyint), _col8 (type: bigint), 
(UDFToDouble(_col7) / ((- _col2) / -563.0)) (type: double), (- (_col0 / _col1)) 
(type: double)
+  expressions: _col0 (type: double), (_col0 + -3728.0D) (type: 
double), (- (_col0 + -3728.0D)) (type: double), (- (- (_col0 + -3728.0D))) 
(type: double), ((- (- (_col0 + -3728.0D))) * (_col0 + -3728.0D)) (type: 
double), _col1 (type: double), (- _col0) (type: double), _col2 (type: double), 
(((- (- (_col0 + -3728.0D))) * (_col0 + -3728.0D)) * (- (- (_col0 + 
-3728.0D (type: double), _col3 (type: double), (- _col2) (type: double), 
(_col2 - (- (- (_col0 + -3728.0D (type: double), ((_col2 - (- (- (_col0 + 
-3728.0D * _col2) (type: double), _col4 (type: double), _col5 (type: 
double), (10.175D - _col4) (type: double), (- (10.175D - _col4)) (type: 
double), ((- _col2) / -563.0D) (type: double), _col6 (type: double), (- ((- 
_col2) / -563.0D)) (type: double), (_col0 / _col1) (type: double), _col7 (type: 
tinyint), _col8 (type: bigint), (UDFToDouble(_col7) / ((- _col2) / -563.0D)) 
(type: double), (- (_col0 / _col1)) (type: double)
   outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, 
_col6

[14/26] hive git commit: HIVE-18797 : ExprConstNodeDesc's getExprString should put appropriate qualifier with literals (Vineet Garg, reviewed by Ashutosh Chauhan)

2018-03-02 Thread vgarg
http://git-wip-us.apache.org/repos/asf/hive/blob/17441e48/ql/src/test/results/clientpositive/llap/vectorized_math_funcs.q.out
--
diff --git 
a/ql/src/test/results/clientpositive/llap/vectorized_math_funcs.q.out 
b/ql/src/test/results/clientpositive/llap/vectorized_math_funcs.q.out
index 36f1bbf..35786eb 100644
--- a/ql/src/test/results/clientpositive/llap/vectorized_math_funcs.q.out
+++ b/ql/src/test/results/clientpositive/llap/vectorized_math_funcs.q.out
@@ -127,10 +127,10 @@ STAGE PLANS:
 className: VectorFilterOperator
 native: true
 predicateExpression: FilterExprAndExpr(children: 
FilterLongColEqualLongScalar(col 13:bigint, val 0)(children: 
LongColModuloLongScalar(col 3:bigint, val 500) -> 13:bigint), 
FilterDoubleColGreaterEqualDoubleScalar(col 14:double, val -1.0)(children: 
FuncSinDoubleToDouble(col 4:float) -> 14:double))
-predicate: (((cbigint % 500) = 0) and (sin(cfloat) >= 
-1.0)) (type: boolean)
+predicate: (((cbigint % 500) = 0) and (sin(cfloat) >= 
-1.0D)) (type: boolean)
 Statistics: Num rows: 2048 Data size: 48960 Basic stats: 
COMPLETE Column stats: COMPLETE
 Select Operator
-  expressions: cdouble (type: double), round(cdouble, 2) 
(type: double), floor(cdouble) (type: bigint), ceil(cdouble) (type: bigint), 
rand() (type: double), rand(98007) (type: double), exp(ln(cdouble)) (type: 
double), ln(cdouble) (type: double), ln(cfloat) (type: double), log10(cdouble) 
(type: double), log2(cdouble) (type: double), log2((cdouble - 15601.0)) (type: 
double), log2(cfloat) (type: double), log2(cbigint) (type: double), log2(cint) 
(type: double), log2(csmallint) (type: double), log2(ctinyint) (type: double), 
log(2, cdouble) (type: double), power(log2(cdouble), 2) (type: double), 
power(log2(cdouble), 2) (type: double), sqrt(cdouble) (type: double), 
sqrt(cbigint) (type: double), bin(cbigint) (type: string), hex(cdouble) (type: 
string), conv(cbigint, 10, 16) (type: string), abs(cdouble) (type: double), 
abs(ctinyint) (type: int), (cint pmod 3) (type: int), sin(cdouble) (type: 
double), asin(cdouble) (type: double), cos(cdouble) (type: double), 
acos(cdouble)
  (type: double), atan(cdouble) (type: double), degrees(cdouble) (type: 
double), radians(cdouble) (type: double), cdouble (type: double), cbigint 
(type: bigint), (- cdouble) (type: double), sign(cdouble) (type: double), 
sign(cbigint) (type: double), cos(((- sin(log(cdouble))) + 3.14159)) (type: 
double)
+  expressions: cdouble (type: double), round(cdouble, 2) 
(type: double), floor(cdouble) (type: bigint), ceil(cdouble) (type: bigint), 
rand() (type: double), rand(98007) (type: double), exp(ln(cdouble)) (type: 
double), ln(cdouble) (type: double), ln(cfloat) (type: double), log10(cdouble) 
(type: double), log2(cdouble) (type: double), log2((cdouble - 15601.0D)) (type: 
double), log2(cfloat) (type: double), log2(cbigint) (type: double), log2(cint) 
(type: double), log2(csmallint) (type: double), log2(ctinyint) (type: double), 
log(2, cdouble) (type: double), power(log2(cdouble), 2) (type: double), 
power(log2(cdouble), 2) (type: double), sqrt(cdouble) (type: double), 
sqrt(cbigint) (type: double), bin(cbigint) (type: string), hex(cdouble) (type: 
string), conv(cbigint, 10, 16) (type: string), abs(cdouble) (type: double), 
abs(ctinyint) (type: int), (cint pmod 3) (type: int), sin(cdouble) (type: 
double), asin(cdouble) (type: double), cos(cdouble) (type: double), acos(cdouble
 ) (type: double), atan(cdouble) (type: double), degrees(cdouble) (type: 
double), radians(cdouble) (type: double), cdouble (type: double), cbigint 
(type: bigint), (- cdouble) (type: double), sign(cdouble) (type: double), 
sign(cbigint) (type: double), cos(((- sin(log(cdouble))) + 3.14159D)) (type: 
double)
   outputColumnNames: _col0, _col1, _col2, _col3, _col4, 
_col5, _col6, _col7, _col8, _col9, _col10, _col11, _col12, _col13, _col14, 
_col15, _col16, _col17, _col18, _col19, _col20, _col21, _col22, _col23, _col24, 
_col25, _col26, _col27, _col28, _col29, _col30, _col31, _col32, _col33, _col34, 
_col35, _col36, _col37, _col38, _col39, _col40
   Select Vectorization:
   className: VectorSelectOperator

http://git-wip-us.apache.org/repos/asf/hive/blob/17441e48/ql/src/test/results/clientpositive/llap/vectorized_timestamp.q.out
--
diff --git a/ql/src/test/results/clientpositive/llap/vectorized_timestamp.q.out 
b/ql/src/test/results/clientpositive/llap/vectorized_timestamp.q.out
index 9652d36..9645a32 100644
--- a/ql/src/test/results/clientpositive/llap/vectorized_timestamp.q.out
+++ b/ql/src/test/results/clientpositive/llap/vectorized_timestamp.q.out
@@ -273,7 +273,7 @@ STAGE PLANS:

[12/26] hive git commit: HIVE-18797 : ExprConstNodeDesc's getExprString should put appropriate qualifier with literals (Vineet Garg, reviewed by Ashutosh Chauhan)

2018-03-02 Thread vgarg
http://git-wip-us.apache.org/repos/asf/hive/blob/17441e48/ql/src/test/results/clientpositive/perf/spark/query67.q.out
--
diff --git a/ql/src/test/results/clientpositive/perf/spark/query67.q.out 
b/ql/src/test/results/clientpositive/perf/spark/query67.q.out
index b0fc41c..26f6775 100644
--- a/ql/src/test/results/clientpositive/perf/spark/query67.q.out
+++ b/ql/src/test/results/clientpositive/perf/spark/query67.q.out
@@ -221,7 +221,7 @@ STAGE PLANS:
   Statistics: Num rows: 766650239 Data size: 67634106676 Basic 
stats: COMPLETE Column stats: NONE
   Group By Operator
 aggregations: sum(_col8)
-keys: _col0 (type: string), _col1 (type: string), _col2 
(type: string), _col3 (type: string), _col4 (type: int), _col5 (type: int), 
_col6 (type: int), _col7 (type: string), 0 (type: bigint)
+keys: _col0 (type: string), _col1 (type: string), _col2 
(type: string), _col3 (type: string), _col4 (type: int), _col5 (type: int), 
_col6 (type: int), _col7 (type: string), 0L (type: bigint)
 mode: hash
 outputColumnNames: _col0, _col1, _col2, _col3, _col4, 
_col5, _col6, _col7, _col8, _col9
 Statistics: Num rows: 6899852151 Data size: 608706960084 
Basic stats: COMPLETE Column stats: NONE

http://git-wip-us.apache.org/repos/asf/hive/blob/17441e48/ql/src/test/results/clientpositive/perf/spark/query70.q.out
--
diff --git a/ql/src/test/results/clientpositive/perf/spark/query70.q.out 
b/ql/src/test/results/clientpositive/perf/spark/query70.q.out
index 7ebb776..df07790 100644
--- a/ql/src/test/results/clientpositive/perf/spark/query70.q.out
+++ b/ql/src/test/results/clientpositive/perf/spark/query70.q.out
@@ -339,7 +339,7 @@ STAGE PLANS:
   Statistics: Num rows: 766650239 Data size: 67634106676 Basic 
stats: COMPLETE Column stats: NONE
   Group By Operator
 aggregations: sum(_col2)
-keys: _col0 (type: string), _col1 (type: string), 0 (type: 
bigint)
+keys: _col0 (type: string), _col1 (type: string), 0L 
(type: bigint)
 mode: hash
 outputColumnNames: _col0, _col1, _col2, _col3
 Statistics: Num rows: 2299950717 Data size: 202902320028 
Basic stats: COMPLETE Column stats: NONE

http://git-wip-us.apache.org/repos/asf/hive/blob/17441e48/ql/src/test/results/clientpositive/perf/spark/query72.q.out
--
diff --git a/ql/src/test/results/clientpositive/perf/spark/query72.q.out 
b/ql/src/test/results/clientpositive/perf/spark/query72.q.out
index ea43033..d204803 100644
--- a/ql/src/test/results/clientpositive/perf/spark/query72.q.out
+++ b/ql/src/test/results/clientpositive/perf/spark/query72.q.out
@@ -358,7 +358,7 @@ STAGE PLANS:
 outputColumnNames: _col4, _col6, _col7, _col9, _col10, _col16, 
_col18, _col20
 Statistics: Num rows: 510191624 Data size: 69090195216 Basic 
stats: COMPLETE Column stats: NONE
 Filter Operator
-  predicate: (UDFToDouble(_col20) > (UDFToDouble(_col9) + 
5.0)) (type: boolean)
+  predicate: (UDFToDouble(_col20) > (UDFToDouble(_col9) + 
5.0D)) (type: boolean)
   Statistics: Num rows: 170063874 Data size: 23030064981 Basic 
stats: COMPLETE Column stats: NONE
   Select Operator
 expressions: _col18 (type: string), _col4 (type: int), 
_col6 (type: int), _col7 (type: int), _col10 (type: int), _col16 (type: int)

http://git-wip-us.apache.org/repos/asf/hive/blob/17441e48/ql/src/test/results/clientpositive/perf/spark/query73.q.out
--
diff --git a/ql/src/test/results/clientpositive/perf/spark/query73.q.out 
b/ql/src/test/results/clientpositive/perf/spark/query73.q.out
index b25a16e..7fec0e1 100644
--- a/ql/src/test/results/clientpositive/perf/spark/query73.q.out
+++ b/ql/src/test/results/clientpositive/perf/spark/query73.q.out
@@ -86,7 +86,7 @@ STAGE PLANS:
   alias: household_demographics
   Statistics: Num rows: 7200 Data size: 770400 Basic stats: 
COMPLETE Column stats: NONE
   Filter Operator
-predicate: (((hd_buy_potential = '>1') or 
(hd_buy_potential = 'unknown')) and (hd_vehicle_count > 0) and CASE WHEN 
((hd_vehicle_count > 0)) THEN (((UDFToDouble(hd_dep_count) / 
UDFToDouble(hd_vehicle_count)) > 1.0)) ELSE (null) END and hd_demo_sk is not 
null) (type: boolean)
+predicate: (((hd_buy_potential = '>1') or 
(hd_buy_potential = 'unknown')) and (hd_vehicle_count > 0) and CASE WHEN 
((hd_vehicle_count > 0)) THEN (((UDFToDouble(hd_dep_

[10/26] hive git commit: HIVE-18797 : ExprConstNodeDesc's getExprString should put appropriate qualifier with literals (Vineet Garg, reviewed by Ashutosh Chauhan)

2018-03-02 Thread vgarg
http://git-wip-us.apache.org/repos/asf/hive/blob/17441e48/ql/src/test/results/clientpositive/spark/groupby_cube1.q.out
--
diff --git a/ql/src/test/results/clientpositive/spark/groupby_cube1.q.out 
b/ql/src/test/results/clientpositive/spark/groupby_cube1.q.out
index 71ccea5..92bb775 100644
--- a/ql/src/test/results/clientpositive/spark/groupby_cube1.q.out
+++ b/ql/src/test/results/clientpositive/spark/groupby_cube1.q.out
@@ -42,7 +42,7 @@ STAGE PLANS:
 Statistics: Num rows: 1 Data size: 300 Basic stats: 
COMPLETE Column stats: NONE
 Group By Operator
   aggregations: count()
-  keys: key (type: string), val (type: string), 0 (type: 
bigint)
+  keys: key (type: string), val (type: string), 0L (type: 
bigint)
   mode: hash
   outputColumnNames: _col0, _col1, _col2, _col3
   Statistics: Num rows: 4 Data size: 1200 Basic stats: 
COMPLETE Column stats: NONE
@@ -107,7 +107,7 @@ STAGE PLANS:
 Statistics: Num rows: 1 Data size: 300 Basic stats: 
COMPLETE Column stats: NONE
 Group By Operator
   aggregations: count()
-  keys: key (type: string), val (type: string), 0 (type: 
bigint)
+  keys: key (type: string), val (type: string), 0L (type: 
bigint)
   mode: hash
   outputColumnNames: _col0, _col1, _col2, _col3
   Statistics: Num rows: 4 Data size: 1200 Basic stats: 
COMPLETE Column stats: NONE
@@ -198,7 +198,7 @@ STAGE PLANS:
 Statistics: Num rows: 1 Data size: 300 Basic stats: 
COMPLETE Column stats: NONE
 Group By Operator
   aggregations: count()
-  keys: _col0 (type: string), _col1 (type: string), 0 
(type: bigint)
+  keys: _col0 (type: string), _col1 (type: string), 0L 
(type: bigint)
   mode: hash
   outputColumnNames: _col0, _col1, _col2, _col3
   Statistics: Num rows: 4 Data size: 1200 Basic stats: 
COMPLETE Column stats: NONE
@@ -288,7 +288,7 @@ STAGE PLANS:
 Statistics: Num rows: 1 Data size: 300 Basic stats: 
COMPLETE Column stats: NONE
 Group By Operator
   aggregations: count(DISTINCT val)
-  keys: key (type: string), 0 (type: bigint), val (type: 
string)
+  keys: key (type: string), 0L (type: bigint), val (type: 
string)
   mode: hash
   outputColumnNames: _col0, _col1, _col2, _col3
   Statistics: Num rows: 2 Data size: 600 Basic stats: 
COMPLETE Column stats: NONE
@@ -367,7 +367,7 @@ STAGE PLANS:
 Statistics: Num rows: 1 Data size: 300 Basic stats: 
COMPLETE Column stats: NONE
 Group By Operator
   aggregations: count()
-  keys: key (type: string), val (type: string), 0 (type: 
bigint)
+  keys: key (type: string), val (type: string), 0L (type: 
bigint)
   mode: hash
   outputColumnNames: _col0, _col1, _col2, _col3
   Statistics: Num rows: 4 Data size: 1200 Basic stats: 
COMPLETE Column stats: NONE
@@ -473,7 +473,7 @@ STAGE PLANS:
 Statistics: Num rows: 1 Data size: 300 Basic stats: 
COMPLETE Column stats: NONE
 Group By Operator
   aggregations: count(DISTINCT val)
-  keys: key (type: string), 0 (type: bigint), val (type: 
string)
+  keys: key (type: string), 0L (type: bigint), val (type: 
string)
   mode: hash
   outputColumnNames: _col0, _col1, _col2, _col3
   Statistics: Num rows: 2 Data size: 600 Basic stats: 
COMPLETE Column stats: NONE
@@ -591,7 +591,7 @@ STAGE PLANS:
 Statistics: Num rows: 1 Data size: 300 Basic stats: 
COMPLETE Column stats: NONE
 Group By Operator
   aggregations: count(1)
-  keys: key (type: string), val (type: string), 0 (type: 
bigint)
+  keys: key (type: string), val (type: string), 0L (type: 
bigint)
   mode: hash
   outputColumnNames: _col0, _col1, _col2, _col3
   Statistics: Num rows: 4 Data size: 1200 Basic stats: 
COMPLETE Column stats: NONE
@@ -612,7 +612,7 @@ STAGE PLANS:
 Statistics: Num rows: 1 Data size: 300 Basic stats: 
COMPLETE Column stats: NONE
 Group By Operator
   aggregations: sum(1)
-  keys: key (type: string

[04/26] hive git commit: HIVE-18797 : ExprConstNodeDesc's getExprString should put appropriate qualifier with literals (Vineet Garg, reviewed by Ashutosh Chauhan)

2018-03-02 Thread vgarg
http://git-wip-us.apache.org/repos/asf/hive/blob/17441e48/ql/src/test/results/clientpositive/spark/vectorization_short_regress.q.out
--
diff --git 
a/ql/src/test/results/clientpositive/spark/vectorization_short_regress.q.out 
b/ql/src/test/results/clientpositive/spark/vectorization_short_regress.q.out
index 9683efa..339ba9e 100644
--- a/ql/src/test/results/clientpositive/spark/vectorization_short_regress.q.out
+++ b/ql/src/test/results/clientpositive/spark/vectorization_short_regress.q.out
@@ -99,7 +99,7 @@ STAGE PLANS:
 className: VectorFilterOperator
 native: true
 predicateExpression: FilterExprOrExpr(children: 
FilterLongScalarEqualLongColumn(val 762, col 3:bigint), 
FilterExprAndExpr(children: FilterDoubleColLessDoubleColumn(col 13:float, col 
4:float)(children: CastLongToFloatViaLongToDouble(col 1:smallint) -> 13:float), 
FilterDoubleColGreaterDoubleScalar(col 13:double, val -5.0)(children: 
CastTimestampToDouble(col 9:timestamp) -> 13:double), 
FilterDoubleColNotEqualDoubleColumn(col 5:double, col 13:double)(children: 
CastLongToDouble(col 2:int) -> 13:double)), 
FilterStringGroupColEqualStringScalar(col 6:string, val a), 
FilterExprAndExpr(children: FilterDecimalColLessEqualDecimalScalar(col 
14:decimal(22,3), val -1.389)(children: CastLongToDecimal(col 3:bigint) -> 
14:decimal(22,3)), FilterStringGroupColNotEqualStringScalar(col 7:string, val 
a), FilterDecimalScalarNotEqualDecimalColumn(val 79.553, col 
15:decimal(13,3))(children: CastLongToDecimal(col 2:int) -> 15:decimal(13,3)), 
FilterLongColNotEqualLongColumn(col 11:boole
 an, col 10:boolean)))
-predicate: (((CAST( cbigint AS decimal(22,3)) <= -1.389) 
and (cstring2 <> 'a') and (79.553 <> CAST( cint AS decimal(13,3))) and 
(cboolean2 <> cboolean1)) or ((UDFToFloat(csmallint) < cfloat) and 
(UDFToDouble(ctimestamp2) > -5.0) and (cdouble <> UDFToDouble(cint))) or (762 = 
cbigint) or (cstring1 = 'a')) (type: boolean)
+predicate: (((CAST( cbigint AS decimal(22,3)) <= -1.389) 
and (cstring2 <> 'a') and (79.553 <> CAST( cint AS decimal(13,3))) and 
(cboolean2 <> cboolean1)) or ((UDFToFloat(csmallint) < cfloat) and 
(UDFToDouble(ctimestamp2) > -5.0D) and (cdouble <> UDFToDouble(cint))) or (762L 
= cbigint) or (cstring1 = 'a')) (type: boolean)
 Statistics: Num rows: 12288 Data size: 2641964 Basic 
stats: COMPLETE Column stats: NONE
 Select Operator
   expressions: ctinyint (type: tinyint), csmallint (type: 
smallint), cint (type: int), cfloat (type: float), cdouble (type: double)
@@ -152,7 +152,7 @@ STAGE PLANS:
 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, 
_col6, _col7, _col8
 Statistics: Num rows: 1 Data size: 492 Basic stats: COMPLETE 
Column stats: NONE
 Select Operator
-  expressions: _col0 (type: double), (_col0 + -3728.0) (type: 
double), (- (_col0 + -3728.0)) (type: double), (- (- (_col0 + -3728.0))) (type: 
double), ((- (- (_col0 + -3728.0))) * (_col0 + -3728.0)) (type: double), _col1 
(type: double), (- _col0) (type: double), _col2 (type: double), (((- (- (_col0 
+ -3728.0))) * (_col0 + -3728.0)) * (- (- (_col0 + -3728.0 (type: double), 
_col3 (type: double), (- _col2) (type: double), (_col2 - (- (- (_col0 + 
-3728.0 (type: double), ((_col2 - (- (- (_col0 + -3728.0 * _col2) 
(type: double), _col4 (type: double), _col5 (type: double), (10.175 - _col4) 
(type: double), (- (10.175 - _col4)) (type: double), ((- _col2) / -563.0) 
(type: double), _col6 (type: double), (- ((- _col2) / -563.0)) (type: double), 
(_col0 / _col1) (type: double), _col7 (type: tinyint), _col8 (type: bigint), 
(UDFToDouble(_col7) / ((- _col2) / -563.0)) (type: double), (- (_col0 / _col1)) 
(type: double)
+  expressions: _col0 (type: double), (_col0 + -3728.0D) (type: 
double), (- (_col0 + -3728.0D)) (type: double), (- (- (_col0 + -3728.0D))) 
(type: double), ((- (- (_col0 + -3728.0D))) * (_col0 + -3728.0D)) (type: 
double), _col1 (type: double), (- _col0) (type: double), _col2 (type: double), 
(((- (- (_col0 + -3728.0D))) * (_col0 + -3728.0D)) * (- (- (_col0 + 
-3728.0D (type: double), _col3 (type: double), (- _col2) (type: double), 
(_col2 - (- (- (_col0 + -3728.0D (type: double), ((_col2 - (- (- (_col0 + 
-3728.0D * _col2) (type: double), _col4 (type: double), _col5 (type: 
double), (10.175D - _col4) (type: double), (- (10.175D - _col4)) (type: 
double), ((- _col2) / -563.0D) (type: double), _col6 (type: double), (- ((- 
_col2) / -563.0D)) (type: double), (_col0 / _col1) (type: double), _col7 (type: 
tinyint), _col8 (type: bigint), (UDFToDouble(_col7) / ((- _col2) / -563.0D)) 
(type: double), (- (_col0 / _col1)) (type: double)
   outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, 
_col6, _

[16/26] hive git commit: HIVE-18797 : ExprConstNodeDesc's getExprString should put appropriate qualifier with literals (Vineet Garg, reviewed by Ashutosh Chauhan)

2018-03-02 Thread vgarg
http://git-wip-us.apache.org/repos/asf/hive/blob/17441e48/ql/src/test/results/clientpositive/llap/vector_windowing_expressions.q.out
--
diff --git 
a/ql/src/test/results/clientpositive/llap/vector_windowing_expressions.q.out 
b/ql/src/test/results/clientpositive/llap/vector_windowing_expressions.q.out
index 7e9a564..5405bd8 100644
--- a/ql/src/test/results/clientpositive/llap/vector_windowing_expressions.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_windowing_expressions.q.out
@@ -348,7 +348,7 @@ STAGE PLANS:
   window frame: ROWS PRECEDING(MAX)~CURRENT
   Statistics: Num rows: 26 Data size: 9828 Basic stats: 
COMPLETE Column stats: COMPLETE
   Select Operator
-expressions: _col2 (type: string), _col7 (type: double), 
_col5 (type: int), rank_window_0 (type: int), sum_window_1 (type: double), 
(sum_window_1 - 5.0) (type: double)
+expressions: _col2 (type: string), _col7 (type: double), 
_col5 (type: int), rank_window_0 (type: int), sum_window_1 (type: double), 
(sum_window_1 - 5.0D) (type: double)
 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5
 Statistics: Num rows: 26 Data size: 3380 Basic stats: 
COMPLETE Column stats: COMPLETE
 File Output Operator
@@ -2002,7 +2002,7 @@ STAGE PLANS:
   Lead/Lag information: lag(...) (type: double)
   Statistics: Num rows: 26 Data size: 9828 Basic stats: 
COMPLETE Column stats: COMPLETE
   Select Operator
-expressions: _col2 (type: string), _col7 (type: double), 
_col5 (type: int), ((round(sum_window_0, 2) + 50.0) = round((sum_window_1 + 
last_value_window_2), 2)) (type: boolean)
+expressions: _col2 (type: string), _col7 (type: double), 
_col5 (type: int), ((round(sum_window_0, 2) + 50.0D) = round((sum_window_1 + 
last_value_window_2), 2)) (type: boolean)
 outputColumnNames: _col0, _col1, _col2, _col3
 Statistics: Num rows: 26 Data size: 2964 Basic stats: 
COMPLETE Column stats: COMPLETE
 Limit

http://git-wip-us.apache.org/repos/asf/hive/blob/17441e48/ql/src/test/results/clientpositive/llap/vector_windowing_navfn.q.out
--
diff --git 
a/ql/src/test/results/clientpositive/llap/vector_windowing_navfn.q.out 
b/ql/src/test/results/clientpositive/llap/vector_windowing_navfn.q.out
index 8dc3fa7..caf9963 100644
--- a/ql/src/test/results/clientpositive/llap/vector_windowing_navfn.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_windowing_navfn.q.out
@@ -1424,7 +1424,7 @@ STAGE PLANS:
 className: VectorFilterOperator
 native: true
 predicateExpression: FilterExprAndExpr(children: 
FilterExprOrExpr(children: FilterStringGroupColEqualStringScalar(col 7:string, 
val oscar allen), FilterStringGroupColEqualStringScalar(col 7:string, val oscar 
carson)), FilterLongColEqualLongScalar(col 0:tinyint, val 10))
-predicate: (((s = 'oscar allen') or (s = 'oscar carson')) 
and (t = 10)) (type: boolean)
+predicate: (((s = 'oscar allen') or (s = 'oscar carson')) 
and (t = 10Y)) (type: boolean)
 Statistics: Num rows: 1 Data size: 192 Basic stats: 
COMPLETE Column stats: NONE
 Reduce Output Operator
   key expressions: UDFToByte(10) (type: tinyint), s (type: 
string)
@@ -1516,7 +1516,7 @@ STAGE PLANS:
   streamingColumns: []
   Statistics: Num rows: 1 Data size: 192 Basic stats: COMPLETE 
Column stats: NONE
   Select Operator
-expressions: 10 (type: tinyint), _col7 (type: string), 
_col2 (type: int), last_value_window_0 (type: int)
+expressions: 10Y (type: tinyint), _col7 (type: string), 
_col2 (type: int), last_value_window_0 (type: int)
 outputColumnNames: _col0, _col1, _col2, _col3
 Select Vectorization:
 className: VectorSelectOperator

http://git-wip-us.apache.org/repos/asf/hive/blob/17441e48/ql/src/test/results/clientpositive/llap/vector_windowing_order_null.q.out
--
diff --git 
a/ql/src/test/results/clientpositive/llap/vector_windowing_order_null.q.out 
b/ql/src/test/results/clientpositive/llap/vector_windowing_order_null.q.out
index 8aa904f..c5dd540 100644
--- a/ql/src/test/results/clientpositive/llap/vector_windowing_order_null.q.out
+++ b/ql/src/test/results/clientpositive/llap/vector_windowing_order_null.q.out
@@ -1040,7 +1040,7 @@ STAGE PLANS:
   streamingColumns: []
   Statistics: Num rows:

[24/26] hive git commit: HIVE-18797 : ExprConstNodeDesc's getExprString should put appropriate qualifier with literals (Vineet Garg, reviewed by Ashutosh Chauhan)

2018-03-02 Thread vgarg
http://git-wip-us.apache.org/repos/asf/hive/blob/17441e48/ql/src/test/results/clientpositive/druid_timeseries.q.out
--
diff --git a/ql/src/test/results/clientpositive/druid_timeseries.q.out 
b/ql/src/test/results/clientpositive/druid_timeseries.q.out
index 785cbd2..19a5af3 100644
--- a/ql/src/test/results/clientpositive/druid_timeseries.q.out
+++ b/ql/src/test/results/clientpositive/druid_timeseries.q.out
@@ -24,13 +24,13 @@ STAGE PLANS:
   Map Operator Tree:
   TableScan
 alias: druid_table_1
-filterExpr: (((__time >= 2009-12-31 16:00:00.0 US/Pacific) and 
(__time <= 2012-02-29 16:00:00.0 US/Pacific)) or (added <= 0)) (type: boolean)
+filterExpr: (((__time >= TIMESTAMPLOCALTZ'2009-12-31 16:00:00.0 
US/Pacific') and (__time <= TIMESTAMPLOCALTZ'2012-02-29 16:00:00.0 
US/Pacific')) or (added <= 0)) (type: boolean)
 properties:
   druid.query.json 
{"queryType":"select","dataSource":"wikipedia","descending":false,"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"dimensions":["robot","namespace","anonymous","unpatrolled","page","language","newpage","user"],"metrics":["count","added","delta","variation","deleted"],"granularity":"all","pagingSpec":{"threshold":16384,"fromNext":true},"context":{"druid.query.fetch":false}}
   druid.query.type select
 Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column 
stats: NONE
 Filter Operator
-  predicate: (((__time >= 2009-12-31 16:00:00.0 US/Pacific) and 
(__time <= 2012-02-29 16:00:00.0 US/Pacific)) or (added <= 0)) (type: boolean)
+  predicate: (((__time >= TIMESTAMPLOCALTZ'2009-12-31 16:00:00.0 
US/Pacific') and (__time <= TIMESTAMPLOCALTZ'2012-02-29 16:00:00.0 
US/Pacific')) or (added <= 0)) (type: boolean)
   Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column 
stats: NONE
   Select Operator
 Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL 
Column stats: NONE
@@ -443,13 +443,13 @@ STAGE PLANS:
   Map Operator Tree:
   TableScan
 alias: druid_table_1
-filterExpr: floor_hour(__time) BETWEEN 2010-01-01 00:00:00.0 
US/Pacific AND 2014-01-01 00:00:00.0 US/Pacific (type: boolean)
+filterExpr: floor_hour(__time) BETWEEN TIMESTAMPLOCALTZ'2010-01-01 
00:00:00.0 US/Pacific' AND TIMESTAMPLOCALTZ'2014-01-01 00:00:00.0 US/Pacific' 
(type: boolean)
 properties:
   druid.query.json 
{"queryType":"select","dataSource":"wikipedia","descending":false,"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"dimensions":[],"metrics":["added","variation"],"granularity":"all","pagingSpec":{"threshold":16384,"fromNext":true},"context":{"druid.query.fetch":false}}
   druid.query.type select
 Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column 
stats: NONE
 Filter Operator
-  predicate: floor_hour(__time) BETWEEN 2010-01-01 00:00:00.0 
US/Pacific AND 2014-01-01 00:00:00.0 US/Pacific (type: boolean)
+  predicate: floor_hour(__time) BETWEEN 
TIMESTAMPLOCALTZ'2010-01-01 00:00:00.0 US/Pacific' AND 
TIMESTAMPLOCALTZ'2014-01-01 00:00:00.0 US/Pacific' (type: boolean)
   Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column 
stats: NONE
   Select Operator
 expressions: floor_hour(__time) (type: timestamp with local 
time zone), added (type: float), variation (type: float)
@@ -520,13 +520,13 @@ STAGE PLANS:
   Map Operator Tree:
   TableScan
 alias: druid_table_1
-filterExpr: floor_hour(__time) BETWEEN 2010-01-01 00:00:00.0 
US/Pacific AND 2014-01-01 00:00:00.0 US/Pacific (type: boolean)
+filterExpr: floor_hour(__time) BETWEEN TIMESTAMPLOCALTZ'2010-01-01 
00:00:00.0 US/Pacific' AND TIMESTAMPLOCALTZ'2014-01-01 00:00:00.0 US/Pacific' 
(type: boolean)
 properties:
   druid.query.json 
{"queryType":"select","dataSource":"wikipedia","descending":false,"intervals":["1900-01-01T00:00:00.000Z/3000-01-01T00:00:00.000Z"],"dimensions":[],"metrics":["added","variation"],"granularity":"all","pagingSpec":{"threshold":16384,"fromNext":true},"context":{"druid.query.fetch":false}}
   druid.query.type select
 Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column 
stats: NONE
 Filter Operator
-  predicate: floor_hour(__time) BETWEEN 2010-01-01 00:00:00.0 
US/Pacific AND 2014-01-01 00:00:00.0 US/Pacific (type: boolean)
+  predicate: floor_hour(__time) BETWEEN 
TIMESTAMPLOCALTZ'2010-01-01 00:00:00.0 US/Pacific' AND 
TIMESTAMPLOCALTZ'2014-01-01 00:00:00.0 US/Pacific' (type: boolean)
   Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column 
stats: NONE
   Selec

[05/26] hive git commit: HIVE-18797 : ExprConstNodeDesc's getExprString should put appropriate qualifier with literals (Vineet Garg, reviewed by Ashutosh Chauhan)

2018-03-02 Thread vgarg
http://git-wip-us.apache.org/repos/asf/hive/blob/17441e48/ql/src/test/results/clientpositive/spark/vector_between_in.q.out
--
diff --git a/ql/src/test/results/clientpositive/spark/vector_between_in.q.out 
b/ql/src/test/results/clientpositive/spark/vector_between_in.q.out
index 75397d7..9f5fa2a 100644
--- a/ql/src/test/results/clientpositive/spark/vector_between_in.q.out
+++ b/ql/src/test/results/clientpositive/spark/vector_between_in.q.out
@@ -55,7 +55,7 @@ STAGE PLANS:
 className: VectorFilterOperator
 native: true
 predicateExpression: FilterLongColumnInList(col 
3:date, values [-67, -171])
-predicate: (cdate) IN (1969-10-26, 1969-07-14) (type: 
boolean)
+predicate: (cdate) IN (DATE'1969-10-26', DATE'1969-07-14') 
(type: boolean)
 Statistics: Num rows: 6145 Data size: 1233908 Basic stats: 
COMPLETE Column stats: NONE
 Select Operator
   expressions: cdate (type: date)
@@ -148,7 +148,7 @@ STAGE PLANS:
 className: VectorFilterOperator
 native: true
 predicateExpression: SelectColumnIsFalse(col 
5:boolean)(children: LongColumnInList(col 3, values [-67, -171, 20]) -> 
5:boolean)
-predicate: (not (cdate) IN (1969-10-26, 1969-07-14, 
1970-01-21)) (type: boolean)
+predicate: (not (cdate) IN (DATE'1969-10-26', 
DATE'1969-07-14', DATE'1970-01-21')) (type: boolean)
 Statistics: Num rows: 6144 Data size: 1233707 Basic stats: 
COMPLETE Column stats: NONE
 Select Operator
   Select Vectorization:
@@ -455,7 +455,7 @@ STAGE PLANS:
 className: VectorFilterOperator
 native: true
 predicateExpression: FilterLongColumnBetween(col 
3:date, left -2, right 1)
-predicate: cdate BETWEEN 1969-12-30 AND 1970-01-02 (type: 
boolean)
+predicate: cdate BETWEEN DATE'1969-12-30' AND 
DATE'1970-01-02' (type: boolean)
 Statistics: Num rows: 1365 Data size: 274090 Basic stats: 
COMPLETE Column stats: NONE
 Select Operator
   expressions: cdate (type: date)
@@ -548,7 +548,7 @@ STAGE PLANS:
 className: VectorFilterOperator
 native: true
 predicateExpression: FilterLongColumnNotBetween(col 
3:date, left -610, right 608)
-predicate: cdate NOT BETWEEN 1968-05-01 AND 1971-09-01 
(type: boolean)
+predicate: cdate NOT BETWEEN DATE'1968-05-01' AND 
DATE'1971-09-01' (type: boolean)
 Statistics: Num rows: 10924 Data size: 2193525 Basic 
stats: COMPLETE Column stats: NONE
 Select Operator
   expressions: cdate (type: date)
@@ -1083,7 +1083,7 @@ STAGE PLANS:
   TableScan Vectorization:
   native: true
   Select Operator
-expressions: (cdate) IN (1969-10-26, 1969-07-14) (type: 
boolean)
+expressions: (cdate) IN (DATE'1969-10-26', 
DATE'1969-07-14') (type: boolean)
 outputColumnNames: _col0
 Select Vectorization:
 className: VectorSelectOperator
@@ -1355,13 +1355,13 @@ STAGE PLANS:
   TableScan Vectorization:
   native: true
   Select Operator
-expressions: cdate BETWEEN 1969-12-30 AND 1970-01-02 
(type: boolean)
+expressions: cdate BETWEEN DATE'1969-12-30' AND 
DATE'1970-01-02' (type: boolean)
 outputColumnNames: _col0
 Select Vectorization:
 className: VectorSelectOperator
 native: true
 projectedOutputColumnNums: [5]
-selectExpressions: VectorUDFAdaptor(cdate BETWEEN 
1969-12-30 AND 1970-01-02) -> 5:boolean
+selectExpressions: VectorUDFAdaptor(cdate BETWEEN 
DATE'1969-12-30' AND DATE'1970-01-02') -> 5:boolean
 Statistics: Num rows: 12289 Data size: 2467616 Basic 
stats: COMPLETE Column stats: NONE
 Group By Operator
   aggregations: count()

http://git-wip-us.apache.org/repos/asf/hive/blob/17441e48/ql/src/test/results/clientpositive/spark/vector_cast_constant.q.out
--
diff --git 
a/ql/src/test/results/clientpositive/spark/vector_cast_constant.q.out 
b/ql/src/test/results/clientpositive/spark/vector_cast_constant.q.out
index 9449e68..d81781e 100644
--- a/ql/src/test/results/clientposi

[03/26] hive git commit: HIVE-18797 : ExprConstNodeDesc's getExprString should put appropriate qualifier with literals (Vineet Garg, reviewed by Ashutosh Chauhan)

2018-03-02 Thread vgarg
http://git-wip-us.apache.org/repos/asf/hive/blob/17441e48/ql/src/test/results/clientpositive/timestamp_ints_casts.q.out
--
diff --git a/ql/src/test/results/clientpositive/timestamp_ints_casts.q.out 
b/ql/src/test/results/clientpositive/timestamp_ints_casts.q.out
index bc5ceb3..1966078 100644
--- a/ql/src/test/results/clientpositive/timestamp_ints_casts.q.out
+++ b/ql/src/test/results/clientpositive/timestamp_ints_casts.q.out
@@ -49,7 +49,7 @@ STAGE PLANS:
   predicate: ((cbigint % 250) = 0) (type: boolean)
   Statistics: Num rows: 6144 Data size: 1320982 Basic stats: 
COMPLETE Column stats: NONE
   Select Operator
-expressions: CAST( ctinyint AS TIMESTAMP) (type: timestamp), 
CAST( csmallint AS TIMESTAMP) (type: timestamp), CAST( cint AS TIMESTAMP) 
(type: timestamp), CAST( cbigint AS TIMESTAMP) (type: timestamp), CAST( cfloat 
AS TIMESTAMP) (type: timestamp), CAST( cdouble AS TIMESTAMP) (type: timestamp), 
CAST( cboolean1 AS TIMESTAMP) (type: timestamp), CAST( (cbigint * 0) AS 
TIMESTAMP) (type: timestamp), ctimestamp1 (type: timestamp), CAST( cstring1 AS 
TIMESTAMP) (type: timestamp), CAST( substr(cstring1, 1, 1) AS TIMESTAMP) (type: 
timestamp)
+expressions: CAST( ctinyint AS TIMESTAMP) (type: timestamp), 
CAST( csmallint AS TIMESTAMP) (type: timestamp), CAST( cint AS TIMESTAMP) 
(type: timestamp), CAST( cbigint AS TIMESTAMP) (type: timestamp), CAST( cfloat 
AS TIMESTAMP) (type: timestamp), CAST( cdouble AS TIMESTAMP) (type: timestamp), 
CAST( cboolean1 AS TIMESTAMP) (type: timestamp), CAST( (cbigint * 0L) AS 
TIMESTAMP) (type: timestamp), ctimestamp1 (type: timestamp), CAST( cstring1 AS 
TIMESTAMP) (type: timestamp), CAST( substr(cstring1, 1, 1) AS TIMESTAMP) (type: 
timestamp)
 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, 
_col6, _col7, _col8, _col9, _col10
 Statistics: Num rows: 6144 Data size: 1320982 Basic stats: 
COMPLETE Column stats: NONE
 File Output Operator
@@ -181,7 +181,7 @@ STAGE PLANS:
   predicate: ((cbigint % 250) = 0) (type: boolean)
   Statistics: Num rows: 6144 Data size: 1320982 Basic stats: 
COMPLETE Column stats: NONE
   Select Operator
-expressions: CAST( ctinyint AS TIMESTAMP) (type: timestamp), 
CAST( csmallint AS TIMESTAMP) (type: timestamp), CAST( cint AS TIMESTAMP) 
(type: timestamp), CAST( cbigint AS TIMESTAMP) (type: timestamp), CAST( cfloat 
AS TIMESTAMP) (type: timestamp), CAST( cdouble AS TIMESTAMP) (type: timestamp), 
CAST( cboolean1 AS TIMESTAMP) (type: timestamp), CAST( (cbigint * 0) AS 
TIMESTAMP) (type: timestamp), ctimestamp1 (type: timestamp), CAST( cstring1 AS 
TIMESTAMP) (type: timestamp), CAST( substr(cstring1, 1, 1) AS TIMESTAMP) (type: 
timestamp)
+expressions: CAST( ctinyint AS TIMESTAMP) (type: timestamp), 
CAST( csmallint AS TIMESTAMP) (type: timestamp), CAST( cint AS TIMESTAMP) 
(type: timestamp), CAST( cbigint AS TIMESTAMP) (type: timestamp), CAST( cfloat 
AS TIMESTAMP) (type: timestamp), CAST( cdouble AS TIMESTAMP) (type: timestamp), 
CAST( cboolean1 AS TIMESTAMP) (type: timestamp), CAST( (cbigint * 0L) AS 
TIMESTAMP) (type: timestamp), ctimestamp1 (type: timestamp), CAST( cstring1 AS 
TIMESTAMP) (type: timestamp), CAST( substr(cstring1, 1, 1) AS TIMESTAMP) (type: 
timestamp)
 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, 
_col6, _col7, _col8, _col9, _col10
 Statistics: Num rows: 6144 Data size: 1320982 Basic stats: 
COMPLETE Column stats: NONE
 File Output Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/17441e48/ql/src/test/results/clientpositive/timestamp_literal.q.out
--
diff --git a/ql/src/test/results/clientpositive/timestamp_literal.q.out 
b/ql/src/test/results/clientpositive/timestamp_literal.q.out
index 4e06969..67750bb 100644
--- a/ql/src/test/results/clientpositive/timestamp_literal.q.out
+++ b/ql/src/test/results/clientpositive/timestamp_literal.q.out
@@ -17,7 +17,7 @@ STAGE PLANS:
   Row Limit Per Split: 1
   Statistics: Num rows: 1 Data size: 10 Basic stats: COMPLETE Column 
stats: COMPLETE
   Select Operator
-expressions: 2011-01-01 01:01:01.0 (type: timestamp)
+expressions: TIMESTAMP'2011-01-01 01:01:01.0' (type: timestamp)
 outputColumnNames: _col0
 Statistics: Num rows: 1 Data size: 40 Basic stats: COMPLETE Column 
stats: COMPLETE
 ListSink

http://git-wip-us.apache.org/repos/asf/hive/blob/17441e48/ql/src/test/results/clientpositive/timestamptz.q.out
--
diff --git a/ql/src/test/results/clientpositive/timestamptz.q.out 
b/ql/src/test/results/clientpositive/timestamptz.q.out
index 7cff9

[07/26] hive git commit: HIVE-18797 : ExprConstNodeDesc's getExprString should put appropriate qualifier with literals (Vineet Garg, reviewed by Ashutosh Chauhan)

2018-03-02 Thread vgarg
http://git-wip-us.apache.org/repos/asf/hive/blob/17441e48/ql/src/test/results/clientpositive/spark/spark_vectorized_dynamic_partition_pruning.q.out
--
diff --git 
a/ql/src/test/results/clientpositive/spark/spark_vectorized_dynamic_partition_pruning.q.out
 
b/ql/src/test/results/clientpositive/spark/spark_vectorized_dynamic_partition_pruning.q.out
index d359c36..cdfd3e3 100644
--- 
a/ql/src/test/results/clientpositive/spark/spark_vectorized_dynamic_partition_pruning.q.out
+++ 
b/ql/src/test/results/clientpositive/spark/spark_vectorized_dynamic_partition_pruning.q.out
@@ -1878,7 +1878,7 @@ STAGE PLANS:
 Map Operator Tree:
 TableScan
   alias: srcpart_hour
-  filterExpr: ((UDFToDouble(hour) = 11.0) and hr is not null) 
(type: boolean)
+  filterExpr: ((UDFToDouble(hour) = 11.0D) and hr is not null) 
(type: boolean)
   Statistics: Num rows: 2 Data size: 344 Basic stats: COMPLETE 
Column stats: NONE
   TableScan Vectorization:
   native: true
@@ -1888,7 +1888,7 @@ STAGE PLANS:
 className: VectorFilterOperator
 native: true
 predicateExpression: FilterExprAndExpr(children: 
FilterDoubleColEqualDoubleScalar(col 3:double, val 11.0)(children: 
CastStringToDouble(col 1:string) -> 3:double), SelectColumnIsNotNull(col 
0:string))
-predicate: ((UDFToDouble(hour) = 11.0) and hr is not null) 
(type: boolean)
+predicate: ((UDFToDouble(hour) = 11.0D) and hr is not 
null) (type: boolean)
 Statistics: Num rows: 1 Data size: 172 Basic stats: 
COMPLETE Column stats: NONE
 Select Operator
   expressions: hr (type: string)
@@ -2048,7 +2048,7 @@ STAGE PLANS:
 Map Operator Tree:
 TableScan
   alias: srcpart_hour
-  filterExpr: ((UDFToDouble(hour) = 11.0) and hr is not null) 
(type: boolean)
+  filterExpr: ((UDFToDouble(hour) = 11.0D) and hr is not null) 
(type: boolean)
   Statistics: Num rows: 2 Data size: 344 Basic stats: COMPLETE 
Column stats: NONE
   TableScan Vectorization:
   native: true
@@ -2058,7 +2058,7 @@ STAGE PLANS:
 className: VectorFilterOperator
 native: true
 predicateExpression: FilterExprAndExpr(children: 
FilterDoubleColEqualDoubleScalar(col 3:double, val 11.0)(children: 
CastStringToDouble(col 1:string) -> 3:double), SelectColumnIsNotNull(col 
0:string))
-predicate: ((UDFToDouble(hour) = 11.0) and hr is not null) 
(type: boolean)
+predicate: ((UDFToDouble(hour) = 11.0D) and hr is not 
null) (type: boolean)
 Statistics: Num rows: 1 Data size: 172 Basic stats: 
COMPLETE Column stats: NONE
 Select Operator
   expressions: hr (type: string)
@@ -2330,7 +2330,7 @@ STAGE PLANS:
 Map Operator Tree:
 TableScan
   alias: srcpart_hour
-  filterExpr: ((UDFToDouble(hour) = 11.0) and hr is not null) 
(type: boolean)
+  filterExpr: ((UDFToDouble(hour) = 11.0D) and hr is not null) 
(type: boolean)
   Statistics: Num rows: 2 Data size: 344 Basic stats: COMPLETE 
Column stats: NONE
   TableScan Vectorization:
   native: true
@@ -2340,7 +2340,7 @@ STAGE PLANS:
 className: VectorFilterOperator
 native: true
 predicateExpression: FilterExprAndExpr(children: 
FilterDoubleColEqualDoubleScalar(col 3:double, val 11.0)(children: 
CastStringToDouble(col 1:string) -> 3:double), SelectColumnIsNotNull(col 
0:string))
-predicate: ((UDFToDouble(hour) = 11.0) and hr is not null) 
(type: boolean)
+predicate: ((UDFToDouble(hour) = 11.0D) and hr is not 
null) (type: boolean)
 Statistics: Num rows: 1 Data size: 172 Basic stats: 
COMPLETE Column stats: NONE
 Select Operator
   expressions: hr (type: string)
@@ -2519,7 +2519,7 @@ STAGE PLANS:
 Map Operator Tree:
 TableScan
   alias: srcpart_date_hour
-  filterExpr: ((date = '2008-04-08') and (UDFToDouble(hour) = 
11.0) and ds is not null and hr is not null) (type: boolean)
+  filterExpr: ((date = '2008-04-08') and (UDFToDouble(hour) = 
11.0D) and ds is not null and hr is not null) (type: boolean)
   Statistics: Num rows: 4 Data size: 1440 Basic stats: 
COMPLETE Column stats: NONE
   TableScan Vectorization:
   native: true

[11/26] hive git commit: HIVE-18797 : ExprConstNodeDesc's getExprString should put appropriate qualifier with literals (Vineet Garg, reviewed by Ashutosh Chauhan)

2018-03-02 Thread vgarg
http://git-wip-us.apache.org/repos/asf/hive/blob/17441e48/ql/src/test/results/clientpositive/perf/tez/query95.q.out
--
diff --git a/ql/src/test/results/clientpositive/perf/tez/query95.q.out 
b/ql/src/test/results/clientpositive/perf/tez/query95.q.out
index 15920d9..46f9ae2 100644
--- a/ql/src/test/results/clientpositive/perf/tez/query95.q.out
+++ b/ql/src/test/results/clientpositive/perf/tez/query95.q.out
@@ -225,7 +225,7 @@ Stage-0
 Select Operator [SEL_5] (rows=8116 
width=1119)
   Output:["_col0"]
   Filter Operator [FIL_107] 
(rows=8116 width=1119)
-predicate:(CAST( d_date AS 
TIMESTAMP) BETWEEN 1999-05-01 00:00:00.0 AND 1999-06-30 00:00:00.0 and 
d_date_sk is not null)
+predicate:(CAST( d_date AS 
TIMESTAMP) BETWEEN TIMESTAMP'1999-05-01 00:00:00.0' AND TIMESTAMP'1999-06-30 
00:00:00.0' and d_date_sk is not null)
 TableScan [TS_3] (rows=73049 
width=1119)
   
default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_date"]
 

http://git-wip-us.apache.org/repos/asf/hive/blob/17441e48/ql/src/test/results/clientpositive/perf/tez/query98.q.out
--
diff --git a/ql/src/test/results/clientpositive/perf/tez/query98.q.out 
b/ql/src/test/results/clientpositive/perf/tez/query98.q.out
index da534cd..19bb49c 100644
--- a/ql/src/test/results/clientpositive/perf/tez/query98.q.out
+++ b/ql/src/test/results/clientpositive/perf/tez/query98.q.out
@@ -128,7 +128,7 @@ Stage-0
 Select Operator [SEL_5] (rows=8116 
width=1119)
   Output:["_col0"]
   Filter Operator [FIL_35] (rows=8116 
width=1119)
-predicate:(CAST( d_date AS TIMESTAMP) 
BETWEEN 2001-01-12 00:00:00.0 AND 2001-02-11 00:00:00.0 and d_date_sk is not 
null)
+predicate:(CAST( d_date AS TIMESTAMP) 
BETWEEN TIMESTAMP'2001-01-12 00:00:00.0' AND TIMESTAMP'2001-02-11 00:00:00.0' 
and d_date_sk is not null)
 TableScan [TS_3] (rows=73049 
width=1119)
   
default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_date"]
 

http://git-wip-us.apache.org/repos/asf/hive/blob/17441e48/ql/src/test/results/clientpositive/ppd2.q.out
--
diff --git a/ql/src/test/results/clientpositive/ppd2.q.out 
b/ql/src/test/results/clientpositive/ppd2.q.out
index 43119be..ab5a37f 100644
--- a/ql/src/test/results/clientpositive/ppd2.q.out
+++ b/ql/src/test/results/clientpositive/ppd2.q.out
@@ -61,7 +61,7 @@ STAGE PLANS:
   outputColumnNames: _col0, _col1
   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE 
Column stats: NONE
   Filter Operator
-predicate: (_col1 > 1) (type: boolean)
+predicate: (_col1 > 1L) (type: boolean)
 Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE 
Column stats: NONE
 File Output Operator
   compressed: false
@@ -451,7 +451,7 @@ STAGE PLANS:
   outputColumnNames: _col0, _col1
   Statistics: Num rows: 250 Data size: 2656 Basic stats: COMPLETE 
Column stats: NONE
   Filter Operator
-predicate: (_col1 > 1) (type: boolean)
+predicate: (_col1 > 1L) (type: boolean)
 Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE 
Column stats: NONE
 File Output Operator
   compressed: false

http://git-wip-us.apache.org/repos/asf/hive/blob/17441e48/ql/src/test/results/clientpositive/ppd_gby.q.out
--
diff --git a/ql/src/test/results/clientpositive/ppd_gby.q.out 
b/ql/src/test/results/clientpositive/ppd_gby.q.out
index c21e5a3..1498177 100644
--- a/ql/src/test/results/clientpositive/ppd_gby.q.out
+++ b/ql/src/test/results/clientpositive/ppd_gby.q.out
@@ -44,7 +44,7 @@ STAGE PLANS:
   outputColumnNames: _col0, _col1
   Statistics: Num rows: 83 Data size: 881 Basic stats: COMPLETE Column 
stats: NONE
   Filter Operator
-predicate: ((_col0 < 'val_400') or (_col1 > 30)) (type: boolean)
+predicate: ((_col0 < 'val_400') or (_col1 > 30L)) (type: boolean)
 Statistics: Num rows: 54 Data size: 573 Basic stats: COMPLETE 
Column stats: NONE
 Select Operator
   expressions: _col0 (type: string)
@@ -253,7 +253,7 @@ STAGE PLANS:
   outputColumn

[21/26] hive git commit: HIVE-18797 : ExprConstNodeDesc's getExprString should put appropriate qualifier with literals (Vineet Garg, reviewed by Ashutosh Chauhan)

2018-03-02 Thread vgarg
http://git-wip-us.apache.org/repos/asf/hive/blob/17441e48/ql/src/test/results/clientpositive/llap/explainuser_1.q.out
--
diff --git a/ql/src/test/results/clientpositive/llap/explainuser_1.q.out 
b/ql/src/test/results/clientpositive/llap/explainuser_1.q.out
index 45b2b4f..98743eb 100644
--- a/ql/src/test/results/clientpositive/llap/explainuser_1.q.out
+++ b/ql/src/test/results/clientpositive/llap/explainuser_1.q.out
@@ -477,7 +477,7 @@ Stage-0
 Select Operator [SEL_23] (rows=1 width=20)
   Output:["_col1","_col4"]
   Merge Join Operator [MERGEJOIN_41] (rows=1 width=20)
-
Conds:RS_19._col0=RS_20._col0(Inner),RS_20._col0=RS_21._col0(Inner),Output:["_col1","_col3","_col4","_col6"],residual
 filter predicates:{((_col1 >= 1) or (_col4 >= 1))} {((UDFToLong(_col1) + 
_col4) >= 0)} {((_col3 + _col6) >= 0)}
+
Conds:RS_19._col0=RS_20._col0(Inner),RS_20._col0=RS_21._col0(Inner),Output:["_col1","_col3","_col4","_col6"],residual
 filter predicates:{((_col1 >= 1) or (_col4 >= 1L))} {((UDFToLong(_col1) + 
_col4) >= 0)} {((_col3 + _col6) >= 0)}
   <-Map 1 [SIMPLE_EDGE] llap
 SHUFFLE [RS_19]
   PartitionCols:_col0
@@ -624,7 +624,7 @@ Stage-0
   Select Operator [SEL_23] (rows=1 width=20)
 Output:["_col1","_col4"]
 Merge Join Operator [MERGEJOIN_40] (rows=1 width=20)
-  
Conds:RS_19._col0=RS_20._col0(Inner),RS_20._col0=RS_21._col0(Inner),Output:["_col1","_col3","_col4","_col6"],residual
 filter predicates:{((_col1 >= 1) or (_col4 >= 1))} {((UDFToLong(_col1) + 
_col4) >= 0)} {((_col3 + _col6) >= 0)}
+  
Conds:RS_19._col0=RS_20._col0(Inner),RS_20._col0=RS_21._col0(Inner),Output:["_col1","_col3","_col4","_col6"],residual
 filter predicates:{((_col1 >= 1) or (_col4 >= 1L))} {((UDFToLong(_col1) + 
_col4) >= 0)} {((_col3 + _col6) >= 0)}
 <-Map 1 [SIMPLE_EDGE] llap
   SHUFFLE [RS_19]
 PartitionCols:_col0
@@ -929,7 +929,7 @@ Stage-0
 Select Operator [SEL_2] (rows=6 width=85)
   Output:["_col0"]
   Filter Operator [FIL_13] (rows=6 width=85)
-predicate:(UDFToDouble(key) >= 1.0)
+predicate:(UDFToDouble(key) >= 1.0D)
 TableScan [TS_0] (rows=20 width=80)
   
default@cbo_t1,cbo_t1,Tbl:COMPLETE,Col:COMPLETE,Output:["key"]
 <-Map 3 [SIMPLE_EDGE] llap
@@ -938,7 +938,7 @@ Stage-0
 Select Operator [SEL_5] (rows=6 width=85)
   Output:["_col0"]
   Filter Operator [FIL_14] (rows=6 width=85)
-predicate:(UDFToDouble(key) >= 1.0)
+predicate:(UDFToDouble(key) >= 1.0D)
 TableScan [TS_3] (rows=20 width=80)
   
default@cbo_t3,cbo_t3,Tbl:COMPLETE,Col:COMPLETE,Output:["key"]
 
@@ -1565,7 +1565,7 @@ Stage-0
   Select Operator [SEL_19] (rows=6 width=85)
 Output:["_col0"]
 Filter Operator [FIL_44] (rows=6 width=85)
-  predicate:(UDFToDouble(key) > 0.0)
+  predicate:(UDFToDouble(key) > 0.0D)
   TableScan [TS_17] (rows=20 width=80)
 
default@cbo_t3,cbo_t3,Tbl:COMPLETE,Col:COMPLETE,Output:["key"]
 <-Reducer 2 [SIMPLE_EDGE] llap
@@ -1574,7 +1574,7 @@ Stage-0
 Select Operator [SEL_8] (rows=1 width=93)
   Output:["_col0","_col1"]
   Filter Operator [FIL_7] (rows=1 width=101)
-predicate:(((UDFToDouble(_col2) + 
UDFToDouble(_col3)) >= 0.0) and ((UDFToDouble(_col2) >= 1.0) or (_col3 >= 1)))
+predicate:(((UDFToDouble(_col2) + 
UDFToDouble(_col3)) >= 0.0D) and ((UDFToDouble(_col2) >= 1.0D) or (_col3 >= 
1L)))
 Select Operator [SEL_6] (rows=1 width=101)
   Output:["_col2","_col3"]
   Group By Operator [GBY_5] (rows=1 width=101)
@@ -1585,7 +1585,7 @@ Stage-0
   Group By Operator [GBY_3] (rows=1 width=101)
 
Output:["_col0","_col1","_col2","_col3"],aggregations:["sum(c_int)"],keys:key, 
c_int, c_float
 Filter Operator [FIL_42] (rows=1 width=93)
-  predicate:c_int + 1) + 1) >= 0) and 
(((c_int + 1) > 0) or (UDFToDouble(key) >= 0.0)) and ((UDFToFloat(c_int) + 
c_float) >= 0) and ((c_int + 1) >= 0) and ((c_int > 0) or (c_float >= 0)) and 
((c_int >= 1) or (c_float >= 1)) and (UDFToDouble(key) > 0.0) and (c_float > 

[1/2] hive git commit: HIVE-18828 : improve error handling for codecs in LLAP IO (Sergey Shelukhin, reviewed by Gopal Vijayaraghavan)

2018-03-02 Thread sershe
Repository: hive
Updated Branches:
  refs/heads/master 17441e485 -> a4198f584


HIVE-18828 : improve error handling for codecs in LLAP IO (Sergey Shelukhin, 
reviewed by Gopal Vijayaraghavan)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/5e3d85c4
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/5e3d85c4
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/5e3d85c4

Branch: refs/heads/master
Commit: 5e3d85c409b14afb6eb94bad01348d013a536503
Parents: 17441e4
Author: sergey 
Authored: Thu Mar 1 15:50:04 2018 -0800
Committer: sergey 
Committed: Fri Mar 2 11:24:34 2018 -0800

--
 .../llap/io/encoded/OrcEncodedDataReader.java   | 19 +-
 .../ql/io/orc/encoded/EncodedReaderImpl.java| 26 +++-
 2 files changed, 33 insertions(+), 12 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/5e3d85c4/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/OrcEncodedDataReader.java
--
diff --git 
a/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/OrcEncodedDataReader.java
 
b/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/OrcEncodedDataReader.java
index a6d2a04..9219d28 100644
--- 
a/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/OrcEncodedDataReader.java
+++ 
b/llap-server/src/java/org/apache/hadoop/hive/llap/io/encoded/OrcEncodedDataReader.java
@@ -752,14 +752,21 @@ public class OrcEncodedDataReader extends 
CallableWithNdc
 CompressionKind kind = orcReader.getCompressionKind();
 boolean isPool = useCodecPool;
 CompressionCodec codec = isPool ? OrcCodecPool.getCodec(kind) : 
WriterImpl.createCodec(kind);
+boolean isCodecError = true;
 try {
-  return buildStripeFooter(Lists.newArrayList(new 
BufferChunk(bb, 0)),
-  bb.remaining(), codec, orcReader.getCompressionSize());
+  OrcProto.StripeFooter result = 
buildStripeFooter(Lists.newArrayList(
+  new BufferChunk(bb, 0)), bb.remaining(), codec, 
orcReader.getCompressionSize());
+  isCodecError = false;
+  return result;
 } finally {
-  if (isPool) {
-OrcCodecPool.returnCodec(kind, codec);
-  } else {
-codec.close();
+  try {
+if (isPool && !isCodecError) {
+  OrcCodecPool.returnCodec(kind, codec);
+} else {
+  codec.close();
+}
+  } catch (Exception ex) {
+LOG.error("Ignoring codec cleanup error", ex);
   }
 }
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/5e3d85c4/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/EncodedReaderImpl.java
--
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/EncodedReaderImpl.java 
b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/EncodedReaderImpl.java
index 32bdf6e..893a2bb 100644
--- 
a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/EncodedReaderImpl.java
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/encoded/EncodedReaderImpl.java
@@ -130,6 +130,7 @@ class EncodedReaderImpl implements EncodedReader {
   private boolean isDataReaderOpen = false;
   private final CompressionCodec codec;
   private final boolean isCodecFromPool;
+  private boolean isCodecFailure = false;
   private final boolean isCompressed;
   private final org.apache.orc.CompressionKind compressionKind;
   private final int bufferSize;
@@ -677,12 +678,17 @@ class EncodedReaderImpl implements EncodedReader {
 
   @Override
   public void close() throws IOException {
-if (isCodecFromPool) {
-  OrcCodecPool.returnCodec(compressionKind, codec);
-} else {
-  codec.close();
+try {
+  if (isCodecFromPool && !isCodecFailure) {
+OrcCodecPool.returnCodec(compressionKind, codec);
+  } else {
+codec.close();
+  }
+} catch (Exception ex) {
+  LOG.error("Ignoring error from codec", ex);
+} finally {
+  dataReader.close();
 }
-dataReader.close();
   }
 
   /**
@@ -870,7 +876,15 @@ class EncodedReaderImpl implements EncodedReader {
 for (ProcCacheChunk chunk : toDecompress) {
   ByteBuffer dest = chunk.getBuffer().getByteBufferRaw();
   if (chunk.isOriginalDataCompressed) {
-decompressChunk(chunk.originalData, codec, dest);
+boolean isOk = false;
+try {
+  decompressChunk(chunk.originalData, codec, dest);
+  isOk = true;
+} finally {
+  if (!isOk) {
+isCodecFailure = true;
+  }
+}
   } else {
 copyUncompressedChunk(chunk.originalData, dest);
   }



[2/2] hive git commit: HIVE-18824 : ValidWriteIdList config should be defined on tables which has to collect stats after insert (Sergey Shelukhin, reviewed by Eugene Koifman and Sankar Hariappan)

2018-03-02 Thread sershe
HIVE-18824 : ValidWriteIdList config should be defined on tables which has to 
collect stats after insert (Sergey Shelukhin, reviewed by Eugene Koifman and 
Sankar Hariappan)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/a4198f58
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/a4198f58
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/a4198f58

Branch: refs/heads/master
Commit: a4198f584aa0792a16d1e1eeb2ef3147403b8acb
Parents: 5e3d85c
Author: sergey 
Authored: Fri Mar 2 11:25:49 2018 -0800
Committer: sergey 
Committed: Fri Mar 2 11:25:49 2018 -0800

--
 ql/src/java/org/apache/hadoop/hive/ql/Driver.java | 15 +--
 1 file changed, 9 insertions(+), 6 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/a4198f58/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java 
b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
index 94999fe..6999777 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
@@ -25,6 +25,7 @@ import java.io.PrintStream;
 import java.io.Serializable;
 import java.net.InetAddress;
 import java.util.ArrayList;
+import java.util.Collection;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.LinkedHashMap;
@@ -1244,15 +1245,18 @@ public class Driver implements IDriver {
 
   // Make the list of transactional tables list which are getting read or 
written by current txn
   private List getTransactionalTableList(QueryPlan plan) {
-List tableList = new ArrayList<>();
+Set tableList = new HashSet<>();
 
 for (ReadEntity input : plan.getInputs()) {
   addTableFromEntity(input, tableList);
 }
-return tableList;
+for (WriteEntity output : plan.getOutputs()) {
+  addTableFromEntity(output, tableList);
+}
+return new ArrayList(tableList);
   }
 
-  private void addTableFromEntity(Entity entity, List tableList) {
+  private void addTableFromEntity(Entity entity, Collection tableList) 
{
 Table tbl;
 switch (entity.getType()) {
   case TABLE: {
@@ -1268,10 +1272,9 @@ public class Driver implements IDriver {
 return;
   }
 }
+if (!AcidUtils.isTransactionalTable(tbl)) return;
 String fullTableName = AcidUtils.getFullTableName(tbl.getDbName(), 
tbl.getTableName());
-if (AcidUtils.isTransactionalTable(tbl) && 
!tableList.contains(fullTableName)) {
-  tableList.add(fullTableName);
-}
+tableList.add(fullTableName);
   }
 
   private String getUserFromUGI() {



hive git commit: HIVE-18836 : backport HIVE-17600 to hive 1 (Sergey Shelukhin, reviewed by Prasanth Jayachandran)

2018-03-02 Thread sershe
Repository: hive
Updated Branches:
  refs/heads/branch-1 476a8e8c9 -> db72a865c


HIVE-18836 : backport HIVE-17600 to hive 1 (Sergey Shelukhin, reviewed by 
Prasanth Jayachandran)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/db72a865
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/db72a865
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/db72a865

Branch: refs/heads/branch-1
Commit: db72a865c8515aa3153e9973c4dd4d3c89d7967a
Parents: 476a8e8
Author: sergey 
Authored: Fri Mar 2 12:45:51 2018 -0800
Committer: sergey 
Committed: Fri Mar 2 12:46:08 2018 -0800

--
 .../java/org/apache/hadoop/hive/conf/HiveConf.java  |  4 
 .../org/apache/hadoop/hive/ql/io/orc/OrcFile.java   | 10 +-
 .../org/apache/hadoop/hive/ql/io/orc/OutStream.java | 15 +++
 .../apache/hadoop/hive/ql/io/orc/WriterImpl.java|  1 +
 .../apache/hadoop/hive/ql/io/orc/TestOutStream.java | 16 
 5 files changed, 45 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/db72a865/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
--
diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java 
b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index e0c8e82..9b44e30 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -2174,6 +2174,10 @@ public class HiveConf extends Configuration {
 HIVEOPTLISTBUCKETING("hive.optimize.listbucketing", false,
 "Enable list bucketing optimizer. Default value is false so that we 
disable it by default."),
 
+ORC_ENFORCE_COMPRESSION_BUFFER_SIZE("hive.exec.orc.buffer.size.enforce", 
false,
+  "Defines whether to enforce ORC compression buffer size."),
+
+
 // Allow TCP Keep alive socket option for for HiveServer or a maximum 
timeout for the socket.
 SERVER_READ_SOCKET_TIMEOUT("hive.server.read.socket.timeout", "10s",
 new TimeValidator(TimeUnit.SECONDS),

http://git-wip-us.apache.org/repos/asf/hive/blob/db72a865/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcFile.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcFile.java 
b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcFile.java
index dc00e38..c83b8c1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcFile.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcFile.java
@@ -33,6 +33,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.ql.io.filters.BloomFilterIO;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 
@@ -161,7 +162,8 @@ public final class OrcFile {
 BLOCK_PADDING("orc.block.padding"),
 ENCODING_STRATEGY("orc.encoding.strategy"),
 BLOOM_FILTER_COLUMNS("orc.bloom.filter.columns"),
-BLOOM_FILTER_FPP("orc.bloom.filter.fpp");
+BLOOM_FILTER_FPP("orc.bloom.filter.fpp"),
+ENFORCE_BUFFER_SIZE("orc.buffer.size.enforce");
 
 private final String propName;
 
@@ -314,6 +316,12 @@ public final class OrcFile {
   : CompressionKind.valueOf(propValue.toUpperCase());
 
   propValue = tableProperties == null ? null
+  : 
tableProperties.getProperty(OrcTableProperties.ENFORCE_BUFFER_SIZE.propName);
+  enforceBufferSize = propValue == null ? HiveConf.getBoolVar(conf, 
ConfVars.ORC_ENFORCE_COMPRESSION_BUFFER_SIZE)
+  : Boolean.parseBoolean(propValue);
+
+
+  propValue = tableProperties == null ? null
   : 
tableProperties.getProperty(OrcTableProperties.BLOOM_FILTER_COLUMNS.propName);
   bloomFilterColumns = propValue;
 

http://git-wip-us.apache.org/repos/asf/hive/blob/db72a865/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OutStream.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OutStream.java 
b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OutStream.java
index e2096eb..32992e9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OutStream.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OutStream.java
@@ -282,5 +282,20 @@ class OutStream extends PositionedOutputStream {
   public boolean isSuppressed() {
 return suppress;
   }
+
+  /**
+   * Throws exception if the bufferSize argument equals or exceeds 2^(3*8 - 1).
+   * See {@link OutStream#writeHeader(ByteBuffer, int, int, boolean)}.
+   * The bufferSize needs to be expressible in 3 bytes, and uses the least 
significant byte
+   

hive git commit: HIVE-18788: Clean up inputs in JDBC PreparedStatement (Daniel Dai, reviewed by Thejas Nair)

2018-03-02 Thread daijy
Repository: hive
Updated Branches:
  refs/heads/branch-2.3 de82776f7 -> 63df42966


HIVE-18788: Clean up inputs in JDBC PreparedStatement (Daniel Dai, reviewed by 
Thejas Nair)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/63df4296
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/63df4296
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/63df4296

Branch: refs/heads/branch-2.3
Commit: 63df42966cf44ffdd20d3fcdcfb70738c0432aba
Parents: de82776
Author: Daniel Dai 
Authored: Fri Mar 2 15:36:36 2018 -0800
Committer: Daniel Dai 
Committed: Fri Mar 2 15:36:36 2018 -0800

--
 .../org/apache/hive/jdbc/TestJdbcDriver2.java   | 20 ++
 .../apache/hive/jdbc/HivePreparedStatement.java | 28 +---
 2 files changed, 45 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/63df4296/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
--
diff --git 
a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java 
b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
index 6e9223a..c2b4ce4 100644
--- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
+++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
@@ -45,6 +45,7 @@ import org.junit.rules.ExpectedException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.io.ByteArrayInputStream;
 import java.io.InputStream;
 import java.lang.Exception;
 import java.lang.Object;
@@ -491,6 +492,25 @@ public class TestJdbcDriver2 {
 expectedException);
   }
 
+  @Test
+  public void testPrepareStatementWithSetBinaryStream() throws SQLException {
+PreparedStatement stmt = con.prepareStatement("select under_col from " + 
tableName + " where value=?");
+stmt.setBinaryStream(1, new ByteArrayInputStream("'val_238' or under_col 
<> 0".getBytes()));
+ResultSet res = stmt.executeQuery();
+assertFalse(res.next());
+  }
+
+  @Test
+  public void testPrepareStatementWithSetString() throws SQLException {
+PreparedStatement stmt = con.prepareStatement("select under_col from " + 
tableName + " where value=?");
+stmt.setString(1, "val_238\\' or under_col <> 0 --");
+ResultSet res = stmt.executeQuery();
+assertFalse(res.next());
+stmt.setString(1,  "anyStringHere\\' or 1=1 --");
+res = stmt.executeQuery();
+assertFalse(res.next());
+  }
+
   private PreparedStatement createPreapredStatementUsingSetObject(String sql) 
throws SQLException {
 PreparedStatement ps = con.prepareStatement(sql);
 

http://git-wip-us.apache.org/repos/asf/hive/blob/63df4296/jdbc/src/java/org/apache/hive/jdbc/HivePreparedStatement.java
--
diff --git a/jdbc/src/java/org/apache/hive/jdbc/HivePreparedStatement.java 
b/jdbc/src/java/org/apache/hive/jdbc/HivePreparedStatement.java
index b842634..a455a6d 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/HivePreparedStatement.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/HivePreparedStatement.java
@@ -276,7 +276,7 @@ public class HivePreparedStatement extends HiveStatement 
implements PreparedStat
 
   public void setBinaryStream(int parameterIndex, InputStream x) throws 
SQLException {
 String str = new Scanner(x, "UTF-8").useDelimiter("\\A").next();
-this.parameters.put(parameterIndex, str);
+setString(parameterIndex, str);
   }
 
   /*
@@ -696,6 +696,27 @@ public class HivePreparedStatement extends HiveStatement 
implements PreparedStat
 this.parameters.put(parameterIndex,""+x);
   }
 
+  private String replaceBackSlashSingleQuote(String x) {
+// scrutinize escape pair, specifically, replace \' to '
+StringBuffer newX = new StringBuffer();
+for (int i = 0; i < x.length(); i++) {
+  char c = x.charAt(i);
+  if (c == '\\' && i < x.length()-1) {
+char c1 = x.charAt(i+1);
+if (c1 == '\'') {
+  newX.append(c1);
+} else {
+  newX.append(c);
+  newX.append(c1);
+}
+i++;
+  } else {
+newX.append(c);
+  }
+}
+return newX.toString();
+  }
+
   /*
* (non-Javadoc)
*
@@ -703,8 +724,9 @@ public class HivePreparedStatement extends HiveStatement 
implements PreparedStat
*/
 
   public void setString(int parameterIndex, String x) throws SQLException {
- x=x.replace("'", "\\'");
- this.parameters.put(parameterIndex,"'"+x+"'");
+x = replaceBackSlashSingleQuote(x);
+x=x.replace("'", "\\'");
+this.parameters.put(parameterIndex, "'"+x+"'");
   }
 
   /*



hive git commit: HIVE-18815: Remove unused feature in HPL/SQL (Daniel Dai, reviewed by Thejas Nair)

2018-03-02 Thread daijy
Repository: hive
Updated Branches:
  refs/heads/branch-2.3 63df42966 -> b25b029e6


HIVE-18815: Remove unused feature in HPL/SQL (Daniel Dai, reviewed by Thejas 
Nair)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/b25b029e
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/b25b029e
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/b25b029e

Branch: refs/heads/branch-2.3
Commit: b25b029e668273a35b4d6822f41930c2d90883d2
Parents: 63df429
Author: Daniel Dai 
Authored: Fri Mar 2 15:38:28 2018 -0800
Committer: Daniel Dai 
Committed: Fri Mar 2 15:38:28 2018 -0800

--
 .../antlr4/org/apache/hive/hplsql/Hplsql.g4 |  19 -
 .../main/java/org/apache/hive/hplsql/Exec.java  |  10 +-
 .../main/java/org/apache/hive/hplsql/Ftp.java   | 439 ---
 hplsql/src/test/queries/db/copy_from_ftp.sql|   2 -
 .../src/test/results/db/copy_from_ftp.out.txt   |   2 -
 5 files changed, 1 insertion(+), 471 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/b25b029e/hplsql/src/main/antlr4/org/apache/hive/hplsql/Hplsql.g4
--
diff --git a/hplsql/src/main/antlr4/org/apache/hive/hplsql/Hplsql.g4 
b/hplsql/src/main/antlr4/org/apache/hive/hplsql/Hplsql.g4
index 66c12e7..4a3a405 100644
--- a/hplsql/src/main/antlr4/org/apache/hive/hplsql/Hplsql.g4
+++ b/hplsql/src/main/antlr4/org/apache/hive/hplsql/Hplsql.g4
@@ -51,7 +51,6 @@ stmt :
  | collect_stats_stmt
  | close_stmt
  | cmp_stmt
- | copy_from_ftp_stmt
  | copy_from_local_stmt
  | copy_stmt
  | commit_stmt
@@ -617,10 +616,6 @@ cmp_source :
   (table_name where_clause? | T_OPEN_P select_stmt T_CLOSE_P) (T_AT ident)?
  ;
  
-copy_from_ftp_stmt :  
-   T_COPY T_FROM T_FTP expr copy_ftp_option*
- ;
-
 copy_from_local_stmt :  // COPY FROM LOCAL statement
T_COPY T_FROM T_LOCAL copy_source (T_COMMA copy_source)* T_TO 
copy_target copy_file_option*
  ;
@@ -650,18 +645,6 @@ copy_file_option :
  | T_OVERWRITE
  ;
  
-copy_ftp_option :
-   T_USER expr
- | T_PWD expr
- | T_DIR (file_name | expr) 
- | T_FILES expr
- | T_NEW
- | T_OVERWRITE
- | T_SUBDIR
- | T_SESSIONS expr
- | T_TO T_LOCAL? (file_name | expr)
- ;
- 
 commit_stmt :   // COMMIT statement
T_COMMIT T_WORK?
  ;
@@ -1340,7 +1323,6 @@ non_reserved_words :  // Tokens that 
are not reserved words
  | T_FORMAT 
  | T_FOUND
  | T_FROM  
- | T_FTP 
  | T_FULL 
  | T_FUNCTION
  | T_GET
@@ -1663,7 +1645,6 @@ T_FOREIGN : F O R E I G N ;
 T_FORMAT  : F O R M A T ;
 T_FOUND   : F O U N D ;
 T_FROM: F R O M ; 
-T_FTP : F T P ;
 T_FULL: F U L L ;
 T_FUNCTION: F U N C T I O N ;
 T_GET : G E T ;

http://git-wip-us.apache.org/repos/asf/hive/blob/b25b029e/hplsql/src/main/java/org/apache/hive/hplsql/Exec.java
--
diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/Exec.java 
b/hplsql/src/main/java/org/apache/hive/hplsql/Exec.java
index 2ad3ea3..4109701 100644
--- a/hplsql/src/main/java/org/apache/hive/hplsql/Exec.java
+++ b/hplsql/src/main/java/org/apache/hive/hplsql/Exec.java
@@ -1302,15 +1302,7 @@ public class Exec extends HplsqlBaseVisitor {
   public Integer visitCopy_stmt(HplsqlParser.Copy_stmtContext ctx) { 
 return new Copy(exec).run(ctx); 
   }
-  
-  /**
-   * COPY FROM FTP statement
-   */
-  @Override 
-  public Integer 
visitCopy_from_ftp_stmt(HplsqlParser.Copy_from_ftp_stmtContext ctx) { 
-return new Ftp(exec).run(ctx); 
-  }
-  
+
   /**
* COPY FROM LOCAL statement
*/

http://git-wip-us.apache.org/repos/asf/hive/blob/b25b029e/hplsql/src/main/java/org/apache/hive/hplsql/Ftp.java
--
diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/Ftp.java 
b/hplsql/src/main/java/org/apache/hive/hplsql/Ftp.java
deleted file mode 100644
index 2d0dfd6..000
--- a/hplsql/src/main/java/org/apache/hive/hplsql/Ftp.java
+++ /dev/null
@@ -1,439 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

[2/2] hive git commit: HIVE-18815: Remove unused feature in HPL/SQL (Daniel Dai, reviewed by Thejas Nair)

2018-03-02 Thread daijy
HIVE-18815: Remove unused feature in HPL/SQL (Daniel Dai, reviewed by Thejas 
Nair)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/d3908524
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/d3908524
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/d3908524

Branch: refs/heads/branch-2
Commit: d3908524d13efe4afb13aeed05b5e7e7ae95ec6d
Parents: c803e96
Author: Daniel Dai 
Authored: Fri Mar 2 15:38:28 2018 -0800
Committer: Daniel Dai 
Committed: Fri Mar 2 16:34:03 2018 -0800

--
 .../antlr4/org/apache/hive/hplsql/Hplsql.g4 |  19 -
 .../main/java/org/apache/hive/hplsql/Exec.java  |  10 +-
 .../main/java/org/apache/hive/hplsql/Ftp.java   | 439 ---
 hplsql/src/test/queries/db/copy_from_ftp.sql|   2 -
 .../src/test/results/db/copy_from_ftp.out.txt   |   2 -
 5 files changed, 1 insertion(+), 471 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/d3908524/hplsql/src/main/antlr4/org/apache/hive/hplsql/Hplsql.g4
--
diff --git a/hplsql/src/main/antlr4/org/apache/hive/hplsql/Hplsql.g4 
b/hplsql/src/main/antlr4/org/apache/hive/hplsql/Hplsql.g4
index 66c12e7..4a3a405 100644
--- a/hplsql/src/main/antlr4/org/apache/hive/hplsql/Hplsql.g4
+++ b/hplsql/src/main/antlr4/org/apache/hive/hplsql/Hplsql.g4
@@ -51,7 +51,6 @@ stmt :
  | collect_stats_stmt
  | close_stmt
  | cmp_stmt
- | copy_from_ftp_stmt
  | copy_from_local_stmt
  | copy_stmt
  | commit_stmt
@@ -617,10 +616,6 @@ cmp_source :
   (table_name where_clause? | T_OPEN_P select_stmt T_CLOSE_P) (T_AT ident)?
  ;
  
-copy_from_ftp_stmt :  
-   T_COPY T_FROM T_FTP expr copy_ftp_option*
- ;
-
 copy_from_local_stmt :  // COPY FROM LOCAL statement
T_COPY T_FROM T_LOCAL copy_source (T_COMMA copy_source)* T_TO 
copy_target copy_file_option*
  ;
@@ -650,18 +645,6 @@ copy_file_option :
  | T_OVERWRITE
  ;
  
-copy_ftp_option :
-   T_USER expr
- | T_PWD expr
- | T_DIR (file_name | expr) 
- | T_FILES expr
- | T_NEW
- | T_OVERWRITE
- | T_SUBDIR
- | T_SESSIONS expr
- | T_TO T_LOCAL? (file_name | expr)
- ;
- 
 commit_stmt :   // COMMIT statement
T_COMMIT T_WORK?
  ;
@@ -1340,7 +1323,6 @@ non_reserved_words :  // Tokens that 
are not reserved words
  | T_FORMAT 
  | T_FOUND
  | T_FROM  
- | T_FTP 
  | T_FULL 
  | T_FUNCTION
  | T_GET
@@ -1663,7 +1645,6 @@ T_FOREIGN : F O R E I G N ;
 T_FORMAT  : F O R M A T ;
 T_FOUND   : F O U N D ;
 T_FROM: F R O M ; 
-T_FTP : F T P ;
 T_FULL: F U L L ;
 T_FUNCTION: F U N C T I O N ;
 T_GET : G E T ;

http://git-wip-us.apache.org/repos/asf/hive/blob/d3908524/hplsql/src/main/java/org/apache/hive/hplsql/Exec.java
--
diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/Exec.java 
b/hplsql/src/main/java/org/apache/hive/hplsql/Exec.java
index 2ad3ea3..4109701 100644
--- a/hplsql/src/main/java/org/apache/hive/hplsql/Exec.java
+++ b/hplsql/src/main/java/org/apache/hive/hplsql/Exec.java
@@ -1302,15 +1302,7 @@ public class Exec extends HplsqlBaseVisitor {
   public Integer visitCopy_stmt(HplsqlParser.Copy_stmtContext ctx) { 
 return new Copy(exec).run(ctx); 
   }
-  
-  /**
-   * COPY FROM FTP statement
-   */
-  @Override 
-  public Integer 
visitCopy_from_ftp_stmt(HplsqlParser.Copy_from_ftp_stmtContext ctx) { 
-return new Ftp(exec).run(ctx); 
-  }
-  
+
   /**
* COPY FROM LOCAL statement
*/

http://git-wip-us.apache.org/repos/asf/hive/blob/d3908524/hplsql/src/main/java/org/apache/hive/hplsql/Ftp.java
--
diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/Ftp.java 
b/hplsql/src/main/java/org/apache/hive/hplsql/Ftp.java
deleted file mode 100644
index 2d0dfd6..000
--- a/hplsql/src/main/java/org/apache/hive/hplsql/Ftp.java
+++ /dev/null
@@ -1,439 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

[1/2] hive git commit: HIVE-18788: Clean up inputs in JDBC PreparedStatement (Daniel Dai, reviewed by Thejas Nair)

2018-03-02 Thread daijy
Repository: hive
Updated Branches:
  refs/heads/branch-2 2bcab1467 -> d3908524d


HIVE-18788: Clean up inputs in JDBC PreparedStatement (Daniel Dai, reviewed by 
Thejas Nair)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/c803e962
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/c803e962
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/c803e962

Branch: refs/heads/branch-2
Commit: c803e962104d9c0e80f61e9c5afc236113e4987e
Parents: 2bcab14
Author: Daniel Dai 
Authored: Fri Mar 2 15:36:36 2018 -0800
Committer: Daniel Dai 
Committed: Fri Mar 2 16:33:55 2018 -0800

--
 .../org/apache/hive/jdbc/TestJdbcDriver2.java   | 20 ++
 .../apache/hive/jdbc/HivePreparedStatement.java | 28 +---
 2 files changed, 45 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/c803e962/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
--
diff --git 
a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java 
b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
index 7223fcb..6572931 100644
--- a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
+++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
@@ -45,6 +45,7 @@ import org.junit.rules.ExpectedException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.io.ByteArrayInputStream;
 import java.io.InputStream;
 import java.lang.Exception;
 import java.lang.Object;
@@ -491,6 +492,25 @@ public class TestJdbcDriver2 {
 expectedException);
   }
 
+  @Test
+  public void testPrepareStatementWithSetBinaryStream() throws SQLException {
+PreparedStatement stmt = con.prepareStatement("select under_col from " + 
tableName + " where value=?");
+stmt.setBinaryStream(1, new ByteArrayInputStream("'val_238' or under_col 
<> 0".getBytes()));
+ResultSet res = stmt.executeQuery();
+assertFalse(res.next());
+  }
+
+  @Test
+  public void testPrepareStatementWithSetString() throws SQLException {
+PreparedStatement stmt = con.prepareStatement("select under_col from " + 
tableName + " where value=?");
+stmt.setString(1, "val_238\\' or under_col <> 0 --");
+ResultSet res = stmt.executeQuery();
+assertFalse(res.next());
+stmt.setString(1,  "anyStringHere\\' or 1=1 --");
+res = stmt.executeQuery();
+assertFalse(res.next());
+  }
+
   private PreparedStatement createPreapredStatementUsingSetObject(String sql) 
throws SQLException {
 PreparedStatement ps = con.prepareStatement(sql);
 

http://git-wip-us.apache.org/repos/asf/hive/blob/c803e962/jdbc/src/java/org/apache/hive/jdbc/HivePreparedStatement.java
--
diff --git a/jdbc/src/java/org/apache/hive/jdbc/HivePreparedStatement.java 
b/jdbc/src/java/org/apache/hive/jdbc/HivePreparedStatement.java
index b842634..a455a6d 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/HivePreparedStatement.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/HivePreparedStatement.java
@@ -276,7 +276,7 @@ public class HivePreparedStatement extends HiveStatement 
implements PreparedStat
 
   public void setBinaryStream(int parameterIndex, InputStream x) throws 
SQLException {
 String str = new Scanner(x, "UTF-8").useDelimiter("\\A").next();
-this.parameters.put(parameterIndex, str);
+setString(parameterIndex, str);
   }
 
   /*
@@ -696,6 +696,27 @@ public class HivePreparedStatement extends HiveStatement 
implements PreparedStat
 this.parameters.put(parameterIndex,""+x);
   }
 
+  private String replaceBackSlashSingleQuote(String x) {
+// scrutinize escape pair, specifically, replace \' to '
+StringBuffer newX = new StringBuffer();
+for (int i = 0; i < x.length(); i++) {
+  char c = x.charAt(i);
+  if (c == '\\' && i < x.length()-1) {
+char c1 = x.charAt(i+1);
+if (c1 == '\'') {
+  newX.append(c1);
+} else {
+  newX.append(c);
+  newX.append(c1);
+}
+i++;
+  } else {
+newX.append(c);
+  }
+}
+return newX.toString();
+  }
+
   /*
* (non-Javadoc)
*
@@ -703,8 +724,9 @@ public class HivePreparedStatement extends HiveStatement 
implements PreparedStat
*/
 
   public void setString(int parameterIndex, String x) throws SQLException {
- x=x.replace("'", "\\'");
- this.parameters.put(parameterIndex,"'"+x+"'");
+x = replaceBackSlashSingleQuote(x);
+x=x.replace("'", "\\'");
+this.parameters.put(parameterIndex, "'"+x+"'");
   }
 
   /*



[1/2] hive git commit: HIVE-18738 : LLAP IO ACID - includes handling is broken (Sergey Shelukhin, reviewed by Teddy Choi)

2018-03-02 Thread sershe
Repository: hive
Updated Branches:
  refs/heads/master a4198f584 -> 1a3090f85


http://git-wip-us.apache.org/repos/asf/hive/blob/1a3090f8/ql/src/test/results/clientpositive/llap/llap_acid2.q.out
--
diff --git a/ql/src/test/results/clientpositive/llap/llap_acid2.q.out 
b/ql/src/test/results/clientpositive/llap/llap_acid2.q.out
new file mode 100644
index 000..d2b8b45
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/llap_acid2.q.out
@@ -0,0 +1,392 @@
+PREHOOK: query: DROP TABLE orc_llap
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE orc_llap
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: CREATE TABLE orc_llap (
+cint INT,
+cbigint BIGINT,
+cfloat FLOAT,
+cdouble DOUBLE,
+cint0 INT,
+cbigint0 BIGINT,
+cfloat0 FLOAT,
+cdouble0 DOUBLE,
+cint1 INT,
+cbigint1 BIGINT,
+cfloat1 FLOAT,
+cdouble1 DOUBLE,
+cstring1 string,
+cfloat2 float
+)  stored as orc TBLPROPERTIES ('transactional'='true')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@orc_llap
+POSTHOOK: query: CREATE TABLE orc_llap (
+cint INT,
+cbigint BIGINT,
+cfloat FLOAT,
+cdouble DOUBLE,
+cint0 INT,
+cbigint0 BIGINT,
+cfloat0 FLOAT,
+cdouble0 DOUBLE,
+cint1 INT,
+cbigint1 BIGINT,
+cfloat1 FLOAT,
+cdouble1 DOUBLE,
+cstring1 string,
+cfloat2 float
+)  stored as orc TBLPROPERTIES ('transactional'='true')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@orc_llap
+PREHOOK: query: insert into table orc_llap
+select cint, cbigint, cfloat, cdouble,
+ cint as c1, cbigint as c2, cfloat as c3, cdouble as c4,
+ cint as c8, cbigint as c7, cfloat as c6, cdouble as c5,
+ cstring1, cfloat as c9 from alltypesorc order by cdouble asc  limit 30
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+PREHOOK: Output: default@orc_llap
+POSTHOOK: query: insert into table orc_llap
+select cint, cbigint, cfloat, cdouble,
+ cint as c1, cbigint as c2, cfloat as c3, cdouble as c4,
+ cint as c8, cbigint as c7, cfloat as c6, cdouble as c5,
+ cstring1, cfloat as c9 from alltypesorc order by cdouble asc  limit 30
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+POSTHOOK: Output: default@orc_llap
+POSTHOOK: Lineage: orc_llap.cbigint SIMPLE 
[(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), 
]
+POSTHOOK: Lineage: orc_llap.cbigint0 SIMPLE 
[(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), 
]
+POSTHOOK: Lineage: orc_llap.cbigint1 SIMPLE 
[(alltypesorc)alltypesorc.FieldSchema(name:cbigint, type:bigint, comment:null), 
]
+POSTHOOK: Lineage: orc_llap.cdouble SIMPLE 
[(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), 
]
+POSTHOOK: Lineage: orc_llap.cdouble0 SIMPLE 
[(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), 
]
+POSTHOOK: Lineage: orc_llap.cdouble1 SIMPLE 
[(alltypesorc)alltypesorc.FieldSchema(name:cdouble, type:double, comment:null), 
]
+POSTHOOK: Lineage: orc_llap.cfloat SIMPLE 
[(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: orc_llap.cfloat0 SIMPLE 
[(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: orc_llap.cfloat1 SIMPLE 
[(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: orc_llap.cfloat2 SIMPLE 
[(alltypesorc)alltypesorc.FieldSchema(name:cfloat, type:float, comment:null), ]
+POSTHOOK: Lineage: orc_llap.cint SIMPLE 
[(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: orc_llap.cint0 SIMPLE 
[(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: orc_llap.cint1 SIMPLE 
[(alltypesorc)alltypesorc.FieldSchema(name:cint, type:int, comment:null), ]
+POSTHOOK: Lineage: orc_llap.cstring1 SIMPLE 
[(alltypesorc)alltypesorc.FieldSchema(name:cstring1, type:string, 
comment:null), ]
+PREHOOK: query: CREATE TABLE orc_llap2 (
+cint INT,
+cbigint BIGINT,
+cfloat FLOAT,
+cdouble DOUBLE,
+cint0 INT,
+cbigint0 BIGINT,
+cfloat0 FLOAT,
+cdouble0 DOUBLE,
+cint1 INT,
+cbigint1 BIGINT,
+cfloat1 FLOAT,
+cdouble1 DOUBLE,
+cstring1 string,
+cfloat2 float
+)  stored as orc TBLPROPERTIES ('transactional'='false')
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@orc_llap2
+POSTHOOK: query: CREATE TABLE orc_llap2 (
+cint INT,
+cbigint BIGINT,
+cfloat FLOAT,
+cdouble DOUBLE,
+cint0 INT,
+cbigint0 BIGINT,
+cfloat0 FLOAT,
+cdouble0 DOUBLE,
+cint1 INT,
+cbigint1 BIGINT,
+cfloat1 FLOAT,
+cdouble1 DOUBLE,
+cstring1 string,
+cfloat2 float
+)  stored as orc TBLPROPERTIES ('transactional'='false')
+POSTHOOK: type: CREATETABLE
+POSTHOO

[2/2] hive git commit: HIVE-18738 : LLAP IO ACID - includes handling is broken (Sergey Shelukhin, reviewed by Teddy Choi)

2018-03-02 Thread sershe
HIVE-18738 : LLAP IO ACID - includes handling is broken (Sergey Shelukhin, 
reviewed by Teddy Choi)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/1a3090f8
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/1a3090f8
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/1a3090f8

Branch: refs/heads/master
Commit: 1a3090f858ce3ac18d1eb799d87a45b3ee0defce
Parents: a4198f5
Author: sergey 
Authored: Fri Mar 2 18:49:59 2018 -0800
Committer: sergey 
Committed: Fri Mar 2 18:49:59 2018 -0800

--
 .../test/resources/testconfiguration.properties |   1 +
 .../llap/io/api/impl/ColumnVectorBatch.java |  42 ++
 .../hive/llap/io/api/impl/LlapInputFormat.java  |   9 +-
 .../hive/llap/io/api/impl/LlapRecordReader.java | 286 +-
 .../llap/io/decode/ColumnVectorProducer.java|  19 +-
 .../io/decode/GenericColumnVectorProducer.java  |  17 +-
 .../llap/io/decode/OrcColumnVectorProducer.java |  17 +-
 .../llap/io/decode/OrcEncodedDataConsumer.java  |  55 +--
 .../hive/llap/io/decode/ReadPipeline.java   |   1 -
 .../llap/io/encoded/OrcEncodedDataReader.java   |  60 +--
 .../llap/io/encoded/SerDeEncodedDataReader.java |  18 +-
 .../hive/ql/exec/vector/VectorExtractRow.java   |   6 +-
 .../ql/exec/vector/VectorSelectOperator.java|   1 +
 .../hadoop/hive/ql/io/orc/OrcInputFormat.java   |  53 ++-
 .../io/orc/VectorizedOrcAcidRowBatchReader.java | 191 -
 .../hive/ql/io/orc/encoded/EncodedReader.java   |   4 +-
 .../ql/io/orc/encoded/EncodedReaderImpl.java|  32 +-
 .../orc/encoded/EncodedTreeReaderFactory.java   |  44 +--
 .../hadoop/hive/ql/io/orc/encoded/Reader.java   |  16 +-
 ql/src/test/queries/clientpositive/llap_acid2.q |  84 
 .../clientpositive/llap/llap_acid2.q.out| 392 +++
 .../common/io/encoded/EncodedColumnBatch.java   |   9 +-
 22 files changed, 1013 insertions(+), 344 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/1a3090f8/itests/src/test/resources/testconfiguration.properties
--
diff --git a/itests/src/test/resources/testconfiguration.properties 
b/itests/src/test/resources/testconfiguration.properties
index 2776fe9..544c836 100644
--- a/itests/src/test/resources/testconfiguration.properties
+++ b/itests/src/test/resources/testconfiguration.properties
@@ -576,6 +576,7 @@ minillaplocal.query.files=\
   lineage2.q,\
   lineage3.q,\
   list_bucket_dml_10.q,\
+  llap_acid2.q,\
   llap_partitioned.q,\
   llap_vector_nohybridgrace.q,\
   load_data_acid_rename.q,\

http://git-wip-us.apache.org/repos/asf/hive/blob/1a3090f8/llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/ColumnVectorBatch.java
--
diff --git 
a/llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/ColumnVectorBatch.java
 
b/llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/ColumnVectorBatch.java
index 9262bf0..19b0b55 100644
--- 
a/llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/ColumnVectorBatch.java
+++ 
b/llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/ColumnVectorBatch.java
@@ -43,4 +43,46 @@ public class ColumnVectorBatch {
 other[otherIx] = cols[ix];
 cols[ix] = old;
   }
+
+  
+  @Override
+  public String toString() {
+if (size == 0) {
+  return "";
+}
+StringBuilder b = new StringBuilder();
+b.append("Column vector types: ");
+for (int k = 0; k < cols.length; k++) {
+  ColumnVector cv = cols[k];
+  b.append(k);
+  b.append(":");
+  b.append(cv == null ? "null" : 
cv.getClass().getSimpleName().replace("ColumnVector", ""));
+}
+b.append('\n');
+
+
+for (int i = 0; i < size; i++) {
+  b.append('[');
+  for (int k = 0; k < cols.length; k++) {
+ColumnVector cv = cols[k];
+if (k > 0) {
+  b.append(", ");
+}
+if (cv == null) continue;
+if (cv != null) {
+  try {
+cv.stringifyValue(b, i);
+  } catch (Exception ex) {
+b.append("invalid");
+  }
+}
+  }
+  b.append(']');
+  if (i < size - 1) {
+b.append('\n');
+  }
+}
+
+return b.toString();
+  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/1a3090f8/llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/LlapInputFormat.java
--
diff --git 
a/llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/LlapInputFormat.java
 
b/llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/LlapInputFormat.java
index bb319f0..6d29163 100644
--- 
a/llap-server/src/java/org/apache/hadoop/hive/llap/io/api/impl/LlapInputFormat.java
++