hive git commit: HIVE-17112: Reduce logging in HiveSparkClientFactory and RemoteHiveSparkClient (Sahil Takiar, reviewed by Peter Vary)

2017-09-20 Thread stakiar
Repository: hive
Updated Branches:
  refs/heads/master c29ecc664 -> d72121f0c


HIVE-17112: Reduce logging in HiveSparkClientFactory and RemoteHiveSparkClient 
(Sahil Takiar, reviewed by Peter Vary)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/d72121f0
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/d72121f0
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/d72121f0

Branch: refs/heads/master
Commit: d72121f0c2afdd0bbc68c0f7a9b5e89560a3a4aa
Parents: c29ecc6
Author: Sahil Takiar 
Authored: Wed Sep 20 21:18:25 2017 -0700
Committer: Sahil Takiar 
Committed: Wed Sep 20 21:18:25 2017 -0700

--
 .../hive/ql/exec/spark/HiveSparkClientFactory.java  | 16 
 .../hive/ql/exec/spark/RemoteHiveSparkClient.java   |  6 +++---
 2 files changed, 11 insertions(+), 11 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/d72121f0/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveSparkClientFactory.java
--
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveSparkClientFactory.java 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveSparkClientFactory.java
index 6e9ba7c..194585e 100644
--- 
a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveSparkClientFactory.java
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/HiveSparkClientFactory.java
@@ -95,21 +95,21 @@ public class HiveSparkClientFactory {
   inputStream = HiveSparkClientFactory.class.getClassLoader()
 .getResourceAsStream(SPARK_DEFAULT_CONF_FILE);
   if (inputStream != null) {
-LOG.info("loading spark properties from:" + SPARK_DEFAULT_CONF_FILE);
+LOG.info("loading spark properties from: " + SPARK_DEFAULT_CONF_FILE);
 Properties properties = new Properties();
 properties.load(new InputStreamReader(inputStream, 
CharsetNames.UTF_8));
 for (String propertyName : properties.stringPropertyNames()) {
   if (propertyName.startsWith("spark")) {
 String value = properties.getProperty(propertyName);
 sparkConf.put(propertyName, properties.getProperty(propertyName));
-LOG.info(String.format(
+LOG.debug(String.format(
   "load spark property from %s (%s -> %s).",
   SPARK_DEFAULT_CONF_FILE, propertyName, 
LogUtils.maskIfPassword(propertyName,value)));
   }
 }
   }
 } catch (IOException e) {
-  LOG.info("Failed to open spark configuration file:"
+  LOG.info("Failed to open spark configuration file: "
 + SPARK_DEFAULT_CONF_FILE, e);
 } finally {
   if (inputStream != null) {
@@ -156,7 +156,7 @@ public class HiveSparkClientFactory {
   if (propertyName.startsWith("spark")) {
 String value = hiveConf.get(propertyName);
 sparkConf.put(propertyName, value);
-LOG.info(String.format(
+LOG.debug(String.format(
   "load spark property from hive configuration (%s -> %s).",
   propertyName, LogUtils.maskIfPassword(propertyName,value)));
   } else if (propertyName.startsWith("yarn") &&
@@ -166,7 +166,7 @@ public class HiveSparkClientFactory {
 // started with spark prefix, Spark would remove spark.hadoop prefix 
lately and add
 // it to its hadoop configuration.
 sparkConf.put("spark.hadoop." + propertyName, value);
-LOG.info(String.format(
+LOG.debug(String.format(
   "load yarn property from hive configuration in %s mode (%s -> %s).",
   sparkMaster, propertyName, 
LogUtils.maskIfPassword(propertyName,value)));
   } else if 
(propertyName.equals(CommonConfigurationKeysPublic.FS_DEFAULT_NAME_KEY)) {
@@ -180,19 +180,19 @@ public class HiveSparkClientFactory {
 // Spark problem.
 String value = hiveConf.get(propertyName);
 sparkConf.put("spark.hadoop." + propertyName, value);
-LOG.info(String.format(
+LOG.debug(String.format(
   "load HBase configuration (%s -> %s).", propertyName, 
LogUtils.maskIfPassword(propertyName,value)));
   } else if (propertyName.startsWith("oozie")) {
 String value = hiveConf.get(propertyName);
 sparkConf.put("spark." + propertyName, value);
-LOG.info(String.format(
+LOG.debug(String.format(
   "Pass Oozie configuration (%s -> %s).", propertyName, 
LogUtils.maskIfPassword(propertyName,value)));
   }
 
   if (RpcConfiguration.HIVE_SPARK_RSC_CONFIGS.contains(propertyName)) {
 String value = RpcConfiguration.getValue(hiveConf, propertyName);
 sparkConf.put(propertyName, value);
-LOG.info(String.format(
+LOG.debug(String.format(
   "load RPC 

hive git commit: HIVE-17542: Make HoS CombineEquivalentWorkResolver Configurable (Sahil Takiar, reviewed by Rui Li, Peter Vary)

2017-09-20 Thread stakiar
Repository: hive
Updated Branches:
  refs/heads/master 8b285947d -> c29ecc664


HIVE-17542: Make HoS CombineEquivalentWorkResolver Configurable (Sahil Takiar, 
reviewed by Rui Li, Peter Vary)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/c29ecc66
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/c29ecc66
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/c29ecc66

Branch: refs/heads/master
Commit: c29ecc664e06ab6735f2798a3f8f27dd78f81120
Parents: 8b28594
Author: Sahil Takiar 
Authored: Wed Sep 20 21:14:39 2017 -0700
Committer: Sahil Takiar 
Committed: Wed Sep 20 21:15:38 2017 -0700

--
 .../org/apache/hadoop/hive/conf/HiveConf.java   |   5 +-
 .../test/resources/testconfiguration.properties |   3 +-
 .../hive/ql/parse/spark/SparkCompiler.java  |   6 +-
 .../spark_combine_equivalent_work_2.q   |  41 
 .../spark/spark_combine_equivalent_work_2.q.out | 232 +++
 5 files changed, 284 insertions(+), 3 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/c29ecc66/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
--
diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java 
b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 8a906ce..1f9a468 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -1656,7 +1656,10 @@ public class HiveConf extends Configuration {
 
 HIVE_SHARED_WORK_OPTIMIZATION("hive.optimize.shared.work", true,
 "Whether to enable shared work optimizer. The optimizer finds scan 
operator over the same table\n" +
-"and follow-up operators in the query plan and merges them if they 
meet some preconditions."),
+"and follow-up operators in the query plan and merges them if they 
meet some preconditions. Tez only."),
+
HIVE_COMBINE_EQUIVALENT_WORK_OPTIMIZATION("hive.combine.equivalent.work.optimization",
 true, "Whether to " +
+"combine equivalent work objects during physical optimization.\n 
This optimization looks for equivalent " +
+"work objects and combines them if they meet certain 
preconditions. Spark only."),
 HIVE_REMOVE_SQ_COUNT_CHECK("hive.optimize.remove.sq_count_check", false,
 "Whether to remove an extra join with sq_count_check for scalar 
subqueries "
 + "with constant group by keys."),

http://git-wip-us.apache.org/repos/asf/hive/blob/c29ecc66/itests/src/test/resources/testconfiguration.properties
--
diff --git a/itests/src/test/resources/testconfiguration.properties 
b/itests/src/test/resources/testconfiguration.properties
index efa690d..8f52321 100644
--- a/itests/src/test/resources/testconfiguration.properties
+++ b/itests/src/test/resources/testconfiguration.properties
@@ -1401,7 +1401,8 @@ spark.query.files=add_part_multiple.q, \
 
 # Unlike "spark.query.files" above, these tests only run
 # under Spark engine and only use TestSparkCliDriver.
-spark.only.query.files=spark_union_merge.q
+spark.only.query.files=spark_union_merge.q,\
+  spark_combine_equivalent_work_2.q
 
 # Unlike "miniSparkOnYarn.query.files" below, these tests only run
 # under Spark engine and only use TestMiniSparkOnYarnCliDriver.

http://git-wip-us.apache.org/repos/asf/hive/blob/c29ecc66/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkCompiler.java
--
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkCompiler.java 
b/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkCompiler.java
index 8144350..965044d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkCompiler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/spark/SparkCompiler.java
@@ -599,7 +599,11 @@ public class SparkCompiler extends TaskCompiler {
   LOG.debug("Skipping stage id rearranger");
 }
 
-new CombineEquivalentWorkResolver().resolve(physicalCtx);
+if 
(conf.getBoolVar(HiveConf.ConfVars.HIVE_COMBINE_EQUIVALENT_WORK_OPTIMIZATION)) {
+  new CombineEquivalentWorkResolver().resolve(physicalCtx);
+} else {
+  LOG.debug("Skipping combine equivalent work optimization");
+}
 
 if (physicalCtx.getContext().getExplainAnalyze() != null) {
   new AnnotateRunTimeStatsOptimizer().resolve(physicalCtx);

http://git-wip-us.apache.org/repos/asf/hive/blob/c29ecc66/ql/src/test/queries/clientpositive/spark_combine_equivalent_work_2.q
--
diff --git 

hive git commit: HIVE-17554: ArithmeticException: / by zero at hplsql component (ZhangBing Lin reviewed by Rui)

2017-09-20 Thread lirui
Repository: hive
Updated Branches:
  refs/heads/master a75b600c9 -> 8b285947d


HIVE-17554: ArithmeticException: / by zero at hplsql component (ZhangBing Lin 
reviewed by Rui)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/8b285947
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/8b285947
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/8b285947

Branch: refs/heads/master
Commit: 8b285947d7aa8e87e63288dc6fe8ff13d6aa985f
Parents: a75b600
Author: ZhangBing Lin 
Authored: Thu Sep 21 10:47:20 2017 +0800
Committer: Rui Li 
Committed: Thu Sep 21 10:47:20 2017 +0800

--
 hplsql/src/main/java/org/apache/hive/hplsql/Copy.java | 6 +-
 1 file changed, 5 insertions(+), 1 deletion(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/8b285947/hplsql/src/main/java/org/apache/hive/hplsql/Copy.java
--
diff --git a/hplsql/src/main/java/org/apache/hive/hplsql/Copy.java 
b/hplsql/src/main/java/org/apache/hive/hplsql/Copy.java
index 67af0a9..1879f99 100644
--- a/hplsql/src/main/java/org/apache/hive/hplsql/Copy.java
+++ b/hplsql/src/main/java/org/apache/hive/hplsql/Copy.java
@@ -28,6 +28,8 @@ import java.util.List;
 import java.io.FileOutputStream;
 import java.io.OutputStream;
 import java.io.IOException;
+import java.math.RoundingMode;
+import java.text.DecimalFormat;
 
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -167,7 +169,9 @@ public class Copy {
 exec.setRowCount(rows);
 long elapsed = timer.stop();
 if (info) {
-  info(ctx, "COPY completed: " + rows + " row(s), " + timer.format() + ", 
" + rows/(elapsed/1000) + " rows/sec");
+  DecimalFormat df = new DecimalFormat("#,##0.00");
+  df.setRoundingMode(RoundingMode.HALF_UP);
+  info(ctx, "COPY completed: " + rows + " row(s), " + timer.format() + ", 
" + df.format(rows/(elapsed/1000.0)) + " rows/sec");
 }
   }
   



hive git commit: HIVE-17512 : Not use doAs if distcp privileged user same as user running hive (Anishek Agarwal, via Thejas Nair)

2017-09-20 Thread thejas
Repository: hive
Updated Branches:
  refs/heads/master 583a9511b -> a75b600c9


HIVE-17512 : Not use doAs if distcp privileged user same as user running hive 
(Anishek Agarwal, via Thejas Nair)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/a75b600c
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/a75b600c
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/a75b600c

Branch: refs/heads/master
Commit: a75b600c9e54c1b63ac64eab520805e268b838cb
Parents: 583a951
Author: Anishek Agarwal 
Authored: Wed Sep 20 15:37:29 2017 -0700
Committer: Thejas M Nair 
Committed: Wed Sep 20 15:38:03 2017 -0700

--
 itests/hive-unit/pom.xml|  8 +-
 .../hadoop/hive/ql/parse/TestExportImport.java  | 10 ++-
 .../hadoop/hive/ql/parse/WarehouseInstance.java | 93 
 .../hadoop/hive/metastore/ObjectStore.java  |  3 +-
 .../hadoop/hive/ql/parse/repl/CopyUtils.java| 11 ++-
 .../ql/parse/repl/dump/io/FileOperations.java   |  5 +-
 6 files changed, 86 insertions(+), 44 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/a75b600c/itests/hive-unit/pom.xml
--
diff --git a/itests/hive-unit/pom.xml b/itests/hive-unit/pom.xml
index bf600c2..eeb6e58 100644
--- a/itests/hive-unit/pom.xml
+++ b/itests/hive-unit/pom.xml
@@ -135,7 +135,13 @@
   ${project.version}
   test
 
-
+  
+  org.apache.hadoop
+  hadoop-distcp
+  ${hadoop.version}
+  test
+  
+  
   org.apache.hive
   hive-cli
   ${project.version}

http://git-wip-us.apache.org/repos/asf/hive/blob/a75b600c/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestExportImport.java
--
diff --git 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestExportImport.java
 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestExportImport.java
index 1f19dfd..70a57f8 100644
--- 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestExportImport.java
+++ 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestExportImport.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hive.ql.parse;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.hive.conf.HiveConf;
 import org.junit.AfterClass;
 import org.junit.Before;
 import org.junit.BeforeClass;
@@ -30,6 +31,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
+import java.util.HashMap;
 
 public class TestExportImport {
 
@@ -48,8 +50,12 @@ public class TestExportImport {
 conf.set("dfs.client.use.datanode.hostname", "true");
 MiniDFSCluster miniDFSCluster =
 new MiniDFSCluster.Builder(conf).numDataNodes(1).format(true).build();
-srcHiveWarehouse = new WarehouseInstance(LOG, miniDFSCluster, false);
-destHiveWarehouse = new WarehouseInstance(LOG, miniDFSCluster, false);
+HashMap overridesForHiveConf = new HashMap() {{
+  put(HiveConf.ConfVars.HIVE_IN_TEST.varname, "false");
+}};
+srcHiveWarehouse =
+new WarehouseInstance(LOG, miniDFSCluster, overridesForHiveConf);
+destHiveWarehouse = new WarehouseInstance(LOG, miniDFSCluster, 
overridesForHiveConf);
   }
 
   @AfterClass

http://git-wip-us.apache.org/repos/asf/hive/blob/a75b600c/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/WarehouseInstance.java
--
diff --git 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/WarehouseInstance.java
 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/WarehouseInstance.java
index c084d4d..19ad442 100644
--- 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/WarehouseInstance.java
+++ 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/WarehouseInstance.java
@@ -46,10 +46,13 @@ import java.io.IOException;
 import java.net.URI;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
 import java.util.stream.Collectors;
 
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
 class WarehouseInstance implements Closeable {
@@ -64,7 +67,8 @@ class WarehouseInstance implements Closeable {
 
   private final static String LISTENER_CLASS = 
DbNotificationListener.class.getCanonicalName();
 
-  WarehouseInstance(Logger logger, MiniDFSCluster cluster, boolean 
hiveInTests) throws Exception {
+  

hive git commit: HIVE-17535 Select 1 EXCEPT Select 1 fails with NPE (Vineet Garg, reviewed by Ashutosh Chauhan)

2017-09-20 Thread vgarg
Repository: hive
Updated Branches:
  refs/heads/master c5b3ccc41 -> 583a9511b


HIVE-17535 Select 1 EXCEPT Select 1 fails with NPE (Vineet Garg, reviewed by 
Ashutosh Chauhan)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/583a9511
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/583a9511
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/583a9511

Branch: refs/heads/master
Commit: 583a9511ba8809d81595a5fa4da32ed2c2f8912e
Parents: c5b3ccc
Author: Vineet Garg 
Authored: Wed Sep 20 14:09:32 2017 -0700
Committer: Vineet Garg 
Committed: Wed Sep 20 14:09:32 2017 -0700

--
 .../test/resources/testconfiguration.properties |  1 +
 .../hadoop/hive/ql/parse/CalcitePlanner.java| 19 ++---
 .../hadoop/hive/ql/parse/SemanticAnalyzer.java  |  8 ++-
 .../clientnegative/subquery_missing_from.q  |  1 -
 .../clientnegative/subquery_select_no_source.q  |  2 --
 .../clientnegative/subquery_missing_from.q.out  |  3 ---
 .../subquery_select_no_source.q.out |  1 -
 .../clientpositive/beeline/mapjoin2.q.out   |  4 
 .../beeline/select_dummy_source.q.out   | 22 +---
 .../clientpositive/decimal_precision2.q.out | 10 -
 .../clientpositive/llap/explainuser_1.q.out | 14 +++--
 .../results/clientpositive/llap/mapjoin2.q.out  |  4 
 .../llap/select_dummy_source.q.out  | 10 +++--
 .../test/results/clientpositive/mapjoin2.q.out  |  4 
 .../clientpositive/select_dummy_source.q.out| 22 +---
 .../results/clientpositive/timestamptz_1.q.out  |  2 +-
 .../results/clientpositive/udtf_stack.q.out | 18 +---
 .../vector_tablesample_rows.q.out   |  2 +-
 18 files changed, 90 insertions(+), 57 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/583a9511/itests/src/test/resources/testconfiguration.properties
--
diff --git a/itests/src/test/resources/testconfiguration.properties 
b/itests/src/test/resources/testconfiguration.properties
index d472bb3..efa690d 100644
--- a/itests/src/test/resources/testconfiguration.properties
+++ b/itests/src/test/resources/testconfiguration.properties
@@ -30,6 +30,7 @@ disabled.query.files=ql_rewrite_gbtoidx.q,\
   cbo_rp_subq_not_in.q,\
   cbo_rp_subq_exists.q,\
   orc_llap.q,\
+  min_structvalue.q,\
   ql_rewrite_gbtoidx_cbo_2.q,\
   rcfile_merge1.q,\
   smb_mapjoin_8.q,\

http://git-wip-us.apache.org/repos/asf/hive/blob/583a9511/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java 
b/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
index 2645fab..28953b9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
@@ -709,7 +709,7 @@ public class CalcitePlanner extends SemanticAnalyzer {
 boolean isSupportedRoot = root == HiveParser.TOK_QUERY || root == 
HiveParser.TOK_EXPLAIN
 || qb.isCTAS() || qb.isMaterializedView();
 // Queries without a source table currently are not supported by CBO
-boolean isSupportedType = (qb.getIsQuery() && 
!qb.containsQueryWithoutSourceTable())
+boolean isSupportedType = (qb.getIsQuery())
 || qb.isCTAS() || qb.isMaterializedView() || cboCtx.type == 
PreCboCtx.Type.INSERT
 || cboCtx.type == PreCboCtx.Type.MULTI_INSERT;
 boolean noBadTokens = HiveCalciteUtil.validateASTForUnsupportedTokens(ast);
@@ -4164,18 +4164,11 @@ public class CalcitePlanner extends SemanticAnalyzer {
 
   if (aliasToRel.isEmpty()) {
 // // This may happen for queries like select 1; (no source table)
-// We can do following which is same, as what Hive does.
-// With this, we will be able to generate Calcite plan.
-// qb.getMetaData().setSrcForAlias(DUMMY_TABLE, getDummyTable());
-// RelNode op = genTableLogicalPlan(DUMMY_TABLE, qb);
-// qb.addAlias(DUMMY_TABLE);
-// qb.setTabAlias(DUMMY_TABLE, DUMMY_TABLE);
-// aliasToRel.put(DUMMY_TABLE, op);
-// However, Hive trips later while trying to get Metadata for this 
dummy
-// table
-// So, for now lets just disable this. Anyway there is nothing much to
-// optimize in such cases.
-throw new CalciteSemanticException("Unsupported", 
UnsupportedFeature.Others);
+qb.getMetaData().setSrcForAlias(DUMMY_TABLE, getDummyTable());
+qb.addAlias(DUMMY_TABLE);
+qb.setTabAlias(DUMMY_TABLE, DUMMY_TABLE);
+RelNode op = genTableLogicalPlan(DUMMY_TABLE, qb);
+

[04/16] hive git commit: HIVE-17510: Make comparison of filter predicates in q files deterministic (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)

2017-09-20 Thread jcamacho
http://git-wip-us.apache.org/repos/asf/hive/blob/c5b3ccc4/ql/src/test/results/clientpositive/spark/ppd_join3.q.out
--
diff --git a/ql/src/test/results/clientpositive/spark/ppd_join3.q.out 
b/ql/src/test/results/clientpositive/spark/ppd_join3.q.out
index 6706153..91b323d 100644
--- a/ql/src/test/results/clientpositive/spark/ppd_join3.q.out
+++ b/ql/src/test/results/clientpositive/spark/ppd_join3.q.out
@@ -39,7 +39,7 @@ STAGE PLANS:
   alias: src
   Statistics: Num rows: 500 Data size: 5312 Basic stats: 
COMPLETE Column stats: NONE
   Filter Operator
-predicate: ((key <> '13') and (key <> '11') and (key < 
'400') and (key <> '12') and (key <> '1') and (key > '0') and (key <> '4')) 
(type: boolean)
+predicate: ((key < '400') and (key <> '1') and (key <> 
'11') and (key <> '12') and (key <> '13') and (key <> '4') and (key > '0')) 
(type: boolean)
 Statistics: Num rows: 55 Data size: 584 Basic stats: 
COMPLETE Column stats: NONE
 Select Operator
   expressions: key (type: string)
@@ -56,7 +56,7 @@ STAGE PLANS:
   alias: src
   Statistics: Num rows: 500 Data size: 5312 Basic stats: 
COMPLETE Column stats: NONE
   Filter Operator
-predicate: ((key <> '11') and (key < '400') and (key <> 
'12') and (key <> '13') and (key > '0') and ((value <> 'val_500') or (key > 
'1')) and (key <> '4') and (key <> '1')) (type: boolean)
+predicate: (((value <> 'val_500') or (key > '1')) and (key 
< '400') and (key <> '1') and (key <> '11') and (key <> '12') and (key <> '13') 
and (key <> '4') and (key > '0')) (type: boolean)
 Statistics: Num rows: 55 Data size: 584 Basic stats: 
COMPLETE Column stats: NONE
 Select Operator
   expressions: key (type: string)
@@ -73,7 +73,7 @@ STAGE PLANS:
   alias: src
   Statistics: Num rows: 500 Data size: 5312 Basic stats: 
COMPLETE Column stats: NONE
   Filter Operator
-predicate: ((key <> '12') and (key <> '11') and (key < 
'400') and (key <> '13') and (key <> '4') and (key > '0') and (key <> '1')) 
(type: boolean)
+predicate: ((key < '400') and (key <> '1') and (key <> 
'11') and (key <> '12') and (key <> '13') and (key <> '4') and (key > '0')) 
(type: boolean)
 Statistics: Num rows: 55 Data size: 584 Basic stats: 
COMPLETE Column stats: NONE
 Select Operator
   expressions: key (type: string), value (type: string)
@@ -98,7 +98,7 @@ STAGE PLANS:
 outputColumnNames: _col1, _col2, _col3
 Statistics: Num rows: 121 Data size: 1284 Basic stats: 
COMPLETE Column stats: NONE
 Filter Operator
-  predicate: ((_col2 > '10') or (_col1 <> '10')) (type: 
boolean)
+  predicate: ((_col1 <> '10') or (_col2 > '10')) (type: 
boolean)
   Statistics: Num rows: 121 Data size: 1284 Basic stats: 
COMPLETE Column stats: NONE
   Select Operator
 expressions: _col1 (type: string), _col3 (type: string)
@@ -1760,7 +1760,7 @@ STAGE PLANS:
   alias: src
   Statistics: Num rows: 500 Data size: 5312 Basic stats: 
COMPLETE Column stats: NONE
   Filter Operator
-predicate: ((key <> '13') and (key <> '11') and (key < 
'400') and (key <> '12') and (key <> '1') and (key > '0') and (key <> '4')) 
(type: boolean)
+predicate: ((key < '400') and (key <> '1') and (key <> 
'11') and (key <> '12') and (key <> '13') and (key <> '4') and (key > '0')) 
(type: boolean)
 Statistics: Num rows: 55 Data size: 584 Basic stats: 
COMPLETE Column stats: NONE
 Select Operator
   expressions: key (type: string)
@@ -1777,7 +1777,7 @@ STAGE PLANS:
   alias: src
   Statistics: Num rows: 500 Data size: 5312 Basic stats: 
COMPLETE Column stats: NONE
   Filter Operator
-predicate: ((key <> '11') and (key < '400') and (key <> 
'12') and (key <> '13') and (key > '0') and ((value <> 'val_500') or (key > 
'1')) and (key <> '4') and (key <> '1')) (type: boolean)
+predicate: (((value <> 'val_500') or (key > '1')) and (key 
< '400') and (key <> '1') and (key <> '11') and (key <> '12') and (key <> '13') 
and (key <> '4') and (key > '0')) (type: boolean)
 Statistics: Num rows: 55 Data size: 584 Basic stats: 
COMPLETE Column stats: NONE
 Select Operator
   expressions: key (type: string)
@@ -1794,7 +1794,7 @@ STAGE PLANS:
   

[14/16] hive git commit: HIVE-17510: Make comparison of filter predicates in q files deterministic (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)

2017-09-20 Thread jcamacho
http://git-wip-us.apache.org/repos/asf/hive/blob/c5b3ccc4/ql/src/test/results/clientpositive/llap/bucketpruning1.q.out
--
diff --git a/ql/src/test/results/clientpositive/llap/bucketpruning1.q.out 
b/ql/src/test/results/clientpositive/llap/bucketpruning1.q.out
index 7898832..83070e6 100644
--- a/ql/src/test/results/clientpositive/llap/bucketpruning1.q.out
+++ b/ql/src/test/results/clientpositive/llap/bucketpruning1.q.out
@@ -349,7 +349,7 @@ STAGE PLANS:
   GatherStats: false
   Filter Operator
 isSamplingPred: false
-predicate: ((key = 1) and (ds = '2008-04-08')) (type: 
boolean)
+predicate: ((ds = '2008-04-08') and (key = 1)) (type: 
boolean)
 Statistics: Num rows: 1 Data size: 372 Basic stats: 
COMPLETE Column stats: PARTIAL
 Select Operator
   expressions: 1 (type: int), value (type: string), 
'2008-04-08' (type: string)
@@ -411,7 +411,7 @@ STAGE PLANS:
   GatherStats: false
   Filter Operator
 isSamplingPred: false
-predicate: ((key = 1) and (ds = '2008-04-08') and (value = 
'One')) (type: boolean)
+predicate: ((ds = '2008-04-08') and (key = 1) and (value = 
'One')) (type: boolean)
 Statistics: Num rows: 1 Data size: 372 Basic stats: 
COMPLETE Column stats: PARTIAL
 Select Operator
   expressions: 1 (type: int), 'One' (type: string), 
'2008-04-08' (type: string)
@@ -473,7 +473,7 @@ STAGE PLANS:
   GatherStats: false
   Filter Operator
 isSamplingPred: false
-predicate: ((value = 'One') and (key = 1) and (ds = 
'2008-04-08')) (type: boolean)
+predicate: ((ds = '2008-04-08') and (key = 1) and (value = 
'One')) (type: boolean)
 Statistics: Num rows: 1 Data size: 372 Basic stats: 
COMPLETE Column stats: PARTIAL
 Select Operator
   expressions: 1 (type: int), 'One' (type: string), 
'2008-04-08' (type: string)
@@ -597,7 +597,7 @@ STAGE PLANS:
   GatherStats: false
   Filter Operator
 isSamplingPred: false
-predicate: ((key) IN (2, 3) and (ds = '2008-04-08')) 
(type: boolean)
+predicate: ((ds = '2008-04-08') and (key) IN (2, 3)) 
(type: boolean)
 Statistics: Num rows: 1 Data size: 372 Basic stats: 
COMPLETE Column stats: PARTIAL
 Select Operator
   expressions: key (type: int), value (type: string), 
'2008-04-08' (type: string)
@@ -659,7 +659,7 @@ STAGE PLANS:
   GatherStats: false
   Filter Operator
 isSamplingPred: false
-predicate: ((key) IN (2, 3) and (ds = '2008-04-08') and 
(value = 'One')) (type: boolean)
+predicate: ((ds = '2008-04-08') and (key) IN (2, 3) and 
(value = 'One')) (type: boolean)
 Statistics: Num rows: 1 Data size: 372 Basic stats: 
COMPLETE Column stats: PARTIAL
 Select Operator
   expressions: key (type: int), 'One' (type: string), 
'2008-04-08' (type: string)
@@ -721,7 +721,7 @@ STAGE PLANS:
   GatherStats: false
   Filter Operator
 isSamplingPred: false
-predicate: ((key) IN (2, 3) and (value = 'One') and (ds = 
'2008-04-08')) (type: boolean)
+predicate: ((ds = '2008-04-08') and (key) IN (2, 3) and 
(value = 'One')) (type: boolean)
 Statistics: Num rows: 1 Data size: 372 Basic stats: 
COMPLETE Column stats: PARTIAL
 Select Operator
   expressions: key (type: int), 'One' (type: string), 
'2008-04-08' (type: string)
@@ -843,7 +843,7 @@ STAGE PLANS:
   GatherStats: false
   Filter Operator
 isSamplingPred: false
-predicate: (((key = 1) or (key = 2)) and (value = 'One') 
and (ds = '2008-04-08')) (type: boolean)
+predicate: (((key = 1) or (key = 2)) and (ds = 
'2008-04-08') and (value = 'One')) (type: boolean)
 Statistics: Num rows: 1 Data size: 372 Basic stats: 
COMPLETE Column stats: PARTIAL
 Select Operator
   expressions: key (type: int), 'One' (type: string), 
'2008-04-08' (type: string)
@@ -1027,7 +1027,7 @@ STAGE PLANS:
   GatherStats: false
   Filter Operator
 isSamplingPred: false
-predicate: ((key) IN (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 
12, 13, 14, 15, 16, 17) and (ds = '2008-04-08')) (type: 

[15/16] hive git commit: HIVE-17510: Make comparison of filter predicates in q files deterministic (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)

2017-09-20 Thread jcamacho
http://git-wip-us.apache.org/repos/asf/hive/blob/c5b3ccc4/ql/src/test/results/clientpositive/correlationoptimizer9.q.out
--
diff --git a/ql/src/test/results/clientpositive/correlationoptimizer9.q.out 
b/ql/src/test/results/clientpositive/correlationoptimizer9.q.out
index be54d33..5372408 100644
--- a/ql/src/test/results/clientpositive/correlationoptimizer9.q.out
+++ b/ql/src/test/results/clientpositive/correlationoptimizer9.q.out
@@ -123,7 +123,7 @@ STAGE PLANS:
 alias: x1
 Statistics: Num rows: 1028 Data size: 22964 Basic stats: COMPLETE 
Column stats: NONE
 Filter Operator
-  predicate: ((c2 > 100) and (c2 < 120)) (type: boolean)
+  predicate: ((c2 < 120) and (c2 > 100)) (type: boolean)
   Statistics: Num rows: 114 Data size: 2546 Basic stats: COMPLETE 
Column stats: NONE
   Group By Operator
 aggregations: count()
@@ -230,7 +230,7 @@ STAGE PLANS:
 alias: x1
 Statistics: Num rows: 1028 Data size: 22964 Basic stats: COMPLETE 
Column stats: NONE
 Filter Operator
-  predicate: ((c2 > 100) and (c2 < 120)) (type: boolean)
+  predicate: ((c2 < 120) and (c2 > 100)) (type: boolean)
   Statistics: Num rows: 114 Data size: 2546 Basic stats: COMPLETE 
Column stats: NONE
   Group By Operator
 aggregations: count()
@@ -438,7 +438,7 @@ STAGE PLANS:
 alias: x1
 Statistics: Num rows: 1028 Data size: 22964 Basic stats: COMPLETE 
Column stats: NONE
 Filter Operator
-  predicate: ((c2 > 100) and (c1 < 120) and c3 is not null) (type: 
boolean)
+  predicate: ((c1 < 120) and (c2 > 100) and c3 is not null) (type: 
boolean)
   Statistics: Num rows: 114 Data size: 2546 Basic stats: COMPLETE 
Column stats: NONE
   Select Operator
 expressions: c1 (type: int), c3 (type: string)
@@ -549,7 +549,7 @@ STAGE PLANS:
 alias: x1
 Statistics: Num rows: 1028 Data size: 22964 Basic stats: COMPLETE 
Column stats: NONE
 Filter Operator
-  predicate: ((c2 > 100) and (c1 < 120) and c3 is not null) (type: 
boolean)
+  predicate: ((c1 < 120) and (c2 > 100) and c3 is not null) (type: 
boolean)
   Statistics: Num rows: 114 Data size: 2546 Basic stats: COMPLETE 
Column stats: NONE
   Select Operator
 expressions: c1 (type: int), c3 (type: string)

http://git-wip-us.apache.org/repos/asf/hive/blob/c5b3ccc4/ql/src/test/results/clientpositive/except_all.q.out
--
diff --git a/ql/src/test/results/clientpositive/except_all.q.out 
b/ql/src/test/results/clientpositive/except_all.q.out
index 4c8c4d2..c4753b0 100644
--- a/ql/src/test/results/clientpositive/except_all.q.out
+++ b/ql/src/test/results/clientpositive/except_all.q.out
@@ -579,7 +579,7 @@ STAGE PLANS:
   outputColumnNames: _col0, _col1, _col2, _col3
   Statistics: Num rows: 187 Data size: 1986 Basic stats: COMPLETE 
Column stats: NONE
   Filter Operator
-predicate: ((_col2 > 0) and ((_col2 * 2) = _col3)) (type: boolean)
+predicate: (((_col2 * 2) = _col3) and (_col2 > 0)) (type: boolean)
 Statistics: Num rows: 31 Data size: 329 Basic stats: COMPLETE 
Column stats: NONE
 Select Operator
   expressions: _col0 (type: string), _col1 (type: string)
@@ -651,7 +651,7 @@ STAGE PLANS:
   outputColumnNames: _col0, _col1, _col2, _col3
   Statistics: Num rows: 132 Data size: 1402 Basic stats: COMPLETE 
Column stats: NONE
   Filter Operator
-predicate: ((_col2 > 0) and ((_col2 * 2) = _col3)) (type: boolean)
+predicate: (((_col2 * 2) = _col3) and (_col2 > 0)) (type: boolean)
 Statistics: Num rows: 22 Data size: 233 Basic stats: COMPLETE 
Column stats: NONE
 Select Operator
   expressions: _col0 (type: string), _col1 (type: string)
@@ -902,7 +902,7 @@ STAGE PLANS:
   outputColumnNames: _col0, _col1, _col2
   Statistics: Num rows: 1 Data size: 3 Basic stats: COMPLETE Column 
stats: NONE
   Filter Operator
-predicate: ((_col1 > 0) and ((_col1 * 2) = _col2)) (type: boolean)
+predicate: (((_col1 * 2) = _col2) and (_col1 > 0)) (type: boolean)
 Statistics: Num rows: 1 Data size: 3 Basic stats: COMPLETE Column 
stats: NONE
 Select Operator
   expressions: _col0 (type: int)

http://git-wip-us.apache.org/repos/asf/hive/blob/c5b3ccc4/ql/src/test/results/clientpositive/filter_cond_pushdown.q.out
--
diff --git a/ql/src/test/results/clientpositive/filter_cond_pushdown.q.out 

[03/16] hive git commit: HIVE-17510: Make comparison of filter predicates in q files deterministic (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)

2017-09-20 Thread jcamacho
http://git-wip-us.apache.org/repos/asf/hive/blob/c5b3ccc4/ql/src/test/results/clientpositive/spark/skewjoinopt8.q.out
--
diff --git a/ql/src/test/results/clientpositive/spark/skewjoinopt8.q.out 
b/ql/src/test/results/clientpositive/spark/skewjoinopt8.q.out
index 223665b..d3df7a4 100644
--- a/ql/src/test/results/clientpositive/spark/skewjoinopt8.q.out
+++ b/ql/src/test/results/clientpositive/spark/skewjoinopt8.q.out
@@ -72,7 +72,7 @@ STAGE PLANS:
   alias: a
   Statistics: Num rows: 1 Data size: 30 Basic stats: COMPLETE 
Column stats: NONE
   Filter Operator
-predicate: (key is not null and ((key = '3') or (key = 
'8'))) (type: boolean)
+predicate: (((key = '3') or (key = '8')) and key is not 
null) (type: boolean)
 Statistics: Num rows: 1 Data size: 30 Basic stats: 
COMPLETE Column stats: NONE
 Select Operator
   expressions: key (type: string), val (type: string)
@@ -90,7 +90,7 @@ STAGE PLANS:
   alias: b
   Statistics: Num rows: 1 Data size: 30 Basic stats: COMPLETE 
Column stats: NONE
   Filter Operator
-predicate: (key is not null and ((key = '3') or (key = 
'8'))) (type: boolean)
+predicate: (((key = '3') or (key = '8')) and key is not 
null) (type: boolean)
 Statistics: Num rows: 1 Data size: 30 Basic stats: 
COMPLETE Column stats: NONE
 Select Operator
   expressions: key (type: string), val (type: string)
@@ -108,7 +108,7 @@ STAGE PLANS:
   alias: c
   Statistics: Num rows: 1 Data size: 20 Basic stats: COMPLETE 
Column stats: NONE
   Filter Operator
-predicate: (key is not null and ((key = '3') or (key = 
'8'))) (type: boolean)
+predicate: (((key = '3') or (key = '8')) and key is not 
null) (type: boolean)
 Statistics: Num rows: 1 Data size: 20 Basic stats: 
COMPLETE Column stats: NONE
 Select Operator
   expressions: key (type: string), val (type: string)
@@ -126,7 +126,7 @@ STAGE PLANS:
   alias: a
   Statistics: Num rows: 1 Data size: 30 Basic stats: COMPLETE 
Column stats: NONE
   Filter Operator
-predicate: (key is not null and ((key <> '3') and (key <> 
'8'))) (type: boolean)
+predicate: (((key <> '3') and (key <> '8')) and key is not 
null) (type: boolean)
 Statistics: Num rows: 1 Data size: 30 Basic stats: 
COMPLETE Column stats: NONE
 Select Operator
   expressions: key (type: string), val (type: string)
@@ -144,7 +144,7 @@ STAGE PLANS:
   alias: b
   Statistics: Num rows: 1 Data size: 30 Basic stats: COMPLETE 
Column stats: NONE
   Filter Operator
-predicate: (key is not null and ((key <> '3') and (key <> 
'8'))) (type: boolean)
+predicate: (((key <> '3') and (key <> '8')) and key is not 
null) (type: boolean)
 Statistics: Num rows: 1 Data size: 30 Basic stats: 
COMPLETE Column stats: NONE
 Select Operator
   expressions: key (type: string), val (type: string)
@@ -162,7 +162,7 @@ STAGE PLANS:
   alias: c
   Statistics: Num rows: 1 Data size: 20 Basic stats: COMPLETE 
Column stats: NONE
   Filter Operator
-predicate: (key is not null and ((key <> '3') and (key <> 
'8'))) (type: boolean)
+predicate: (((key <> '3') and (key <> '8')) and key is not 
null) (type: boolean)
 Statistics: Num rows: 1 Data size: 20 Basic stats: 
COMPLETE Column stats: NONE
 Select Operator
   expressions: key (type: string), val (type: string)

http://git-wip-us.apache.org/repos/asf/hive/blob/c5b3ccc4/ql/src/test/results/clientpositive/spark/smb_mapjoin_10.q.out
--
diff --git a/ql/src/test/results/clientpositive/spark/smb_mapjoin_10.q.out 
b/ql/src/test/results/clientpositive/spark/smb_mapjoin_10.q.out
index 29e98e9..e3431be 100644
--- a/ql/src/test/results/clientpositive/spark/smb_mapjoin_10.q.out
+++ b/ql/src/test/results/clientpositive/spark/smb_mapjoin_10.q.out
@@ -83,7 +83,7 @@ STAGE PLANS:
   alias: b
   Statistics: Num rows: 3 Data size: 414 Basic stats: COMPLETE 
Column stats: NONE
   Filter Operator
-predicate: (userid is not null and pageid is not null and 
postid is not null and type is not null) (type: boolean)
+

[02/16] hive git commit: HIVE-17510: Make comparison of filter predicates in q files deterministic (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)

2017-09-20 Thread jcamacho
http://git-wip-us.apache.org/repos/asf/hive/blob/c5b3ccc4/ql/src/test/results/clientpositive/spark/vectorization_short_regress.q.out
--
diff --git 
a/ql/src/test/results/clientpositive/spark/vectorization_short_regress.q.out 
b/ql/src/test/results/clientpositive/spark/vectorization_short_regress.q.out
index 18a54ff..c547dca 100644
--- a/ql/src/test/results/clientpositive/spark/vectorization_short_regress.q.out
+++ b/ql/src/test/results/clientpositive/spark/vectorization_short_regress.q.out
@@ -100,7 +100,7 @@ STAGE PLANS:
 className: VectorFilterOperator
 native: true
 predicateExpression: FilterExprOrExpr(children: 
FilterLongScalarEqualLongColumn(val 762, col 3) -> boolean, 
FilterExprAndExpr(children: FilterDoubleColLessDoubleColumn(col 12, col 
4)(children: CastLongToFloatViaLongToDouble(col 1) -> 12:double) -> boolean, 
FilterDoubleColGreaterDoubleScalar(col 12, val -5.0)(children: 
CastTimestampToDouble(col 9) -> 12:double) -> boolean, 
FilterDoubleColNotEqualDoubleColumn(col 5, col 12)(children: 
CastLongToDouble(col 2) -> 12:double) -> boolean) -> boolean, 
FilterStringGroupColEqualStringScalar(col 6, val a) -> boolean, 
FilterExprAndExpr(children: FilterDecimalColLessEqualDecimalScalar(col 13, val 
-1.389)(children: CastLongToDecimal(col 3) -> 13:decimal(22,3)) -> boolean, 
FilterStringGroupColNotEqualStringScalar(col 7, val a) -> boolean, 
FilterDecimalScalarNotEqualDecimalColumn(val 79.553, col 14)(children: 
CastLongToDecimal(col 2) -> 14:decimal(13,3)) -> boolean, 
FilterLongColNotEqualLongColumn(col 11, col 10) -> boolean) 
 -> boolean) -> boolean
-predicate: ((762 = cbigint) or ((UDFToFloat(csmallint) < 
cfloat) and (UDFToDouble(ctimestamp2) > -5.0) and (cdouble <> 
UDFToDouble(cint))) or (cstring1 = 'a') or ((CAST( cbigint AS decimal(22,3)) <= 
-1.389) and (cstring2 <> 'a') and (79.553 <> CAST( cint AS decimal(13,3))) and 
(cboolean2 <> cboolean1))) (type: boolean)
+predicate: (((CAST( cbigint AS decimal(22,3)) <= -1.389) 
and (cstring2 <> 'a') and (79.553 <> CAST( cint AS decimal(13,3))) and 
(cboolean2 <> cboolean1)) or ((UDFToFloat(csmallint) < cfloat) and 
(UDFToDouble(ctimestamp2) > -5.0) and (cdouble <> UDFToDouble(cint))) or (762 = 
cbigint) or (cstring1 = 'a')) (type: boolean)
 Statistics: Num rows: 12288 Data size: 2641964 Basic 
stats: COMPLETE Column stats: NONE
 Select Operator
   expressions: ctinyint (type: tinyint), csmallint (type: 
smallint), cint (type: int), cfloat (type: float), cdouble (type: double)
@@ -362,7 +362,7 @@ STAGE PLANS:
 className: VectorFilterOperator
 native: true
 predicateExpression: FilterExprOrExpr(children: 
FilterExprAndExpr(children: FilterLongColLessEqualLongScalar(col 3, val 197) -> 
boolean, FilterLongColLessLongColumn(col 2, col 3)(children: col 2) -> boolean) 
-> boolean, FilterExprAndExpr(children: 
FilterDoubleColGreaterEqualDoubleScalar(col 5, val -26.28) -> boolean, 
FilterDoubleColGreaterDoubleColumn(col 12, col 5)(children: 
CastLongToDouble(col 1) -> 12:double) -> boolean) -> boolean, 
FilterExprAndExpr(children: FilterDoubleColGreaterDoubleColumn(col 12, col 
4)(children: CastLongToFloatViaLongToDouble(col 0) -> 12:double) -> boolean, 
FilterStringColRegExpStringScalar(col 6, pattern .*ss.*) -> boolean) -> 
boolean, FilterExprAndExpr(children: FilterDoubleColGreaterDoubleScalar(col 4, 
val 79.5530014038086) -> boolean, FilterStringColLikeStringScalar(col 7, 
pattern 10%) -> boolean) -> boolean) -> boolean
-predicate: (((cbigint <= 197) and (UDFToLong(cint) < 
cbigint)) or ((cdouble >= -26.28) and (UDFToDouble(csmallint) > cdouble)) or 
((UDFToFloat(ctinyint) > cfloat) and cstring1 regexp '.*ss.*') or ((cfloat > 
79.553) and (cstring2 like '10%'))) (type: boolean)
+predicate: (((UDFToFloat(ctinyint) > cfloat) and cstring1 
regexp '.*ss.*') or ((cbigint <= 197) and (UDFToLong(cint) < cbigint)) or 
((cdouble >= -26.28) and (UDFToDouble(csmallint) > cdouble)) or ((cfloat > 
79.553) and (cstring2 like '10%'))) (type: boolean)
 Statistics: Num rows: 6826 Data size: 1467614 Basic stats: 
COMPLETE Column stats: NONE
 Select Operator
   expressions: ctinyint (type: tinyint), csmallint (type: 
smallint), cint (type: int), cbigint (type: bigint), cdouble (type: double)
@@ -616,7 +616,7 @@ STAGE PLANS:
 className: VectorFilterOperator
 native: true
 predicateExpression: FilterExprOrExpr(children: 
FilterTimestampColEqualTimestampColumn(col 8, col 9) -> boolean, 
FilterDoubleScalarEqualDoubleColumn(val 762.0, col 4) -> boolean, 
FilterStringGroupColEqualStringScalar(col 6, 

[11/16] hive git commit: HIVE-17510: Make comparison of filter predicates in q files deterministic (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)

2017-09-20 Thread jcamacho
http://git-wip-us.apache.org/repos/asf/hive/blob/c5b3ccc4/ql/src/test/results/clientpositive/llap/vectorization_17.q.out
--
diff --git a/ql/src/test/results/clientpositive/llap/vectorization_17.q.out 
b/ql/src/test/results/clientpositive/llap/vectorization_17.q.out
index a66ea36..4d6e0a2 100644
--- a/ql/src/test/results/clientpositive/llap/vectorization_17.q.out
+++ b/ql/src/test/results/clientpositive/llap/vectorization_17.q.out
@@ -75,7 +75,7 @@ STAGE PLANS:
 className: VectorFilterOperator
 native: true
 predicateExpression: FilterExprAndExpr(children: 
FilterLongColGreaterLongScalar(col 3, val -23) -> boolean, 
FilterExprOrExpr(children: FilterDoubleColNotEqualDoubleScalar(col 5, val 
98.0) -> boolean, FilterDecimalColGreaterDecimalScalar(col 12, val 
-863.257)(children: CastLongToDecimal(col 2) -> 12:decimal(13,3)) -> boolean) 
-> boolean, FilterExprOrExpr(children: FilterLongColGreaterEqualLongScalar(col 
0, val 33) -> boolean, FilterLongColGreaterEqualLongColumn(col 1, col 
3)(children: col 1) -> boolean, FilterDoubleColEqualDoubleColumn(col 4, col 
5)(children: col 4) -> boolean) -> boolean) -> boolean
-predicate: ((cbigint > -23) and ((cdouble <> 98.0) or 
(CAST( cint AS decimal(13,3)) > -863.257)) and ((ctinyint >= 33) or 
(UDFToLong(csmallint) >= cbigint) or (UDFToDouble(cfloat) = cdouble))) (type: 
boolean)
+predicate: (((cdouble <> 98.0) or (CAST( cint AS 
decimal(13,3)) > -863.257)) and ((ctinyint >= 33) or (UDFToLong(csmallint) >= 
cbigint) or (UDFToDouble(cfloat) = cdouble)) and (cbigint > -23)) (type: 
boolean)
 Statistics: Num rows: 4096 Data size: 549274 Basic stats: 
COMPLETE Column stats: COMPLETE
 Select Operator
   expressions: cfloat (type: float), cstring1 (type: 
string), cint (type: int), ctimestamp1 (type: timestamp), cdouble (type: 
double), cbigint (type: bigint), (UDFToDouble(cfloat) / UDFToDouble(ctinyint)) 
(type: double), (UDFToLong(cint) % cbigint) (type: bigint), (- cdouble) (type: 
double), (cdouble + (UDFToDouble(cfloat) / UDFToDouble(ctinyint))) (type: 
double), (cdouble / UDFToDouble(cint)) (type: double), (- (- cdouble)) (type: 
double), (9763215.5639 % CAST( cbigint AS decimal(19,0))) (type: 
decimal(11,4)), (2563.58 + (- (- cdouble))) (type: double)

http://git-wip-us.apache.org/repos/asf/hive/blob/c5b3ccc4/ql/src/test/results/clientpositive/llap/vectorization_2.q.out
--
diff --git a/ql/src/test/results/clientpositive/llap/vectorization_2.q.out 
b/ql/src/test/results/clientpositive/llap/vectorization_2.q.out
index affd1b6..80ac2b6 100644
--- a/ql/src/test/results/clientpositive/llap/vectorization_2.q.out
+++ b/ql/src/test/results/clientpositive/llap/vectorization_2.q.out
@@ -73,7 +73,7 @@ STAGE PLANS:
 className: VectorFilterOperator
 native: true
 predicateExpression: FilterExprOrExpr(children: 
FilterExprAndExpr(children: FilterTimestampColLessTimestampColumn(col 8, col 9) 
-> boolean, FilterStringColLikeStringScalar(col 7, pattern b%) -> boolean, 
FilterDoubleColLessEqualDoubleScalar(col 4, val -5638.14990234375) -> boolean) 
-> boolean, FilterExprAndExpr(children: FilterDoubleColLessDoubleColumn(col 5, 
col 12)(children: CastLongToDouble(col 0) -> 12:double) -> boolean, 
FilterExprOrExpr(children: FilterDoubleScalarNotEqualDoubleColumn(val -10669.0, 
col 12)(children: CastTimestampToDouble(col 9) -> 12:double) -> boolean, 
FilterLongScalarGreaterLongColumn(val 359, col 2) -> boolean) -> boolean) -> 
boolean) -> boolean
-predicate: (((ctimestamp1 < ctimestamp2) and (cstring2 
like 'b%') and (cfloat <= -5638.15)) or ((cdouble < UDFToDouble(ctinyint)) and 
((-10669.0 <> UDFToDouble(ctimestamp2)) or (359 > cint)))) (type: boolean)
+predicate: (((cdouble < UDFToDouble(ctinyint)) and 
((-10669.0 <> UDFToDouble(ctimestamp2)) or (359 > cint))) or ((ctimestamp1 < 
ctimestamp2) and (cstring2 like 'b%') and (cfloat <= -5638.15))) (type: boolean)
 Statistics: Num rows: 4096 Data size: 719232 Basic stats: 
COMPLETE Column stats: COMPLETE
 Select Operator
   expressions: ctinyint (type: tinyint), csmallint (type: 
smallint), cbigint (type: bigint), cfloat (type: float), cdouble (type: double)

http://git-wip-us.apache.org/repos/asf/hive/blob/c5b3ccc4/ql/src/test/results/clientpositive/llap/vectorization_3.q.out
--
diff --git a/ql/src/test/results/clientpositive/llap/vectorization_3.q.out 
b/ql/src/test/results/clientpositive/llap/vectorization_3.q.out
index 4154746..991bd89 100644
--- 

[10/16] hive git commit: HIVE-17510: Make comparison of filter predicates in q files deterministic (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)

2017-09-20 Thread jcamacho
http://git-wip-us.apache.org/repos/asf/hive/blob/c5b3ccc4/ql/src/test/results/clientpositive/masking_disablecbo_3.q.out
--
diff --git a/ql/src/test/results/clientpositive/masking_disablecbo_3.q.out 
b/ql/src/test/results/clientpositive/masking_disablecbo_3.q.out
index 83cb6ca..c6c7d09 100644
--- a/ql/src/test/results/clientpositive/masking_disablecbo_3.q.out
+++ b/ql/src/test/results/clientpositive/masking_disablecbo_3.q.out
@@ -603,7 +603,7 @@ STAGE PLANS:
 alias: masking_test_subq
 Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE 
Column stats: NONE
 Filter Operator
-  predicate: (UDFToDouble(key) is not null and (key > 0)) (type: 
boolean)
+  predicate: ((key > 0) and UDFToDouble(key) is not null) (type: 
boolean)
   Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE 
Column stats: NONE
   Reduce Output Operator
 key expressions: UDFToDouble(key) (type: double), 
UDFToDouble(key) (type: double)
@@ -1177,7 +1177,7 @@ STAGE PLANS:
 alias: masking_test_subq
 Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE 
Column stats: NONE
 Filter Operator
-  predicate: (UDFToDouble(key) is not null and (key > 0)) (type: 
boolean)
+  predicate: ((key > 0) and UDFToDouble(key) is not null) (type: 
boolean)
   Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE 
Column stats: NONE
   Reduce Output Operator
 key expressions: UDFToDouble(key) (type: double), 
UDFToDouble(key) (type: double)
@@ -1751,7 +1751,7 @@ STAGE PLANS:
 alias: masking_test_subq
 Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE 
Column stats: NONE
 Filter Operator
-  predicate: (UDFToDouble(key) is not null and (key > 0)) (type: 
boolean)
+  predicate: ((key > 0) and UDFToDouble(key) is not null) (type: 
boolean)
   Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE 
Column stats: NONE
   Reduce Output Operator
 key expressions: UDFToDouble(key) (type: double), 
UDFToDouble(key) (type: double)
@@ -6571,7 +6571,7 @@ STAGE PLANS:
 alias: masking_test_subq
 Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE 
Column stats: NONE
 Filter Operator
-  predicate: (UDFToDouble(key) is not null and (key > 0)) (type: 
boolean)
+  predicate: ((key > 0) and UDFToDouble(key) is not null) (type: 
boolean)
   Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE 
Column stats: NONE
   Reduce Output Operator
 key expressions: UDFToDouble(key) (type: double), 
UDFToDouble(key) (type: double)
@@ -7145,7 +7145,7 @@ STAGE PLANS:
 alias: masking_test_subq
 Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE 
Column stats: NONE
 Filter Operator
-  predicate: (UDFToDouble(key) is not null and (key > 0)) (type: 
boolean)
+  predicate: ((key > 0) and UDFToDouble(key) is not null) (type: 
boolean)
   Statistics: Num rows: 166 Data size: 1763 Basic stats: COMPLETE 
Column stats: NONE
   Reduce Output Operator
 key expressions: UDFToDouble(key) (type: double), 
UDFToDouble(key) (type: double)
@@ -7719,7 +7719,7 @@ STAGE PLANS:
 alias: src
 Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE 
Column stats: NONE
 Filter Operator
-  predicate: ((key > 0) and (key < 10) and ((key % 2) = 0)) (type: 
boolean)
+  predicate: (((key % 2) = 0) and (key < 10) and (key > 0)) (type: 
boolean)
   Statistics: Num rows: 27 Data size: 286 Basic stats: COMPLETE 
Column stats: NONE
   Select Operator
 expressions: key (type: string), upper(value) (type: string)

http://git-wip-us.apache.org/repos/asf/hive/blob/c5b3ccc4/ql/src/test/results/clientpositive/materialized_view_create_rewrite.q.out
--
diff --git 
a/ql/src/test/results/clientpositive/materialized_view_create_rewrite.q.out 
b/ql/src/test/results/clientpositive/materialized_view_create_rewrite.q.out
index 35e9a5d..f6b161b 100644
--- a/ql/src/test/results/clientpositive/materialized_view_create_rewrite.q.out
+++ b/ql/src/test/results/clientpositive/materialized_view_create_rewrite.q.out
@@ -157,7 +157,7 @@ STAGE PLANS:
 alias: cmv_basetable
 Statistics: Num rows: 5 Data size: 81 Basic stats: COMPLETE Column 
stats: NONE
 Filter Operator
-  predicate: ((d = 3) and (3 = a)) (type: boolean)
+  predicate: ((3 = a) and (d = 3)) (type: boolean)

[16/16] hive git commit: HIVE-17510: Make comparison of filter predicates in q files deterministic (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)

2017-09-20 Thread jcamacho
HIVE-17510: Make comparison of filter predicates in q files deterministic 
(Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/c5b3ccc4
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/c5b3ccc4
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/c5b3ccc4

Branch: refs/heads/master
Commit: c5b3ccc41016afd94035637cb011eacbeb9e5893
Parents: 0bdc570
Author: Jesus Camacho Rodriguez 
Authored: Tue Sep 12 14:59:10 2017 -0700
Committer: Jesus Camacho Rodriguez 
Committed: Thu Sep 21 05:37:38 2017 +1000

--
 .../results/positive/hbase_custom_key2.q.out|  2 +-
 .../results/positive/hbase_custom_key3.q.out|  2 +-
 .../results/positive/hbase_ppd_key_range.q.out  |  2 +-
 .../test/results/positive/hbase_pushdown.q.out  |  2 +-
 .../test/results/positive/hbase_queries.q.out   |  2 +-
 .../hbase_single_sourced_multi_insert.q.out |  4 +-
 .../apache/hadoop/hive/ql/exec/ExplainTask.java | 63 +++-
 .../org/apache/hadoop/hive/ql/plan/Explain.java |  3 +
 .../hadoop/hive/ql/plan/ExprNodeDesc.java   |  5 ++
 .../hive/ql/plan/ExprNodeGenericFuncDesc.java   | 23 +++
 .../apache/hadoop/hive/ql/plan/FilterDesc.java  | 12 +++-
 .../apache/hadoop/hive/ql/plan/PlanUtils.java   | 17 --
 .../org/apache/hadoop/hive/ql/udf/UDFType.java  |  8 +++
 .../hive/ql/udf/generic/GenericUDFOPAnd.java|  2 +
 .../hive/ql/udf/generic/GenericUDFOPOr.java |  2 +
 .../hadoop/hive/ql/exec/TestExplainTask.java|  8 ++-
 .../annotate_stats_deep_filters.q.out   |  4 +-
 .../clientpositive/annotate_stats_filter.q.out  |  2 +-
 .../annotate_stats_join_pkfk.q.out  |  2 +-
 .../results/clientpositive/auto_join20.q.out|  2 +-
 .../results/clientpositive/auto_join28.q.out|  2 +-
 .../results/clientpositive/auto_join29.q.out| 14 ++---
 .../results/clientpositive/auto_join4.q.out |  4 +-
 .../results/clientpositive/auto_join5.q.out |  2 +-
 .../results/clientpositive/auto_join6.q.out |  4 +-
 .../results/clientpositive/auto_join7.q.out |  6 +-
 .../results/clientpositive/auto_join8.q.out |  4 +-
 .../auto_join_reordering_values.q.out   |  2 +-
 .../materialized_view_create_rewrite.q.out  |  4 +-
 .../clientpositive/beeline/smb_mapjoin_10.q.out |  2 +-
 .../test/results/clientpositive/cbo_const.q.out |  2 +-
 .../clientpositive/cbo_rp_outer_join_ppr.q.out  |  4 +-
 .../results/clientpositive/cbo_union_view.q.out |  6 +-
 .../clientpositive/constant_prop_1.q.out|  2 +-
 .../clientpositive/constant_prop_3.q.out|  2 +-
 .../clientpositive/correlated_join_keys.q.out   |  8 +--
 .../clientpositive/correlationoptimizer10.q.out |  4 +-
 .../clientpositive/correlationoptimizer13.q.out |  2 +-
 .../clientpositive/correlationoptimizer9.q.out  |  8 +--
 .../results/clientpositive/except_all.q.out |  6 +-
 .../clientpositive/filter_cond_pushdown.q.out   |  6 +-
 .../filter_cond_pushdown_HIVE_15647.q.out   | 10 ++--
 .../results/clientpositive/flatten_and_or.q.out |  2 +-
 .../clientpositive/fold_eq_with_case_when.q.out |  2 +-
 .../clientpositive/folder_predicate.q.out   | 14 ++---
 .../clientpositive/fouter_join_ppr.q.out|  8 +--
 .../groupby_grouping_sets_grouping.q.out|  4 +-
 .../groupby_multi_single_reducer3.q.out | 20 +++
 .../clientpositive/groupby_position.q.out   |  2 +-
 .../test/results/clientpositive/having2.q.out   |  2 +-
 .../identity_project_remove_skip.q.out  |  2 +-
 .../results/clientpositive/index_auto.q.out |  6 +-
 .../clientpositive/index_auto_mult_tables.q.out |  6 +-
 .../index_auto_mult_tables_compact.q.out|  6 +-
 .../clientpositive/index_auto_self_join.q.out   | 12 ++--
 .../clientpositive/index_auto_unused.q.out  |  6 +-
 .../index_bitmap_compression.q.out  |  4 +-
 .../clientpositive/index_compression.q.out  |  4 +-
 .../clientpositive/infer_const_type.q.out   |  8 +--
 .../test/results/clientpositive/input12.q.out   |  2 +-
 .../test/results/clientpositive/input13.q.out   |  4 +-
 .../results/clientpositive/input_part2.q.out|  4 +-
 .../results/clientpositive/input_part4.q.out|  2 +-
 .../clientpositive/input_testxpath4.q.out   |  4 +-
 .../test/results/clientpositive/insert1.q.out   |  2 +-
 ql/src/test/results/clientpositive/join19.q.out |  6 +-
 ql/src/test/results/clientpositive/join20.q.out |  2 +-
 ql/src/test/results/clientpositive/join4.q.out  |  4 +-
 ql/src/test/results/clientpositive/join40.q.out |  2 +-
 ql/src/test/results/clientpositive/join45.q.out |  2 +-
 ql/src/test/results/clientpositive/join5.q.out  |  2 +-
 ql/src/test/results/clientpositive/join6.q.out  |  4 +-
 ql/src/test/results/clientpositive/join7.q.out  |  6 +-
 

[13/16] hive git commit: HIVE-17510: Make comparison of filter predicates in q files deterministic (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)

2017-09-20 Thread jcamacho
http://git-wip-us.apache.org/repos/asf/hive/blob/c5b3ccc4/ql/src/test/results/clientpositive/llap/explainuser_1.q.out
--
diff --git a/ql/src/test/results/clientpositive/llap/explainuser_1.q.out 
b/ql/src/test/results/clientpositive/llap/explainuser_1.q.out
index 03ebe37..8ee41d0 100644
--- a/ql/src/test/results/clientpositive/llap/explainuser_1.q.out
+++ b/ql/src/test/results/clientpositive/llap/explainuser_1.q.out
@@ -469,7 +469,7 @@ Stage-0
 Group By Operator [GBY_6] (rows=1 width=101)
   
Output:["_col0","_col1","_col2","_col3"],aggregations:["sum(c_int)"],keys:key, 
c_int, c_float
   Filter Operator [FIL_39] (rows=2 width=93)
-predicate:(((c_int + 1) >= 0) and ((c_int 
> 0) or (c_float >= 0)) and (c_float > 0) and ((c_int >= 1) or (c_float >= 1)) 
and ((UDFToFloat(c_int) + c_float) >= 0) and key is not null)
+predicate:(((UDFToFloat(c_int) + c_float) 
>= 0) and ((c_int + 1) >= 0) and ((c_int > 0) or (c_float >= 0)) and ((c_int >= 
1) or (c_float >= 1)) and (c_float > 0) and key is not null)
 TableScan [TS_3] (rows=20 width=88)
   
default@cbo_t1,cbo_t1,Tbl:COMPLETE,Col:COMPLETE,Output:["key","c_int","c_float"]
   <-Reducer 8 [SIMPLE_EDGE] llap
@@ -485,7 +485,7 @@ Stage-0
 Group By Operator [GBY_14] (rows=1 width=93)
   Output:["_col0","_col1","_col2"],keys:key, 
c_int, c_float
   Filter Operator [FIL_40] (rows=2 width=93)
-predicate:(((c_int + 1) >= 0) and ((c_int 
> 0) or (c_float >= 0)) and (c_float > 0) and ((c_int >= 1) or (c_float >= 1)) 
and ((UDFToFloat(c_int) + c_float) >= 0) and key is not null)
+predicate:(((UDFToFloat(c_int) + c_float) 
>= 0) and ((c_int + 1) >= 0) and ((c_int > 0) or (c_float >= 0)) and ((c_int >= 
1) or (c_float >= 1)) and (c_float > 0) and key is not null)
 TableScan [TS_11] (rows=20 width=88)
   
default@cbo_t2,cbo_t2,Tbl:COMPLETE,Col:COMPLETE,Output:["key","c_int","c_float"]
 
@@ -540,7 +540,7 @@ Stage-0
   Group By Operator [GBY_6] (rows=1 width=101)
 
Output:["_col0","_col1","_col2","_col3"],aggregations:["sum(c_int)"],keys:key, 
c_int, c_float
 Filter Operator [FIL_36] (rows=2 width=93)
-  predicate:(((c_int + 1) >= 0) and ((c_int > 0) 
or (c_float >= 0)) and (c_float > 0) and ((c_int >= 1) or (c_float >= 1)) and 
((UDFToFloat(c_int) + c_float) >= 0) and key is not null)
+  predicate:(((UDFToFloat(c_int) + c_float) >= 0) 
and ((c_int + 1) >= 0) and ((c_int > 0) or (c_float >= 0)) and ((c_int >= 1) or 
(c_float >= 1)) and (c_float > 0) and key is not null)
   TableScan [TS_3] (rows=20 width=88)
 
default@cbo_t1,cbo_t1,Tbl:COMPLETE,Col:COMPLETE,Output:["key","c_int","c_float"]
 <-Reducer 7 [SIMPLE_EDGE] llap
@@ -556,7 +556,7 @@ Stage-0
   Group By Operator [GBY_14] (rows=1 width=93)
 Output:["_col0","_col1","_col2"],keys:key, c_int, 
c_float
 Filter Operator [FIL_37] (rows=2 width=93)
-  predicate:(((c_int + 1) >= 0) and ((c_int > 0) 
or (c_float >= 0)) and (c_float > 0) and ((c_int >= 1) or (c_float >= 1)) and 
((UDFToFloat(c_int) + c_float) >= 0) and key is not null)
+  predicate:(((UDFToFloat(c_int) + c_float) >= 0) 
and ((c_int + 1) >= 0) and ((c_int > 0) or (c_float >= 0)) and ((c_int >= 1) or 
(c_float >= 1)) and (c_float > 0) and key is not null)
   TableScan [TS_11] (rows=20 width=88)
 
default@cbo_t2,cbo_t2,Tbl:COMPLETE,Col:COMPLETE,Output:["key","c_int","c_float"]
 
@@ -616,7 +616,7 @@ Stage-0
   Group By Operator [GBY_6] (rows=1 width=101)
 
Output:["_col0","_col1","_col2","_col3"],aggregations:["sum(c_int)"],keys:key, 
c_int, c_float
 Filter Operator [FIL_38] (rows=2 width=93)
-  predicate:(((c_int + 1) >= 0) and ((c_int > 
0) or (c_float >= 0)) and (c_float > 0) and ((c_int >= 1) or (c_float >= 1)) 
and ((UDFToFloat(c_int) + c_float) >= 0) and key is not null)
+  predicate:(((UDFToFloat(c_int) + c_float) >= 
0) and ((c_int + 1) >= 0) and ((c_int > 0) or (c_float >= 0)) and ((c_int >= 1) 
or (c_float >= 1)) and (c_float > 0) and key is not null)
 

[01/16] hive git commit: HIVE-17510: Make comparison of filter predicates in q files deterministic (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)

2017-09-20 Thread jcamacho
Repository: hive
Updated Branches:
  refs/heads/master 0bdc570b7 -> c5b3ccc41


http://git-wip-us.apache.org/repos/asf/hive/blob/c5b3ccc4/ql/src/test/results/clientpositive/vectorization_7.q.out
--
diff --git a/ql/src/test/results/clientpositive/vectorization_7.q.out 
b/ql/src/test/results/clientpositive/vectorization_7.q.out
index afa9891..f6160e4 100644
--- a/ql/src/test/results/clientpositive/vectorization_7.q.out
+++ b/ql/src/test/results/clientpositive/vectorization_7.q.out
@@ -75,7 +75,7 @@ STAGE PLANS:
   className: VectorFilterOperator
   native: true
   predicateExpression: FilterExprAndExpr(children: 
FilterLongColNotEqualLongScalar(col 0, val 0) -> boolean, 
FilterExprOrExpr(children: FilterDoubleColLessEqualDoubleScalar(col 12, val 
0.0)(children: CastTimestampToDouble(col 8) -> 12:double) -> boolean, 
FilterLongColEqualLongColumn(col 0, col 2)(children: col 0) -> boolean, 
FilterStringColLikeStringScalar(col 7, pattern ss) -> boolean) -> boolean, 
FilterExprOrExpr(children: FilterDoubleScalarLessDoubleColumn(val 98.0, col 
5) -> boolean, FilterExprAndExpr(children: 
FilterDoubleColGreaterDoubleScalar(col 12, val -15.0)(children: 
CastTimestampToDouble(col 9) -> 12:double) -> boolean, 
FilterDoubleScalarGreaterEqualDoubleColumn(val 3569.0, col 5) -> boolean) -> 
boolean) -> boolean) -> boolean
-  predicate: ((ctinyint <> 0) and ((UDFToDouble(ctimestamp1) <= 
0.0) or (UDFToInteger(ctinyint) = cint) or (cstring2 like 'ss')) and ((98.0 
< cdouble) or ((UDFToDouble(ctimestamp2) > -15.0) and (3569.0 >= cdouble)))) 
(type: boolean)
+  predicate: (((98.0 < cdouble) or ((UDFToDouble(ctimestamp2) 
> -15.0) and (3569.0 >= cdouble))) and ((UDFToDouble(ctimestamp1) <= 0.0) or 
(UDFToInteger(ctinyint) = cint) or (cstring2 like 'ss')) and (ctinyint <> 0)) 
(type: boolean)
   Statistics: Num rows: 5461 Data size: 1174134 Basic stats: 
COMPLETE Column stats: NONE
   Select Operator
 expressions: cboolean1 (type: boolean), cbigint (type: 
bigint), csmallint (type: smallint), ctinyint (type: tinyint), ctimestamp1 
(type: timestamp), cstring1 (type: string), (cbigint + cbigint) (type: bigint), 
(UDFToInteger(csmallint) % -257) (type: int), (- csmallint) (type: smallint), 
(- ctinyint) (type: tinyint), (UDFToInteger((- ctinyint)) + 17) (type: int), 
(cbigint * UDFToLong((- csmallint))) (type: bigint), (cint % 
UDFToInteger(csmallint)) (type: int), (- ctinyint) (type: tinyint), ((- 
ctinyint) % ctinyint) (type: tinyint)
@@ -295,7 +295,7 @@ STAGE PLANS:
   className: VectorFilterOperator
   native: true
   predicateExpression: FilterExprAndExpr(children: 
FilterLongColNotEqualLongScalar(col 0, val 0) -> boolean, 
FilterExprOrExpr(children: FilterDoubleColLessEqualDoubleScalar(col 12, val 
0.0)(children: CastTimestampToDouble(col 8) -> 12:double) -> boolean, 
FilterLongColEqualLongColumn(col 0, col 2)(children: col 0) -> boolean, 
FilterStringColLikeStringScalar(col 7, pattern ss) -> boolean) -> boolean, 
FilterExprOrExpr(children: FilterDoubleScalarLessDoubleColumn(val 98.0, col 
5) -> boolean, FilterExprAndExpr(children: 
FilterDoubleColGreaterDoubleScalar(col 12, val 7.6855)(children: 
CastTimestampToDouble(col 9) -> 12:double) -> boolean, 
FilterDoubleScalarGreaterEqualDoubleColumn(val 3569.0, col 5) -> boolean) -> 
boolean) -> boolean) -> boolean
-  predicate: ((ctinyint <> 0) and ((UDFToDouble(ctimestamp1) <= 
0.0) or (UDFToInteger(ctinyint) = cint) or (cstring2 like 'ss')) and ((98.0 
< cdouble) or ((UDFToDouble(ctimestamp2) > 7.6855) and (3569.0 >= 
cdouble)))) (type: boolean)
+  predicate: (((98.0 < cdouble) or ((UDFToDouble(ctimestamp2) 
> 7.6855) and (3569.0 >= cdouble))) and ((UDFToDouble(ctimestamp1) 
<= 0.0) or (UDFToInteger(ctinyint) = cint) or (cstring2 like 'ss')) and 
(ctinyint <> 0)) (type: boolean)
   Statistics: Num rows: 5461 Data size: 1174134 Basic stats: 
COMPLETE Column stats: NONE
   Select Operator
 expressions: cboolean1 (type: boolean), cbigint (type: 
bigint), csmallint (type: smallint), ctinyint (type: tinyint), ctimestamp1 
(type: timestamp), cstring1 (type: string), (cbigint + cbigint) (type: bigint), 
(UDFToInteger(csmallint) % -257) (type: int), (- csmallint) (type: smallint), 
(- ctinyint) (type: tinyint), (UDFToInteger((- ctinyint)) + 17) (type: int), 
(cbigint * UDFToLong((- csmallint))) (type: bigint), (cint % 
UDFToInteger(csmallint)) (type: int), (- ctinyint) (type: tinyint), ((- 
ctinyint) % ctinyint) (type: tinyint)

http://git-wip-us.apache.org/repos/asf/hive/blob/c5b3ccc4/ql/src/test/results/clientpositive/vectorization_8.q.out
--
diff --git 

[06/16] hive git commit: HIVE-17510: Make comparison of filter predicates in q files deterministic (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)

2017-09-20 Thread jcamacho
http://git-wip-us.apache.org/repos/asf/hive/blob/c5b3ccc4/ql/src/test/results/clientpositive/skewjoinopt1.q.out
--
diff --git a/ql/src/test/results/clientpositive/skewjoinopt1.q.out 
b/ql/src/test/results/clientpositive/skewjoinopt1.q.out
index 82d4cbc..dc28cb6 100644
--- a/ql/src/test/results/clientpositive/skewjoinopt1.q.out
+++ b/ql/src/test/results/clientpositive/skewjoinopt1.q.out
@@ -54,7 +54,7 @@ STAGE PLANS:
 alias: a
 Statistics: Num rows: 1 Data size: 30 Basic stats: COMPLETE Column 
stats: NONE
 Filter Operator
-  predicate: (key is not null and ((key = '2') or (key = '3'))) 
(type: boolean)
+  predicate: (((key = '2') or (key = '3')) and key is not null) 
(type: boolean)
   Statistics: Num rows: 1 Data size: 30 Basic stats: COMPLETE 
Column stats: NONE
   Select Operator
 expressions: key (type: string), val (type: string)
@@ -70,7 +70,7 @@ STAGE PLANS:
 alias: b
 Statistics: Num rows: 1 Data size: 30 Basic stats: COMPLETE Column 
stats: NONE
 Filter Operator
-  predicate: (key is not null and ((key = '2') or (key = '3'))) 
(type: boolean)
+  predicate: (((key = '2') or (key = '3')) and key is not null) 
(type: boolean)
   Statistics: Num rows: 1 Data size: 30 Basic stats: COMPLETE 
Column stats: NONE
   Select Operator
 expressions: key (type: string), val (type: string)
@@ -129,7 +129,7 @@ STAGE PLANS:
 alias: a
 Statistics: Num rows: 1 Data size: 30 Basic stats: COMPLETE Column 
stats: NONE
 Filter Operator
-  predicate: (key is not null and (not ((key = '2') or (key = 
'3' (type: boolean)
+  predicate: ((not ((key = '2') or (key = '3'))) and key is not 
null) (type: boolean)
   Statistics: Num rows: 1 Data size: 30 Basic stats: COMPLETE 
Column stats: NONE
   Select Operator
 expressions: key (type: string), val (type: string)
@@ -145,7 +145,7 @@ STAGE PLANS:
 alias: b
 Statistics: Num rows: 1 Data size: 30 Basic stats: COMPLETE Column 
stats: NONE
 Filter Operator
-  predicate: (key is not null and (not ((key = '2') or (key = 
'3' (type: boolean)
+  predicate: ((not ((key = '2') or (key = '3'))) and key is not 
null) (type: boolean)
   Statistics: Num rows: 1 Data size: 30 Basic stats: COMPLETE 
Column stats: NONE
   Select Operator
 expressions: key (type: string), val (type: string)
@@ -376,7 +376,7 @@ STAGE PLANS:
 alias: a
 Statistics: Num rows: 1 Data size: 30 Basic stats: COMPLETE Column 
stats: NONE
 Filter Operator
-  predicate: (key is not null and ((key = '2') or (key = '3'))) 
(type: boolean)
+  predicate: (((key = '2') or (key = '3')) and key is not null) 
(type: boolean)
   Statistics: Num rows: 1 Data size: 30 Basic stats: COMPLETE 
Column stats: NONE
   Select Operator
 expressions: key (type: string)
@@ -391,7 +391,7 @@ STAGE PLANS:
 alias: b
 Statistics: Num rows: 1 Data size: 30 Basic stats: COMPLETE Column 
stats: NONE
 Filter Operator
-  predicate: (key is not null and ((key = '2') or (key = '3'))) 
(type: boolean)
+  predicate: (((key = '2') or (key = '3')) and key is not null) 
(type: boolean)
   Statistics: Num rows: 1 Data size: 30 Basic stats: COMPLETE 
Column stats: NONE
   Select Operator
 expressions: key (type: string)
@@ -465,7 +465,7 @@ STAGE PLANS:
 alias: a
 Statistics: Num rows: 1 Data size: 30 Basic stats: COMPLETE Column 
stats: NONE
 Filter Operator
-  predicate: (key is not null and (not ((key = '2') or (key = 
'3' (type: boolean)
+  predicate: ((not ((key = '2') or (key = '3'))) and key is not 
null) (type: boolean)
   Statistics: Num rows: 1 Data size: 30 Basic stats: COMPLETE 
Column stats: NONE
   Select Operator
 expressions: key (type: string)
@@ -480,7 +480,7 @@ STAGE PLANS:
 alias: b
 Statistics: Num rows: 1 Data size: 30 Basic stats: COMPLETE Column 
stats: NONE
 Filter Operator
-  predicate: (key is not null and (not ((key = '2') or (key = 
'3' (type: boolean)
+  predicate: ((not ((key = '2') or (key = '3'))) and key is not 
null) (type: boolean)
   Statistics: Num rows: 1 Data size: 30 Basic stats: COMPLETE 
Column stats: NONE
   Select Operator
 expressions: key (type: string)


[12/16] hive git commit: HIVE-17510: Make comparison of filter predicates in q files deterministic (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)

2017-09-20 Thread jcamacho
http://git-wip-us.apache.org/repos/asf/hive/blob/c5b3ccc4/ql/src/test/results/clientpositive/llap/subquery_multi.q.out
--
diff --git a/ql/src/test/results/clientpositive/llap/subquery_multi.q.out 
b/ql/src/test/results/clientpositive/llap/subquery_multi.q.out
index 92786f7..306fce8 100644
--- a/ql/src/test/results/clientpositive/llap/subquery_multi.q.out
+++ b/ql/src/test/results/clientpositive/llap/subquery_multi.q.out
@@ -99,7 +99,7 @@ STAGE PLANS:
   alias: part_null
   Statistics: Num rows: 5 Data size: 5600 Basic stats: 
COMPLETE Column stats: NONE
   Filter Operator
-predicate: (p_size is not null and p_brand is not null) 
(type: boolean)
+predicate: (p_brand is not null and p_size is not null) 
(type: boolean)
 Statistics: Num rows: 5 Data size: 5600 Basic stats: 
COMPLETE Column stats: NONE
 Select Operator
   expressions: p_partkey (type: int), p_name (type: 
string), p_mfgr (type: string), p_brand (type: string), p_type (type: string), 
p_size (type: int), p_container (type: string), p_retailprice (type: double), 
p_comment (type: string)
@@ -1015,7 +1015,7 @@ STAGE PLANS:
 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, 
_col6, _col7, _col8, _col9, _col10, _col12
 Statistics: Num rows: 5 Data size: 6726 Basic stats: COMPLETE 
Column stats: NONE
 Filter Operator
-  predicate: ((_col9 = 0) or (_col12 is null and _col1 is not 
null and (_col10 >= _col9))) (type: boolean)
+  predicate: ((_col12 is null and _col1 is not null and 
(_col10 >= _col9)) or (_col9 = 0)) (type: boolean)
   Statistics: Num rows: 2 Data size: 2690 Basic stats: 
COMPLETE Column stats: NONE
   Select Operator
 expressions: _col0 (type: int), _col1 (type: string), 
_col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: 
int), _col6 (type: string), _col7 (type: double), _col8 (type: string)
@@ -2216,7 +2216,7 @@ STAGE PLANS:
   alias: part
   Statistics: Num rows: 26 Data size: 8242 Basic stats: 
COMPLETE Column stats: COMPLETE
   Filter Operator
-predicate: (p_type is not null and p_container is not 
null) (type: boolean)
+predicate: (p_container is not null and p_type is not 
null) (type: boolean)
 Statistics: Num rows: 26 Data size: 8242 Basic stats: 
COMPLETE Column stats: COMPLETE
 Group By Operator
   keys: p_type (type: string), p_name (type: string), 
p_container (type: string)

http://git-wip-us.apache.org/repos/asf/hive/blob/c5b3ccc4/ql/src/test/results/clientpositive/llap/subquery_notin.q.out
--
diff --git a/ql/src/test/results/clientpositive/llap/subquery_notin.q.out 
b/ql/src/test/results/clientpositive/llap/subquery_notin.q.out
index ea3c78b..8c72fb8 100644
--- a/ql/src/test/results/clientpositive/llap/subquery_notin.q.out
+++ b/ql/src/test/results/clientpositive/llap/subquery_notin.q.out
@@ -1921,7 +1921,7 @@ STAGE PLANS:
 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, 
_col6, _col7, _col8, _col9, _col10, _col12
 Statistics: Num rows: 26 Data size: 16542 Basic stats: 
COMPLETE Column stats: COMPLETE
 Filter Operator
-  predicate: ((_col9 = 0) or (_col12 is null and _col5 is not 
null and (_col10 >= _col9))) (type: boolean)
+  predicate: ((_col12 is null and _col5 is not null and 
(_col10 >= _col9)) or (_col9 = 0)) (type: boolean)
   Statistics: Num rows: 26 Data size: 16542 Basic stats: 
COMPLETE Column stats: COMPLETE
   Select Operator
 expressions: _col0 (type: int), _col1 (type: string), 
_col2 (type: string), _col3 (type: string), _col4 (type: string), _col5 (type: 
int), _col6 (type: string), _col7 (type: double), _col8 (type: string)
@@ -2136,7 +2136,7 @@ STAGE PLANS:
 outputColumnNames: _col0, _col1, _col2, _col3, _col4, _col5, 
_col6, _col7, _col8, _col9, _col10, _col12
 Statistics: Num rows: 26 Data size: 16538 Basic stats: 
COMPLETE Column stats: COMPLETE
 Filter Operator
-  predicate: ((_col9 = 0) or (_col12 is null and _col0 is not 
null and _col5 is not null and (_col10 >= _col9))) (type: boolean)
+  predicate: ((_col12 is null and _col0 is not null and _col5 
is not null and (_col10 >= _col9)) or (_col9 = 0)) (type: boolean)
   Statistics: Num rows: 26 Data size: 16538 Basic stats: 
COMPLETE Column stats: COMPLETE
   Select Operator
 

[09/16] hive git commit: HIVE-17510: Make comparison of filter predicates in q files deterministic (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)

2017-09-20 Thread jcamacho
http://git-wip-us.apache.org/repos/asf/hive/blob/c5b3ccc4/ql/src/test/results/clientpositive/perf/query27.q.out
--
diff --git a/ql/src/test/results/clientpositive/perf/query27.q.out 
b/ql/src/test/results/clientpositive/perf/query27.q.out
index cbd7d29..c6a1905 100644
--- a/ql/src/test/results/clientpositive/perf/query27.q.out
+++ b/ql/src/test/results/clientpositive/perf/query27.q.out
@@ -125,7 +125,7 @@ Stage-0
 Select Operator [SEL_2] 
(rows=575995635 width=88)
   
Output:["_col0","_col1","_col2","_col3","_col4","_col5","_col6","_col7"]
   Filter Operator [FIL_51] 
(rows=575995635 width=88)
-predicate:(ss_cdemo_sk is not null 
and ss_sold_date_sk is not null and ss_store_sk is not null and ss_item_sk is 
not null)
+predicate:(ss_cdemo_sk is not null 
and ss_item_sk is not null and ss_sold_date_sk is not null and ss_store_sk is 
not null)
 TableScan [TS_0] (rows=575995635 
width=88)
   
default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_sold_date_sk","ss_item_sk","ss_cdemo_sk","ss_store_sk","ss_quantity","ss_list_price","ss_sales_price","ss_coupon_amt"]
 <-Map 8 [SIMPLE_EDGE]
@@ -134,7 +134,7 @@ Stage-0
 Select Operator [SEL_5] (rows=232725 
width=385)
   Output:["_col0"]
   Filter Operator [FIL_52] 
(rows=232725 width=385)
-predicate:((cd_gender = 'M') and 
(cd_marital_status = 'U') and (cd_education_status = '2 yr Degree') and 
cd_demo_sk is not null)
+predicate:((cd_education_status = 
'2 yr Degree') and (cd_gender = 'M') and (cd_marital_status = 'U') and 
cd_demo_sk is not null)
 TableScan [TS_3] (rows=1861800 
width=385)
   
default@customer_demographics,customer_demographics,Tbl:COMPLETE,Col:NONE,Output:["cd_demo_sk","cd_gender","cd_marital_status","cd_education_status"]
 

http://git-wip-us.apache.org/repos/asf/hive/blob/c5b3ccc4/ql/src/test/results/clientpositive/perf/query28.q.out
--
diff --git a/ql/src/test/results/clientpositive/perf/query28.q.out 
b/ql/src/test/results/clientpositive/perf/query28.q.out
index 48e3dc5..33dc1ae 100644
--- a/ql/src/test/results/clientpositive/perf/query28.q.out
+++ b/ql/src/test/results/clientpositive/perf/query28.q.out
@@ -150,7 +150,7 @@ Stage-0
 Select Operator [SEL_23] (rows=21333171 width=88)
   Output:["ss_list_price"]
   Filter Operator [FIL_55] (rows=21333171 width=88)
-predicate:(ss_quantity BETWEEN 16 AND 20 and 
(ss_list_price BETWEEN 142 AND 152 or ss_coupon_amt BETWEEN 3054 AND 4054 or 
ss_wholesale_cost BETWEEN 80 AND 100))
+predicate:((ss_list_price BETWEEN 142 AND 152 
or ss_coupon_amt BETWEEN 3054 AND 4054 or ss_wholesale_cost BETWEEN 80 AND 100) 
and ss_quantity BETWEEN 16 AND 20)
 TableScan [TS_0] (rows=575995635 width=88)
   
default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_quantity","ss_wholesale_cost","ss_list_price","ss_coupon_amt"]
 <-Reducer 12 [CUSTOM_SIMPLE_EDGE]
@@ -171,7 +171,7 @@ Stage-0
 Select Operator [SEL_30] (rows=21333171 width=88)
   Output:["ss_list_price"]
   Filter Operator [FIL_56] (rows=21333171 width=88)
-predicate:(ss_quantity BETWEEN 11 AND 15 and 
(ss_list_price BETWEEN 66 AND 76 or ss_coupon_amt BETWEEN 920 AND 1920 or 
ss_wholesale_cost BETWEEN 4 AND 24))
+predicate:((ss_list_price BETWEEN 66 AND 76 or 
ss_coupon_amt BETWEEN 920 AND 1920 or ss_wholesale_cost BETWEEN 4 AND 24) and 
ss_quantity BETWEEN 11 AND 15)
  Please refer to the previous TableScan [TS_0]
 <-Reducer 14 [CUSTOM_SIMPLE_EDGE]
   PARTITION_ONLY_SHUFFLE [RS_47]
@@ -191,7 +191,7 @@ Stage-0
 Select Operator [SEL_37] (rows=21333171 width=88)
   Output:["ss_list_price"]
   Filter Operator [FIL_57] (rows=21333171 width=88)
-predicate:(ss_quantity BETWEEN 6 AND 10 and 
(ss_list_price BETWEEN 91 AND 101 or ss_coupon_amt BETWEEN 1430 AND 2430 

[07/16] hive git commit: HIVE-17510: Make comparison of filter predicates in q files deterministic (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)

2017-09-20 Thread jcamacho
http://git-wip-us.apache.org/repos/asf/hive/blob/c5b3ccc4/ql/src/test/results/clientpositive/ppd_gby_join.q.out
--
diff --git a/ql/src/test/results/clientpositive/ppd_gby_join.q.out 
b/ql/src/test/results/clientpositive/ppd_gby_join.q.out
index 8519174..75d0a62 100644
--- a/ql/src/test/results/clientpositive/ppd_gby_join.q.out
+++ b/ql/src/test/results/clientpositive/ppd_gby_join.q.out
@@ -31,7 +31,7 @@ STAGE PLANS:
 alias: src
 Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE 
Column stats: NONE
 Filter Operator
-  predicate: ((key < '400') and (key > '20') and ((value < 
'val_50') or (key > '2')) and (key <> '4')) (type: boolean)
+  predicate: (((value < 'val_50') or (key > '2')) and (key < 
'400') and (key <> '4') and (key > '20')) (type: boolean)
   Statistics: Num rows: 36 Data size: 382 Basic stats: COMPLETE 
Column stats: NONE
   Select Operator
 expressions: key (type: string)
@@ -67,7 +67,7 @@ STAGE PLANS:
   outputColumnNames: _col0, _col1
   Statistics: Num rows: 60 Data size: 642 Basic stats: COMPLETE Column 
stats: NONE
   Filter Operator
-predicate: ((_col1 > '50') or (_col0 < '50')) (type: boolean)
+predicate: ((_col0 < '50') or (_col1 > '50')) (type: boolean)
 Statistics: Num rows: 40 Data size: 428 Basic stats: COMPLETE 
Column stats: NONE
 Select Operator
   expressions: _col0 (type: string)
@@ -301,7 +301,7 @@ STAGE PLANS:
 alias: src
 Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE 
Column stats: NONE
 Filter Operator
-  predicate: ((key < '400') and (key > '20') and ((value < 
'val_50') or (key > '2')) and (key <> '4')) (type: boolean)
+  predicate: (((value < 'val_50') or (key > '2')) and (key < 
'400') and (key <> '4') and (key > '20')) (type: boolean)
   Statistics: Num rows: 36 Data size: 382 Basic stats: COMPLETE 
Column stats: NONE
   Select Operator
 expressions: key (type: string)
@@ -337,7 +337,7 @@ STAGE PLANS:
   outputColumnNames: _col0, _col1
   Statistics: Num rows: 60 Data size: 642 Basic stats: COMPLETE Column 
stats: NONE
   Filter Operator
-predicate: ((_col1 > '50') or (_col0 < '50')) (type: boolean)
+predicate: ((_col0 < '50') or (_col1 > '50')) (type: boolean)
 Statistics: Num rows: 40 Data size: 428 Basic stats: COMPLETE 
Column stats: NONE
 Select Operator
   expressions: _col0 (type: string)

http://git-wip-us.apache.org/repos/asf/hive/blob/c5b3ccc4/ql/src/test/results/clientpositive/ppd_join.q.out
--
diff --git a/ql/src/test/results/clientpositive/ppd_join.q.out 
b/ql/src/test/results/clientpositive/ppd_join.q.out
index 0d09633..02aa5c2 100644
--- a/ql/src/test/results/clientpositive/ppd_join.q.out
+++ b/ql/src/test/results/clientpositive/ppd_join.q.out
@@ -28,7 +28,7 @@ STAGE PLANS:
 alias: src
 Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE 
Column stats: NONE
 Filter Operator
-  predicate: ((key < '400') and (key > '20') and ((value < 
'val_50') or (key > '2')) and (key <> '4')) (type: boolean)
+  predicate: (((value < 'val_50') or (key > '2')) and (key < 
'400') and (key <> '4') and (key > '20')) (type: boolean)
   Statistics: Num rows: 36 Data size: 382 Basic stats: COMPLETE 
Column stats: NONE
   Select Operator
 expressions: key (type: string)
@@ -65,7 +65,7 @@ STAGE PLANS:
   outputColumnNames: _col0, _col1, _col2
   Statistics: Num rows: 60 Data size: 642 Basic stats: COMPLETE Column 
stats: NONE
   Filter Operator
-predicate: ((_col1 > '50') or (_col0 < '50')) (type: boolean)
+predicate: ((_col0 < '50') or (_col1 > '50')) (type: boolean)
 Statistics: Num rows: 40 Data size: 428 Basic stats: COMPLETE 
Column stats: NONE
 Select Operator
   expressions: _col0 (type: string), _col2 (type: string)
@@ -553,7 +553,7 @@ STAGE PLANS:
 alias: src
 Statistics: Num rows: 500 Data size: 5312 Basic stats: COMPLETE 
Column stats: NONE
 Filter Operator
-  predicate: ((key < '400') and (key > '20') and ((value < 
'val_50') or (key > '2')) and (key <> '4')) (type: boolean)
+  predicate: (((value < 'val_50') or (key > '2')) and (key < 
'400') and (key <> '4') and (key > '20')) (type: boolean)
   Statistics: Num rows: 36 Data size: 382 Basic stats: COMPLETE 
Column stats: NONE
   Select Operator
 expressions: key (type: string)
@@ -590,7 +590,7 @@ 

[05/16] hive git commit: HIVE-17510: Make comparison of filter predicates in q files deterministic (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)

2017-09-20 Thread jcamacho
http://git-wip-us.apache.org/repos/asf/hive/blob/c5b3ccc4/ql/src/test/results/clientpositive/spark/dynamic_rdd_cache.q.out
--
diff --git a/ql/src/test/results/clientpositive/spark/dynamic_rdd_cache.q.out 
b/ql/src/test/results/clientpositive/spark/dynamic_rdd_cache.q.out
index 1ed388f..4f4 100644
--- a/ql/src/test/results/clientpositive/spark/dynamic_rdd_cache.q.out
+++ b/ql/src/test/results/clientpositive/spark/dynamic_rdd_cache.q.out
@@ -746,7 +746,7 @@ STAGE PLANS:
   alias: inventory
   Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL 
Column stats: NONE
   Filter Operator
-predicate: (inv_item_sk is not null and inv_warehouse_sk 
is not null and inv_date_sk is not null) (type: boolean)
+predicate: (inv_date_sk is not null and inv_item_sk is not 
null and inv_warehouse_sk is not null) (type: boolean)
 Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL 
Column stats: NONE
 Select Operator
   expressions: inv_date_sk (type: int), inv_item_sk (type: 
int), inv_quantity_on_hand (type: int), inv_warehouse_sk (type: int)
@@ -764,7 +764,7 @@ STAGE PLANS:
   alias: date_dim
   Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL 
Column stats: NONE
   Filter Operator
-predicate: ((d_year = 1999) and (d_moy = 3) and d_date_sk 
is not null) (type: boolean)
+predicate: ((d_moy = 3) and (d_year = 1999) and d_date_sk 
is not null) (type: boolean)
 Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL 
Column stats: NONE
 Select Operator
   expressions: d_date_sk (type: int)
@@ -781,7 +781,7 @@ STAGE PLANS:
   alias: inventory
   Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL 
Column stats: NONE
   Filter Operator
-predicate: (inv_item_sk is not null and inv_warehouse_sk 
is not null and inv_date_sk is not null) (type: boolean)
+predicate: (inv_date_sk is not null and inv_item_sk is not 
null and inv_warehouse_sk is not null) (type: boolean)
 Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL 
Column stats: NONE
 Select Operator
   expressions: inv_date_sk (type: int), inv_item_sk (type: 
int), inv_quantity_on_hand (type: int), inv_warehouse_sk (type: int)
@@ -834,7 +834,7 @@ STAGE PLANS:
   alias: date_dim
   Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL 
Column stats: NONE
   Filter Operator
-predicate: ((d_year = 1999) and (d_moy = 4) and d_date_sk 
is not null) (type: boolean)
+predicate: ((d_moy = 4) and (d_year = 1999) and d_date_sk 
is not null) (type: boolean)
 Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL 
Column stats: NONE
 Select Operator
   expressions: d_date_sk (type: int)

http://git-wip-us.apache.org/repos/asf/hive/blob/c5b3ccc4/ql/src/test/results/clientpositive/spark/groupby_multi_single_reducer3.q.out
--
diff --git 
a/ql/src/test/results/clientpositive/spark/groupby_multi_single_reducer3.q.out 
b/ql/src/test/results/clientpositive/spark/groupby_multi_single_reducer3.q.out
index b612747..f9eaa3c 100644
--- 
a/ql/src/test/results/clientpositive/spark/groupby_multi_single_reducer3.q.out
+++ 
b/ql/src/test/results/clientpositive/spark/groupby_multi_single_reducer3.q.out
@@ -56,7 +56,7 @@ STAGE PLANS:
   alias: src
   Statistics: Num rows: 500 Data size: 5312 Basic stats: 
COMPLETE Column stats: NONE
   Filter Operator
-predicate: (((value) IN ('val_400', 'val_500') and (key) 
IN (400, 450)) or ((value) IN ('val_100', 'val_200', 'val_300') and (key) IN 
(100, 150, 200))) (type: boolean)
+predicate: (((value) IN ('val_100', 'val_200', 'val_300') 
and (key) IN (100, 150, 200)) or ((value) IN ('val_400', 'val_500') and (key) 
IN (400, 450))) (type: boolean)
 Statistics: Num rows: 250 Data size: 2656 Basic stats: 
COMPLETE Column stats: NONE
 Reduce Output Operator
   key expressions: key (type: string)
@@ -69,7 +69,7 @@ STAGE PLANS:
   Forward
 Statistics: Num rows: 250 Data size: 2656 Basic stats: 
COMPLETE Column stats: NONE
 Filter Operator
-  predicate: ((VALUE._col0) IN ('val_100', 'val_200', 
'val_300') and (KEY._col0) IN (100, 150, 200)) (type: boolean)
+  predicate: ((KEY._col0) IN (100, 150, 

[08/16] hive git commit: HIVE-17510: Make comparison of filter predicates in q files deterministic (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)

2017-09-20 Thread jcamacho
http://git-wip-us.apache.org/repos/asf/hive/blob/c5b3ccc4/ql/src/test/results/clientpositive/perf/query60.q.out
--
diff --git a/ql/src/test/results/clientpositive/perf/query60.q.out 
b/ql/src/test/results/clientpositive/perf/query60.q.out
index 13974bc..84a9ada 100644
--- a/ql/src/test/results/clientpositive/perf/query60.q.out
+++ b/ql/src/test/results/clientpositive/perf/query60.q.out
@@ -232,7 +232,7 @@ Stage-0
   Select Operator [SEL_15] (rows=18262 
width=1119)
 Output:["_col0"]
 Filter Operator [FIL_161] 
(rows=18262 width=1119)
-  predicate:((d_year = 1999) and 
(d_moy = 9) and d_date_sk is not null)
+  predicate:((d_moy = 9) and 
(d_year = 1999) and d_date_sk is not null)
   TableScan [TS_13] (rows=73049 
width=1119)
 
default@date_dim,date_dim,Tbl:COMPLETE,Col:NONE,Output:["d_date_sk","d_year","d_moy"]
   <-Map 27 [SIMPLE_EDGE]
@@ -241,7 +241,7 @@ Stage-0
   Select Operator [SEL_49] 
(rows=287989836 width=135)
 
Output:["_col0","_col1","_col2","_col3"]
 Filter Operator [FIL_165] 
(rows=287989836 width=135)
-  predicate:(cs_sold_date_sk is 
not null and cs_bill_addr_sk is not null and cs_item_sk is not null)
+  predicate:(cs_bill_addr_sk is 
not null and cs_item_sk is not null and cs_sold_date_sk is not null)
   TableScan [TS_47] 
(rows=287989836 width=135)
 
default@catalog_sales,catalog_sales,Tbl:COMPLETE,Col:NONE,Output:["cs_sold_date_sk","cs_bill_addr_sk","cs_item_sk","cs_ext_sales_price"]
 <-Reducer 8 [SIMPLE_EDGE]
@@ -332,7 +332,7 @@ Stage-0
   Select Operator [SEL_87] 
(rows=144002668 width=135)
 
Output:["_col0","_col1","_col2","_col3"]
 Filter Operator [FIL_170] 
(rows=144002668 width=135)
-  predicate:(ws_sold_date_sk is 
not null and ws_bill_addr_sk is not null and ws_item_sk is not null)
+  predicate:(ws_bill_addr_sk is 
not null and ws_item_sk is not null and ws_sold_date_sk is not null)
   TableScan [TS_85] 
(rows=144002668 width=135)
 
default@web_sales,web_sales,Tbl:COMPLETE,Col:NONE,Output:["ws_sold_date_sk","ws_item_sk","ws_bill_addr_sk","ws_ext_sales_price"]
 <-Reducer 4 [CONTAINS]
@@ -393,7 +393,7 @@ Stage-0
   Select Operator [SEL_12] 
(rows=575995635 width=88)
 
Output:["_col0","_col1","_col2","_col3"]
 Filter Operator [FIL_160] 
(rows=575995635 width=88)
-  predicate:(ss_sold_date_sk is 
not null and ss_addr_sk is not null and ss_item_sk is not null)
+  predicate:(ss_addr_sk is not 
null and ss_item_sk is not null and ss_sold_date_sk is not null)
   TableScan [TS_10] 
(rows=575995635 width=88)
 
default@store_sales,store_sales,Tbl:COMPLETE,Col:NONE,Output:["ss_sold_date_sk","ss_item_sk","ss_addr_sk","ss_ext_sales_price"]
 

http://git-wip-us.apache.org/repos/asf/hive/blob/c5b3ccc4/ql/src/test/results/clientpositive/perf/query61.q.out
--
diff --git a/ql/src/test/results/clientpositive/perf/query61.q.out 
b/ql/src/test/results/clientpositive/perf/query61.q.out
index 93d4d42..0b4f5fd 100644
--- a/ql/src/test/results/clientpositive/perf/query61.q.out
+++ b/ql/src/test/results/clientpositive/perf/query61.q.out
@@ -183,7 +183,7 @@ Stage-0
   Select Operator [SEL_11] 
(rows=18262 width=1119)
 Output:["_col0"]
 Filter Operator [FIL_135] 
(rows=18262 width=1119)
-  predicate:((d_year = 1999) 
and (d_moy = 11) and d_date_sk is not null)
+  predicate:((d_moy = 11) and 
(d_year = 1999) and d_date_sk is not null)
 

hive git commit: HIVE-17560 : HiveMetastore doesn't start in secure cluster if repl change manager is enabled (Thejas Nair, reviewed by Daniel Dai)

2017-09-20 Thread thejas
Repository: hive
Updated Branches:
  refs/heads/master 1c2e999c7 -> 0bdc570b7


HIVE-17560 : HiveMetastore doesn't start in secure cluster if repl change 
manager is enabled (Thejas Nair, reviewed by Daniel Dai)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/0bdc570b
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/0bdc570b
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/0bdc570b

Branch: refs/heads/master
Commit: 0bdc570b7535b8bfd8380108ec4e01c92c0e2a93
Parents: 1c2e999
Author: Thejas M Nair 
Authored: Wed Sep 20 12:08:40 2017 -0700
Committer: Thejas M Nair 
Committed: Wed Sep 20 12:08:40 2017 -0700

--
 .../org/apache/hadoop/hive/metastore/HiveMetaStore.java | 9 +
 1 file changed, 9 insertions(+)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/0bdc570b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
--
diff --git 
a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java 
b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
index 8bbc325..b863d48 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
@@ -136,6 +136,7 @@ import org.apache.hadoop.hive.metastore.txn.TxnUtils;
 import org.apache.hadoop.hive.serde2.Deserializer;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.shims.Utils;
+import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.StringUtils;
@@ -7641,6 +7642,14 @@ public class HiveMetaStore extends ThriftHiveMetastore {
   boolean useSSL = conf.getBoolVar(ConfVars.HIVE_METASTORE_USE_SSL);
   useSasl = conf.getBoolVar(HiveConf.ConfVars.METASTORE_USE_THRIFT_SASL);
 
+  if (useSasl) {
+// we are in secure mode. Login using keytab
+String kerberosName = SecurityUtil
+
.getServerPrincipal(conf.getVar(ConfVars.METASTORE_KERBEROS_PRINCIPAL), 
"0.0.0.0");
+String keyTabFile = 
conf.getVar(ConfVars.METASTORE_KERBEROS_KEYTAB_FILE);
+UserGroupInformation.loginUserFromKeytab(kerberosName, keyTabFile);
+  }
+
   TProcessor processor;
   TTransportFactory transFactory;
   final TProtocolFactory protocolFactory;



hive git commit: HIVE-17428 : REPL LOAD of ALTER_PARTITION event doesn't create import tasks if the partition doesn't exist during analyze phase (Sankar Hariappan, reviewed by Anishek Agarwal, Thejas

2017-09-20 Thread thejas
Repository: hive
Updated Branches:
  refs/heads/master 02d359db2 -> 1c2e999c7


HIVE-17428 : REPL LOAD of ALTER_PARTITION event doesn't create import tasks if 
the partition doesn't exist during analyze phase (Sankar Hariappan, reviewed by 
Anishek Agarwal, Thejas Nair)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/1c2e999c
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/1c2e999c
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/1c2e999c

Branch: refs/heads/master
Commit: 1c2e999c79440d8b596235829de8fb6a185bd3cc
Parents: 02d359d
Author: Sankar Hariappan 
Authored: Wed Sep 20 09:29:58 2017 -0700
Committer: Thejas M Nair 
Committed: Wed Sep 20 12:07:48 2017 -0700

--
 .../hive/ql/parse/TestReplicationScenarios.java | 57 +---
 .../java/org/apache/hadoop/hive/ql/Driver.java  | 23 ++--
 .../hive/ql/parse/ImportSemanticAnalyzer.java   | 15 +-
 .../ql/parse/ReplicationSemanticAnalyzer.java   |  3 +-
 4 files changed, 35 insertions(+), 63 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/1c2e999c/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
--
diff --git 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
index 2e880c7..d87a4c0 100644
--- 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
+++ 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenarios.java
@@ -234,8 +234,6 @@ public class TestReplicationScenarios {
   }
 
   private void loadAndVerify(String replDbName, String dumpLocation, String 
lastReplId) throws IOException {
-run("EXPLAIN REPL LOAD " + replDbName + " FROM '" + dumpLocation + "'", 
driverMirror);
-printOutput(driverMirror);
 run("REPL LOAD " + replDbName + " FROM '" + dumpLocation + "'", 
driverMirror);
 verifyRun("REPL STATUS " + replDbName, lastReplId, driverMirror);
 return;
@@ -336,8 +334,6 @@ public class TestReplicationScenarios {
 // Partition droppped after "repl dump"
 run("ALTER TABLE " + dbName + ".ptned " + "DROP PARTITION(b=1)", driver);
 
-run("EXPLAIN REPL LOAD " + dbName + "_dupe FROM '" + replDumpLocn + "'", 
driverMirror);
-printOutput(driverMirror);
 run("REPL LOAD " + dbName + "_dupe FROM '" + replDumpLocn + "'", 
driverMirror);
 
 run("REPL STATUS " + dbName + "_dupe", driverMirror);
@@ -750,8 +746,6 @@ public class TestReplicationScenarios {
 String incrementalDumpLocn = getResult(0,0,driver);
 String incrementalDumpId = getResult(0,1,true,driver);
 LOG.info("Dumped to {} with id {}", incrementalDumpLocn, 
incrementalDumpId);
-run("EXPLAIN REPL LOAD " + dbName + "_dupe FROM '" + incrementalDumpLocn + 
"'", driverMirror);
-printOutput(driverMirror);
 run("REPL LOAD " + dbName + "_dupe FROM '"+incrementalDumpLocn+"'", 
driverMirror);
 
 run("REPL STATUS " + dbName + "_dupe", driverMirror);
@@ -876,8 +870,6 @@ public class TestReplicationScenarios {
 String incrementalDumpLocn = getResult(0, 0, driver);
 String incrementalDumpId = getResult(0, 1, true, driver);
 LOG.info("Incremental-Dump: Dumped to {} with id {} from {}", 
incrementalDumpLocn, incrementalDumpId, replDumpId);
-run("EXPLAIN REPL LOAD " + dbName + "_dupe FROM '" + incrementalDumpLocn + 
"'", driverMirror);
-printOutput(driverMirror);
 run("REPL LOAD " + dbName + "_dupe FROM '" + incrementalDumpLocn + "'", 
driverMirror);
 verifyRun("SELECT a from " + dbName + "_dupe.unptned ORDER BY a", 
unptn_data, driverMirror);
   }
@@ -927,8 +919,6 @@ public class TestReplicationScenarios {
 run("REPL DUMP " + dbName, driver);
 String replDumpLocn = getResult(0,0,driver);
 String replDumpId = getResult(0,1,true,driver);
-run("EXPLAIN REPL LOAD " + dbName + "_dupe FROM '" + replDumpLocn + "'", 
driverMirror);
-printOutput(driverMirror);
 run("REPL LOAD " + dbName + "_dupe FROM '" + replDumpLocn + "'", 
driverMirror);
 verifySetup("REPL STATUS " + dbName + "_dupe", new String[]{replDumpId}, 
driverMirror);
 
@@ -958,8 +948,6 @@ public class TestReplicationScenarios {
 String postDropReplDumpLocn = getResult(0,0,driver);
 String postDropReplDumpId = getResult(0,1,true,driver);
 LOG.info("Dumped to {} with id {}->{}", postDropReplDumpLocn, replDumpId, 
postDropReplDumpId);
-run("EXPLAIN REPL LOAD " + dbName + "_dupe FROM '" + postDropReplDumpLocn 
+ "'", driverMirror);
-printOutput(driverMirror);
 run("REPL LOAD " + dbName + "_dupe FROM '" + 

[2/2] hive git commit: HIVE-15899 Make CTAS with acid target table and insert into acid_tbl select ... union all ... work (Eugene Koifman, reviewed by Ashutosh Chauhan)

2017-09-20 Thread ekoifman
HIVE-15899 Make CTAS with acid target table and insert into acid_tbl select ... 
union all ... work (Eugene Koifman, reviewed by Ashutosh Chauhan)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/02d359db
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/02d359db
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/02d359db

Branch: refs/heads/master
Commit: 02d359db235cc646928ff2b7f5b1fe2d88c7ad12
Parents: 5608300
Author: Eugene Koifman 
Authored: Wed Sep 20 10:46:21 2017 -0700
Committer: Eugene Koifman 
Committed: Wed Sep 20 10:46:21 2017 -0700

--
 .../apache/hadoop/hive/ql/TestAcidOnTez.java| 329 +++
 .../java/org/apache/hadoop/hive/ql/Context.java |   3 +
 .../java/org/apache/hadoop/hive/ql/Driver.java  |   6 +-
 .../apache/hadoop/hive/ql/exec/MoveTask.java|  13 +-
 .../org/apache/hadoop/hive/ql/io/AcidUtils.java |  12 +
 .../apache/hadoop/hive/ql/metadata/Hive.java|  43 ++-
 .../ql/optimizer/QueryPlanPostProcessor.java| 166 ++
 .../optimizer/unionproc/UnionProcFactory.java   |   3 +-
 .../hadoop/hive/ql/parse/SemanticAnalyzer.java  |  28 +-
 .../apache/hadoop/hive/ql/plan/LoadDesc.java|  22 +-
 .../hadoop/hive/ql/plan/LoadFileDesc.java   |  20 +-
 .../hadoop/hive/ql/plan/LoadTableDesc.java  |  26 +-
 .../hadoop/hive/ql/plan/TezEdgeProperty.java|  10 +-
 .../apache/hadoop/hive/ql/TestTxnNoBuckets.java | 189 +--
 .../clientpositive/autoColumnStats_4.q.out  |   1 +
 .../clientpositive/llap/acid_no_buckets.q.out   |   8 +
 .../llap/dynamic_semijoin_reduction_3.q.out |   7 +
 .../llap/dynpart_sort_optimization_acid.q.out   |  12 +
 .../results/clientpositive/llap/sqlmerge.q.out  |   4 +
 19 files changed, 751 insertions(+), 151 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/02d359db/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestAcidOnTez.java
--
diff --git 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestAcidOnTez.java 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestAcidOnTez.java
index d0b5cf6..8b4b21f 100644
--- 
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestAcidOnTez.java
+++ 
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestAcidOnTez.java
@@ -33,6 +33,7 @@ import 
org.apache.hadoop.hive.metastore.api.ShowCompactResponse;
 import org.apache.hadoop.hive.metastore.txn.TxnDbUtil;
 import org.apache.hadoop.hive.metastore.txn.TxnStore;
 import org.apache.hadoop.hive.metastore.txn.TxnUtils;
+import org.apache.hadoop.hive.ql.exec.AbstractFileMergeOperator;
 import org.apache.hadoop.hive.ql.io.BucketCodec;
 import org.apache.hadoop.hive.ql.io.HiveInputFormat;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
@@ -88,7 +89,6 @@ public class TestAcidOnTez {
 hiveConf = new HiveConf(this.getClass());
 hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
 hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
-hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
 hiveConf.set(HiveConf.ConfVars.METASTOREWAREHOUSE.varname, 
TEST_WAREHOUSE_DIR);
 hiveConf.setVar(HiveConf.ConfVars.HIVEMAPREDMODE, "nonstrict");
 hiveConf.setVar(HiveConf.ConfVars.HIVEINPUTFORMAT, 
HiveInputFormat.class.getName());
@@ -179,14 +179,14 @@ public class TestAcidOnTez {
 runStatementOnDriver("create table " + Table.NONACIDNONBUCKET + " stored 
as ORC TBLPROPERTIES('transactional'='false') as " +
   "select a, b from " + Table.ACIDTBL + " where a <= 5 union all select a, 
b from " + Table.NONACIDORCTBL + " where a >= 5", confForTez);
 
-List rs = runStatementOnDriver("select a, b, INPUT__FILE__NAME 
from " + Table.NONACIDNONBUCKET + " order by a, b, INPUT__FILE__NAME");
+List rs = runStatementOnDriver("select a, b, INPUT__FILE__NAME 
from " + Table.NONACIDNONBUCKET + " order by a, b, INPUT__FILE__NAME", 
confForTez);
 String expected0[][] = {
-  {"1\t2", "/1/00_0"},
-  {"3\t4", "/1/00_0"},
-  {"5\t6", "/1/00_0"},
-  {"5\t6", "/2/00_0"},
-  {"7\t8", "/2/00_0"},
-  {"9\t10", "/2/00_0"},
+  {"1\t2", AbstractFileMergeOperator.UNION_SUDBIR_PREFIX + "1/00_0"},
+  {"3\t4", AbstractFileMergeOperator.UNION_SUDBIR_PREFIX + "1/00_0"},
+  {"5\t6", AbstractFileMergeOperator.UNION_SUDBIR_PREFIX + "1/00_0"},
+  {"5\t6", AbstractFileMergeOperator.UNION_SUDBIR_PREFIX + "2/00_0"},
+  {"7\t8", AbstractFileMergeOperator.UNION_SUDBIR_PREFIX + "2/00_0"},
+  {"9\t10", AbstractFileMergeOperator.UNION_SUDBIR_PREFIX + "2/00_0"},
 };
 Assert.assertEquals("Unexpected row count 

[1/2] hive git commit: HIVE-15899 Make CTAS with acid target table and insert into acid_tbl select ... union all ... work (Eugene Koifman, reviewed by Ashutosh Chauhan)

2017-09-20 Thread ekoifman
Repository: hive
Updated Branches:
  refs/heads/master 56083008b -> 02d359db2


http://git-wip-us.apache.org/repos/asf/hive/blob/02d359db/ql/src/test/results/clientpositive/llap/acid_no_buckets.q.out
--
diff --git a/ql/src/test/results/clientpositive/llap/acid_no_buckets.q.out 
b/ql/src/test/results/clientpositive/llap/acid_no_buckets.q.out
index 34dd487..7905194 100644
--- a/ql/src/test/results/clientpositive/llap/acid_no_buckets.q.out
+++ b/ql/src/test/results/clientpositive/llap/acid_no_buckets.q.out
@@ -166,6 +166,7 @@ STAGE PLANS:
   output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
   serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
   name: default.srcpart_acid
+  Write Type: UPDATE
 
   Stage: Stage-3
 Stats-Aggr Operator
@@ -344,6 +345,7 @@ STAGE PLANS:
   output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
   serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
   name: default.srcpart_acid
+  Write Type: DELETE
 
   Stage: Stage-3
 Stats-Aggr Operator
@@ -660,6 +662,7 @@ STAGE PLANS:
   output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
   serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
   name: default.srcpart_acidb
+  Write Type: UPDATE
 
   Stage: Stage-3
 Stats-Aggr Operator
@@ -839,6 +842,7 @@ STAGE PLANS:
   output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
   serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
   name: default.srcpart_acidb
+  Write Type: DELETE
 
   Stage: Stage-3
 Stats-Aggr Operator
@@ -1154,6 +1158,7 @@ STAGE PLANS:
   output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
   serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
   name: default.srcpart_acidv
+  Write Type: UPDATE
 
   Stage: Stage-3
 Stats-Aggr Operator
@@ -1332,6 +1337,7 @@ STAGE PLANS:
   output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
   serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
   name: default.srcpart_acidv
+  Write Type: DELETE
 
   Stage: Stage-3
 Stats-Aggr Operator
@@ -1648,6 +1654,7 @@ STAGE PLANS:
   output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
   serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
   name: default.srcpart_acidvb
+  Write Type: UPDATE
 
   Stage: Stage-3
 Stats-Aggr Operator
@@ -1827,6 +1834,7 @@ STAGE PLANS:
   output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
   serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
   name: default.srcpart_acidvb
+  Write Type: DELETE
 
   Stage: Stage-3
 Stats-Aggr Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/02d359db/ql/src/test/results/clientpositive/llap/dynamic_semijoin_reduction_3.q.out
--
diff --git 
a/ql/src/test/results/clientpositive/llap/dynamic_semijoin_reduction_3.q.out 
b/ql/src/test/results/clientpositive/llap/dynamic_semijoin_reduction_3.q.out
index 8aee753..095ae9d 100644
--- a/ql/src/test/results/clientpositive/llap/dynamic_semijoin_reduction_3.q.out
+++ b/ql/src/test/results/clientpositive/llap/dynamic_semijoin_reduction_3.q.out
@@ -256,6 +256,7 @@ STAGE PLANS:
   output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
   serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
   name: default.acidtbl
+  Write Type: DELETE
 
   Stage: Stage-6
 Stats-Aggr Operator
@@ -269,6 +270,7 @@ STAGE PLANS:
   output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
   serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
   name: default.acidtbl
+  Write Type: UPDATE
 
   Stage: Stage-7
 Stats-Aggr Operator
@@ -295,6 +297,7 @@ STAGE PLANS:
   output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
   serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
   name: default.acidtbl
+  Write Type: INSERT
 
   Stage: Stage-9
 Stats-Aggr Operator
@@ -427,6 +430,7 @@ STAGE PLANS:
   output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
   serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
   name: default.acidtbl
+  Write Type: INSERT
 
   Stage: Stage-3
 Stats-Aggr Operator
@@ -727,6 +731,7 @@ STAGE PLANS:
   output format: org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat
   serde: org.apache.hadoop.hive.ql.io.orc.OrcSerde
   name: default.acidtbl
+  Write Type: DELETE
 
   Stage: Stage-6
 Stats-Aggr Operator
@@ -740,6 +745,7 @@ STAGE PLANS:
   output