Repository: hive
Updated Branches:
  refs/heads/master c5b3ccc41 -> 583a9511b


HIVE-17535 Select 1 EXCEPT Select 1 fails with NPE (Vineet Garg, reviewed by Ashutosh Chauhan)
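
For reference, the subject describes a set operation over constant SELECTs with no source table; a minimal reproduction of that shape (formatting illustrative) is:

  select 1 except select 1;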


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/583a9511
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/583a9511
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/583a9511

Branch: refs/heads/master
Commit: 583a9511ba8809d81595a5fa4da32ed2c2f8912e
Parents: c5b3ccc
Author: Vineet Garg <vg...@apache.com>
Authored: Wed Sep 20 14:09:32 2017 -0700
Committer: Vineet Garg <vg...@apache.com>
Committed: Wed Sep 20 14:09:32 2017 -0700

----------------------------------------------------------------------
 .../test/resources/testconfiguration.properties |  1 +
 .../hadoop/hive/ql/parse/CalcitePlanner.java    | 19 ++++++-----------
 .../hadoop/hive/ql/parse/SemanticAnalyzer.java  |  8 ++++++-
 .../clientnegative/subquery_missing_from.q      |  1 -
 .../clientnegative/subquery_select_no_source.q  |  2 --
 .../clientnegative/subquery_missing_from.q.out  |  3 ---
 .../subquery_select_no_source.q.out             |  1 -
 .../clientpositive/beeline/mapjoin2.q.out       |  4 ++++
 .../beeline/select_dummy_source.q.out           | 22 +++++++++++++-------
 .../clientpositive/decimal_precision2.q.out     | 10 ++++-----
 .../clientpositive/llap/explainuser_1.q.out     | 14 +++++++------
 .../results/clientpositive/llap/mapjoin2.q.out  |  4 ++++
 .../llap/select_dummy_source.q.out              | 10 +++++++--
 .../test/results/clientpositive/mapjoin2.q.out  |  4 ++++
 .../clientpositive/select_dummy_source.q.out    | 22 +++++++++++++-------
 .../results/clientpositive/timestamptz_1.q.out  |  2 +-
 .../results/clientpositive/udtf_stack.q.out     | 18 +++++++++-------
 .../vector_tablesample_rows.q.out               |  2 +-
 18 files changed, 90 insertions(+), 57 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/583a9511/itests/src/test/resources/testconfiguration.properties
----------------------------------------------------------------------
diff --git a/itests/src/test/resources/testconfiguration.properties b/itests/src/test/resources/testconfiguration.properties
index d472bb3..efa690d 100644
--- a/itests/src/test/resources/testconfiguration.properties
+++ b/itests/src/test/resources/testconfiguration.properties
@@ -30,6 +30,7 @@ disabled.query.files=ql_rewrite_gbtoidx.q,\
   cbo_rp_subq_not_in.q,\
   cbo_rp_subq_exists.q,\
   orc_llap.q,\
+  min_structvalue.q,\
   ql_rewrite_gbtoidx_cbo_2.q,\
   rcfile_merge1.q,\
   smb_mapjoin_8.q,\

http://git-wip-us.apache.org/repos/asf/hive/blob/583a9511/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
index 2645fab..28953b9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/CalcitePlanner.java
@@ -709,7 +709,7 @@ public class CalcitePlanner extends SemanticAnalyzer {
     boolean isSupportedRoot = root == HiveParser.TOK_QUERY || root == HiveParser.TOK_EXPLAIN
         || qb.isCTAS() || qb.isMaterializedView();
     // Queries without a source table currently are not supported by CBO
-    boolean isSupportedType = (qb.getIsQuery() && !qb.containsQueryWithoutSourceTable())
+    boolean isSupportedType = (qb.getIsQuery())
         || qb.isCTAS() || qb.isMaterializedView() || cboCtx.type == PreCboCtx.Type.INSERT
         || cboCtx.type == PreCboCtx.Type.MULTI_INSERT;
     boolean noBadTokens = HiveCalciteUtil.validateASTForUnsupportedTokens(ast);
@@ -4164,18 +4164,11 @@ public class CalcitePlanner extends SemanticAnalyzer {
 
       if (aliasToRel.isEmpty()) {
         // // This may happen for queries like select 1; (no source table)
-        // We can do following which is same, as what Hive does.
-        // With this, we will be able to generate Calcite plan.
-        // qb.getMetaData().setSrcForAlias(DUMMY_TABLE, getDummyTable());
-        // RelNode op = genTableLogicalPlan(DUMMY_TABLE, qb);
-        // qb.addAlias(DUMMY_TABLE);
-        // qb.setTabAlias(DUMMY_TABLE, DUMMY_TABLE);
-        // aliasToRel.put(DUMMY_TABLE, op);
-        // However, Hive trips later while trying to get Metadata for this dummy
-        // table
-        // So, for now lets just disable this. Anyway there is nothing much to
-        // optimize in such cases.
-        throw new CalciteSemanticException("Unsupported", UnsupportedFeature.Others);
+        qb.getMetaData().setSrcForAlias(DUMMY_TABLE, getDummyTable());
+        qb.addAlias(DUMMY_TABLE);
+        qb.setTabAlias(DUMMY_TABLE, DUMMY_TABLE);
+        RelNode op = genTableLogicalPlan(DUMMY_TABLE, qb);
+        aliasToRel.put(DUMMY_TABLE, op);
 
       }
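
With the CalciteSemanticException above removed, a query block that has no source table is wired to the dummy table and planned through CBO instead of being rejected. Illustrative statements that take this path, drawn from the comment above and from the updated test outputs (the EXPLAIN wrapper is only for inspecting the plan):

  explain select 1;
  select explode(array('a', 'b'));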
 

http://git-wip-us.apache.org/repos/asf/hive/blob/583a9511/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index fee5a79..d56fd21 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -2019,6 +2019,9 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
       }
 
       if (tab == null) {
+        if(tabName.equals(DUMMY_DATABASE + "." + DUMMY_TABLE)) {
+          continue;
+        }
         ASTNode src = qb.getParseInfo().getSrcForAlias(alias);
         if (null != src) {
           throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(src));
@@ -10611,6 +10614,9 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
 
     // Recurse over all the source tables
     for (String alias : qb.getTabAliases()) {
+      if(alias.equals(DUMMY_TABLE)) {
+        continue;
+      }
       Operator op = genTablePlan(alias, qb);
       aliasToOpInfo.put(alias, op);
     }
@@ -10738,7 +10744,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
     opParseCtx.get(operator).setRowResolver(newRR);
   }
 
-  private Table getDummyTable() throws SemanticException {
+  protected Table getDummyTable() throws SemanticException {
     Path dummyPath = createDummyFile();
     Table desc = new Table(DUMMY_DATABASE, DUMMY_TABLE);
     desc.getTTable().getSd().setLocation(dummyPath.toString());
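
Taken together, the SemanticAnalyzer changes let the _dummy_table alias pass metadata resolution and skip genTablePlan, and getDummyTable() becomes visible to the CalcitePlanner subclass, which now calls it. Queries whose only sources are constant subqueries, such as this join from the updated mapjoin2 outputs (reformatted here), report _dummy_database@_dummy_table as their input:

  select a.key, a.a_one, b.b_one, a.a_zero, b.b_zero
  from (select 11 key, 0 confuse_you, 1 a_one, 0 a_zero) a
  join (select 11 key, 0 confuse_you, 1 b_one, 0 b_zero) b
  on a.key = b.key;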

http://git-wip-us.apache.org/repos/asf/hive/blob/583a9511/ql/src/test/queries/clientnegative/subquery_missing_from.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientnegative/subquery_missing_from.q b/ql/src/test/queries/clientnegative/subquery_missing_from.q
deleted file mode 100644
index 3b49ac6..0000000
--- a/ql/src/test/queries/clientnegative/subquery_missing_from.q
+++ /dev/null
@@ -1 +0,0 @@
-select * from src where src.key in (select key);
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/583a9511/ql/src/test/queries/clientnegative/subquery_select_no_source.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientnegative/subquery_select_no_source.q b/ql/src/test/queries/clientnegative/subquery_select_no_source.q
deleted file mode 100644
index 75cae51..0000000
--- a/ql/src/test/queries/clientnegative/subquery_select_no_source.q
+++ /dev/null
@@ -1,2 +0,0 @@
--- since CBO doesn't allow such queries we can not support subqueries here
-explain select (select max(p_size) from part);

http://git-wip-us.apache.org/repos/asf/hive/blob/583a9511/ql/src/test/results/clientnegative/subquery_missing_from.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientnegative/subquery_missing_from.q.out b/ql/src/test/results/clientnegative/subquery_missing_from.q.out
deleted file mode 100644
index b09a8e3..0000000
--- a/ql/src/test/results/clientnegative/subquery_missing_from.q.out
+++ /dev/null
@@ -1,3 +0,0 @@
-FAILED: SemanticException Line 0:-1 Invalid SubQuery expression 'key' in definition of SubQuery sq_1 [
-src.key in (select key)
-] used as sq_1 at Line 0:-1: From clause is missing in SubQuery.

http://git-wip-us.apache.org/repos/asf/hive/blob/583a9511/ql/src/test/results/clientnegative/subquery_select_no_source.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientnegative/subquery_select_no_source.q.out b/ql/src/test/results/clientnegative/subquery_select_no_source.q.out
deleted file mode 100644
index 37c4e57..0000000
--- a/ql/src/test/results/clientnegative/subquery_select_no_source.q.out
+++ /dev/null
@@ -1 +0,0 @@
-FAILED: CalciteSubquerySemanticException [Error 10249]: Unsupported SubQuery Expression  Currently SubQuery expressions are only allowed as Where and Having Clause predicates

http://git-wip-us.apache.org/repos/asf/hive/blob/583a9511/ql/src/test/results/clientpositive/beeline/mapjoin2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/beeline/mapjoin2.q.out b/ql/src/test/results/clientpositive/beeline/mapjoin2.q.out
index 08e398a..7e70841 100644
--- a/ql/src/test/results/clientpositive/beeline/mapjoin2.q.out
+++ b/ql/src/test/results/clientpositive/beeline/mapjoin2.q.out
@@ -53,6 +53,7 @@ POSTHOOK: Input: default@tbl
 #### A masked pattern was here ####
 false  false   true    true
 true   true    false   false
+Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Stage-3:MAPRED' is a cross 
product
 PREHOOK: query: select a.key, a.a_one, b.b_one, a.a_zero, b.b_zero from ( 
SELECT 11 key, 0 confuse_you, 1 a_one, 0 a_zero ) a join ( SELECT 11 key, 0 
confuse_you, 1 b_one, 0 b_zero ) b on a.key = b.key
 PREHOOK: type: QUERY
 PREHOOK: Input: _dummy_database@_dummy_table
@@ -62,6 +63,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: _dummy_database@_dummy_table
 #### A masked pattern was here ####
 11     1       1       0       0
+Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Stage-3:MAPRED' is a cross 
product
 PREHOOK: query: select a.key, a.a_one, b.b_one, a.a_zero, b.b_zero from ( 
SELECT 11 key, 0 confuse_you, 1 a_one, 0 a_zero ) a left outer join ( SELECT 11 
key, 0 confuse_you, 1 b_one, 0 b_zero ) b on a.key = b.key
 PREHOOK: type: QUERY
 PREHOOK: Input: _dummy_database@_dummy_table
@@ -71,6 +73,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: _dummy_database@_dummy_table
 #### A masked pattern was here ####
 11     1       1       0       0
+Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Stage-3:MAPRED' is a cross 
product
 PREHOOK: query: select a.key, a.a_one, b.b_one, a.a_zero, b.b_zero from ( 
SELECT 11 key, 0 confuse_you, 1 a_one, 0 a_zero ) a right outer join ( SELECT 
11 key, 0 confuse_you, 1 b_one, 0 b_zero ) b on a.key = b.key
 PREHOOK: type: QUERY
 PREHOOK: Input: _dummy_database@_dummy_table
@@ -80,6 +83,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: _dummy_database@_dummy_table
 #### A masked pattern was here ####
 11     1       1       0       0
+Warning: Shuffle Join JOIN[6][tables = [$hdt$_0, $hdt$_1]] in Stage 
'Stage-1:MAPRED' is a cross product
 PREHOOK: query: select a.key, a.a_one, b.b_one, a.a_zero, b.b_zero from ( 
SELECT 11 key, 0 confuse_you, 1 a_one, 0 a_zero ) a full outer join ( SELECT 11 
key, 0 confuse_you, 1 b_one, 0 b_zero ) b on a.key = b.key
 PREHOOK: type: QUERY
 PREHOOK: Input: _dummy_database@_dummy_table

http://git-wip-us.apache.org/repos/asf/hive/blob/583a9511/ql/src/test/results/clientpositive/beeline/select_dummy_source.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/beeline/select_dummy_source.q.out b/ql/src/test/results/clientpositive/beeline/select_dummy_source.q.out
index b6fa03f..0b73e84 100644
--- a/ql/src/test/results/clientpositive/beeline/select_dummy_source.q.out
+++ b/ql/src/test/results/clientpositive/beeline/select_dummy_source.q.out
@@ -89,13 +89,17 @@ STAGE PLANS:
               UDTF Operator
                 Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL 
Column stats: COMPLETE
                 function name: explode
-                File Output Operator
-                  compressed: false
+                Select Operator
+                  expressions: col (type: string)
+                  outputColumnNames: _col0
                   Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL 
Column stats: COMPLETE
-                  table:
-                      input format: 
org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL 
Column stats: COMPLETE
+                    table:
+                        input format: 
org.apache.hadoop.mapred.SequenceFileInputFormat
+                        output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                        serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 
   Stage: Stage-0
     Fetch Operator
@@ -204,7 +208,11 @@ STAGE PLANS:
             UDTF Operator
               Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column 
stats: COMPLETE
               function name: explode
-              ListSink
+              Select Operator
+                expressions: col (type: string)
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL 
Column stats: COMPLETE
+                ListSink
 
 PREHOOK: query: select explode(array('a', 'b'))
 PREHOOK: type: QUERY

http://git-wip-us.apache.org/repos/asf/hive/blob/583a9511/ql/src/test/results/clientpositive/decimal_precision2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/decimal_precision2.q.out b/ql/src/test/results/clientpositive/decimal_precision2.q.out
index 384ba80..4ce7e1c 100644
--- a/ql/src/test/results/clientpositive/decimal_precision2.q.out
+++ b/ql/src/test/results/clientpositive/decimal_precision2.q.out
@@ -37,9 +37,9 @@ STAGE PLANS:
           Row Limit Per Split: 1
           Statistics: Num rows: 1 Data size: 1 Basic stats: COMPLETE Column 
stats: COMPLETE
           Select Operator
-            expressions: 100 (type: decimal(3,0))
+            expressions: 100 (type: int)
             outputColumnNames: _col0
-            Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE 
Column stats: COMPLETE
+            Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column 
stats: COMPLETE
             ListSink
 
 PREHOOK: query: explain select 0.000BD
@@ -59,9 +59,9 @@ STAGE PLANS:
           Row Limit Per Split: 1
           Statistics: Num rows: 1 Data size: 1 Basic stats: COMPLETE Column 
stats: COMPLETE
           Select Operator
-            expressions: 0 (type: decimal(1,0))
+            expressions: 0 (type: int)
             outputColumnNames: _col0
-            Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE 
Column stats: COMPLETE
+            Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE Column 
stats: COMPLETE
             ListSink
 
 PREHOOK: query: explain select 0.100BD
@@ -147,7 +147,7 @@ STAGE PLANS:
           Row Limit Per Split: 1
           Statistics: Num rows: 1 Data size: 1 Basic stats: COMPLETE Column 
stats: COMPLETE
           Select Operator
-            expressions: 69.0212249755859375 (type: decimal(27,20))
+            expressions: 69.0212249755859375 (type: decimal(18,16))
             outputColumnNames: _col0
             Statistics: Num rows: 1 Data size: 112 Basic stats: COMPLETE 
Column stats: COMPLETE
             ListSink

http://git-wip-us.apache.org/repos/asf/hive/blob/583a9511/ql/src/test/results/clientpositive/llap/explainuser_1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/explainuser_1.q.out b/ql/src/test/results/clientpositive/llap/explainuser_1.q.out
index 8ee41d0..a47d791 100644
--- a/ql/src/test/results/clientpositive/llap/explainuser_1.q.out
+++ b/ql/src/test/results/clientpositive/llap/explainuser_1.q.out
@@ -5150,16 +5150,18 @@ PREHOOK: query: explain select explode(array('a', 'b'))
 PREHOOK: type: QUERY
 POSTHOOK: query: explain select explode(array('a', 'b'))
 POSTHOOK: type: QUERY
-Plan not optimized by CBO.
+Plan optimized by CBO.
 
 Stage-0
   Fetch Operator
     limit:-1
-    UDTF Operator [UDTF_2]
-      function name:explode
-      Select Operator [SEL_1]
-        Output:["_col0"]
-        TableScan [TS_0]
+    Select Operator [SEL_3]
+      Output:["_col0"]
+      UDTF Operator [UDTF_2]
+        function name:explode
+        Select Operator [SEL_1]
+          Output:["_col0"]
+          TableScan [TS_0]
 
 PREHOOK: query: CREATE TABLE T1(key STRING, val STRING) STORED AS TEXTFILE
 PREHOOK: type: CREATETABLE

http://git-wip-us.apache.org/repos/asf/hive/blob/583a9511/ql/src/test/results/clientpositive/llap/mapjoin2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/mapjoin2.q.out b/ql/src/test/results/clientpositive/llap/mapjoin2.q.out
index cdee70b..ce65c6d 100644
--- a/ql/src/test/results/clientpositive/llap/mapjoin2.q.out
+++ b/ql/src/test/results/clientpositive/llap/mapjoin2.q.out
@@ -53,6 +53,7 @@ POSTHOOK: Input: default@tbl
 #### A masked pattern was here ####
 false  false   true    true
 true   true    false   false
+Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Map 1' is a cross product
 PREHOOK: query: select a.key, a.a_one, b.b_one, a.a_zero, b.b_zero from ( 
SELECT 11 key, 0 confuse_you, 1 a_one, 0 a_zero ) a join ( SELECT 11 key, 0 
confuse_you, 1 b_one, 0 b_zero ) b on a.key = b.key
 PREHOOK: type: QUERY
 PREHOOK: Input: _dummy_database@_dummy_table
@@ -62,6 +63,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: _dummy_database@_dummy_table
 #### A masked pattern was here ####
 11     1       1       0       0
+Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Map 1' is a cross product
 PREHOOK: query: select a.key, a.a_one, b.b_one, a.a_zero, b.b_zero from ( 
SELECT 11 key, 0 confuse_you, 1 a_one, 0 a_zero ) a left outer join ( SELECT 11 
key, 0 confuse_you, 1 b_one, 0 b_zero ) b on a.key = b.key
 PREHOOK: type: QUERY
 PREHOOK: Input: _dummy_database@_dummy_table
@@ -71,6 +73,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: _dummy_database@_dummy_table
 #### A masked pattern was here ####
 11     1       1       0       0
+Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Map 2' is a cross product
 PREHOOK: query: select a.key, a.a_one, b.b_one, a.a_zero, b.b_zero from ( 
SELECT 11 key, 0 confuse_you, 1 a_one, 0 a_zero ) a right outer join ( SELECT 
11 key, 0 confuse_you, 1 b_one, 0 b_zero ) b on a.key = b.key
 PREHOOK: type: QUERY
 PREHOOK: Input: _dummy_database@_dummy_table
@@ -80,6 +83,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: _dummy_database@_dummy_table
 #### A masked pattern was here ####
 11     1       1       0       0
+Warning: Shuffle Join MERGEJOIN[9][tables = [$hdt$_0, $hdt$_1]] in Stage 
'Reducer 2' is a cross product
 PREHOOK: query: select a.key, a.a_one, b.b_one, a.a_zero, b.b_zero from ( 
SELECT 11 key, 0 confuse_you, 1 a_one, 0 a_zero ) a full outer join ( SELECT 11 
key, 0 confuse_you, 1 b_one, 0 b_zero ) b on a.key = b.key
 PREHOOK: type: QUERY
 PREHOOK: Input: _dummy_database@_dummy_table

http://git-wip-us.apache.org/repos/asf/hive/blob/583a9511/ql/src/test/results/clientpositive/llap/select_dummy_source.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/select_dummy_source.q.out b/ql/src/test/results/clientpositive/llap/select_dummy_source.q.out
index 7efd2ff..b7f939f 100644
--- a/ql/src/test/results/clientpositive/llap/select_dummy_source.q.out
+++ b/ql/src/test/results/clientpositive/llap/select_dummy_source.q.out
@@ -82,7 +82,10 @@ STAGE PLANS:
             outputColumnNames: _col0
             UDTF Operator
               function name: explode
-              ListSink
+              Select Operator
+                expressions: col (type: string)
+                outputColumnNames: _col0
+                ListSink
 
 PREHOOK: query: select explode(array('a', 'b'))
 PREHOOK: type: QUERY
@@ -178,7 +181,10 @@ STAGE PLANS:
             outputColumnNames: _col0
             UDTF Operator
               function name: explode
-              ListSink
+              Select Operator
+                expressions: col (type: string)
+                outputColumnNames: _col0
+                ListSink
 
 PREHOOK: query: select explode(array('a', 'b'))
 PREHOOK: type: QUERY

http://git-wip-us.apache.org/repos/asf/hive/blob/583a9511/ql/src/test/results/clientpositive/mapjoin2.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/mapjoin2.q.out b/ql/src/test/results/clientpositive/mapjoin2.q.out
index 08e398a..7e70841 100644
--- a/ql/src/test/results/clientpositive/mapjoin2.q.out
+++ b/ql/src/test/results/clientpositive/mapjoin2.q.out
@@ -53,6 +53,7 @@ POSTHOOK: Input: default@tbl
 #### A masked pattern was here ####
 false  false   true    true
 true   true    false   false
+Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Stage-3:MAPRED' is a cross 
product
 PREHOOK: query: select a.key, a.a_one, b.b_one, a.a_zero, b.b_zero from ( 
SELECT 11 key, 0 confuse_you, 1 a_one, 0 a_zero ) a join ( SELECT 11 key, 0 
confuse_you, 1 b_one, 0 b_zero ) b on a.key = b.key
 PREHOOK: type: QUERY
 PREHOOK: Input: _dummy_database@_dummy_table
@@ -62,6 +63,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: _dummy_database@_dummy_table
 #### A masked pattern was here ####
 11     1       1       0       0
+Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Stage-3:MAPRED' is a cross 
product
 PREHOOK: query: select a.key, a.a_one, b.b_one, a.a_zero, b.b_zero from ( 
SELECT 11 key, 0 confuse_you, 1 a_one, 0 a_zero ) a left outer join ( SELECT 11 
key, 0 confuse_you, 1 b_one, 0 b_zero ) b on a.key = b.key
 PREHOOK: type: QUERY
 PREHOOK: Input: _dummy_database@_dummy_table
@@ -71,6 +73,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: _dummy_database@_dummy_table
 #### A masked pattern was here ####
 11     1       1       0       0
+Warning: Map Join MAPJOIN[9][bigTable=?] in task 'Stage-3:MAPRED' is a cross 
product
 PREHOOK: query: select a.key, a.a_one, b.b_one, a.a_zero, b.b_zero from ( 
SELECT 11 key, 0 confuse_you, 1 a_one, 0 a_zero ) a right outer join ( SELECT 
11 key, 0 confuse_you, 1 b_one, 0 b_zero ) b on a.key = b.key
 PREHOOK: type: QUERY
 PREHOOK: Input: _dummy_database@_dummy_table
@@ -80,6 +83,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: _dummy_database@_dummy_table
 #### A masked pattern was here ####
 11     1       1       0       0
+Warning: Shuffle Join JOIN[6][tables = [$hdt$_0, $hdt$_1]] in Stage 
'Stage-1:MAPRED' is a cross product
 PREHOOK: query: select a.key, a.a_one, b.b_one, a.a_zero, b.b_zero from ( 
SELECT 11 key, 0 confuse_you, 1 a_one, 0 a_zero ) a full outer join ( SELECT 11 
key, 0 confuse_you, 1 b_one, 0 b_zero ) b on a.key = b.key
 PREHOOK: type: QUERY
 PREHOOK: Input: _dummy_database@_dummy_table

http://git-wip-us.apache.org/repos/asf/hive/blob/583a9511/ql/src/test/results/clientpositive/select_dummy_source.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/select_dummy_source.q.out b/ql/src/test/results/clientpositive/select_dummy_source.q.out
index b6fa03f..0b73e84 100644
--- a/ql/src/test/results/clientpositive/select_dummy_source.q.out
+++ b/ql/src/test/results/clientpositive/select_dummy_source.q.out
@@ -89,13 +89,17 @@ STAGE PLANS:
               UDTF Operator
                 Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL 
Column stats: COMPLETE
                 function name: explode
-                File Output Operator
-                  compressed: false
+                Select Operator
+                  expressions: col (type: string)
+                  outputColumnNames: _col0
                   Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL 
Column stats: COMPLETE
-                  table:
-                      input format: 
org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL 
Column stats: COMPLETE
+                    table:
+                        input format: 
org.apache.hadoop.mapred.SequenceFileInputFormat
+                        output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                        serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 
   Stage: Stage-0
     Fetch Operator
@@ -204,7 +208,11 @@ STAGE PLANS:
             UDTF Operator
               Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column 
stats: COMPLETE
               function name: explode
-              ListSink
+              Select Operator
+                expressions: col (type: string)
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL 
Column stats: COMPLETE
+                ListSink
 
 PREHOOK: query: select explode(array('a', 'b'))
 PREHOOK: type: QUERY

http://git-wip-us.apache.org/repos/asf/hive/blob/583a9511/ql/src/test/results/clientpositive/timestamptz_1.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/timestamptz_1.q.out b/ql/src/test/results/clientpositive/timestamptz_1.q.out
index 4756033..b4ef3e4 100644
--- a/ql/src/test/results/clientpositive/timestamptz_1.q.out
+++ b/ql/src/test/results/clientpositive/timestamptz_1.q.out
@@ -18,7 +18,7 @@ POSTHOOK: query: insert overwrite table tstz1 select cast('2016-01-03 12:26:34 A
 POSTHOOK: type: QUERY
 POSTHOOK: Input: _dummy_database@_dummy_table
 POSTHOOK: Output: default@tstz1
-POSTHOOK: Lineage: tstz1.t EXPRESSION []
+POSTHOOK: Lineage: tstz1.t SIMPLE []
 PREHOOK: query: select cast(t as string) from tstz1
 PREHOOK: type: QUERY
 PREHOOK: Input: default@tstz1

http://git-wip-us.apache.org/repos/asf/hive/blob/583a9511/ql/src/test/results/clientpositive/udtf_stack.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/udtf_stack.q.out b/ql/src/test/results/clientpositive/udtf_stack.q.out
index 97dcd3a..3192a44 100644
--- a/ql/src/test/results/clientpositive/udtf_stack.q.out
+++ b/ql/src/test/results/clientpositive/udtf_stack.q.out
@@ -182,13 +182,17 @@ STAGE PLANS:
               UDTF Operator
                 Statistics: Num rows: 1 Data size: 185 Basic stats: COMPLETE 
Column stats: COMPLETE
                 function name: stack
-                File Output Operator
-                  compressed: false
-                  Statistics: Num rows: 1 Data size: 185 Basic stats: COMPLETE 
Column stats: COMPLETE
-                  table:
-                      input format: 
org.apache.hadoop.mapred.SequenceFileInputFormat
-                      output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                Select Operator
+                  expressions: col0 (type: string), col1 (type: string), null 
(type: void)
+                  outputColumnNames: _col0, _col1, _col2
+                  Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE 
Column stats: COMPLETE
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 1 Data size: 4 Basic stats: COMPLETE 
Column stats: COMPLETE
+                    table:
+                        input format: 
org.apache.hadoop.mapred.SequenceFileInputFormat
+                        output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                        serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
 
   Stage: Stage-0
     Fetch Operator

http://git-wip-us.apache.org/repos/asf/hive/blob/583a9511/ql/src/test/results/clientpositive/vector_tablesample_rows.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/vector_tablesample_rows.q.out b/ql/src/test/results/clientpositive/vector_tablesample_rows.q.out
index 283762a..2d86d8c 100644
--- a/ql/src/test/results/clientpositive/vector_tablesample_rows.q.out
+++ b/ql/src/test/results/clientpositive/vector_tablesample_rows.q.out
@@ -251,7 +251,7 @@ STAGE PLANS:
             Select Operator
               Statistics: Num rows: 1 Data size: 1 Basic stats: COMPLETE 
Column stats: COMPLETE
               Group By Operator
-                aggregations: count(1)
+                aggregations: count()
                 Group By Vectorization:
                     groupByMode: HASH
                     vectorOutput: false
