Repository: hive
Updated Branches:
  refs/heads/master d9ec721b4 -> 8123c71f5


HIVE-17617: Rollup of an empty resultset should contain the grouping of the 
empty grouping set (Zoltan Haindrich, reviewed by Ashutosh Chauhan)

Signed-off-by: Zoltan Haindrich <k...@rxd.hu>


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/8123c71f
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/8123c71f
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/8123c71f

Branch: refs/heads/master
Commit: 8123c71f57f8194b0fa6108152ea0f07e7f0e86d
Parents: d9ec721
Author: Zoltan Haindrich <k...@rxd.hu>
Authored: Wed Oct 25 17:04:52 2017 +0200
Committer: Zoltan Haindrich <k...@rxd.hu>
Committed: Wed Oct 25 17:04:52 2017 +0200

----------------------------------------------------------------------
 .../test/resources/testconfiguration.properties |   1 +
 .../hadoop/hive/ql/exec/GroupByOperator.java    |  47 +++-
 .../ql/exec/vector/VectorGroupByOperator.java   |  14 +-
 .../exec/vector/VectorHashKeyWrapperBatch.java  |  19 +-
 .../apache/hadoop/hive/ql/plan/GroupByDesc.java |   5 +-
 .../clientpositive/groupby_rollup_empty.q       |  66 ++++++
 .../clientpositive/groupby_rollup_empty.q.out   | 225 ++++++++++++++++++
 .../llap/groupby_rollup_empty.q.out             | 234 +++++++++++++++++++
 8 files changed, 597 insertions(+), 14 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/8123c71f/itests/src/test/resources/testconfiguration.properties
----------------------------------------------------------------------
diff --git a/itests/src/test/resources/testconfiguration.properties 
b/itests/src/test/resources/testconfiguration.properties
index 9f9b914..a4648be 100644
--- a/itests/src/test/resources/testconfiguration.properties
+++ b/itests/src/test/resources/testconfiguration.properties
@@ -176,6 +176,7 @@ minillaplocal.shared.query.files=alter_merge_2_orc.q,\
   groupby1.q,\
   groupby2.q,\
   groupby3.q,\
+  groupby_rollup_empty.q,\
   having.q,\
   identity_project_remove_skip.q,\
   insert1.q,\

http://git-wip-us.apache.org/repos/asf/hive/blob/8123c71f/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java
index d3dfd21..4a0acb1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/GroupByOperator.java
@@ -36,6 +36,7 @@ import org.apache.hadoop.hive.common.type.TimestampTZ;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.llap.LlapDaemonInfo;
 import org.apache.hadoop.hive.ql.CompilationOpContext;
+import org.apache.hadoop.hive.ql.exec.tez.TezContext;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.OpParseContext;
 import org.apache.hadoop.hive.ql.plan.AggregationDesc;
@@ -67,8 +68,6 @@ import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.Text;
 
-import com.google.common.math.IntMath;
-
 import javolution.util.FastBitSet;
 
 /**
@@ -209,7 +208,6 @@ public class GroupByOperator extends Operator<GroupByDesc> {
     heartbeatInterval = HiveConf.getIntVar(hconf,
         HiveConf.ConfVars.HIVESENDHEARTBEAT);
     countAfterReport = 0;
-    groupingSetsPresent = conf.isGroupingSetsPresent();
     ObjectInspector rowInspector = inputObjInspectors[0];
 
     // init keyFields
@@ -228,6 +226,7 @@ public class GroupByOperator extends Operator<GroupByDesc> {
 
     // Initialize the constants for the grouping sets, so that they can be 
re-used for
     // each row
+    groupingSetsPresent = conf.isGroupingSetsPresent();
     if (groupingSetsPresent) {
       groupingSets = conf.getListGroupingSets();
       groupingSetsPosition = conf.getGroupingSetPosition();
@@ -1096,7 +1095,7 @@ public class GroupByOperator extends 
Operator<GroupByDesc> {
     if (!abort) {
       try {
         // If there is no grouping key and no row came to this operator
-        if (firstRow && (keyFields.length == 0)) {
+        if (firstRow && GroupByOperator.shouldEmitSummaryRow(conf)) {
           firstRow = false;
 
           // There is no grouping key - simulate a null row
@@ -1119,8 +1118,12 @@ public class GroupByOperator extends 
Operator<GroupByDesc> {
             aggregationEvaluators[ai].aggregate(aggregations[ai], o);
           }
 
-          // create dummy keys - size 0
-          forward(new Object[0], aggregations);
+          Object[] keys=new Object[outputKeyLength];
+          int pos = conf.getGroupingSetPosition();
+          if (pos >= 0 && pos < outputKeyLength) {
+            keys[pos] = new IntWritable((1 << pos) - 1);
+          }
+          forward(keys, aggregations);
         } else {
           flush();
         }
@@ -1179,4 +1182,36 @@ public class GroupByOperator extends 
Operator<GroupByDesc> {
     return getConf().getMode() == GroupByDesc.Mode.MERGEPARTIAL ||
         getConf().getMode() == GroupByDesc.Mode.COMPLETE;
   }
+
+  public static boolean shouldEmitSummaryRow(GroupByDesc desc) {
+    // exactly one reducer should emit the summary row
+    if (!firstReducer()) {
+      return false;
+    }
+    // an empty key set is equivalent to the empty grouping set ()
+    if (desc.getKeys().size() == 0) {
+      return true;
+    }
+    int groupingSetPosition = desc.getGroupingSetPosition();
+    List<Integer> listGroupingSets = desc.getListGroupingSets();
+    // groupingSets are known at map/reducer side; but have to do real 
processing
+    // hence groupingSetsPresent is true only at the map side
+    if (groupingSetPosition >= 0 && listGroupingSets != null) {
+      Integer emptyGrouping = (1 << groupingSetPosition) - 1;
+      if (listGroupingSets.contains(emptyGrouping)) {
+        return true;
+      }
+    }
+    return false;
+  }
+
+  public static boolean firstReducer() {
+    MapredContext ctx = TezContext.get();
+    if (ctx != null && ctx instanceof TezContext) {
+      TezContext tezContext = (TezContext) ctx;
+      return tezContext.getTezProcessorContext().getTaskIndex() == 0;
+    }
+    return true;
+  }
+
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/8123c71f/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorGroupByOperator.java
----------------------------------------------------------------------
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorGroupByOperator.java 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorGroupByOperator.java
index 31f2621..d81cd26 100644
--- 
a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorGroupByOperator.java
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorGroupByOperator.java
@@ -36,18 +36,15 @@ import org.apache.hadoop.hive.ql.exec.GroupByOperator;
 import org.apache.hadoop.hive.ql.exec.KeyWrapper;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import 
org.apache.hadoop.hive.ql.exec.vector.expressions.ConstantVectorExpression;
-import org.apache.hadoop.hive.ql.exec.vector.expressions.IdentityExpression;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
 import 
org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriter;
 import 
org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriterFactory;
 import 
org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorAggregateExpression;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.plan.AggregationDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
 import org.apache.hadoop.hive.ql.plan.GroupByDesc;
 import org.apache.hadoop.hive.ql.plan.OperatorDesc;
 import org.apache.hadoop.hive.ql.plan.VectorGroupByDesc;
-import org.apache.hadoop.hive.ql.plan.VectorGroupByDesc.ProcessingMode;
 import org.apache.hadoop.hive.ql.plan.api.OperatorType;
 import org.apache.hadoop.hive.ql.util.JavaDataModel;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -777,6 +774,7 @@ public class VectorGroupByOperator extends 
Operator<GroupByDesc> implements
 
     private boolean first;
     private boolean isLastGroupBatch;
+    private boolean hasOutput;
 
     /**
      * The group vector key helper.
@@ -819,6 +817,7 @@ public class VectorGroupByOperator extends 
Operator<GroupByDesc> implements
     @Override
     public void doProcessBatch(VectorizedRowBatch batch, boolean 
isFirstGroupingSet,
         boolean[] currentGroupingSetsOverrideIsNulls) throws HiveException {
+      hasOutput = true;
       if (first) {
         // Copy the group key to output batch now.  We'll copy in the 
aggregates at the end of the group.
         first = false;
@@ -847,6 +846,15 @@ public class VectorGroupByOperator extends 
Operator<GroupByDesc> implements
       if (!aborted && !first && !isLastGroupBatch) {
         writeGroupRow(groupAggregators, buffer);
       }
+      if (!hasOutput && GroupByOperator.shouldEmitSummaryRow(conf)) {
+        VectorHashKeyWrapper kw = 
keyWrappersBatch.getVectorHashKeyWrappers()[0];
+        int pos = conf.getGroupingSetPosition();
+        if (pos >= 0) {
+          long val = (1 << pos) - 1;
+          keyWrappersBatch.setLongValue(kw, pos, val);
+        }
+        writeSingleRow(kw , groupAggregators);
+      }
     }
   }
 

http://git-wip-us.apache.org/repos/asf/hive/blob/8123c71f/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapperBatch.java
----------------------------------------------------------------------
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapperBatch.java
 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapperBatch.java
index 82e8748..f00ad96 100644
--- 
a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapperBatch.java
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapperBatch.java
@@ -18,12 +18,10 @@
 
 package org.apache.hadoop.hive.ql.exec.vector;
 
-import 
org.apache.hadoop.hive.ql.exec.vector.VectorHashKeyWrapper.EmptyVectorHashKeyWrapper;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
 import 
org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriter;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.util.JavaDataModel;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
 import org.apache.hadoop.hive.ql.exec.vector.ColumnVector.Type;
@@ -942,7 +940,7 @@ public class VectorHashKeyWrapperBatch extends 
VectorColumnSetInfo {
     compiledKeyWrapperBatch.vectorHashKeyWrappers =
         new VectorHashKeyWrapper[VectorizedRowBatch.DEFAULT_SIZE];
     for(int i=0;i<VectorizedRowBatch.DEFAULT_SIZE; ++i) {
-      compiledKeyWrapperBatch.vectorHashKeyWrappers[i] = 
+      compiledKeyWrapperBatch.vectorHashKeyWrappers[i] =
           compiledKeyWrapperBatch.allocateKeyWrapper();
     }
 
@@ -1022,6 +1020,21 @@ public class VectorHashKeyWrapperBatch extends 
VectorColumnSetInfo {
     }
   }
 
+  public void setLongValue(VectorHashKeyWrapper kw, int keyIndex, Long value)
+    throws HiveException {
+
+    if (columnVectorTypes[keyIndex] != Type.LONG) {
+      throw new HiveException("Consistency error: expected LONG type; found: " 
+ columnVectorTypes[keyIndex]);
+    }
+    int columnTypeSpecificIndex = columnTypeSpecificIndices[keyIndex];
+
+    if (value == null) {
+      kw.assignNullLong(keyIndex, columnTypeSpecificIndex);
+      return;
+    }
+    kw.assignLong(columnTypeSpecificIndex, value);
+  }
+
   public int getVariableSize(int batchSize) {
     int variableSize = 0;
     if ( 0 < stringIndices.length) {

http://git-wip-us.apache.org/repos/asf/hive/blob/8123c71f/ql/src/java/org/apache/hadoop/hive/ql/plan/GroupByDesc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/GroupByDesc.java 
b/ql/src/java/org/apache/hadoop/hive/ql/plan/GroupByDesc.java
index 489a3b6..a44b780 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/GroupByDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/GroupByDesc.java
@@ -70,7 +70,7 @@ public class GroupByDesc extends AbstractOperatorDesc {
   private ArrayList<ExprNodeDesc> keys;
   private List<Integer> listGroupingSets;
   private boolean groupingSetsPresent;
-  private int groupingSetPosition = -1;
+  private int groupingSetPosition = -1; // in case of grouping sets, the group-by 
will output a row for every grouping set; this is the index of the key column 
that carries the grouping-set id
   private ArrayList<org.apache.hadoop.hive.ql.plan.AggregationDesc> 
aggregators;
   private ArrayList<java.lang.String> outputColumnNames;
   private float groupByMemoryUsage;
@@ -233,7 +233,7 @@ public class GroupByDesc extends AbstractOperatorDesc {
     }
     return false;
   }
-  
+
   @Explain(displayName = "bucketGroup", displayOnlyOnTrue = true)
   public boolean getBucketGroup() {
     return bucketGroup;
@@ -438,4 +438,5 @@ public class GroupByDesc extends AbstractOperatorDesc {
     }
     return false;
   }
+
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/8123c71f/ql/src/test/queries/clientpositive/groupby_rollup_empty.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/groupby_rollup_empty.q 
b/ql/src/test/queries/clientpositive/groupby_rollup_empty.q
new file mode 100644
index 0000000..0bd5179
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/groupby_rollup_empty.q
@@ -0,0 +1,66 @@
+set hive.vectorized.execution.enabled=false;
+
+drop table if exists tx1;
+drop table if exists tx2;
+create table tx1 (a integer,b integer,c integer);
+
+select sum(c)
+from tx1
+;
+
+select  sum(c),
+        grouping(b),
+       'NULL,1' as expected
+from    tx1
+where  a<0
+group by a,b grouping sets ((), b, a);
+
+select  sum(c),
+        grouping(b),
+       'NULL,1' as expected
+from    tx1
+where  a<0
+group by rollup (b);
+
+-- non-empty table
+
+insert into tx1 values (1,1,1);
+
+select  sum(c),
+        grouping(b),
+       'NULL,1' as expected
+from    tx1
+where  a<0
+group by rollup (b);
+
+select  sum(c),
+        grouping(b),
+       '1,1 and 1,0' as expected
+from    tx1
+group by rollup (b);
+
+
+set hive.vectorized.execution.enabled=true;
+create table tx2 (a integer,b integer,c integer,d double,u string) stored as 
orc;
+
+explain
+select  sum(c),
+        grouping(b),
+       'NULL,1' as expected
+from    tx2
+where  a<0
+group by a,b grouping sets ((), b, a);
+
+
+select sum(c),'NULL' as expected
+from tx2;
+
+select  sum(c),
+       max(u),
+       'asd',
+        grouping(b),
+       'NULL,1' as expected
+from    tx2
+where  a<0
+group by a,b,d grouping sets ((), b, a, d);
+

http://git-wip-us.apache.org/repos/asf/hive/blob/8123c71f/ql/src/test/results/clientpositive/groupby_rollup_empty.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/groupby_rollup_empty.q.out 
b/ql/src/test/results/clientpositive/groupby_rollup_empty.q.out
new file mode 100644
index 0000000..5db3184
--- /dev/null
+++ b/ql/src/test/results/clientpositive/groupby_rollup_empty.q.out
@@ -0,0 +1,225 @@
+PREHOOK: query: drop table if exists tx1
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists tx1
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: drop table if exists tx2
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists tx2
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table tx1 (a integer,b integer,c integer)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@tx1
+POSTHOOK: query: create table tx1 (a integer,b integer,c integer)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@tx1
+PREHOOK: query: select sum(c)
+from tx1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tx1
+#### A masked pattern was here ####
+POSTHOOK: query: select sum(c)
+from tx1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tx1
+#### A masked pattern was here ####
+NULL
+PREHOOK: query: select  sum(c),
+        grouping(b),
+       'NULL,1' as expected
+from    tx1
+where  a<0
+group by a,b grouping sets ((), b, a)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tx1
+#### A masked pattern was here ####
+POSTHOOK: query: select  sum(c),
+        grouping(b),
+       'NULL,1' as expected
+from    tx1
+where  a<0
+group by a,b grouping sets ((), b, a)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tx1
+#### A masked pattern was here ####
+NULL   1       NULL,1
+PREHOOK: query: select  sum(c),
+        grouping(b),
+       'NULL,1' as expected
+from    tx1
+where  a<0
+group by rollup (b)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tx1
+#### A masked pattern was here ####
+POSTHOOK: query: select  sum(c),
+        grouping(b),
+       'NULL,1' as expected
+from    tx1
+where  a<0
+group by rollup (b)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tx1
+#### A masked pattern was here ####
+NULL   1       NULL,1
+PREHOOK: query: insert into tx1 values (1,1,1)
+PREHOOK: type: QUERY
+PREHOOK: Output: default@tx1
+POSTHOOK: query: insert into tx1 values (1,1,1)
+POSTHOOK: type: QUERY
+POSTHOOK: Output: default@tx1
+POSTHOOK: Lineage: tx1.a EXPRESSION 
[(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col1, 
type:string, comment:), ]
+POSTHOOK: Lineage: tx1.b EXPRESSION 
[(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col2, 
type:string, comment:), ]
+POSTHOOK: Lineage: tx1.c EXPRESSION 
[(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col3, 
type:string, comment:), ]
+PREHOOK: query: select  sum(c),
+        grouping(b),
+       'NULL,1' as expected
+from    tx1
+where  a<0
+group by rollup (b)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tx1
+#### A masked pattern was here ####
+POSTHOOK: query: select  sum(c),
+        grouping(b),
+       'NULL,1' as expected
+from    tx1
+where  a<0
+group by rollup (b)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tx1
+#### A masked pattern was here ####
+NULL   1       NULL,1
+PREHOOK: query: select  sum(c),
+        grouping(b),
+       '1,1 and 1,0' as expected
+from    tx1
+group by rollup (b)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tx1
+#### A masked pattern was here ####
+POSTHOOK: query: select  sum(c),
+        grouping(b),
+       '1,1 and 1,0' as expected
+from    tx1
+group by rollup (b)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tx1
+#### A masked pattern was here ####
+1      1       1,1 and 1,0
+1      0       1,1 and 1,0
+PREHOOK: query: create table tx2 (a integer,b integer,c integer,d double,u 
string) stored as orc
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@tx2
+POSTHOOK: query: create table tx2 (a integer,b integer,c integer,d double,u 
string) stored as orc
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@tx2
+PREHOOK: query: explain
+select  sum(c),
+        grouping(b),
+       'NULL,1' as expected
+from    tx2
+where  a<0
+group by a,b grouping sets ((), b, a)
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select  sum(c),
+        grouping(b),
+       'NULL,1' as expected
+from    tx2
+where  a<0
+group by a,b grouping sets ((), b, a)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Map Reduce
+      Map Operator Tree:
+          TableScan
+            alias: tx2
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column 
stats: NONE
+            Filter Operator
+              predicate: (a < 0) (type: boolean)
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column 
stats: NONE
+              Select Operator
+                expressions: a (type: int), b (type: int), c (type: int)
+                outputColumnNames: _col0, _col1, _col2
+                Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL 
Column stats: NONE
+                Group By Operator
+                  aggregations: sum(_col2)
+                  keys: _col0 (type: int), _col1 (type: int), 0 (type: int)
+                  mode: hash
+                  outputColumnNames: _col0, _col1, _col2, _col3
+                  Statistics: Num rows: 3 Data size: 0 Basic stats: PARTIAL 
Column stats: NONE
+                  Reduce Output Operator
+                    key expressions: _col0 (type: int), _col1 (type: int), 
_col2 (type: int)
+                    sort order: +++
+                    Map-reduce partition columns: _col0 (type: int), _col1 
(type: int), _col2 (type: int)
+                    Statistics: Num rows: 3 Data size: 0 Basic stats: PARTIAL 
Column stats: NONE
+                    value expressions: _col3 (type: bigint)
+      Execution mode: vectorized
+      Reduce Operator Tree:
+        Group By Operator
+          aggregations: sum(VALUE._col0)
+          keys: KEY._col0 (type: int), KEY._col1 (type: int), KEY._col2 (type: 
int)
+          mode: mergepartial
+          outputColumnNames: _col0, _col1, _col2, _col3
+          Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column 
stats: NONE
+          Select Operator
+            expressions: _col3 (type: bigint), grouping(_col2, 0) (type: int), 
'NULL,1' (type: string)
+            outputColumnNames: _col0, _col1, _col2
+            Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column 
stats: NONE
+            File Output Operator
+              compressed: false
+              Statistics: Num rows: 1 Data size: 0 Basic stats: PARTIAL Column 
stats: NONE
+              table:
+                  input format: 
org.apache.hadoop.mapred.SequenceFileInputFormat
+                  output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select sum(c),'NULL' as expected
+from tx2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tx2
+#### A masked pattern was here ####
+POSTHOOK: query: select sum(c),'NULL' as expected
+from tx2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tx2
+#### A masked pattern was here ####
+NULL   NULL
+PREHOOK: query: select  sum(c),
+       max(u),
+       'asd',
+        grouping(b),
+       'NULL,1' as expected
+from    tx2
+where  a<0
+group by a,b,d grouping sets ((), b, a, d)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tx2
+#### A masked pattern was here ####
+POSTHOOK: query: select  sum(c),
+       max(u),
+       'asd',
+        grouping(b),
+       'NULL,1' as expected
+from    tx2
+where  a<0
+group by a,b,d grouping sets ((), b, a, d)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tx2
+#### A masked pattern was here ####
+NULL   NULL    asd     1       NULL,1

http://git-wip-us.apache.org/repos/asf/hive/blob/8123c71f/ql/src/test/results/clientpositive/llap/groupby_rollup_empty.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/groupby_rollup_empty.q.out 
b/ql/src/test/results/clientpositive/llap/groupby_rollup_empty.q.out
new file mode 100644
index 0000000..061b0d7
--- /dev/null
+++ b/ql/src/test/results/clientpositive/llap/groupby_rollup_empty.q.out
@@ -0,0 +1,234 @@
+PREHOOK: query: drop table if exists tx1
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists tx1
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: drop table if exists tx2
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: drop table if exists tx2
+POSTHOOK: type: DROPTABLE
+PREHOOK: query: create table tx1 (a integer,b integer,c integer)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@tx1
+POSTHOOK: query: create table tx1 (a integer,b integer,c integer)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@tx1
+PREHOOK: query: select sum(c)
+from tx1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tx1
+#### A masked pattern was here ####
+POSTHOOK: query: select sum(c)
+from tx1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tx1
+#### A masked pattern was here ####
+NULL
+PREHOOK: query: select  sum(c),
+        grouping(b),
+       'NULL,1' as expected
+from    tx1
+where  a<0
+group by a,b grouping sets ((), b, a)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tx1
+#### A masked pattern was here ####
+POSTHOOK: query: select  sum(c),
+        grouping(b),
+       'NULL,1' as expected
+from    tx1
+where  a<0
+group by a,b grouping sets ((), b, a)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tx1
+#### A masked pattern was here ####
+NULL   1       NULL,1
+PREHOOK: query: select  sum(c),
+        grouping(b),
+       'NULL,1' as expected
+from    tx1
+where  a<0
+group by rollup (b)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tx1
+#### A masked pattern was here ####
+POSTHOOK: query: select  sum(c),
+        grouping(b),
+       'NULL,1' as expected
+from    tx1
+where  a<0
+group by rollup (b)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tx1
+#### A masked pattern was here ####
+NULL   1       NULL,1
+PREHOOK: query: insert into tx1 values (1,1,1)
+PREHOOK: type: QUERY
+PREHOOK: Output: default@tx1
+POSTHOOK: query: insert into tx1 values (1,1,1)
+POSTHOOK: type: QUERY
+POSTHOOK: Output: default@tx1
+POSTHOOK: Lineage: tx1.a EXPRESSION 
[(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col1, 
type:string, comment:), ]
+POSTHOOK: Lineage: tx1.b EXPRESSION 
[(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col2, 
type:string, comment:), ]
+POSTHOOK: Lineage: tx1.c EXPRESSION 
[(values__tmp__table__1)values__tmp__table__1.FieldSchema(name:tmp_values_col3, 
type:string, comment:), ]
+PREHOOK: query: select  sum(c),
+        grouping(b),
+       'NULL,1' as expected
+from    tx1
+where  a<0
+group by rollup (b)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tx1
+#### A masked pattern was here ####
+POSTHOOK: query: select  sum(c),
+        grouping(b),
+       'NULL,1' as expected
+from    tx1
+where  a<0
+group by rollup (b)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tx1
+#### A masked pattern was here ####
+NULL   1       NULL,1
+PREHOOK: query: select  sum(c),
+        grouping(b),
+       '1,1 and 1,0' as expected
+from    tx1
+group by rollup (b)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tx1
+#### A masked pattern was here ####
+POSTHOOK: query: select  sum(c),
+        grouping(b),
+       '1,1 and 1,0' as expected
+from    tx1
+group by rollup (b)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tx1
+#### A masked pattern was here ####
+1      1       1,1 and 1,0
+1      0       1,1 and 1,0
+PREHOOK: query: create table tx2 (a integer,b integer,c integer,d double,u 
string) stored as orc
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@tx2
+POSTHOOK: query: create table tx2 (a integer,b integer,c integer,d double,u 
string) stored as orc
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@tx2
+PREHOOK: query: explain
+select  sum(c),
+        grouping(b),
+       'NULL,1' as expected
+from    tx2
+where  a<0
+group by a,b grouping sets ((), b, a)
+PREHOOK: type: QUERY
+POSTHOOK: query: explain
+select  sum(c),
+        grouping(b),
+       'NULL,1' as expected
+from    tx2
+where  a<0
+group by a,b grouping sets ((), b, a)
+POSTHOOK: type: QUERY
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: tx2
+                  Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE 
Column stats: NONE
+                  Filter Operator
+                    predicate: (a < 0) (type: boolean)
+                    Statistics: Num rows: 1 Data size: 12 Basic stats: 
COMPLETE Column stats: NONE
+                    Select Operator
+                      expressions: a (type: int), b (type: int), c (type: int)
+                      outputColumnNames: _col0, _col1, _col2
+                      Statistics: Num rows: 1 Data size: 12 Basic stats: 
COMPLETE Column stats: NONE
+                      Group By Operator
+                        aggregations: sum(_col2)
+                        keys: _col0 (type: int), _col1 (type: int), 0 (type: 
int)
+                        mode: hash
+                        outputColumnNames: _col0, _col1, _col2, _col3
+                        Statistics: Num rows: 3 Data size: 36 Basic stats: 
COMPLETE Column stats: NONE
+                        Reduce Output Operator
+                          key expressions: _col0 (type: int), _col1 (type: 
int), _col2 (type: int)
+                          sort order: +++
+                          Map-reduce partition columns: _col0 (type: int), 
_col1 (type: int), _col2 (type: int)
+                          Statistics: Num rows: 3 Data size: 36 Basic stats: 
COMPLETE Column stats: NONE
+                          value expressions: _col3 (type: bigint)
+            Execution mode: vectorized, llap
+            LLAP IO: all inputs
+        Reducer 2 
+            Execution mode: vectorized, llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: sum(VALUE._col0)
+                keys: KEY._col0 (type: int), KEY._col1 (type: int), KEY._col2 
(type: int)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1, _col2, _col3
+                Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE 
Column stats: NONE
+                Select Operator
+                  expressions: _col3 (type: bigint), grouping(_col2, 0) (type: 
int), 'NULL,1' (type: string)
+                  outputColumnNames: _col0, _col1, _col2
+                  Statistics: Num rows: 1 Data size: 12 Basic stats: COMPLETE 
Column stats: NONE
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 1 Data size: 12 Basic stats: 
COMPLETE Column stats: NONE
+                    table:
+                        input format: 
org.apache.hadoop.mapred.SequenceFileInputFormat
+                        output format: 
org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                        serde: 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: select sum(c),'NULL' as expected
+from tx2
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tx2
+#### A masked pattern was here ####
+POSTHOOK: query: select sum(c),'NULL' as expected
+from tx2
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tx2
+#### A masked pattern was here ####
+NULL   NULL
+PREHOOK: query: select  sum(c),
+       max(u),
+       'asd',
+        grouping(b),
+       'NULL,1' as expected
+from    tx2
+where  a<0
+group by a,b,d grouping sets ((), b, a, d)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tx2
+#### A masked pattern was here ####
+POSTHOOK: query: select  sum(c),
+       max(u),
+       'asd',
+        grouping(b),
+       'NULL,1' as expected
+from    tx2
+where  a<0
+group by a,b,d grouping sets ((), b, a, d)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tx2
+#### A masked pattern was here ####
+NULL   NULL    asd     1       NULL,1

Reply via email to