Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/QBJoinTree.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/QBJoinTree.java?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/QBJoinTree.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/QBJoinTree.java Fri Nov  7 20:41:34 2014
@@ -32,7 +32,7 @@ import org.apache.hadoop.hive.ql.plan.Op
  * Internal representation of the join tree.
  *
  */
-public class QBJoinTree implements Serializable{
+public class QBJoinTree implements Serializable, Cloneable {
   private static final long serialVersionUID = 1L;
   private String leftAlias;
   private String[] rightAliases;
@@ -363,4 +363,70 @@ public class QBJoinTree implements Seria
   public List<ASTNode> getPostJoinFilters() {
     return postJoinFilters;
   }
+
+  @Override
+  public QBJoinTree clone() throws CloneNotSupportedException {
+    QBJoinTree cloned = new QBJoinTree();
+
+    // shallow copy aliasToOpInfo, we won't want to clone the operator tree here
+    cloned.setAliasToOpInfo(aliasToOpInfo == null ? null :
+        new HashMap<String, Operator<? extends OperatorDesc>>(aliasToOpInfo));
+
+    cloned.setBaseSrc(baseSrc == null ? null : baseSrc.clone());
+
+    // shallow copy ASTNode
+    cloned.setExpressions(expressions);
+    cloned.setFilters(filters);
+    cloned.setFiltersForPushing(filtersForPushing);
+
+    // clone filterMap
+    int[][] clonedFilterMap = filterMap == null ? null : new int[filterMap.length][];
+    if (filterMap != null) {
+      for (int i = 0; i < filterMap.length; i++) {
+        clonedFilterMap[i] = filterMap[i] == null ? null : filterMap[i].clone();
+      }
+    }
+    cloned.setFilterMap(clonedFilterMap);
+
+    cloned.setId(id);
+
+    // clone joinCond
+    JoinCond[] clonedJoinCond = joinCond == null ? null : new JoinCond[joinCond.length];
+    if (joinCond != null) {
+      for (int i = 0; i < joinCond.length; i++) {
+        if(joinCond[i] == null) {
+          continue;
+        }
+        JoinCond clonedCond = new JoinCond();
+        clonedCond.setJoinType(joinCond[i].getJoinType());
+        clonedCond.setLeft(joinCond[i].getLeft());
+        clonedCond.setPreserved(joinCond[i].getPreserved());
+        clonedCond.setRight(joinCond[i].getRight());
+        clonedJoinCond[i] = clonedCond;
+      }
+    }
+    cloned.setJoinCond(clonedJoinCond);
+
+    cloned.setJoinSrc(joinSrc == null ? null : joinSrc.clone());
+    cloned.setLeftAlias(leftAlias);
+    cloned.setLeftAliases(leftAliases == null ? null : leftAliases.clone());
+    cloned.setMapAliases(mapAliases == null ? null : new ArrayList<String>(mapAliases));
+    cloned.setMapSideJoin(mapSideJoin);
+    cloned.setNoOuterJoin(noOuterJoin);
+    cloned.setNoSemiJoin(noSemiJoin);
+    cloned.setNullSafes(nullsafes == null ? null : new ArrayList<Boolean>(nullsafes));
+    cloned.setRightAliases(rightAliases == null ? null : rightAliases.clone());
+    cloned.setStreamAliases(streamAliases == null ? null : new ArrayList<String>(streamAliases));
+
+    // clone postJoinFilters
+    for (ASTNode filter : postJoinFilters) {
+      cloned.getPostJoinFilters().add(filter);
+    }
+    // clone rhsSemijoin
+    for (Entry<String, ArrayList<ASTNode>> entry : rhsSemijoin.entrySet()) {
+      cloned.addRHSSemijoinColumns(entry.getKey(), entry.getValue());
+    }
+
+    return cloned;
+  }
 }
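
The new clone() above mixes copy depths: filterMap and joinCond are duplicated, while the ASTNode-backed lists (expressions, filters, filtersForPushing, postJoinFilters) and the operators referenced from aliasToOpInfo remain shared with the original. A minimal sketch, not part of the patch, of what a caller can rely on, assuming getter names that mirror the setters used in clone():

    // Illustration only; getter names are assumptions matching the setters above.
    void checkCloneSemantics(QBJoinTree joinTree) throws CloneNotSupportedException {
      QBJoinTree copy = joinTree.clone();
      assert copy.getJoinCond() != joinTree.getJoinCond();       // arrays are duplicated
      assert copy.getExpressions() == joinTree.getExpressions(); // ASTNode lists are shared
    }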

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Fri Nov  7 20:41:34 2014
@@ -20,34 +20,12 @@ package org.apache.hadoop.hive.ql.parse;
 
 import static org.apache.hadoop.hive.conf.HiveConf.ConfVars.HIVESTATSDBCLASS;
 
-import java.io.IOException;
-import java.io.Serializable;
-import java.lang.reflect.Field;
-import java.lang.reflect.InvocationTargetException;
-import java.lang.reflect.UndeclaredThrowableException;
-import java.math.BigDecimal;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.BitSet;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.LinkedHashMap;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Set;
-import java.util.TreeSet;
-import java.util.UUID;
-import java.util.concurrent.atomic.AtomicInteger;
-import java.util.regex.Pattern;
-import java.util.regex.PatternSyntaxException;
-
 import com.google.common.annotations.VisibleForTesting;
-
-import net.hydromatic.optiq.SchemaPlus;
-import net.hydromatic.optiq.tools.Frameworks;
+import com.google.common.base.Function;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableList.Builder;
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.Lists;
 
 import org.antlr.runtime.ClassicToken;
 import org.antlr.runtime.Token;
@@ -122,7 +100,6 @@ import org.apache.hadoop.hive.ql.metadat
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.metadata.VirtualColumn;
 import org.apache.hadoop.hive.ql.optimizer.Optimizer;
-import org.apache.hadoop.hive.ql.optimizer.unionproc.UnionProcContext;
 import org.apache.hadoop.hive.ql.optimizer.optiq.HiveDefaultRelMetadataProvider;
 import org.apache.hadoop.hive.ql.optimizer.optiq.HiveOptiqUtil;
 import org.apache.hadoop.hive.ql.optimizer.optiq.HiveTypeSystemImpl;
@@ -146,6 +123,7 @@ import org.apache.hadoop.hive.ql.optimiz
 import org.apache.hadoop.hive.ql.optimizer.optiq.translator.RexNodeConverter;
 import org.apache.hadoop.hive.ql.optimizer.optiq.translator.SqlFunctionConverter;
 import org.apache.hadoop.hive.ql.optimizer.optiq.translator.TypeConverter;
+import org.apache.hadoop.hive.ql.optimizer.unionproc.UnionProcContext;
 import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.tableSpec.SpecType;
 import org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.OrderExpression;
 import org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.OrderSpec;
@@ -259,6 +237,9 @@ import org.eigenbase.rel.rules.LoptOptim
 import org.eigenbase.rel.rules.MergeFilterRule;
 import org.eigenbase.rel.rules.PushFilterPastProjectRule;
 import org.eigenbase.rel.rules.PushFilterPastSetOpRule;
+import org.eigenbase.rel.rules.PushSemiJoinPastFilterRule;
+import org.eigenbase.rel.rules.PushSemiJoinPastJoinRule;
+import org.eigenbase.rel.rules.PushSemiJoinPastProjectRule;
 import org.eigenbase.rel.rules.SemiJoinRel;
 import org.eigenbase.rel.rules.TransitivePredicatesOnJoinRule;
 import org.eigenbase.relopt.RelOptCluster;
@@ -276,30 +257,51 @@ import org.eigenbase.reltype.RelDataType
 import org.eigenbase.reltype.RelDataTypeFactory;
 import org.eigenbase.reltype.RelDataTypeField;
 import org.eigenbase.rex.RexBuilder;
+import org.eigenbase.rex.RexFieldCollation;
 import org.eigenbase.rex.RexInputRef;
 import org.eigenbase.rex.RexNode;
 import org.eigenbase.rex.RexUtil;
 import org.eigenbase.rex.RexWindowBound;
-import org.eigenbase.rex.RexFieldCollation;
 import org.eigenbase.sql.SqlAggFunction;
-import org.eigenbase.sql.SqlWindow;
-import org.eigenbase.sql.parser.SqlParserPos;
-import org.eigenbase.sql.type.SqlTypeName;
-import org.eigenbase.sql2rel.RelFieldTrimmer;
 import org.eigenbase.sql.SqlCall;
 import org.eigenbase.sql.SqlExplainLevel;
 import org.eigenbase.sql.SqlKind;
-import org.eigenbase.sql.SqlNode;
 import org.eigenbase.sql.SqlLiteral;
+import org.eigenbase.sql.SqlNode;
+import org.eigenbase.sql.SqlWindow;
+import org.eigenbase.sql.parser.SqlParserPos;
+import org.eigenbase.sql.type.SqlTypeName;
+import org.eigenbase.sql2rel.RelFieldTrimmer;
 import org.eigenbase.util.CompositeList;
 import org.eigenbase.util.ImmutableIntList;
 import org.eigenbase.util.Pair;
 
-import com.google.common.base.Function;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableList.Builder;
-import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Lists;
+import java.io.IOException;
+import java.io.Serializable;
+import java.lang.reflect.Field;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.UndeclaredThrowableException;
+import java.math.BigDecimal;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.BitSet;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.LinkedHashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+import java.util.TreeSet;
+import java.util.UUID;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.regex.Pattern;
+import java.util.regex.PatternSyntaxException;
+
+import net.hydromatic.optiq.SchemaPlus;
+import net.hydromatic.optiq.tools.Frameworks;
 
 /**
  * Implementation of the semantic analyzer. It generates the query plan.
@@ -4130,9 +4132,7 @@ public class SemanticAnalyzer extends Ba
         throw new SemanticException(ErrorMsg.INVALID_COLUMN.getMsg(grpbyExpr));
       }
 
-      groupByKeys.add(new ExprNodeColumnDesc(exprInfo.getType(), exprInfo
-          .getInternalName(), exprInfo.getTabAlias(), exprInfo
-          .getIsVirtualCol()));
+      groupByKeys.add(new ExprNodeColumnDesc(exprInfo));
       String field = getColumnInternalName(i);
       outputColumnNames.add(field);
       ColumnInfo oColInfo = new ColumnInfo(field, exprInfo.getType(), "", false);
@@ -6260,8 +6260,10 @@ public class SemanticAnalyzer extends Ba
           if (!("".equals(nm[0])) && nm[1] != null) {
             colName = unescapeIdentifier(colInfo.getAlias()).toLowerCase(); // remove ``
           }
-          String ctasColName = fixCtasColumnName(colName, colInfo, inputRR);
-          col.setName(ctasColName);
+          if (runCBO) {
+            colName = fixCtasColumnName(colName);
+          }
+          col.setName(colName);
           col.setType(colInfo.getType().getTypeName());
           field_schemas.add(col);
         }
@@ -6439,7 +6441,7 @@ public class SemanticAnalyzer extends Ba
     return output;
   }
 
-  private static String fixCtasColumnName(String colName, ColumnInfo colInfo, RowResolver rr) {
+  private static String fixCtasColumnName(String colName) {
     int lastDot = colName.lastIndexOf('.');
     if (lastDot < 0) return colName; // alias is not fully qualified
     String nqColumnName = colName.substring(lastDot + 1);
@@ -6926,9 +6928,7 @@ public class SemanticAnalyzer extends Ba
     for (ColumnInfo colInfo : inputRR.getColumnInfos()) {
       String internalName = getColumnInternalName(i++);
       outputColumns.add(internalName);
-      valueCols.add(new ExprNodeColumnDesc(colInfo.getType(), colInfo
-          .getInternalName(), colInfo.getTabAlias(), colInfo
-          .getIsVirtualCol()));
+      valueCols.add(new ExprNodeColumnDesc(colInfo));
       colExprMap.put(internalName, valueCols
           .get(valueCols.size() - 1));
     }
@@ -7057,8 +7057,7 @@ public class SemanticAnalyzer extends Ba
       ColumnInfo colInfo = columnInfos.get(i);
       String[] nm = inputRR.reverseLookup(colInfo.getInternalName());
       String[] nm2 = inputRR.getAlternateMappings(colInfo.getInternalName());
-      ExprNodeColumnDesc value = new ExprNodeColumnDesc(colInfo.getType(),
-          colInfo.getInternalName(), colInfo.getTabAlias(), colInfo.getIsVirtualCol());
+      ExprNodeColumnDesc value = new ExprNodeColumnDesc(colInfo);
 
       // backtrack can be null when input is script operator
       ExprNodeDesc valueBack = ExprNodeDescUtils.backtrack(value, dummy, input);
@@ -7310,8 +7309,7 @@ public class SemanticAnalyzer extends Ba
       ColumnInfo colInfo = columns.get(i);
       String[] nm = inputRR.reverseLookup(colInfo.getInternalName());
       String[] nm2 = inputRR.getAlternateMappings(colInfo.getInternalName());
-      ExprNodeDesc expr = new ExprNodeColumnDesc(colInfo.getType(),
-          colInfo.getInternalName(), colInfo.getTabAlias(), colInfo.getIsVirtualCol());
+      ExprNodeDesc expr = new ExprNodeColumnDesc(colInfo);
 
       // backtrack can be null when input is script operator
       ExprNodeDesc exprBack = ExprNodeDescUtils.backtrack(expr, dummy, child);
@@ -8399,12 +8397,9 @@ public class SemanticAnalyzer extends Ba
         new HashMap<String, ExprNodeDesc>();
     for (int i = 0; i < columns.size(); i++) {
       ColumnInfo col = columns.get(i);
-      colList.add(new ExprNodeColumnDesc(col.getType(), col.getInternalName(),
-          col.getTabAlias(), col.getIsVirtualCol()));
+      colList.add(new ExprNodeColumnDesc(col));
       columnNames.add(col.getInternalName());
-      columnExprMap.put(col.getInternalName(),
-          new ExprNodeColumnDesc(col.getType(), col.getInternalName(),
-              col.getTabAlias(), col.getIsVirtualCol()));
+      columnExprMap.put(col.getInternalName(), new ExprNodeColumnDesc(col));
     }
     Operator output = putOpInsertMap(OperatorFactory.getAndMakeChild(
         new SelectDesc(colList, columnNames, true), new RowSchema(inputRR
@@ -9261,8 +9256,7 @@ public class SemanticAnalyzer extends Ba
       for (String col : bucketCols) {
         ColumnInfo ci = rwsch.get(alias, col);
         // TODO: change type to the one in the table schema
-        args.add(new ExprNodeColumnDesc(ci.getType(), ci.getInternalName(), ci
-            .getTabAlias(), ci.getIsVirtualCol()));
+        args.add(new ExprNodeColumnDesc(ci));
       }
     } else {
       for (ASTNode expr : ts.getExprs()) {
@@ -9843,8 +9837,7 @@ public class SemanticAnalyzer extends Ba
     for (ColumnInfo col : source.getColumnInfos()) {
       String[] tabCol = source.reverseLookup(col.getInternalName());
       lvForwardRR.put(tabCol[0], tabCol[1], col);
-      ExprNodeDesc colExpr = new ExprNodeColumnDesc(col.getType(), col.getInternalName(),
-          col.getTabAlias(), false);
+      ExprNodeDesc colExpr = new ExprNodeColumnDesc(col);
       colList.add(colExpr);
       colNames.add(colExpr.getName());
       lvfColExprMap.put(col.getInternalName(), colExpr);
@@ -9933,8 +9926,7 @@ public class SemanticAnalyzer extends Ba
       String tableAlias = tableCol[0];
       String colAlias = tableCol[1];
       dest.put(tableAlias, colAlias, newCol);
-      colExprMap.put(internalName, new ExprNodeColumnDesc(c.getType(), c.getInternalName(),
-          c.getTabAlias(), c.getIsVirtualCol()));
+      colExprMap.put(internalName, new ExprNodeColumnDesc(c));
     }
   }
 
@@ -10241,13 +10233,15 @@ public class SemanticAnalyzer extends Ba
     //        be supported and would require additional checks similar to IsQuery?
     boolean isSupportedType =
         qb.getIsQuery() || qb.isCTAS() || cboCtx.type == PreCboCtx.Type.INSERT;
-     boolean result = isSupportedRoot && isSupportedType && createVwDesc == null;
+     boolean noBadTokens = HiveOptiqUtil.validateASTForUnsupportedTokens(ast);
+     boolean result = isSupportedRoot && isSupportedType && createVwDesc == null && noBadTokens;
      if (!result) {
        if (needToLogMessage) {
          String msg = "";
         if (!isSupportedRoot) msg += "doesn't have QUERY or EXPLAIN as root and not a CTAS; ";
          if (!isSupportedType) msg += "is not a query, CTAS, or insert; ";
          if (createVwDesc != null) msg += "has create view; ";
+         if (!noBadTokens) msg += "has unsupported tokens; ";
 
          if (msg.isEmpty()) msg += "has some unspecified limitations; ";
          LOG.info("Not invoking CBO because the statement " + msg.substring(0, 
msg.length() - 2));
@@ -11993,9 +11987,7 @@ public class SemanticAnalyzer extends Ba
      */
     int pos = 0;
     for (ColumnInfo colInfo : colInfoList) {
-        ExprNodeDesc valueColExpr = new ExprNodeColumnDesc(colInfo.getType(), colInfo
-            .getInternalName(), colInfo.getTabAlias(), colInfo
-            .getIsVirtualCol());
+        ExprNodeDesc valueColExpr = new ExprNodeColumnDesc(colInfo);
         valueCols.add(valueColExpr);
         String internalName = SemanticAnalyzer.getColumnInternalName(pos++);
         outputColumnNames.add(internalName);
@@ -12240,9 +12232,7 @@ public class SemanticAnalyzer extends Ba
     RowResolver rsNewRR = new RowResolver();
     int pos = 0;
     for (ColumnInfo colInfo : colInfoList) {
-        ExprNodeDesc valueColExpr = new ExprNodeColumnDesc(colInfo.getType(), colInfo
-            .getInternalName(), colInfo.getTabAlias(), colInfo
-            .getIsVirtualCol());
+        ExprNodeDesc valueColExpr = new ExprNodeColumnDesc(colInfo);
         valueCols.add(valueColExpr);
         String internalName = SemanticAnalyzer.getColumnInternalName(pos++);
         outputColumnNames.add(internalName);
@@ -12642,7 +12632,15 @@ public class SemanticAnalyzer extends Ba
       // TODO: Decorelation of subquery should be done before attempting
       // Partition Pruning; otherwise Expression evaluation may try to execute
       // corelated sub query.
-      basePlan = hepPlan(basePlan, true, mdProvider, new PushFilterPastProjectRule(
+
+      // Push Down Semi Joins
+      basePlan = hepPlan(basePlan, true, mdProvider,
+          PushSemiJoinPastJoinRule.INSTANCE,
+          new PushSemiJoinPastFilterRule(HiveFilterRel.DEFAULT_FILTER_FACTORY),
+          new PushSemiJoinPastProjectRule(HiveProjectRel.DEFAULT_PROJECT_FACTORY));
+
+      basePlan = hepPlan(basePlan, true, mdProvider,
+          new PushFilterPastProjectRule(
          FilterRelBase.class, HiveFilterRel.DEFAULT_FILTER_FACTORY, HiveProjectRel.class,
           HiveProjectRel.DEFAULT_PROJECT_FACTORY), new PushFilterPastSetOpRule(
           HiveFilterRel.DEFAULT_FILTER_FACTORY), new MergeFilterRule(

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeColumnDesc.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeColumnDesc.java?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeColumnDesc.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeColumnDesc.java Fri Nov  7 20:41:34 2014
@@ -18,14 +18,15 @@
 
 package org.apache.hadoop.hive.ql.plan;
 
-import java.io.Serializable;
-import java.util.ArrayList;
-import java.util.List;
-
 import org.apache.commons.lang.builder.HashCodeBuilder;
+import org.apache.hadoop.hive.ql.exec.ColumnInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.List;
+
 /**
  * ExprNodeColumnDesc.
  *
@@ -56,6 +57,10 @@ public class ExprNodeColumnDesc extends 
   public ExprNodeColumnDesc() {
   }
 
+  public ExprNodeColumnDesc(ColumnInfo ci) {
+    this(ci.getType(), ci.getInternalName(), ci.getTabAlias(), ci.getIsVirtualCol());
+  }
+
   public ExprNodeColumnDesc(TypeInfo typeInfo, String column, String tabAlias,
       boolean isPartitionColOrVirtualCol) {
     super(typeInfo);

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeDescUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeDescUtils.java?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeDescUtils.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeDescUtils.java Fri Nov  7 20:41:34 2014
@@ -193,6 +193,7 @@ public class ExprNodeDescUtils {
   /**
   * Convert expressions in current operator to those in terminal operator, which
    * is an ancestor of current or null (back to top operator).
+   * The result may contain null values for non-traceable exprs.
    */
   public static ArrayList<ExprNodeDesc> backtrack(List<ExprNodeDesc> sources,
       Operator<?> current, Operator<?> terminal) throws SemanticException {
@@ -396,29 +397,34 @@ public class ExprNodeDescUtils {
         * Get Map of ExprNodeColumnDesc HashCode to ExprNodeColumnDesc.
         * 
         * @param exprDesc
-        * @param hashCodeTocolumnDescMap
+        * @param hashCodeToColumnDescMap
        *            Assumption: If two ExprNodeColumnDesc have same hash code then
         *            they are logically referring to same projection
         */
        public static void getExprNodeColumnDesc(ExprNodeDesc exprDesc,
-                       Map<Integer, ExprNodeDesc> hashCodeTocolumnDescMap) {
+                       Map<Integer, ExprNodeDesc> hashCodeToColumnDescMap) {
                if (exprDesc instanceof ExprNodeColumnDesc) {
-                       hashCodeTocolumnDescMap.put(
-                                       ((ExprNodeColumnDesc) exprDesc).hashCode(),
-                                       ((ExprNodeColumnDesc) exprDesc));
+                       hashCodeToColumnDescMap.put(exprDesc.hashCode(), exprDesc);
                } else if (exprDesc instanceof ExprNodeColumnListDesc) {
-                       for (ExprNodeDesc child : ((ExprNodeColumnListDesc) exprDesc)
-                                       .getChildren()) {
-                               getExprNodeColumnDesc(child, hashCodeTocolumnDescMap);
+                       for (ExprNodeDesc child : exprDesc.getChildren()) {
+                               getExprNodeColumnDesc(child, hashCodeToColumnDescMap);
                        }
                } else if (exprDesc instanceof ExprNodeGenericFuncDesc) {
-                       for (ExprNodeDesc child : ((ExprNodeGenericFuncDesc) exprDesc)
-                                       .getChildren()) {
-                               getExprNodeColumnDesc(child, hashCodeTocolumnDescMap);
+                       for (ExprNodeDesc child : exprDesc.getChildren()) {
+                               getExprNodeColumnDesc(child, hashCodeToColumnDescMap);
                        }
                } else if (exprDesc instanceof ExprNodeFieldDesc) {
                        getExprNodeColumnDesc(((ExprNodeFieldDesc) exprDesc).getDesc(),
-                                       hashCodeTocolumnDescMap);
+                                       hashCodeToColumnDescMap);
                }
        }
+
+  public static boolean isAllConstants(List<ExprNodeDesc> value) {
+    for (ExprNodeDesc expr : value) {
+      if (!(expr instanceof ExprNodeConstantDesc)) {
+        return false;
+      }
+    }
+    return true;
+  }
 }
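
The new isAllConstants() helper returns true only when every expression in the list is an ExprNodeConstantDesc; the predicate-pushdown changes later in this commit use it to recognize alias-less predicates such as a folded WHERE false so they can still be propagated across a join. A minimal usage sketch (not part of the patch):

    // A folded "WHERE false" predicate: one boolean constant, no column references.
    List<ExprNodeDesc> whereFalse = Arrays.<ExprNodeDesc>asList(
        new ExprNodeConstantDesc(TypeInfoFactory.booleanTypeInfo, Boolean.FALSE));
    boolean allConstants = ExprNodeDescUtils.isAllConstants(whereFalse); // true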

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerInfo.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerInfo.java?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerInfo.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/ppd/ExprWalkerInfo.java Fri Nov  7 20:41:34 2014
@@ -19,16 +19,13 @@ package org.apache.hadoop.hive.ql.ppd;
 
 import java.util.ArrayList;
 import java.util.HashMap;
-import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
-import java.util.Set;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.ql.exec.Operator;
-import org.apache.hadoop.hive.ql.lib.Node;
 import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
 import org.apache.hadoop.hive.ql.parse.RowResolver;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
@@ -143,7 +140,7 @@ public class ExprWalkerInfo implements N
   * @return converted expression for given node. If there is none then returns
    *         null.
    */
-  public ExprNodeDesc getConvertedNode(Node nd) {
+  public ExprNodeDesc getConvertedNode(ExprNodeDesc nd) {
     ExprInfo ei = exprInfoMap.get(nd);
     if (ei == null) {
       return null;
@@ -238,11 +235,11 @@ public class ExprWalkerInfo implements N
    * @param expr
    */
   public void addFinalCandidate(ExprNodeDesc expr) {
-    String alias = getAlias(expr);
-    if (pushdownPreds.get(alias) == null) {
-      pushdownPreds.put(alias, new ArrayList<ExprNodeDesc>());
-    }
-    pushdownPreds.get(alias).add(expr);
+    addFinalCandidate(getAlias(expr), expr);
+  }
+
+  public void addFinalCandidate(String alias, ExprNodeDesc expr) {
+    getPushdownPreds(alias).add(expr);
   }
 
   /**
@@ -252,10 +249,7 @@ public class ExprWalkerInfo implements N
    * @param pushDowns
    */
   public void addPushDowns(String alias, List<ExprNodeDesc> pushDowns) {
-    if (pushdownPreds.get(alias) == null) {
-      pushdownPreds.put(alias, new ArrayList<ExprNodeDesc>());
-    }
-    pushdownPreds.get(alias).addAll(pushDowns);
+    getPushdownPreds(alias).addAll(pushDowns);
   }
 
   /**
@@ -269,6 +263,26 @@ public class ExprWalkerInfo implements N
     return pushdownPreds;
   }
 
+  private List<ExprNodeDesc> getPushdownPreds(String alias) {
+    List<ExprNodeDesc> predicates = pushdownPreds.get(alias);
+    if (predicates == null) {
+      pushdownPreds.put(alias, predicates = new ArrayList<ExprNodeDesc>());
+    }
+    return predicates;
+  }
+
+  public boolean hasAnyCandidates() {
+    if (pushdownPreds == null || pushdownPreds.isEmpty()) {
+      return false;
+    }
+    for (List<ExprNodeDesc> exprs : pushdownPreds.values()) {
+      if (!exprs.isEmpty()) {
+        return true;
+      }
+    }
+    return false;
+  }
+
   /**
    * Adds the specified expr as a non-final candidate
    *

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java Fri Nov  7 20:41:34 2014
@@ -31,7 +31,6 @@ import java.util.Stack;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.ql.exec.ColumnInfo;
 import org.apache.hadoop.hive.ql.exec.FilterOperator;
 import org.apache.hadoop.hive.ql.exec.JoinOperator;
 import org.apache.hadoop.hive.ql.exec.LateralViewJoinOperator;
@@ -48,9 +47,6 @@ import org.apache.hadoop.hive.ql.lib.Nod
 import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
 import org.apache.hadoop.hive.ql.metadata.HiveStoragePredicateHandler;
 import org.apache.hadoop.hive.ql.metadata.Table;
-import org.apache.hadoop.hive.ql.parse.ASTNode;
-import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
-import org.apache.hadoop.hive.ql.parse.HiveParser;
 import org.apache.hadoop.hive.ql.parse.OpParseContext;
 import org.apache.hadoop.hive.ql.parse.RowResolver;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
@@ -342,14 +338,12 @@ public final class OpProcFactory {
       super.process(nd, stack, procCtx, nodeOutputs);
       OpWalkerInfo owi = (OpWalkerInfo) procCtx;
       ExprWalkerInfo prunedPred = owi.getPrunedPreds((Operator<? extends OperatorDesc>) nd);
-      if (prunedPred == null) {
+      if (prunedPred == null || !prunedPred.hasAnyCandidates()) {
         return null;
       }
       Map<String, List<ExprNodeDesc>> candidates = prunedPred.getFinalCandidates();
-      if (candidates != null && !candidates.isEmpty()) {
-        createFilter((Operator)nd, prunedPred, owi);
-        candidates.clear();
-      }
+      createFilter((Operator)nd, prunedPred, owi);
+      candidates.clear();
       return null;
     }
 
@@ -476,7 +470,12 @@ public final class OpProcFactory {
         Set<String> toRemove = new HashSet<String>();
        // we don't push down any expressions that refer to aliases that can't
         // be pushed down per getQualifiedAliases
-        for (String key : prunePreds.getFinalCandidates().keySet()) {
+        for (Entry<String, List<ExprNodeDesc>> entry : prunePreds.getFinalCandidates().entrySet()) {
+          String key = entry.getKey();
+          List<ExprNodeDesc> value = entry.getValue();
+          if (key == null && ExprNodeDescUtils.isAllConstants(value)) {
+            continue;   // propagate constants
+          }
           if (!aliases.contains(key)) {
             toRemove.add(key);
           }
@@ -517,199 +516,6 @@ public final class OpProcFactory {
       return getQualifiedAliases((JoinOperator) nd, owi.getRowResolver(nd));
     }
 
-    @Override
-    protected Object handlePredicates(Node nd, ExprWalkerInfo prunePreds, OpWalkerInfo owi)
-        throws SemanticException {
-      if (HiveConf.getBoolVar(owi.getParseContext().getConf(),
-          HiveConf.ConfVars.HIVEPPDRECOGNIZETRANSITIVITY)) {
-        applyFilterTransitivity((JoinOperator) nd, owi);
-      }
-      return super.handlePredicates(nd, prunePreds, owi);
-    }
-
-    /**
-     * Adds additional pushdown predicates for a join operator by replicating
-     * filters transitively over all the equijoin conditions.
-     *
-     * If we have a predicate "t.col=1" and the equijoin conditions
-     * "t.col=s.col" and "t.col=u.col", we add the filters "s.col=1" and
-     * "u.col=1". Note that this does not depend on the types of joins (ie.
-     * inner, left/right/full outer) between the tables s, t and u because if
-     * a predicate, eg. "t.col=1" is present in getFinalCandidates() at this
-     * point, we have already verified that it can be pushed down, so any rows
-     * emitted must satisfy s.col=t.col=u.col=1 and replicating the filters
-     * like this is ok.
-     */
-    private void applyFilterTransitivity(JoinOperator nd, OpWalkerInfo owi)
-        throws SemanticException {
-      ExprWalkerInfo prunePreds =
-          owi.getPrunedPreds(nd);
-      if (prunePreds != null) {
-        // We want to use the row resolvers of the parents of the join op
-        // because the rowresolver refers to the output columns of an operator
-        // and the filters at this point refer to the input columns of the join
-        // operator.
-        Map<String, RowResolver> aliasToRR =
-            new HashMap<String, RowResolver>();
-        for (Operator<? extends OperatorDesc> o : (nd).getParentOperators()) {
-          for (String alias : owi.getRowResolver(o).getTableNames()){
-            aliasToRR.put(alias, owi.getRowResolver(o));
-          }
-        }
-
-        // eqExpressions is a list of ArrayList<ASTNode>'s, one for each table
-        // in the join. Then for each i, j and k, the join condition is that
-        // eqExpressions[i][k]=eqExpressions[j][k] (*) (ie. the columns referenced
-        // by the corresponding ASTNodes are equal). For example, if the query
-        // was SELECT * FROM a join b on a.col=b.col and a.col2=b.col2 left
-        // outer join c on b.col=c.col and b.col2=c.col2 WHERE c.col=1,
-        // eqExpressions would be [[a.col1, a.col2], [b.col1, b.col2],
-        // [c.col1, c.col2]].
-        //
-        // numEqualities is the number of equal columns in each equality
-        // "chain" and numColumns is the number of such chains.
-        //
-        // Note that (*) is guaranteed to be true for the
-        // join operator: if the equijoin condititions can't be expressed in
-        // these equal-length lists of equal columns (for example if we had the
-        // query SELECT * FROM a join b on a.col=b.col and a.col2=b.col2 left
-        // outer join c on b.col=c.col), more than one join operator is used.
-        ArrayList<ArrayList<ASTNode>> eqExpressions =
-            owi.getParseContext().getJoinContext().get(nd).getExpressions();
-        int numColumns = eqExpressions.size();
-        int numEqualities = eqExpressions.get(0).size();
-
-        // oldFilters contains the filters to be pushed down
-        Map<String, List<ExprNodeDesc>> oldFilters =
-            prunePreds.getFinalCandidates();
-        Map<String, List<ExprNodeDesc>> newFilters =
-            new HashMap<String, List<ExprNodeDesc>>();
-
-        // We loop through for each chain of equalities
-        for (int i=0; i<numEqualities; i++) {
-          // equalColumns[i] is the ColumnInfo corresponding to the ith term
-          // of the equality or null if the term is not a simple column
-          // reference
-          ColumnInfo[] equalColumns=new ColumnInfo[numColumns];
-          for (int j=0; j<numColumns; j++) {
-            equalColumns[j] =
-                getColumnInfoFromAST(eqExpressions.get(j).get(i), aliasToRR);
-          }
-          for (int j=0; j<numColumns; j++) {
-            for (int k=0; k<numColumns; k++) {
-              if (j != k && equalColumns[j]!= null
-                  && equalColumns[k] != null) {
-                // terms j and k in the equality chain are simple columns,
-                // so we can replace instances of column j with column k
-                // in the filter and ad the replicated filter.
-                ColumnInfo left = equalColumns[j];
-                ColumnInfo right = equalColumns[k];
-                if (oldFilters.get(left.getTabAlias()) != null){
-                  for (ExprNodeDesc expr :
-                    oldFilters.get(left.getTabAlias())) {
-                    // Only replicate the filter if there is exactly one column
-                    // referenced
-                    Set<String> colsreferenced =
-                        new HashSet<String>(expr.getCols());
-                    if (colsreferenced.size() == 1
-                        && colsreferenced.contains(left.getInternalName())){
-                      ExprNodeDesc newexpr = expr.clone();
-                      // Replace the column reference in the filter
-                      replaceColumnReference(newexpr, left.getInternalName(),
-                          right.getInternalName());
-                      if (newFilters.get(right.getTabAlias()) == null) {
-                        newFilters.put(right.getTabAlias(),
-                            new ArrayList<ExprNodeDesc>());
-                      }
-                      newFilters.get(right.getTabAlias()).add(newexpr);
-                    }
-                  }
-                }
-              }
-            }
-          }
-        }
-        // Push where false filter transitively
-        Map<String,List<ExprNodeDesc>> candidates = prunePreds.getNonFinalCandidates();
-        List<ExprNodeDesc> exprs;
-        // where false is not associated with any alias in candidates
-        if (null != candidates && candidates.get(null) != null && ((exprs = candidates.get(null)) != null)) {
-          Iterator<ExprNodeDesc> itr = exprs.iterator();
-          while (itr.hasNext()) {
-            ExprNodeDesc expr = itr.next();
-            if (expr instanceof ExprNodeConstantDesc && Boolean.FALSE.equals(((ExprNodeConstantDesc)expr).getValue())) {
-              // push this 'where false' expr to all aliases
-              for (String alias : aliasToRR.keySet()) {
-                List<ExprNodeDesc> pushedFilters = newFilters.get(alias);
-                if (null == pushedFilters) {
-                  newFilters.put(alias, new ArrayList<ExprNodeDesc>());
-
-                }
-                newFilters.get(alias).add(expr);
-              }
-              // this filter is pushed, we can remove it from non-final candidates.
-              itr.remove();
-            }
-          }
-        }
-        for (Entry<String, List<ExprNodeDesc>> aliasToFilters
-            : newFilters.entrySet()){
-          owi.getPrunedPreds(nd)
-            .addPushDowns(aliasToFilters.getKey(), aliasToFilters.getValue());
-        }
-      }
-    }
-
-    /**
-     * Replaces the ColumnInfo for the column referred to by an ASTNode
-     * representing "table.column" or null if the ASTNode is not in that form
-     */
-    private ColumnInfo getColumnInfoFromAST(ASTNode nd,
-        Map<String, RowResolver> aliastoRR) throws SemanticException {
-      // this bit is messy since we are parsing an ASTNode at this point
-      if (nd.getType()==HiveParser.DOT) {
-        if (nd.getChildCount()==2) {
-          if (nd.getChild(0).getType()==HiveParser.TOK_TABLE_OR_COL
-              && nd.getChild(0).getChildCount()==1
-              && nd.getChild(1).getType()==HiveParser.Identifier){
-            // We unescape the identifiers and make them lower case--this
-            // really shouldn't be done here, but getExpressions gives us the
-            // raw ASTNodes. The same thing is done in SemanticAnalyzer.
-            // parseJoinCondPopulateAlias().
-            String alias = BaseSemanticAnalyzer.unescapeIdentifier(
-                nd.getChild(0).getChild(0).getText().toLowerCase());
-            String column = BaseSemanticAnalyzer.unescapeIdentifier(
-                nd.getChild(1).getText().toLowerCase());
-            RowResolver rr=aliastoRR.get(alias);
-            if (rr == null) {
-              return null;
-            }
-            return rr.get(alias, column);
-          }
-        }
-      }
-      return null;
-    }
-
-    /**
-     * Replaces all instances of oldColumn with newColumn in the
-     * ExprColumnDesc's of the ExprNodeDesc
-     */
-    private void replaceColumnReference(ExprNodeDesc expr,
-        String oldColumn, String newColumn) {
-      if (expr instanceof ExprNodeColumnDesc) {
-        if (((ExprNodeColumnDesc) expr).getColumn().equals(oldColumn)){
-          ((ExprNodeColumnDesc) expr).setColumn(newColumn);
-        }
-      }
-
-      if (expr.getChildren() != null){
-        for (ExprNodeDesc childexpr : expr.getChildren()) {
-          replaceColumnReference(childexpr, oldColumn, newColumn);
-        }
-      }
-    }
-
     /**
      * Figures out the aliases for whom it is safe to push predicates based on
      * ANSI SQL semantics. The join conditions are left associative so "a
@@ -760,6 +566,86 @@ public final class OpProcFactory {
     }
   }
 
+  public static class ReduceSinkPPD extends DefaultPPD implements NodeProcessor {
+    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
+                          Object... nodeOutputs) throws SemanticException {
+      super.process(nd, stack, procCtx, nodeOutputs);
+      Operator<?> operator = (Operator<?>) nd;
+      OpWalkerInfo owi = (OpWalkerInfo) procCtx;
+      if (operator.getNumChild() == 1 &&
+          operator.getChildOperators().get(0) instanceof JoinOperator) {
+        if (HiveConf.getBoolVar(owi.getParseContext().getConf(),
+            HiveConf.ConfVars.HIVEPPDRECOGNIZETRANSITIVITY)) {
+          JoinOperator child = (JoinOperator) operator.getChildOperators().get(0);
+          int targetPos = child.getParentOperators().indexOf(operator);
+          applyFilterTransitivity(child, targetPos, owi);
+        }
+      }
+      return null;
+    }
+
+    /**
+     * Adds additional pushdown predicates for a join operator by replicating
+     * filters transitively over all the equijoin conditions.
+     *
+     * If we have a predicate "t.col=1" and the equijoin conditions
+     * "t.col=s.col" and "t.col=u.col", we add the filters "s.col=1" and
+     * "u.col=1". Note that this does not depend on the types of joins (ie.
+     * inner, left/right/full outer) between the tables s, t and u because if
+     * a predicate, eg. "t.col=1" is present in getFinalCandidates() at this
+     * point, we have already verified that it can be pushed down, so any rows
+     * emitted must satisfy s.col=t.col=u.col=1 and replicating the filters
+     * like this is ok.
+     */
+    private void applyFilterTransitivity(JoinOperator join, int targetPos, OpWalkerInfo owi)
+        throws SemanticException {
+
+      ExprWalkerInfo joinPreds = owi.getPrunedPreds(join);
+      if (joinPreds == null || !joinPreds.hasAnyCandidates()) {
+        return;
+      }
+      Map<String, List<ExprNodeDesc>> oldFilters = joinPreds.getFinalCandidates();
+      Map<String, List<ExprNodeDesc>> newFilters = new HashMap<String, List<ExprNodeDesc>>();
+
+      List<Operator<? extends OperatorDesc>> parentOperators = join.getParentOperators();
+
+      ReduceSinkOperator target = (ReduceSinkOperator) parentOperators.get(targetPos);
+      List<ExprNodeDesc> targetKeys = target.getConf().getKeyCols();
+
+      ExprWalkerInfo rsPreds = owi.getPrunedPreds(target);
+      for (int sourcePos = 0; sourcePos < parentOperators.size(); sourcePos++) {
+        ReduceSinkOperator source = (ReduceSinkOperator) parentOperators.get(sourcePos);
+        List<ExprNodeDesc> sourceKeys = source.getConf().getKeyCols();
+        Set<String> sourceAliases = new HashSet<String>(Arrays.asList(source.getInputAliases()));
+        for (Map.Entry<String, List<ExprNodeDesc>> entry : oldFilters.entrySet()) {
+          if (entry.getKey() == null && ExprNodeDescUtils.isAllConstants(entry.getValue())) {
+            // propagate constants
+            for (String targetAlias : target.getInputAliases()) {
+              rsPreds.addPushDowns(targetAlias, entry.getValue());
+            }
+            continue;
+          }
+          if (!sourceAliases.contains(entry.getKey())) {
+            continue;
+          }
+          for (ExprNodeDesc predicate : entry.getValue()) {
+            ExprNodeDesc backtrack = ExprNodeDescUtils.backtrack(predicate, join, source);
+            if (backtrack == null) {
+              continue;
+            }
+            ExprNodeDesc replaced = ExprNodeDescUtils.replace(backtrack, sourceKeys, targetKeys);
+            if (replaced == null) {
+              continue;
+            }
+            for (String targetAlias : target.getInputAliases()) {
+              rsPreds.addFinalCandidate(targetAlias, replaced);
+            }
+          }
+        }
+      }
+    }
+  }
+
   /**
    * Default processor which just merges its children.
    */
@@ -900,11 +786,10 @@ public final class OpProcFactory {
 
   protected static Object createFilter(Operator op,
       ExprWalkerInfo pushDownPreds, OpWalkerInfo owi) {
-    if (pushDownPreds == null || pushDownPreds.getFinalCandidates() == null
-        || pushDownPreds.getFinalCandidates().size() == 0) {
-      return null;
+    if (pushDownPreds != null && pushDownPreds.hasAnyCandidates()) {
+      return createFilter(op, pushDownPreds.getFinalCandidates(), owi);
     }
-    return createFilter(op, pushDownPreds.getFinalCandidates(), owi);
+    return null;
   }
 
   protected static Object createFilter(Operator op,
@@ -1113,6 +998,10 @@ public final class OpProcFactory {
     return new JoinerPPD();
   }
 
+  public static NodeProcessor getRSProc() {
+    return new ReduceSinkPPD();
+  }
+
   private OpProcFactory() {
     // prevent instantiation
   }

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/ppd/PredicatePushDown.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/ppd/PredicatePushDown.java?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/ppd/PredicatePushDown.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/ppd/PredicatePushDown.java Fri Nov  7 20:41:34 2014
@@ -30,6 +30,7 @@ import org.apache.hadoop.hive.ql.exec.La
 import org.apache.hadoop.hive.ql.exec.LimitOperator;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.PTFOperator;
+import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;
 import org.apache.hadoop.hive.ql.exec.ScriptOperator;
 import org.apache.hadoop.hive.ql.exec.TableScanOperator;
 import org.apache.hadoop.hive.ql.exec.UDTFOperator;
@@ -118,6 +119,9 @@ public class PredicatePushDown implement
     opRules.put(new RuleRegExp("R9",
       LateralViewJoinOperator.getOperatorName() + "%"),
       OpProcFactory.getLVJProc());
+    opRules.put(new RuleRegExp("R10",
+        ReduceSinkOperator.getOperatorName() + "%"),
+        OpProcFactory.getRSProc());
 
     // The dispatcher fires the processor corresponding to the closest matching
     // rule and passes the context along

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/ppd/PredicateTransitivePropagate.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/ppd/PredicateTransitivePropagate.java?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/ppd/PredicateTransitivePropagate.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/ppd/PredicateTransitivePropagate.java Fri Nov  7 20:41:34 2014
@@ -30,7 +30,6 @@ import java.util.Stack;
 import org.apache.hadoop.hive.ql.exec.CommonJoinOperator;
 import org.apache.hadoop.hive.ql.exec.FilterOperator;
 import org.apache.hadoop.hive.ql.exec.JoinOperator;
-import org.apache.hadoop.hive.ql.exec.MapJoinOperator;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.OperatorFactory;
 import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;
@@ -179,40 +178,6 @@ public class PredicateTransitivePropagat
       return null;
     }
 
-    // calculate filter propagation directions for each alias
-    // L<->R for innner/semi join, L->R for left outer join, R->L for right outer join
-    private int[][] getTargets(CommonJoinOperator<JoinDesc> join) {
-      JoinCondDesc[] conds = join.getConf().getConds();
-
-      int aliases = conds.length + 1;
-      Vectors vector = new Vectors(aliases);
-      for (JoinCondDesc cond : conds) {
-        int left = cond.getLeft();
-        int right = cond.getRight();
-        switch (cond.getType()) {
-          case JoinDesc.INNER_JOIN:
-          case JoinDesc.LEFT_SEMI_JOIN:
-            vector.add(left, right);
-            vector.add(right, left);
-            break;
-          case JoinDesc.LEFT_OUTER_JOIN:
-            vector.add(left, right);
-            break;
-          case JoinDesc.RIGHT_OUTER_JOIN:
-            vector.add(right, left);
-            break;
-          case JoinDesc.FULL_OUTER_JOIN:
-            break;
-        }
-      }
-      int[][] result = new int[aliases][];
-      for (int pos = 0 ; pos < aliases; pos++) {
-        // find all targets recursively
-        result[pos] = vector.traverse(pos);
-      }
-      return result;
-    }
-
     // check same filter exists already
     private boolean filterExists(ReduceSinkOperator target, ExprNodeDesc 
replaced) {
       Operator<?> operator = target.getParentOperators().get(0);
@@ -226,6 +191,40 @@ public class PredicateTransitivePropagat
     }
   }
 
+  // calculate filter propagation directions for each alias
+  // L<->R for inner/semi join, L->R for left outer join, R->L for right outer join
+  public static int[][] getTargets(CommonJoinOperator<JoinDesc> join) {
+    JoinCondDesc[] conds = join.getConf().getConds();
+
+    int aliases = conds.length + 1;
+    Vectors vector = new Vectors(aliases);
+    for (JoinCondDesc cond : conds) {
+      int left = cond.getLeft();
+      int right = cond.getRight();
+      switch (cond.getType()) {
+        case JoinDesc.INNER_JOIN:
+        case JoinDesc.LEFT_SEMI_JOIN:
+          vector.add(left, right);
+          vector.add(right, left);
+          break;
+        case JoinDesc.LEFT_OUTER_JOIN:
+          vector.add(left, right);
+          break;
+        case JoinDesc.RIGHT_OUTER_JOIN:
+          vector.add(right, left);
+          break;
+        case JoinDesc.FULL_OUTER_JOIN:
+          break;
+      }
+    }
+    int[][] result = new int[aliases][];
+    for (int pos = 0 ; pos < aliases; pos++) {
+      // find all targets recursively
+      result[pos] = vector.traverse(pos);
+    }
+    return result;
+  }
+
   private static class Vectors {
 
     private Set<Integer>[] vector;
@@ -245,10 +244,11 @@ public class PredicateTransitivePropagat
     public int[] traverse(int pos) {
       Set<Integer> targets = new HashSet<Integer>();
       traverse(targets, pos);
-      return toArray(targets);
+      return toArray(targets, pos);
     }
 
-    private int[] toArray(Set<Integer> values) {
+    private int[] toArray(Set<Integer> values, int pos) {
+      values.remove(pos);
       int index = 0;
       int[] result = new int[values.size()];
       for (int value : values) {

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/ProxyUserAuthenticator.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/ProxyUserAuthenticator.java?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/ProxyUserAuthenticator.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/security/ProxyUserAuthenticator.java Fri Nov  7 20:41:34 2014
@@ -30,6 +30,8 @@ import org.apache.hadoop.security.UserGr
  * but honours a proxy config setting proxy.user.name instead of the
  * current user if set. This allows server processes like webhcat which
  * proxy other users to easily specify an override if allowed.
+ *
+ * It is no longer necessary to use this class with WebHCat as of Hive 0.14.
  */
 public class ProxyUserAuthenticator extends HadoopDefaultAuthenticator {
 

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java Fri Nov  7 20:41:34 2014
@@ -809,6 +809,14 @@ public class SessionState {
       return (ss != null) ? ss.getIsSilent() : isSilent;
     }
 
+    public void logInfo(String info) {
+      logInfo(info, null);
+    }
+
+    public void logInfo(String info, String detail) {
+      LOG.info(info + StringUtils.defaultString(detail));
+    }
+
     public void printInfo(String info) {
       printInfo(info, null);
     }

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java Fri Nov  7 20:41:34 2014
@@ -18,14 +18,10 @@
 
 package org.apache.hadoop.hive.ql.stats;
 
-import java.math.BigDecimal;
-import java.math.BigInteger;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
+import com.google.common.base.Joiner;
+import com.google.common.collect.Lists;
+import com.google.common.math.DoubleMath;
+import com.google.common.math.LongMath;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -42,6 +38,7 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.ql.exec.ColumnInfo;
 import org.apache.hadoop.hive.ql.exec.RowSchema;
 import org.apache.hadoop.hive.ql.exec.TableScanOperator;
+import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.Partition;
@@ -89,8 +86,14 @@ import org.apache.hadoop.hive.serde2.obj
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.tez.mapreduce.hadoop.MRJobConfig;
 
-import com.google.common.base.Joiner;
-import com.google.common.collect.Lists;
+import java.math.BigDecimal;
+import java.math.BigInteger;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
 
 public class StatsUtils {
 
@@ -389,7 +392,7 @@ public class StatsUtils {
       }
 
       if (s <= 0 && rc > 0) {
-        s = rc * avgRowSize;
+        s = safeMult(rc, avgRowSize);
         dataSizes.set(i, s);
       }
     }
@@ -494,7 +497,7 @@ public class StatsUtils {
     long result = 0;
     for (Long l : vals) {
       if (l > 0) {
-        result += l;
+        result = safeAdd(result, l);
       }
     }
     return result;
@@ -1011,12 +1014,10 @@ public class StatsUtils {
     if (colExprMap != null  && rowSchema != null) {
       for (ColumnInfo ci : rowSchema.getSignature()) {
         String outColName = ci.getInternalName();
-        outColName = StatsUtils.stripPrefixFromColumnName(outColName);
         String outTabAlias = ci.getTabAlias();
         ExprNodeDesc end = colExprMap.get(outColName);
         ColStatistics colStat = getColStatisticsFromExpression(conf, parentStats, end);
         if (colStat != null) {
-          outColName = StatsUtils.stripPrefixFromColumnName(outColName);
           colStat.setColumnName(outColName);
           colStat.setTableAlias(outTabAlias);
         }
@@ -1070,7 +1071,6 @@ public class StatsUtils {
       ExprNodeColumnDesc encd = (ExprNodeColumnDesc) end;
       colName = encd.getColumn();
       tabAlias = encd.getTabAlias();
-      colName = stripPrefixFromColumnName(colName);
 
       if (encd.getIsPartitionColOrVirtualCol()) {
 
@@ -1261,6 +1261,7 @@ public class StatsUtils {
       if (cs != null) {
         String colType = cs.getColumnType();
         long nonNullCount = numRows - cs.getNumNulls();
+        double sizeOf = 0;
         if (colType.equalsIgnoreCase(serdeConstants.TINYINT_TYPE_NAME)
             || colType.equalsIgnoreCase(serdeConstants.SMALLINT_TYPE_NAME)
             || colType.equalsIgnoreCase(serdeConstants.INT_TYPE_NAME)
@@ -1268,31 +1269,25 @@ public class StatsUtils {
             || colType.equalsIgnoreCase(serdeConstants.BOOLEAN_TYPE_NAME)
             || colType.equalsIgnoreCase(serdeConstants.FLOAT_TYPE_NAME)
             || colType.equalsIgnoreCase(serdeConstants.DOUBLE_TYPE_NAME)) {
-
-          result += nonNullCount * cs.getAvgColLen();
+          sizeOf = cs.getAvgColLen();
         } else if (colType.equalsIgnoreCase(serdeConstants.STRING_TYPE_NAME)
             || colType.startsWith(serdeConstants.VARCHAR_TYPE_NAME)
             || colType.startsWith(serdeConstants.CHAR_TYPE_NAME)) {
-
           int acl = (int) Math.round(cs.getAvgColLen());
-          result += nonNullCount * JavaDataModel.get().lengthForStringOfLength(acl);
+          sizeOf = JavaDataModel.get().lengthForStringOfLength(acl);
         } else if (colType.equalsIgnoreCase(serdeConstants.BINARY_TYPE_NAME)) {
-
           int acl = (int) Math.round(cs.getAvgColLen());
-          result += nonNullCount * JavaDataModel.get().lengthForByteArrayOfSize(acl);
+          sizeOf = JavaDataModel.get().lengthForByteArrayOfSize(acl);
         } else if (colType.equalsIgnoreCase(serdeConstants.TIMESTAMP_TYPE_NAME)) {
-
-          result += nonNullCount * JavaDataModel.get().lengthOfTimestamp();
+          sizeOf = JavaDataModel.get().lengthOfTimestamp();
         } else if (colType.startsWith(serdeConstants.DECIMAL_TYPE_NAME)) {
-
-          result += nonNullCount * JavaDataModel.get().lengthOfDecimal();
+          sizeOf = JavaDataModel.get().lengthOfDecimal();
         } else if (colType.equalsIgnoreCase(serdeConstants.DATE_TYPE_NAME)) {
-
-          result += nonNullCount * JavaDataModel.get().lengthOfDate();
+          sizeOf = JavaDataModel.get().lengthOfDate();
         } else {
-
-          result += nonNullCount * cs.getAvgColLen();
+          sizeOf = cs.getAvgColLen();
         }
+        result = safeAdd(result, safeMult(nonNullCount, sizeOf));
       }
     }
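
For illustration, a minimal sketch of the per-column size estimate this hunk refactors. It assumes the usual org.apache.hadoop.hive.ql.util.JavaDataModel and org.apache.hadoop.hive.ql.stats.StatsUtils locations; the 20-character average length and the row count are made-up inputs, not values from the patch:

import org.apache.hadoop.hive.ql.stats.StatsUtils;
import org.apache.hadoop.hive.ql.util.JavaDataModel;

public class ColumnSizeSketch {
  public static void main(String[] args) {
    long nonNullCount = 1000000L;   // hypothetical non-null row count
    int avgColLen = 20;             // hypothetical average string length
    // Same shape as the refactored loop: resolve a per-type width once,
    // then do one bounded multiply and one bounded add per column.
    double sizeOf = JavaDataModel.get().lengthForStringOfLength(avgColLen);
    long result = StatsUtils.safeAdd(0L, StatsUtils.safeMult(nonNullCount, sizeOf));
    System.out.println("estimated string column size in bytes: " + result);
  }
}
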
 
@@ -1300,21 +1295,6 @@ public class StatsUtils {
   }
 
   /**
-   * Remove KEY/VALUE prefix from column name
-   * @param colName
-   *          - column name
-   * @return column name
-   */
-  public static String stripPrefixFromColumnName(String colName) {
-    String stripedName = colName;
-    if (colName.startsWith("KEY") || colName.startsWith("VALUE")) {
-      // strip off KEY./VALUE. from column name
-      stripedName = colName.split("\\.")[1];
-    }
-    return stripedName;
-  }
-
-  /**
    * Returns fully qualified name of column
    * @param tabName
    * @param colName
@@ -1363,38 +1343,42 @@ public class StatsUtils {
   }
 
   /**
-   * Try to get fully qualified column name from expression node
+   * Get fully qualified column names from the output key column names and the column expression map
    * @param keyExprs
-   *          - expression nodes
+   *          - output key names
    * @param map
    *          - column expression map
    * @return list of fully qualified names
    */
-  public static List<String> 
getFullQualifedColNameFromExprs(List<ExprNodeDesc> keyExprs,
+  public static List<String> getFullyQualifedReducerKeyNames(List<String> 
keyExprs,
       Map<String, ExprNodeDesc> map) {
     List<String> result = Lists.newArrayList();
     if (keyExprs != null) {
-      for (ExprNodeDesc end : keyExprs) {
-        String outColName = null;
-        for (Map.Entry<String, ExprNodeDesc> entry : map.entrySet()) {
-          if (entry.getValue().isSame(end)) {
-            outColName = entry.getKey();
-            outColName = stripPrefixFromColumnName(outColName);
+      for (String key : keyExprs) {
+        String colName = key;
+        ExprNodeDesc end = map.get(colName);
+        // if the expression wasn't found, retry with the "KEY." prefix used for reducer key columns
+        if (end == null) {
+          colName = Utilities.ReduceField.KEY.toString() + "." + key;
+          end = map.get(colName);
+          if (end == null) {
+            continue;
           }
         }
         if (end instanceof ExprNodeColumnDesc) {
           ExprNodeColumnDesc encd = (ExprNodeColumnDesc) end;
-          if (outColName == null) {
-            outColName = encd.getColumn();
-            outColName = stripPrefixFromColumnName(outColName);
-          }
           String tabAlias = encd.getTabAlias();
-          result.add(getFullyQualifiedColumnName(tabAlias, outColName));
+          result.add(getFullyQualifiedColumnName(tabAlias, colName));
         } else if (end instanceof ExprNodeGenericFuncDesc) {
           ExprNodeGenericFuncDesc enf = (ExprNodeGenericFuncDesc) end;
-          List<String> cols = 
getFullQualifedColNameFromExprs(enf.getChildren(), map);
-          String joinedStr = Joiner.on(".").skipNulls().join(cols);
-          result.add(joinedStr);
+          String tabAlias = "";
+          for (ExprNodeDesc childEnd : enf.getChildren()) {
+            if (childEnd instanceof  ExprNodeColumnDesc) {
+              tabAlias = ((ExprNodeColumnDesc) childEnd).getTabAlias();
+              break;
+            }
+          }
+          result.add(getFullyQualifiedColumnName(tabAlias, colName));
         } else if (end instanceof ExprNodeConstantDesc) {
           ExprNodeConstantDesc encd = (ExprNodeConstantDesc) end;
           result.add(encd.getValue().toString());
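
A minimal sketch of the lookup fallback introduced above, with plain Strings standing in for the ExprNodeDesc values in the column expression map; the "KEY._col0" entry and the "t.id" expression are hypothetical:

import java.util.HashMap;
import java.util.Map;

public class ReducerKeyLookupSketch {
  public static void main(String[] args) {
    Map<String, String> colExprMap = new HashMap<String, String>();
    colExprMap.put("KEY._col0", "t.id");   // hypothetical reducer key entry

    String key = "_col0";
    String expr = colExprMap.get(key);
    if (expr == null) {
      // Same fallback as above: reducer maps key their entries as "KEY.<name>",
      // so retry with the prefix before giving up on this key.
      expr = colExprMap.get("KEY." + key);
    }
    System.out.println(expr);   // prints t.id
  }
}
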
@@ -1439,4 +1423,38 @@ public class StatsUtils {
         conf.getInt(MRJobConfig.MAP_MEMORY_MB, 
MRJobConfig.DEFAULT_MAP_MEMORY_MB);
     return memory;
   }
+
+  /**
+   * A negative number of rows or a negative data size is invalid; it could be
+   * caused by long overflow, in which case Long.MAX_VALUE is returned.
+   * @param val - input value
+   * @return Long.MAX_VALUE if val is negative, else val
+   */
+  public static long getMaxIfOverflow(long val) {
+    return val < 0 ? Long.MAX_VALUE : val;
+  }
+
+  /** Bounded multiplication - overflows become MAX_VALUE */
+  public static long safeMult(long a, double b) {
+    double result = a * b;
+    return (result > Long.MAX_VALUE) ? Long.MAX_VALUE : (long)result;
+  }
+ 
+  /** Bounded addition - overflows become MAX_VALUE */
+  public static long safeAdd(long a, long b) {
+    try {
+      return LongMath.checkedAdd(a, b);
+    } catch (ArithmeticException ex) {
+      return Long.MAX_VALUE;
+    }
+  }
+ 
+  /** Bounded multiplication - overflows become MAX_VALUE */
+  public static long safeMult(long a, long b) {
+    try {
+      return LongMath.checkedMultiply(a, b);
+    } catch (ArithmeticException ex) {
+      return Long.MAX_VALUE;
+    }
+  }
 }
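
A minimal sketch of the saturating behaviour of the helpers added above (the org.apache.hadoop.hive.ql.stats package location of StatsUtils is assumed): plain long arithmetic wraps around to a negative value, while safeMult/safeAdd cap at Long.MAX_VALUE and getMaxIfOverflow repairs an already-negative estimate:

import org.apache.hadoop.hive.ql.stats.StatsUtils;

public class OverflowSketch {
  public static void main(String[] args) {
    long rows = Long.MAX_VALUE / 2;
    System.out.println(rows * 4);                                // wraps to a negative value
    System.out.println(StatsUtils.safeMult(rows, 4L));           // saturates at Long.MAX_VALUE
    System.out.println(StatsUtils.safeAdd(Long.MAX_VALUE, 1L));  // saturates at Long.MAX_VALUE
    System.out.println(StatsUtils.getMaxIfOverflow(-5L));        // Long.MAX_VALUE
  }
}
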

Modified: 
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFStreamingEvaluator.java
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFStreamingEvaluator.java?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- 
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFStreamingEvaluator.java
 (original)
+++ 
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFStreamingEvaluator.java
 Fri Nov  7 20:41:34 2014
@@ -67,7 +67,7 @@ public abstract class GenericUDAFStreami
   @Override
   public ObjectInspector init(Mode m, ObjectInspector[] parameters)
       throws HiveException {
-    throw new HiveException(getClass().getSimpleName() + ": init not 
supported");
+    return wrappedEval.init(m, parameters);
   }
 
   @Override

Modified: 
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseNumeric.java
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseNumeric.java?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- 
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseNumeric.java
 (original)
+++ 
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFBaseNumeric.java
 Fri Nov  7 20:41:34 2014
@@ -294,7 +294,7 @@ public abstract class GenericUDFBaseNume
 
   @Override
   public String getDisplayString(String[] children) {
-    assert (children.length == 2);
+    assert (children.length == 2) : opDisplayName + " with " + children.length 
+ " children";
     return "(" + children[0] + " " + opDisplayName + " " + children[1] + ")";
   }
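
A minimal standalone sketch of the display format and the strengthened assertion above; the helper name and inputs are illustrative, and the assert only fires when run with -ea:

public class DisplayStringSketch {
  // Mirrors getDisplayString: infix rendering of a binary numeric operator.
  static String display(String opDisplayName, String... children) {
    assert (children.length == 2) : opDisplayName + " with " + children.length + " children";
    return "(" + children[0] + " " + opDisplayName + " " + children[1] + ")";
  }

  public static void main(String[] args) {
    System.out.println(display("+", "x", "1"));   // (x + 1)
  }
}
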
 

Modified: 
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/util/ZooKeeperHiveHelper.java
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/util/ZooKeeperHiveHelper.java?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- 
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/util/ZooKeeperHiveHelper.java
 (original)
+++ 
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/util/ZooKeeperHiveHelper.java
 Fri Nov  7 20:41:34 2014
@@ -18,17 +18,10 @@
 
 package org.apache.hadoop.hive.ql.util;
 
-import java.util.List;
-
-import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.zookeeper.CreateMode;
-import org.apache.zookeeper.KeeperException;
 import org.apache.zookeeper.Watcher;
-import org.apache.zookeeper.ZooKeeper;
-import org.apache.zookeeper.data.ACL;
 
 public class ZooKeeperHiveHelper {
   public static final Log LOG = 
LogFactory.getLog(ZooKeeperHiveHelper.class.getName());
@@ -59,34 +52,6 @@ public class ZooKeeperHiveHelper {
     return quorum.toString();
   }
 
-
-  /**
-   * Create a path on ZooKeeper, if it does not already exist ("mkdir -p")
-   *
-   * @param zooKeeperClient ZooKeeper session
-   * @param path string with ZOOKEEPER_PATH_SEPARATOR as the separator
-   * @param acl list of ACL entries
-   * @param createMode for create mode of each node in the patch
-   * @return
-   * @throws KeeperException
-   * @throws InterruptedException
-   */
-  public static String createPathRecursively(ZooKeeper zooKeeperClient, String 
path, List<ACL> acl,
-      CreateMode createMode) throws KeeperException, InterruptedException {
-    String[] pathComponents = StringUtils.splitByWholeSeparator(path, 
ZOOKEEPER_PATH_SEPARATOR);
-    String currentPath = "";
-    for (String pathComponent : pathComponents) {
-      currentPath += ZOOKEEPER_PATH_SEPARATOR + pathComponent;
-      try {
-        String node = zooKeeperClient.create(currentPath, new byte[0], acl, 
createMode);
-        LOG.info("Created path: " + node);
-      } catch (KeeperException.NodeExistsException e) {
-        // Do nothing here
-      }
-    }
-    return currentPath;
-  }
-
   /**
    * A no-op watcher class
    */
@@ -95,5 +60,4 @@ public class ZooKeeperHiveHelper {
     public void process(org.apache.zookeeper.WatchedEvent event) {
     }
   }
-
 }

Modified: 
hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorFilterOperator.java
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorFilterOperator.java?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- 
hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorFilterOperator.java
 (original)
+++ 
hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorFilterOperator.java
 Fri Nov  7 20:41:34 2014
@@ -106,7 +106,7 @@ public class TestVectorFilterOperator {
 
     VectorizedRowBatch vrg = fdr.getNext();
 
-    vfo.processOp(vrg, 0);
+    vfo.getConditionEvaluator().evaluate(vrg);
 
     //Verify
     int rows = 0;

Modified: 
hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorGroupByOperator.java
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorGroupByOperator.java?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- 
hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorGroupByOperator.java
 (original)
+++ 
hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/TestVectorGroupByOperator.java
 Fri Nov  7 20:41:34 2014
@@ -38,7 +38,6 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.hadoop.hive.common.type.Decimal128;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.vector.util.FakeCaptureOutputOperator;
 import org.apache.hadoop.hive.ql.exec.vector.util.FakeVectorRowBatchFromConcat;
@@ -632,9 +631,9 @@ public class TestVectorGroupByOperator {
         "count",
         2,
         Arrays.asList(new Object[]{
-                new Decimal128(1),
-                new Decimal128(2),
-                new Decimal128(3)}),
+                HiveDecimal.create(1),
+                HiveDecimal.create(2),
+                HiveDecimal.create(3)}),
        3L);
   }
 
@@ -645,28 +644,28 @@ public class TestVectorGroupByOperator {
         "max",
         2,
         Arrays.asList(new Object[]{
-                new Decimal128(1),
-                new Decimal128(2),
-                new Decimal128(3)}),
-       new Decimal128(3));
+                HiveDecimal.create(1),
+                HiveDecimal.create(2),
+                HiveDecimal.create(3)}),
+       HiveDecimal.create(3));
     testAggregateDecimal(
         "Decimal",
         "max",
         2,
         Arrays.asList(new Object[]{
-                new Decimal128(3),
-                new Decimal128(2),
-                new Decimal128(1)}),
-        new Decimal128(3));
+                HiveDecimal.create(3),
+                HiveDecimal.create(2),
+                HiveDecimal.create(1)}),
+        HiveDecimal.create(3));
     testAggregateDecimal(
         "Decimal",
         "max",
         2,
         Arrays.asList(new Object[]{
-                new Decimal128(2),
-                new Decimal128(3),
-                new Decimal128(1)}),
-        new Decimal128(3));
+                HiveDecimal.create(2),
+                HiveDecimal.create(3),
+                HiveDecimal.create(1)}),
+        HiveDecimal.create(3));
   }
 
   @Test
@@ -676,29 +675,29 @@ public class TestVectorGroupByOperator {
         "min",
         2,
         Arrays.asList(new Object[]{
-                new Decimal128(1),
-                new Decimal128(2),
-                new Decimal128(3)}),
-       new Decimal128(1));
+                HiveDecimal.create(1),
+                HiveDecimal.create(2),
+                HiveDecimal.create(3)}),
+       HiveDecimal.create(1));
     testAggregateDecimal(
         "Decimal",
         "min",
         2,
         Arrays.asList(new Object[]{
-                new Decimal128(3),
-                new Decimal128(2),
-                new Decimal128(1)}),
-        new Decimal128(1));
+                HiveDecimal.create(3),
+                HiveDecimal.create(2),
+                HiveDecimal.create(1)}),
+        HiveDecimal.create(1));
 
     testAggregateDecimal(
         "Decimal",
         "min",
         2,
         Arrays.asList(new Object[]{
-                new Decimal128(2),
-                new Decimal128(1),
-                new Decimal128(3)}),
-        new Decimal128(1));
+                HiveDecimal.create(2),
+                HiveDecimal.create(1),
+                HiveDecimal.create(3)}),
+        HiveDecimal.create(1));
   }
 
   @Test
@@ -708,10 +707,10 @@ public class TestVectorGroupByOperator {
        "sum",
         2,
         Arrays.asList(new Object[]{
-                new Decimal128(1),
-                new Decimal128(2),
-                new Decimal128(3)}),
-       new Decimal128(1+2+3));
+                HiveDecimal.create(1),
+                HiveDecimal.create(2),
+                HiveDecimal.create(3)}),
+       HiveDecimal.create(1+2+3));
   }
 
   @Test
@@ -722,12 +721,12 @@ public class TestVectorGroupByOperator {
         "sum",
         4,
         Arrays.asList(new Object[]{
-                new Decimal128("1234.2401", scale),
-                new Decimal128("1868.52", scale),
-                new Decimal128(0L, (short) 0),
-                new Decimal128("456.84", scale),
-                new Decimal128("121.89", scale)}),
-       new Decimal128("3681.4901", scale));
+                HiveDecimal.create("1234.2401").setScale(scale),
+                HiveDecimal.create("1868.52").setScale(scale),
+                HiveDecimal.ZERO.setScale(scale),
+                HiveDecimal.create("456.84").setScale(scale),
+                HiveDecimal.create("121.89").setScale(scale)}),
+       HiveDecimal.create("3681.4901").setScale( scale));
   }
 
   @Test
@@ -737,9 +736,9 @@ public class TestVectorGroupByOperator {
         "avg",
         2,
         Arrays.asList(new Object[]{
-                new Decimal128(1),
-                new Decimal128(2),
-                new Decimal128(3)}),
+                HiveDecimal.create(1),
+                HiveDecimal.create(2),
+                HiveDecimal.create(3)}),
        HiveDecimal.create((1+2+3)/3));
   }
 
@@ -750,9 +749,9 @@ public class TestVectorGroupByOperator {
         "avg",
         2,
         Arrays.asList(new Object[]{
-                new Decimal128(-1),
-                new Decimal128(-2),
-                new Decimal128(-3)}),
+                HiveDecimal.create(-1),
+                HiveDecimal.create(-2),
+                HiveDecimal.create(-3)}),
         HiveDecimal.create((-1-2-3)/3));
   }
 
@@ -763,10 +762,10 @@ public class TestVectorGroupByOperator {
         "variance",
         2,
         Arrays.asList(new Object[]{
-                new Decimal128(13),
-                new Decimal128(5),
-                new Decimal128(7),
-                new Decimal128(19)}),
+                HiveDecimal.create(13),
+                HiveDecimal.create(5),
+                HiveDecimal.create(7),
+                HiveDecimal.create(19)}),
         (double) 30);
   }
 
@@ -777,10 +776,10 @@ public class TestVectorGroupByOperator {
         "var_samp",
         2,
         Arrays.asList(new Object[]{
-                new Decimal128(13),
-                new Decimal128(5),
-                new Decimal128(7),
-                new Decimal128(19)}),
+                HiveDecimal.create(13),
+                HiveDecimal.create(5),
+                HiveDecimal.create(7),
+                HiveDecimal.create(19)}),
         (double) 40);
   }
 
@@ -791,10 +790,10 @@ public class TestVectorGroupByOperator {
         "stddev_pop",
         2,
         Arrays.asList(new Object[]{
-                new Decimal128(13),
-                new Decimal128(5),
-                new Decimal128(7),
-                new Decimal128(19)}),
+                HiveDecimal.create(13),
+                HiveDecimal.create(5),
+                HiveDecimal.create(7),
+                HiveDecimal.create(19)}),
         (double) Math.sqrt(30));
   }
 
@@ -805,10 +804,10 @@ public class TestVectorGroupByOperator {
         "stddev_samp",
         2,
         Arrays.asList(new Object[]{
-                new Decimal128(13),
-                new Decimal128(5),
-                new Decimal128(7),
-                new Decimal128(19)}),
+                HiveDecimal.create(13),
+                HiveDecimal.create(5),
+                HiveDecimal.create(7),
+                HiveDecimal.create(19)}),
         (double) Math.sqrt(40));
   }
 
@@ -820,8 +819,8 @@ public class TestVectorGroupByOperator {
             2,
             new String[] {"decimal(38,0)", "bigint"},
             Arrays.asList(new Object[]{
-                    new Decimal128(1),null,
-                    new Decimal128(1), null}),
+                    HiveDecimal.create(1),null,
+                    HiveDecimal.create(1), null}),
             Arrays.asList(new Object[]{13L,null,7L, 19L})),
         buildHashMap(HiveDecimal.create(1), 20L, null, 19L));
   }
@@ -2095,12 +2094,12 @@ public class TestVectorGroupByOperator {
       } else if (arr[0] instanceof HiveDecimalWritable) {
         HiveDecimalWritable hdw = (HiveDecimalWritable) arr[0];
         HiveDecimal hd = hdw.getHiveDecimal();
-        Decimal128 d128 = (Decimal128)expected;
-        assertEquals (key, d128.toBigDecimal(), hd.bigDecimalValue());
+        HiveDecimal expectedDec = (HiveDecimal)expected;
+        assertEquals (key, expectedDec, hd);
       } else if (arr[0] instanceof HiveDecimal) {
           HiveDecimal hd = (HiveDecimal) arr[0];
-          Decimal128 d128 = (Decimal128)expected;
-          assertEquals (key, d128.toBigDecimal(), hd.bigDecimalValue());
+          HiveDecimal expectedDec = (HiveDecimal)expected;
+          assertEquals (key, expectedDec, hd);
       } else {
         Assert.fail("Unsupported result type: " + arr[0].getClass().getName());
       }
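
A minimal sketch of the HiveDecimal values the rewritten assertions compare in place of Decimal128; only factory members already used above (create, ZERO) appear, and the printed values are illustrative:

import org.apache.hadoop.hive.common.type.HiveDecimal;

public class HiveDecimalSketch {
  public static void main(String[] args) {
    HiveDecimal three = HiveDecimal.create(3);        // from an int, as in the count/min/max tests
    HiveDecimal alsoThree = HiveDecimal.create("3");  // from a string, as in the sum test
    System.out.println(three.compareTo(alsoThree) == 0);   // true: same numeric value
    System.out.println(HiveDecimal.ZERO);                  // 0
    System.out.println(HiveDecimal.create("3681.4901").doubleValue());
  }
}
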

Modified: 
hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestConstantVectorExpression.java
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestConstantVectorExpression.java?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- 
hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestConstantVectorExpression.java
 (original)
+++ 
hive/branches/spark/ql/src/test/org/apache/hadoop/hive/ql/exec/vector/expressions/TestConstantVectorExpression.java
 Fri Nov  7 20:41:34 2014
@@ -23,7 +23,7 @@ import static org.junit.Assert.assertTru
 
 import java.util.Arrays;
 
-import org.apache.hadoop.hive.common.type.Decimal128;
+import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
@@ -43,7 +43,7 @@ public class TestConstantVectorExpressio
     ConstantVectorExpression doubleCve = new ConstantVectorExpression(1, 
17.34);
     String str = "alpha";
     ConstantVectorExpression bytesCve = new ConstantVectorExpression(2, 
str.getBytes());
-    Decimal128 decVal = new Decimal128(25.8, (short) 1);
+    HiveDecimal decVal = HiveDecimal.create("25.8");
     ConstantVectorExpression decimalCve = new ConstantVectorExpression(3, 
decVal);
     ConstantVectorExpression nullCve = new ConstantVectorExpression(4, 
"string", true);
     
@@ -85,12 +85,12 @@ public class TestConstantVectorExpressio
     assertTrue(bcv.length[0] == alphaBytes.length);
     assertTrue(sameFirstKBytes(alphaBytes, bcv.vector[0], alphaBytes.length)); 
 
-    assertTrue(25.8 == dv.vector[0].doubleValue());
+    assertTrue(25.8 == dv.vector[0].getHiveDecimal().doubleValue());
     // Evaluation of the decimal Constant Vector Expression after the vector is
     // modified.    
-    ((DecimalColumnVector) (vrg.cols[3])).vector[0] = new Decimal128(39.7, 
(short) 1);
+    ((DecimalColumnVector) 
(vrg.cols[3])).vector[0].set(HiveDecimal.create("39.7"));
     decimalCve.evaluate(vrg);
-    assertTrue(25.8 == dv.vector[0].doubleValue());    
+    assertTrue(25.8 == dv.vector[0].getHiveDecimal().doubleValue());    
   }
   
   private boolean sameFirstKBytes(byte[] o1, byte[] o2, int k) {
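
A minimal sketch of the writable-based access pattern the updated assertions rely on: DecimalColumnVector entries are HiveDecimalWritable, written via set(HiveDecimal) and read back via getHiveDecimal():

import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;

public class DecimalWritableSketch {
  public static void main(String[] args) {
    HiveDecimalWritable slot = new HiveDecimalWritable();
    slot.set(HiveDecimal.create("25.8"));                       // write, as the test does for vector[0]
    System.out.println(slot.getHiveDecimal().doubleValue());    // 25.8, as asserted above
  }
}
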

