Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapper.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapper.java?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapper.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapper.java Fri Nov  7 20:41:34 2014
@@ -20,7 +20,8 @@ package org.apache.hadoop.hive.ql.exec.v
 
 import java.util.Arrays;
 
-import org.apache.hadoop.hive.common.type.Decimal128;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
+import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.KeyWrapper;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
@@ -40,7 +41,7 @@ public class VectorHashKeyWrapper extend
   private static final long[] EMPTY_LONG_ARRAY = new long[0];
   private static final double[] EMPTY_DOUBLE_ARRAY = new double[0];
   private static final byte[][] EMPTY_BYTES_ARRAY = new byte[0][];
-  private static final Decimal128[] EMPTY_DECIMAL_ARRAY = new Decimal128[0];
+  private static final HiveDecimalWritable[] EMPTY_DECIMAL_ARRAY = new HiveDecimalWritable[0];
 
   private long[] longValues;
   private double[] doubleValues;
@@ -49,7 +50,7 @@ public class VectorHashKeyWrapper extend
   private int[] byteStarts;
   private int[] byteLengths;
 
-  private Decimal128[] decimalValues;
+  private HiveDecimalWritable[] decimalValues;
 
   private boolean[] isNull;
   private int hashcode;
@@ -58,9 +59,9 @@ public class VectorHashKeyWrapper extend
           int byteValuesCount, int decimalValuesCount) {
     longValues = longValuesCount > 0 ? new long[longValuesCount] : EMPTY_LONG_ARRAY;
     doubleValues = doubleValuesCount > 0 ? new double[doubleValuesCount] : EMPTY_DOUBLE_ARRAY;
-    decimalValues = decimalValuesCount > 0 ? new Decimal128[decimalValuesCount] : EMPTY_DECIMAL_ARRAY;
+    decimalValues = decimalValuesCount > 0 ? new HiveDecimalWritable[decimalValuesCount] : EMPTY_DECIMAL_ARRAY;
     for(int i = 0; i < decimalValuesCount; ++i) {
-      decimalValues[i] = new Decimal128();
+      decimalValues[i] = new HiveDecimalWritable(HiveDecimal.ZERO);
     }
     if (byteValuesCount > 0) {
       byteValues = new byte[byteValuesCount][];
@@ -87,9 +88,12 @@ public class VectorHashKeyWrapper extend
   public void setHashKey() {
     hashcode = Arrays.hashCode(longValues) ^
         Arrays.hashCode(doubleValues) ^
-        Arrays.hashCode(decimalValues) ^
         Arrays.hashCode(isNull);
 
+    for (int i = 0; i < decimalValues.length; i++) {
+      hashcode ^= decimalValues[i].getHiveDecimal().hashCode();
+    }
+
     // This code, with branches and all, is not executed if there are no string keys
     for (int i = 0; i < byteValues.length; ++i) {
       /*
@@ -161,27 +165,36 @@ public class VectorHashKeyWrapper extend
   }
 
   public void duplicateTo(VectorHashKeyWrapper clone) {
-    clone.longValues = longValues.clone();
-    clone.doubleValues = doubleValues.clone();
+    clone.longValues = (longValues.length > 0) ? longValues.clone() : EMPTY_LONG_ARRAY;
+    clone.doubleValues = (doubleValues.length > 0) ? doubleValues.clone() : EMPTY_DOUBLE_ARRAY;
     clone.isNull = isNull.clone();
 
-    // Decimal128 requires deep clone
-    clone.decimalValues = new Decimal128[decimalValues.length];
-    for(int i = 0; i < decimalValues.length; ++i) {
-      clone.decimalValues[i] = new Decimal128().update(decimalValues[i]);
+    if (decimalValues.length > 0) {
+      // Decimal columns use HiveDecimalWritable.
+      clone.decimalValues = new HiveDecimalWritable[decimalValues.length];
+      for(int i = 0; i < decimalValues.length; ++i) {
+        clone.decimalValues[i] = new HiveDecimalWritable(decimalValues[i]);
+      }
+    } else {
+      clone.decimalValues = EMPTY_DECIMAL_ARRAY;
     }
 
-    clone.byteValues = new byte[byteValues.length][];
-    clone.byteStarts = new int[byteValues.length];
-    clone.byteLengths = byteLengths.clone();
-    for (int i = 0; i < byteValues.length; ++i) {
-      // avoid allocation/copy of nulls, because it potentially expensive. branch instead.
-      if (!isNull[longValues.length + doubleValues.length + i]) {
-        clone.byteValues[i] = Arrays.copyOfRange(
-            byteValues[i],
-            byteStarts[i],
-            byteStarts[i] + byteLengths[i]);
+    if (byteLengths.length > 0) {
+      clone.byteValues = new byte[byteValues.length][];
+      clone.byteStarts = new int[byteValues.length];
+      clone.byteLengths = byteLengths.clone();
+      for (int i = 0; i < byteValues.length; ++i) {
+        // avoid allocation/copy of nulls, because it potentially expensive.
+        // branch instead.
+        if (!isNull[longValues.length + doubleValues.length + i]) {
+          clone.byteValues[i] = Arrays.copyOfRange(byteValues[i],
+              byteStarts[i], byteStarts[i] + byteLengths[i]);
+        }
       }
+    } else {
+      clone.byteValues = EMPTY_BYTES_ARRAY;
+      clone.byteStarts = EMPTY_INT_ARRAY;
+      clone.byteLengths = EMPTY_INT_ARRAY;
     }
     clone.hashcode = hashcode;
     assert clone.equals(this);
@@ -234,8 +247,8 @@ public class VectorHashKeyWrapper extend
     isNull[longValues.length + doubleValues.length + index] = true;
   }
 
-  public void assignDecimal(int index, Decimal128 value) {
-    decimalValues[index].update(value);
+  public void assignDecimal(int index, HiveDecimalWritable value) {
+    decimalValues[index].set(value);
     isNull[longValues.length + doubleValues.length + byteValues.length + index] = false;
   }
 
@@ -299,7 +312,7 @@ public class VectorHashKeyWrapper extend
     return isNull[longValues.length + doubleValues.length + byteValues.length + i];
   }
 
-  public Decimal128 getDecimal(int i) {
+  public HiveDecimalWritable getDecimal(int i) {
     return decimalValues[i];
   }
 }
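
A note on the two changes above: Arrays.hashCode() over a HiveDecimalWritable[] would hash the wrapper objects, so setHashKey() now folds in the hash of each underlying HiveDecimal by hand, and duplicateTo() deep-copies the writables. A minimal standalone sketch of the new hash combining (class name and sample values are ours, not from the commit; Hive jars assumed on the classpath):

    import java.util.Arrays;
    import org.apache.hadoop.hive.common.type.HiveDecimal;
    import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;

    public class KeyHashSketch {
      public static void main(String[] args) {
        long[] longValues = {42L};
        double[] doubleValues = {};
        boolean[] isNull = {false, false};
        HiveDecimalWritable[] decimalValues = {
            new HiveDecimalWritable(HiveDecimal.create("12.50"))};

        // Flat arrays hash as before; each decimal contributes the hash of
        // its logical value rather than of the writable wrapper.
        int hashcode = Arrays.hashCode(longValues)
            ^ Arrays.hashCode(doubleValues)
            ^ Arrays.hashCode(isNull);
        for (HiveDecimalWritable w : decimalValues) {
          hashcode ^= w.getHiveDecimal().hashCode();
        }
        System.out.println("key hash = " + hashcode);
      }
    }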

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapperBatch.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapperBatch.java?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapperBatch.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorHashKeyWrapperBatch.java Fri Nov  7 20:41:34 2014
@@ -580,7 +580,7 @@ public class VectorHashKeyWrapperBatch e
     } else if (klh.decimalIndex >= 0) {
       return kw.getIsDecimalNull(klh.decimalIndex)? null :
           keyOutputWriter.writeValue(
-                kw.getDecimal(klh.decimalIndex));
+                kw.getDecimal(klh.decimalIndex).getHiveDecimal());
     }
     else {
       throw new HiveException(String.format(

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapJoinOperator.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapJoinOperator.java?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapJoinOperator.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorMapJoinOperator.java Fri Nov  7 20:41:34 2014
@@ -28,7 +28,6 @@ import org.apache.commons.logging.LogFac
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
 import org.apache.hadoop.hive.ql.exec.MapJoinOperator;
-import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.exec.persistence.MapJoinTableContainer.ReusableGetAdaptor;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriter;
@@ -53,19 +52,16 @@ public class VectorMapJoinOperator exten
    */
   private static final long serialVersionUID = 1L;
 
-  /**
-   * Vectorizaiton context key
-   * Used to retrieve column map from the MapTask scratch
-   */
-  private String fileKey;
-  private int tagLen;
-
   private VectorExpression[] keyExpressions;
-  private transient VectorHashKeyWrapperBatch keyWrapperBatch;
-  private transient VectorExpressionWriter[] keyOutputWriters;
 
   private VectorExpression[] bigTableFilterExpressions;
   private VectorExpression[] bigTableValueExpressions;
+  
+  private VectorizationContext vOutContext;
+
+  // The above members are initialized by the constructor and must not be
+  // transient.
+  //---------------------------------------------------------------------------
 
   private transient VectorizedRowBatch outputBatch;
   private transient VectorExpressionWriter[] valueWriters;
@@ -76,8 +72,9 @@ public class VectorMapJoinOperator exten
   //
   private transient int batchIndex;
   private transient VectorHashKeyWrapper[] keyValues;
-  
-  private transient VectorizationContext vOutContext = null;
+  private transient VectorHashKeyWrapperBatch keyWrapperBatch;
+  private transient VectorExpressionWriter[] keyOutputWriters;
+
   private transient VectorizedRowBatchCtx vrbCtx = null;
   
   public VectorMapJoinOperator() {
@@ -96,7 +93,6 @@ public class VectorMapJoinOperator exten
     numAliases = desc.getExprs().size();
     posBigTable = (byte) desc.getPosBigTable();
     filterMaps = desc.getFilterMap();
-    tagLen = desc.getTagLength();
     noOuterJoin = desc.isNoOuterJoin();
 
     Map<Byte, List<ExprNodeDesc>> filterExpressions = desc.getFilters();
@@ -113,7 +109,6 @@ public class VectorMapJoinOperator exten
     // We are making a new output vectorized row batch.
     vOutContext = new VectorizationContext(desc.getOutputColumnNames());
     vOutContext.setFileKey(vContext.getFileKey() + "/MAP_JOIN_" + desc.getBigTableAlias());
-    this.fileKey = vOutContext.getFileKey();
   }
 
   @Override
@@ -124,7 +119,7 @@ public class VectorMapJoinOperator exten
     keyOutputWriters = VectorExpressionWriterFactory.getExpressionWriters(keyDesc);
 
     vrbCtx = new VectorizedRowBatchCtx();
-    vrbCtx.init(hconf, this.fileKey, (StructObjectInspector) this.outputObjInspector);
+    vrbCtx.init(vOutContext.getScratchColumnTypeMap(), (StructObjectInspector) this.outputObjInspector);
 
     outputBatch = vrbCtx.createVectorizedRowBatch();
 
@@ -193,10 +188,8 @@ public class VectorMapJoinOperator exten
     Object[] values = (Object[]) row;
     VectorColumnAssign[] vcas = outputVectorAssigners.get(outputOI);
     if (null == vcas) {
-      Map<String, Map<String, Integer>> allColumnMaps = Utilities.getAllColumnVectorMaps(hconf);
-      Map<String, Integer> columnMap = allColumnMaps.get(fileKey);
       vcas = VectorColumnAssignFactory.buildAssigners(
-          outputBatch, outputOI, columnMap, conf.getOutputColumnNames());
+          outputBatch, outputOI, vOutContext.getProjectionColumnMap(), conf.getOutputColumnNames());
       outputVectorAssigners.put(outputOI, vcas);
     }
     for (int i=0; i<values.length; ++i) {

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSMBMapJoinOperator.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSMBMapJoinOperator.java?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSMBMapJoinOperator.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorSMBMapJoinOperator.java Fri Nov  7 20:41:34 2014
@@ -28,7 +28,6 @@ import org.apache.commons.logging.LogFac
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
 import org.apache.hadoop.hive.ql.exec.SMBMapJoinOperator;
-import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriter;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriterFactory;
@@ -52,14 +51,6 @@ public class VectorSMBMapJoinOperator ex
   
   private static final long serialVersionUID = 1L;
 
-  private int tagLen;
-  
-  private transient VectorizedRowBatch outputBatch;  
-  private transient VectorizationContext vOutContext = null;
-  private transient VectorizedRowBatchCtx vrbCtx = null;  
-  
-  private String fileKey;
-
   private VectorExpression[] bigTableValueExpressions;
 
   private VectorExpression[] bigTableFilterExpressions;
@@ -68,6 +59,16 @@ public class VectorSMBMapJoinOperator ex
 
   private VectorExpressionWriter[] keyOutputWriters;
 
+  private VectorizationContext vOutContext;
+
+  // The above members are initialized by the constructor and must not be
+  // transient.
+  //---------------------------------------------------------------------------
+
+  private transient VectorizedRowBatch outputBatch;  
+
+  private transient VectorizedRowBatchCtx vrbCtx = null;
+
   private transient VectorHashKeyWrapperBatch keyWrapperBatch;
 
   private transient Map<ObjectInspector, VectorColumnAssign[]> outputVectorAssigners;
@@ -98,7 +99,6 @@ public class VectorSMBMapJoinOperator ex
     numAliases = desc.getExprs().size();
     posBigTable = (byte) desc.getPosBigTable();
     filterMaps = desc.getFilterMap();
-    tagLen = desc.getTagLength();
     noOuterJoin = desc.isNoOuterJoin();
 
     // Must obtain vectorized equivalents for filter and value expressions
@@ -117,7 +117,6 @@ public class VectorSMBMapJoinOperator ex
     // We are making a new output vectorized row batch.
     vOutContext = new VectorizationContext(desc.getOutputColumnNames());
     vOutContext.setFileKey(vContext.getFileKey() + "/SMB_JOIN_" + desc.getBigTableAlias());
-    this.fileKey = vOutContext.getFileKey();
   }
   
   @Override
@@ -135,7 +134,7 @@ public class VectorSMBMapJoinOperator ex
     super.initializeOp(hconf);
 
     vrbCtx = new VectorizedRowBatchCtx();
-    vrbCtx.init(hconf, this.fileKey, (StructObjectInspector) this.outputObjInspector);
+    vrbCtx.init(vOutContext.getScratchColumnTypeMap(), (StructObjectInspector) this.outputObjInspector);
     
     outputBatch = vrbCtx.createVectorizedRowBatch();
     
@@ -272,10 +271,8 @@ public class VectorSMBMapJoinOperator ex
     Object[] values = (Object[]) row;
     VectorColumnAssign[] vcas = outputVectorAssigners.get(outputOI);
     if (null == vcas) {
-      Map<String, Map<String, Integer>> allColumnMaps = Utilities.getAllColumnVectorMaps(hconf);
-      Map<String, Integer> columnMap = allColumnMaps.get(fileKey);
       vcas = VectorColumnAssignFactory.buildAssigners(
-          outputBatch, outputOI, columnMap, conf.getOutputColumnNames());
+          outputBatch, outputOI, vOutContext.getProjectionColumnMap(), conf.getOutputColumnNames());
       outputVectorAssigners.put(outputOI, vcas);
     }
     for (int i = 0; i < values.length; ++i) {

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizationContext.java Fri Nov  7 20:41:34 2014
@@ -19,6 +19,7 @@
 package org.apache.hadoop.hive.ql.exec.vector;
 
 import java.lang.reflect.Constructor;
+import java.math.BigDecimal;
 import java.sql.Date;
 import java.sql.Timestamp;
 import java.util.ArrayList;
@@ -34,7 +35,7 @@ import java.util.regex.Pattern;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.common.type.Decimal128;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
@@ -868,7 +869,7 @@ public class VectorizationContext {
     case FLOAT_FAMILY:
       return new ConstantVectorExpression(outCol, ((Number) constantValue).doubleValue());
     case DECIMAL:
-      VectorExpression ve = new ConstantVectorExpression(outCol, (Decimal128) constantValue);
+      VectorExpression ve = new ConstantVectorExpression(outCol, (HiveDecimal) constantValue);
       // Set type name with decimal precision, scale, etc.
       ve.setOutputType(typeName);
       return ve;
@@ -1237,9 +1238,9 @@ public class VectorizationContext {
       ((IDoubleInExpr) expr).setInListValues(inValsD);
     } else if (isDecimalFamily(colType)) {
       cl = (mode == Mode.FILTER ? FilterDecimalColumnInList.class : DecimalColumnInList.class);
-      Decimal128[] inValsD = new Decimal128[childrenForInList.size()];
+      HiveDecimal[] inValsD = new HiveDecimal[childrenForInList.size()];
       for (int i = 0; i != inValsD.length; i++) {
-        inValsD[i] = (Decimal128) getVectorTypeScalarValue(
+        inValsD[i] = (HiveDecimal) getVectorTypeScalarValue(
             (ExprNodeConstantDesc)  childrenForInList.get(i));
       }
       expr = createVectorExpression(cl, childExpr.subList(0, 1), Mode.PROJECTION, returnType);
@@ -1287,44 +1288,43 @@ public class VectorizationContext {
     return null;
   }
 
-  private Decimal128 castConstantToDecimal(Object scalar, TypeInfo type) throws HiveException {
+  private HiveDecimal castConstantToDecimal(Object scalar, TypeInfo type) throws HiveException {
     PrimitiveTypeInfo ptinfo = (PrimitiveTypeInfo) type;
-    String typename = type.getTypeName();
-    Decimal128 d = new Decimal128();
     int scale = HiveDecimalUtils.getScaleForType(ptinfo);
+    String typename = type.getTypeName();
+    HiveDecimal rawDecimal;
     switch (ptinfo.getPrimitiveCategory()) {
     case FLOAT:
-      float floatVal = ((Float) scalar).floatValue();
-      d.update(floatVal, (short) scale);
+      rawDecimal = HiveDecimal.create(String.valueOf((Float) scalar));
       break;
     case DOUBLE:
-      double doubleVal = ((Double) scalar).doubleValue();
-      d.update(doubleVal, (short) scale);
+      rawDecimal = HiveDecimal.create(String.valueOf((Double) scalar));
       break;
     case BYTE:
-      byte byteVal = ((Byte) scalar).byteValue();
-      d.update(byteVal, (short) scale);
+      rawDecimal = HiveDecimal.create((Byte) scalar);
       break;
     case SHORT:
-      short shortVal = ((Short) scalar).shortValue();
-      d.update(shortVal, (short) scale);
+      rawDecimal = HiveDecimal.create((Short) scalar);
       break;
     case INT:
-      int intVal = ((Integer) scalar).intValue();
-      d.update(intVal, (short) scale);
+      rawDecimal = HiveDecimal.create((Integer) scalar);
       break;
     case LONG:
-      long longVal = ((Long) scalar).longValue();
-      d.update(longVal, (short) scale);
+      rawDecimal = HiveDecimal.create((Long) scalar);
       break;
     case DECIMAL:
-      HiveDecimal decimalVal = (HiveDecimal) scalar;
-      d.update(decimalVal.unscaledValue(), (short) scale);
+      rawDecimal = (HiveDecimal) scalar;
       break;
     default:
-      throw new HiveException("Unsupported type "+typename+" for cast to Decimal128");
+      throw new HiveException("Unsupported type " + typename + " for cast to HiveDecimal");
+    }
+    if (rawDecimal == null) {
+      if (LOG.isDebugEnabled()) {
+        LOG.debug("Casting constant scalar " + scalar + " to HiveDecimal resulted in null");
+      }
+      return null;
     }
-    return d;
+    return rawDecimal;
   }
 
   private String castConstantToString(Object scalar, TypeInfo type) throws HiveException {
@@ -1391,7 +1391,7 @@ public class VectorizationContext {
     if (child instanceof ExprNodeConstantDesc) {
      // Return a constant vector expression
       Object constantValue = ((ExprNodeConstantDesc) child).getValue();
-      Decimal128 decimalValue = castConstantToDecimal(constantValue, child.getTypeInfo());
+      HiveDecimal decimalValue = castConstantToDecimal(constantValue, child.getTypeInfo());
       return getConstantVectorExpression(decimalValue, returnType, Mode.PROJECTION);
     } else if (child instanceof ExprNodeNullDesc) {
       return getConstantVectorExpression(null, returnType, Mode.PROJECTION);
@@ -1801,10 +1801,7 @@ public class VectorizationContext {
         return 0;
       }
-    } else if (decimalTypePattern.matcher(constDesc.getTypeString()).matches()) {
-      HiveDecimal hd = (HiveDecimal) constDesc.getValue();
-      Decimal128 dvalue = new Decimal128();
-      dvalue.update(hd.unscaledValue(), (short) hd.scale());
-      return dvalue;
+      return (HiveDecimal) constDesc.getValue();
     } else {
       return constDesc.getValue();
     }
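
The rewritten castConstantToDecimal builds a HiveDecimal per primitive category and now tolerates an unrepresentable value by returning null instead of updating a Decimal128 in place. A small sketch of the two creation routes it uses (class name is ours; Hive jars assumed on the classpath):

    import org.apache.hadoop.hive.common.type.HiveDecimal;

    public class CastConstantSketch {
      public static void main(String[] args) {
        // FLOAT/DOUBLE go through their decimal string form.
        HiveDecimal fromDouble = HiveDecimal.create(String.valueOf(1.1d));
        // Integral categories are created from the long value directly.
        HiveDecimal fromLong = HiveDecimal.create(123L);
        System.out.println(fromDouble + " / " + fromLong);  // 1.1 / 123
        // create() can yield null for unrepresentable input, which the
        // patched method now logs (at debug) and propagates as a null constant.
      }
    }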

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedBatchUtil.java Fri Nov  7 20:41:34 2014
@@ -481,8 +481,7 @@ public class VectorizedBatchUtil {
       if (writableCol != null) {
         dcv.isNull[rowIndex] = false;
         HiveDecimalWritable wobj = (HiveDecimalWritable) writableCol;
-        dcv.vector[rowIndex].update(wobj.getHiveDecimal().unscaledValue(),
-            (short) wobj.getScale());
+        dcv.set(rowIndex, wobj);
       } else {
         setNullColIsNullValue(dcv, rowIndex);
       }

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedRowBatchCtx.java Fri Nov  7 20:41:34 2014
@@ -34,7 +34,6 @@ import java.util.regex.Pattern;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.common.type.Decimal128;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
 import org.apache.hadoop.hive.ql.exec.Utilities;
@@ -96,7 +95,7 @@ public class VectorizedRowBatchCtx {
   // list does not contain partition columns
   private List<Integer> colsToInclude;
 
-  private Map<Integer, String> columnTypeMap = null;
+  private Map<Integer, String> scratchColumnTypeMap = null;
 
   /**
    * Constructor for VectorizedRowBatchCtx
@@ -126,36 +125,17 @@ public class VectorizedRowBatchCtx {
   public VectorizedRowBatchCtx() {
 
   }
-  
-  /**
-   * Initializes the VectorizedRowBatch context based on an arbitrary object inspector
-   * Used by non-tablescan operators when they change the vectorization context
-   * @param hiveConf
-   * @param fileKey
-   *          The key on which to retrieve the extra column mapping from the map/reduce scratch
-   * @param rowOI
-   *          Object inspector that shapes the column types
-   */
-  public void init(Configuration hiveConf, String fileKey,
-      StructObjectInspector rowOI) {
-    Map<String, Map<Integer, String>> scratchColumnVectorTypes =
-            Utilities.getAllScratchColumnVectorTypeMaps(hiveConf);
-    columnTypeMap = scratchColumnVectorTypes.get(fileKey);
-    this.rowOI= rowOI;
-    this.rawRowOI = rowOI;
-  }
-  
 
   /**
    * Initializes the VectorizedRowBatch context based on an scratch column type map and
    * object inspector.
-   * @param columnTypeMap
+   * @param scratchColumnTypeMap
    * @param rowOI
    *          Object inspector that shapes the column types
    */
-  public void init(Map<Integer, String> columnTypeMap,
+  public void init(Map<Integer, String> scratchColumnTypeMap,
       StructObjectInspector rowOI) {
-    this.columnTypeMap = columnTypeMap;
+    this.scratchColumnTypeMap = scratchColumnTypeMap;
     this.rowOI= rowOI;
     this.rawRowOI = rowOI;
   }
@@ -179,7 +159,8 @@ public class VectorizedRowBatchCtx {
       IOException,
       SerDeException,
       InstantiationException,
-      IllegalAccessException, HiveException {
+      IllegalAccessException,
+      HiveException {
 
     Map<String, PartitionDesc> pathToPartitionInfo = Utilities
         .getMapRedWork(hiveConf).getMapWork().getPathToPartitionInfo();
@@ -189,8 +170,8 @@ public class VectorizedRowBatchCtx {
             split.getPath(), IOPrepareCache.get().getPartitionDescMap());
 
     String partitionPath = split.getPath().getParent().toString();
-    columnTypeMap = Utilities
-        .getAllScratchColumnVectorTypeMaps(hiveConf)
+    scratchColumnTypeMap = Utilities
+        .getMapWorkAllScratchColumnVectorTypeMaps(hiveConf)
         .get(partitionPath);
 
     Properties partProps =
@@ -557,7 +538,7 @@ public class VectorizedRowBatchCtx {
             dv.isRepeating = true;
           } else {
             HiveDecimal hd = (HiveDecimal) value;
-            dv.vector[0] = new Decimal128(hd.toString(), (short) hd.scale());
+            dv.set(0, hd);
             dv.isRepeating = true;
             dv.isNull[0] = false;      
           }
@@ -613,12 +594,12 @@ public class VectorizedRowBatchCtx {
   }
 
   private void addScratchColumnsToBatch(VectorizedRowBatch vrb) throws HiveException {
-    if (columnTypeMap != null && !columnTypeMap.isEmpty()) {
+    if (scratchColumnTypeMap != null && !scratchColumnTypeMap.isEmpty()) {
       int origNumCols = vrb.numCols;
-      int newNumCols = vrb.cols.length+columnTypeMap.keySet().size();
+      int newNumCols = vrb.cols.length+scratchColumnTypeMap.keySet().size();
       vrb.cols = Arrays.copyOf(vrb.cols, newNumCols);
       for (int i = origNumCols; i < newNumCols; i++) {
-       String typeName = columnTypeMap.get(i);
+       String typeName = scratchColumnTypeMap.get(i);
        if (typeName == null) {
          throw new HiveException("No type found for column type entry " + i);
        }
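
The renamed scratchColumnTypeMap drives addScratchColumnsToBatch: it maps absolute column indexes beyond the original batch width to vector type names. A toy illustration of that append logic (strings stand in for ColumnVector slots; everything here is ours, not Hive code):

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.Map;

    public class ScratchColumnSketch {
      public static void main(String[] args) {
        Map<Integer, String> scratchColumnTypeMap = new HashMap<Integer, String>();
        scratchColumnTypeMap.put(3, "double");
        scratchColumnTypeMap.put(4, "decimal(10,2)");

        String[] cols = {"c0", "c1", "c2"};
        int origNumCols = cols.length;
        int newNumCols = origNumCols + scratchColumnTypeMap.size();
        cols = Arrays.copyOf(cols, newNumCols);
        for (int i = origNumCols; i < newNumCols; i++) {
          String typeName = scratchColumnTypeMap.get(i);
          if (typeName == null) {
            throw new IllegalStateException("No type found for column type entry " + i);
          }
          cols[i] = "scratch:" + typeName;  // Hive allocates a ColumnVector here
        }
        System.out.println(Arrays.toString(cols));
      }
    }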

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToBoolean.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToBoolean.java?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToBoolean.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToBoolean.java Fri Nov  7 20:41:34 2014
@@ -41,6 +41,6 @@ public class CastDecimalToBoolean extend
    * Otherwise, return 1 for true.
    */
   protected void func(LongColumnVector outV, DecimalColumnVector inV,  int i) {
-    outV.vector[i] = inV.vector[i].getSignum() == 0 ? 0 : 1;
+    outV.vector[i] = inV.vector[i].getHiveDecimal().signum() == 0 ? 0 : 1;
   }
 }
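
The boolean cast now asks the unwrapped HiveDecimal for its signum. A quick standalone check of the truth rule (class name is ours):

    import org.apache.hadoop.hive.common.type.HiveDecimal;

    public class DecimalToBooleanSketch {
      public static void main(String[] args) {
        // Zero maps to 0 (false); any non-zero value maps to 1 (true).
        System.out.println(HiveDecimal.create("0.00").signum() == 0 ? 0 : 1);  // 0
        System.out.println(HiveDecimal.create("-3.2").signum() == 0 ? 0 : 1);  // 1
      }
    }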

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToDecimal.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToDecimal.java?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToDecimal.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToDecimal.java Fri Nov  7 20:41:34 2014
@@ -52,9 +52,8 @@ public class CastDecimalToDecimal extend
    * at position i in the respective vectors.
    */
   protected void convert(DecimalColumnVector outV, DecimalColumnVector inV, int i) {
-    outV.vector[i].update(inV.vector[i]);
-    outV.vector[i].changeScaleDestructive(outV.scale);
-    outV.checkPrecisionOverflow(i);
+    // The set routine enforces precision and scale.
+    outV.vector[i].set(inV.vector[i]);
   }
 
   /**
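
The new convert body relies on the set routine to enforce the output precision and scale. Assuming that enforcement bottoms out in HiveDecimal.enforcePrecisionScale (our reading, not stated in the diff), the behavior looks like this:

    import org.apache.hadoop.hive.common.type.HiveDecimal;

    public class DecimalToDecimalSketch {
      public static void main(String[] args) {
        HiveDecimal d = HiveDecimal.create("123.456");
        // Rounds to the target scale when the precision can hold the result...
        System.out.println(HiveDecimal.enforcePrecisionScale(d, 5, 1));  // 123.5
        // ...and returns null (a NULL cell) when it cannot.
        System.out.println(HiveDecimal.enforcePrecisionScale(d, 4, 2));  // null
      }
    }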

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToDouble.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToDouble.java?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToDouble.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToDouble.java Fri Nov  7 20:41:34 2014
@@ -34,6 +34,6 @@ public class CastDecimalToDouble extends
   }
 
   protected void func(DoubleColumnVector outV, DecimalColumnVector inV, int i) {
-    outV.vector[i] = inV.vector[i].doubleValue();
+    outV.vector[i] = inV.vector[i].getHiveDecimal().doubleValue();
   }
 }

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToLong.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToLong.java?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToLong.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToLong.java Fri Nov  7 20:41:34 2014
@@ -37,6 +37,6 @@ public class CastDecimalToLong extends F
 
   @Override
   protected void func(LongColumnVector outV, DecimalColumnVector inV,  int i) {
-    outV.vector[i] = inV.vector[i].longValue();
+    outV.vector[i] = inV.vector[i].getHiveDecimal().longValue();
   }
 }

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToString.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToString.java?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToString.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToString.java Fri Nov  7 20:41:34 2014
@@ -20,6 +20,7 @@ package org.apache.hadoop.hive.ql.exec.v
 
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
+import org.apache.hadoop.hive.ql.exec.vector.expressions.DecimalToStringUnaryUDF;
 
 /**
  * To support vectorized cast of decimal to string.
@@ -43,7 +44,7 @@ public class CastDecimalToString extends
 
   @Override
   protected void func(BytesColumnVector outV, DecimalColumnVector inV, int i) {
-    String s = inV.vector[i].getHiveDecimalString();
+    String s = inV.vector[i].getHiveDecimal().toString();
     byte[] b = null;
     try {
       b = s.getBytes("UTF-8");

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToTimestamp.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToTimestamp.java?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToTimestamp.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDecimalToTimestamp.java Fri Nov  7 20:41:34 2014
@@ -18,8 +18,7 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
-import org.apache.hadoop.hive.common.type.Decimal128;
-import org.apache.hadoop.hive.common.type.SqlMathUtil;
+import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 
@@ -31,34 +30,23 @@ import org.apache.hadoop.hive.ql.exec.ve
 public class CastDecimalToTimestamp extends FuncDecimalToLong {
   private static final long serialVersionUID = 1L;
 
-  /* The field tmp is a scratch variable for this operation. It is
-   * purposely not made static because if this code is ever made multi-threaded,
-   * each thread will then have its own VectorExpression tree and thus
-   * its own copy of the variable.
-   */
-  private transient Decimal128 tmp = null;
-  private static transient Decimal128 tenE9 = new Decimal128(1000000000);
+  private static transient HiveDecimal tenE9 = HiveDecimal.create(1000000000);
 
   public CastDecimalToTimestamp(int inputColumn, int outputColumn) {
     super(inputColumn, outputColumn);
-    tmp = new Decimal128(0);
   }
 
   public CastDecimalToTimestamp() {
-
-    // initialize local field after deserialization
-    tmp = new Decimal128(0);
   }
 
   @Override
   protected void func(LongColumnVector outV, DecimalColumnVector inV,  int i) {
-    tmp.update(inV.vector[i]);
-
-    // Reduce scale at most by 9, therefore multiplication will not require rounding.
-    int newScale = inV.scale > 9 ? (inV.scale - 9) : 0;
-    tmp.multiplyDestructive(tenE9, (short) newScale);
-
-    // set output
-    outV.vector[i] = tmp.longValue();
+    HiveDecimal result = inV.vector[i].getHiveDecimal().multiply(tenE9);
+    if (result == null) {
+      outV.noNulls = false;
+      outV.isNull[i] = true;
+    } else {
+      outV.vector[i] = result.longValue();
+    }
   }
 }
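
Context for the kernel above: vectorized timestamps are stored as a long count of nanoseconds, so the cast multiplies the decimal seconds by 10^9 and takes longValue(); HiveDecimal.multiply returning null (result did not fit) becomes a NULL output, exactly as the patched func checks. A standalone sketch (class name is ours):

    import org.apache.hadoop.hive.common.type.HiveDecimal;

    public class DecimalToTimestampSketch {
      public static void main(String[] args) {
        HiveDecimal tenE9 = HiveDecimal.create(1000000000);
        HiveDecimal seconds = HiveDecimal.create("1.5");
        HiveDecimal product = seconds.multiply(tenE9);
        if (product == null) {
          System.out.println("NULL (result did not fit)");
        } else {
          System.out.println(product.longValue() + " ns");  // 1500000000 ns
        }
      }
    }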

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDoubleToDecimal.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDoubleToDecimal.java?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDoubleToDecimal.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastDoubleToDecimal.java Fri Nov  7 20:41:34 2014
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
+import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
 
@@ -38,7 +39,7 @@ public class CastDoubleToDecimal extends
 
   @Override
   protected void func(DecimalColumnVector outV, DoubleColumnVector inV, int i) {
-    outV.vector[i].update(inV.vector[i], outV.scale);
-    outV.checkPrecisionOverflow(i);
+    String s = ((Double) inV.vector[i]).toString();
+    outV.vector[i].set(HiveDecimal.create(s));
   }
 }
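
The kernel above routes the double through its string form. That keeps the decimal equal to the number as printed rather than its exact binary expansion; a comparison (class name is ours):

    import java.math.BigDecimal;
    import org.apache.hadoop.hive.common.type.HiveDecimal;

    public class DoubleToDecimalSketch {
      public static void main(String[] args) {
        double d = 1.1;
        // Via the string form, as the patched kernel does: exactly 1.1.
        System.out.println(HiveDecimal.create(String.valueOf(d)));
        // Directly from the binary double: the base-2 expansion leaks through.
        System.out.println(new BigDecimal(d));  // 1.100000000000000088817...
      }
    }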

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToDecimal.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToDecimal.java?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToDecimal.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastLongToDecimal.java Fri Nov  7 20:41:34 2014
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
+import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 
@@ -40,7 +41,6 @@ public class CastLongToDecimal extends F
 
   @Override
   protected void func(DecimalColumnVector outV, LongColumnVector inV, int i) {
-    outV.vector[i].update(inV.vector[i], outV.scale);
-    outV.checkPrecisionOverflow(i);
+    outV.vector[i].set(HiveDecimal.create(inV.vector[i]));
   }
 }

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToDecimal.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToDecimal.java?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToDecimal.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastStringToDecimal.java Fri Nov  7 20:41:34 2014
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
+import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
@@ -58,14 +59,13 @@ public class CastStringToDecimal extends
        * making a new string.
        */
       s = new String(inV.vector[i], inV.start[i], inV.length[i], "UTF-8");
-      outV.vector[i].update(s, outV.scale);
+      outV.vector[i].set(HiveDecimal.create(s));
     } catch (Exception e) {
 
       // for any exception in conversion to decimal, produce NULL
       outV.noNulls = false;
       outV.isNull[i] = true;
     }
-    outV.checkPrecisionOverflow(i);
   }
 
   @Override
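
For the string cast, any failure to convert produces a NULL row rather than an error. A sketch of the decode-then-create path (class name is ours; whether create() returns null for bad input or something throws, the kernel's catch block NULLs the row either way):

    import java.nio.charset.StandardCharsets;
    import org.apache.hadoop.hive.common.type.HiveDecimal;

    public class StringToDecimalSketch {
      public static void main(String[] args) {
        byte[] bytes = "3.14".getBytes(StandardCharsets.UTF_8);
        // Decode the byte range, then parse, mirroring the kernel.
        String s = new String(bytes, 0, bytes.length, StandardCharsets.UTF_8);
        System.out.println(HiveDecimal.create(s));      // 3.14
        System.out.println(HiveDecimal.create("abc"));  // expected null -> NULL row
      }
    }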

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDecimal.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDecimal.java?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDecimal.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/CastTimestampToDecimal.java Fri Nov  7 20:41:34 2014
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
+import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 
@@ -39,9 +40,10 @@ public class CastTimestampToDecimal exte
   @Override
   protected void func(DecimalColumnVector outV, LongColumnVector inV, int i) {
 
-    // the resulting decimal value is 10e-9 * the input long value.
-    outV.vector[i].updateFixedPoint(inV.vector[i], (short) 9);
-    outV.vector[i].changeScaleDestructive(outV.scale);
-    outV.checkPrecisionOverflow(i);
+    // The resulting decimal value is 10e-9 * the input long value (i.e. seconds).
+    //
+    HiveDecimal result = HiveDecimal.create(inV.vector[i]);
+    result = result.scaleByPowerOfTen(-9);
+    outV.set(i, result);
   }
 }
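
The reverse direction of the cast above: the long nanosecond value is scaled down by a factor of 10^-9 via scaleByPowerOfTen, the same calls visible in the hunk (class name is ours):

    import org.apache.hadoop.hive.common.type.HiveDecimal;

    public class TimestampToDecimalSketch {
      public static void main(String[] args) {
        long nanos = 1500000000L;  // 1.5 seconds in vectorized timestamp storage
        HiveDecimal seconds = HiveDecimal.create(nanos).scaleByPowerOfTen(-9);
        System.out.println(seconds);  // 1.5
      }
    }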

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ConstantVectorExpression.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ConstantVectorExpression.java?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ConstantVectorExpression.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/ConstantVectorExpression.java Fri Nov  7 20:41:34 2014
@@ -20,7 +20,7 @@ package org.apache.hadoop.hive.ql.exec.v
 
 import java.util.Arrays;
 
-import org.apache.hadoop.hive.common.type.Decimal128;
+import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
 import org.apache.hadoop.hive.ql.exec.vector.*;
@@ -44,7 +44,7 @@ public class ConstantVectorExpression ex
   protected long longValue = 0;
   private double doubleValue = 0;
   private byte[] bytesValue = null;
-  private Decimal128 decimalValue = null;
+  private HiveDecimal decimalValue = null;
   private boolean isNullValue = false;
 
   private Type type;
@@ -85,7 +85,7 @@ public class ConstantVectorExpression ex
     setBytesValue(value.getValue().getBytes());
   }
 
-  public ConstantVectorExpression(int outputColumn, Decimal128 value) {
+  public ConstantVectorExpression(int outputColumn, HiveDecimal value) {
     this(outputColumn, "decimal");
     setDecimalValue(value);
   }
@@ -137,7 +137,7 @@ public class ConstantVectorExpression ex
     dcv.isRepeating = true;
     dcv.noNulls = !isNullValue;
     if (!isNullValue) {
-      dcv.vector[0].update(decimalValue);
+      dcv.vector[0].set(decimalValue);
     } else {
       dcv.isNull[0] = true;
     }
@@ -191,7 +191,7 @@ public class ConstantVectorExpression ex
     this.bytesValueLength = bytesValue.length;
   }
 
-  public void setDecimalValue(Decimal128 decimalValue) {
+  public void setDecimalValue(HiveDecimal decimalValue) {
     this.decimalValue = decimalValue;
   }
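
When a decimal constant is evaluated, a single writable slot is set and the column is marked repeating, so every row reads the same value. A hedged sketch of that fill (the three-argument DecimalColumnVector constructor and pre-initialized slots are our assumptions; the set call matches the diff):

    import org.apache.hadoop.hive.common.type.HiveDecimal;
    import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;

    public class ConstantDecimalSketch {
      public static void main(String[] args) {
        // Assumed constructor shape: (size, precision, scale).
        DecimalColumnVector dcv = new DecimalColumnVector(1024, 10, 2);
        dcv.isRepeating = true;
        dcv.noNulls = true;
        dcv.vector[0].set(HiveDecimal.create("9.99"));  // slot 0 serves all rows
        System.out.println(dcv.vector[0].getHiveDecimal());
      }
    }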
 

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DecimalColumnInList.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DecimalColumnInList.java?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DecimalColumnInList.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DecimalColumnInList.java Fri Nov  7 20:41:34 2014
@@ -18,11 +18,12 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
-import org.apache.hadoop.hive.common.type.Decimal128;
+import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor.Descriptor;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 
 import java.util.HashSet;
 
@@ -32,11 +33,11 @@ import java.util.HashSet;
 public class DecimalColumnInList extends VectorExpression implements IDecimalInExpr {
   private static final long serialVersionUID = 1L;
   private int inputCol;
-  private Decimal128[] inListValues;
+  private HiveDecimal[] inListValues;
   private int outputColumn;
 
   // The set object containing the IN list.
-  private transient HashSet<Decimal128> inSet;
+  private transient HashSet<HiveDecimal> inSet;
 
   public DecimalColumnInList() {
     super();
@@ -60,8 +61,8 @@ public class DecimalColumnInList extends
     }
 
     if (inSet == null) {
-      inSet = new HashSet<Decimal128>(inListValues.length);
-      for (Decimal128 val : inListValues) {
+      inSet = new HashSet<HiveDecimal>(inListValues.length);
+      for (HiveDecimal val : inListValues) {
         inSet.add(val);
       }
     }
@@ -72,7 +73,7 @@ public class DecimalColumnInList extends
     boolean[] nullPos = inputColVector.isNull;
     boolean[] outNulls = outputColVector.isNull;
     int n = batch.size;
-    Decimal128[] vector = inputColVector.vector;
+    HiveDecimalWritable[] vector = inputColVector.vector;
     long[] outputVector = outputColVector.vector;
 
     // return immediately if batch is empty
@@ -87,16 +88,16 @@ public class DecimalColumnInList extends
 
         // All must be selected otherwise size would be zero
         // Repeating property will not change.
-        outputVector[0] = inSet.contains(vector[0]) ? 1 : 0;
+        outputVector[0] = inSet.contains(vector[0].getHiveDecimal()) ? 1 : 0;
         outputColVector.isRepeating = true;
       } else if (batch.selectedInUse) {
         for(int j = 0; j != n; j++) {
           int i = sel[j];
-          outputVector[i] = inSet.contains(vector[i]) ? 1 : 0;
+          outputVector[i] = inSet.contains(vector[i].getHiveDecimal()) ? 1 : 0;
         }
       } else {
         for(int i = 0; i != n; i++) {
-          outputVector[i] = inSet.contains(vector[i]) ? 1 : 0;
+          outputVector[i] = inSet.contains(vector[i].getHiveDecimal()) ? 1 : 0;
         }
       }
     } else {
@@ -105,7 +106,7 @@ public class DecimalColumnInList extends
         //All must be selected otherwise size would be zero
         //Repeating property will not change.
         if (!nullPos[0]) {
-          outputVector[0] = inSet.contains(vector[0]) ? 1 : 0;
+          outputVector[0] = inSet.contains(vector[0].getHiveDecimal()) ? 1 : 0;
           outNulls[0] = false;
         } else {
           outNulls[0] = true;
@@ -116,14 +117,14 @@ public class DecimalColumnInList extends
           int i = sel[j];
           outNulls[i] = nullPos[i];
           if (!nullPos[i]) {
-            outputVector[i] = inSet.contains(vector[i]) ? 1 : 0;
+            outputVector[i] = inSet.contains(vector[i].getHiveDecimal()) ? 1 : 0;
           }
         }
       } else {
         System.arraycopy(nullPos, 0, outNulls, 0, n);
         for(int i = 0; i != n; i++) {
           if (!nullPos[i]) {
-            outputVector[i] = inSet.contains(vector[i]) ? 1 : 0;
+            outputVector[i] = inSet.contains(vector[i].getHiveDecimal()) ? 1 : 0;
           }
         }
       }
@@ -148,11 +149,7 @@ public class DecimalColumnInList extends
     return null;
   }
 
-  public Decimal128[] getInListValues() {
-    return this.inListValues;
-  }
-
-  public void setInListValues(Decimal128[] a) {
+  public void setInListValues(HiveDecimal[] a) {
     this.inListValues = a;
   }
 }
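
The IN list now lives in a HashSet<HiveDecimal>, with column values unwrapped before the membership probe. A standalone sketch (class name and sample values are ours; this relies on HiveDecimal value equality across the set and the probe):

    import java.util.HashSet;
    import org.apache.hadoop.hive.common.type.HiveDecimal;

    public class DecimalInListSketch {
      public static void main(String[] args) {
        HashSet<HiveDecimal> inSet = new HashSet<HiveDecimal>();
        inSet.add(HiveDecimal.create("1.5"));
        inSet.add(HiveDecimal.create("20"));
        // Column values arrive as HiveDecimalWritable; the kernel unwraps
        // them with getHiveDecimal() before probing the set.
        HiveDecimal probe = HiveDecimal.create("1.5");
        System.out.println(inSet.contains(probe) ? 1 : 0);  // 1
      }
    }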

Modified: hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DecimalUtil.java
URL: http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DecimalUtil.java?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DecimalUtil.java (original)
+++ hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/DecimalUtil.java Fri Nov  7 20:41:34 2014
@@ -18,32 +18,60 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
-import org.apache.hadoop.hive.common.type.Decimal128;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
-import org.apache.hadoop.hive.common.type.SqlMathUtil;
-import org.apache.hadoop.hive.common.type.UnsignedInt128;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.udf.generic.RoundUtils;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 
 /**
  * Utility functions for vector operations on decimal values.
  */
 public class DecimalUtil {
 
-  public static final Decimal128 DECIMAL_ONE = new Decimal128();
-  private static final UnsignedInt128 scratchUInt128 = new UnsignedInt128();
+  public static int compare(HiveDecimalWritable writableLeft, HiveDecimal right) {
+    return writableLeft.getHiveDecimal().compareTo(right);
+  }
 
-  static {
-    DECIMAL_ONE.update(1L, (short) 0);
+  public static int compare(HiveDecimal left, HiveDecimalWritable writableRight) {
+    return left.compareTo(writableRight.getHiveDecimal());
   }
 
   // Addition with overflow check. Overflow produces NULL output.
-  public static void addChecked(int i, Decimal128 left, Decimal128 right,
+  public static void addChecked(int i, HiveDecimal left, HiveDecimal right,
+      DecimalColumnVector outputColVector) {
+    try {
+      outputColVector.set(i, left.add(right));
+    } catch (ArithmeticException e) {  // catch on overflow
+      outputColVector.noNulls = false;
+      outputColVector.isNull[i] = true;
+    }
+  }
+
+  public static void addChecked(int i, HiveDecimalWritable left, HiveDecimalWritable right,
+      DecimalColumnVector outputColVector) {
+    try {
+      outputColVector.set(i, left.getHiveDecimal().add(right.getHiveDecimal()));
+    } catch (ArithmeticException e) {  // catch on overflow
+      outputColVector.noNulls = false;
+      outputColVector.isNull[i] = true;
+    }
+  }
+
+  public static void addChecked(int i, HiveDecimalWritable left, HiveDecimal right,
+      DecimalColumnVector outputColVector) {
+    try {
+      outputColVector.set(i, left.getHiveDecimal().add(right));
+    } catch (ArithmeticException e) {  // catch on overflow
+      outputColVector.noNulls = false;
+      outputColVector.isNull[i] = true;
+    }
+  }
+
+  public static void addChecked(int i, HiveDecimal left, HiveDecimalWritable right,
       DecimalColumnVector outputColVector) {
     try {
-      Decimal128.add(left, right, outputColVector.vector[i], outputColVector.scale);
-      outputColVector.vector[i].checkPrecisionOverflow(outputColVector.precision);
+      outputColVector.set(i, left.add(right.getHiveDecimal()));
     } catch (ArithmeticException e) {  // catch on overflow
       outputColVector.noNulls = false;
       outputColVector.isNull[i] = true;
@@ -51,11 +79,40 @@ public class DecimalUtil {
   }
 
   // Subtraction with overflow check. Overflow produces NULL output.
-  public static void subtractChecked(int i, Decimal128 left, Decimal128 right,
+  public static void subtractChecked(int i, HiveDecimal left, HiveDecimal right,
+      DecimalColumnVector outputColVector) {
+    try {
+      outputColVector.set(i, left.subtract(right));
+    } catch (ArithmeticException e) {  // catch on overflow
+      outputColVector.noNulls = false;
+      outputColVector.isNull[i] = true;
+    }
+  }
+
+  public static void subtractChecked(int i, HiveDecimalWritable left, HiveDecimalWritable right,
       DecimalColumnVector outputColVector) {
     try {
-      Decimal128.subtract(left, right, outputColVector.vector[i], outputColVector.scale);
-      outputColVector.vector[i].checkPrecisionOverflow(outputColVector.precision);
+      outputColVector.set(i, left.getHiveDecimal().subtract(right.getHiveDecimal()));
+    } catch (ArithmeticException e) {  // catch on overflow
+      outputColVector.noNulls = false;
+      outputColVector.isNull[i] = true;
+    }
+  }
+
+  public static void subtractChecked(int i, HiveDecimalWritable left, HiveDecimal right,
+      DecimalColumnVector outputColVector) {
+    try {
+      outputColVector.set(i, left.getHiveDecimal().subtract(right));
+    } catch (ArithmeticException e) {  // catch on overflow
+      outputColVector.noNulls = false;
+      outputColVector.isNull[i] = true;
+    }
+  }
+
+  public static void subtractChecked(int i, HiveDecimal left, HiveDecimalWritable right,
+      DecimalColumnVector outputColVector) {
+    try {
+      outputColVector.set(i, left.subtract(right.getHiveDecimal()));
     } catch (ArithmeticException e) {  // catch on overflow
       outputColVector.noNulls = false;
       outputColVector.isNull[i] = true;
@@ -63,11 +120,40 @@ public class DecimalUtil {
   }
 
   // Multiplication with overflow check. Overflow produces NULL output.
-  public static void multiplyChecked(int i, Decimal128 left, Decimal128 right,
+  public static void multiplyChecked(int i, HiveDecimal left, HiveDecimal right,
+      DecimalColumnVector outputColVector) {
+    try {
+      outputColVector.set(i, left.multiply(right));
+    } catch (ArithmeticException e) {  // catch on overflow
+      outputColVector.noNulls = false;
+      outputColVector.isNull[i] = true;
+    }
+  }
+
+  public static void multiplyChecked(int i, HiveDecimalWritable left, HiveDecimalWritable right,
       DecimalColumnVector outputColVector) {
     try {
-      Decimal128.multiply(left, right, outputColVector.vector[i], outputColVector.scale);
-      outputColVector.vector[i].checkPrecisionOverflow(outputColVector.precision);
+      outputColVector.set(i, left.getHiveDecimal().multiply(right.getHiveDecimal()));
+    } catch (ArithmeticException e) {  // catch on overflow
+      outputColVector.noNulls = false;
+      outputColVector.isNull[i] = true;
+    }
+  }
+
+  public static void multiplyChecked(int i, HiveDecimalWritable left, HiveDecimal right,
+      DecimalColumnVector outputColVector) {
+    try {
+      outputColVector.set(i, left.getHiveDecimal().multiply(right));
+    } catch (ArithmeticException e) {  // catch on overflow
+      outputColVector.noNulls = false;
+      outputColVector.isNull[i] = true;
+    }
+  }
+
+  public static void multiplyChecked(int i, HiveDecimal left, HiveDecimalWritable right,
+      DecimalColumnVector outputColVector) {
+    try {
+      outputColVector.set(i, left.multiply(right.getHiveDecimal()));
     } catch (ArithmeticException e) {  // catch on overflow
       outputColVector.noNulls = false;
       outputColVector.isNull[i] = true;
@@ -75,11 +161,40 @@ public class DecimalUtil {
   }
 
   // Division with overflow/zero-divide check. Error produces NULL output.
-  public static void divideChecked(int i, Decimal128 left, Decimal128 right,
+  public static void divideChecked(int i, HiveDecimal left, HiveDecimal right,
+      DecimalColumnVector outputColVector) {
+    try {
+      outputColVector.set(i, left.divide(right));
+    } catch (ArithmeticException e) {  // catch on error
+      outputColVector.noNulls = false;
+      outputColVector.isNull[i] = true;
+    }
+  }
+
+  public static void divideChecked(int i, HiveDecimalWritable left, HiveDecimalWritable right,
+      DecimalColumnVector outputColVector) {
+    try {
+      outputColVector.set(i, left.getHiveDecimal().divide(right.getHiveDecimal()));
+    } catch (ArithmeticException e) {  // catch on error
+      outputColVector.noNulls = false;
+      outputColVector.isNull[i] = true;
+    }
+  }
+
+  public static void divideChecked(int i, HiveDecimalWritable left, HiveDecimal right,
       DecimalColumnVector outputColVector) {
     try {
-      Decimal128.divide(left, right, outputColVector.vector[i], outputColVector.scale);
-      outputColVector.vector[i].checkPrecisionOverflow(outputColVector.precision);
+      outputColVector.set(i, left.getHiveDecimal().divide(right));
+    } catch (ArithmeticException e) {  // catch on error
+      outputColVector.noNulls = false;
+      outputColVector.isNull[i] = true;
+    }
+  }
+
+  public static void divideChecked(int i, HiveDecimal left, HiveDecimalWritable right,
+      DecimalColumnVector outputColVector) {
+    try {
+      outputColVector.set(i, left.divide(right.getHiveDecimal()));
     } catch (ArithmeticException e) {  // catch on error
       outputColVector.noNulls = false;
       outputColVector.isNull[i] = true;
@@ -87,80 +202,156 @@ public class DecimalUtil {
   }
 
   // Modulo operator with overflow/zero-divide check.
-  public static void moduloChecked(int i, Decimal128 left, Decimal128 right,
+  public static void moduloChecked(int i, HiveDecimal left, HiveDecimal right,
+      DecimalColumnVector outputColVector) {
+    try {
+      outputColVector.set(i, left.remainder(right));
+    } catch (ArithmeticException e) {  // catch on error
+      outputColVector.noNulls = false;
+      outputColVector.isNull[i] = true;
+    }
+  }
+
+  public static void moduloChecked(int i, HiveDecimalWritable left, HiveDecimalWritable right,
+      DecimalColumnVector outputColVector) {
+    try {
+      outputColVector.set(i, left.getHiveDecimal().remainder(right.getHiveDecimal()));
+    } catch (ArithmeticException e) {  // catch on error
+      outputColVector.noNulls = false;
+      outputColVector.isNull[i] = true;
+    }
+  }
+
+  public static void moduloChecked(int i, HiveDecimalWritable left, HiveDecimal right,
+      DecimalColumnVector outputColVector) {
+    try {
+      outputColVector.set(i, left.getHiveDecimal().remainder(right));
+    } catch (ArithmeticException e) {  // catch on error
+      outputColVector.noNulls = false;
+      outputColVector.isNull[i] = true;
+    }
+  }
+
+  public static void moduloChecked(int i, HiveDecimal left, HiveDecimalWritable right,
      DecimalColumnVector outputColVector) {
    try {
-      Decimal128.modulo(left, right, outputColVector.vector[i], outputColVector.scale);
-      outputColVector.vector[i].checkPrecisionOverflow(outputColVector.precision);
+      outputColVector.set(i, left.remainder(right.getHiveDecimal()));
     } catch (ArithmeticException e) {  // catch on error
       outputColVector.noNulls = false;
       outputColVector.isNull[i] = true;
     }
   }
 
-  public static void floor(int i, Decimal128 input, DecimalColumnVector outputColVector) {
+  public static void floor(int i, HiveDecimal input, DecimalColumnVector outputColVector) {
     try {
-      Decimal128 result = outputColVector.vector[i];
-      result.update(input);
-      result.zeroFractionPart(scratchUInt128);
-      result.changeScaleDestructive(outputColVector.scale);
-      if ((result.compareTo(input) != 0) && input.getSignum() < 0) {
-        result.subtractDestructive(DECIMAL_ONE, outputColVector.scale);
-      }
+      outputColVector.set(i, input.setScale(0, HiveDecimal.ROUND_FLOOR));
     } catch (ArithmeticException e) {
       outputColVector.noNulls = false;
       outputColVector.isNull[i] = true;
     }
   }
 
-  public static void ceiling(int i, Decimal128 input, DecimalColumnVector outputColVector) {
+  public static void floor(int i, HiveDecimalWritable input, DecimalColumnVector outputColVector) {
     try {
-      Decimal128 result = outputColVector.vector[i];
-      result.update(input);
-      result.zeroFractionPart(scratchUInt128);
-      result.changeScaleDestructive(outputColVector.scale);
-      if ((result.compareTo(input) != 0) && input.getSignum() > 0) {
-        result.addDestructive(DECIMAL_ONE, outputColVector.scale);
-      }
+      outputColVector.set(i, input.getHiveDecimal().setScale(0, HiveDecimal.ROUND_FLOOR));
     } catch (ArithmeticException e) {
       outputColVector.noNulls = false;
       outputColVector.isNull[i] = true;
     }
   }
 
-  public static void round(int i, Decimal128 input, DecimalColumnVector outputColVector) {
-    HiveDecimal inputHD = HiveDecimal.create(input.toBigDecimal());
-    HiveDecimal result = RoundUtils.round(inputHD, outputColVector.scale);
-    if (result == null) {
+  public static void ceiling(int i, HiveDecimal input, DecimalColumnVector outputColVector) {
+    try {
+      outputColVector.set(i, input.setScale(0, HiveDecimal.ROUND_CEILING));
+    } catch (ArithmeticException e) {
       outputColVector.noNulls = false;
       outputColVector.isNull[i] = true;
-    } else {
-      outputColVector.vector[i].update(result.bigDecimalValue().toPlainString(), outputColVector.scale);
     }
   }
 
-  public static void sign(int i, Decimal128 input, LongColumnVector outputColVector) {
-    outputColVector.vector[i] = input.getSignum();
+  public static void ceiling(int i, HiveDecimalWritable input, DecimalColumnVector outputColVector) {
+    try {
+      outputColVector.set(i, input.getHiveDecimal().setScale(0, HiveDecimal.ROUND_CEILING));
+    } catch (ArithmeticException e) {
+      outputColVector.noNulls = false;
+      outputColVector.isNull[i] = true;
+    }
+  }
+
+  public static void round(int i, HiveDecimal input, int decimalPlaces, DecimalColumnVector outputColVector) {
+    try {
+      outputColVector.set(i, RoundUtils.round(input, decimalPlaces));
+    } catch (ArithmeticException e) {
+      outputColVector.noNulls = false;
+      outputColVector.isNull[i] = true;
+    }
+  }
+
+  public static void round(int i, HiveDecimalWritable input, int decimalPlaces, DecimalColumnVector outputColVector) {
+    try {
+      outputColVector.set(i, RoundUtils.round(input.getHiveDecimal(), decimalPlaces));
+    } catch (ArithmeticException e) {
+      outputColVector.noNulls = false;
+      outputColVector.isNull[i] = true;
+    }
+  }
+
+  public static void round(int i, HiveDecimal input, DecimalColumnVector outputColVector) {
+    try {
+      outputColVector.set(i, RoundUtils.round(input, outputColVector.scale));
+    } catch (ArithmeticException e) {
+      outputColVector.noNulls = false;
+      outputColVector.isNull[i] = true;
+    }
+  }
+
+  public static void round(int i, HiveDecimalWritable input, DecimalColumnVector outputColVector) {
+    try {
+      outputColVector.set(i, RoundUtils.round(input.getHiveDecimal(), outputColVector.scale));
+    } catch (ArithmeticException e) {
+      outputColVector.noNulls = false;
+      outputColVector.isNull[i] = true;
+    }
+  }
+
+  public static void sign(int i, HiveDecimal input, LongColumnVector outputColVector) {
+    outputColVector.vector[i] = input.signum();
+  }
+
+  public static void sign(int i, HiveDecimalWritable input, LongColumnVector outputColVector) {
+    outputColVector.vector[i] = input.getHiveDecimal().signum();
+  }
+
+  public static void abs(int i, HiveDecimal input, DecimalColumnVector outputColVector) {
+    try {
+      outputColVector.set(i, input.abs());
+    } catch (ArithmeticException e) {
+      outputColVector.noNulls = false;
+      outputColVector.isNull[i] = true;
+    }
+  }
+
+  public static void abs(int i, HiveDecimalWritable input, DecimalColumnVector outputColVector) {
+    try {
+      outputColVector.set(i, input.getHiveDecimal().abs());
+    } catch (ArithmeticException e) {
+      outputColVector.noNulls = false;
+      outputColVector.isNull[i] = true;
+    }
   }
 
-  public static void abs(int i, Decimal128 input, DecimalColumnVector outputColVector) {
-    Decimal128 result = outputColVector.vector[i];
+  public static void negate(int i, HiveDecimal input, DecimalColumnVector outputColVector) {
     try {
-      result.update(input);
-      result.absDestructive();
-      result.changeScaleDestructive(outputColVector.scale);
+      outputColVector.set(i, input.negate());
     } catch (ArithmeticException e) {
       outputColVector.noNulls = false;
       outputColVector.isNull[i] = true;
     }
   }
 
-  public static void negate(int i, Decimal128 input, DecimalColumnVector outputColVector) {
-    Decimal128 result = outputColVector.vector[i];
+  public static void negate(int i, HiveDecimalWritable input, DecimalColumnVector outputColVector) {
     try {
-      result.update(input);
-      result.negateDestructive();
-      result.changeScaleDestructive(outputColVector.scale);
+      outputColVector.set(i, input.getHiveDecimal().negate());
     } catch (ArithmeticException e) {
       outputColVector.noNulls = false;
       outputColVector.isNull[i] = true;

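The checked helpers above all follow one convention: decimal arithmetic that fails (zero divide, or a result past the output precision) yields SQL NULL instead of failing the query. A minimal sketch of that pattern, assuming only the Hive 0.14-era APIs visible in this diff (DecimalColumnVector.set(int, HiveDecimal) and HiveDecimal arithmetic that throws ArithmeticException); the class and method names below are illustrative, not part of the patch:

import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;

public class CheckedDecimalSketch {
  // Divide two decimals into row i of out, mapping any arithmetic error to NULL.
  public static void divideChecked(int i, HiveDecimal left, HiveDecimal right,
      DecimalColumnVector out) {
    try {
      // set() stores the result at the vector's scale; if the operation
      // returned null on overflow, set() is expected to null the row as well.
      out.set(i, left.divide(right));
    } catch (ArithmeticException e) {
      // Zero divide or scale/precision error: emit SQL NULL, not an exception.
      out.noNulls = false;
      out.isNull[i] = true;
    }
  }
}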
Modified: 
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FilterDecimalColumnInList.java
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FilterDecimalColumnInList.java?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- 
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FilterDecimalColumnInList.java
 (original)
+++ 
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FilterDecimalColumnInList.java
 Fri Nov  7 20:41:34 2014
@@ -18,10 +18,11 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
-import org.apache.hadoop.hive.common.type.Decimal128;
+import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor.Descriptor;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 
 import java.util.HashSet;
 
@@ -31,10 +32,10 @@ import java.util.HashSet;
 public class FilterDecimalColumnInList extends VectorExpression implements IDecimalInExpr {
   private static final long serialVersionUID = 1L;
   private int inputCol;
-  private Decimal128[] inListValues;
+  private HiveDecimal[] inListValues;
 
   // The set object containing the IN list.
-  private transient HashSet<Decimal128> inSet;
+  private transient HashSet<HiveDecimal> inSet;
 
   public FilterDecimalColumnInList() {
     super();
@@ -57,8 +58,8 @@ public class FilterDecimalColumnInList e
     }
 
     if (inSet == null) {
-      inSet = new HashSet<Decimal128>(inListValues.length);
-      for (Decimal128 val : inListValues) {
+      inSet = new HashSet<HiveDecimal>(inListValues.length);
+      for (HiveDecimal val : inListValues) {
         inSet.add(val);
       }
     }
@@ -67,7 +68,7 @@ public class FilterDecimalColumnInList e
     int[] sel = batch.selected;
     boolean[] nullPos = inputColVector.isNull;
     int n = batch.size;
-    Decimal128[] vector = inputColVector.vector;
+    HiveDecimalWritable[] vector = inputColVector.vector;
 
     // return immediately if batch is empty
     if (n == 0) {
@@ -80,7 +81,7 @@ public class FilterDecimalColumnInList e
         // All must be selected otherwise size would be zero
         // Repeating property will not change.
 
-        if (!(inSet.contains(vector[0]))) {
+        if (!(inSet.contains(vector[0].getHiveDecimal()))) {
           //Entire batch is filtered out.
           batch.size = 0;
         }
@@ -88,7 +89,7 @@ public class FilterDecimalColumnInList e
         int newSize = 0;
         for(int j = 0; j != n; j++) {
           int i = sel[j];
-          if (inSet.contains(vector[i])) {
+          if (inSet.contains(vector[i].getHiveDecimal())) {
             sel[newSize++] = i;
           }
         }
@@ -96,7 +97,7 @@ public class FilterDecimalColumnInList e
       } else {
         int newSize = 0;
         for(int i = 0; i != n; i++) {
-          if (inSet.contains(vector[i])) {
+          if (inSet.contains(vector[i].getHiveDecimal())) {
             sel[newSize++] = i;
           }
         }
@@ -111,7 +112,7 @@ public class FilterDecimalColumnInList e
         //All must be selected otherwise size would be zero
         //Repeating property will not change.
         if (!nullPos[0]) {
-          if (!inSet.contains(vector[0])) {
+          if (!inSet.contains(vector[0].getHiveDecimal())) {
 
             //Entire batch is filtered out.
             batch.size = 0;
@@ -124,7 +125,7 @@ public class FilterDecimalColumnInList e
         for(int j = 0; j != n; j++) {
           int i = sel[j];
           if (!nullPos[i]) {
-           if (inSet.contains(vector[i])) {
+           if (inSet.contains(vector[i].getHiveDecimal())) {
              sel[newSize++] = i;
            }
           }
@@ -136,7 +137,7 @@ public class FilterDecimalColumnInList e
         int newSize = 0;
         for(int i = 0; i != n; i++) {
           if (!nullPos[i]) {
-            if (inSet.contains(vector[i])) {
+            if (inSet.contains(vector[i].getHiveDecimal())) {
               sel[newSize++] = i;
             }
           }
@@ -167,11 +168,7 @@ public class FilterDecimalColumnInList e
     return null;
   }
 
-  public Decimal128[] getInListValues() {
-    return this.inListValues;
-  }
-
-  public void setInListValues(Decimal128[] a) {
+  public void setInListValues(HiveDecimal[] a) {
     this.inListValues = a;
   }
 }

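One subtlety of the retyped IN filter: the lookup set holds HiveDecimal while the column stores HiveDecimalWritable, so every probe unwraps the writable first. HiveDecimal compares by numeric value (its representation is normalized on creation), which is what makes the HashSet membership test sound. A self-contained sketch of that lookup, with illustrative names:

import java.util.HashSet;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;

public class InListSketch {
  // Returns true if the candidate's value appears in the IN list.
  public static boolean inList(HiveDecimalWritable candidate, HiveDecimal[] inListValues) {
    HashSet<HiveDecimal> inSet = new HashSet<HiveDecimal>(inListValues.length);
    for (HiveDecimal val : inListValues) {
      inSet.add(val);
    }
    // Unwrap before probing, exactly as the filter's inner loops do.
    return inSet.contains(candidate.getHiveDecimal());
  }
}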
Modified: 
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncDecimalToLong.java
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncDecimalToLong.java?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- 
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncDecimalToLong.java
 (original)
+++ 
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncDecimalToLong.java
 Fri Nov  7 20:41:34 2014
@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
-import org.apache.hadoop.hive.common.type.Decimal128;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
@@ -117,18 +116,6 @@ public abstract class FuncDecimalToLong 
     return outputColumn;
   }
 
-  public void setOutputColumn(int outputColumn) {
-    this.outputColumn = outputColumn;
-  }
-
-  public int getInputColumn() {
-    return inputColumn;
-  }
-
-  public void setInputColumn(int inputColumn) {
-    this.inputColumn = inputColumn;
-  }
-
   @Override
   public String getOutputType() {
     return "long";

Modified: 
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncDoubleToDecimal.java
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncDoubleToDecimal.java?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- 
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncDoubleToDecimal.java
 (original)
+++ 
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncDoubleToDecimal.java
 Fri Nov  7 20:41:34 2014
@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
-import org.apache.hadoop.hive.common.type.Decimal128;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.DoubleColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;

Modified: 
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncLongToDecimal.java
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncLongToDecimal.java?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- 
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncLongToDecimal.java
 (original)
+++ 
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncLongToDecimal.java
 Fri Nov  7 20:41:34 2014
@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
-import org.apache.hadoop.hive.common.type.Decimal128;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;

Modified: 
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncRoundWithNumDigitsDecimalToDecimal.java
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncRoundWithNumDigitsDecimalToDecimal.java?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- 
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncRoundWithNumDigitsDecimalToDecimal.java
 (original)
+++ 
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/FuncRoundWithNumDigitsDecimalToDecimal.java
 Fri Nov  7 20:41:34 2014
@@ -21,9 +21,9 @@ package org.apache.hadoop.hive.ql.exec.v
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
 import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
 import org.apache.hadoop.hive.ql.exec.vector.VectorExpressionDescriptor;
-import org.apache.hadoop.hive.common.type.Decimal128;
 import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.DecimalUtil;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 
 import java.util.Arrays;
 
@@ -61,7 +61,7 @@ public class FuncRoundWithNumDigitsDecim
     boolean[] outputIsNull = outputColVector.isNull;
     outputColVector.noNulls = inputColVector.noNulls;
     int n = batch.size;
-    Decimal128[] vector = inputColVector.vector;
+    HiveDecimalWritable[] vector = inputColVector.vector;
 
     // return immediately if batch is empty
     if (n == 0) {
@@ -73,7 +73,7 @@ public class FuncRoundWithNumDigitsDecim
       // All must be selected otherwise size would be zero
       // Repeating property will not change.
       outputIsNull[0] = inputIsNull[0];
-      DecimalUtil.round(0, vector[0], outputColVector);
+      DecimalUtil.round(0, vector[0], decimalPlaces, outputColVector);
       outputColVector.isRepeating = true;
     } else if (inputColVector.noNulls) {
       if (batch.selectedInUse) {
@@ -82,14 +82,14 @@ public class FuncRoundWithNumDigitsDecim
 
           // Set isNull because decimal operation can yield a null.
           outputIsNull[i] = false;
-          DecimalUtil.round(i, vector[i], outputColVector);
+          DecimalUtil.round(i, vector[i], decimalPlaces, outputColVector);
         }
       } else {
 
         // Set isNull because decimal operation can yield a null.
         Arrays.fill(outputIsNull, 0, n, false);
         for(int i = 0; i != n; i++) {
-          DecimalUtil.round(i, vector[i], outputColVector);
+          DecimalUtil.round(i, vector[i], decimalPlaces, outputColVector);
         }
       }
       outputColVector.isRepeating = false;
@@ -98,12 +98,12 @@ public class FuncRoundWithNumDigitsDecim
         for(int j = 0; j != n; j++) {
           int i = sel[j];
           outputIsNull[i] = inputIsNull[i];
-          DecimalUtil.round(i, vector[i], outputColVector);
+          DecimalUtil.round(i, vector[i], decimalPlaces, outputColVector);
         }
       } else {
         System.arraycopy(inputIsNull, 0, outputIsNull, 0, n);
         for(int i = 0; i != n; i++) {
-          DecimalUtil.round(i, vector[i], outputColVector);
+          DecimalUtil.round(i, vector[i], decimalPlaces, outputColVector);
         }
       }
       outputColVector.isRepeating = false;
@@ -119,27 +119,6 @@ public class FuncRoundWithNumDigitsDecim
   public String getOutputType() {
     return outputType;
   }
-  
-  public int getColNum() {
-    return colNum;
-  }
-
-  public void setColNum(int colNum) {
-    this.colNum = colNum;
-  }
-
-  public void setOutputColumn(int outputColumn) {
-    this.outputColumn = outputColumn;
-  }
-
-  public int getDecimalPlaces() {
-    return decimalPlaces;
-  }
-
-  public void setDecimalPlaces(int decimalPlaces) {
-    this.decimalPlaces = decimalPlaces;
-  }
-
 
   @Override
   public VectorExpressionDescriptor.Descriptor getDescriptor() {

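Round-with-digits now threads the expression's decimalPlaces into every DecimalUtil.round call instead of relying on the output vector's scale. A hedged usage sketch of the new overload (it assumes the DecimalUtil overloads added earlier in this commit; the driver method itself is hypothetical):

import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.expressions.DecimalUtil;

public class RoundSketch {
  // Round every entry of `in` to decimalPlaces digits, writing into `out`.
  public static void roundAll(DecimalColumnVector in, DecimalColumnVector out,
      int n, int decimalPlaces) {
    for (int i = 0; i != n; i++) {
      // Errors inside round() surface as NULL rows in `out`, per DecimalUtil.
      DecimalUtil.round(i, in.vector[i], decimalPlaces, out);
    }
  }
}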
Modified: 
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IDecimalInExpr.java
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IDecimalInExpr.java?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- 
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IDecimalInExpr.java
 (original)
+++ 
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/IDecimalInExpr.java
 Fri Nov  7 20:41:34 2014
@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
-import org.apache.hadoop.hive.common.type.Decimal128;
+import org.apache.hadoop.hive.common.type.HiveDecimal;
 
 public interface IDecimalInExpr {
-  void setInListValues(Decimal128[] inVals);
+  void setInListValues(HiveDecimal[] inVals);
 }

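A hypothetical caller of the retyped interface; HiveDecimal.create(String) is assumed available here, and IDecimalInExpr is imported from the package above:

import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.ql.exec.vector.expressions.IDecimalInExpr;

public class InExprSketch {
  // Bind two constant decimals to an IN expression.
  public static void bind(IDecimalInExpr expr) {
    expr.setInListValues(new HiveDecimal[] {
        HiveDecimal.create("1.50"),   // representation is normalized internally
        HiveDecimal.create("-7.25")
    });
  }
}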
Modified: 
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/NullUtil.java
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/NullUtil.java?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- 
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/NullUtil.java
 (original)
+++ 
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/NullUtil.java
 Fri Nov  7 20:41:34 2014
@@ -300,18 +300,18 @@ public class NullUtil {
     if (v.noNulls) {
       return;
     } else if (v.isRepeating && v.isNull[0]) {
-      v.vector[0].setNullDataValue();
+      v.setNullDataValue(0);
     } else if (selectedInUse) {
       for (int j = 0; j != n; j++) {
         int i = sel[j];
         if(v.isNull[i]) {
-          v.vector[i].setNullDataValue();
+          v.setNullDataValue(i);
         }
       }
     } else {
       for (int i = 0; i != n; i++) {
         if(v.isNull[i]) {
-          v.vector[i].setNullDataValue();
+          v.setNullDataValue(i);
         }
       }
     }

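The shape of this change: with Decimal128[] every slot held a live object, so NULL rows were scrubbed through vector[i].setNullDataValue(); with HiveDecimalWritable[] the column vector owns placeholder writing, hence the instance method taking an index. A sketch of the simplest branch, mirroring the patched loop with explanatory comments:

import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;

public class NullScrubSketch {
  // Install safe placeholder data for every NULL row of a non-repeating,
  // non-selected batch (the final branch of the patched NullUtil method).
  public static void scrub(DecimalColumnVector v, int n) {
    if (v.noNulls) {
      return;  // no NULL rows, nothing to scrub
    }
    for (int i = 0; i != n; i++) {
      if (v.isNull[i]) {
        v.setNullDataValue(i);  // placeholder keeps later reads well-defined
      }
    }
  }
}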
Modified: 
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriter.java
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriter.java?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- 
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriter.java
 (original)
+++ 
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriter.java
 Fri Nov  7 20:41:34 2014
@@ -18,9 +18,10 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
-import org.apache.hadoop.hive.common.type.Decimal128;
+import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.ql.exec.vector.ColumnVector;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.io.Writable;
 
@@ -34,7 +35,8 @@ public interface VectorExpressionWriter 
   Object writeValue(long value) throws HiveException;
   Object writeValue(double value) throws HiveException;
   Object writeValue(byte[] value, int start, int length) throws HiveException;
-  Object writeValue(Decimal128 value) throws HiveException;
+  Object writeValue(HiveDecimalWritable value) throws HiveException;
+  Object writeValue(HiveDecimal value) throws HiveException;
   Object setValue(Object row, ColumnVector column, int columnRow) throws HiveException;
   Object initValue(Object ost) throws HiveException;
 }

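A hypothetical caller showing the two decimal overloads side by side; `writer` is assumed to be bound to a decimal object inspector (see the factory changes below):

import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.ql.exec.vector.DecimalColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriter;
import org.apache.hadoop.hive.ql.metadata.HiveException;

public class WriterSketch {
  // Hand row i of a decimal column to the writer without unwrapping;
  // the writable overload can avoid materializing a HiveDecimal per row.
  public static Object writeRow(VectorExpressionWriter writer,
      DecimalColumnVector col, int i) throws HiveException {
    return writer.writeValue(col.vector[i]);
  }

  // Constant variant, e.g. for a literal expression.
  public static Object writeConstant(VectorExpressionWriter writer)
      throws HiveException {
    return writer.writeValue(HiveDecimal.ZERO);
  }
}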
Modified: 
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriterFactory.java
URL: 
http://svn.apache.org/viewvc/hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriterFactory.java?rev=1637444&r1=1637443&r2=1637444&view=diff
==============================================================================
--- 
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriterFactory.java
 (original)
+++ 
hive/branches/spark/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/expressions/VectorExpressionWriterFactory.java
 Fri Nov  7 20:41:34 2014
@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.hive.ql.exec.vector.expressions;
 
-import java.math.BigDecimal;
 import java.sql.Date;
 import java.sql.Timestamp;
 import java.util.ArrayList;
@@ -27,7 +26,6 @@ import java.util.List;
 
 import org.apache.commons.lang.ArrayUtils;
 import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.hive.common.type.Decimal128;
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
@@ -134,14 +132,29 @@ public final class VectorExpressionWrite
      * The base implementation must be overridden by the Decimal specialization
      */
     @Override
-    public Object writeValue(Decimal128 value) throws HiveException {
+    public Object writeValue(HiveDecimal value) throws HiveException {
       throw new HiveException("Internal error: should not reach here");
     }
 
     /**
      * The base implementation must be overridden by the Decimal specialization
      */
-    public Object setValue(Object field, Decimal128 value) throws HiveException {
+    @Override
+    public Object writeValue(HiveDecimalWritable value) throws HiveException {
+      throw new HiveException("Internal error: should not reach here");
+    }
+
+    /**
+     * The base implementation must be overridden by the Decimal specialization
+     */
+    public Object setValue(Object field, HiveDecimalWritable value) throws HiveException {
+      throw new HiveException("Internal error: should not reach here");
+    }
+
+    /**
+     * The base implementation must be overridden by the Decimal specialization
+     */
+    public Object setValue(Object field, HiveDecimal value) throws HiveException {
       throw new HiveException("Internal error: should not reach here");
     }
   }
@@ -465,24 +478,35 @@ public final class VectorExpressionWrite
       }
 
       @Override
-      public Object writeValue(Decimal128 value) throws HiveException {
-        return ((SettableHiveDecimalObjectInspector) this.objectInspector).set(obj,
-            HiveDecimal.create(value.toBigDecimal()));
+      public Object writeValue(HiveDecimalWritable value) throws HiveException {
+        return ((SettableHiveDecimalObjectInspector) this.objectInspector).set(obj, value);
+      }
+
+      @Override
+      public Object writeValue(HiveDecimal value) throws HiveException {
+        return ((SettableHiveDecimalObjectInspector) this.objectInspector).set(obj, value);
+      }
+
+      @Override
+      public Object setValue(Object field, HiveDecimalWritable value) {
+        if (null == field) {
+          field = initValue(null);
+        }
+        return ((SettableHiveDecimalObjectInspector) this.objectInspector).set(field, value);
       }
 
       @Override
-      public Object setValue(Object field, Decimal128 value) {
+      public Object setValue(Object field, HiveDecimal value) {
         if (null == field) {
           field = initValue(null);
         }
-        return ((SettableHiveDecimalObjectInspector) this.objectInspector).set(field,
-            HiveDecimal.create(value.toBigDecimal()));
+        return ((SettableHiveDecimalObjectInspector) this.objectInspector).set(field, value);
       }
 
       @Override
       public Object initValue(Object ignored) {
         return ((SettableHiveDecimalObjectInspector) this.objectInspector).create(
-            HiveDecimal.create(BigDecimal.ZERO));
+            HiveDecimal.ZERO);
       }
     }.init(fieldObjInspector);
   }

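The create-then-set round trip the factory now performs, sketched against SettableHiveDecimalObjectInspector (the import path is an assumption; the create/set calls are the ones used above):

import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.SettableHiveDecimalObjectInspector;

public class InspectorSketch {
  // initValue() seeds a reusable field with HiveDecimal.ZERO; setValue()
  // then overwrites it in place on each row, avoiding per-row allocation.
  public static Object roundTrip(SettableHiveDecimalObjectInspector oi,
      HiveDecimal value) {
    Object field = oi.create(HiveDecimal.ZERO);
    return oi.set(field, value);
  }
}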
