http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableDateObjectInspector.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableDateObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableDateObjectInspector.java
index 5d73806..6a96ddd 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableDateObjectInspector.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableDateObjectInspector.java
@@ -17,13 +17,12 @@
  */
 package org.apache.hadoop.hive.serde2.objectinspector.primitive;
 
-import java.sql.Date;
-
-import org.apache.hadoop.hive.serde2.io.DateWritable;
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 
 /**
- * A WritableDateObjectInspector inspects a DateWritable Object.
+ * A WritableDateObjectInspector inspects a DateWritableV2 Object.
  */
 public class WritableDateObjectInspector extends
     AbstractPrimitiveWritableObjectInspector implements
@@ -34,35 +33,49 @@ public class WritableDateObjectInspector extends
   }
 
   @Override
-  public DateWritable getPrimitiveWritableObject(Object o) {
-    return o == null ? null : (DateWritable) o;
+  public DateWritableV2 getPrimitiveWritableObject(Object o) {
+    return o == null ? null : (DateWritableV2) o;
   }
 
   public Date getPrimitiveJavaObject(Object o) {
-    return o == null ? null : ((DateWritable) o).get();
+    return o == null ? null : ((DateWritableV2) o).get();
   }
 
   public Object copyObject(Object o) {
-    return o == null ? null : new DateWritable((DateWritable) o);
+    return o == null ? null : new DateWritableV2((DateWritableV2) o);
   }
 
   public Object set(Object o, Date d) {
     if (d == null) {
       return null;
     }
-    ((DateWritable) o).set(d);
+    ((DateWritableV2) o).set(d);
     return o;
   }
 
-  public Object set(Object o, DateWritable d) {
+  @Deprecated
+  public Object set(Object o, java.sql.Date d) {
     if (d == null) {
       return null;
     }
-    ((DateWritable) o).set(d);
+    ((DateWritableV2) o).set(Date.ofEpochMilli(d.getTime()));
     return o;
   }
 
+  public Object set(Object o, DateWritableV2 d) {
+    if (d == null) {
+      return null;
+    }
+    ((DateWritableV2) o).set(d);
+    return o;
+  }
+
+  @Deprecated
+  public Object create(java.sql.Date value) {
+    return new DateWritableV2(Date.ofEpochMilli(value.getTime()));
+  }
+
   public Object create(Date d) {
-    return new DateWritable(d);
+    return new DateWritableV2(d);
   }
 }
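
A minimal usage sketch (not part of the patch) of the inspector after this change. It assumes the usual PrimitiveObjectInspectorFactory.writableDateObjectInspector singleton; only methods visible in the diff above are used, and the class and variable names are illustrative.

    import org.apache.hadoop.hive.common.type.Date;
    import org.apache.hadoop.hive.serde2.io.DateWritableV2;
    import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
    import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableDateObjectInspector;

    public class DateInspectorSketch {
      public static void main(String[] args) {
        WritableDateObjectInspector oi =
            PrimitiveObjectInspectorFactory.writableDateObjectInspector;

        // create() and set() now take the Hive Date type instead of java.sql.Date.
        Object writable = oi.create(Date.valueOf("2018-01-17"));
        DateWritableV2 dw = oi.getPrimitiveWritableObject(writable);
        Date d = oi.getPrimitiveJavaObject(writable);

        // Legacy callers holding a java.sql.Date go through the deprecated
        // overload, which converts via Date.ofEpochMilli() internally.
        oi.set(writable, java.sql.Date.valueOf("2018-01-17"));
        System.out.println(dw.getDays() + " days since epoch, date " + d);
      }
    }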

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveCharObjectInspector.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveCharObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveCharObjectInspector.java
index ba407aa..17888fd 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveCharObjectInspector.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveCharObjectInspector.java
@@ -21,7 +21,7 @@ import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
 import org.apache.hadoop.hive.serde2.typeinfo.BaseCharUtils;
 import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
 import org.apache.hadoop.io.Text;
@@ -30,10 +30,6 @@ import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.BooleanWritable;
 
-import java.nio.ByteBuffer;
-import java.nio.CharBuffer;
-import java.nio.charset.Charset;
-
 public class WritableHiveCharObjectInspector extends AbstractPrimitiveWritableObjectInspector
     implements SettableHiveCharObjectInspector {
   // no-arg ctor required for Kyro serialization
@@ -52,7 +48,7 @@ public class WritableHiveCharObjectInspector extends AbstractPrimitiveWritableOb
       return null;
     }
 
-    if ((o instanceof Text) || (o instanceof TimestampWritable)
+    if ((o instanceof Text) || (o instanceof TimestampWritableV2)
         || (o instanceof HiveDecimalWritable) || (o instanceof DoubleWritable)
         || (o instanceof FloatWritable) || (o instanceof LongWritable) || (o instanceof IntWritable)
         || (o instanceof BooleanWritable)) {
@@ -75,7 +71,7 @@ public class WritableHiveCharObjectInspector extends AbstractPrimitiveWritableOb
       return null;
     }
 
-    if ((o instanceof Text) || (o instanceof TimestampWritable)
+    if ((o instanceof Text) || (o instanceof TimestampWritableV2)
         || (o instanceof HiveDecimalWritable) || (o instanceof DoubleWritable)
         || (o instanceof FloatWritable) || (o instanceof LongWritable) || (o instanceof IntWritable)
         || (o instanceof BooleanWritable)) {

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveVarcharObjectInspector.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveVarcharObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveVarcharObjectInspector.java
index 81c0550..456858c 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveVarcharObjectInspector.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableHiveVarcharObjectInspector.java
@@ -18,13 +18,13 @@
 package org.apache.hadoop.hive.serde2.objectinspector.primitive;
 
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
 import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.BaseCharUtils;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.FloatWritable;
@@ -52,7 +52,7 @@ implements SettableHiveVarcharObjectInspector {
       return null;
     }
 
-    if ((o instanceof Text) || (o instanceof TimestampWritable)
+    if ((o instanceof Text) || (o instanceof TimestampWritableV2)
         || (o instanceof HiveDecimalWritable) || (o instanceof DoubleWritable)
         || (o instanceof FloatWritable) || (o instanceof LongWritable) || (o instanceof IntWritable)
         || (o instanceof BooleanWritable)) {
@@ -75,7 +75,7 @@ implements SettableHiveVarcharObjectInspector {
       return null;
     }
 
-    if ((o instanceof Text) || (o instanceof TimestampWritable)
+    if ((o instanceof Text) || (o instanceof TimestampWritableV2)
         || (o instanceof HiveDecimalWritable) || (o instanceof DoubleWritable)
         || (o instanceof FloatWritable) || (o instanceof LongWritable) || (o instanceof IntWritable)
         || (o instanceof BooleanWritable)) {

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableTimestampObjectInspector.java
----------------------------------------------------------------------
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableTimestampObjectInspector.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableTimestampObjectInspector.java
index 47b51f5..e0ab191 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableTimestampObjectInspector.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/primitive/WritableTimestampObjectInspector.java
@@ -17,9 +17,8 @@
  */
 package org.apache.hadoop.hive.serde2.objectinspector.primitive;
 
-import java.sql.Timestamp;
-
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.common.type.Timestamp;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 
 public class WritableTimestampObjectInspector extends
@@ -31,20 +30,29 @@ public class WritableTimestampObjectInspector extends
   }
 
   @Override
-  public TimestampWritable getPrimitiveWritableObject(Object o) {
-    return o == null ? null : (TimestampWritable) o;
+  public TimestampWritableV2 getPrimitiveWritableObject(Object o) {
+    return o == null ? null : (TimestampWritableV2) o;
   }
 
   public Timestamp getPrimitiveJavaObject(Object o) {
-    return o == null ? null : ((TimestampWritable) o).getTimestamp();
+    return o == null ? null : ((TimestampWritableV2) o).getTimestamp();
   }
 
   public Object copyObject(Object o) {
-    return o == null ? null : new TimestampWritable((TimestampWritable) o);
+    return o == null ? null : new TimestampWritableV2((TimestampWritableV2) o);
   }
 
   public Object set(Object o, byte[] bytes, int offset) {
-    ((TimestampWritable) o).set(bytes, offset);
+    ((TimestampWritableV2) o).set(bytes, offset);
+    return o;
+  }
+
+  @Deprecated
+  public Object set(Object o, java.sql.Timestamp t) {
+    if (t == null) {
+      return null;
+    }
+    ((TimestampWritableV2) o).set(Timestamp.ofEpochMilli(t.getTime(), t.getNanos()));
     return o;
   }
 
@@ -52,23 +60,27 @@ public class WritableTimestampObjectInspector extends
     if (t == null) {
       return null;
     }
-    ((TimestampWritable) o).set(t);
+    ((TimestampWritableV2) o).set(t);
     return o;
   }
 
-  public Object set(Object o, TimestampWritable t) {
+  public Object set(Object o, TimestampWritableV2 t) {
     if (t == null) {
       return null;
     }
-    ((TimestampWritable) o).set(t);
+    ((TimestampWritableV2) o).set(t);
     return o;
   }
 
   public Object create(byte[] bytes, int offset) {
-    return new TimestampWritable(bytes, offset);
+    return new TimestampWritableV2(bytes, offset);
+  }
+
+  public Object create(java.sql.Timestamp t) {
+    return new TimestampWritableV2(Timestamp.ofEpochMilli(t.getTime(), t.getNanos()));
   }
 
   public Object create(Timestamp t) {
-    return new TimestampWritable(t);
+    return new TimestampWritableV2(t);
   }
 }
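
For illustration only: a short sketch of the java.sql.Timestamp-to-Hive-Timestamp bridging pattern that the deprecated overloads above rely on. The class and variable names are made up for the example; only APIs visible in this diff are used.

    import org.apache.hadoop.hive.common.type.Timestamp;
    import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;

    public class TimestampBridgeSketch {
      public static void main(String[] args) {
        java.sql.Timestamp legacy = java.sql.Timestamp.valueOf("2018-01-17 10:15:30.123456789");

        // Rebuild the value from epoch millis plus the full nanosecond field,
        // the same conversion used by the deprecated set()/create() overloads.
        Timestamp ts = Timestamp.ofEpochMilli(legacy.getTime(), legacy.getNanos());

        TimestampWritableV2 tw = new TimestampWritableV2(ts);
        System.out.println(tw.getTimestamp());
      }
    }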

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/serde/src/test/org/apache/hadoop/hive/serde2/SerdeRandomRowSource.java
----------------------------------------------------------------------
diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/SerdeRandomRowSource.java b/serde/src/test/org/apache/hadoop/hive/serde2/SerdeRandomRowSource.java
index 749d8ac..c0f9726 100644
--- a/serde/src/test/org/apache/hadoop/hive/serde2/SerdeRandomRowSource.java
+++ b/serde/src/test/org/apache/hadoop/hive/serde2/SerdeRandomRowSource.java
@@ -18,19 +18,18 @@
 
 package org.apache.hadoop.hive.serde2;
 
-import java.sql.Date;
-import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Random;
 
+import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.common.type.RandomTypeUtil;
+import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
 import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/serde/src/test/org/apache/hadoop/hive/serde2/VerifyFast.java
----------------------------------------------------------------------
diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/VerifyFast.java b/serde/src/test/org/apache/hadoop/hive/serde2/VerifyFast.java
index 22aadbb..c6b77ed 100644
--- a/serde/src/test/org/apache/hadoop/hive/serde2/VerifyFast.java
+++ b/serde/src/test/org/apache/hadoop/hive/serde2/VerifyFast.java
@@ -19,8 +19,6 @@ package org.apache.hadoop.hive.serde2;
 
 import java.io.IOException;
 import java.nio.charset.StandardCharsets;
-import java.sql.Date;
-import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
@@ -29,16 +27,19 @@ import java.util.Map.Entry;
 
 import junit.framework.TestCase;
 
+import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
+import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.serde2.fast.DeserializeRead;
+import org.apache.hadoop.hive.serde2.io.DateWritableV2;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
 import org.apache.hadoop.hive.serde2.objectinspector.StandardUnionObjectInspector.StandardUnion;
 import org.apache.hadoop.hive.serde2.fast.SerializeWrite;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
-import org.apache.hadoop.hive.serde2.io.DateWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
@@ -46,7 +47,6 @@ import org.apache.hadoop.hive.serde2.io.HiveIntervalDayTimeWritable;
 import org.apache.hadoop.hive.serde2.io.HiveIntervalYearMonthWritable;
 import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
 import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo;
@@ -244,7 +244,7 @@ public class VerifyFast {
         case DATE:
           {
             Date value = deserializeRead.currentDateWritable.get();
-            Date expected = ((DateWritable) object).get();
+            Date expected = ((DateWritableV2) object).get();
             if (!value.equals(expected)) {
               TestCase.fail("Date field mismatch (expected " + expected.toString() + " found " + value.toString() + ")");
             }
@@ -253,7 +253,7 @@ public class VerifyFast {
         case TIMESTAMP:
           {
             Timestamp value = deserializeRead.currentTimestampWritable.getTimestamp();
-            Timestamp expected = ((TimestampWritable) object).getTimestamp();
+            Timestamp expected = ((TimestampWritableV2) object).getTimestamp();
             if (!value.equals(expected)) {
               TestCase.fail("Timestamp field mismatch (expected " + expected.toString() + " found " + value.toString() + ")");
             }
@@ -394,13 +394,13 @@ public class VerifyFast {
           break;
         case DATE:
           {
-            Date value = ((DateWritable) object).get();
+            Date value = ((DateWritableV2) object).get();
             serializeWrite.writeDate(value);
           }
           break;
         case TIMESTAMP:
           {
-            Timestamp value = ((TimestampWritable) object).getTimestamp();
+            Timestamp value = ((TimestampWritableV2) object).getTimestamp();
             serializeWrite.writeTimestamp(value);
           }
           break;
@@ -571,9 +571,9 @@ public class VerifyFast {
     case DECIMAL:
       return new HiveDecimalWritable(deserializeRead.currentHiveDecimalWritable);
     case DATE:
-      return new DateWritable(deserializeRead.currentDateWritable);
+      return new DateWritableV2(deserializeRead.currentDateWritable);
     case TIMESTAMP:
-      return new TimestampWritable(deserializeRead.currentTimestampWritable);
+      return new TimestampWritableV2(deserializeRead.currentTimestampWritable);
     case INTERVAL_YEAR_MONTH:
       return new HiveIntervalYearMonthWritable(deserializeRead.currentHiveIntervalYearMonthWritable);
     case INTERVAL_DAY_TIME:

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/MyTestClass.java
----------------------------------------------------------------------
diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/MyTestClass.java b/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/MyTestClass.java
index c270d71..a486ab1 100644
--- a/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/MyTestClass.java
+++ b/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/MyTestClass.java
@@ -17,18 +17,18 @@
  */
 package org.apache.hadoop.hive.serde2.binarysortable;
 
-import java.sql.Date;
-import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Random;
 
+import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.common.type.RandomTypeUtil;
+import org.apache.hadoop.hive.serde2.RandomTypeUtil;
+import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.serde2.binarysortable.MyTestPrimitiveClass.ExtraTypeInfo;
 import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
 

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/MyTestPrimitiveClass.java
----------------------------------------------------------------------
diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/MyTestPrimitiveClass.java b/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/MyTestPrimitiveClass.java
index 82d126a..6febc36 100644
--- a/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/MyTestPrimitiveClass.java
+++ b/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/MyTestPrimitiveClass.java
@@ -17,26 +17,21 @@
  */
 package org.apache.hadoop.hive.serde2.binarysortable;
 
-import java.sql.Date;
-import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Random;
 
-import junit.framework.TestCase;
-
-import org.apache.commons.lang.ArrayUtils;
-import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.hive.common.type.HiveBaseChar;
+import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.common.type.RandomTypeUtil;
-import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
+import org.apache.hadoop.hive.serde2.RandomTypeUtil;
+import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
 import org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
@@ -46,12 +41,12 @@ import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableHiveVarch
 import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
 import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
-import org.apache.hadoop.io.Writable;
 import org.apache.hive.common.util.DateUtils;
 
+import junit.framework.TestCase;
+
 // Just the primitive types.
 public class MyTestPrimitiveClass {
 

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/serde/src/test/org/apache/hadoop/hive/serde2/io/TestDateWritable.java
----------------------------------------------------------------------
diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/io/TestDateWritable.java b/serde/src/test/org/apache/hadoop/hive/serde2/io/TestDateWritable.java
deleted file mode 100644
index 97eb967..0000000
--- a/serde/src/test/org/apache/hadoop/hive/serde2/io/TestDateWritable.java
+++ /dev/null
@@ -1,237 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.serde2.io;
-
-import com.google.code.tempusfugit.concurrency.annotations.*;
-import com.google.code.tempusfugit.concurrency.*;
-import org.junit.*;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import static org.junit.Assert.*;
-import java.io.*;
-import java.sql.Date;
-import java.text.DateFormat;
-import java.text.SimpleDateFormat;
-import java.util.Calendar;
-import java.util.GregorianCalendar;
-import java.util.LinkedList;
-import java.util.TimeZone;
-import java.util.concurrent.Callable;
-import java.util.concurrent.ExecutionException;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.Future;
-
-public class TestDateWritable {
-  private static final Logger LOG = LoggerFactory.getLogger(TestDateWritable.class);
-
-  @Rule public ConcurrentRule concurrentRule = new ConcurrentRule();
-  @Rule public RepeatingRule repeatingRule = new RepeatingRule();
-
-  @Test
-  @Concurrent(count=4)
-  @Repeating(repetition=100)
-  public void testConstructor() {
-    Date date = Date.valueOf(getRandomDateString());
-    DateWritable dw1 = new DateWritable(date);
-    DateWritable dw2 = new DateWritable(dw1);
-    DateWritable dw3 = new DateWritable(dw1.getDays());
-
-    assertEquals(dw1, dw1);
-    assertEquals(dw1, dw2);
-    assertEquals(dw2, dw3);
-    assertEquals(date, dw1.get());
-    assertEquals(date, dw2.get());
-    assertEquals(date, dw3.get());
-  }
-
-  @Test
-  @Concurrent(count=4)
-  @Repeating(repetition=100)
-  public void testComparison() {
-    // Get 2 different dates
-    Date date1 = Date.valueOf(getRandomDateString());
-    Date date2 = Date.valueOf(getRandomDateString());
-    while (date1.equals(date2)) {
-      date2 = Date.valueOf(getRandomDateString());
-    }
-
-    DateWritable dw1 = new DateWritable(date1);
-    DateWritable dw2 = new DateWritable(date2);
-    DateWritable dw3 = new DateWritable(date1);
-
-    assertTrue("Dates should be equal", dw1.equals(dw1));
-    assertTrue("Dates should be equal", dw1.equals(dw3));
-    assertTrue("Dates should be equal", dw3.equals(dw1));
-    assertEquals("Dates should be equal", 0, dw1.compareTo(dw1));
-    assertEquals("Dates should be equal", 0, dw1.compareTo(dw3));
-    assertEquals("Dates should be equal", 0, dw3.compareTo(dw1));
-
-    assertFalse("Dates not should be equal", dw1.equals(dw2));
-    assertFalse("Dates not should be equal", dw2.equals(dw1));
-    assertTrue("Dates not should be equal", 0 != dw1.compareTo(dw2));
-    assertTrue("Dates not should be equal", 0 != dw2.compareTo(dw1));
-  }
-
-  @Test
-  @Concurrent(count=4)
-  @Repeating(repetition=100)
-  public void testGettersSetters() {
-    Date date1 = Date.valueOf(getRandomDateString());
-    Date date2 = Date.valueOf(getRandomDateString());
-    Date date3 = Date.valueOf(getRandomDateString());
-    DateWritable dw1 = new DateWritable(date1);
-    DateWritable dw2 = new DateWritable(date2);
-    DateWritable dw3 = new DateWritable(date3);
-    DateWritable dw4 = new DateWritable();
-
-    // Getters
-    assertEquals(date1, dw1.get());
-    assertEquals(date1.getTime() / 1000, dw1.getTimeInSeconds());
-
-    dw4.set(Date.valueOf("1970-01-02"));
-    assertEquals(1, dw4.getDays());
-    dw4.set(Date.valueOf("1971-01-01"));
-    assertEquals(365, dw4.getDays());
-
-    // Setters
-    dw4.set(dw1.getDays());
-    assertEquals(dw1, dw4);
-
-    dw4.set(dw2.get());
-    assertEquals(dw2, dw4);
-
-    dw4.set(dw3);
-    assertEquals(dw3, dw4);
-  }
-
-  @Test
-  @Concurrent(count=4)
-  @Repeating(repetition=100)
-  public void testWritableMethods() throws Throwable {
-    DateWritable dw1 = new DateWritable(Date.valueOf(getRandomDateString()));
-    DateWritable dw2 = new DateWritable();
-    ByteArrayOutputStream byteStream = new ByteArrayOutputStream();
-    DataOutput out = new DataOutputStream(byteStream);
-
-    dw1.write(out);
-    dw2.readFields(new DataInputStream(new ByteArrayInputStream(byteStream.toByteArray())));
-
-    assertEquals("Dates should be equal", dw1, dw2);
-  }
-
-  @Test
-  @Concurrent(count=4)
-  @Repeating(repetition=100)
-  public void testDateValueOf() {
-    // Just making sure Date.valueOf() works ok
-    String dateStr = getRandomDateString();
-    Date date = Date.valueOf(dateStr);
-    assertEquals(dateStr, date.toString());
-  }
-
-  private static String[] dateStrings = new String[365];
-
-  @BeforeClass
-  public static void setupDateStrings() {
-    DateFormat format = new SimpleDateFormat("yyyy-MM-dd");
-    Date initialDate = Date.valueOf("2014-01-01");
-    Calendar cal = Calendar.getInstance();
-    cal.setTime(initialDate);
-    for (int idx = 0; idx < 365; ++idx) {
-      dateStrings[idx] = format.format(cal.getTime());
-      cal.add(1, Calendar.DAY_OF_YEAR);
-    }
-  }
-
-  private static String getRandomDateString() {
-    return dateStrings[(int) (Math.random() * 365)];
-  }
-
-  public static class DateTestCallable implements Callable<Void> {
-    private LinkedList<DtMismatch> bad;
-    private String tz;
-
-    public DateTestCallable(LinkedList<DtMismatch> bad, String tz) {
-      this.bad = bad;
-      this.tz = tz;
-    }
-
-    @Override
-    public Void call() throws Exception {
-      SimpleDateFormat sdf = new SimpleDateFormat("YYYY-MM-dd HH:mm:ss");
-      // Iterate through each day of the year, make sure Date/DateWritable match
-      Date originalDate = Date.valueOf("1900-01-01");
-      Calendar cal = Calendar.getInstance();
-      cal.setTimeInMillis(originalDate.getTime());
-      for (int idx = 0; idx < 365*200; ++idx) {
-        originalDate = new Date(cal.getTimeInMillis());
-        // Make sure originalDate is at midnight in the local time zone,
-        // since DateWritable will generate dates at that time.
-        originalDate = Date.valueOf(originalDate.toString());
-        DateWritable dateWritable = new DateWritable(originalDate);
-        Date actual = dateWritable.get(false);
-        if (!originalDate.equals(actual)) {
-          String originalStr = sdf.format(originalDate);
-          String actualStr = sdf.format(actual);
-          if (originalStr.substring(0, 10).equals(actualStr.substring(0, 10))) continue;
-          bad.add(new DtMismatch(originalStr, actualStr, tz));
-        }
-        cal.add(Calendar.DAY_OF_YEAR, 1);
-      }
-      // Success!
-      return null;
-    }
-  }
-
-  private static class DtMismatch {
-    String expected, found, tz;
-    public DtMismatch(String originalStr, String actualStr, String tz) {
-      this.expected = originalStr;
-      this.found = actualStr;
-      this.tz = tz;
-    }
-  }
-
-  @Test
-  public void testDaylightSavingsTime() throws Exception {
-    LinkedList<DtMismatch> bad = new LinkedList<>();
-
-    for (String timeZone: TimeZone.getAvailableIDs()) {
-      TimeZone previousDefault = TimeZone.getDefault();
-      TimeZone.setDefault(TimeZone.getTimeZone(timeZone));
-      assertEquals("Default timezone should now be " + timeZone,
-          timeZone, TimeZone.getDefault().getID());
-      ExecutorService threadPool = Executors.newFixedThreadPool(1);
-      try {
-        // TODO: pointless
-        threadPool.submit(new DateTestCallable(bad, timeZone)).get();
-      } finally {
-        threadPool.shutdown(); TimeZone.setDefault(previousDefault);
-      }
-    }
-    StringBuilder errors = new StringBuilder("\nDATE MISMATCH:\n");
-    for (DtMismatch dm : bad) {
-      errors.append("E ").append(dm.tz).append(": ").append(dm.expected).append(" != ").append(dm.found).append("\n");
-    }
-    LOG.error(errors.toString());
-    if (!bad.isEmpty()) throw new Exception(bad.size() + " mismatches, see logs");
-  }
-}

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/serde/src/test/org/apache/hadoop/hive/serde2/io/TestDateWritableV2.java
----------------------------------------------------------------------
diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/io/TestDateWritableV2.java b/serde/src/test/org/apache/hadoop/hive/serde2/io/TestDateWritableV2.java
new file mode 100644
index 0000000..262e55a
--- /dev/null
+++ b/serde/src/test/org/apache/hadoop/hive/serde2/io/TestDateWritableV2.java
@@ -0,0 +1,241 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.serde2.io;
+
+import com.google.code.tempusfugit.concurrency.ConcurrentRule;
+import com.google.code.tempusfugit.concurrency.RepeatingRule;
+import com.google.code.tempusfugit.concurrency.annotations.Concurrent;
+import com.google.code.tempusfugit.concurrency.annotations.Repeating;
+import org.apache.hadoop.hive.common.type.Date;
+import org.junit.BeforeClass;
+import org.junit.Rule;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.DataInputStream;
+import java.io.DataOutput;
+import java.io.DataOutputStream;
+import java.util.Calendar;
+import java.util.LinkedList;
+import java.util.TimeZone;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+public class TestDateWritableV2 {
+  private static final Logger LOG = LoggerFactory.getLogger(TestDateWritableV2.class);
+
+  @Rule public ConcurrentRule concurrentRule = new ConcurrentRule();
+  @Rule public RepeatingRule repeatingRule = new RepeatingRule();
+
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
+  public void testConstructor() {
+    Date date = Date.valueOf(getRandomDateString());
+    DateWritableV2 dw1 = new DateWritableV2(date);
+    DateWritableV2 dw2 = new DateWritableV2(dw1);
+    DateWritableV2 dw3 = new DateWritableV2(dw1.getDays());
+
+    assertEquals(dw1, dw1);
+    assertEquals(dw1, dw2);
+    assertEquals(dw2, dw3);
+    assertEquals(date, dw1.get());
+    assertEquals(date, dw2.get());
+    assertEquals(date, dw3.get());
+  }
+
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
+  public void testComparison() {
+    // Get 2 different dates
+    Date date1 = Date.valueOf(getRandomDateString());
+    Date date2 = Date.valueOf(getRandomDateString());
+    while (date1.equals(date2)) {
+      date2 = Date.valueOf(getRandomDateString());
+    }
+
+    DateWritableV2 dw1 = new DateWritableV2(date1);
+    DateWritableV2 dw2 = new DateWritableV2(date2);
+    DateWritableV2 dw3 = new DateWritableV2(date1);
+
+    assertTrue("Dates should be equal", dw1.equals(dw1));
+    assertTrue("Dates should be equal", dw1.equals(dw3));
+    assertTrue("Dates should be equal", dw3.equals(dw1));
+    assertEquals("Dates should be equal", 0, dw1.compareTo(dw1));
+    assertEquals("Dates should be equal", 0, dw1.compareTo(dw3));
+    assertEquals("Dates should be equal", 0, dw3.compareTo(dw1));
+
+    assertFalse("Dates not should be equal", dw1.equals(dw2));
+    assertFalse("Dates not should be equal", dw2.equals(dw1));
+    assertTrue("Dates not should be equal", 0 != dw1.compareTo(dw2));
+    assertTrue("Dates not should be equal", 0 != dw2.compareTo(dw1));
+  }
+
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
+  public void testGettersSetters() {
+    Date date1 = Date.valueOf(getRandomDateString());
+    Date date2 = Date.valueOf(getRandomDateString());
+    Date date3 = Date.valueOf(getRandomDateString());
+    DateWritableV2 dw1 = new DateWritableV2(date1);
+    DateWritableV2 dw2 = new DateWritableV2(date2);
+    DateWritableV2 dw3 = new DateWritableV2(date3);
+    DateWritableV2 dw4 = new DateWritableV2();
+
+    // Getters
+    assertEquals(date1, dw1.get());
+    assertEquals(date1.toEpochSecond(), dw1.getTimeInSeconds());
+
+    dw4.set(Date.valueOf("1970-01-02"));
+    assertEquals(1, dw4.getDays());
+    dw4.set(Date.valueOf("1971-01-01"));
+    assertEquals(365, dw4.getDays());
+
+    // Setters
+    dw4.set(dw1.getDays());
+    assertEquals(dw1, dw4);
+
+    dw4.set(dw2.get());
+    assertEquals(dw2, dw4);
+
+    dw4.set(dw3);
+    assertEquals(dw3, dw4);
+  }
+
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
+  public void testWritableMethods() throws Throwable {
+    DateWritableV2 dw1 = new DateWritableV2(Date.valueOf(getRandomDateString()));
+    DateWritableV2 dw2 = new DateWritableV2();
+    ByteArrayOutputStream byteStream = new ByteArrayOutputStream();
+    DataOutput out = new DataOutputStream(byteStream);
+
+    dw1.write(out);
+    dw2.readFields(new DataInputStream(new ByteArrayInputStream(byteStream.toByteArray())));
+
+    assertEquals("Dates should be equal", dw1, dw2);
+  }
+
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
+  public void testDateValueOf() {
+    // Just making sure Date.valueOf() works ok
+    String dateStr = getRandomDateString();
+    Date date = Date.valueOf(dateStr);
+    assertEquals(dateStr, date.toString());
+  }
+
+  private static String[] dateStrings = new String[365];
+
+  @BeforeClass
+  public static void setupDateStrings() {
+    Date initialDate = Date.valueOf("2014-01-01");
+    Calendar cal = Calendar.getInstance();
+    cal.setTimeInMillis(initialDate.toEpochMilli());
+    for (int idx = 0; idx < 365; ++idx) {
+      dateStrings[idx] = Date.ofEpochMilli(cal.getTimeInMillis()).toString();
+      cal.add(1, Calendar.DAY_OF_YEAR);
+    }
+  }
+
+  private static String getRandomDateString() {
+    return dateStrings[(int) (Math.random() * 365)];
+  }
+
+  public static class DateTestCallable implements Callable<Void> {
+    private LinkedList<DtMismatch> bad;
+    private String tz;
+
+    public DateTestCallable(LinkedList<DtMismatch> bad, String tz) {
+      this.bad = bad;
+      this.tz = tz;
+    }
+
+    @Override
+    public Void call() throws Exception {
+      // Iterate through each day of the year, make sure Date/DateWritableV2 match
+      Date originalDate = Date.valueOf("1900-01-01");
+      Calendar cal = Calendar.getInstance();
+      cal.setTimeInMillis(originalDate.toEpochMilli());
+      for (int idx = 0; idx < 365*200; ++idx) {
+        originalDate = Date.ofEpochMilli(cal.getTimeInMillis());
+        // Make sure originalDate is at midnight in the local time zone,
+        // since DateWritableV2 will generate dates at that time.
+        originalDate = Date.valueOf(originalDate.toString());
+        DateWritableV2 dateWritable = new DateWritableV2(originalDate);
+        Date actual = dateWritable.get();
+        if (!originalDate.equals(actual)) {
+          String originalStr = originalDate.toString();
+          String actualStr = actual.toString();
+          if (originalStr.substring(0, 10).equals(actualStr.substring(0, 10))) continue;
+          bad.add(new DtMismatch(originalStr, actualStr, tz));
+        }
+        cal.add(Calendar.DAY_OF_YEAR, 1);
+      }
+      // Success!
+      return null;
+    }
+  }
+
+  private static class DtMismatch {
+    String expected, found, tz;
+    public DtMismatch(String originalStr, String actualStr, String tz) {
+      this.expected = originalStr;
+      this.found = actualStr;
+      this.tz = tz;
+    }
+  }
+
+  @Test
+  public void testDaylightSavingsTime() throws Exception {
+    LinkedList<DtMismatch> bad = new LinkedList<>();
+
+    for (String timeZone: TimeZone.getAvailableIDs()) {
+      TimeZone previousDefault = TimeZone.getDefault();
+      TimeZone.setDefault(TimeZone.getTimeZone(timeZone));
+      assertEquals("Default timezone should now be " + timeZone,
+          timeZone, TimeZone.getDefault().getID());
+      ExecutorService threadPool = Executors.newFixedThreadPool(1);
+      try {
+        // TODO: pointless
+        threadPool.submit(new DateTestCallable(bad, timeZone)).get();
+      } finally {
+        threadPool.shutdown(); TimeZone.setDefault(previousDefault);
+      }
+    }
+    StringBuilder errors = new StringBuilder("\nDATE MISMATCH:\n");
+    for (DtMismatch dm : bad) {
+      errors.append("E ").append(dm.tz).append(": ").append(dm.expected).append(" != ").append(dm.found).append("\n");
+    }
+    LOG.error(errors.toString());
+    if (!bad.isEmpty()) throw new Exception(bad.size() + " mismatches, see logs");
+  }
+}
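
A side note on the API shift visible between TestDateWritable (deleted above) and TestDateWritableV2, written as a small illustrative snippet that is not part of the patch:

    import org.apache.hadoop.hive.common.type.Date;

    public class DateApiShiftSketch {
      public static void main(String[] args) {
        // Old test (java.sql.Date):   date.getTime() / 1000  and  dateWritable.get(false)
        // New test (Hive Date type):  date.toEpochSecond()   and  dateWritable.get()
        Date d = Date.valueOf("2014-01-01");
        System.out.println(d.toEpochSecond());  // seconds since epoch
        System.out.println(d.toEpochMilli());   // millis since epoch, used with Calendar in the new test
      }
    }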

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/serde/src/test/org/apache/hadoop/hive/serde2/io/TestTimestampWritable.java
----------------------------------------------------------------------
diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/io/TestTimestampWritable.java b/serde/src/test/org/apache/hadoop/hive/serde2/io/TestTimestampWritable.java
deleted file mode 100644
index 3fe472e..0000000
--- a/serde/src/test/org/apache/hadoop/hive/serde2/io/TestTimestampWritable.java
+++ /dev/null
@@ -1,520 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hive.serde2.io;
-
-import com.google.code.tempusfugit.concurrency.annotations.*;
-import com.google.code.tempusfugit.concurrency.*;
-
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.IOException;
-import java.math.BigDecimal;
-import java.sql.Timestamp;
-import java.text.DateFormat;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Random;
-import java.util.TimeZone;
-
-import org.apache.hadoop.hive.ql.util.TimestampUtils;
-import org.junit.*;
-import static org.junit.Assert.*;
-
-import org.apache.hadoop.hive.common.type.HiveDecimal;
-import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.io.WritableUtils;
-
-public class TestTimestampWritable {
-
-  @Rule public ConcurrentRule concurrentRule = new ConcurrentRule();
-  @Rule public RepeatingRule repeatingRule = new RepeatingRule();
-
-  private static ThreadLocal<DateFormat> DATE_FORMAT =
-      new ThreadLocal<DateFormat>() {
-        @Override
-        protected DateFormat initialValue() {
-          return new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
-        }
-      };
-
-  private static final int HAS_DECIMAL_MASK = 0x80000000;
-
-  private static final long MAX_ADDITIONAL_SECONDS_BITS = 0x418937;
-
-  private static long MIN_FOUR_DIGIT_YEAR_MILLIS = parseToMillis("0001-01-01 00:00:00");
-  private static long MAX_FOUR_DIGIT_YEAR_MILLIS = parseToMillis("9999-01-01 00:00:00");
-
-  private static int BILLION = 1000 * 1000 * 1000;
-
-  private static long getSeconds(Timestamp ts) {
-    // To compute seconds, we first subtract the milliseconds stored in the nanos field of the
-    // Timestamp from the result of getTime().
-    long seconds = (ts.getTime() - ts.getNanos() / 1000000) / 1000;
-
-    // It should also be possible to calculate this based on ts.getTime() only.
-    assertEquals(seconds, TimestampUtils.millisToSeconds(ts.getTime()));
-
-    return seconds;
-  }
-
-  private static long parseToMillis(String s) {
-    try {
-      return DATE_FORMAT.get().parse(s).getTime();
-    } catch (ParseException ex) {
-      throw new RuntimeException(ex);
-    }
-  }
-
-  @Before
-  public void setUp() {
-    TimeZone.setDefault(TimeZone.getTimeZone("UTC"));
-  }
-
-  private static String normalizeTimestampStr(String timestampStr) {
-    if (timestampStr.endsWith(".0")) {
-      return timestampStr.substring(0, timestampStr.length() - 2);
-    }
-    return timestampStr;
-  }
-
-  private static void assertTSWEquals(TimestampWritable expected, TimestampWritable actual) {
-    assertEquals(normalizeTimestampStr(expected.toString()),
-                 normalizeTimestampStr(actual.toString()));
-    assertEquals(expected, actual);
-    assertEquals(expected.getTimestamp(), actual.getTimestamp());
-  }
-
-  private static TimestampWritable deserializeFromBytes(byte[] tsBytes) throws IOException {
-    ByteArrayInputStream bais = new ByteArrayInputStream(tsBytes);
-    DataInputStream dis = new DataInputStream(bais);
-    TimestampWritable deserTSW = new TimestampWritable();
-    deserTSW.readFields(dis);
-    return deserTSW;
-  }
-
-  private static int reverseNanos(int nanos) {
-    if (nanos == 0) {
-      return 0;
-    }
-    if (nanos < 0 || nanos >= 1000 * 1000 * 1000) {
-      throw new IllegalArgumentException("Invalid nanosecond value: " + nanos);
-    }
-
-    int x = nanos;
-    StringBuilder reversed = new StringBuilder();
-    while (x != 0) {
-      reversed.append((char)('0' + x % 10));
-      x /= 10;
-    }
-
-    int result = Integer.parseInt(reversed.toString());
-    while (nanos < 100 * 1000 * 1000) {
-      result *= 10;
-      nanos *= 10;
-    }
-    return result;
-  }
-
-  private static byte[] serializeToBytes(Writable w) throws IOException {
-    ByteArrayOutputStream baos = new ByteArrayOutputStream();
-    DataOutputStream dos = new DataOutputStream(baos);
-    w.write(dos);
-    return baos.toByteArray();
-  }
-
-  private static List<Byte> toList(byte[] a) {
-    List<Byte> list = new ArrayList<Byte>(a.length);
-    for (byte b : a) {
-      list.add(b);
-    }
-    return list;
-  }
-
-  /**
-   * Pad the given byte array with the given number of bytes in the beginning. The padding bytes
-   * deterministically depend on the passed data.
-   */
-  private static byte[] padBytes(byte[] bytes, int count) {
-    byte[] result = new byte[bytes.length + count];
-    for (int i = 0; i < count; ++i) {
-      // Fill the prefix bytes with deterministic data based on the actual meaningful data.
-      result[i] = (byte) (bytes[i % bytes.length] * 37 + 19);
-    }
-    System.arraycopy(bytes, 0, result, count, bytes.length);
-    return result;
-  }
-
-  private static TimestampWritable serializeDeserializeAndCheckTimestamp(Timestamp ts)
-      throws IOException {
-    TimestampWritable tsw = new TimestampWritable(ts);
-    assertEquals(ts, tsw.getTimestamp());
-
-    byte[] tsBytes = serializeToBytes(tsw);
-    TimestampWritable deserTSW = deserializeFromBytes(tsBytes);
-    assertTSWEquals(tsw, deserTSW);
-    assertEquals(ts, deserTSW.getTimestamp());
-    assertEquals(tsBytes.length, tsw.getTotalLength());
-
-    // Also convert to/from binary-sortable representation.
-    int binarySortableOffset = Math.abs(tsw.hashCode()) % 10;
-    byte[] binarySortableBytes = padBytes(tsw.getBinarySortable(), binarySortableOffset);
-    TimestampWritable fromBinSort = new TimestampWritable();
-    fromBinSort.setBinarySortable(binarySortableBytes, binarySortableOffset);
-    assertTSWEquals(tsw, fromBinSort);
-
-    long timeSeconds = ts.getTime() / 1000;
-    if (0 <= timeSeconds && timeSeconds <= Integer.MAX_VALUE) {
-      assertEquals(new Timestamp(timeSeconds * 1000),
-        fromIntAndVInts((int) timeSeconds, 0).getTimestamp());
-
-      int nanos = reverseNanos(ts.getNanos());
-      assertEquals(ts,
-        fromIntAndVInts((int) timeSeconds | (nanos != 0 ? HAS_DECIMAL_MASK : 0),
-          nanos).getTimestamp());
-    }
-
-    assertEquals(ts.getNanos(), tsw.getNanos());
-    assertEquals(getSeconds(ts), tsw.getSeconds());
-
-    // Test various set methods and copy constructors.
-    {
-      TimestampWritable tsSet1 = new TimestampWritable();
-      // make the offset non-zero to keep things interesting.
-      int offset = Math.abs(ts.hashCode() % 32);
-      byte[] shiftedBytes = padBytes(tsBytes, offset);
-      tsSet1.set(shiftedBytes, offset);
-      assertTSWEquals(tsw, tsSet1);
-
-      TimestampWritable tswShiftedBytes = new TimestampWritable(shiftedBytes, offset);
-      assertTSWEquals(tsw, tswShiftedBytes);
-      assertTSWEquals(tsw, deserializeFromBytes(serializeToBytes(tswShiftedBytes)));
-    }
-
-    {
-      TimestampWritable tsSet2 = new TimestampWritable();
-      tsSet2.set(ts);
-      assertTSWEquals(tsw, tsSet2);
-    }
-
-    {
-      TimestampWritable tsSet3 = new TimestampWritable();
-      tsSet3.set(tsw);
-      assertTSWEquals(tsw, tsSet3);
-    }
-
-    {
-      TimestampWritable tsSet4 = new TimestampWritable();
-      tsSet4.set(deserTSW);
-      assertTSWEquals(tsw, tsSet4);
-    }
-
-    double expectedDbl = getSeconds(ts) + 1e-9d * ts.getNanos();
-    assertTrue(Math.abs(tsw.getDouble() - expectedDbl) < 1e-10d);
-
-    return deserTSW;
-  }
-
-  private static int randomNanos(Random rand, int decimalDigits) {
-    // Only keep the most significant decimalDigits digits.
-    int nanos = rand.nextInt(BILLION);
-    return nanos - nanos % (int) Math.pow(10, 9 - decimalDigits);
-  }
-
-  private static int randomNanos(Random rand) {
-    return randomNanos(rand, rand.nextInt(10));
-  }
-
-  private static void checkTimestampWithAndWithoutNanos(Timestamp ts, int nanos)
-      throws IOException {
-    serializeDeserializeAndCheckTimestamp(ts);
-
-    ts.setNanos(nanos);
-    assertEquals(serializeDeserializeAndCheckTimestamp(ts).getNanos(), nanos);
-  }
-
-  private static TimestampWritable fromIntAndVInts(int i, long... vints) throws IOException {
-    ByteArrayOutputStream baos = new ByteArrayOutputStream();
-    DataOutputStream dos = new DataOutputStream(baos);
-    dos.writeInt(i);
-    if ((i & HAS_DECIMAL_MASK) != 0) {
-      for (long vi : vints) {
-        WritableUtils.writeVLong(dos, vi);
-      }
-    }
-    byte[] bytes = baos.toByteArray();
-    TimestampWritable tsw = deserializeFromBytes(bytes);
-    assertEquals(toList(bytes), toList(serializeToBytes(tsw)));
-    return tsw;
-  }
-
-  @Test
-  @Concurrent(count=4)
-  @Repeating(repetition=100)
-  public void testReverseNanos() {
-    assertEquals(0, reverseNanos(0));
-    assertEquals(120000000, reverseNanos(21));
-    assertEquals(32100000, reverseNanos(1230));
-    assertEquals(5, reverseNanos(500000000));
-    assertEquals(987654321, reverseNanos(123456789));
-    assertEquals(12345678, reverseNanos(876543210));
-  }
-
-  /**
-   * Test serializing and deserializing timestamps that can be represented by a number of seconds
-   * from 0 to 2147483647 since the UNIX epoch.
-   */
-  @Test
-  @Concurrent(count=4)
-  public void testTimestampsWithinPositiveIntRange() throws IOException {
-    Random rand = new Random(294722773L);
-    for (int i = 0; i < 10000; ++i) {
-      long millis = ((long) rand.nextInt(Integer.MAX_VALUE)) * 1000;
-      checkTimestampWithAndWithoutNanos(new Timestamp(millis), randomNanos(rand));
-    }
-  }
-
-  private static long randomMillis(long minMillis, long maxMillis, Random rand) {
-    return minMillis + (long) ((maxMillis - minMillis) * rand.nextDouble());
-  }
-
-  /**
-   * Test timestamps that don't necessarily fit between 1970 and 2038. This depends on HIVE-4525
-   * being fixed.
-   */
-  @Test
-  @Concurrent(count=4)
-  public void testTimestampsOutsidePositiveIntRange() throws IOException {
-    Random rand = new Random(789149717L);
-    for (int i = 0; i < 10000; ++i) {
-      long millis = randomMillis(MIN_FOUR_DIGIT_YEAR_MILLIS, MAX_FOUR_DIGIT_YEAR_MILLIS, rand);
-      checkTimestampWithAndWithoutNanos(new Timestamp(millis), randomNanos(rand));
-    }
-  }
-
-  @Test
-  @Concurrent(count=4)
-  public void testTimestampsInFullRange() throws IOException {
-    Random rand = new Random(2904974913L);
-    for (int i = 0; i < 10000; ++i) {
-      checkTimestampWithAndWithoutNanos(new Timestamp(rand.nextLong()), randomNanos(rand));
-    }
-  }
-
-  @Test
-  @Concurrent(count=4)
-  public void testToFromDouble() {
-    Random rand = new Random(294729777L);
-    for (int nanosPrecision = 0; nanosPrecision <= 4; ++nanosPrecision) {
-      for (int i = 0; i < 10000; ++i) {
-        long millis = randomMillis(MIN_FOUR_DIGIT_YEAR_MILLIS, MAX_FOUR_DIGIT_YEAR_MILLIS, rand);
-        Timestamp ts = new Timestamp(millis);
-        int nanos = randomNanos(rand, nanosPrecision);
-        ts.setNanos(nanos);
-        TimestampWritable tsw = new TimestampWritable(ts);
-        double asDouble = tsw.getDouble();
-        int recoveredNanos =
-          (int) (Math.round((asDouble - Math.floor(asDouble)) * Math.pow(10, nanosPrecision)) *
-            Math.pow(10, 9 - nanosPrecision));
-        assertEquals(String.format("Invalid nanosecond part recovered from %f", asDouble),
-          nanos, recoveredNanos);
-        assertEquals(ts, TimestampUtils.doubleToTimestamp(asDouble));
-        // decimalToTimestamp should be consistent with doubleToTimestamp for this level of
-        // precision.
-        assertEquals(ts, TimestampUtils.decimalToTimestamp(
-            HiveDecimal.create(BigDecimal.valueOf(asDouble))));
-      }
-    }
-  }
-
-  private static HiveDecimal timestampToDecimal(Timestamp ts) {
-    BigDecimal d = new BigDecimal(getSeconds(ts));
-    d = d.add(new BigDecimal(ts.getNanos()).divide(new BigDecimal(BILLION)));
-    return HiveDecimal.create(d);
-  }
-
-  @Test
-  @Concurrent(count=4)
-  public void testDecimalToTimestampRandomly() {
-    Random rand = new Random(294729777L);
-    for (int i = 0; i < 10000; ++i) {
-      Timestamp ts = new Timestamp(
-          randomMillis(MIN_FOUR_DIGIT_YEAR_MILLIS, MAX_FOUR_DIGIT_YEAR_MILLIS, rand));
-      ts.setNanos(randomNanos(rand, 9));  // full precision
-      assertEquals(ts, TimestampUtils.decimalToTimestamp(timestampToDecimal(ts)));
-    }
-  }
-
-  @Test
-  @Concurrent(count=4)
-  @Repeating(repetition=100)
-  public void testDecimalToTimestampCornerCases() {
-    Timestamp ts = new Timestamp(parseToMillis("1969-03-04 05:44:33"));
-    assertEquals(0, ts.getTime() % 1000);
-    for (int nanos : new int[] { 100000, 900000, 999100000, 999900000 }) {
-      ts.setNanos(nanos);
-      HiveDecimal d = timestampToDecimal(ts);
-      assertEquals(ts, TimestampUtils.decimalToTimestamp(d));
-      assertEquals(ts, TimestampUtils.doubleToTimestamp(d.bigDecimalValue().doubleValue()));
-    }
-  }
-
-  @Test
-  @Concurrent(count=4)
-  @Repeating(repetition=100)
-  public void testSerializationFormatDirectly() throws IOException {
-    assertEquals("1970-01-01 00:00:00", fromIntAndVInts(0).toString());
-    assertEquals("1970-01-01 00:00:01", fromIntAndVInts(1).toString());
-    assertEquals("1970-01-01 00:05:00", fromIntAndVInts(300).toString());
-    assertEquals("1970-01-01 02:00:00", fromIntAndVInts(7200).toString());
-    assertEquals("2000-01-02 03:04:05", fromIntAndVInts(946782245).toString());
-
-    // This won't have a decimal part because the HAS_DECIMAL_MASK bit is not set.
-    assertEquals("2000-01-02 03:04:05", fromIntAndVInts(946782245, 3210).toString());
-
-    assertEquals("2000-01-02 03:04:05.0123",
-      fromIntAndVInts(946782245 | HAS_DECIMAL_MASK, 3210).toString());
-
-    assertEquals("2038-01-19 03:14:07", fromIntAndVInts(Integer.MAX_VALUE).toString());
-    assertEquals("2038-01-19 03:14:07.012345678",
-      fromIntAndVInts(Integer.MAX_VALUE | HAS_DECIMAL_MASK,  // this is really just -1
-        876543210).toString());
-
-    // Timestamps with a second VInt storing additional bits of the seconds field.
-    long seconds = 253392390415L;
-    assertEquals("9999-09-08 07:06:55",
-      fromIntAndVInts((int) (seconds & 0x7fffffff) | (1 << 31), -1L, seconds >> 31).toString());
-    assertEquals("9999-09-08 07:06:55.0123",
-      fromIntAndVInts((int) (seconds & 0x7fffffff) | (1 << 31),
-                      -3210 - 1, seconds >> 31).toString());
-  }
-
-  @Test
-  @Concurrent(count=4)
-  @Repeating(repetition=100)
-  public void testMaxSize() {
-    // This many bytes are necessary to store the reversed nanoseconds.
-    assertEquals(5, WritableUtils.getVIntSize(999999999));
-    assertEquals(5, WritableUtils.getVIntSize(-2 - 999999999));
-
-    // Bytes necessary to store extra bits of the second timestamp if storing a timestamp
-    // before 1970 or after 2038.
-    assertEquals(3, WritableUtils.getVIntSize(Short.MAX_VALUE));
-    assertEquals(3, WritableUtils.getVIntSize(Short.MIN_VALUE));
-
-    // Test that MAX_ADDITIONAL_SECONDS_BITS is really the maximum value of the
-    // additional bits (beyond 31 bits) of the seconds-since-epoch part of timestamp.
-    assertTrue((((long) MAX_ADDITIONAL_SECONDS_BITS) << 31) * 1000 < Long.MAX_VALUE);
-    assertTrue((((double) MAX_ADDITIONAL_SECONDS_BITS + 1) * (1L << 31)) * 1000 >
-      Long.MAX_VALUE);
-
-    // This is how many bytes we need to store those additonal bits as a VInt.
-    assertEquals(4, WritableUtils.getVIntSize(MAX_ADDITIONAL_SECONDS_BITS));
-
-    // Therefore, the maximum total size of a serialized timestamp is 4 + 5 + 4 = 13.
-  }
-
-  @Test
-  @Concurrent(count=4)
-  @Repeating(repetition=100)
-  public void testMillisToSeconds() {
-    assertEquals(0, TimestampUtils.millisToSeconds(0));
-    assertEquals(-1, TimestampUtils.millisToSeconds(-1));
-    assertEquals(-1, TimestampUtils.millisToSeconds(-999));
-    assertEquals(-1, TimestampUtils.millisToSeconds(-1000));
-    assertEquals(-2, TimestampUtils.millisToSeconds(-1001));
-    assertEquals(-2, TimestampUtils .millisToSeconds(-1999));
-    assertEquals(-2, TimestampUtils .millisToSeconds(-2000));
-    assertEquals(-3, TimestampUtils .millisToSeconds(-2001));
-    assertEquals(-99, TimestampUtils .millisToSeconds(-99000));
-    assertEquals(-100, TimestampUtils .millisToSeconds(-99001));
-    assertEquals(-100, TimestampUtils .millisToSeconds(-100000));
-    assertEquals(1, TimestampUtils .millisToSeconds(1500));
-    assertEquals(19, TimestampUtils .millisToSeconds(19999));
-    assertEquals(20, TimestampUtils .millisToSeconds(20000));
-  }
-
-  private static int compareEqualLengthByteArrays(byte[] a, byte[] b) {
-    assertEquals(a.length, b.length);
-    for (int i = 0; i < a.length; ++i) {
-      if (a[i] != b[i]) {
-        return (a[i] & 0xff) - (b[i] & 0xff);
-      }
-    }
-    return 0;
-  }
-
-  private static int normalizeComparisonResult(int result) {
-    return result < 0 ? -1 : (result > 0 ? 1 : 0);
-  }
-
-  @Test
-  @Concurrent(count=4)
-  @Repeating(repetition=100)
-  public void testBinarySortable() {
-    Random rand = new Random(5972977L);
-    List<TimestampWritable> tswList = new ArrayList<TimestampWritable>();
-    for (int i = 0; i < 50; ++i) {
-      Timestamp ts = new Timestamp(rand.nextLong());
-      ts.setNanos(randomNanos(rand));
-      tswList.add(new TimestampWritable(ts));
-    }
-    for (TimestampWritable tsw1 : tswList) {
-      byte[] bs1 = tsw1.getBinarySortable();
-      for (TimestampWritable tsw2 : tswList) {
-        byte[] bs2 = tsw2.getBinarySortable();
-        int binaryComparisonResult =
-          normalizeComparisonResult(compareEqualLengthByteArrays(bs1, bs2));
-        int comparisonResult = normalizeComparisonResult(tsw1.compareTo(tsw2));
-        if (binaryComparisonResult != comparisonResult) {
-          throw new AssertionError("TimestampWritables " + tsw1 + " and " + tsw2 + " compare as " +
-            comparisonResult + " using compareTo but as " + binaryComparisonResult + " using " +
-            "getBinarySortable");
-        }
-      }
-    }
-  }
-
-  @Test
-  public void testSetTimestamp() {
-    // one VInt without nanos
-    verifySetTimestamp(1000);
-
-    // one VInt with nanos
-    verifySetTimestamp(1001);
-
-    // two VInt without nanos
-    verifySetTimestamp((long) Integer.MAX_VALUE * 1000 + 1000);
-
-    // two VInt with nanos
-    verifySetTimestamp((long) Integer.MAX_VALUE * 1000 + 1234);
-  }
-
-  private static void verifySetTimestamp(long time) {
-    Timestamp t1 = new Timestamp(time);
-    TimestampWritable writable = new TimestampWritable(t1);
-    byte[] bytes = writable.getBytes();
-    Timestamp t2 = new Timestamp(0);
-    TimestampWritable.setTimestamp(t2, bytes, 0);
-    assertEquals(t1, t2);
-  }
-
-}
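
A note on the serialization layout that testSerializationFormatDirectly and testMaxSize
exercise (in both the removed test above and the V2 test below): as read from the
assertions, the writable starts with a 4-byte int whose lower 31 bits are seconds since
the epoch; the sign bit (HAS_DECIMAL_MASK) flags a trailing VInt, where a non-negative
value is the nanoseconds with their decimal digits reversed, and a negative value encodes
the reversed nanoseconds as -(value) - 1 and signals one more VInt carrying the remaining
high bits of the seconds. The sketch below is only an illustration of that reading -- it
mirrors the tests' own fromIntAndVInts() helper rather than quoting TimestampWritable
itself, and the class and method names are invented for the example.

    // Hypothetical sketch: assembling the byte layout the assertions above describe.
    import java.io.ByteArrayOutputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    import org.apache.hadoop.io.WritableUtils;

    public class TimestampBytesSketch {
      private static final int HAS_DECIMAL_MASK = 0x80000000;

      // secondsField: lower 31 bits of seconds-since-epoch; the sign bit says whether
      // any VInts follow (reversed nanos, then optional extra seconds bits).
      static byte[] encode(int secondsField, long... vints) throws IOException {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        DataOutputStream dos = new DataOutputStream(baos);
        dos.writeInt(secondsField);                  // fixed 4 bytes
        if ((secondsField & HAS_DECIMAL_MASK) != 0) {
          for (long vi : vints) {
            WritableUtils.writeVLong(dos, vi);       // variable-length, 1-9 bytes each
          }
        }
        return baos.toByteArray();
      }
    }

Per the assertions above, encode(946782245 | 0x80000000, 3210) would correspond to
"2000-01-02 03:04:05.0123", and the worst case (4-byte int, 5-byte reversed-nanos VInt,
4-byte extra-seconds VInt) is the 13 bytes that the testMaxSize comments reason about.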

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/serde/src/test/org/apache/hadoop/hive/serde2/io/TestTimestampWritableV2.java
----------------------------------------------------------------------
diff --git 
a/serde/src/test/org/apache/hadoop/hive/serde2/io/TestTimestampWritableV2.java 
b/serde/src/test/org/apache/hadoop/hive/serde2/io/TestTimestampWritableV2.java
new file mode 100644
index 0000000..155dc1f
--- /dev/null
+++ 
b/serde/src/test/org/apache/hadoop/hive/serde2/io/TestTimestampWritableV2.java
@@ -0,0 +1,520 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.serde2.io;
+
+import com.google.code.tempusfugit.concurrency.annotations.*;
+import com.google.code.tempusfugit.concurrency.*;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.math.BigDecimal;
+import java.text.DateFormat;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Random;
+import java.util.TimeZone;
+
+import org.apache.hadoop.hive.common.type.TimestampUtils;
+import org.junit.*;
+import static org.junit.Assert.*;
+
+import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.apache.hadoop.hive.common.type.Timestamp;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.io.WritableUtils;
+
+public class TestTimestampWritableV2 {
+
+  @Rule public ConcurrentRule concurrentRule = new ConcurrentRule();
+  @Rule public RepeatingRule repeatingRule = new RepeatingRule();
+
+  private static ThreadLocal<DateFormat> DATE_FORMAT =
+      new ThreadLocal<DateFormat>() {
+        @Override
+        protected DateFormat initialValue() {
+          SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
+          formatter.setTimeZone(TimeZone.getTimeZone("UTC"));
+          return formatter;
+        }
+      };
+
+  private static final int HAS_DECIMAL_MASK = 0x80000000;
+
+  private static final long MAX_ADDITIONAL_SECONDS_BITS = 0x418937;
+
+  private static long MIN_FOUR_DIGIT_YEAR_MILLIS = parseToMillis("0001-01-01 00:00:00");
+  private static long MAX_FOUR_DIGIT_YEAR_MILLIS = parseToMillis("9999-01-01 00:00:00");
+
+  private static int BILLION = 1000 * 1000 * 1000;
+
+  private static long getSeconds(Timestamp ts) {
+    // To compute seconds, we first subtract the milliseconds stored in the nanos field of the
+    // Timestamp from the result of toEpochMilli().
+    long seconds = (ts.toEpochMilli() - ts.getNanos() / 1000000) / 1000;
+
+    // It should also be possible to calculate this based on ts.toEpochMilli() only.
+    assertEquals(seconds, TimestampUtils.millisToSeconds(ts.toEpochMilli()));
+
+    return seconds;
+  }
+
+  private static long parseToMillis(String s) {
+    try {
+      return DATE_FORMAT.get().parse(s).getTime();
+    } catch (ParseException ex) {
+      throw new RuntimeException(ex);
+    }
+  }
+
+  @Before
+  public void setUp() {
+    TimeZone.setDefault(TimeZone.getTimeZone("UTC"));
+  }
+
+  private static String normalizeTimestampStr(String timestampStr) {
+    if (timestampStr.endsWith(".0")) {
+      return timestampStr.substring(0, timestampStr.length() - 2);
+    }
+    return timestampStr;
+  }
+
+  private static void assertTSWEquals(TimestampWritableV2 expected, TimestampWritableV2 actual) {
+    assertEquals(normalizeTimestampStr(expected.toString()),
+                 normalizeTimestampStr(actual.toString()));
+    assertEquals(expected, actual);
+    assertEquals(expected.getTimestamp(), actual.getTimestamp());
+  }
+
+  private static TimestampWritableV2 deserializeFromBytes(byte[] tsBytes) throws IOException {
+    ByteArrayInputStream bais = new ByteArrayInputStream(tsBytes);
+    DataInputStream dis = new DataInputStream(bais);
+    TimestampWritableV2 deserTSW = new TimestampWritableV2();
+    deserTSW.readFields(dis);
+    return deserTSW;
+  }
+
+  private static int reverseNanos(int nanos) {
+    if (nanos == 0) {
+      return 0;
+    }
+    if (nanos < 0 || nanos >= 1000 * 1000 * 1000) {
+      throw new IllegalArgumentException("Invalid nanosecond value: " + nanos);
+    }
+
+    int x = nanos;
+    StringBuilder reversed = new StringBuilder();
+    while (x != 0) {
+      reversed.append((char)('0' + x % 10));
+      x /= 10;
+    }
+
+    int result = Integer.parseInt(reversed.toString());
+    while (nanos < 100 * 1000 * 1000) {
+      result *= 10;
+      nanos *= 10;
+    }
+    return result;
+  }
+
+  private static byte[] serializeToBytes(Writable w) throws IOException {
+    ByteArrayOutputStream baos = new ByteArrayOutputStream();
+    DataOutputStream dos = new DataOutputStream(baos);
+    w.write(dos);
+    return baos.toByteArray();
+  }
+
+  private static List<Byte> toList(byte[] a) {
+    List<Byte> list = new ArrayList<Byte>(a.length);
+    for (byte b : a) {
+      list.add(b);
+    }
+    return list;
+  }
+
+  /**
+   * Pad the given byte array with the given number of bytes in the beginning. The padding bytes
+   * deterministically depend on the passed data.
+   */
+  private static byte[] padBytes(byte[] bytes, int count) {
+    byte[] result = new byte[bytes.length + count];
+    for (int i = 0; i < count; ++i) {
+      // Fill the prefix bytes with deterministic data based on the actual meaningful data.
+      result[i] = (byte) (bytes[i % bytes.length] * 37 + 19);
+    }
+    System.arraycopy(bytes, 0, result, count, bytes.length);
+    return result;
+  }
+
+  private static TimestampWritableV2 serializeDeserializeAndCheckTimestamp(Timestamp ts)
+      throws IOException {
+    TimestampWritableV2 tsw = new TimestampWritableV2(ts);
+    assertEquals(ts, tsw.getTimestamp());
+
+    byte[] tsBytes = serializeToBytes(tsw);
+    TimestampWritableV2 deserTSW = deserializeFromBytes(tsBytes);
+    assertTSWEquals(tsw, deserTSW);
+    assertEquals(ts, deserTSW.getTimestamp());
+    assertEquals(tsBytes.length, tsw.getTotalLength());
+
+    // Also convert to/from binary-sortable representation.
+    int binarySortableOffset = Math.abs(tsw.hashCode()) % 10;
+    byte[] binarySortableBytes = padBytes(tsw.getBinarySortable(), binarySortableOffset);
+    TimestampWritableV2 fromBinSort = new TimestampWritableV2();
+    fromBinSort.setBinarySortable(binarySortableBytes, binarySortableOffset);
+    assertTSWEquals(tsw, fromBinSort);
+
+    long timeSeconds = ts.toEpochSecond();
+    if (0 <= timeSeconds && timeSeconds <= Integer.MAX_VALUE) {
+      assertEquals(Timestamp.ofEpochSecond(timeSeconds),
+        fromIntAndVInts((int) timeSeconds, 0).getTimestamp());
+
+      int nanos = reverseNanos(ts.getNanos());
+      assertEquals(ts,
+        fromIntAndVInts((int) timeSeconds | (nanos != 0 ? HAS_DECIMAL_MASK : 0),
+          nanos).getTimestamp());
+    }
+
+    assertEquals(ts.getNanos(), tsw.getNanos());
+    assertEquals(getSeconds(ts), tsw.getSeconds());
+
+    // Test various set methods and copy constructors.
+    {
+      TimestampWritableV2 tsSet1 = new TimestampWritableV2();
+      // make the offset non-zero to keep things interesting.
+      int offset = Math.abs(ts.hashCode() % 32);
+      byte[] shiftedBytes = padBytes(tsBytes, offset);
+      tsSet1.set(shiftedBytes, offset);
+      assertTSWEquals(tsw, tsSet1);
+
+      TimestampWritableV2 tswShiftedBytes = new TimestampWritableV2(shiftedBytes, offset);
+      assertTSWEquals(tsw, tswShiftedBytes);
+      assertTSWEquals(tsw, deserializeFromBytes(serializeToBytes(tswShiftedBytes)));
+    }
+
+    {
+      TimestampWritableV2 tsSet2 = new TimestampWritableV2();
+      tsSet2.set(ts);
+      assertTSWEquals(tsw, tsSet2);
+    }
+
+    {
+      TimestampWritableV2 tsSet3 = new TimestampWritableV2();
+      tsSet3.set(tsw);
+      assertTSWEquals(tsw, tsSet3);
+    }
+
+    {
+      TimestampWritableV2 tsSet4 = new TimestampWritableV2();
+      tsSet4.set(deserTSW);
+      assertTSWEquals(tsw, tsSet4);
+    }
+
+    double expectedDbl = getSeconds(ts) + 1e-9d * ts.getNanos();
+    assertTrue(Math.abs(tsw.getDouble() - expectedDbl) < 1e-10d);
+
+    return deserTSW;
+  }
+
+  private static int randomNanos(Random rand, int decimalDigits) {
+    // Only keep the most significant decimalDigits digits.
+    int nanos = rand.nextInt(BILLION);
+    return nanos - nanos % (int) Math.pow(10, 9 - decimalDigits);
+  }
+
+  private static int randomNanos(Random rand) {
+    return randomNanos(rand, rand.nextInt(10));
+  }
+
+  private static void checkTimestampWithAndWithoutNanos(Timestamp ts, int nanos)
+      throws IOException {
+    serializeDeserializeAndCheckTimestamp(ts);
+
+    ts.setNanos(nanos);
+    assertEquals(serializeDeserializeAndCheckTimestamp(ts).getNanos(), nanos);
+  }
+
+  private static TimestampWritableV2 fromIntAndVInts(int i, long... vints) throws IOException {
+    ByteArrayOutputStream baos = new ByteArrayOutputStream();
+    DataOutputStream dos = new DataOutputStream(baos);
+    dos.writeInt(i);
+    if ((i & HAS_DECIMAL_MASK) != 0) {
+      for (long vi : vints) {
+        WritableUtils.writeVLong(dos, vi);
+      }
+    }
+    byte[] bytes = baos.toByteArray();
+    TimestampWritableV2 tsw = deserializeFromBytes(bytes);
+    assertEquals(toList(bytes), toList(serializeToBytes(tsw)));
+    return tsw;
+  }
+
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
+  public void testReverseNanos() {
+    assertEquals(0, reverseNanos(0));
+    assertEquals(120000000, reverseNanos(21));
+    assertEquals(32100000, reverseNanos(1230));
+    assertEquals(5, reverseNanos(500000000));
+    assertEquals(987654321, reverseNanos(123456789));
+    assertEquals(12345678, reverseNanos(876543210));
+  }
+
+  /**
+   * Test serializing and deserializing timestamps that can be represented by a number of seconds
+   * from 0 to 2147483647 since the UNIX epoch.
+   */
+  @Test
+  @Concurrent(count=4)
+  public void testTimestampsWithinPositiveIntRange() throws IOException {
+    Random rand = new Random(294722773L);
+    for (int i = 0; i < 10000; ++i) {
+      long millis = ((long) rand.nextInt(Integer.MAX_VALUE)) * 1000;
+      checkTimestampWithAndWithoutNanos(Timestamp.ofEpochMilli(millis), randomNanos(rand));
+    }
+  }
+
+  private static long randomMillis(long minMillis, long maxMillis, Random rand) {
+    return minMillis + (long) ((maxMillis - minMillis) * rand.nextDouble());
+  }
+
+  /**
+   * Test timestamps that don't necessarily fit between 1970 and 2038. This depends on HIVE-4525
+   * being fixed.
+   */
+  @Test
+  @Concurrent(count=4)
+  public void testTimestampsOutsidePositiveIntRange() throws IOException {
+    Random rand = new Random(789149717L);
+    for (int i = 0; i < 10000; ++i) {
+      long millis = randomMillis(MIN_FOUR_DIGIT_YEAR_MILLIS, MAX_FOUR_DIGIT_YEAR_MILLIS, rand);
+      checkTimestampWithAndWithoutNanos(Timestamp.ofEpochMilli(millis), randomNanos(rand));
+    }
+  }
+
+  @Test
+  @Concurrent(count=4)
+  public void testTimestampsInFullRange() throws IOException {
+    Random rand = new Random(2904974913L);
+    for (int i = 0; i < 10000; ++i) {
+      checkTimestampWithAndWithoutNanos(Timestamp.ofEpochMilli(rand.nextLong()), randomNanos(rand));
+    }
+  }
+
+  @Test
+  @Concurrent(count=4)
+  public void testToFromDouble() {
+    Random rand = new Random(294729777L);
+    for (int nanosPrecision = 0; nanosPrecision <= 4; ++nanosPrecision) {
+      for (int i = 0; i < 10000; ++i) {
+        long millis = randomMillis(MIN_FOUR_DIGIT_YEAR_MILLIS, MAX_FOUR_DIGIT_YEAR_MILLIS, rand);
+        int nanos = randomNanos(rand, nanosPrecision);
+        Timestamp ts = Timestamp.ofEpochMilli(millis, nanos);
+        TimestampWritableV2 tsw = new TimestampWritableV2(ts);
+        double asDouble = tsw.getDouble();
+        int recoveredNanos =
+          (int) (Math.round((asDouble - Math.floor(asDouble)) * Math.pow(10, nanosPrecision)) *
+            Math.pow(10, 9 - nanosPrecision));
+        assertEquals(String.format("Invalid nanosecond part recovered from %f", asDouble),
+          nanos, recoveredNanos);
+        assertEquals(ts, TimestampUtils.doubleToTimestamp(asDouble));
+        // decimalToTimestamp should be consistent with doubleToTimestamp for this level of
+        // precision.
+        assertEquals(ts, TimestampUtils.decimalToTimestamp(
+            HiveDecimal.create(BigDecimal.valueOf(asDouble))));
+      }
+    }
+  }
+
+  private static HiveDecimal timestampToDecimal(Timestamp ts) {
+    BigDecimal d = new BigDecimal(getSeconds(ts));
+    d = d.add(new BigDecimal(ts.getNanos()).divide(new BigDecimal(BILLION)));
+    return HiveDecimal.create(d);
+  }
+
+  @Test
+  @Concurrent(count=4)
+  public void testDecimalToTimestampRandomly() {
+    Random rand = new Random(294729777L);
+    for (int i = 0; i < 10000; ++i) {
+      Timestamp ts = Timestamp.ofEpochMilli(
+          randomMillis(MIN_FOUR_DIGIT_YEAR_MILLIS, MAX_FOUR_DIGIT_YEAR_MILLIS, rand));
+      ts.setNanos(randomNanos(rand, 9));  // full precision
+      assertEquals(ts, TimestampUtils.decimalToTimestamp(timestampToDecimal(ts)));
+    }
+  }
+
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
+  public void testDecimalToTimestampCornerCases() {
+    Timestamp ts = Timestamp.ofEpochMilli(parseToMillis("1969-03-04 05:44:33"));
+    assertEquals(0, ts.toEpochMilli() % 1000);
+    for (int nanos : new int[] { 100000, 900000, 999100000, 999900000 }) {
+      ts.setNanos(nanos);
+      HiveDecimal d = timestampToDecimal(ts);
+      assertEquals(ts, TimestampUtils.decimalToTimestamp(d));
+      assertEquals(ts, TimestampUtils.doubleToTimestamp(d.bigDecimalValue().doubleValue()));
+    }
+  }
+
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
+  public void testSerializationFormatDirectly() throws IOException {
+    assertEquals("1970-01-01 00:00:00", fromIntAndVInts(0).toString());
+    assertEquals("1970-01-01 00:00:01", fromIntAndVInts(1).toString());
+    assertEquals("1970-01-01 00:05:00", fromIntAndVInts(300).toString());
+    assertEquals("1970-01-01 02:00:00", fromIntAndVInts(7200).toString());
+    assertEquals("2000-01-02 03:04:05", fromIntAndVInts(946782245).toString());
+
+    // This won't have a decimal part because the HAS_DECIMAL_MASK bit is not set.
+    assertEquals("2000-01-02 03:04:05", fromIntAndVInts(946782245, 3210).toString());
+
+    assertEquals("2000-01-02 03:04:05.0123",
+      fromIntAndVInts(946782245 | HAS_DECIMAL_MASK, 3210).toString());
+
+    assertEquals("2038-01-19 03:14:07", fromIntAndVInts(Integer.MAX_VALUE).toString());
+    assertEquals("2038-01-19 03:14:07.012345678",
+      fromIntAndVInts(Integer.MAX_VALUE | HAS_DECIMAL_MASK,  // this is really just -1
+        876543210).toString());
+
+    // Timestamps with a second VInt storing additional bits of the seconds field.
+    long seconds = 253392390415L;
+    assertEquals("9999-09-08 07:06:55",
+      fromIntAndVInts((int) (seconds & 0x7fffffff) | (1 << 31), -1L, seconds >> 31).toString());
+    assertEquals("9999-09-08 07:06:55.0123",
+      fromIntAndVInts((int) (seconds & 0x7fffffff) | (1 << 31),
+                      -3210 - 1, seconds >> 31).toString());
+  }
+
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
+  public void testMaxSize() {
+    // This many bytes are necessary to store the reversed nanoseconds.
+    assertEquals(5, WritableUtils.getVIntSize(999999999));
+    assertEquals(5, WritableUtils.getVIntSize(-2 - 999999999));
+
+    // Bytes necessary to store extra bits of the second timestamp if storing a timestamp
+    // before 1970 or after 2038.
+    assertEquals(3, WritableUtils.getVIntSize(Short.MAX_VALUE));
+    assertEquals(3, WritableUtils.getVIntSize(Short.MIN_VALUE));
+
+    // Test that MAX_ADDITIONAL_SECONDS_BITS is really the maximum value of the
+    // additional bits (beyond 31 bits) of the seconds-since-epoch part of timestamp.
+    assertTrue((((long) MAX_ADDITIONAL_SECONDS_BITS) << 31) * 1000 < Long.MAX_VALUE);
+    assertTrue((((double) MAX_ADDITIONAL_SECONDS_BITS + 1) * (1L << 31)) * 1000 >
+      Long.MAX_VALUE);
+
+    // This is how many bytes we need to store those additional bits as a VInt.
+    assertEquals(4, WritableUtils.getVIntSize(MAX_ADDITIONAL_SECONDS_BITS));
+
+    // Therefore, the maximum total size of a serialized timestamp is 4 + 5 + 4 = 13.
+  }
+
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
+  public void testMillisToSeconds() {
+    assertEquals(0, TimestampUtils.millisToSeconds(0));
+    assertEquals(-1, TimestampUtils.millisToSeconds(-1));
+    assertEquals(-1, TimestampUtils.millisToSeconds(-999));
+    assertEquals(-1, TimestampUtils.millisToSeconds(-1000));
+    assertEquals(-2, TimestampUtils.millisToSeconds(-1001));
+    assertEquals(-2, TimestampUtils .millisToSeconds(-1999));
+    assertEquals(-2, TimestampUtils .millisToSeconds(-2000));
+    assertEquals(-3, TimestampUtils .millisToSeconds(-2001));
+    assertEquals(-99, TimestampUtils .millisToSeconds(-99000));
+    assertEquals(-100, TimestampUtils .millisToSeconds(-99001));
+    assertEquals(-100, TimestampUtils .millisToSeconds(-100000));
+    assertEquals(1, TimestampUtils .millisToSeconds(1500));
+    assertEquals(19, TimestampUtils .millisToSeconds(19999));
+    assertEquals(20, TimestampUtils.millisToSeconds(20000));
+  }
+
+  private static int compareEqualLengthByteArrays(byte[] a, byte[] b) {
+    assertEquals(a.length, b.length);
+    for (int i = 0; i < a.length; ++i) {
+      if (a[i] != b[i]) {
+        return (a[i] & 0xff) - (b[i] & 0xff);
+      }
+    }
+    return 0;
+  }
+
+  private static int normalizeComparisonResult(int result) {
+    return result < 0 ? -1 : (result > 0 ? 1 : 0);
+  }
+
+  @Test
+  @Concurrent(count=4)
+  @Repeating(repetition=100)
+  public void testBinarySortable() {
+    Random rand = new Random(5972977L);
+    List<TimestampWritableV2> tswList = new ArrayList<TimestampWritableV2>();
+    for (int i = 0; i < 50; ++i) {
+      Timestamp ts = Timestamp.ofEpochMilli(rand.nextLong(), randomNanos(rand));
+      tswList.add(new TimestampWritableV2(ts));
+    }
+    for (TimestampWritableV2 tsw1 : tswList) {
+      byte[] bs1 = tsw1.getBinarySortable();
+      for (TimestampWritableV2 tsw2 : tswList) {
+        byte[] bs2 = tsw2.getBinarySortable();
+        int binaryComparisonResult =
+          normalizeComparisonResult(compareEqualLengthByteArrays(bs1, bs2));
+        int comparisonResult = normalizeComparisonResult(tsw1.compareTo(tsw2));
+        if (binaryComparisonResult != comparisonResult) {
+          throw new AssertionError("TimestampWritables " + tsw1 + " and " + tsw2 + " compare as " +
+            comparisonResult + " using compareTo but as " + binaryComparisonResult + " using " +
+            "getBinarySortable");
+        }
+      }
+    }
+  }
+
+  @Test
+  public void testSetTimestamp() {
+    // one VInt without nanos
+    verifySetTimestamp(1000);
+
+    // one VInt with nanos
+    verifySetTimestamp(1001);
+
+    // two VInt without nanos
+    verifySetTimestamp((long) Integer.MAX_VALUE * 1000 + 1000);
+
+    // two VInt with nanos
+    verifySetTimestamp((long) Integer.MAX_VALUE * 1000 + 1234);
+  }
+
+  private static void verifySetTimestamp(long time) {
+    Timestamp t1 = Timestamp.ofEpochMilli(time);
+    TimestampWritableV2 writable = new TimestampWritableV2(t1);
+    byte[] bytes = writable.getBytes();
+    Timestamp t2 = new Timestamp();
+    TimestampWritableV2.setTimestamp(t2, bytes, 0);
+    assertEquals(t1, t2);
+  }
+
+}
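
The substance of the V2 rewrite above is the move from java.sql.Timestamp to
org.apache.hadoop.hive.common.type.Timestamp and from TimestampWritable to
TimestampWritableV2. Below is a minimal sketch of the construction pattern the new test
relies on; it uses only calls that appear in the diff (Timestamp.ofEpochMilli, setNanos,
toEpochMilli, the TimestampWritableV2 constructor and getTimestamp), while the class name
and the literal values are invented for the example.

    // Hypothetical sketch of the java.sql.Timestamp -> Hive Timestamp migration pattern.
    import org.apache.hadoop.hive.common.type.Timestamp;
    import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;

    public class TimestampMigrationSketch {
      public static void main(String[] args) {
        // Old style (removed above): new java.sql.Timestamp(millis) plus setNanos(...).
        // New style: factory methods on Hive's own Timestamp type.
        Timestamp ts = Timestamp.ofEpochMilli(946782245000L);  // 2000-01-02 03:04:05 per the assertions above
        ts.setNanos(12300000);                                 // fractional second, in nanoseconds

        TimestampWritableV2 tsw = new TimestampWritableV2(ts); // Writable wrapper used by the serdes
        System.out.println(tsw.getTimestamp());                // round-trips the same value
        System.out.println(ts.toEpochMilli());                 // millis since epoch, replacing getTime()
      }
    }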

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazyPrimitive.java
----------------------------------------------------------------------
diff --git 
a/serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazyPrimitive.java 
b/serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazyPrimitive.java
index fb5dec1..79bf5fb 100644
--- a/serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazyPrimitive.java
+++ b/serde/src/test/org/apache/hadoop/hive/serde2/lazy/TestLazyPrimitive.java
@@ -17,11 +17,10 @@
  */
 package org.apache.hadoop.hive.serde2.lazy;
 
-import java.sql.Date;
-import java.sql.Timestamp;
-
 import junit.framework.TestCase;
 
+import org.apache.hadoop.hive.common.type.Date;
+import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.serde2.ByteStream;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/MyTestClassBigger.java
----------------------------------------------------------------------
diff --git 
a/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/MyTestClassBigger.java
 
b/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/MyTestClassBigger.java
index 6dcc6f8..398dc5c 100644
--- 
a/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/MyTestClassBigger.java
+++ 
b/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/MyTestClassBigger.java
@@ -17,24 +17,23 @@
  */
 package org.apache.hadoop.hive.serde2.lazybinary;
 
-import java.sql.Date;
-import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Random;
 
+import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.common.type.RandomTypeUtil;
+import org.apache.hadoop.hive.serde2.RandomTypeUtil;
+import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.serde2.binarysortable.MyTestClass;
 import org.apache.hadoop.hive.serde2.binarysortable.MyTestInnerStruct;
 import org.apache.hadoop.hive.serde2.binarysortable.MyTestPrimitiveClass;
-import org.apache.hadoop.hive.serde2.binarysortable.TestBinarySortableSerDe;
 import org.apache.hadoop.hive.serde2.binarysortable.MyTestPrimitiveClass.ExtraTypeInfo;
 
 /**

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/MyTestClassSmaller.java
----------------------------------------------------------------------
diff --git 
a/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/MyTestClassSmaller.java
 
b/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/MyTestClassSmaller.java
index 732bd42..49df56c 100644
--- 
a/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/MyTestClassSmaller.java
+++ 
b/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/MyTestClassSmaller.java
@@ -17,17 +17,16 @@
  */
 package org.apache.hadoop.hive.serde2.lazybinary;
 
-import java.sql.Date;
-import java.sql.Timestamp;
 import java.util.Random;
 
+import org.apache.hadoop.hive.common.type.Date;
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
 import org.apache.hadoop.hive.common.type.HiveIntervalDayTime;
 import org.apache.hadoop.hive.common.type.HiveIntervalYearMonth;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
-import org.apache.hadoop.hive.common.type.RandomTypeUtil;
-import org.apache.hadoop.hive.serde2.binarysortable.MyTestClass;
+import org.apache.hadoop.hive.serde2.RandomTypeUtil;
+import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.serde2.binarysortable.MyTestInnerStruct;
 import org.apache.hadoop.hive.serde2.binarysortable.MyTestPrimitiveClass;
 import org.apache.hadoop.hive.serde2.binarysortable.MyTestPrimitiveClass.ExtraTypeInfo;

http://git-wip-us.apache.org/repos/asf/hive/blob/ae008b79/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestStandardObjectInspectors.java
----------------------------------------------------------------------
diff --git 
a/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestStandardObjectInspectors.java
 
b/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestStandardObjectInspectors.java
index 17b844c..2c488b0 100644
--- 
a/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestStandardObjectInspectors.java
+++ 
b/serde/src/test/org/apache/hadoop/hive/serde2/objectinspector/TestStandardObjectInspectors.java
@@ -17,7 +17,6 @@
  */
 package org.apache.hadoop.hive.serde2.objectinspector;
 
-import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
@@ -26,13 +25,14 @@ import junit.framework.TestCase;
 
 import org.apache.hadoop.hive.common.type.HiveChar;
 import org.apache.hadoop.hive.common.type.HiveVarchar;
+import org.apache.hadoop.hive.common.type.Timestamp;
 import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.io.ByteWritable;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
 import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
 import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
-import org.apache.hadoop.hive.serde2.io.TimestampWritable;
+import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
 import org.apache.hadoop.hive.serde2.objectinspector.StandardUnionObjectInspector.StandardUnion;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
@@ -95,7 +95,7 @@ public class TestStandardObjectInspectors extends TestCase {
       doTestStandardPrimitiveObjectInspector(DoubleWritable.class, Double.class);
       doTestStandardPrimitiveObjectInspector(Text.class, String.class);
       doTestStandardPrimitiveObjectInspector(BytesWritable.class, byte[].class);
-      doTestStandardPrimitiveObjectInspector(TimestampWritable.class, Timestamp.class);
+      doTestStandardPrimitiveObjectInspector(TimestampWritableV2.class, Timestamp.class);
     } catch (Throwable e) {
       e.printStackTrace();
       throw e;

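The visible change in TestStandardObjectInspectors above is the pairing itself: the
timestamp primitive inspector is now expected to map TimestampWritableV2 to the Hive
Timestamp type rather than to java.sql.Timestamp. A rough usage sketch follows, assuming
the standard writableTimestampObjectInspector factory field (which does not appear in
this diff); everything else in the sketch is taken from calls visible above.

    // Hypothetical sketch; the factory field name is an assumption, the rest mirrors the diff.
    import org.apache.hadoop.hive.common.type.Timestamp;
    import org.apache.hadoop.hive.serde2.io.TimestampWritableV2;
    import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
    import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

    public class TimestampOISketch {
      public static void main(String[] args) {
        PrimitiveObjectInspector oi =
            PrimitiveObjectInspectorFactory.writableTimestampObjectInspector;
        TimestampWritableV2 w = new TimestampWritableV2(Timestamp.ofEpochSecond(0L));
        // After this patch the java-side object is the Hive Timestamp, not java.sql.Timestamp.
        Timestamp back = (Timestamp) oi.getPrimitiveJavaObject(w);
        System.out.println(back);  // expected: 1970-01-01 00:00:00
      }
    }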