Repository: hive
Updated Branches:
  refs/heads/master 6c4adc9fc -> cefefa24d


HIVE-19226: Extend storage-api to print timestamp values in UTC (Jesus Camacho Rodriguez, reviewed by Ashutosh Chauhan)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/cefefa24
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/cefefa24
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/cefefa24

Branch: refs/heads/master
Commit: cefefa24dc9e8a1f65770b01b5184069ebe1c6a9
Parents: 6c4adc9
Author: Jesus Camacho Rodriguez <jcama...@apache.org>
Authored: Fri Apr 20 11:00:05 2018 +0200
Committer: Jesus Camacho Rodriguez <jcama...@apache.org>
Committed: Fri Apr 20 11:00:05 2018 +0200

----------------------------------------------------------------------
 storage-api/pom.xml                             |  4 +--
 .../ql/exec/vector/TimestampColumnVector.java   | 23 ++++++++++++-
 .../hadoop/hive/ql/util/TimestampUtils.java     |  2 +-
 .../hive/common/type/TestHiveDecimal.java       |  1 -
 .../ql/exec/vector/TestStructColumnVector.java  | 34 ++++++++++++++++++++
 5 files changed, 59 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/cefefa24/storage-api/pom.xml
----------------------------------------------------------------------
diff --git a/storage-api/pom.xml b/storage-api/pom.xml
index 80fa22c..d768f3f 100644
--- a/storage-api/pom.xml
+++ b/storage-api/pom.xml
@@ -158,8 +158,8 @@
         <artifactId>maven-compiler-plugin</artifactId>
         <version>3.1</version>
         <configuration>
-          <source>1.7</source>
-          <target>1.7</target>
+          <source>1.8</source>
+          <target>1.8</target>
         </configuration>
       </plugin>
       <plugin>
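
The source/target bump is tied to the new java.time imports further down: Instant, LocalDateTime, and ZoneOffset only exist from Java 8 onward, so storage-api can no longer target 1.7. A minimal sketch of that dependency, assuming only the JDK (the class name Java8TimeCheck is illustrative, not part of the patch):

import java.time.Instant;
import java.time.LocalDateTime;
import java.time.ZoneOffset;

// Compiles on JDK 8+ but not with -source/-target 1.7, because java.time was introduced in Java 8.
public class Java8TimeCheck {
  public static void main(String[] args) {
    LocalDateTime utc = LocalDateTime.ofInstant(Instant.ofEpochMilli(0L), ZoneOffset.UTC);
    System.out.println(utc); // prints 1970-01-01T00:00
  }
}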

http://git-wip-us.apache.org/repos/asf/hive/blob/cefefa24/storage-api/src/java/org/apache/hadoop/hive/ql/exec/vector/TimestampColumnVector.java
----------------------------------------------------------------------
diff --git a/storage-api/src/java/org/apache/hadoop/hive/ql/exec/vector/TimestampColumnVector.java b/storage-api/src/java/org/apache/hadoop/hive/ql/exec/vector/TimestampColumnVector.java
index a6f5369..1744ecb 100644
--- a/storage-api/src/java/org/apache/hadoop/hive/ql/exec/vector/TimestampColumnVector.java
+++ b/storage-api/src/java/org/apache/hadoop/hive/ql/exec/vector/TimestampColumnVector.java
@@ -18,6 +18,9 @@
 package org.apache.hadoop.hive.ql.exec.vector;
 
 import java.sql.Timestamp;
+import java.time.Instant;
+import java.time.LocalDateTime;
+import java.time.ZoneOffset;
 import java.util.Arrays;
 
 import org.apache.hadoop.io.Writable;
@@ -53,6 +56,8 @@ public class TimestampColumnVector extends ColumnVector {
   private Writable scratchWritable;
      // Supports keeping a TimestampWritable object without having to import that definition...
 
+  private boolean isUTC;
+
   /**
    * Use this constructor by default. All column vectors
    * should normally be the default size.
@@ -75,6 +80,8 @@ public class TimestampColumnVector extends ColumnVector {
     scratchTimestamp = new Timestamp(0);
 
     scratchWritable = null;     // Allocated by caller.
+
+    isUTC = false;
   }
 
   /**
@@ -477,6 +484,14 @@ public class TimestampColumnVector extends ColumnVector {
     this.scratchWritable = scratchWritable;
   }
 
+  /**
+   * Set whether the timestamps in this column vector should be printed in UTC.
+   * @param value true to print values in UTC, false to use the default time zone
+   */
+  public void setIsUTC(boolean value) {
+    this.isUTC = value;
+  }
+
   @Override
   public void stringifyValue(StringBuilder buffer, int row) {
     if (isRepeating) {
@@ -485,7 +500,13 @@ public class TimestampColumnVector extends ColumnVector {
     if (noNulls || !isNull[row]) {
       scratchTimestamp.setTime(time[row]);
       scratchTimestamp.setNanos(nanos[row]);
-      buffer.append(scratchTimestamp.toString());
+      if (isUTC) {
+        LocalDateTime ts =
+            LocalDateTime.ofInstant(Instant.ofEpochMilli(time[row]), ZoneOffset.UTC).withNano(nanos[row]);
+        buffer.append(ts.toLocalDate().toString() + ' ' + ts.toLocalTime().toString());
+      } else {
+        buffer.append(scratchTimestamp.toString());
+      }
     } else {
       buffer.append("null");
     }
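
The new branch renders the epoch-millisecond value at UTC and concatenates the date and time parts with a space. A self-contained sketch of the same formatting logic, assuming nothing beyond the JDK (the helper name UtcStringifySketch is illustrative, not part of the patch):

import java.time.Instant;
import java.time.LocalDateTime;
import java.time.ZoneOffset;

// Mirrors the isUTC branch of TimestampColumnVector.stringifyValue().
public class UtcStringifySketch {

  // Formats epoch millis plus a nano-of-second field the way the patched UTC path does.
  static String stringifyUtc(long epochMillis, int nanos) {
    LocalDateTime ts =
        LocalDateTime.ofInstant(Instant.ofEpochMilli(epochMillis), ZoneOffset.UTC)
            .withNano(nanos);
    // Date and time are joined with a space, e.g. "2000-01-01 00:00:01"
    return ts.toLocalDate().toString() + ' ' + ts.toLocalTime().toString();
  }

  public static void main(String[] args) {
    // 2000-01-01 00:00:01 UTC, no fractional seconds
    System.out.println(stringifyUtc(946684801000L, 0));
  }
}

One consequence of this formatting choice: LocalTime.toString() drops a zero fraction, which is why the UTC-mode expectations in the new test below read "00:00:01" rather than the "00:00:10.0" style produced by Timestamp.toString() in the existing test.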

http://git-wip-us.apache.org/repos/asf/hive/blob/cefefa24/storage-api/src/java/org/apache/hadoop/hive/ql/util/TimestampUtils.java
----------------------------------------------------------------------
diff --git a/storage-api/src/java/org/apache/hadoop/hive/ql/util/TimestampUtils.java b/storage-api/src/java/org/apache/hadoop/hive/ql/util/TimestampUtils.java
index a087a4d..367b932 100644
--- a/storage-api/src/java/org/apache/hadoop/hive/ql/util/TimestampUtils.java
+++ b/storage-api/src/java/org/apache/hadoop/hive/ql/util/TimestampUtils.java
@@ -26,7 +26,7 @@ import java.math.BigDecimal;
 import java.sql.Timestamp;
 
 /**
- * Utitilities for Timestamps and the relevant conversions.
+ * Utilities for Timestamps and the relevant conversions.
  */
 public class TimestampUtils {
  public static final BigDecimal BILLION_BIG_DECIMAL = BigDecimal.valueOf(1000000000);

http://git-wip-us.apache.org/repos/asf/hive/blob/cefefa24/storage-api/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimal.java
----------------------------------------------------------------------
diff --git a/storage-api/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimal.java b/storage-api/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimal.java
index a22a10b..0c0e785 100644
--- a/storage-api/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimal.java
+++ b/storage-api/src/test/org/apache/hadoop/hive/common/type/TestHiveDecimal.java
@@ -30,7 +30,6 @@ import java.math.BigInteger;
 
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritableV1;
 import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
-import org.apache.hadoop.hive.common.type.RandomTypeUtil;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.StringExpr;
 import org.apache.hadoop.hive.ql.util.TimestampUtils;
 

http://git-wip-us.apache.org/repos/asf/hive/blob/cefefa24/storage-api/src/test/org/apache/hadoop/hive/ql/exec/vector/TestStructColumnVector.java
----------------------------------------------------------------------
diff --git a/storage-api/src/test/org/apache/hadoop/hive/ql/exec/vector/TestStructColumnVector.java b/storage-api/src/test/org/apache/hadoop/hive/ql/exec/vector/TestStructColumnVector.java
index 6ffd6d1..7bc03ed 100644
--- a/storage-api/src/test/org/apache/hadoop/hive/ql/exec/vector/TestStructColumnVector.java
+++ b/storage-api/src/test/org/apache/hadoop/hive/ql/exec/vector/TestStructColumnVector.java
@@ -132,4 +132,38 @@ public class TestStructColumnVector {
         "[[27, 2000-01-01 00:00:10.0], \"value 9\"]");
     assertEquals(EXPECTED, batch.toString());
   }
+
+  @Test
+  public void testStringify2() throws IOException {
+    VectorizedRowBatch batch = new VectorizedRowBatch(2);
+    LongColumnVector x1 = new LongColumnVector();
+    TimestampColumnVector x2 = new TimestampColumnVector();
+    x2.setIsUTC(true);
+    StructColumnVector x = new StructColumnVector(1024, x1, x2);
+    BytesColumnVector y = new BytesColumnVector();
+    batch.cols[0] = x;
+    batch.cols[1] = y;
+    batch.reset();
+    Timestamp ts = new Timestamp(946684800000L);
+    for(int r=0; r < 10; ++r) {
+      batch.size += 1;
+      x1.vector[r] = 3 * r;
+      ts.setTime(ts.getTime() + 1000);
+      x2.set(r, ts);
+      byte[] buffer = ("value " + r).getBytes(StandardCharsets.UTF_8);
+      y.setRef(r, buffer, 0, buffer.length);
+    }
+    final String EXPECTED = ("Column vector types: 0:STRUCT, 1:BYTES\n" +
+        "[[0, 2000-01-01 00:00:01], \"value 0\"]\n" +
+        "[[3, 2000-01-01 00:00:02], \"value 1\"]\n" +
+        "[[6, 2000-01-01 00:00:03], \"value 2\"]\n" +
+        "[[9, 2000-01-01 00:00:04], \"value 3\"]\n" +
+        "[[12, 2000-01-01 00:00:05], \"value 4\"]\n" +
+        "[[15, 2000-01-01 00:00:06], \"value 5\"]\n" +
+        "[[18, 2000-01-01 00:00:07], \"value 6\"]\n" +
+        "[[21, 2000-01-01 00:00:08], \"value 7\"]\n" +
+        "[[24, 2000-01-01 00:00:09], \"value 8\"]\n" +
+        "[[27, 2000-01-01 00:00:10], \"value 9\"]");
+    assertEquals(EXPECTED, batch.stringify(""));
+  }
 }
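
For callers that use TimestampColumnVector directly rather than through the struct wrapper in the test above, the new flag would be used roughly as follows; a minimal sketch assuming only the storage-api classes touched by this patch (the class name UtcColumnVectorExample is illustrative):

import java.sql.Timestamp;

import org.apache.hadoop.hive.ql.exec.vector.TimestampColumnVector;

// Usage sketch for the new setIsUTC() flag, assuming storage-api is on the classpath.
public class UtcColumnVectorExample {
  public static void main(String[] args) {
    TimestampColumnVector col = new TimestampColumnVector();
    col.setIsUTC(true);                         // print values in UTC instead of the default zone

    col.set(0, new Timestamp(946684801000L));   // 2000-01-01 00:00:01 UTC

    StringBuilder sb = new StringBuilder();
    col.stringifyValue(sb, 0);                  // appends "2000-01-01 00:00:01"
    System.out.println(sb);
  }
}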
