HBASE-18754 Get rid of Writable from TimeRangeTracker

Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/93bac3de
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/93bac3de
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/93bac3de

Branch: refs/heads/HBASE-18410
Commit: 93bac3de0a5ccd3977fb9d3760efa19481955d80
Parents: 7cdfbde
Author: Chia-Ping Tsai <chia7...@gmail.com>
Authored: Sun Oct 22 17:17:05 2017 +0800
Committer: Chia-Ping Tsai <chia7...@gmail.com>
Committed: Tue Oct 24 14:54:34 2017 +0800

----------------------------------------------------------------------
 .../hbase/mapreduce/TestHFileOutputFormat2.java |  4 +-
 .../src/main/protobuf/HBase.proto               |  5 ++
 .../hbase/io/hfile/HFilePrettyPrinter.java      |  2 +-
 .../hadoop/hbase/regionserver/HStoreFile.java   |  3 +-
 .../hbase/regionserver/StoreFileWriter.java     |  5 +-
 .../hbase/regionserver/TimeRangeTracker.java    | 58 ++++++++++----------
 .../regionserver/compactions/Compactor.java     |  3 +-
 .../TestSimpleTimeRangeTracker.java             | 22 ++++++--
 8 files changed, 59 insertions(+), 43 deletions(-)
----------------------------------------------------------------------
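
For illustration only (not part of the patch): the Hadoop Writable interface is removed from
TimeRangeTracker and replaced by the static helpers toByteArray(TimeRangeTracker) and
parseFrom(byte[]), backed by a new protobuf message. Below is a minimal round-trip sketch in
Java, assuming the patched hbase-server classes and that the pre-existing create(Type) factory
and includeTimestamp(long) are unchanged by this commit (TimeRangeTracker is
@InterfaceAudience.Private, i.e. internal API).

import org.apache.hadoop.hbase.io.TimeRange;
import org.apache.hadoop.hbase.regionserver.TimeRangeTracker;

public class TimeRangeTrackerRoundTrip {
  public static void main(String[] args) throws Exception {
    // Track two cell timestamps, as a flush or compaction writer would.
    TimeRangeTracker src = TimeRangeTracker.create(TimeRangeTracker.Type.NON_SYNC);
    src.includeTimestamp(1000L);
    src.includeTimestamp(2000L);

    // New serialization: protobuf bytes behind a PB magic prefix,
    // replacing WritableUtils.toByteArray(timeRangeTracker).
    byte[] bytes = TimeRangeTracker.toByteArray(src);

    // New deserialization: parseFrom replaces Writables.copyWritable.
    TimeRange range = TimeRangeTracker.parseFrom(bytes).toTimeRange();
    System.out.println(range.getMin() + "...." + range.getMax());  // 1000....2000
  }
}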


http://git-wip-us.apache.org/repos/asf/hbase/blob/93bac3de/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
----------------------------------------------------------------------
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
index f2a3527..372737a 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
@@ -94,7 +94,6 @@ import org.apache.hadoop.hbase.tool.LoadIncrementalHFiles;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hbase.util.ReflectionUtils;
-import org.apache.hadoop.hbase.util.Writables;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.protocol.BlockStoragePolicy;
 import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
@@ -399,8 +398,7 @@ public class TestHFileOutputFormat2  {
       assertNotNull(range);
 
       // unmarshall and check values.
-      TimeRangeTracker timeRangeTracker = TimeRangeTracker.create(TimeRangeTracker.Type.SYNC);
-      Writables.copyWritable(range, timeRangeTracker);
+      TimeRangeTracker timeRangeTracker = TimeRangeTracker.parseFrom(range);
       LOG.info(timeRangeTracker.getMin() +
           "...." + timeRangeTracker.getMax());
       assertEquals(1000, timeRangeTracker.getMin());

http://git-wip-us.apache.org/repos/asf/hbase/blob/93bac3de/hbase-protocol-shaded/src/main/protobuf/HBase.proto
----------------------------------------------------------------------
diff --git a/hbase-protocol-shaded/src/main/protobuf/HBase.proto b/hbase-protocol-shaded/src/main/protobuf/HBase.proto
index 9de897a..cc1ae8f 100644
--- a/hbase-protocol-shaded/src/main/protobuf/HBase.proto
+++ b/hbase-protocol-shaded/src/main/protobuf/HBase.proto
@@ -118,6 +118,11 @@ message TimeRange {
   optional uint64 to = 2;
 }
 
+message TimeRangeTracker {
+  optional uint64 from = 1;
+  optional uint64 to = 2;
+}
+
 /* ColumnFamily Specific TimeRange */
 message ColumnFamilyTimeRange {
   required bytes column_family = 1;

http://git-wip-us.apache.org/repos/asf/hbase/blob/93bac3de/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java
index acb25fc..a800ef1 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java
@@ -539,7 +539,7 @@ public class HFilePrettyPrinter extends Configured implements Tool {
           || Bytes.equals(e.getKey(), HStoreFile.BULKLOAD_TIME_KEY)) {
         out.println(Bytes.toLong(e.getValue()));
       } else if (Bytes.equals(e.getKey(), HStoreFile.TIMERANGE_KEY)) {
-        TimeRangeTracker timeRangeTracker = TimeRangeTracker.getTimeRangeTracker(e.getValue());
+        TimeRangeTracker timeRangeTracker = TimeRangeTracker.parseFrom(e.getValue());
         out.println(timeRangeTracker.getMin() + "...." + timeRangeTracker.getMax());
       } else if (Bytes.equals(e.getKey(), FileInfo.AVG_KEY_LEN)
           || Bytes.equals(e.getKey(), FileInfo.AVG_VALUE_LEN)

http://git-wip-us.apache.org/repos/asf/hbase/blob/93bac3de/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStoreFile.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStoreFile.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStoreFile.java
index 0ca01a5..b405c86 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStoreFile.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStoreFile.java
@@ -439,7 +439,8 @@ public class HStoreFile implements StoreFile {
     reader.loadBloomfilter(BlockType.DELETE_FAMILY_BLOOM_META);
 
     try {
-      this.reader.timeRange = TimeRangeTracker.getTimeRange(metadataMap.get(TIMERANGE_KEY));
+      byte[] data = metadataMap.get(TIMERANGE_KEY);
+      this.reader.timeRange = data == null ? null : TimeRangeTracker.parseFrom(data).toTimeRange();
     } catch (IllegalArgumentException e) {
       LOG.error("Error reading timestamp range data from meta -- " +
           "proceeding without", e);

http://git-wip-us.apache.org/repos/asf/hbase/blob/93bac3de/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileWriter.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileWriter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileWriter.java
index 29bd3af..5fc96ef 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileWriter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileWriter.java
@@ -52,7 +52,6 @@ import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hbase.util.RowBloomContext;
 import org.apache.hadoop.hbase.util.RowColBloomContext;
-import org.apache.hadoop.io.WritableUtils;
 import org.apache.yetus.audience.InterfaceAudience;
 
 import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
@@ -184,7 +183,9 @@ public class StoreFileWriter implements CellSink, ShipperListener {
    * Add TimestampRange and earliest put timestamp to Metadata
    */
   public void appendTrackedTimestampsToMetadata() throws IOException {
-    appendFileInfo(TIMERANGE_KEY, WritableUtils.toByteArray(timeRangeTracker));
+    // TODO: The StoreFileReader always converts the byte[] to TimeRange
+    // via TimeRangeTracker, so we should write the serialization data of TimeRange directly.
+    appendFileInfo(TIMERANGE_KEY, TimeRangeTracker.toByteArray(timeRangeTracker));
     appendFileInfo(EARLIEST_PUT_TS, Bytes.toBytes(earliestPutTs));
   }
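
For illustration only (not part of the patch): the TODO above suggests eventually writing the
serialized form of TimeRange itself rather than the TimeRangeTracker wrapper. Below is a
hypothetical sketch of that alternative, reusing ProtobufUtil.prependPBMagic and the existing
HBaseProtos.TimeRange message from HBase.proto; the helper name writeTimeRange is invented here.

import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;

public class TimeRangeDirectly {
  // Hypothetical helper: serialize the store file's time range as an HBaseProtos.TimeRange,
  // skipping the TimeRangeTracker wrapper message that this commit introduces.
  static byte[] writeTimeRange(long min, long max) {
    return ProtobufUtil.prependPBMagic(
        HBaseProtos.TimeRange.newBuilder()
            .setFrom(min)
            .setTo(max)
            .build()
            .toByteArray());
  }

  public static void main(String[] args) {
    System.out.println(writeTimeRange(1000L, 2000L).length + " bytes");
  }
}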
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/93bac3de/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/TimeRangeTracker.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/TimeRangeTracker.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/TimeRangeTracker.java
index cefbd9a..08d9853 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/TimeRangeTracker.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/TimeRangeTracker.java
@@ -18,19 +18,20 @@
  */
 package org.apache.hadoop.hbase.regionserver;
 
-import java.io.DataInput;
-import java.io.DataOutput;
+import java.io.ByteArrayInputStream;
+import java.io.DataInputStream;
 import java.io.IOException;
 import java.util.concurrent.atomic.AtomicLong;
 
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.io.TimeRange;
-import org.apache.hadoop.hbase.util.Writables;
-import org.apache.hadoop.io.Writable;
 import org.apache.yetus.audience.InterfaceAudience;
 
 import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
+import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
+import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
 
 /**
  * Stores minimum and maximum timestamp values, it is [minimumTimestamp, maximumTimestamp] in
@@ -44,7 +45,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
  * at read time via an instance of {@link TimeRange} to test if Cells fit the StoreFile TimeRange.
  */
 @InterfaceAudience.Private
-public abstract class TimeRangeTracker implements Writable {
+public abstract class TimeRangeTracker {
 
   public enum Type {
     // thread-unsafe
@@ -175,42 +176,41 @@ public abstract class TimeRangeTracker implements Writable {
    */
   public abstract long getMax();
 
-  public void write(final DataOutput out) throws IOException {
-    out.writeLong(getMin());
-    out.writeLong(getMax());
-  }
-
-  public void readFields(final DataInput in) throws IOException {
-    setMin(in.readLong());
-    setMax(in.readLong());
-  }
-
   @Override
   public String toString() {
     return "[" + getMin() + "," + getMax() + "]";
   }
 
   /**
+   * @param data the serialization data. It can't be null!
   * @return An instance of NonSyncTimeRangeTracker filled w/ the content of serialized
    * NonSyncTimeRangeTracker in <code>timeRangeTrackerBytes</code>.
    * @throws IOException
    */
-  public static TimeRangeTracker getTimeRangeTracker(final byte [] timeRangeTrackerBytes)
-  throws IOException {
-    if (timeRangeTrackerBytes == null) return null;
-    TimeRangeTracker trt = TimeRangeTracker.create(Type.NON_SYNC);
-    Writables.copyWritable(timeRangeTrackerBytes, trt);
-    return trt;
+  public static TimeRangeTracker parseFrom(final byte[] data) throws IOException {
+    return parseFrom(data, Type.NON_SYNC);
   }
 
-  /**
-   * @return An instance of a TimeRange made from the serialized TimeRangeTracker passed in
-   * <code>timeRangeTrackerBytes</code>.
-   * @throws IOException
-   */
-  static TimeRange getTimeRange(final byte [] timeRangeTrackerBytes) throws IOException {
-    TimeRangeTracker trt = getTimeRangeTracker(timeRangeTrackerBytes);
-    return trt == null? null: trt.toTimeRange();
+  public static TimeRangeTracker parseFrom(final byte[] data, Type type) throws IOException {
+    Preconditions.checkNotNull(data, "input data is null!");
+    if (ProtobufUtil.isPBMagicPrefix(data)) {
+      int pblen = ProtobufUtil.lengthOfPBMagic();
+      HBaseProtos.TimeRangeTracker.Builder builder = HBaseProtos.TimeRangeTracker.newBuilder();
+      ProtobufUtil.mergeFrom(builder, data, pblen, data.length - pblen);
+      return TimeRangeTracker.create(type, builder.getFrom(), builder.getTo());
+    } else {
+      DataInputStream in = new DataInputStream(new ByteArrayInputStream(data));
+      return TimeRangeTracker.create(type, in.readLong(), in.readLong());
+    }
+  }
+
+  public static byte[] toByteArray(TimeRangeTracker tracker) {
+    return ProtobufUtil.prependPBMagic(
+        HBaseProtos.TimeRangeTracker.newBuilder()
+          .setFrom(tracker.getMin())
+          .setTo(tracker.getMax())
+          .build()
+          .toByteArray());
   }
 
   /**
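
For illustration only (not part of the patch): parseFrom above accepts both encodings, so
TIMERANGE_KEY metadata written by the old Writable code (two raw longs) and by the new
toByteArray (PB magic prefix plus the TimeRangeTracker message) deserialize the same way. A
sketch against the patched class, mirroring testLegacySerialization further down:

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;

import org.apache.hadoop.hbase.regionserver.TimeRangeTracker;

public class ParseFromCompat {
  public static void main(String[] args) throws Exception {
    // Legacy layout: two raw longs (min, max), as the removed write(DataOutput) emitted them.
    ByteArrayOutputStream legacy = new ByteArrayOutputStream();
    try (DataOutputStream out = new DataOutputStream(legacy)) {
      out.writeLong(100L);
      out.writeLong(200L);
    }
    TimeRangeTracker fromLegacy = TimeRangeTracker.parseFrom(legacy.toByteArray());

    // New layout: PB magic prefix followed by the HBaseProtos.TimeRangeTracker message.
    byte[] pb = TimeRangeTracker.toByteArray(fromLegacy);
    TimeRangeTracker fromPb = TimeRangeTracker.parseFrom(pb);

    // Both print "[100,200]" via TimeRangeTracker#toString().
    System.out.println(fromLegacy + " and " + fromPb);
  }
}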

http://git-wip-us.apache.org/repos/asf/hbase/blob/93bac3de/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java
index f9efd98..dc1f41c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java
@@ -197,8 +197,7 @@ public abstract class Compactor<T extends CellSink> {
         }
       }
       tmp = fileInfo.get(TIMERANGE_KEY);
-      TimeRangeTracker trt = TimeRangeTracker.getTimeRangeTracker(tmp);
-      fd.latestPutTs = trt == null? HConstants.LATEST_TIMESTAMP: trt.getMax();
+      fd.latestPutTs = tmp == null ? HConstants.LATEST_TIMESTAMP: TimeRangeTracker.parseFrom(tmp).getMax();
       if (LOG.isDebugEnabled()) {
         LOG.debug("Compacting " + file +
           ", keycount=" + keyCount +

http://git-wip-us.apache.org/repos/asf/hbase/blob/93bac3de/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSimpleTimeRangeTracker.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSimpleTimeRangeTracker.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSimpleTimeRangeTracker.java
index 66b936a..b660366 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSimpleTimeRangeTracker.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSimpleTimeRangeTracker.java
@@ -21,12 +21,14 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 
+import java.io.DataOutputStream;
 import java.io.IOException;
+import java.util.Optional;
 
+import org.apache.hadoop.hbase.io.ByteArrayOutputStream;
 import org.apache.hadoop.hbase.io.TimeRange;
 import org.apache.hadoop.hbase.testclassification.RegionServerTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.apache.hadoop.hbase.util.Writables;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
@@ -62,8 +64,8 @@ public class TestSimpleTimeRangeTracker {
   @Test
   public void testTimeRangeTrackerNullIsSameAsTimeRangeNull() throws IOException {
     TimeRangeTracker src = getTimeRangeTracker(1, 2);
-    byte [] bytes = Writables.getBytes(src);
-    TimeRange tgt = TimeRangeTracker.getTimeRange(bytes);
+    byte[] bytes = TimeRangeTracker.toByteArray(src);
+    TimeRange tgt = TimeRangeTracker.parseFrom(bytes).toTimeRange();
     assertEquals(src.getMin(), tgt.getMin());
     assertEquals(src.getMax(), tgt.getMax());
   }
@@ -71,13 +73,23 @@ public class TestSimpleTimeRangeTracker {
   @Test
   public void testSerialization() throws IOException {
     TimeRangeTracker src = getTimeRangeTracker(1, 2);
-    TimeRangeTracker tgt = getTimeRangeTracker();
-    Writables.copyWritable(src, tgt);
+    TimeRangeTracker tgt = TimeRangeTracker.parseFrom(TimeRangeTracker.toByteArray(src));
     assertEquals(src.getMin(), tgt.getMin());
     assertEquals(src.getMax(), tgt.getMax());
   }
 
   @Test
+  public void testLegacySerialization() throws IOException {
+    ByteArrayOutputStream data = new ByteArrayOutputStream();
+    DataOutputStream output = new DataOutputStream(data);
+    output.writeLong(100);
+    output.writeLong(200);
+    TimeRangeTracker tgt = TimeRangeTracker.parseFrom(data.toByteArray());
+    assertEquals(100, tgt.getMin());
+    assertEquals(200, tgt.getMax());
+  }
+
+  @Test
   public void testAlwaysDecrementingSetsMaximum() {
     TimeRangeTracker trr = getTimeRangeTracker();
     trr.includeTimestamp(3);
