http://git-wip-us.apache.org/repos/asf/hbase/blob/b4a729ed/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/WALProtos.java
----------------------------------------------------------------------
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/WALProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/WALProtos.java
index 9513ccb..e032be7 100644
--- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/WALProtos.java
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/WALProtos.java
@@ -6,7 +6,13 @@ package org.apache.hadoop.hbase.shaded.protobuf.generated;
 public final class WALProtos {
   private WALProtos() {}
   public static void registerAllExtensions(
+      com.google.protobuf.ExtensionRegistryLite registry) {
+  }
+
+  public static void registerAllExtensions(
       com.google.protobuf.ExtensionRegistry registry) {
+    registerAllExtensions(
+        (com.google.protobuf.ExtensionRegistryLite) registry);
   }
   /**
    * Protobuf enum {@code hbase.pb.ScopeType}
@@ -16,15 +22,15 @@ public final class WALProtos {
     /**
      * <code>REPLICATION_SCOPE_LOCAL = 0;</code>
      */
-    REPLICATION_SCOPE_LOCAL(0, 0),
+    REPLICATION_SCOPE_LOCAL(0),
     /**
      * <code>REPLICATION_SCOPE_GLOBAL = 1;</code>
      */
-    REPLICATION_SCOPE_GLOBAL(1, 1),
+    REPLICATION_SCOPE_GLOBAL(1),
     /**
      * <code>REPLICATION_SCOPE_SERIAL = 2;</code>
      */
-    REPLICATION_SCOPE_SERIAL(2, 2),
+    REPLICATION_SCOPE_SERIAL(2),
     ;
 
     /**
@@ -41,9 +47,19 @@ public final class WALProtos {
     public static final int REPLICATION_SCOPE_SERIAL_VALUE = 2;
 
 
-    public final int getNumber() { return value; }
+    public final int getNumber() {
+      return value;
+    }
 
+    /**
+     * @deprecated Use {@link #forNumber(int)} instead.
+     */
+    @java.lang.Deprecated
     public static ScopeType valueOf(int value) {
+      return forNumber(value);
+    }
+
+    public static ScopeType forNumber(int value) {
       switch (value) {
         case 0: return REPLICATION_SCOPE_LOCAL;
         case 1: return REPLICATION_SCOPE_GLOBAL;
@@ -56,17 +72,17 @@ public final class WALProtos {
         internalGetValueMap() {
       return internalValueMap;
     }
-    private static com.google.protobuf.Internal.EnumLiteMap<ScopeType>
-        internalValueMap =
+    private static final com.google.protobuf.Internal.EnumLiteMap<
+        ScopeType> internalValueMap =
           new com.google.protobuf.Internal.EnumLiteMap<ScopeType>() {
             public ScopeType findValueByNumber(int number) {
-              return ScopeType.valueOf(number);
+              return ScopeType.forNumber(number);
             }
           };
 
     public final com.google.protobuf.Descriptors.EnumValueDescriptor
         getValueDescriptor() {
-      return getDescriptor().getValues().get(index);
+      return getDescriptor().getValues().get(ordinal());
     }
     public final com.google.protobuf.Descriptors.EnumDescriptor
         getDescriptorForType() {
@@ -88,21 +104,19 @@ public final class WALProtos {
       return VALUES[desc.getIndex()];
     }
 
-    private final int index;
     private final int value;
 
-    private ScopeType(int index, int value) {
-      this.index = index;
+    private ScopeType(int value) {
       this.value = value;
     }
 
     // @@protoc_insertion_point(enum_scope:hbase.pb.ScopeType)
   }
 
-  public interface WALHeaderOrBuilder
-      extends com.google.protobuf.MessageOrBuilder {
+  public interface WALHeaderOrBuilder extends
+      // @@protoc_insertion_point(interface_extends:hbase.pb.WALHeader)
+      com.google.protobuf.MessageOrBuilder {
 
-    // optional bool has_compression = 1;
     /**
      * <code>optional bool has_compression = 1;</code>
      */
@@ -112,7 +126,6 @@ public final class WALProtos {
      */
     boolean getHasCompression();
 
-    // optional bytes encryption_key = 2;
     /**
      * <code>optional bytes encryption_key = 2;</code>
      */
@@ -122,7 +135,6 @@ public final class WALProtos {
      */
     com.google.protobuf.ByteString getEncryptionKey();
 
-    // optional bool has_tag_compression = 3;
     /**
      * <code>optional bool has_tag_compression = 3;</code>
      */
@@ -132,7 +144,6 @@ public final class WALProtos {
      */
     boolean getHasTagCompression();
 
-    // optional string writer_cls_name = 4;
     /**
      * <code>optional string writer_cls_name = 4;</code>
      */
@@ -147,7 +158,6 @@ public final class WALProtos {
     com.google.protobuf.ByteString
         getWriterClsNameBytes();
 
-    // optional string cell_codec_cls_name = 5;
     /**
      * <code>optional string cell_codec_cls_name = 5;</code>
      */
@@ -165,36 +175,32 @@ public final class WALProtos {
   /**
    * Protobuf type {@code hbase.pb.WALHeader}
    */
-  public static final class WALHeader extends
-      com.google.protobuf.GeneratedMessage
-      implements WALHeaderOrBuilder {
+  public  static final class WALHeader extends
+      com.google.protobuf.GeneratedMessageV3 implements
+      // @@protoc_insertion_point(message_implements:hbase.pb.WALHeader)
+      WALHeaderOrBuilder {
     // Use WALHeader.newBuilder() to construct.
-    private WALHeader(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+    private WALHeader(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
       super(builder);
-      this.unknownFields = builder.getUnknownFields();
     }
-    private WALHeader(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
-
-    private static final WALHeader defaultInstance;
-    public static WALHeader getDefaultInstance() {
-      return defaultInstance;
-    }
-
-    public WALHeader getDefaultInstanceForType() {
-      return defaultInstance;
+    private WALHeader() {
+      hasCompression_ = false;
+      encryptionKey_ = com.google.protobuf.ByteString.EMPTY;
+      hasTagCompression_ = false;
+      writerClsName_ = "";
+      cellCodecClsName_ = "";
     }
 
-    private final com.google.protobuf.UnknownFieldSet unknownFields;
     @java.lang.Override
     public final com.google.protobuf.UnknownFieldSet
-        getUnknownFields() {
+    getUnknownFields() {
       return this.unknownFields;
     }
     private WALHeader(
         com.google.protobuf.CodedInputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      initFields();
+      this();
       int mutable_bitField0_ = 0;
       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
           com.google.protobuf.UnknownFieldSet.newBuilder();
@@ -229,13 +235,15 @@ public final class WALProtos {
               break;
             }
             case 34: {
+              com.google.protobuf.ByteString bs = input.readBytes();
               bitField0_ |= 0x00000008;
-              writerClsName_ = input.readBytes();
+              writerClsName_ = bs;
               break;
             }
             case 42: {
+              com.google.protobuf.ByteString bs = input.readBytes();
               bitField0_ |= 0x00000010;
-              cellCodecClsName_ = input.readBytes();
+              cellCodecClsName_ = bs;
               break;
             }
           }
@@ -244,7 +252,7 @@ public final class WALProtos {
         throw e.setUnfinishedMessage(this);
       } catch (java.io.IOException e) {
         throw new com.google.protobuf.InvalidProtocolBufferException(
-            e.getMessage()).setUnfinishedMessage(this);
+            e).setUnfinishedMessage(this);
       } finally {
         this.unknownFields = unknownFields.build();
         makeExtensionsImmutable();
@@ -255,30 +263,14 @@ public final class WALProtos {
      return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_WALHeader_descriptor;
     }
 
-    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
         internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_WALHeader_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader.class, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader.Builder.class);
     }
 
-    public static com.google.protobuf.Parser<WALHeader> PARSER =
-        new com.google.protobuf.AbstractParser<WALHeader>() {
-      public WALHeader parsePartialFrom(
-          com.google.protobuf.CodedInputStream input,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        return new WALHeader(input, extensionRegistry);
-      }
-    };
-
-    @java.lang.Override
-    public com.google.protobuf.Parser<WALHeader> getParserForType() {
-      return PARSER;
-    }
-
     private int bitField0_;
-    // optional bool has_compression = 1;
     public static final int HAS_COMPRESSION_FIELD_NUMBER = 1;
     private boolean hasCompression_;
     /**
@@ -294,7 +286,6 @@ public final class WALProtos {
       return hasCompression_;
     }
 
-    // optional bytes encryption_key = 2;
     public static final int ENCRYPTION_KEY_FIELD_NUMBER = 2;
     private com.google.protobuf.ByteString encryptionKey_;
     /**
@@ -310,7 +301,6 @@ public final class WALProtos {
       return encryptionKey_;
     }
 
-    // optional bool has_tag_compression = 3;
     public static final int HAS_TAG_COMPRESSION_FIELD_NUMBER = 3;
     private boolean hasTagCompression_;
     /**
@@ -326,9 +316,8 @@ public final class WALProtos {
       return hasTagCompression_;
     }
 
-    // optional string writer_cls_name = 4;
     public static final int WRITER_CLS_NAME_FIELD_NUMBER = 4;
-    private java.lang.Object writerClsName_;
+    private volatile java.lang.Object writerClsName_;
     /**
      * <code>optional string writer_cls_name = 4;</code>
      */
@@ -369,9 +358,8 @@ public final class WALProtos {
       }
     }
 
-    // optional string cell_codec_cls_name = 5;
     public static final int CELL_CODEC_CLS_NAME_FIELD_NUMBER = 5;
-    private java.lang.Object cellCodecClsName_;
+    private volatile java.lang.Object cellCodecClsName_;
     /**
      * <code>optional string cell_codec_cls_name = 5;</code>
      */
@@ -412,17 +400,11 @@ public final class WALProtos {
       }
     }
 
-    private void initFields() {
-      hasCompression_ = false;
-      encryptionKey_ = com.google.protobuf.ByteString.EMPTY;
-      hasTagCompression_ = false;
-      writerClsName_ = "";
-      cellCodecClsName_ = "";
-    }
     private byte memoizedIsInitialized = -1;
     public final boolean isInitialized() {
       byte isInitialized = memoizedIsInitialized;
-      if (isInitialized != -1) return isInitialized == 1;
+      if (isInitialized == 1) return true;
+      if (isInitialized == 0) return false;
 
       memoizedIsInitialized = 1;
       return true;
@@ -430,7 +412,6 @@ public final class WALProtos {
 
     public void writeTo(com.google.protobuf.CodedOutputStream output)
                         throws java.io.IOException {
-      getSerializedSize();
       if (((bitField0_ & 0x00000001) == 0x00000001)) {
         output.writeBool(1, hasCompression_);
       }
@@ -441,17 +422,16 @@ public final class WALProtos {
         output.writeBool(3, hasTagCompression_);
       }
       if (((bitField0_ & 0x00000008) == 0x00000008)) {
-        output.writeBytes(4, getWriterClsNameBytes());
+        com.google.protobuf.GeneratedMessageV3.writeString(output, 4, writerClsName_);
       }
       if (((bitField0_ & 0x00000010) == 0x00000010)) {
-        output.writeBytes(5, getCellCodecClsNameBytes());
+        com.google.protobuf.GeneratedMessageV3.writeString(output, 5, cellCodecClsName_);
       }
-      getUnknownFields().writeTo(output);
+      unknownFields.writeTo(output);
     }
 
-    private int memoizedSerializedSize = -1;
     public int getSerializedSize() {
-      int size = memoizedSerializedSize;
+      int size = memoizedSize;
       if (size != -1) return size;
 
       size = 0;
@@ -468,26 +448,18 @@ public final class WALProtos {
           .computeBoolSize(3, hasTagCompression_);
       }
       if (((bitField0_ & 0x00000008) == 0x00000008)) {
-        size += com.google.protobuf.CodedOutputStream
-          .computeBytesSize(4, getWriterClsNameBytes());
+        size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, writerClsName_);
       }
       if (((bitField0_ & 0x00000010) == 0x00000010)) {
-        size += com.google.protobuf.CodedOutputStream
-          .computeBytesSize(5, getCellCodecClsNameBytes());
+        size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, cellCodecClsName_);
       }
-      size += getUnknownFields().getSerializedSize();
-      memoizedSerializedSize = size;
+      size += unknownFields.getSerializedSize();
+      memoizedSize = size;
       return size;
     }
 
     private static final long serialVersionUID = 0L;
     @java.lang.Override
-    protected java.lang.Object writeReplace()
-        throws java.io.ObjectStreamException {
-      return super.writeReplace();
-    }
-
-    @java.lang.Override
     public boolean equals(final java.lang.Object obj) {
       if (obj == this) {
        return true;
@@ -523,12 +495,10 @@ public final class WALProtos {
         result = result && getCellCodecClsName()
             .equals(other.getCellCodecClsName());
       }
-      result = result &&
-          getUnknownFields().equals(other.getUnknownFields());
+      result = result && unknownFields.equals(other.unknownFields);
       return result;
     }
 
-    private int memoizedHashCode = 0;
     @java.lang.Override
     public int hashCode() {
       if (memoizedHashCode != 0) {
@@ -538,7 +508,8 @@ public final class WALProtos {
       hash = (19 * hash) + getDescriptorForType().hashCode();
       if (hasHasCompression()) {
         hash = (37 * hash) + HAS_COMPRESSION_FIELD_NUMBER;
-        hash = (53 * hash) + hashBoolean(getHasCompression());
+        hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(
+            getHasCompression());
       }
       if (hasEncryptionKey()) {
         hash = (37 * hash) + ENCRYPTION_KEY_FIELD_NUMBER;
@@ -546,7 +517,8 @@ public final class WALProtos {
       }
       if (hasHasTagCompression()) {
         hash = (37 * hash) + HAS_TAG_COMPRESSION_FIELD_NUMBER;
-        hash = (53 * hash) + hashBoolean(getHasTagCompression());
+        hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(
+            getHasTagCompression());
       }
       if (hasWriterClsName()) {
         hash = (37 * hash) + WRITER_CLS_NAME_FIELD_NUMBER;
@@ -556,7 +528,7 @@ public final class WALProtos {
         hash = (37 * hash) + CELL_CODEC_CLS_NAME_FIELD_NUMBER;
         hash = (53 * hash) + getCellCodecClsName().hashCode();
       }
-      hash = (29 * hash) + getUnknownFields().hashCode();
+      hash = (29 * hash) + unknownFields.hashCode();
       memoizedHashCode = hash;
       return hash;
     }
@@ -584,46 +556,57 @@ public final class WALProtos {
     }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader parseFrom(java.io.InputStream input)
         throws java.io.IOException {
-      return PARSER.parseFrom(input);
+      return com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input);
     }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader parseFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      return PARSER.parseFrom(input, extensionRegistry);
+      return com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input, extensionRegistry);
     }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader parseDelimitedFrom(java.io.InputStream input)
         throws java.io.IOException {
-      return PARSER.parseDelimitedFrom(input);
+      return com.google.protobuf.GeneratedMessageV3
+          .parseDelimitedWithIOException(PARSER, input);
     }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader parseDelimitedFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      return PARSER.parseDelimitedFrom(input, extensionRegistry);
+      return com.google.protobuf.GeneratedMessageV3
+          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
     }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader parseFrom(
         com.google.protobuf.CodedInputStream input)
         throws java.io.IOException {
-      return PARSER.parseFrom(input);
+      return com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input);
     }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader parseFrom(
         com.google.protobuf.CodedInputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      return PARSER.parseFrom(input, extensionRegistry);
+      return com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input, extensionRegistry);
     }
 
-    public static Builder newBuilder() { return Builder.create(); }
     public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder() {
+      return DEFAULT_INSTANCE.toBuilder();
+    }
    public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader prototype) {
-      return newBuilder().mergeFrom(prototype);
+      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() {
+      return this == DEFAULT_INSTANCE
+          ? new Builder() : new Builder().mergeFrom(this);
     }
-    public Builder toBuilder() { return newBuilder(this); }
 
     @java.lang.Override
     protected Builder newBuilderForType(
-        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
       Builder builder = new Builder(parent);
       return builder;
     }
@@ -631,14 +614,15 @@ public final class WALProtos {
      * Protobuf type {@code hbase.pb.WALHeader}
      */
     public static final class Builder extends
-        com.google.protobuf.GeneratedMessage.Builder<Builder>
-       implements org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeaderOrBuilder {
+        com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
+        // @@protoc_insertion_point(builder_implements:hbase.pb.WALHeader)
+        org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeaderOrBuilder {
       public static final com.google.protobuf.Descriptors.Descriptor
           getDescriptor() {
        return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_WALHeader_descriptor;
       }
 
-      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
           internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_WALHeader_fieldAccessorTable
             .ensureFieldAccessorsInitialized(
@@ -651,18 +635,15 @@ public final class WALProtos {
       }
 
       private Builder(
-          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+          com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
         super(parent);
         maybeForceBuilderInitialization();
       }
       private void maybeForceBuilderInitialization() {
-        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+        if (com.google.protobuf.GeneratedMessageV3
+                .alwaysUseFieldBuilders) {
         }
       }
-      private static Builder create() {
-        return new Builder();
-      }
-
       public Builder clear() {
         super.clear();
         hasCompression_ = false;
@@ -678,10 +659,6 @@ public final class WALProtos {
         return this;
       }
 
-      public Builder clone() {
-        return create().mergeFrom(buildPartial());
-      }
-
       public com.google.protobuf.Descriptors.Descriptor
           getDescriptorForType() {
        return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_WALHeader_descriptor;
@@ -728,6 +705,32 @@ public final class WALProtos {
         return result;
       }
 
+      public Builder clone() {
+        return (Builder) super.clone();
+      }
+      public Builder setField(
+          com.google.protobuf.Descriptors.FieldDescriptor field,
+          Object value) {
+        return (Builder) super.setField(field, value);
+      }
+      public Builder clearField(
+          com.google.protobuf.Descriptors.FieldDescriptor field) {
+        return (Builder) super.clearField(field);
+      }
+      public Builder clearOneof(
+          com.google.protobuf.Descriptors.OneofDescriptor oneof) {
+        return (Builder) super.clearOneof(oneof);
+      }
+      public Builder setRepeatedField(
+          com.google.protobuf.Descriptors.FieldDescriptor field,
+          int index, Object value) {
+        return (Builder) super.setRepeatedField(field, index, value);
+      }
+      public Builder addRepeatedField(
+          com.google.protobuf.Descriptors.FieldDescriptor field,
+          Object value) {
+        return (Builder) super.addRepeatedField(field, value);
+      }
       public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader) {
          return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader)other);
@@ -758,7 +761,8 @@ public final class WALProtos {
           cellCodecClsName_ = other.cellCodecClsName_;
           onChanged();
         }
-        this.mergeUnknownFields(other.getUnknownFields());
+        this.mergeUnknownFields(other.unknownFields);
+        onChanged();
         return this;
       }
 
@@ -775,7 +779,7 @@ public final class WALProtos {
           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader) e.getUnfinishedMessage();
-          throw e;
+          throw e.unwrapIOException();
         } finally {
           if (parsedMessage != null) {
             mergeFrom(parsedMessage);
@@ -785,7 +789,6 @@ public final class WALProtos {
       }
       private int bitField0_;
 
-      // optional bool has_compression = 1;
       private boolean hasCompression_ ;
       /**
        * <code>optional bool has_compression = 1;</code>
@@ -818,7 +821,6 @@ public final class WALProtos {
         return this;
       }
 
-      // optional bytes encryption_key = 2;
      private com.google.protobuf.ByteString encryptionKey_ = com.google.protobuf.ByteString.EMPTY;
       /**
        * <code>optional bytes encryption_key = 2;</code>
@@ -854,7 +856,6 @@ public final class WALProtos {
         return this;
       }
 
-      // optional bool has_tag_compression = 3;
       private boolean hasTagCompression_ ;
       /**
        * <code>optional bool has_tag_compression = 3;</code>
@@ -887,7 +888,6 @@ public final class WALProtos {
         return this;
       }
 
-      // optional string writer_cls_name = 4;
       private java.lang.Object writerClsName_ = "";
       /**
        * <code>optional string writer_cls_name = 4;</code>
@@ -901,9 +901,12 @@ public final class WALProtos {
       public java.lang.String getWriterClsName() {
         java.lang.Object ref = writerClsName_;
         if (!(ref instanceof java.lang.String)) {
-          java.lang.String s = ((com.google.protobuf.ByteString) ref)
-              .toStringUtf8();
-          writerClsName_ = s;
+          com.google.protobuf.ByteString bs =
+              (com.google.protobuf.ByteString) ref;
+          java.lang.String s = bs.toStringUtf8();
+          if (bs.isValidUtf8()) {
+            writerClsName_ = s;
+          }
           return s;
         } else {
           return (java.lang.String) ref;
@@ -961,7 +964,6 @@ public final class WALProtos {
         return this;
       }
 
-      // optional string cell_codec_cls_name = 5;
       private java.lang.Object cellCodecClsName_ = "";
       /**
        * <code>optional string cell_codec_cls_name = 5;</code>
@@ -975,9 +977,12 @@ public final class WALProtos {
       public java.lang.String getCellCodecClsName() {
         java.lang.Object ref = cellCodecClsName_;
         if (!(ref instanceof java.lang.String)) {
-          java.lang.String s = ((com.google.protobuf.ByteString) ref)
-              .toStringUtf8();
-          cellCodecClsName_ = s;
+          com.google.protobuf.ByteString bs =
+              (com.google.protobuf.ByteString) ref;
+          java.lang.String s = bs.toStringUtf8();
+          if (bs.isValidUtf8()) {
+            cellCodecClsName_ = s;
+          }
           return s;
         } else {
           return (java.lang.String) ref;
@@ -1034,22 +1039,59 @@ public final class WALProtos {
         onChanged();
         return this;
       }
+      public final Builder setUnknownFields(
+          final com.google.protobuf.UnknownFieldSet unknownFields) {
+        return super.setUnknownFields(unknownFields);
+      }
+
+      public final Builder mergeUnknownFields(
+          final com.google.protobuf.UnknownFieldSet unknownFields) {
+        return super.mergeUnknownFields(unknownFields);
+      }
+
 
       // @@protoc_insertion_point(builder_scope:hbase.pb.WALHeader)
     }
 
+    // @@protoc_insertion_point(class_scope:hbase.pb.WALHeader)
+    private static final org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader DEFAULT_INSTANCE;
     static {
-      defaultInstance = new WALHeader(true);
-      defaultInstance.initFields();
+      DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader();
+    }
+
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader getDefaultInstance() {
+      return DEFAULT_INSTANCE;
+    }
+
+    @java.lang.Deprecated public static final com.google.protobuf.Parser<WALHeader>
+        PARSER = new com.google.protobuf.AbstractParser<WALHeader>() {
+      public WALHeader parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+          return new WALHeader(input, extensionRegistry);
+      }
+    };
+
+    public static com.google.protobuf.Parser<WALHeader> parser() {
+      return PARSER;
+    }
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<WALHeader> getParserForType() {
+      return PARSER;
+    }
+
+    public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALHeader getDefaultInstanceForType() {
+      return DEFAULT_INSTANCE;
     }
 
-    // @@protoc_insertion_point(class_scope:hbase.pb.WALHeader)
   }
 
-  public interface WALKeyOrBuilder
-      extends com.google.protobuf.MessageOrBuilder {
+  public interface WALKeyOrBuilder extends
+      // @@protoc_insertion_point(interface_extends:hbase.pb.WALKey)
+      com.google.protobuf.MessageOrBuilder {
 
-    // required bytes encoded_region_name = 1;
     /**
      * <code>required bytes encoded_region_name = 1;</code>
      */
@@ -1059,7 +1101,6 @@ public final class WALProtos {
      */
     com.google.protobuf.ByteString getEncodedRegionName();
 
-    // required bytes table_name = 2;
     /**
      * <code>required bytes table_name = 2;</code>
      */
@@ -1069,7 +1110,6 @@ public final class WALProtos {
      */
     com.google.protobuf.ByteString getTableName();
 
-    // required uint64 log_sequence_number = 3;
     /**
      * <code>required uint64 log_sequence_number = 3;</code>
      */
@@ -1079,7 +1119,6 @@ public final class WALProtos {
      */
     long getLogSequenceNumber();
 
-    // required uint64 write_time = 4;
     /**
      * <code>required uint64 write_time = 4;</code>
      */
@@ -1089,45 +1128,40 @@ public final class WALProtos {
      */
     long getWriteTime();
 
-    // optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];
     /**
-     * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];</code>
-     *
      * <pre>
-     *
      *This parameter is deprecated in favor of clusters which
      *contains the list of clusters that have consumed the change.
      *It is retained so that the log created by earlier releases (0.94)
      *can be read by the newer releases.
      * </pre>
+     *
+     * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];</code>
      */
     @java.lang.Deprecated boolean hasClusterId();
     /**
-     * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];</code>
-     *
      * <pre>
-     *
      *This parameter is deprecated in favor of clusters which
      *contains the list of clusters that have consumed the change.
      *It is retained so that the log created by earlier releases (0.94)
      *can be read by the newer releases.
      * </pre>
+     *
+     * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];</code>
      */
    @java.lang.Deprecated org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID getClusterId();
     /**
-     * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];</code>
-     *
      * <pre>
-     *
      *This parameter is deprecated in favor of clusters which
      *contains the list of clusters that have consumed the change.
      *It is retained so that the log created by earlier releases (0.94)
      *can be read by the newer releases.
      * </pre>
+     *
+     * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];</code>
      */
    @java.lang.Deprecated org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUIDOrBuilder getClusterIdOrBuilder();
 
-    // repeated .hbase.pb.FamilyScope scopes = 6;
     /**
      * <code>repeated .hbase.pb.FamilyScope scopes = 6;</code>
      */
@@ -1152,7 +1186,6 @@ public final class WALProtos {
    org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScopeOrBuilder getScopesOrBuilder(
         int index);
 
-    // optional uint32 following_kv_count = 7;
     /**
      * <code>optional uint32 following_kv_count = 7;</code>
      */
@@ -1162,62 +1195,55 @@ public final class WALProtos {
      */
     int getFollowingKvCount();
 
-    // repeated .hbase.pb.UUID cluster_ids = 8;
     /**
-     * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
-     *
      * <pre>
-     *
      *This field contains the list of clusters that have
      *consumed the change
      * </pre>
+     *
+     * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
     */
    java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID>
         getClusterIdsList();
     /**
-     * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
-     *
      * <pre>
-     *
      *This field contains the list of clusters that have
      *consumed the change
      * </pre>
+     *
+     * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
      */
    org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID getClusterIds(int index);
     /**
-     * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
-     *
      * <pre>
-     *
      *This field contains the list of clusters that have
      *consumed the change
      * </pre>
+     *
+     * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
      */
     int getClusterIdsCount();
     /**
-     * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
-     *
      * <pre>
-     *
      *This field contains the list of clusters that have
      *consumed the change
      * </pre>
+     *
+     * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
      */
    java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUIDOrBuilder>
        getClusterIdsOrBuilderList();
     /**
-     * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
-     *
      * <pre>
-     *
      *This field contains the list of clusters that have
      *consumed the change
      * </pre>
+     *
+     * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
     */
    org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUIDOrBuilder getClusterIdsOrBuilder(
         int index);
 
-    // optional uint64 nonceGroup = 9;
     /**
      * <code>optional uint64 nonceGroup = 9;</code>
      */
@@ -1227,7 +1253,6 @@ public final class WALProtos {
      */
     long getNonceGroup();
 
-    // optional uint64 nonce = 10;
     /**
      * <code>optional uint64 nonce = 10;</code>
      */
@@ -1237,7 +1262,6 @@ public final class WALProtos {
      */
     long getNonce();
 
-    // optional uint64 orig_sequence_number = 11;
     /**
      * <code>optional uint64 orig_sequence_number = 11;</code>
      */
@@ -1248,44 +1272,44 @@ public final class WALProtos {
     long getOrigSequenceNumber();
   }
   /**
-   * Protobuf type {@code hbase.pb.WALKey}
-   *
    * <pre>
-   *
   * Protocol buffer version of WALKey; see WALKey comment, not really a key but WALEdit header
    * for some KVs
    * </pre>
+   *
+   * Protobuf type {@code hbase.pb.WALKey}
    */
-  public static final class WALKey extends
-      com.google.protobuf.GeneratedMessage
-      implements WALKeyOrBuilder {
+  public  static final class WALKey extends
+      com.google.protobuf.GeneratedMessageV3 implements
+      // @@protoc_insertion_point(message_implements:hbase.pb.WALKey)
+      WALKeyOrBuilder {
     // Use WALKey.newBuilder() to construct.
-    private WALKey(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+    private WALKey(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
       super(builder);
-      this.unknownFields = builder.getUnknownFields();
-    }
-    private WALKey(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
-
-    private static final WALKey defaultInstance;
-    public static WALKey getDefaultInstance() {
-      return defaultInstance;
     }
-
-    public WALKey getDefaultInstanceForType() {
-      return defaultInstance;
+    private WALKey() {
+      encodedRegionName_ = com.google.protobuf.ByteString.EMPTY;
+      tableName_ = com.google.protobuf.ByteString.EMPTY;
+      logSequenceNumber_ = 0L;
+      writeTime_ = 0L;
+      scopes_ = java.util.Collections.emptyList();
+      followingKvCount_ = 0;
+      clusterIds_ = java.util.Collections.emptyList();
+      nonceGroup_ = 0L;
+      nonce_ = 0L;
+      origSequenceNumber_ = 0L;
     }
 
-    private final com.google.protobuf.UnknownFieldSet unknownFields;
     @java.lang.Override
     public final com.google.protobuf.UnknownFieldSet
-        getUnknownFields() {
+    getUnknownFields() {
       return this.unknownFields;
     }
     private WALKey(
         com.google.protobuf.CodedInputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      initFields();
+      this();
       int mutable_bitField0_ = 0;
       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
           com.google.protobuf.UnknownFieldSet.newBuilder();
@@ -1342,7 +1366,8 @@ public final class WALProtos {
                scopes_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope>();
                 mutable_bitField0_ |= 0x00000020;
               }
-              scopes_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope.PARSER, extensionRegistry));
+              scopes_.add(
+                  input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope.PARSER, extensionRegistry));
               break;
             }
             case 56: {
@@ -1355,7 +1380,8 @@ public final class WALProtos {
                clusterIds_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID>();
                 mutable_bitField0_ |= 0x00000080;
               }
-              clusterIds_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.PARSER, extensionRegistry));
+              clusterIds_.add(
+                  input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.PARSER, extensionRegistry));
               break;
             }
             case 72: {
@@ -1379,7 +1405,7 @@ public final class WALProtos {
         throw e.setUnfinishedMessage(this);
       } catch (java.io.IOException e) {
         throw new com.google.protobuf.InvalidProtocolBufferException(
-            e.getMessage()).setUnfinishedMessage(this);
+            e).setUnfinishedMessage(this);
       } finally {
         if (((mutable_bitField0_ & 0x00000020) == 0x00000020)) {
           scopes_ = java.util.Collections.unmodifiableList(scopes_);
@@ -1396,30 +1422,14 @@ public final class WALProtos {
      return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_WALKey_descriptor;
     }
 
-    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
         internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_WALKey_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey.class, org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey.Builder.class);
     }
 
-    public static com.google.protobuf.Parser<WALKey> PARSER =
-        new com.google.protobuf.AbstractParser<WALKey>() {
-      public WALKey parsePartialFrom(
-          com.google.protobuf.CodedInputStream input,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        return new WALKey(input, extensionRegistry);
-      }
-    };
-
-    @java.lang.Override
-    public com.google.protobuf.Parser<WALKey> getParserForType() {
-      return PARSER;
-    }
-
     private int bitField0_;
-    // required bytes encoded_region_name = 1;
     public static final int ENCODED_REGION_NAME_FIELD_NUMBER = 1;
     private com.google.protobuf.ByteString encodedRegionName_;
     /**
@@ -1435,7 +1445,6 @@ public final class WALProtos {
       return encodedRegionName_;
     }
 
-    // required bytes table_name = 2;
     public static final int TABLE_NAME_FIELD_NUMBER = 2;
     private com.google.protobuf.ByteString tableName_;
     /**
@@ -1451,7 +1460,6 @@ public final class WALProtos {
       return tableName_;
     }
 
-    // required uint64 log_sequence_number = 3;
     public static final int LOG_SEQUENCE_NUMBER_FIELD_NUMBER = 3;
     private long logSequenceNumber_;
     /**
@@ -1467,7 +1475,6 @@ public final class WALProtos {
       return logSequenceNumber_;
     }
 
-    // required uint64 write_time = 4;
     public static final int WRITE_TIME_FIELD_NUMBER = 4;
     private long writeTime_;
     /**
@@ -1483,53 +1490,48 @@ public final class WALProtos {
       return writeTime_;
     }
 
-    // optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];
     public static final int CLUSTER_ID_FIELD_NUMBER = 5;
    private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID clusterId_;
     /**
-     * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];</code>
-     *
      * <pre>
-     *
      *This parameter is deprecated in favor of clusters which
      *contains the list of clusters that have consumed the change.
      *It is retained so that the log created by earlier releases (0.94)
      *can be read by the newer releases.
      * </pre>
+     *
+     * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];</code>
      */
     @java.lang.Deprecated public boolean hasClusterId() {
       return ((bitField0_ & 0x00000010) == 0x00000010);
     }
     /**
-     * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];</code>
-     *
      * <pre>
-     *
      *This parameter is deprecated in favor of clusters which
      *contains the list of clusters that have consumed the change.
      *It is retained so that the log created by earlier releases (0.94)
      *can be read by the newer releases.
      * </pre>
+     *
+     * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];</code>
      */
    @java.lang.Deprecated public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID getClusterId() {
-      return clusterId_;
+      return clusterId_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.getDefaultInstance() : clusterId_;
     }
     /**
-     * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];</code>
-     *
      * <pre>
-     *
      *This parameter is deprecated in favor of clusters which
      *contains the list of clusters that have consumed the change.
      *It is retained so that the log created by earlier releases (0.94)
      *can be read by the newer releases.
      * </pre>
+     *
+     * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];</code>
      */
    @java.lang.Deprecated public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUIDOrBuilder getClusterIdOrBuilder() {
-      return clusterId_;
+      return clusterId_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.getDefaultInstance() : clusterId_;
     }
 
-    // repeated .hbase.pb.FamilyScope scopes = 6;
     public static final int SCOPES_FIELD_NUMBER = 6;
    private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope> scopes_;
     /**
@@ -1565,7 +1567,6 @@ public final class WALProtos {
       return scopes_.get(index);
     }
 
-    // optional uint32 following_kv_count = 7;
     public static final int FOLLOWING_KV_COUNT_FIELD_NUMBER = 7;
     private int followingKvCount_;
     /**
@@ -1581,73 +1582,66 @@ public final class WALProtos {
       return followingKvCount_;
     }
 
-    // repeated .hbase.pb.UUID cluster_ids = 8;
     public static final int CLUSTER_IDS_FIELD_NUMBER = 8;
    private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID> clusterIds_;
     /**
-     * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
-     *
      * <pre>
-     *
      *This field contains the list of clusters that have
      *consumed the change
      * </pre>
+     *
+     * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
      */
    public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID> getClusterIdsList() {
       return clusterIds_;
     }
     /**
-     * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
-     *
      * <pre>
-     *
      *This field contains the list of clusters that have
      *consumed the change
      * </pre>
+     *
+     * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
      */
    public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUIDOrBuilder>
        getClusterIdsOrBuilderList() {
       return clusterIds_;
     }
     /**
-     * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
-     *
      * <pre>
-     *
      *This field contains the list of clusters that have
      *consumed the change
      * </pre>
+     *
+     * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
      */
     public int getClusterIdsCount() {
       return clusterIds_.size();
     }
     /**
-     * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
-     *
      * <pre>
-     *
      *This field contains the list of clusters that have
      *consumed the change
      * </pre>
+     *
+     * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
      */
    public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID getClusterIds(int index) {
       return clusterIds_.get(index);
     }
     /**
-     * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
-     *
      * <pre>
-     *
      *This field contains the list of clusters that have
      *consumed the change
      * </pre>
+     *
+     * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
      */
    public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUIDOrBuilder getClusterIdsOrBuilder(
         int index) {
       return clusterIds_.get(index);
     }
 
-    // optional uint64 nonceGroup = 9;
     public static final int NONCEGROUP_FIELD_NUMBER = 9;
     private long nonceGroup_;
     /**
@@ -1663,7 +1657,6 @@ public final class WALProtos {
       return nonceGroup_;
     }
 
-    // optional uint64 nonce = 10;
     public static final int NONCE_FIELD_NUMBER = 10;
     private long nonce_;
     /**
@@ -1679,7 +1672,6 @@ public final class WALProtos {
       return nonce_;
     }
 
-    // optional uint64 orig_sequence_number = 11;
     public static final int ORIG_SEQUENCE_NUMBER_FIELD_NUMBER = 11;
     private long origSequenceNumber_;
     /**
@@ -1695,23 +1687,11 @@ public final class WALProtos {
       return origSequenceNumber_;
     }
 
-    private void initFields() {
-      encodedRegionName_ = com.google.protobuf.ByteString.EMPTY;
-      tableName_ = com.google.protobuf.ByteString.EMPTY;
-      logSequenceNumber_ = 0L;
-      writeTime_ = 0L;
-      clusterId_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.getDefaultInstance();
-      scopes_ = java.util.Collections.emptyList();
-      followingKvCount_ = 0;
-      clusterIds_ = java.util.Collections.emptyList();
-      nonceGroup_ = 0L;
-      nonce_ = 0L;
-      origSequenceNumber_ = 0L;
-    }
     private byte memoizedIsInitialized = -1;
     public final boolean isInitialized() {
       byte isInitialized = memoizedIsInitialized;
-      if (isInitialized != -1) return isInitialized == 1;
+      if (isInitialized == 1) return true;
+      if (isInitialized == 0) return false;
 
       if (!hasEncodedRegionName()) {
         memoizedIsInitialized = 0;
@@ -1753,7 +1733,6 @@ public final class WALProtos {
 
     public void writeTo(com.google.protobuf.CodedOutputStream output)
                         throws java.io.IOException {
-      getSerializedSize();
       if (((bitField0_ & 0x00000001) == 0x00000001)) {
         output.writeBytes(1, encodedRegionName_);
       }
@@ -1767,7 +1746,7 @@ public final class WALProtos {
         output.writeUInt64(4, writeTime_);
       }
       if (((bitField0_ & 0x00000010) == 0x00000010)) {
-        output.writeMessage(5, clusterId_);
+        output.writeMessage(5, getClusterId());
       }
       for (int i = 0; i < scopes_.size(); i++) {
         output.writeMessage(6, scopes_.get(i));
@@ -1787,12 +1766,11 @@ public final class WALProtos {
       if (((bitField0_ & 0x00000100) == 0x00000100)) {
         output.writeUInt64(11, origSequenceNumber_);
       }
-      getUnknownFields().writeTo(output);
+      unknownFields.writeTo(output);
     }
 
-    private int memoizedSerializedSize = -1;
     public int getSerializedSize() {
-      int size = memoizedSerializedSize;
+      int size = memoizedSize;
       if (size != -1) return size;
 
       size = 0;
@@ -1814,7 +1792,7 @@ public final class WALProtos {
       }
       if (((bitField0_ & 0x00000010) == 0x00000010)) {
         size += com.google.protobuf.CodedOutputStream
-          .computeMessageSize(5, clusterId_);
+          .computeMessageSize(5, getClusterId());
       }
       for (int i = 0; i < scopes_.size(); i++) {
         size += com.google.protobuf.CodedOutputStream
@@ -1840,19 +1818,13 @@ public final class WALProtos {
         size += com.google.protobuf.CodedOutputStream
           .computeUInt64Size(11, origSequenceNumber_);
       }
-      size += getUnknownFields().getSerializedSize();
-      memoizedSerializedSize = size;
+      size += unknownFields.getSerializedSize();
+      memoizedSize = size;
       return size;
     }
 
     private static final long serialVersionUID = 0L;
     @java.lang.Override
-    protected java.lang.Object writeReplace()
-        throws java.io.ObjectStreamException {
-      return super.writeReplace();
-    }
-
-    @java.lang.Override
     public boolean equals(final java.lang.Object obj) {
       if (obj == this) {
        return true;
@@ -1912,12 +1884,10 @@ public final class WALProtos {
         result = result && (getOrigSequenceNumber()
             == other.getOrigSequenceNumber());
       }
-      result = result &&
-          getUnknownFields().equals(other.getUnknownFields());
+      result = result && unknownFields.equals(other.unknownFields);
       return result;
     }
 
-    private int memoizedHashCode = 0;
     @java.lang.Override
     public int hashCode() {
       if (memoizedHashCode != 0) {
@@ -1935,11 +1905,13 @@ public final class WALProtos {
       }
       if (hasLogSequenceNumber()) {
         hash = (37 * hash) + LOG_SEQUENCE_NUMBER_FIELD_NUMBER;
-        hash = (53 * hash) + hashLong(getLogSequenceNumber());
+        hash = (53 * hash) + com.google.protobuf.Internal.hashLong(
+            getLogSequenceNumber());
       }
       if (hasWriteTime()) {
         hash = (37 * hash) + WRITE_TIME_FIELD_NUMBER;
-        hash = (53 * hash) + hashLong(getWriteTime());
+        hash = (53 * hash) + com.google.protobuf.Internal.hashLong(
+            getWriteTime());
       }
       if (hasClusterId()) {
         hash = (37 * hash) + CLUSTER_ID_FIELD_NUMBER;
@@ -1959,17 +1931,20 @@ public final class WALProtos {
       }
       if (hasNonceGroup()) {
         hash = (37 * hash) + NONCEGROUP_FIELD_NUMBER;
-        hash = (53 * hash) + hashLong(getNonceGroup());
+        hash = (53 * hash) + com.google.protobuf.Internal.hashLong(
+            getNonceGroup());
       }
       if (hasNonce()) {
         hash = (37 * hash) + NONCE_FIELD_NUMBER;
-        hash = (53 * hash) + hashLong(getNonce());
+        hash = (53 * hash) + com.google.protobuf.Internal.hashLong(
+            getNonce());
       }
       if (hasOrigSequenceNumber()) {
         hash = (37 * hash) + ORIG_SEQUENCE_NUMBER_FIELD_NUMBER;
-        hash = (53 * hash) + hashLong(getOrigSequenceNumber());
+        hash = (53 * hash) + com.google.protobuf.Internal.hashLong(
+            getOrigSequenceNumber());
       }
-      hash = (29 * hash) + getUnknownFields().hashCode();
+      hash = (29 * hash) + unknownFields.hashCode();
       memoizedHashCode = hash;
       return hash;
     }
@@ -1997,67 +1972,78 @@ public final class WALProtos {
     }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey parseFrom(java.io.InputStream input)
         throws java.io.IOException {
-      return PARSER.parseFrom(input);
+      return com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input);
     }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey parseFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      return PARSER.parseFrom(input, extensionRegistry);
+      return com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input, extensionRegistry);
     }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey parseDelimitedFrom(java.io.InputStream input)
         throws java.io.IOException {
-      return PARSER.parseDelimitedFrom(input);
+      return com.google.protobuf.GeneratedMessageV3
+          .parseDelimitedWithIOException(PARSER, input);
     }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey parseDelimitedFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      return PARSER.parseDelimitedFrom(input, extensionRegistry);
+      return com.google.protobuf.GeneratedMessageV3
+          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
     }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey parseFrom(
         com.google.protobuf.CodedInputStream input)
         throws java.io.IOException {
-      return PARSER.parseFrom(input);
+      return com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input);
     }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey parseFrom(
         com.google.protobuf.CodedInputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      return PARSER.parseFrom(input, extensionRegistry);
+      return com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input, extensionRegistry);
     }
 
-    public static Builder newBuilder() { return Builder.create(); }
     public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder() {
+      return DEFAULT_INSTANCE.toBuilder();
+    }
    public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey prototype) {
-      return newBuilder().mergeFrom(prototype);
+      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() {
+      return this == DEFAULT_INSTANCE
+          ? new Builder() : new Builder().mergeFrom(this);
     }
-    public Builder toBuilder() { return newBuilder(this); }
 
     @java.lang.Override
     protected Builder newBuilderForType(
-        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
       Builder builder = new Builder(parent);
       return builder;
     }
     /**
-     * Protobuf type {@code hbase.pb.WALKey}
-     *
      * <pre>
-     *
     * Protocol buffer version of WALKey; see WALKey comment, not really a key but WALEdit header
      * for some KVs
      * </pre>
+     *
+     * Protobuf type {@code hbase.pb.WALKey}
      */
     public static final class Builder extends
-        com.google.protobuf.GeneratedMessage.Builder<Builder>
-       implements org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKeyOrBuilder {
+        com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
+        // @@protoc_insertion_point(builder_implements:hbase.pb.WALKey)
+        org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKeyOrBuilder {
       public static final com.google.protobuf.Descriptors.Descriptor
           getDescriptor() {
        return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_WALKey_descriptor;
       }
 
-      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
           internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_WALKey_fieldAccessorTable
             .ensureFieldAccessorsInitialized(
@@ -2070,21 +2056,18 @@ public final class WALProtos {
       }
 
       private Builder(
-          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+          com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
         super(parent);
         maybeForceBuilderInitialization();
       }
       private void maybeForceBuilderInitialization() {
-        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+        if (com.google.protobuf.GeneratedMessageV3
+                .alwaysUseFieldBuilders) {
           getClusterIdFieldBuilder();
           getScopesFieldBuilder();
           getClusterIdsFieldBuilder();
         }
       }
-      private static Builder create() {
-        return new Builder();
-      }
-
       public Builder clear() {
         super.clear();
         encodedRegionName_ = com.google.protobuf.ByteString.EMPTY;
@@ -2096,7 +2079,7 @@ public final class WALProtos {
         writeTime_ = 0L;
         bitField0_ = (bitField0_ & ~0x00000008);
         if (clusterIdBuilder_ == null) {
-          clusterId_ = 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.getDefaultInstance();
+          clusterId_ = null;
         } else {
           clusterIdBuilder_.clear();
         }
@@ -2124,10 +2107,6 @@ public final class WALProtos {
         return this;
       }
 
-      public Builder clone() {
-        return create().mergeFrom(buildPartial());
-      }
-
       public com.google.protobuf.Descriptors.Descriptor
           getDescriptorForType() {
         return 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.internal_static_hbase_pb_WALKey_descriptor;
@@ -2212,6 +2191,32 @@ public final class WALProtos {
         return result;
       }
 
+      public Builder clone() {
+        return (Builder) super.clone();
+      }
+      public Builder setField(
+          com.google.protobuf.Descriptors.FieldDescriptor field,
+          Object value) {
+        return (Builder) super.setField(field, value);
+      }
+      public Builder clearField(
+          com.google.protobuf.Descriptors.FieldDescriptor field) {
+        return (Builder) super.clearField(field);
+      }
+      public Builder clearOneof(
+          com.google.protobuf.Descriptors.OneofDescriptor oneof) {
+        return (Builder) super.clearOneof(oneof);
+      }
+      public Builder setRepeatedField(
+          com.google.protobuf.Descriptors.FieldDescriptor field,
+          int index, Object value) {
+        return (Builder) super.setRepeatedField(field, index, value);
+      }
+      public Builder addRepeatedField(
+          com.google.protobuf.Descriptors.FieldDescriptor field,
+          Object value) {
+        return (Builder) super.addRepeatedField(field, value);
+      }
       public Builder mergeFrom(com.google.protobuf.Message other) {
         if (other instanceof 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey) {
           return 
mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey)other);
@@ -2257,7 +2262,7 @@ public final class WALProtos {
               scopes_ = other.scopes_;
               bitField0_ = (bitField0_ & ~0x00000020);
               scopesBuilder_ = 
-                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
+                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                    getScopesFieldBuilder() : null;
             } else {
               scopesBuilder_.addAllMessages(other.scopes_);
@@ -2286,7 +2291,7 @@ public final class WALProtos {
               clusterIds_ = other.clusterIds_;
               bitField0_ = (bitField0_ & ~0x00000080);
               clusterIdsBuilder_ = 
-                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
+                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                    getClusterIdsFieldBuilder() : null;
             } else {
               clusterIdsBuilder_.addAllMessages(other.clusterIds_);
@@ -2302,42 +2307,36 @@ public final class WALProtos {
         if (other.hasOrigSequenceNumber()) {
           setOrigSequenceNumber(other.getOrigSequenceNumber());
         }
-        this.mergeUnknownFields(other.getUnknownFields());
+        this.mergeUnknownFields(other.unknownFields);
+        onChanged();
         return this;
       }
 
       public final boolean isInitialized() {
         if (!hasEncodedRegionName()) {
-          
           return false;
         }
         if (!hasTableName()) {
-          
           return false;
         }
         if (!hasLogSequenceNumber()) {
-          
           return false;
         }
         if (!hasWriteTime()) {
-          
           return false;
         }
         if (hasClusterId()) {
           if (!getClusterId().isInitialized()) {
-            
             return false;
           }
         }
         for (int i = 0; i < getScopesCount(); i++) {
           if (!getScopes(i).isInitialized()) {
-            
             return false;
           }
         }
         for (int i = 0; i < getClusterIdsCount(); i++) {
           if (!getClusterIds(i).isInitialized()) {
-            
             return false;
           }
         }
@@ -2353,7 +2352,7 @@ public final class WALProtos {
           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
           parsedMessage = 
(org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey) 
e.getUnfinishedMessage();
-          throw e;
+          throw e.unwrapIOException();
         } finally {
           if (parsedMessage != null) {
             mergeFrom(parsedMessage);
@@ -2363,7 +2362,6 @@ public final class WALProtos {
       }
       private int bitField0_;
 
-      // required bytes encoded_region_name = 1;
       private com.google.protobuf.ByteString encodedRegionName_ = 
com.google.protobuf.ByteString.EMPTY;
       /**
        * <code>required bytes encoded_region_name = 1;</code>
@@ -2399,7 +2397,6 @@ public final class WALProtos {
         return this;
       }
 
-      // required bytes table_name = 2;
       private com.google.protobuf.ByteString tableName_ = 
com.google.protobuf.ByteString.EMPTY;
       /**
        * <code>required bytes table_name = 2;</code>
@@ -2435,7 +2432,6 @@ public final class WALProtos {
         return this;
       }
 
-      // required uint64 log_sequence_number = 3;
       private long logSequenceNumber_ ;
       /**
        * <code>required uint64 log_sequence_number = 3;</code>
@@ -2468,7 +2464,6 @@ public final class WALProtos {
         return this;
       }
 
-      // required uint64 write_time = 4;
       private long writeTime_ ;
       /**
        * <code>required uint64 write_time = 4;</code>
@@ -2501,52 +2496,48 @@ public final class WALProtos {
         return this;
       }
 
-      // optional .hbase.pb.UUID cluster_id = 5 [deprecated = true];
-      private 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID clusterId_ = 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.getDefaultInstance();
-      private com.google.protobuf.SingleFieldBuilder<
+      private 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID clusterId_ = 
null;
+      private com.google.protobuf.SingleFieldBuilderV3<
           org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID, 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.Builder, 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUIDOrBuilder> 
clusterIdBuilder_;
       /**
-       * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = 
true];</code>
-       *
        * <pre>
-       *
        *This parameter is deprecated in favor of clusters which
        *contains the list of clusters that have consumed the change.
        *It is retained so that the log created by earlier releases (0.94)
        *can be read by the newer releases.
        * </pre>
+       *
+       * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = 
true];</code>
        */
       @java.lang.Deprecated public boolean hasClusterId() {
         return ((bitField0_ & 0x00000010) == 0x00000010);
       }
       /**
-       * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = 
true];</code>
-       *
        * <pre>
-       *
        *This parameter is deprecated in favor of clusters which
        *contains the list of clusters that have consumed the change.
        *It is retained so that the log created by earlier releases (0.94)
        *can be read by the newer releases.
        * </pre>
+       *
+       * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = 
true];</code>
        */
       @java.lang.Deprecated public 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID 
getClusterId() {
         if (clusterIdBuilder_ == null) {
-          return clusterId_;
+          return clusterId_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.getDefaultInstance()
 : clusterId_;
         } else {
           return clusterIdBuilder_.getMessage();
         }
       }
       /**
-       * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = 
true];</code>
-       *
        * <pre>
-       *
        *This parameter is deprecated in favor of clusters which
        *contains the list of clusters that have consumed the change.
        *It is retained so that the log created by earlier releases (0.94)
        *can be read by the newer releases.
        * </pre>
+       *
+       * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = 
true];</code>
        */
       @java.lang.Deprecated public Builder 
setClusterId(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID 
value) {
         if (clusterIdBuilder_ == null) {
@@ -2562,15 +2553,14 @@ public final class WALProtos {
         return this;
       }
       /**
-       * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = 
true];</code>
-       *
        * <pre>
-       *
        *This parameter is deprecated in favor of clusters which
        *contains the list of clusters that have consumed the change.
        *It is retained so that the log created by earlier releases (0.94)
        *can be read by the newer releases.
        * </pre>
+       *
+       * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = 
true];</code>
        */
       @java.lang.Deprecated public Builder setClusterId(
           
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.Builder 
builderForValue) {
@@ -2584,19 +2574,19 @@ public final class WALProtos {
         return this;
       }
       /**
-       * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = 
true];</code>
-       *
        * <pre>
-       *
        *This parameter is deprecated in favor of clusters which
        *contains the list of clusters that have consumed the change.
        *It is retained so that the log created by earlier releases (0.94)
        *can be read by the newer releases.
        * </pre>
+       *
+       * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = 
true];</code>
        */
       @java.lang.Deprecated public Builder 
mergeClusterId(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID
 value) {
         if (clusterIdBuilder_ == null) {
           if (((bitField0_ & 0x00000010) == 0x00000010) &&
+              clusterId_ != null &&
               clusterId_ != 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.getDefaultInstance())
 {
             clusterId_ =
               
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.newBuilder(clusterId_).mergeFrom(value).buildPartial();
@@ -2611,19 +2601,18 @@ public final class WALProtos {
         return this;
       }
       /**
-       * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = 
true];</code>
-       *
        * <pre>
-       *
        *This parameter is deprecated in favor of clusters which
        *contains the list of clusters that have consumed the change.
        *It is retained so that the log created by earlier releases (0.94)
        *can be read by the newer releases.
        * </pre>
+       *
+       * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = 
true];</code>
        */
       @java.lang.Deprecated public Builder clearClusterId() {
         if (clusterIdBuilder_ == null) {
-          clusterId_ = 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.getDefaultInstance();
+          clusterId_ = null;
           onChanged();
         } else {
           clusterIdBuilder_.clear();
@@ -2632,15 +2621,14 @@ public final class WALProtos {
         return this;
       }
       /**
-       * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = 
true];</code>
-       *
        * <pre>
-       *
        *This parameter is deprecated in favor of clusters which
        *contains the list of clusters that have consumed the change.
        *It is retained so that the log created by earlier releases (0.94)
        *can be read by the newer releases.
        * </pre>
+       *
+       * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = 
true];</code>
        */
       @java.lang.Deprecated public 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.Builder 
getClusterIdBuilder() {
         bitField0_ |= 0x00000010;
@@ -2648,41 +2636,40 @@ public final class WALProtos {
         return getClusterIdFieldBuilder().getBuilder();
       }
       /**
-       * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = 
true];</code>
-       *
        * <pre>
-       *
        *This parameter is deprecated in favor of clusters which
        *contains the list of clusters that have consumed the change.
        *It is retained so that the log created by earlier releases (0.94)
        *can be read by the newer releases.
        * </pre>
+       *
+       * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = 
true];</code>
        */
       @java.lang.Deprecated public 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUIDOrBuilder 
getClusterIdOrBuilder() {
         if (clusterIdBuilder_ != null) {
           return clusterIdBuilder_.getMessageOrBuilder();
         } else {
-          return clusterId_;
+          return clusterId_ == null ?
+              
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.getDefaultInstance()
 : clusterId_;
         }
       }
       /**
-       * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = 
true];</code>
-       *
        * <pre>
-       *
        *This parameter is deprecated in favor of clusters which
        *contains the list of clusters that have consumed the change.
        *It is retained so that the log created by earlier releases (0.94)
        *can be read by the newer releases.
        * </pre>
+       *
+       * <code>optional .hbase.pb.UUID cluster_id = 5 [deprecated = 
true];</code>
        */
-      private com.google.protobuf.SingleFieldBuilder<
+      private com.google.protobuf.SingleFieldBuilderV3<
           org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID, 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.Builder, 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUIDOrBuilder> 
           getClusterIdFieldBuilder() {
         if (clusterIdBuilder_ == null) {
-          clusterIdBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+          clusterIdBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
               
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID, 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.Builder, 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUIDOrBuilder>(
-                  clusterId_,
+                  getClusterId(),
                   getParentForChildren(),
                   isClean());
           clusterId_ = null;
@@ -2690,7 +2677,6 @@ public final class WALProtos {
         return clusterIdBuilder_;
       }
 
-      // repeated .hbase.pb.FamilyScope scopes = 6;
       private 
java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope>
 scopes_ =
         java.util.Collections.emptyList();
       private void ensureScopesIsMutable() {
@@ -2700,7 +2686,7 @@ public final class WALProtos {
          }
       }
 
-      private com.google.protobuf.RepeatedFieldBuilder<
+      private com.google.protobuf.RepeatedFieldBuilderV3<
           
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope, 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope.Builder,
 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScopeOrBuilder>
 scopesBuilder_;
 
       /**
@@ -2832,7 +2818,8 @@ public final class WALProtos {
           java.lang.Iterable<? extends 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope> 
values) {
         if (scopesBuilder_ == null) {
           ensureScopesIsMutable();
-          super.addAll(values, scopes_);
+          com.google.protobuf.AbstractMessageLite.Builder.addAll(
+              values, scopes_);
           onChanged();
         } else {
           scopesBuilder_.addAllMessages(values);
@@ -2915,11 +2902,11 @@ public final class WALProtos {
            getScopesBuilderList() {
         return getScopesFieldBuilder().getBuilderList();
       }
-      private com.google.protobuf.RepeatedFieldBuilder<
+      private com.google.protobuf.RepeatedFieldBuilderV3<
           
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope, 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope.Builder,
 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScopeOrBuilder>
 
           getScopesFieldBuilder() {
         if (scopesBuilder_ == null) {
-          scopesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
+          scopesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3<
               
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope, 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScope.Builder,
 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.FamilyScopeOrBuilder>(
                   scopes_,
                   ((bitField0_ & 0x00000020) == 0x00000020),
@@ -2930,7 +2917,6 @@ public final class WALProtos {
         return scopesBuilder_;
       }
 
-      // optional uint32 following_kv_count = 7;
       private int followingKvCount_ ;
       /**
        * <code>optional uint32 following_kv_count = 7;</code>
@@ -2963,7 +2949,6 @@ public final class WALProtos {
         return this;
       }
 
-      // repeated .hbase.pb.UUID cluster_ids = 8;
       private 
java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID>
 clusterIds_ =
         java.util.Collections.emptyList();
       private void ensureClusterIdsIsMutable() {
@@ -2973,17 +2958,16 @@ public final class WALProtos {
          }
       }
 
-      private com.google.protobuf.RepeatedFieldBuilder<
+      private com.google.protobuf.RepeatedFieldBuilderV3<
           org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID, 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.Builder, 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUIDOrBuilder> 
clusterIdsBuilder_;
 
       /**
-       * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
-       *
        * <pre>
-       *
        *This field contains the list of clusters that have
        *consumed the change
        * </pre>
+       *
+       * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
        */
       public 
java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID>
 getClusterIdsList() {
         if (clusterIdsBuilder_ == null) {
@@ -2993,13 +2977,12 @@ public final class WALProtos {
         }
       }
       /**
-       * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
-       *
        * <pre>
-       *
        *This field contains the list of clusters that have
        *consumed the change
        * </pre>
+       *
+       * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
        */
       public int getClusterIdsCount() {
         if (clusterIdsBuilder_ == null) {
@@ -3009,13 +2992,12 @@ public final class WALProtos {
         }
       }
       /**
-       * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
-       *
        * <pre>
-       *
        *This field contains the list of clusters that have
        *consumed the change
        * </pre>
+       *
+       * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
        */
       public 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID 
getClusterIds(int index) {
         if (clusterIdsBuilder_ == null) {
@@ -3025,13 +3007,12 @@ public final class WALProtos {
         }
       }
       /**
-       * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
-       *
        * <pre>
-       *
        *This field contains the list of clusters that have
        *consumed the change
        * </pre>
+       *
+       * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
        */
       public Builder setClusterIds(
           int index, 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID value) {
@@ -3048,13 +3029,12 @@ public final class WALProtos {
         return this;
       }
       /**
-       * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
-       *
        * <pre>
-       *
        *This field contains the list of clusters that have
        *consumed the change
        * </pre>
+       *
+       * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
        */
       public Builder setClusterIds(
           int index, 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.Builder 
builderForValue) {
@@ -3068,13 +3048,12 @@ public final class WALProtos {
         return this;
       }
       /**
-       * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
-       *
        * <pre>
-       *
        *This field contains the list of clusters that have
        *consumed the change
        * </pre>
+       *
+       * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
        */
       public Builder 
addClusterIds(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID
 value) {
         if (clusterIdsBuilder_ == null) {
@@ -3090,13 +3069,12 @@ public final class WALProtos {
         return this;
       }
       /**
-       * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
-       *
        * <pre>
-       *
        *This field contains the list of clusters that have
        *consumed the change
        * </pre>
+       *
+       * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
        */
       public Builder addClusterIds(
           int index, 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID value) {
@@ -3113,13 +3091,12 @@ public final class WALProtos {
         return this;
       }
       /**
-       * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
-       *
        * <pre>
-       *
        *This field contains the list of clusters that have
        *consumed the change
        * </pre>
+       *
+       * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
        */
       public Builder addClusterIds(
           
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.Builder 
builderForValue) {
@@ -3133,13 +3110,12 @@ public final class WALProtos {
         return this;
       }
       /**
-       * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
-       *
        * <pre>
-       *
        *This field contains the list of clusters that have
        *consumed the change
        * </pre>
+       *
+       * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
        */
       public Builder addClusterIds(
           int index, 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.Builder 
builderForValue) {
@@ -3153,19 +3129,19 @@ public final class WALProtos {
         return this;
       }
       /**
-       * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
-       *
        * <pre>
-       *
        *This field contains the list of clusters that have
        *consumed the change
        * </pre>
+       *
+       * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
        */
       public Builder addAllClusterIds(
           java.lang.Iterable<? extends 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID> values) {
         if (clusterIdsBuilder_ == null) {
           ensureClusterIdsIsMutable();
-          super.addAll(values, clusterIds_);
+          com.google.protobuf.AbstractMessageLite.Builder.addAll(
+              values, clusterIds_);
           onChanged();
         } else {
           clusterIdsBuilder_.addAllMessages(values);
@@ -3173,13 +3149,12 @@ public final class WALProtos {
         return this;
       }
       /**
-       * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
-       *
        * <pre>
-       *
        *This field contains the list of clusters that have
        *consumed the change
        * </pre>
+       *
+       * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
        */
       public Builder clearClusterIds() {
         if (clusterIdsBuilder_ == null) {
@@ -3192,13 +3167,12 @@ public final class WALProtos {
         return this;
       }
       /**
-       * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
-       *
        * <pre>
-       *
        *This field contains the list of clusters that have
        *consumed the change
        * </pre>
+       *
+       * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
        */
       public Builder removeClusterIds(int index) {
         if (clusterIdsBuilder_ == null) {
@@ -3211,26 +3185,24 @@ public final class WALProtos {
         return this;
       }
       /**
-       * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
-       *
        * <pre>
-       *
        *This field contains the list of clusters that have
        *consumed the change
        * </pre>
+       *
+       * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
        */
       public 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.Builder 
getClusterIdsBuilder(
           int index) {
         return getClusterIdsFieldBuilder().getBuilder(index);
       }
       /**
-       * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
-       *
        * <pre>
-       *
        *This field contains the list of clusters that have
        *consumed the change
        * </pre>
+       *
+       * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
        */
       public 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUIDOrBuilder 
getClusterIdsOrBuilder(
           int index) {
@@ -3240,13 +3212,12 @@ public final class WALProtos {
         }
       }
       /**
-       * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
-       *
        * <pre>
-       *
        *This field contains the list of clusters that have
        *consumed the change
        * </pre>
+       *
+       * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
        */
       public java.util.List<? extends 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUIDOrBuilder> 
            getClusterIdsOrBuilderList() {
@@ -3257,26 +3228,24 @@ public final class WALProtos {
         }
       }
       /**
-       * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
-       *
        * <pre>
-       *
        *This field contains the list of clusters that have
        *consumed the change
        * </pre>
+       *
+       * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
        */
       public 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.Builder 
addClusterIdsBuilder() {
         return getClusterIdsFieldBuilder().addBuilder(
             
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.getDefaultInstance());
       }
       /**
-       * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
-       *
        * <pre>
-       *
        *This field contains the list of clusters that have
        *consumed the change
        * </pre>
+       *
+       * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
        */
       public 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.Builder 
addClusterIdsBuilder(
           int index) {
@@ -3284,23 +3253,22 @@ public final class WALProtos {
             index, 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.getDefaultInstance());
       }
       /**
-       * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
-       *
        * <pre>
-       *
        *This field contains the list of clusters that have
        *consumed the change
        * </pre>
+       *
+       * <code>repeated .hbase.pb.UUID cluster_ids = 8;</code>
        */
       public 
java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.Builder>
 
            getClusterIdsBuilderList() {
         return getClusterIdsFieldBuilder().getBuilderList();
       }
-      private com.google.protobuf.RepeatedFieldBuilder<
+      private com.google.protobuf.RepeatedFieldBuilderV3<
           org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID, 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.Builder, 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUIDOrBuilder> 
           getClusterIdsFieldBuilder() {
         if (clusterIdsBuilder_ == null) {
-          clusterIdsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
+          clusterIdsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3<
               
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID, 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUID.Builder, 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.UUIDOrBuilder>(
                   clusterIds_,
                   ((bitField0_ & 0x00000080) == 0x00000080),
@@ -3311,7 +3279,6 @@ public final class WALProtos {
         return clusterIdsBuilder_;
       }
 
-      // optional uint64 nonceGroup = 9;
       private long nonceGroup_ ;
       /**
        * <code>optional uint64 nonceGroup = 9;</code>
@@ -3344,7 +3311,6 @@ public final class WALProtos {
         return this;
       }
 
-      // optional uint64 nonce = 10;
       private long nonce_ ;
       /**
        * <code>optional uint64 nonce = 10;</code>
@@ -3377,7 +3343,6 @@ public final class WALProtos {
         return this;
       }
 
-      // optional uint64 orig_sequence_number = 11;
       private long origSequenceNumber_ ;
       /**
        * <code>optional uint64 orig_sequence_number = 11;</code>
@@ -3409,22 +3374,59 @@ public final class WALProtos {
         onChanged();
         return this;
       }
+      public final Builder setUnknownFields(
+          final com.google.protobuf.UnknownFieldSet unknownFields) {
+        return super.setUnknownFields(unknownFields);
+      }
+
+      public final Builder mergeUnknownFields(
+          final com.google.protobuf.UnknownFieldSet unknownFields) {
+        return super.mergeUnknownFields(unknownFields);
+      }
+
 
       // @@protoc_insertion_point(builder_scope:hbase.pb.WALKey)
     }
 
+    // @@protoc_insertion_point(class_scope:hbase.pb.WALKey)
+    private static final 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey 
DEFAULT_INSTANCE;
     static {
-      defaultInstance = new WALKey(true);
-      defaultInstance.initFields();
+      DEFAULT_INSTANCE = new 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey();
+    }
+
+    public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey 
getDefaultInstance() {
+      return DEFAULT_INSTANCE;
+    }
+
+    @java.lang.Deprecated public static final 
com.google.protobuf.Parser<WALKey>
+        PARSER = new com.google.protobuf.AbstractParser<WALKey>() {
+      public WALKey parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+          return new WALKey(input, extensionRegistry);
+      }
+    };
+
+    public static com.google.protobuf.Parser<WALKey> parser() {
+      return PARSER;
+    }
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<WALKey> getParserForType() {
+      return PARSER;
+    }
+
+    public org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey 
getDefaultInstanceForType() {
+      return DEFAULT_INSTANCE;
     }
 
-    // @@protoc_insertion_point(class_scope:hbase.pb.WALKey)
   }
 
-  public interface FamilyScopeOrBuilder
-      extends com.google.protobuf.MessageOrBuilder {
+  public interface FamilyScopeOrBuilder extends
+      // @@protoc_insertion_point(interface_exten

<TRUNCATED>
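
For anyone skimming the generated churn above, the visible API shift in this hunk is the protobuf-2 to protobuf-3 codegen move: builders are now backed by DEFAULT_INSTANCE.toBuilder(), the static PARSER field is deprecated in favour of parser(), and GeneratedMessage / SingleFieldBuilder / RepeatedFieldBuilder become their V3 counterparts. A minimal round-trip against the new surface might look like the sketch below. It is illustrative only and not part of the commit: the class name WALKeyRoundTrip is made up, and the required-field setters (setEncodedRegionName and friends) are assumed to be the names protoc conventionally derives from the fields shown in this hunk.

    import com.google.protobuf.ByteString;
    import org.apache.hadoop.hbase.shaded.protobuf.generated.WALProtos.WALKey;

    // Hypothetical driver class, not present in the HBase source tree.
    public class WALKeyRoundTrip {
      public static void main(String[] args) throws Exception {
        // Build through the public newBuilder() entry point, which the
        // patched code now routes via DEFAULT_INSTANCE.toBuilder().
        WALKey key = WALKey.newBuilder()
            .setEncodedRegionName(ByteString.copyFromUtf8("d41d8cd98f00b204"))
            .setTableName(ByteString.copyFromUtf8("default:t1"))
            .setLogSequenceNumber(1L)
            .setWriteTime(System.currentTimeMillis())
            .build();

        // The static PARSER field is deprecated in the V3 codegen; parser()
        // is the supported accessor for the message parser.
        WALKey copy = WALKey.parser().parseFrom(key.toByteArray());
        System.out.println(copy.equals(key));  // expected: true
      }
    }

The same pattern applies to the other messages touched by this patch; only the field setters differ per message.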
