http://git-wip-us.apache.org/repos/asf/hbase/blob/b4a729ed/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/HBaseProtos.java
----------------------------------------------------------------------
diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/HBaseProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/HBaseProtos.java
index 1259d3c..2ae9f9c 100644
--- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/HBaseProtos.java
+++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/HBaseProtos.java
@@ -6,45 +6,51 @@ package org.apache.hadoop.hbase.shaded.protobuf.generated;
 public final class HBaseProtos {
   private HBaseProtos() {}
   public static void registerAllExtensions(
+      com.google.protobuf.ExtensionRegistryLite registry) {
+  }
+
+  public static void registerAllExtensions(
       com.google.protobuf.ExtensionRegistry registry) {
+    registerAllExtensions(
+        (com.google.protobuf.ExtensionRegistryLite) registry);
   }
   /**
-   * Protobuf enum {@code hbase.pb.CompareType}
-   *
    * <pre>
    * Comparison operators 
    * </pre>
+   *
+   * Protobuf enum {@code hbase.pb.CompareType}
    */
   public enum CompareType
       implements com.google.protobuf.ProtocolMessageEnum {
     /**
      * <code>LESS = 0;</code>
      */
-    LESS(0, 0),
+    LESS(0),
     /**
      * <code>LESS_OR_EQUAL = 1;</code>
      */
-    LESS_OR_EQUAL(1, 1),
+    LESS_OR_EQUAL(1),
     /**
      * <code>EQUAL = 2;</code>
      */
-    EQUAL(2, 2),
+    EQUAL(2),
     /**
      * <code>NOT_EQUAL = 3;</code>
      */
-    NOT_EQUAL(3, 3),
+    NOT_EQUAL(3),
     /**
      * <code>GREATER_OR_EQUAL = 4;</code>
      */
-    GREATER_OR_EQUAL(4, 4),
+    GREATER_OR_EQUAL(4),
     /**
      * <code>GREATER = 5;</code>
      */
-    GREATER(5, 5),
+    GREATER(5),
     /**
      * <code>NO_OP = 6;</code>
      */
-    NO_OP(6, 6),
+    NO_OP(6),
     ;
 
     /**
@@ -77,9 +83,19 @@ public final class HBaseProtos {
     public static final int NO_OP_VALUE = 6;
 
 
-    public final int getNumber() { return value; }
+    public final int getNumber() {
+      return value;
+    }
 
+    /**
+     * @deprecated Use {@link #forNumber(int)} instead.
+     */
+    @java.lang.Deprecated
     public static CompareType valueOf(int value) {
+      return forNumber(value);
+    }
+
+    public static CompareType forNumber(int value) {
       switch (value) {
         case 0: return LESS;
         case 1: return LESS_OR_EQUAL;
@@ -96,17 +112,17 @@ public final class HBaseProtos {
         internalGetValueMap() {
       return internalValueMap;
     }
-    private static com.google.protobuf.Internal.EnumLiteMap<CompareType>
-        internalValueMap =
+    private static final com.google.protobuf.Internal.EnumLiteMap<
+        CompareType> internalValueMap =
           new com.google.protobuf.Internal.EnumLiteMap<CompareType>() {
             public CompareType findValueByNumber(int number) {
-              return CompareType.valueOf(number);
+              return CompareType.forNumber(number);
             }
           };
 
     public final com.google.protobuf.Descriptors.EnumValueDescriptor
         getValueDescriptor() {
-      return getDescriptor().getValues().get(index);
+      return getDescriptor().getValues().get(ordinal());
     }
     public final com.google.protobuf.Descriptors.EnumDescriptor
         getDescriptorForType() {
@@ -128,11 +144,9 @@ public final class HBaseProtos {
       return VALUES[desc.getIndex()];
     }
 
-    private final int index;
     private final int value;
 
-    private CompareType(int index, int value) {
-      this.index = index;
+    private CompareType(int value) {
       this.value = value;
     }
 
@@ -147,31 +161,31 @@ public final class HBaseProtos {
     /**
      * <code>NANOSECONDS = 1;</code>
      */
-    NANOSECONDS(0, 1),
+    NANOSECONDS(1),
     /**
      * <code>MICROSECONDS = 2;</code>
      */
-    MICROSECONDS(1, 2),
+    MICROSECONDS(2),
     /**
      * <code>MILLISECONDS = 3;</code>
      */
-    MILLISECONDS(2, 3),
+    MILLISECONDS(3),
     /**
      * <code>SECONDS = 4;</code>
      */
-    SECONDS(3, 4),
+    SECONDS(4),
     /**
      * <code>MINUTES = 5;</code>
      */
-    MINUTES(4, 5),
+    MINUTES(5),
     /**
      * <code>HOURS = 6;</code>
      */
-    HOURS(5, 6),
+    HOURS(6),
     /**
      * <code>DAYS = 7;</code>
      */
-    DAYS(6, 7),
+    DAYS(7),
     ;
 
     /**
@@ -204,9 +218,19 @@ public final class HBaseProtos {
     public static final int DAYS_VALUE = 7;
 
 
-    public final int getNumber() { return value; }
+    public final int getNumber() {
+      return value;
+    }
 
+    /**
+     * @deprecated Use {@link #forNumber(int)} instead.
+     */
+    @java.lang.Deprecated
     public static TimeUnit valueOf(int value) {
+      return forNumber(value);
+    }
+
+    public static TimeUnit forNumber(int value) {
       switch (value) {
         case 1: return NANOSECONDS;
         case 2: return MICROSECONDS;
@@ -223,17 +247,17 @@ public final class HBaseProtos {
         internalGetValueMap() {
       return internalValueMap;
     }
-    private static com.google.protobuf.Internal.EnumLiteMap<TimeUnit>
-        internalValueMap =
+    private static final com.google.protobuf.Internal.EnumLiteMap<
+        TimeUnit> internalValueMap =
           new com.google.protobuf.Internal.EnumLiteMap<TimeUnit>() {
             public TimeUnit findValueByNumber(int number) {
-              return TimeUnit.valueOf(number);
+              return TimeUnit.forNumber(number);
             }
           };
 
     public final com.google.protobuf.Descriptors.EnumValueDescriptor
         getValueDescriptor() {
-      return getDescriptor().getValues().get(index);
+      return getDescriptor().getValues().get(ordinal());
     }
     public final com.google.protobuf.Descriptors.EnumDescriptor
         getDescriptorForType() {
@@ -255,21 +279,19 @@ public final class HBaseProtos {
       return VALUES[desc.getIndex()];
     }
 
-    private final int index;
     private final int value;
 
-    private TimeUnit(int index, int value) {
-      this.index = index;
+    private TimeUnit(int value) {
       this.value = value;
     }
 
     // @@protoc_insertion_point(enum_scope:hbase.pb.TimeUnit)
   }
 
-  public interface TableNameOrBuilder
-      extends com.google.protobuf.MessageOrBuilder {
+  public interface TableNameOrBuilder extends
+      // @@protoc_insertion_point(interface_extends:hbase.pb.TableName)
+      com.google.protobuf.MessageOrBuilder {
 
-    // required bytes namespace = 1;
     /**
      * <code>required bytes namespace = 1;</code>
      */
@@ -279,7 +301,6 @@ public final class HBaseProtos {
      */
     com.google.protobuf.ByteString getNamespace();
 
-    // required bytes qualifier = 2;
     /**
      * <code>required bytes qualifier = 2;</code>
      */
@@ -290,43 +311,36 @@ public final class HBaseProtos {
     com.google.protobuf.ByteString getQualifier();
   }
   /**
-   * Protobuf type {@code hbase.pb.TableName}
-   *
    * <pre>
    **
    * Table Name
    * </pre>
+   *
+   * Protobuf type {@code hbase.pb.TableName}
    */
-  public static final class TableName extends
-      com.google.protobuf.GeneratedMessage
-      implements TableNameOrBuilder {
+  public  static final class TableName extends
+      com.google.protobuf.GeneratedMessageV3 implements
+      // @@protoc_insertion_point(message_implements:hbase.pb.TableName)
+      TableNameOrBuilder {
     // Use TableName.newBuilder() to construct.
-    private TableName(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+    private TableName(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
       super(builder);
-      this.unknownFields = builder.getUnknownFields();
-    }
-    private TableName(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
-
-    private static final TableName defaultInstance;
-    public static TableName getDefaultInstance() {
-      return defaultInstance;
     }
-
-    public TableName getDefaultInstanceForType() {
-      return defaultInstance;
+    private TableName() {
+      namespace_ = com.google.protobuf.ByteString.EMPTY;
+      qualifier_ = com.google.protobuf.ByteString.EMPTY;
     }
 
-    private final com.google.protobuf.UnknownFieldSet unknownFields;
     @java.lang.Override
     public final com.google.protobuf.UnknownFieldSet
-        getUnknownFields() {
+    getUnknownFields() {
       return this.unknownFields;
     }
     private TableName(
         com.google.protobuf.CodedInputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      initFields();
+      this();
       int mutable_bitField0_ = 0;
       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
           com.google.protobuf.UnknownFieldSet.newBuilder();
@@ -361,7 +375,7 @@ public final class HBaseProtos {
         throw e.setUnfinishedMessage(this);
       } catch (java.io.IOException e) {
         throw new com.google.protobuf.InvalidProtocolBufferException(
-            e.getMessage()).setUnfinishedMessage(this);
+            e).setUnfinishedMessage(this);
       } finally {
         this.unknownFields = unknownFields.build();
         makeExtensionsImmutable();
@@ -372,30 +386,14 @@ public final class HBaseProtos {
       return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableName_descriptor;
     }

-    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
         internalGetFieldAccessorTable() {
       return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableName_fieldAccessorTable
           .ensureFieldAccessorsInitialized(
               org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder.class);
     }
 
-    public static com.google.protobuf.Parser<TableName> PARSER =
-        new com.google.protobuf.AbstractParser<TableName>() {
-      public TableName parsePartialFrom(
-          com.google.protobuf.CodedInputStream input,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        return new TableName(input, extensionRegistry);
-      }
-    };
-
-    @java.lang.Override
-    public com.google.protobuf.Parser<TableName> getParserForType() {
-      return PARSER;
-    }
-
     private int bitField0_;
-    // required bytes namespace = 1;
     public static final int NAMESPACE_FIELD_NUMBER = 1;
     private com.google.protobuf.ByteString namespace_;
     /**
@@ -411,7 +409,6 @@ public final class HBaseProtos {
       return namespace_;
     }
 
-    // required bytes qualifier = 2;
     public static final int QUALIFIER_FIELD_NUMBER = 2;
     private com.google.protobuf.ByteString qualifier_;
     /**
@@ -427,14 +424,11 @@ public final class HBaseProtos {
       return qualifier_;
     }
 
-    private void initFields() {
-      namespace_ = com.google.protobuf.ByteString.EMPTY;
-      qualifier_ = com.google.protobuf.ByteString.EMPTY;
-    }
     private byte memoizedIsInitialized = -1;
     public final boolean isInitialized() {
       byte isInitialized = memoizedIsInitialized;
-      if (isInitialized != -1) return isInitialized == 1;
+      if (isInitialized == 1) return true;
+      if (isInitialized == 0) return false;
 
       if (!hasNamespace()) {
         memoizedIsInitialized = 0;
@@ -450,19 +444,17 @@ public final class HBaseProtos {
 
     public void writeTo(com.google.protobuf.CodedOutputStream output)
                         throws java.io.IOException {
-      getSerializedSize();
       if (((bitField0_ & 0x00000001) == 0x00000001)) {
         output.writeBytes(1, namespace_);
       }
       if (((bitField0_ & 0x00000002) == 0x00000002)) {
         output.writeBytes(2, qualifier_);
       }
-      getUnknownFields().writeTo(output);
+      unknownFields.writeTo(output);
     }
 
-    private int memoizedSerializedSize = -1;
     public int getSerializedSize() {
-      int size = memoizedSerializedSize;
+      int size = memoizedSize;
       if (size != -1) return size;
 
       size = 0;
@@ -474,19 +466,13 @@ public final class HBaseProtos {
         size += com.google.protobuf.CodedOutputStream
           .computeBytesSize(2, qualifier_);
       }
-      size += getUnknownFields().getSerializedSize();
-      memoizedSerializedSize = size;
+      size += unknownFields.getSerializedSize();
+      memoizedSize = size;
       return size;
     }
 
     private static final long serialVersionUID = 0L;
     @java.lang.Override
-    protected java.lang.Object writeReplace()
-        throws java.io.ObjectStreamException {
-      return super.writeReplace();
-    }
-
-    @java.lang.Override
     public boolean equals(final java.lang.Object obj) {
       if (obj == this) {
        return true;
@@ -507,12 +493,10 @@ public final class HBaseProtos {
         result = result && getQualifier()
             .equals(other.getQualifier());
       }
-      result = result &&
-          getUnknownFields().equals(other.getUnknownFields());
+      result = result && unknownFields.equals(other.unknownFields);
       return result;
     }
 
-    private int memoizedHashCode = 0;
     @java.lang.Override
     public int hashCode() {
       if (memoizedHashCode != 0) {
@@ -528,7 +512,7 @@ public final class HBaseProtos {
         hash = (37 * hash) + QUALIFIER_FIELD_NUMBER;
         hash = (53 * hash) + getQualifier().hashCode();
       }
-      hash = (29 * hash) + getUnknownFields().hashCode();
+      hash = (29 * hash) + unknownFields.hashCode();
       memoizedHashCode = hash;
       return hash;
     }
@@ -556,66 +540,78 @@ public final class HBaseProtos {
     }
     public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName parseFrom(java.io.InputStream input)
         throws java.io.IOException {
-      return PARSER.parseFrom(input);
+      return com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input);
     }
     public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName parseFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      return PARSER.parseFrom(input, extensionRegistry);
+      return com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input, extensionRegistry);
     }
     public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName parseDelimitedFrom(java.io.InputStream input)
         throws java.io.IOException {
-      return PARSER.parseDelimitedFrom(input);
+      return com.google.protobuf.GeneratedMessageV3
+          .parseDelimitedWithIOException(PARSER, input);
     }
     public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName parseDelimitedFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      return PARSER.parseDelimitedFrom(input, extensionRegistry);
+      return com.google.protobuf.GeneratedMessageV3
+          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
     }
     public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName parseFrom(
         com.google.protobuf.CodedInputStream input)
         throws java.io.IOException {
-      return PARSER.parseFrom(input);
+      return com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input);
     }
     public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName parseFrom(
         com.google.protobuf.CodedInputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      return PARSER.parseFrom(input, extensionRegistry);
+      return com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input, extensionRegistry);
     }
 
-    public static Builder newBuilder() { return Builder.create(); }
     public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder() {
+      return DEFAULT_INSTANCE.toBuilder();
+    }
     public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName prototype) {
-      return newBuilder().mergeFrom(prototype);
+      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() {
+      return this == DEFAULT_INSTANCE
+          ? new Builder() : new Builder().mergeFrom(this);
     }
-    public Builder toBuilder() { return newBuilder(this); }
 
     @java.lang.Override
     protected Builder newBuilderForType(
-        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
       Builder builder = new Builder(parent);
       return builder;
     }
     /**
-     * Protobuf type {@code hbase.pb.TableName}
-     *
      * <pre>
      **
      * Table Name
      * </pre>
+     *
+     * Protobuf type {@code hbase.pb.TableName}
      */
     public static final class Builder extends
-        com.google.protobuf.GeneratedMessage.Builder<Builder>
-       implements org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder {
+        com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
+        // @@protoc_insertion_point(builder_implements:hbase.pb.TableName)
+        org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder {
       public static final com.google.protobuf.Descriptors.Descriptor
           getDescriptor() {
         return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableName_descriptor;
       }

-      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
           internalGetFieldAccessorTable() {
         return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableName_fieldAccessorTable
             .ensureFieldAccessorsInitialized(
@@ -628,18 +624,15 @@ public final class HBaseProtos {
       }
 
       private Builder(
-          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+          com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
         super(parent);
         maybeForceBuilderInitialization();
       }
       private void maybeForceBuilderInitialization() {
-        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+        if (com.google.protobuf.GeneratedMessageV3
+                .alwaysUseFieldBuilders) {
         }
       }
-      private static Builder create() {
-        return new Builder();
-      }
-
       public Builder clear() {
         super.clear();
         namespace_ = com.google.protobuf.ByteString.EMPTY;
@@ -649,10 +642,6 @@ public final class HBaseProtos {
         return this;
       }
 
-      public Builder clone() {
-        return create().mergeFrom(buildPartial());
-      }
-
       public com.google.protobuf.Descriptors.Descriptor
           getDescriptorForType() {
         return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableName_descriptor;
@@ -687,6 +676,32 @@ public final class HBaseProtos {
         return result;
       }
 
+      public Builder clone() {
+        return (Builder) super.clone();
+      }
+      public Builder setField(
+          com.google.protobuf.Descriptors.FieldDescriptor field,
+          Object value) {
+        return (Builder) super.setField(field, value);
+      }
+      public Builder clearField(
+          com.google.protobuf.Descriptors.FieldDescriptor field) {
+        return (Builder) super.clearField(field);
+      }
+      public Builder clearOneof(
+          com.google.protobuf.Descriptors.OneofDescriptor oneof) {
+        return (Builder) super.clearOneof(oneof);
+      }
+      public Builder setRepeatedField(
+          com.google.protobuf.Descriptors.FieldDescriptor field,
+          int index, Object value) {
+        return (Builder) super.setRepeatedField(field, index, value);
+      }
+      public Builder addRepeatedField(
+          com.google.protobuf.Descriptors.FieldDescriptor field,
+          Object value) {
+        return (Builder) super.addRepeatedField(field, value);
+      }
       public Builder mergeFrom(com.google.protobuf.Message other) {
         if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName) {
           return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName)other);
@@ -704,17 +719,16 @@ public final class HBaseProtos {
         if (other.hasQualifier()) {
           setQualifier(other.getQualifier());
         }
-        this.mergeUnknownFields(other.getUnknownFields());
+        this.mergeUnknownFields(other.unknownFields);
+        onChanged();
         return this;
       }
 
       public final boolean isInitialized() {
         if (!hasNamespace()) {
-          
           return false;
         }
         if (!hasQualifier()) {
-          
           return false;
         }
         return true;
@@ -729,7 +743,7 @@ public final class HBaseProtos {
           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
           parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName) e.getUnfinishedMessage();
-          throw e;
+          throw e.unwrapIOException();
         } finally {
           if (parsedMessage != null) {
             mergeFrom(parsedMessage);
@@ -739,7 +753,6 @@ public final class HBaseProtos {
       }
       private int bitField0_;
 
-      // required bytes namespace = 1;
       private com.google.protobuf.ByteString namespace_ = com.google.protobuf.ByteString.EMPTY;
       /**
        * <code>required bytes namespace = 1;</code>
@@ -775,7 +788,6 @@ public final class HBaseProtos {
         return this;
       }
 
-      // required bytes qualifier = 2;
       private com.google.protobuf.ByteString qualifier_ = com.google.protobuf.ByteString.EMPTY;
       /**
        * <code>required bytes qualifier = 2;</code>
@@ -810,22 +822,59 @@ public final class HBaseProtos {
         onChanged();
         return this;
       }
+      public final Builder setUnknownFields(
+          final com.google.protobuf.UnknownFieldSet unknownFields) {
+        return super.setUnknownFields(unknownFields);
+      }
+
+      public final Builder mergeUnknownFields(
+          final com.google.protobuf.UnknownFieldSet unknownFields) {
+        return super.mergeUnknownFields(unknownFields);
+      }
+
 
       // @@protoc_insertion_point(builder_scope:hbase.pb.TableName)
     }
 
+    // @@protoc_insertion_point(class_scope:hbase.pb.TableName)
+    private static final org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName DEFAULT_INSTANCE;
     static {
-      defaultInstance = new TableName(true);
-      defaultInstance.initFields();
+      DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName();
+    }
+
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getDefaultInstance() {
+      return DEFAULT_INSTANCE;
+    }
+
+    @java.lang.Deprecated public static final com.google.protobuf.Parser<TableName>
+        PARSER = new com.google.protobuf.AbstractParser<TableName>() {
+      public TableName parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+          return new TableName(input, extensionRegistry);
+      }
+    };
+
+    public static com.google.protobuf.Parser<TableName> parser() {
+      return PARSER;
+    }
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<TableName> getParserForType() {
+      return PARSER;
+    }
+
+    public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getDefaultInstanceForType() {
+      return DEFAULT_INSTANCE;
     }
 
-    // @@protoc_insertion_point(class_scope:hbase.pb.TableName)
   }
 
-  public interface TableSchemaOrBuilder
-      extends com.google.protobuf.MessageOrBuilder {
+  public interface TableSchemaOrBuilder extends
+      // @@protoc_insertion_point(interface_extends:hbase.pb.TableSchema)
+      com.google.protobuf.MessageOrBuilder {
 
-    // optional .hbase.pb.TableName table_name = 1;
     /**
      * <code>optional .hbase.pb.TableName table_name = 1;</code>
      */
@@ -839,7 +888,6 @@ public final class HBaseProtos {
     */
    org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder();
 
-    // repeated .hbase.pb.BytesBytesPair attributes = 2;
     /**
      * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code>
      */
@@ -864,7 +912,6 @@ public final class HBaseProtos {
    org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getAttributesOrBuilder(
         int index);
 
-    // repeated .hbase.pb.ColumnFamilySchema column_families = 3;
     /**
      * <code>repeated .hbase.pb.ColumnFamilySchema column_families = 3;</code>
      */
@@ -889,7 +936,6 @@ public final class HBaseProtos {
    org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder getColumnFamiliesOrBuilder(
         int index);
 
-    // repeated .hbase.pb.NameStringPair configuration = 4;
     /**
      * <code>repeated .hbase.pb.NameStringPair configuration = 4;</code>
      */
@@ -915,44 +961,38 @@ public final class HBaseProtos {
         int index);
   }
   /**
-   * Protobuf type {@code hbase.pb.TableSchema}
-   *
    * <pre>
    **
    * Table Schema
    * Inspired by the rest TableSchema
    * </pre>
+   *
+   * Protobuf type {@code hbase.pb.TableSchema}
    */
-  public static final class TableSchema extends
-      com.google.protobuf.GeneratedMessage
-      implements TableSchemaOrBuilder {
+  public  static final class TableSchema extends
+      com.google.protobuf.GeneratedMessageV3 implements
+      // @@protoc_insertion_point(message_implements:hbase.pb.TableSchema)
+      TableSchemaOrBuilder {
     // Use TableSchema.newBuilder() to construct.
-    private TableSchema(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+    private TableSchema(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
       super(builder);
-      this.unknownFields = builder.getUnknownFields();
     }
-    private TableSchema(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
-
-    private static final TableSchema defaultInstance;
-    public static TableSchema getDefaultInstance() {
-      return defaultInstance;
-    }
-
-    public TableSchema getDefaultInstanceForType() {
-      return defaultInstance;
+    private TableSchema() {
+      attributes_ = java.util.Collections.emptyList();
+      columnFamilies_ = java.util.Collections.emptyList();
+      configuration_ = java.util.Collections.emptyList();
     }
 
-    private final com.google.protobuf.UnknownFieldSet unknownFields;
     @java.lang.Override
     public final com.google.protobuf.UnknownFieldSet
-        getUnknownFields() {
+    getUnknownFields() {
       return this.unknownFields;
     }
     private TableSchema(
         com.google.protobuf.CodedInputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      initFields();
+      this();
       int mutable_bitField0_ = 0;
       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
           com.google.protobuf.UnknownFieldSet.newBuilder();
@@ -989,7 +1029,8 @@ public final class HBaseProtos {
                attributes_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair>();
                mutable_bitField0_ |= 0x00000002;
              }
-              attributes_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.PARSER, extensionRegistry));
+              attributes_.add(
+                  input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.PARSER, extensionRegistry));
               break;
             }
             case 26: {
@@ -997,7 +1038,8 @@ public final class HBaseProtos {
                columnFamilies_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema>();
                mutable_bitField0_ |= 0x00000004;
              }
-              columnFamilies_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.PARSER, extensionRegistry));
+              columnFamilies_.add(
+                  input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.PARSER, extensionRegistry));
               break;
             }
             case 34: {
@@ -1005,7 +1047,8 @@ public final class HBaseProtos {
                configuration_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair>();
                mutable_bitField0_ |= 0x00000008;
              }
-              configuration_.add(input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.PARSER, extensionRegistry));
+              configuration_.add(
+                  input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.PARSER, extensionRegistry));
               break;
             }
           }
@@ -1014,7 +1057,7 @@ public final class HBaseProtos {
         throw e.setUnfinishedMessage(this);
       } catch (java.io.IOException e) {
         throw new com.google.protobuf.InvalidProtocolBufferException(
-            e.getMessage()).setUnfinishedMessage(this);
+            e).setUnfinishedMessage(this);
       } finally {
         if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
           attributes_ = java.util.Collections.unmodifiableList(attributes_);
@@ -1034,30 +1077,14 @@ public final class HBaseProtos {
       return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableSchema_descriptor;
     }
 
-    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
         internalGetFieldAccessorTable() {
       return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableSchema_fieldAccessorTable
           .ensureFieldAccessorsInitialized(
               org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.class, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema.Builder.class);
     }
 
-    public static com.google.protobuf.Parser<TableSchema> PARSER =
-        new com.google.protobuf.AbstractParser<TableSchema>() {
-      public TableSchema parsePartialFrom(
-          com.google.protobuf.CodedInputStream input,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        return new TableSchema(input, extensionRegistry);
-      }
-    };
-
-    @java.lang.Override
-    public com.google.protobuf.Parser<TableSchema> getParserForType() {
-      return PARSER;
-    }
-
     private int bitField0_;
-    // optional .hbase.pb.TableName table_name = 1;
     public static final int TABLE_NAME_FIELD_NUMBER = 1;
     private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_;
     /**
@@ -1070,16 +1097,15 @@ public final class HBaseProtos {
      * <code>optional .hbase.pb.TableName table_name = 1;</code>
      */
     public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() {
-      return tableName_;
+      return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_;
     }
     /**
      * <code>optional .hbase.pb.TableName table_name = 1;</code>
      */
     public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
-      return tableName_;
+      return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_;
     }
 
-    // repeated .hbase.pb.BytesBytesPair attributes = 2;
     public static final int ATTRIBUTES_FIELD_NUMBER = 2;
     private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair> attributes_;
     /**
@@ -1115,7 +1141,6 @@ public final class HBaseProtos {
       return attributes_.get(index);
     }
 
-    // repeated .hbase.pb.ColumnFamilySchema column_families = 3;
     public static final int COLUMN_FAMILIES_FIELD_NUMBER = 3;
     private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema> columnFamilies_;
     /**
@@ -1151,7 +1176,6 @@ public final class HBaseProtos {
       return columnFamilies_.get(index);
     }
 
-    // repeated .hbase.pb.NameStringPair configuration = 4;
     public static final int CONFIGURATION_FIELD_NUMBER = 4;
     private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair> configuration_;
     /**
@@ -1187,16 +1211,11 @@ public final class HBaseProtos {
       return configuration_.get(index);
     }
 
-    private void initFields() {
-      tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
-      attributes_ = java.util.Collections.emptyList();
-      columnFamilies_ = java.util.Collections.emptyList();
-      configuration_ = java.util.Collections.emptyList();
-    }
     private byte memoizedIsInitialized = -1;
     public final boolean isInitialized() {
       byte isInitialized = memoizedIsInitialized;
-      if (isInitialized != -1) return isInitialized == 1;
+      if (isInitialized == 1) return true;
+      if (isInitialized == 0) return false;
 
       if (hasTableName()) {
         if (!getTableName().isInitialized()) {
@@ -1228,9 +1247,8 @@ public final class HBaseProtos {
 
     public void writeTo(com.google.protobuf.CodedOutputStream output)
                         throws java.io.IOException {
-      getSerializedSize();
       if (((bitField0_ & 0x00000001) == 0x00000001)) {
-        output.writeMessage(1, tableName_);
+        output.writeMessage(1, getTableName());
       }
       for (int i = 0; i < attributes_.size(); i++) {
         output.writeMessage(2, attributes_.get(i));
@@ -1241,18 +1259,17 @@ public final class HBaseProtos {
       for (int i = 0; i < configuration_.size(); i++) {
         output.writeMessage(4, configuration_.get(i));
       }
-      getUnknownFields().writeTo(output);
+      unknownFields.writeTo(output);
     }
 
-    private int memoizedSerializedSize = -1;
     public int getSerializedSize() {
-      int size = memoizedSerializedSize;
+      int size = memoizedSize;
       if (size != -1) return size;
 
       size = 0;
       if (((bitField0_ & 0x00000001) == 0x00000001)) {
         size += com.google.protobuf.CodedOutputStream
-          .computeMessageSize(1, tableName_);
+          .computeMessageSize(1, getTableName());
       }
       for (int i = 0; i < attributes_.size(); i++) {
         size += com.google.protobuf.CodedOutputStream
@@ -1266,19 +1283,13 @@ public final class HBaseProtos {
         size += com.google.protobuf.CodedOutputStream
           .computeMessageSize(4, configuration_.get(i));
       }
-      size += getUnknownFields().getSerializedSize();
-      memoizedSerializedSize = size;
+      size += unknownFields.getSerializedSize();
+      memoizedSize = size;
       return size;
     }
 
     private static final long serialVersionUID = 0L;
     @java.lang.Override
-    protected java.lang.Object writeReplace()
-        throws java.io.ObjectStreamException {
-      return super.writeReplace();
-    }
-
-    @java.lang.Override
     public boolean equals(final java.lang.Object obj) {
       if (obj == this) {
        return true;
@@ -1300,12 +1311,10 @@ public final class HBaseProtos {
           .equals(other.getColumnFamiliesList());
       result = result && getConfigurationList()
           .equals(other.getConfigurationList());
-      result = result &&
-          getUnknownFields().equals(other.getUnknownFields());
+      result = result && unknownFields.equals(other.unknownFields);
       return result;
     }
 
-    private int memoizedHashCode = 0;
     @java.lang.Override
     public int hashCode() {
       if (memoizedHashCode != 0) {
@@ -1329,7 +1338,7 @@ public final class HBaseProtos {
         hash = (37 * hash) + CONFIGURATION_FIELD_NUMBER;
         hash = (53 * hash) + getConfigurationList().hashCode();
       }
-      hash = (29 * hash) + getUnknownFields().hashCode();
+      hash = (29 * hash) + unknownFields.hashCode();
       memoizedHashCode = hash;
       return hash;
     }
@@ -1357,67 +1366,79 @@ public final class HBaseProtos {
     }
     public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema 
parseFrom(java.io.InputStream input)
         throws java.io.IOException {
-      return PARSER.parseFrom(input);
+      return com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input);
     }
     public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema parseFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      return PARSER.parseFrom(input, extensionRegistry);
+      return com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input, extensionRegistry);
     }
     public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema parseDelimitedFrom(java.io.InputStream input)
         throws java.io.IOException {
-      return PARSER.parseDelimitedFrom(input);
+      return com.google.protobuf.GeneratedMessageV3
+          .parseDelimitedWithIOException(PARSER, input);
     }
     public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema parseDelimitedFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      return PARSER.parseDelimitedFrom(input, extensionRegistry);
+      return com.google.protobuf.GeneratedMessageV3
+          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
     }
     public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema parseFrom(
         com.google.protobuf.CodedInputStream input)
         throws java.io.IOException {
-      return PARSER.parseFrom(input);
+      return com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input);
     }
     public static org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema parseFrom(
         com.google.protobuf.CodedInputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      return PARSER.parseFrom(input, extensionRegistry);
+      return com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input, extensionRegistry);
     }
 
-    public static Builder newBuilder() { return Builder.create(); }
     public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder() {
+      return DEFAULT_INSTANCE.toBuilder();
+    }
     public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema prototype) {
-      return newBuilder().mergeFrom(prototype);
+      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() {
+      return this == DEFAULT_INSTANCE
+          ? new Builder() : new Builder().mergeFrom(this);
     }
-    public Builder toBuilder() { return newBuilder(this); }
 
     @java.lang.Override
     protected Builder newBuilderForType(
-        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
       Builder builder = new Builder(parent);
       return builder;
     }
     /**
-     * Protobuf type {@code hbase.pb.TableSchema}
-     *
      * <pre>
      **
      * Table Schema
      * Inspired by the rest TableSchema
      * </pre>
+     *
+     * Protobuf type {@code hbase.pb.TableSchema}
      */
     public static final class Builder extends
-        com.google.protobuf.GeneratedMessage.Builder<Builder>
-       implements org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder {
+        com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
+        // @@protoc_insertion_point(builder_implements:hbase.pb.TableSchema)
+        org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchemaOrBuilder {
       public static final com.google.protobuf.Descriptors.Descriptor
           getDescriptor() {
         return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableSchema_descriptor;
       }

-      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
           internalGetFieldAccessorTable() {
         return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableSchema_fieldAccessorTable
             .ensureFieldAccessorsInitialized(
@@ -1430,26 +1451,23 @@ public final class HBaseProtos {
       }
 
       private Builder(
-          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+          com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
         super(parent);
         maybeForceBuilderInitialization();
       }
       private void maybeForceBuilderInitialization() {
-        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+        if (com.google.protobuf.GeneratedMessageV3
+                .alwaysUseFieldBuilders) {
           getTableNameFieldBuilder();
           getAttributesFieldBuilder();
           getColumnFamiliesFieldBuilder();
           getConfigurationFieldBuilder();
         }
       }
-      private static Builder create() {
-        return new Builder();
-      }
-
       public Builder clear() {
         super.clear();
         if (tableNameBuilder_ == null) {
-          tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+          tableName_ = null;
         } else {
           tableNameBuilder_.clear();
         }
@@ -1475,10 +1493,6 @@ public final class HBaseProtos {
         return this;
       }
 
-      public Builder clone() {
-        return create().mergeFrom(buildPartial());
-      }
-
       public com.google.protobuf.Descriptors.Descriptor
           getDescriptorForType() {
         return org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableSchema_descriptor;
@@ -1540,6 +1554,32 @@ public final class HBaseProtos {
         return result;
       }
 
+      public Builder clone() {
+        return (Builder) super.clone();
+      }
+      public Builder setField(
+          com.google.protobuf.Descriptors.FieldDescriptor field,
+          Object value) {
+        return (Builder) super.setField(field, value);
+      }
+      public Builder clearField(
+          com.google.protobuf.Descriptors.FieldDescriptor field) {
+        return (Builder) super.clearField(field);
+      }
+      public Builder clearOneof(
+          com.google.protobuf.Descriptors.OneofDescriptor oneof) {
+        return (Builder) super.clearOneof(oneof);
+      }
+      public Builder setRepeatedField(
+          com.google.protobuf.Descriptors.FieldDescriptor field,
+          int index, Object value) {
+        return (Builder) super.setRepeatedField(field, index, value);
+      }
+      public Builder addRepeatedField(
+          com.google.protobuf.Descriptors.FieldDescriptor field,
+          Object value) {
+        return (Builder) super.addRepeatedField(field, value);
+      }
       public Builder mergeFrom(com.google.protobuf.Message other) {
         if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema) {
           return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema)other);
@@ -1573,7 +1613,7 @@ public final class HBaseProtos {
               attributes_ = other.attributes_;
               bitField0_ = (bitField0_ & ~0x00000002);
               attributesBuilder_ = 
-                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
+                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                    getAttributesFieldBuilder() : null;
             } else {
               attributesBuilder_.addAllMessages(other.attributes_);
@@ -1599,7 +1639,7 @@ public final class HBaseProtos {
               columnFamilies_ = other.columnFamilies_;
               bitField0_ = (bitField0_ & ~0x00000004);
               columnFamiliesBuilder_ = 
-                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
+                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                    getColumnFamiliesFieldBuilder() : null;
             } else {
               columnFamiliesBuilder_.addAllMessages(other.columnFamilies_);
@@ -1625,39 +1665,36 @@ public final class HBaseProtos {
               configuration_ = other.configuration_;
               bitField0_ = (bitField0_ & ~0x00000008);
               configurationBuilder_ = 
-                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
+                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                    getConfigurationFieldBuilder() : null;
             } else {
               configurationBuilder_.addAllMessages(other.configuration_);
             }
           }
         }
-        this.mergeUnknownFields(other.getUnknownFields());
+        this.mergeUnknownFields(other.unknownFields);
+        onChanged();
         return this;
       }
 
       public final boolean isInitialized() {
         if (hasTableName()) {
           if (!getTableName().isInitialized()) {
-            
             return false;
           }
         }
         for (int i = 0; i < getAttributesCount(); i++) {
           if (!getAttributes(i).isInitialized()) {
-            
             return false;
           }
         }
         for (int i = 0; i < getColumnFamiliesCount(); i++) {
           if (!getColumnFamilies(i).isInitialized()) {
-            
             return false;
           }
         }
         for (int i = 0; i < getConfigurationCount(); i++) {
           if (!getConfiguration(i).isInitialized()) {
-            
             return false;
           }
         }
@@ -1673,7 +1710,7 @@ public final class HBaseProtos {
           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
           parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema) e.getUnfinishedMessage();
-          throw e;
+          throw e.unwrapIOException();
         } finally {
           if (parsedMessage != null) {
             mergeFrom(parsedMessage);
@@ -1683,9 +1720,8 @@ public final class HBaseProtos {
       }
       private int bitField0_;
 
-      // optional .hbase.pb.TableName table_name = 1;
-      private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
-      private com.google.protobuf.SingleFieldBuilder<
+      private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_ = null;
+      private com.google.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_;
       /**
        * <code>optional .hbase.pb.TableName table_name = 1;</code>
@@ -1698,7 +1734,7 @@ public final class HBaseProtos {
        */
       public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() {
         if (tableNameBuilder_ == null) {
-          return tableName_;
+          return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_;
         } else {
           return tableNameBuilder_.getMessage();
         }
@@ -1739,6 +1775,7 @@ public final class HBaseProtos {
       public Builder mergeTableName(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName value) {
         if (tableNameBuilder_ == null) {
           if (((bitField0_ & 0x00000001) == 0x00000001) &&
+              tableName_ != null &&
               tableName_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) {
             tableName_ =
               org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
@@ -1757,7 +1794,7 @@ public final class HBaseProtos {
        */
       public Builder clearTableName() {
         if (tableNameBuilder_ == null) {
-          tableName_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
+          tableName_ = null;
           onChanged();
         } else {
           tableNameBuilder_.clear();
@@ -1780,19 +1817,20 @@ public final class HBaseProtos {
         if (tableNameBuilder_ != null) {
           return tableNameBuilder_.getMessageOrBuilder();
         } else {
-          return tableName_;
+          return tableName_ == null ?
+              org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_;
         }
       }
       /**
        * <code>optional .hbase.pb.TableName table_name = 1;</code>
        */
-      private com.google.protobuf.SingleFieldBuilder<
+      private com.google.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder>
          getTableNameFieldBuilder() {
        if (tableNameBuilder_ == null) {
-          tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+          tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder>(
-                  tableName_,
+                  getTableName(),
                   getParentForChildren(),
                   isClean());
           tableName_ = null;
@@ -1800,7 +1838,6 @@ public final class HBaseProtos {
         return tableNameBuilder_;
       }
 
-      // repeated .hbase.pb.BytesBytesPair attributes = 2;
       private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair> attributes_ =
         java.util.Collections.emptyList();
       private void ensureAttributesIsMutable() {
@@ -1810,7 +1847,7 @@ public final class HBaseProtos {
          }
       }
 
-      private com.google.protobuf.RepeatedFieldBuilder<
+      private com.google.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> attributesBuilder_;
 
       /**
@@ -1942,7 +1979,8 @@ public final class HBaseProtos {
           java.lang.Iterable<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair> values) {
         if (attributesBuilder_ == null) {
           ensureAttributesIsMutable();
-          super.addAll(values, attributes_);
+          com.google.protobuf.AbstractMessageLite.Builder.addAll(
+              values, attributes_);
           onChanged();
         } else {
           attributesBuilder_.addAllMessages(values);
@@ -2025,11 +2063,11 @@ public final class HBaseProtos {
            getAttributesBuilderList() {
         return getAttributesFieldBuilder().getBuilderList();
       }
-      private com.google.protobuf.RepeatedFieldBuilder<
+      private com.google.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder>
          getAttributesFieldBuilder() {
        if (attributesBuilder_ == null) {
-          attributesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
+          attributesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder>(
                   attributes_,
                   ((bitField0_ & 0x00000002) == 0x00000002),
@@ -2040,7 +2078,6 @@ public final class HBaseProtos {
         return attributesBuilder_;
       }
 
-      // repeated .hbase.pb.ColumnFamilySchema column_families = 3;
       private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema> columnFamilies_ =
         java.util.Collections.emptyList();
       private void ensureColumnFamiliesIsMutable() {
@@ -2050,7 +2087,7 @@ public final class HBaseProtos {
          }
       }
 
-      private com.google.protobuf.RepeatedFieldBuilder<
+      private com.google.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder> columnFamiliesBuilder_;
 
       /**
@@ -2182,7 +2219,8 @@ public final class HBaseProtos {
           java.lang.Iterable<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema> values) {
         if (columnFamiliesBuilder_ == null) {
           ensureColumnFamiliesIsMutable();
-          super.addAll(values, columnFamilies_);
+          com.google.protobuf.AbstractMessageLite.Builder.addAll(
+              values, columnFamilies_);
           onChanged();
         } else {
           columnFamiliesBuilder_.addAllMessages(values);
@@ -2265,11 +2303,11 @@ public final class HBaseProtos {
            getColumnFamiliesBuilderList() {
         return getColumnFamiliesFieldBuilder().getBuilderList();
       }
-      private com.google.protobuf.RepeatedFieldBuilder<
+      private com.google.protobuf.RepeatedFieldBuilderV3<
           
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema,
 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder,
 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder>
 
           getColumnFamiliesFieldBuilder() {
         if (columnFamiliesBuilder_ == null) {
-          columnFamiliesBuilder_ = new 
com.google.protobuf.RepeatedFieldBuilder<
+          columnFamiliesBuilder_ = new 
com.google.protobuf.RepeatedFieldBuilderV3<
               
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema,
 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema.Builder,
 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchemaOrBuilder>(
                   columnFamilies_,
                   ((bitField0_ & 0x00000004) == 0x00000004),
@@ -2280,7 +2318,6 @@ public final class HBaseProtos {
         return columnFamiliesBuilder_;
       }
 
-      // repeated .hbase.pb.NameStringPair configuration = 4;
       private 
java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair>
 configuration_ =
         java.util.Collections.emptyList();
       private void ensureConfigurationIsMutable() {
@@ -2290,7 +2327,7 @@ public final class HBaseProtos {
          }
       }
 
-      private com.google.protobuf.RepeatedFieldBuilder<
+      private com.google.protobuf.RepeatedFieldBuilderV3<
           
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair, 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder,
 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder>
 configurationBuilder_;
 
       /**
@@ -2422,7 +2459,8 @@ public final class HBaseProtos {
           java.lang.Iterable<? extends 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair> 
values) {
         if (configurationBuilder_ == null) {
           ensureConfigurationIsMutable();
-          super.addAll(values, configuration_);
+          com.google.protobuf.AbstractMessageLite.Builder.addAll(
+              values, configuration_);
           onChanged();
         } else {
           configurationBuilder_.addAllMessages(values);
@@ -2505,11 +2543,11 @@ public final class HBaseProtos {
            getConfigurationBuilderList() {
         return getConfigurationFieldBuilder().getBuilderList();
       }
-      private com.google.protobuf.RepeatedFieldBuilder<
+      private com.google.protobuf.RepeatedFieldBuilderV3<
           
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair, 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder,
 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder>
 
           getConfigurationFieldBuilder() {
         if (configurationBuilder_ == null) {
-          configurationBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
+          configurationBuilder_ = new 
com.google.protobuf.RepeatedFieldBuilderV3<
               
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair, 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPair.Builder,
 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.NameStringPairOrBuilder>(
                   configuration_,
                   ((bitField0_ & 0x00000008) == 0x00000008),
@@ -2519,76 +2557,105 @@ public final class HBaseProtos {
         }
         return configurationBuilder_;
       }
+      public final Builder setUnknownFields(
+          final com.google.protobuf.UnknownFieldSet unknownFields) {
+        return super.setUnknownFields(unknownFields);
+      }
+
+      public final Builder mergeUnknownFields(
+          final com.google.protobuf.UnknownFieldSet unknownFields) {
+        return super.mergeUnknownFields(unknownFields);
+      }
+
 
       // @@protoc_insertion_point(builder_scope:hbase.pb.TableSchema)
     }
 
+    // @@protoc_insertion_point(class_scope:hbase.pb.TableSchema)
+    private static final 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema 
DEFAULT_INSTANCE;
     static {
-      defaultInstance = new TableSchema(true);
-      defaultInstance.initFields();
+      DEFAULT_INSTANCE = new 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema();
+    }
+
+    public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema 
getDefaultInstance() {
+      return DEFAULT_INSTANCE;
+    }
+
+    @java.lang.Deprecated public static final 
com.google.protobuf.Parser<TableSchema>
+        PARSER = new com.google.protobuf.AbstractParser<TableSchema>() {
+      public TableSchema parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+          return new TableSchema(input, extensionRegistry);
+      }
+    };
+
+    public static com.google.protobuf.Parser<TableSchema> parser() {
+      return PARSER;
+    }
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<TableSchema> getParserForType() {
+      return PARSER;
+    }
+
+    public 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema 
getDefaultInstanceForType() {
+      return DEFAULT_INSTANCE;
     }
 
-    // @@protoc_insertion_point(class_scope:hbase.pb.TableSchema)
   }
 
-  public interface TableStateOrBuilder
-      extends com.google.protobuf.MessageOrBuilder {
+  public interface TableStateOrBuilder extends
+      // @@protoc_insertion_point(interface_extends:hbase.pb.TableState)
+      com.google.protobuf.MessageOrBuilder {
 
-    // required .hbase.pb.TableState.State state = 1;
     /**
-     * <code>required .hbase.pb.TableState.State state = 1;</code>
-     *
      * <pre>
      * This is the table's state.
      * </pre>
+     *
+     * <code>required .hbase.pb.TableState.State state = 1;</code>
      */
     boolean hasState();
     /**
-     * <code>required .hbase.pb.TableState.State state = 1;</code>
-     *
      * <pre>
      * This is the table's state.
      * </pre>
+     *
+     * <code>required .hbase.pb.TableState.State state = 1;</code>
      */
     
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState.State 
getState();
   }
   /**
-   * Protobuf type {@code hbase.pb.TableState}
-   *
    * <pre>
    ** Denotes state of the table 
    * </pre>
+   *
+   * Protobuf type {@code hbase.pb.TableState}
    */
-  public static final class TableState extends
-      com.google.protobuf.GeneratedMessage
-      implements TableStateOrBuilder {
+  public  static final class TableState extends
+      com.google.protobuf.GeneratedMessageV3 implements
+      // @@protoc_insertion_point(message_implements:hbase.pb.TableState)
+      TableStateOrBuilder {
     // Use TableState.newBuilder() to construct.
-    private TableState(com.google.protobuf.GeneratedMessage.Builder<?> 
builder) {
+    private TableState(com.google.protobuf.GeneratedMessageV3.Builder<?> 
builder) {
       super(builder);
-      this.unknownFields = builder.getUnknownFields();
-    }
-    private TableState(boolean noInit) { this.unknownFields = 
com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
-
-    private static final TableState defaultInstance;
-    public static TableState getDefaultInstance() {
-      return defaultInstance;
     }
-
-    public TableState getDefaultInstanceForType() {
-      return defaultInstance;
+    private TableState() {
+      state_ = 0;
     }
 
-    private final com.google.protobuf.UnknownFieldSet unknownFields;
     @java.lang.Override
     public final com.google.protobuf.UnknownFieldSet
-        getUnknownFields() {
+    getUnknownFields() {
       return this.unknownFields;
     }
     private TableState(
         com.google.protobuf.CodedInputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws com.google.protobuf.InvalidProtocolBufferException {
-      initFields();
+      this();
       int mutable_bitField0_ = 0;
       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
           com.google.protobuf.UnknownFieldSet.newBuilder();
@@ -2614,7 +2681,7 @@ public final class HBaseProtos {
                 unknownFields.mergeVarintField(1, rawValue);
               } else {
                 bitField0_ |= 0x00000001;
-                state_ = value;
+                state_ = rawValue;
               }
               break;
             }
@@ -2624,7 +2691,7 @@ public final class HBaseProtos {
         throw e.setUnfinishedMessage(this);
       } catch (java.io.IOException e) {
         throw new com.google.protobuf.InvalidProtocolBufferException(
-            e.getMessage()).setUnfinishedMessage(this);
+            e).setUnfinishedMessage(this);
       } finally {
         this.unknownFields = unknownFields.build();
         makeExtensionsImmutable();
@@ -2635,53 +2702,38 @@ public final class HBaseProtos {
       return 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableState_descriptor;
     }
 
-    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
         internalGetFieldAccessorTable() {
       return 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableState_fieldAccessorTable
           .ensureFieldAccessorsInitialized(
               
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState.class, 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState.Builder.class);
     }
 
-    public static com.google.protobuf.Parser<TableState> PARSER =
-        new com.google.protobuf.AbstractParser<TableState>() {
-      public TableState parsePartialFrom(
-          com.google.protobuf.CodedInputStream input,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        return new TableState(input, extensionRegistry);
-      }
-    };
-
-    @java.lang.Override
-    public com.google.protobuf.Parser<TableState> getParserForType() {
-      return PARSER;
-    }
-
     /**
-     * Protobuf enum {@code hbase.pb.TableState.State}
-     *
      * <pre>
      * Table's current state
      * </pre>
+     *
+     * Protobuf enum {@code hbase.pb.TableState.State}
      */
     public enum State
         implements com.google.protobuf.ProtocolMessageEnum {
       /**
        * <code>ENABLED = 0;</code>
        */
-      ENABLED(0, 0),
+      ENABLED(0),
       /**
        * <code>DISABLED = 1;</code>
        */
-      DISABLED(1, 1),
+      DISABLED(1),
       /**
        * <code>DISABLING = 2;</code>
        */
-      DISABLING(2, 2),
+      DISABLING(2),
       /**
        * <code>ENABLING = 3;</code>
        */
-      ENABLING(3, 3),
+      ENABLING(3),
       ;
 
       /**
@@ -2702,9 +2754,19 @@ public final class HBaseProtos {
       public static final int ENABLING_VALUE = 3;
 
 
-      public final int getNumber() { return value; }
+      public final int getNumber() {
+        return value;
+      }
 
+      /**
+       * @deprecated Use {@link #forNumber(int)} instead.
+       */
+      @java.lang.Deprecated
       public static State valueOf(int value) {
+        return forNumber(value);
+      }
+
+      public static State forNumber(int value) {
         switch (value) {
           case 0: return ENABLED;
           case 1: return DISABLED;
@@ -2718,17 +2780,17 @@ public final class HBaseProtos {
           internalGetValueMap() {
         return internalValueMap;
       }
-      private static com.google.protobuf.Internal.EnumLiteMap<State>
-          internalValueMap =
+      private static final com.google.protobuf.Internal.EnumLiteMap<
+          State> internalValueMap =
             new com.google.protobuf.Internal.EnumLiteMap<State>() {
               public State findValueByNumber(int number) {
-                return State.valueOf(number);
+                return State.forNumber(number);
               }
             };
 
       public final com.google.protobuf.Descriptors.EnumValueDescriptor
           getValueDescriptor() {
-        return getDescriptor().getValues().get(index);
+        return getDescriptor().getValues().get(ordinal());
       }
       public final com.google.protobuf.Descriptors.EnumDescriptor
           getDescriptorForType() {
@@ -2750,11 +2812,9 @@ public final class HBaseProtos {
         return VALUES[desc.getIndex()];
       }
 
-      private final int index;
       private final int value;
 
-      private State(int index, int value) {
-        this.index = index;
+      private State(int value) {
         this.value = value;
       }
 
@@ -2762,37 +2822,35 @@ public final class HBaseProtos {
     }
 
     private int bitField0_;
-    // required .hbase.pb.TableState.State state = 1;
     public static final int STATE_FIELD_NUMBER = 1;
-    private 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState.State 
state_;
+    private int state_;
     /**
-     * <code>required .hbase.pb.TableState.State state = 1;</code>
-     *
      * <pre>
      * This is the table's state.
      * </pre>
+     *
+     * <code>required .hbase.pb.TableState.State state = 1;</code>
      */
     public boolean hasState() {
       return ((bitField0_ & 0x00000001) == 0x00000001);
     }
     /**
-     * <code>required .hbase.pb.TableState.State state = 1;</code>
-     *
      * <pre>
      * This is the table's state.
      * </pre>
+     *
+     * <code>required .hbase.pb.TableState.State state = 1;</code>
      */
     public 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState.State 
getState() {
-      return state_;
+      
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState.State 
result = 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState.State.valueOf(state_);
+      return result == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState.State.ENABLED
 : result;
     }
 
-    private void initFields() {
-      state_ = 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState.State.ENABLED;
-    }
     private byte memoizedIsInitialized = -1;
     public final boolean isInitialized() {
       byte isInitialized = memoizedIsInitialized;
-      if (isInitialized != -1) return isInitialized == 1;
+      if (isInitialized == 1) return true;
+      if (isInitialized == 0) return false;
 
       if (!hasState()) {
         memoizedIsInitialized = 0;
@@ -2804,36 +2862,28 @@ public final class HBaseProtos {
 
     public void writeTo(com.google.protobuf.CodedOutputStream output)
                         throws java.io.IOException {
-      getSerializedSize();
       if (((bitField0_ & 0x00000001) == 0x00000001)) {
-        output.writeEnum(1, state_.getNumber());
+        output.writeEnum(1, state_);
       }
-      getUnknownFields().writeTo(output);
+      unknownFields.writeTo(output);
     }
 
-    private int memoizedSerializedSize = -1;
     public int getSerializedSize() {
-      int size = memoizedSerializedSize;
+      int size = memoizedSize;
       if (size != -1) return size;
 
       size = 0;
       if (((bitField0_ & 0x00000001) == 0x00000001)) {
         size += com.google.protobuf.CodedOutputStream
-          .computeEnumSize(1, state_.getNumber());
+          .computeEnumSize(1, state_);
       }
-      size += getUnknownFields().getSerializedSize();
-      memoizedSerializedSize = size;
+      size += unknownFields.getSerializedSize();
+      memoizedSize = size;
       return size;
     }
 
     private static final long serialVersionUID = 0L;
     @java.lang.Override
-    protected java.lang.Object writeReplace()
-        throws java.io.ObjectStreamException {
-      return super.writeReplace();
-    }
-
-    @java.lang.Override
     public boolean equals(final java.lang.Object obj) {
       if (obj == this) {
        return true;
@@ -2846,15 +2896,12 @@ public final class HBaseProtos {
       boolean result = true;
       result = result && (hasState() == other.hasState());
       if (hasState()) {
-        result = result &&
-            (getState() == other.getState());
+        result = result && state_ == other.state_;
       }
-      result = result &&
-          getUnknownFields().equals(other.getUnknownFields());
+      result = result && unknownFields.equals(other.unknownFields);
       return result;
     }
 
-    private int memoizedHashCode = 0;
     @java.lang.Override
     public int hashCode() {
       if (memoizedHashCode != 0) {
@@ -2864,9 +2911,9 @@ public final class HBaseProtos {
       hash = (19 * hash) + getDescriptorForType().hashCode();
       if (hasState()) {
         hash = (37 * hash) + STATE_FIELD_NUMBER;
-        hash = (53 * hash) + hashEnum(getState());
+        hash = (53 * hash) + state_;
       }
-      hash = (29 * hash) + getUnknownFields().hashCode();
+      hash = (29 * hash) + unknownFields.hashCode();
       memoizedHashCode = hash;
       return hash;
     }
@@ -2894,65 +2941,77 @@ public final class HBaseProtos {
     }
     public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState 
parseFrom(java.io.InputStream input)
         throws java.io.IOException {
-      return PARSER.parseFrom(input);
+      return com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input);
     }
     public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState 
parseFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      return PARSER.parseFrom(input, extensionRegistry);
+      return com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input, extensionRegistry);
     }
     public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState 
parseDelimitedFrom(java.io.InputStream input)
         throws java.io.IOException {
-      return PARSER.parseDelimitedFrom(input);
+      return com.google.protobuf.GeneratedMessageV3
+          .parseDelimitedWithIOException(PARSER, input);
     }
     public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState 
parseDelimitedFrom(
         java.io.InputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      return PARSER.parseDelimitedFrom(input, extensionRegistry);
+      return com.google.protobuf.GeneratedMessageV3
+          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
     }
     public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState 
parseFrom(
         com.google.protobuf.CodedInputStream input)
         throws java.io.IOException {
-      return PARSER.parseFrom(input);
+      return com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input);
     }
     public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState 
parseFrom(
         com.google.protobuf.CodedInputStream input,
         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
         throws java.io.IOException {
-      return PARSER.parseFrom(input, extensionRegistry);
+      return com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input, extensionRegistry);
     }
 
-    public static Builder newBuilder() { return Builder.create(); }
     public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder() {
+      return DEFAULT_INSTANCE.toBuilder();
+    }
     public static Builder 
newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState
 prototype) {
-      return newBuilder().mergeFrom(prototype);
+      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() {
+      return this == DEFAULT_INSTANCE
+          ? new Builder() : new Builder().mergeFrom(this);
     }
-    public Builder toBuilder() { return newBuilder(this); }
 
     @java.lang.Override
     protected Builder newBuilderForType(
-        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
       Builder builder = new Builder(parent);
       return builder;
     }
     /**
-     * Protobuf type {@code hbase.pb.TableState}
-     *
      * <pre>
      ** Denotes state of the table 
      * </pre>
+     *
+     * Protobuf type {@code hbase.pb.TableState}
      */
     public static final class Builder extends
-        com.google.protobuf.GeneratedMessage.Builder<Builder>
-       implements 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableStateOrBuilder
 {
+        com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
+        // @@protoc_insertion_point(builder_implements:hbase.pb.TableState)
+        
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableStateOrBuilder
 {
       public static final com.google.protobuf.Descriptors.Descriptor
           getDescriptor() {
         return 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableState_descriptor;
       }
 
-      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
           internalGetFieldAccessorTable() {
         return 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableState_fieldAccessorTable
             .ensureFieldAccessorsInitialized(
@@ -2965,29 +3024,22 @@ public final class HBaseProtos {
       }
 
       private Builder(
-          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+          com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
         super(parent);
         maybeForceBuilderInitialization();
       }
       private void maybeForceBuilderInitialization() {
-        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+        if (com.google.protobuf.GeneratedMessageV3
+                .alwaysUseFieldBuilders) {
         }
       }
-      private static Builder create() {
-        return new Builder();
-      }
-
       public Builder clear() {
         super.clear();
-        state_ = 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState.State.ENABLED;
+        state_ = 0;
         bitField0_ = (bitField0_ & ~0x00000001);
         return this;
       }
 
-      public Builder clone() {
-        return create().mergeFrom(buildPartial());
-      }
-
       public com.google.protobuf.Descriptors.Descriptor
           getDescriptorForType() {
         return 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.internal_static_hbase_pb_TableState_descriptor;
@@ -3018,6 +3070,32 @@ public final class HBaseProtos {
         return result;
       }
 
+      public Builder clone() {
+        return (Builder) super.clone();
+      }
+      public Builder setField(
+          com.google.protobuf.Descriptors.FieldDescriptor field,
+          Object value) {
+        return (Builder) super.setField(field, value);
+      }
+      public Builder clearField(
+          com.google.protobuf.Descriptors.FieldDescriptor field) {
+        return (Builder) super.clearField(field);
+      }
+      public Builder clearOneof(
+          com.google.protobuf.Descriptors.OneofDescriptor oneof) {
+        return (Builder) super.clearOneof(oneof);
+      }
+      public Builder setRepeatedField(
+          com.google.protobuf.Descriptors.FieldDescriptor field,
+          int index, Object value) {
+        return (Builder) super.setRepeatedField(field, index, value);
+      }
+      public Builder addRepeatedField(
+          com.google.protobuf.Descriptors.FieldDescriptor field,
+          Object value) {
+        return (Builder) super.addRepeatedField(field, value);
+      }
       public Builder mergeFrom(com.google.protobuf.Message other) {
         if (other instanceof 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState) {
           return 
mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState)other);
@@ -3032,13 +3110,13 @@ public final class HBaseProtos {
         if (other.hasState()) {
           setState(other.getState());
         }
-        this.mergeUnknownFields(other.getUnknownFields());
+        this.mergeUnknownFields(other.unknownFields);
+        onChanged();
         return this;
       }
 
       public final boolean isInitialized() {
         if (!hasState()) {
-          
           return false;
         }
         return true;
@@ -3053,7 +3131,7 @@ public final class HBaseProtos {
           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
           parsedMessage = 
(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState) 
e.getUnfinishedMessage();
-          throw e;
+          throw e.unwrapIOException();
         } finally {
           if (parsedMessage != null) {
             mergeFrom(parsedMessage);
@@ -3063,74 +3141,111 @@ public final class HBaseProtos {
       }
       private int bitField0_;
 
-      // required .hbase.pb.TableState.State state = 1;
-      private 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState.State 
state_ = 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState.State.ENABLED;
+      private int state_ = 0;
       /**
-       * <code>required .hbase.pb.TableState.State state = 1;</code>
-       *
        * <pre>
        * This is the table's state.
        * </pre>
+       *
+       * <code>required .hbase.pb.TableState.State state = 1;</code>
        */
       public boolean hasState() {
         return ((bitField0_ & 0x00000001) == 0x00000001);
       }
       /**
-       * <code>required .hbase.pb.TableState.State state = 1;</code>
-       *
        * <pre>
        * This is the table's state.
        * </pre>
+       *
+       * <code>required .hbase.pb.TableState.State state = 1;</code>
        */
       public 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState.State 
getState() {
-        return state_;
+        
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState.State 
result = 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState.State.valueOf(state_);
+        return result == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState.State.ENABLED
 : result;
       }
       /**
-       * <code>required .hbase.pb.TableState.State state = 1;</code>
-       *
        * <pre>
        * This is the table's state.
        * </pre>
+       *
+       * <code>required .hbase.pb.TableState.State state = 1;</code>
        */
       public Builder 
setState(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState.State
 value) {
         if (value == null) {
           throw new NullPointerException();
         }
         bitField0_ |= 0x00000001;
-        state_ = value;
+        state_ = value.getNumber();
         onChanged();
         return this;
       }
       /**
-       * <code>required .hbase.pb.TableState.State state = 1;</code>
-       *
        * <pre>
        * This is the table's state.
        * </pre>
+       *
+       * <code>required .hbase.pb.TableState.State state = 1;</code>
        */
       public Builder clearState() {
         bitField0_ = (bitField0_ & ~0x00000001);
-        state_ = 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState.State.ENABLED;
+        state_ = 0;
         onChanged();
         return this;
       }
+      public final Builder setUnknownFields(
+          final com.google.protobuf.UnknownFieldSet unknownFields) {
+        return super.setUnknownFields(unknownFields);
+      }
+
+      public final Builder mergeUnknownFields(
+          final com.google.protobuf.UnknownFieldSet unknownFields) {
+        return super.mergeUnknownFields(unknownFields);
+      }
+
 
       // @@protoc_insertion_point(builder_scope:hbase.pb.TableState)
     }
 
+    // @@protoc_insertion_point(class_scope:hbase.pb.TableState)
+    private static final 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState 
DEFAULT_INSTANCE;
     static {
-      defaultInstance = new TableState(true);
-      defaultInstance.initFields();
+      DEFAULT_INSTANCE = new 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState();
     }
 
-    // @@protoc_insertion_point(class_scope:hbase.pb.TableState)
-  }
-
-  public interface ColumnFamilySchemaOrBuilder
-      extends com.google.protobuf.MessageOrBuilder {
+    public static 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState 
getDefaultInstance() {
+      return DEFAULT_INSTANCE;
+    }
 
-    // required bytes name = 1;
-    /**
+    @java.lang.Deprecated public static final 
com.google.protobuf.Parser<TableState>
+        PARSER = new com.google.protobuf.AbstractParser<TableState>() {
+      public TableState parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+          return new TableState(input, extensionRegistry);
+      }
+    };
+
+    public static com.google.protobuf.Parser<TableState> parser() {
+      return PARSER;
+    }
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<TableState> getParserForType() {
+      return PARSER;
+    }
+
+    public 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState 
getDefaultInstanceForType() {
+      return DEFAULT_INSTANCE;
+    }
+
+  }
+
+  public interface ColumnFamilySchemaOrBuilder extends
+      // @@protoc_insertion_point(interface_extends:hbase.pb.ColumnFamilySchema)
+      com.google.protobuf.MessageOrBuilder {
+
+    /**
      * <code>required bytes name = 1;</code>
      */
     boolean hasName();
@@ -3139,7 +3254,6 @@ public final class HBaseProtos {
      */
     com.google.protobuf.ByteString getName();
 
-    // repeated .hbase.pb.BytesBytesPair attributes = 2;
     /**
      * <code>repeated .hbase.pb.BytesBytesPair attributes = 2;</code>
      */
@@ -3164,7 +3278,6 @@ public final class HBaseProtos {
     
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder
 getAttributesOrBuilder(
         int index);
 
-    // repeated .hbase.pb.NameStringPair configuration = 3;
     /**
      * <code>repeated .hbase.pb.NameStringPair configuration = 3;</code>
      */
@@ -3190,44 +3303,38 @@ public final class HBaseProtos {
         int index);
   }
   /**
-   * Protobuf type {@code hbase.pb.ColumnFamilySchema}
-   *
    * <pre>
    **
    * Column Family Schema
    * Inspired by the rest ColumSchemaMessage
    * </pre>
+   *
+   * Protobuf type {@code hbase.pb.ColumnFamilySchema}
    */
-  public static final class ColumnFamilySchema extends
-      com.google.protobuf.GeneratedMessage
-      implements ColumnFamilySchemaOrBuilder {
+  public  static final class ColumnFamilySchema extends
+      com.google.protobuf.GeneratedMessageV3 implements
+      // @@protoc_insertion_point(message_implements:hbase.pb.ColumnFamilySchema)
+      ColumnFamilySchemaOrBuilder {
     // Use ColumnFamilySchema.newBuilder() to construct.
-    private ColumnFamilySchema(com.google.protobuf.GeneratedMessage.Builder<?> 
builder) {
+    private 
ColumnFamilySchema(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
       super(builder);
-      this.unknownFields = builder.getUnknownFields();
-    }
-    private ColumnFamilySchema(boolean noInit) { this.unknownFields = 
com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
-
-    private static final ColumnFamilySchema defaultInstance;
-    public static ColumnFamilySchema getDefaultInstance() {
-      return defaultInstance;
     }
-
-    public ColumnFamilySchema getDefaultInstanceForType() {
-      return defaultInstance;
+    private ColumnFamilySchema() {
+      name_ = com.google.protobuf.ByteString.EMPTY;
+      attributes_ = java.util.Collections.emptyL

<TRUNCATED>
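
A minimal, hypothetical caller-side sketch (not part of the commit) of the regenerated TableState API visible in the diff above, assuming the protobuf-3 style accessors it introduces: State.forNumber(int) replacing the deprecated State.valueOf(int), the builder storing the enum as its wire number, and getState() falling back to the field default when the stored number is unrecognized. The class name TableStateRoundTrip and the round-trip flow are illustrative only.

    import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableState;

    public class TableStateRoundTrip {
      public static void main(String[] args) throws Exception {
        // Build a TableState message; per the regenerated builder, setState()
        // records the enum's int number internally.
        TableState enabling = TableState.newBuilder()
            .setState(TableState.State.ENABLING)
            .build();

        // Serialize and re-parse with the regenerated static parse methods.
        byte[] wire = enabling.toByteArray();
        TableState parsed = TableState.parseFrom(wire);

        // forNumber(int) is the non-deprecated lookup introduced in this diff.
        TableState.State byNumber =
            TableState.State.forNumber(parsed.getState().getNumber());
        System.out.println(parsed.getState() + " == " + byNumber);
      }
    }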
