http://git-wip-us.apache.org/repos/asf/hbase/blob/b4a729ed/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ClusterStatusProtos.java ---------------------------------------------------------------------- diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ClusterStatusProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ClusterStatusProtos.java index c9e34d9..f945184 100644 --- a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ClusterStatusProtos.java +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/ClusterStatusProtos.java @@ -6,12 +6,18 @@ package org.apache.hadoop.hbase.shaded.protobuf.generated; public final class ClusterStatusProtos { private ClusterStatusProtos() {} public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistryLite registry) { + } + + public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions( + (com.google.protobuf.ExtensionRegistryLite) registry); } - public interface RegionStateOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface RegionStateOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.RegionState) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.RegionInfo region_info = 1; /** * <code>required .hbase.pb.RegionInfo region_info = 1;</code> */ @@ -25,7 +31,6 @@ public final class ClusterStatusProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder(); - // required .hbase.pb.RegionState.State state = 2; /** * <code>required .hbase.pb.RegionState.State state = 2;</code> */ @@ -35,7 +40,6 @@ public final class ClusterStatusProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.State getState(); - // optional uint64 stamp = 3; /** * <code>optional uint64 stamp = 3;</code> */ @@ -48,36 +52,29 @@ public final class ClusterStatusProtos { /** * Protobuf type {@code hbase.pb.RegionState} */ - public static final class RegionState extends - com.google.protobuf.GeneratedMessage - implements RegionStateOrBuilder { + public static final class RegionState extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.RegionState) + RegionStateOrBuilder { // Use RegionState.newBuilder() to construct. 
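// A hedged aside, not part of the commit: the new registerAllExtensions
// overload above takes an ExtensionRegistryLite, and the pre-existing
// ExtensionRegistry overload now just upcasts and delegates to it, so caller
// code compiled against either signature keeps working. A minimal sketch:
//
//   com.google.protobuf.ExtensionRegistry registry =
//       com.google.protobuf.ExtensionRegistry.newInstance();
//   ClusterStatusProtos.registerAllExtensions(registry); // resolves to the
//                                                        // ExtensionRegistry overload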
- private RegionState(com.google.protobuf.GeneratedMessage.Builder<?> builder) { + private RegionState(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private RegionState(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final RegionState defaultInstance; - public static RegionState getDefaultInstance() { - return defaultInstance; } - - public RegionState getDefaultInstanceForType() { - return defaultInstance; + private RegionState() { + state_ = 0; + stamp_ = 0L; } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private RegionState( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -116,7 +113,7 @@ public final class ClusterStatusProtos { unknownFields.mergeVarintField(2, rawValue); } else { bitField0_ |= 0x00000002; - state_ = value; + state_ = rawValue; } break; } @@ -131,7 +128,7 @@ public final class ClusterStatusProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -142,282 +139,277 @@ public final class ClusterStatusProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.internal_static_hbase_pb_RegionState_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.internal_static_hbase_pb_RegionState_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.Builder.class); } - public static com.google.protobuf.Parser<RegionState> PARSER = - new com.google.protobuf.AbstractParser<RegionState>() { - public RegionState parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new RegionState(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser<RegionState> getParserForType() { - return PARSER; - } - /** * Protobuf enum {@code hbase.pb.RegionState.State} */ public enum State implements com.google.protobuf.ProtocolMessageEnum { /** - * <code>OFFLINE = 0;</code> - * * <pre> * region is in an offline state * </pre> + * + * <code>OFFLINE = 0;</code> */ - OFFLINE(0, 0), + OFFLINE(0), /** - * <code>PENDING_OPEN = 1;</code> - * * <pre> * sent rpc to server to open but has not begun * </pre> + * + * <code>PENDING_OPEN = 1;</code> */ - PENDING_OPEN(1, 1), + PENDING_OPEN(1), /** - * <code>OPENING = 2;</code> - * * <pre> * server has begun to open but not yet done * </pre> + * + * 
<code>OPENING = 2;</code> */ - OPENING(2, 2), + OPENING(2), /** - * <code>OPEN = 3;</code> - * * <pre> * server opened region and updated meta * </pre> + * + * <code>OPEN = 3;</code> */ - OPEN(3, 3), + OPEN(3), /** - * <code>PENDING_CLOSE = 4;</code> - * * <pre> * sent rpc to server to close but has not begun * </pre> + * + * <code>PENDING_CLOSE = 4;</code> */ - PENDING_CLOSE(4, 4), + PENDING_CLOSE(4), /** - * <code>CLOSING = 5;</code> - * * <pre> * server has begun to close but not yet done * </pre> + * + * <code>CLOSING = 5;</code> */ - CLOSING(5, 5), + CLOSING(5), /** - * <code>CLOSED = 6;</code> - * * <pre> * server closed region and updated meta * </pre> + * + * <code>CLOSED = 6;</code> */ - CLOSED(6, 6), + CLOSED(6), /** - * <code>SPLITTING = 7;</code> - * * <pre> * server started split of a region * </pre> + * + * <code>SPLITTING = 7;</code> */ - SPLITTING(7, 7), + SPLITTING(7), /** - * <code>SPLIT = 8;</code> - * * <pre> * server completed split of a region * </pre> + * + * <code>SPLIT = 8;</code> */ - SPLIT(8, 8), + SPLIT(8), /** - * <code>FAILED_OPEN = 9;</code> - * * <pre> * failed to open, and won't retry any more * </pre> + * + * <code>FAILED_OPEN = 9;</code> */ - FAILED_OPEN(9, 9), + FAILED_OPEN(9), /** - * <code>FAILED_CLOSE = 10;</code> - * * <pre> * failed to close, and won't retry any more * </pre> + * + * <code>FAILED_CLOSE = 10;</code> */ - FAILED_CLOSE(10, 10), + FAILED_CLOSE(10), /** - * <code>MERGING = 11;</code> - * * <pre> * server started merge a region * </pre> + * + * <code>MERGING = 11;</code> */ - MERGING(11, 11), + MERGING(11), /** - * <code>MERGED = 12;</code> - * * <pre> * server completed merge of a region * </pre> + * + * <code>MERGED = 12;</code> */ - MERGED(12, 12), + MERGED(12), /** - * <code>SPLITTING_NEW = 13;</code> - * * <pre> * new region to be created when RS splits a parent * </pre> + * + * <code>SPLITTING_NEW = 13;</code> */ - SPLITTING_NEW(13, 13), + SPLITTING_NEW(13), /** - * <code>MERGING_NEW = 14;</code> - * * <pre> * region but hasn't be created yet, or master doesn't * know it's already created * </pre> + * + * <code>MERGING_NEW = 14;</code> */ - MERGING_NEW(14, 14), + MERGING_NEW(14), ; /** - * <code>OFFLINE = 0;</code> - * * <pre> * region is in an offline state * </pre> + * + * <code>OFFLINE = 0;</code> */ public static final int OFFLINE_VALUE = 0; /** - * <code>PENDING_OPEN = 1;</code> - * * <pre> * sent rpc to server to open but has not begun * </pre> + * + * <code>PENDING_OPEN = 1;</code> */ public static final int PENDING_OPEN_VALUE = 1; /** - * <code>OPENING = 2;</code> - * * <pre> * server has begun to open but not yet done * </pre> + * + * <code>OPENING = 2;</code> */ public static final int OPENING_VALUE = 2; /** - * <code>OPEN = 3;</code> - * * <pre> * server opened region and updated meta * </pre> + * + * <code>OPEN = 3;</code> */ public static final int OPEN_VALUE = 3; /** - * <code>PENDING_CLOSE = 4;</code> - * * <pre> * sent rpc to server to close but has not begun * </pre> + * + * <code>PENDING_CLOSE = 4;</code> */ public static final int PENDING_CLOSE_VALUE = 4; /** - * <code>CLOSING = 5;</code> - * * <pre> * server has begun to close but not yet done * </pre> + * + * <code>CLOSING = 5;</code> */ public static final int CLOSING_VALUE = 5; /** - * <code>CLOSED = 6;</code> - * * <pre> * server closed region and updated meta * </pre> + * + * <code>CLOSED = 6;</code> */ public static final int CLOSED_VALUE = 6; /** - * <code>SPLITTING = 7;</code> - * * <pre> * server started split of a region * </pre> + * + * <code>SPLITTING 
= 7;</code> */ public static final int SPLITTING_VALUE = 7; /** - * <code>SPLIT = 8;</code> - * * <pre> * server completed split of a region * </pre> + * + * <code>SPLIT = 8;</code> */ public static final int SPLIT_VALUE = 8; /** - * <code>FAILED_OPEN = 9;</code> - * * <pre> * failed to open, and won't retry any more * </pre> + * + * <code>FAILED_OPEN = 9;</code> */ public static final int FAILED_OPEN_VALUE = 9; /** - * <code>FAILED_CLOSE = 10;</code> - * * <pre> * failed to close, and won't retry any more * </pre> + * + * <code>FAILED_CLOSE = 10;</code> */ public static final int FAILED_CLOSE_VALUE = 10; /** - * <code>MERGING = 11;</code> - * * <pre> * server started merge a region * </pre> + * + * <code>MERGING = 11;</code> */ public static final int MERGING_VALUE = 11; /** - * <code>MERGED = 12;</code> - * * <pre> * server completed merge of a region * </pre> + * + * <code>MERGED = 12;</code> */ public static final int MERGED_VALUE = 12; /** - * <code>SPLITTING_NEW = 13;</code> - * * <pre> * new region to be created when RS splits a parent * </pre> + * + * <code>SPLITTING_NEW = 13;</code> */ public static final int SPLITTING_NEW_VALUE = 13; /** - * <code>MERGING_NEW = 14;</code> - * * <pre> * region but hasn't be created yet, or master doesn't * know it's already created * </pre> + * + * <code>MERGING_NEW = 14;</code> */ public static final int MERGING_NEW_VALUE = 14; - public final int getNumber() { return value; } + public final int getNumber() { + return value; + } + /** + * @deprecated Use {@link #forNumber(int)} instead. + */ + @java.lang.Deprecated public static State valueOf(int value) { + return forNumber(value); + } + + public static State forNumber(int value) { switch (value) { case 0: return OFFLINE; case 1: return PENDING_OPEN; @@ -442,17 +434,17 @@ public final class ClusterStatusProtos { internalGetValueMap() { return internalValueMap; } - private static com.google.protobuf.Internal.EnumLiteMap<State> - internalValueMap = + private static final com.google.protobuf.Internal.EnumLiteMap< + State> internalValueMap = new com.google.protobuf.Internal.EnumLiteMap<State>() { public State findValueByNumber(int number) { - return State.valueOf(number); + return State.forNumber(number); } }; public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() { - return getDescriptor().getValues().get(index); + return getDescriptor().getValues().get(ordinal()); } public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() { @@ -474,11 +466,9 @@ public final class ClusterStatusProtos { return VALUES[desc.getIndex()]; } - private final int index; private final int value; - private State(int index, int value) { - this.index = index; + private State(int value) { this.value = value; } @@ -486,7 +476,6 @@ public final class ClusterStatusProtos { } private int bitField0_; - // required .hbase.pb.RegionInfo region_info = 1; public static final int REGION_INFO_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo regionInfo_; /** @@ -499,18 +488,17 @@ public final class ClusterStatusProtos { * <code>required .hbase.pb.RegionInfo region_info = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo() { - return regionInfo_; + return regionInfo_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance() : regionInfo_; } /** * <code>required .hbase.pb.RegionInfo region_info = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder() { - return regionInfo_; + return regionInfo_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance() : regionInfo_; } - // required .hbase.pb.RegionState.State state = 2; public static final int STATE_FIELD_NUMBER = 2; - private org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.State state_; + private int state_; /** * <code>required .hbase.pb.RegionState.State state = 2;</code> */ @@ -521,10 +509,10 @@ public final class ClusterStatusProtos { * <code>required .hbase.pb.RegionState.State state = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.State getState() { - return state_; + org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.State result = org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.State.valueOf(state_); + return result == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.State.OFFLINE : result; } - // optional uint64 stamp = 3; public static final int STAMP_FIELD_NUMBER = 3; private long stamp_; /** @@ -540,15 +528,11 @@ public final class ClusterStatusProtos { return stamp_; } - private void initFields() { - regionInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); - state_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.State.OFFLINE; - stamp_ = 0L; - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasRegionInfo()) { memoizedIsInitialized = 0; @@ -568,50 +552,42 @@ public final class ClusterStatusProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, regionInfo_); + output.writeMessage(1, getRegionInfo()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeEnum(2, state_.getNumber()); + output.writeEnum(2, state_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeUInt64(3, stamp_); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, regionInfo_); + .computeMessageSize(1, getRegionInfo()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream - .computeEnumSize(2, state_.getNumber()); + .computeEnumSize(2, state_); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += com.google.protobuf.CodedOutputStream .computeUInt64Size(3, stamp_); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long 
serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -629,20 +605,17 @@ public final class ClusterStatusProtos { } result = result && (hasState() == other.hasState()); if (hasState()) { - result = result && - (getState() == other.getState()); + result = result && state_ == other.state_; } result = result && (hasStamp() == other.hasStamp()); if (hasStamp()) { result = result && (getStamp() == other.getStamp()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -656,13 +629,14 @@ public final class ClusterStatusProtos { } if (hasState()) { hash = (37 * hash) + STATE_FIELD_NUMBER; - hash = (53 * hash) + hashEnum(getState()); + hash = (53 * hash) + state_; } if (hasStamp()) { hash = (37 * hash) + STAMP_FIELD_NUMBER; - hash = (53 * hash) + hashLong(getStamp()); + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + getStamp()); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -690,46 +664,57 @@ public final class ClusterStatusProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + 
.parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -737,14 +722,15 @@ public final class ClusterStatusProtos { * Protobuf type {@code hbase.pb.RegionState} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder<Builder> - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionStateOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements + // @@protoc_insertion_point(builder_implements:hbase.pb.RegionState) + org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionStateOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.internal_static_hbase_pb_RegionState_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.internal_static_hbase_pb_RegionState_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -757,38 +743,31 @@ public final class ClusterStatusProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getRegionInfoFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (regionInfoBuilder_ == null) { - regionInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); + regionInfo_ = null; } else { regionInfoBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); - state_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.State.OFFLINE; + state_ = 0; bitField0_ = (bitField0_ & ~0x00000002); stamp_ = 0L; bitField0_ = (bitField0_ & ~0x00000004); return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.internal_static_hbase_pb_RegionState_descriptor; @@ -831,6 +810,32 @@ public final class ClusterStatusProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + 
com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState)other); @@ -851,21 +856,19 @@ public final class ClusterStatusProtos { if (other.hasStamp()) { setStamp(other.getStamp()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasRegionInfo()) { - return false; } if (!hasState()) { - return false; } if (!getRegionInfo().isInitialized()) { - return false; } return true; @@ -880,7 +883,7 @@ public final class ClusterStatusProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -890,9 +893,8 @@ public final class ClusterStatusProtos { } private int bitField0_; - // required .hbase.pb.RegionInfo region_info = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo regionInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo regionInfo_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> regionInfoBuilder_; /** * <code>required .hbase.pb.RegionInfo region_info = 1;</code> @@ -905,7 +907,7 @@ public final class ClusterStatusProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo() { if (regionInfoBuilder_ == null) { - return regionInfo_; + return regionInfo_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance() : regionInfo_; } else { return regionInfoBuilder_.getMessage(); } @@ -946,6 +948,7 @@ public final class ClusterStatusProtos { public Builder mergeRegionInfo(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo value) { if (regionInfoBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + regionInfo_ != null && regionInfo_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance()) { regionInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.newBuilder(regionInfo_).mergeFrom(value).buildPartial(); @@ -964,7 +967,7 @@ public final class ClusterStatusProtos { */ public Builder clearRegionInfo() { if (regionInfoBuilder_ == null) { - regionInfo_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); + regionInfo_ = null; onChanged(); } else { regionInfoBuilder_.clear(); @@ -987,19 +990,20 @@ public final class ClusterStatusProtos { if (regionInfoBuilder_ != null) { return regionInfoBuilder_.getMessageOrBuilder(); } else { - return regionInfo_; + return regionInfo_ == null ? + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance() : regionInfo_; } } /** * <code>required .hbase.pb.RegionInfo region_info = 1;</code> */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> getRegionInfoFieldBuilder() { if (regionInfoBuilder_ == null) { - regionInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder< + regionInfoBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>( - regionInfo_, + getRegionInfo(), getParentForChildren(), isClean()); regionInfo_ = null; @@ -1007,8 +1011,7 @@ public final class ClusterStatusProtos { return regionInfoBuilder_; } - // required .hbase.pb.RegionState.State state = 2; - private org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.State state_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.State.OFFLINE; + private int state_ = 0; /** * <code>required .hbase.pb.RegionState.State state = 2;</code> */ @@ -1019,7 +1022,8 @@ public final class ClusterStatusProtos { * <code>required .hbase.pb.RegionState.State state = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.State getState() { - return state_; + org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.State result = org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.State.valueOf(state_); + return result == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.State.OFFLINE : result; } /** * <code>required .hbase.pb.RegionState.State state = 2;</code> @@ -1029,7 +1033,7 @@ public final class ClusterStatusProtos { throw new NullPointerException(); } bitField0_ |= 0x00000002; - state_ = value; + state_ = value.getNumber(); onChanged(); return this; } @@ -1038,12 +1042,11 @@ public final class ClusterStatusProtos { */ public Builder clearState() { bitField0_ = (bitField0_ & ~0x00000002); - state_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.State.OFFLINE; + state_ = 0; onChanged(); return this; } - // optional uint64 stamp = 3; private long stamp_ ; /** * <code>optional uint64 stamp = 3;</code> @@ -1075,22 +1078,59 @@ public final class ClusterStatusProtos { onChanged(); return this; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.RegionState) } + // @@protoc_insertion_point(class_scope:hbase.pb.RegionState) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState DEFAULT_INSTANCE; static { - defaultInstance = new RegionState(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser<RegionState> + PARSER = new com.google.protobuf.AbstractParser<RegionState>() { + public RegionState parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new RegionState(input, extensionRegistry); + } + }; + + public static com.google.protobuf.Parser<RegionState> parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser<RegionState> getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState getDefaultInstanceForType() { + return DEFAULT_INSTANCE; } - // @@protoc_insertion_point(class_scope:hbase.pb.RegionState) } - public interface RegionInTransitionOrBuilder - extends com.google.protobuf.MessageOrBuilder { + public interface RegionInTransitionOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.RegionInTransition) + com.google.protobuf.MessageOrBuilder { - // required .hbase.pb.RegionSpecifier spec = 1; /** * <code>required .hbase.pb.RegionSpecifier spec = 1;</code> */ @@ -1104,7 +1144,6 @@ public final class ClusterStatusProtos { */ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getSpecOrBuilder(); - // required .hbase.pb.RegionState region_state = 2; /** * <code>required .hbase.pb.RegionState region_state = 2;</code> */ @@ -1121,36 +1160,27 @@ public final class ClusterStatusProtos { /** * Protobuf type {@code hbase.pb.RegionInTransition} */ - public static final class RegionInTransition extends - com.google.protobuf.GeneratedMessage - implements 
RegionInTransitionOrBuilder { + public static final class RegionInTransition extends + com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.RegionInTransition) + RegionInTransitionOrBuilder { // Use RegionInTransition.newBuilder() to construct. - private RegionInTransition(com.google.protobuf.GeneratedMessage.Builder<?> builder) { + private RegionInTransition(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); - this.unknownFields = builder.getUnknownFields(); - } - private RegionInTransition(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - - private static final RegionInTransition defaultInstance; - public static RegionInTransition getDefaultInstance() { - return defaultInstance; } - - public RegionInTransition getDefaultInstanceForType() { - return defaultInstance; + private RegionInTransition() { } - private final com.google.protobuf.UnknownFieldSet unknownFields; @java.lang.Override public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { + getUnknownFields() { return this.unknownFields; } private RegionInTransition( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); + this(); int mutable_bitField0_ = 0; com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); @@ -1201,7 +1231,7 @@ public final class ClusterStatusProtos { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); + e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); @@ -1212,30 +1242,14 @@ public final class ClusterStatusProtos { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.internal_static_hbase_pb_RegionInTransition_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.internal_static_hbase_pb_RegionInTransition_fieldAccessorTable .ensureFieldAccessorsInitialized( org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionInTransition.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionInTransition.Builder.class); } - public static com.google.protobuf.Parser<RegionInTransition> PARSER = - new com.google.protobuf.AbstractParser<RegionInTransition>() { - public RegionInTransition parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new RegionInTransition(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser<RegionInTransition> getParserForType() { - return PARSER; - } - private int bitField0_; - // required .hbase.pb.RegionSpecifier spec = 1; public static final int SPEC_FIELD_NUMBER = 1; private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier spec_; /** @@ -1248,16 +1262,15 @@ public final class ClusterStatusProtos { * <code>required .hbase.pb.RegionSpecifier spec = 1;</code> */ public 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getSpec() { - return spec_; + return spec_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : spec_; } /** * <code>required .hbase.pb.RegionSpecifier spec = 1;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getSpecOrBuilder() { - return spec_; + return spec_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : spec_; } - // required .hbase.pb.RegionState region_state = 2; public static final int REGION_STATE_FIELD_NUMBER = 2; private org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState regionState_; /** @@ -1270,23 +1283,20 @@ public final class ClusterStatusProtos { * <code>required .hbase.pb.RegionState region_state = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState getRegionState() { - return regionState_; + return regionState_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.getDefaultInstance() : regionState_; } /** * <code>required .hbase.pb.RegionState region_state = 2;</code> */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionStateOrBuilder getRegionStateOrBuilder() { - return regionState_; + return regionState_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.getDefaultInstance() : regionState_; } - private void initFields() { - spec_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - regionState_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.getDefaultInstance(); - } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; if (!hasSpec()) { memoizedIsInitialized = 0; @@ -1310,43 +1320,35 @@ public final class ClusterStatusProtos { public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { - getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeMessage(1, spec_); + output.writeMessage(1, getSpec()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeMessage(2, regionState_); + output.writeMessage(2, getRegionState()); } - getUnknownFields().writeTo(output); + unknownFields.writeTo(output); } - private int memoizedSerializedSize = -1; public int getSerializedSize() { - int size = memoizedSerializedSize; + int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(1, spec_); + .computeMessageSize(1, getSpec()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream - .computeMessageSize(2, regionState_); + .computeMessageSize(2, getRegionState()); } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; + size += unknownFields.getSerializedSize(); + memoizedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); - } - - 
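// A hedged aside, not part of the commit: the getter rewrites above follow
// V3's storage model, where a singular message field stays null until set and
// the getter substitutes the type's default instance. A sketch of the pattern,
// with a hypothetical message type Foo standing in for RegionSpecifier:
//
//   private Foo foo_;                                    // null until set
//   public Foo getFoo() {
//     return foo_ == null ? Foo.getDefaultInstance() : foo_;
//   }
//
// That is also why writeTo and getSerializedSize now go through getSpec() and
// getRegionState() rather than reading the backing fields directly.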
@java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; @@ -1367,12 +1369,10 @@ public final class ClusterStatusProtos { result = result && getRegionState() .equals(other.getRegionState()); } - result = result && - getUnknownFields().equals(other.getUnknownFields()); + result = result && unknownFields.equals(other.unknownFields); return result; } - private int memoizedHashCode = 0; @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { @@ -1388,7 +1388,7 @@ public final class ClusterStatusProtos { hash = (37 * hash) + REGION_STATE_FIELD_NUMBER; hash = (53 * hash) + getRegionState().hashCode(); } - hash = (29 * hash) + getUnknownFields().hashCode(); + hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } @@ -1416,46 +1416,57 @@ public final class ClusterStatusProtos { } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionInTransition parseFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionInTransition parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionInTransition parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionInTransition parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionInTransition parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { - return PARSER.parseFrom(input); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); } public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionInTransition parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + return com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); } - public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionInTransition prototype) { - return newBuilder().mergeFrom(prototype); + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); } - public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } @@ -1463,14 +1474,15 @@ public final class ClusterStatusProtos { * Protobuf type {@code hbase.pb.RegionInTransition} */ public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder<Builder> - implements org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionInTransitionOrBuilder { + com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements + // @@protoc_insertion_point(builder_implements:hbase.pb.RegionInTransition) + org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionInTransitionOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.internal_static_hbase_pb_RegionInTransition_descriptor; } - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.internal_static_hbase_pb_RegionInTransition_fieldAccessorTable .ensureFieldAccessorsInitialized( @@ -1483,30 +1495,27 @@ public final class ClusterStatusProtos { } private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { + com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { - if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + if (com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { getSpecFieldBuilder(); getRegionStateFieldBuilder(); } } - private static Builder create() { - return new Builder(); - } - public Builder clear() { super.clear(); if (specBuilder_ == null) { - spec_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + spec_ = null; } else { specBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); if (regionStateBuilder_ == null) { - regionState_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.getDefaultInstance(); + regionState_ = null; } else { regionStateBuilder_.clear(); } @@ -1514,10 +1523,6 @@ public final class ClusterStatusProtos { return this; } - public Builder clone() { - return create().mergeFrom(buildPartial()); - } - public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.internal_static_hbase_pb_RegionInTransition_descriptor; @@ -1560,6 +1565,32 @@ public final class ClusterStatusProtos { return result; } + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + 
com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionInTransition) { return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionInTransition)other); @@ -1577,25 +1608,22 @@ public final class ClusterStatusProtos { if (other.hasRegionState()) { mergeRegionState(other.getRegionState()); } - this.mergeUnknownFields(other.getUnknownFields()); + this.mergeUnknownFields(other.unknownFields); + onChanged(); return this; } public final boolean isInitialized() { if (!hasSpec()) { - return false; } if (!hasRegionState()) { - return false; } if (!getSpec().isInitialized()) { - return false; } if (!getRegionState().isInitialized()) { - return false; } return true; @@ -1610,7 +1638,7 @@ public final class ClusterStatusProtos { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionInTransition) e.getUnfinishedMessage(); - throw e; + throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); @@ -1620,9 +1648,8 @@ public final class ClusterStatusProtos { } private int bitField0_; - // required .hbase.pb.RegionSpecifier spec = 1; - private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier spec_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier spec_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> specBuilder_; /** * <code>required .hbase.pb.RegionSpecifier spec = 1;</code> @@ -1635,7 +1662,7 @@ public final class ClusterStatusProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier getSpec() { if (specBuilder_ == null) { - return spec_; + return spec_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : spec_; } else { return specBuilder_.getMessage(); } @@ -1676,6 +1703,7 @@ public final class ClusterStatusProtos { public Builder mergeSpec(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier value) { if (specBuilder_ == null) { if (((bitField0_ & 0x00000001) == 0x00000001) && + spec_ != null && spec_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { spec_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(spec_).mergeFrom(value).buildPartial(); @@ -1694,7 +1722,7 @@ public final class ClusterStatusProtos { */ public Builder clearSpec() { if (specBuilder_ == null) { - spec_ = org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); + spec_ = null; onChanged(); } else { specBuilder_.clear(); @@ -1717,19 +1745,20 @@ public final class ClusterStatusProtos { if (specBuilder_ != null) { return specBuilder_.getMessageOrBuilder(); } else { - return spec_; + return spec_ == null ? + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance() : spec_; } } /** * <code>required .hbase.pb.RegionSpecifier spec = 1;</code> */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getSpecFieldBuilder() { if (specBuilder_ == null) { - specBuilder_ = new com.google.protobuf.SingleFieldBuilder< + specBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( - spec_, + getSpec(), getParentForChildren(), isClean()); spec_ = null; @@ -1737,9 +1766,8 @@ public final class ClusterStatusProtos { return specBuilder_; } - // required .hbase.pb.RegionState region_state = 2; - private org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState regionState_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.getDefaultInstance(); - private com.google.protobuf.SingleFieldBuilder< + private org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState regionState_ = null; + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionStateOrBuilder> regionStateBuilder_; /** * <code>required .hbase.pb.RegionState region_state = 2;</code> @@ -1752,7 +1780,7 @@ public final class ClusterStatusProtos { */ public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState getRegionState() { if (regionStateBuilder_ == null) { - return regionState_; + return regionState_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.getDefaultInstance() : regionState_; } else { return regionStateBuilder_.getMessage(); } @@ -1793,6 +1821,7 @@ public final class ClusterStatusProtos { public Builder mergeRegionState(org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState value) { if (regionStateBuilder_ == null) { if (((bitField0_ & 0x00000002) == 0x00000002) && + regionState_ != null && regionState_ != org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.getDefaultInstance()) { regionState_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.newBuilder(regionState_).mergeFrom(value).buildPartial(); @@ -1811,7 +1840,7 @@ public final class ClusterStatusProtos { */ public Builder clearRegionState() { if (regionStateBuilder_ == null) { - regionState_ = org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.getDefaultInstance(); + regionState_ = null; onChanged(); } else { regionStateBuilder_.clear(); @@ -1834,41 +1863,79 @@ public final class ClusterStatusProtos { if (regionStateBuilder_ != null) { return regionStateBuilder_.getMessageOrBuilder(); } else { - return regionState_; + return regionState_ == null ? + org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.getDefaultInstance() : regionState_; } } /** * <code>required .hbase.pb.RegionState region_state = 2;</code> */ - private com.google.protobuf.SingleFieldBuilder< + private com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionStateOrBuilder> getRegionStateFieldBuilder() { if (regionStateBuilder_ == null) { - regionStateBuilder_ = new com.google.protobuf.SingleFieldBuilder< + regionStateBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionState.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionStateOrBuilder>( - regionState_, + getRegionState(), getParentForChildren(), isClean()); regionState_ = null; } return regionStateBuilder_; } + public final Builder setUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + // @@protoc_insertion_point(builder_scope:hbase.pb.RegionInTransition) } + // @@protoc_insertion_point(class_scope:hbase.pb.RegionInTransition) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionInTransition DEFAULT_INSTANCE; static { - defaultInstance = new RegionInTransition(true); - defaultInstance.initFields(); + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionInTransition(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionInTransition getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final com.google.protobuf.Parser<RegionInTransition> + PARSER = new 
+        PARSER = new com.google.protobuf.AbstractParser<RegionInTransition>() {
+      public RegionInTransition parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new RegionInTransition(input, extensionRegistry);
+      }
+    };
+
+    public static com.google.protobuf.Parser<RegionInTransition> parser() {
+      return PARSER;
+    }
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<RegionInTransition> getParserForType() {
+      return PARSER;
+    }
+
+    public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.RegionInTransition getDefaultInstanceForType() {
+      return DEFAULT_INSTANCE;
    }
-    // @@protoc_insertion_point(class_scope:hbase.pb.RegionInTransition)
  }
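The RegionInTransition builder hunks above show the core GeneratedMessageV3 migration pattern for singular message fields: the backing field (spec_, regionState_) now defaults to null instead of the type's default instance, getters substitute getDefaultInstance() on read, merge*() additionally guards against null, and clear*() resets to null. A minimal hand-written sketch of the same idea; "Child" and "Holder" are illustrative stand-ins, not types from this diff:

    // Sketch only: mirrors the null-backed singular-field pattern above.
    final class Child {
      static final Child DEFAULT = new Child();
      Child mergedWith(Child other) { return other; } // stand-in for builder-based merge
    }

    final class Holder {
      private Child child_; // null means "unset" in the V3 scheme

      Child getChild() {
        // Read-side substitution: callers never observe null.
        return child_ == null ? Child.DEFAULT : child_;
      }

      void mergeChild(Child value) {
        // Merge only when a real value is already present; otherwise adopt it.
        if (child_ != null && child_ != Child.DEFAULT) {
          child_ = child_.mergedWith(value);
        } else {
          child_ = value;
        }
      }

      void clearChild() {
        child_ = null; // V3 clears to null, not to Child.DEFAULT
      }
    }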
-  public interface StoreSequenceIdOrBuilder
-      extends com.google.protobuf.MessageOrBuilder {
+  public interface StoreSequenceIdOrBuilder extends
+      // @@protoc_insertion_point(interface_extends:hbase.pb.StoreSequenceId)
+      com.google.protobuf.MessageOrBuilder {
-    // required bytes family_name = 1;
    /**
     * <code>required bytes family_name = 1;</code>
     */
@@ -1878,7 +1945,6 @@ public final class ClusterStatusProtos {
     */
    com.google.protobuf.ByteString getFamilyName();
-    // required uint64 sequence_id = 2;
    /**
     * <code>required uint64 sequence_id = 2;</code>
     */
@@ -1889,43 +1955,36 @@ public final class ClusterStatusProtos {
    long getSequenceId();
  }
  /**
-   * Protobuf type {@code hbase.pb.StoreSequenceId}
-   *
   * <pre>
   **
   * sequence Id of a store
   * </pre>
+   *
+   * Protobuf type {@code hbase.pb.StoreSequenceId}
   */
-  public static final class StoreSequenceId extends
-      com.google.protobuf.GeneratedMessage
-      implements StoreSequenceIdOrBuilder {
+  public static final class StoreSequenceId extends
+      com.google.protobuf.GeneratedMessageV3 implements
+      // @@protoc_insertion_point(message_implements:hbase.pb.StoreSequenceId)
+      StoreSequenceIdOrBuilder {
    // Use StoreSequenceId.newBuilder() to construct.
-    private StoreSequenceId(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+    private StoreSequenceId(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
-      this.unknownFields = builder.getUnknownFields();
-    }
-    private StoreSequenceId(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
-
-    private static final StoreSequenceId defaultInstance;
-    public static StoreSequenceId getDefaultInstance() {
-      return defaultInstance;
    }
-
-    public StoreSequenceId getDefaultInstanceForType() {
-      return defaultInstance;
+    private StoreSequenceId() {
+      familyName_ = com.google.protobuf.ByteString.EMPTY;
+      sequenceId_ = 0L;
    }
-    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
-    getUnknownFields() {
+        getUnknownFields() {
      return this.unknownFields;
    }
    private StoreSequenceId(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
-      initFields();
+      this();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
@@ -1960,7 +2019,7 @@ public final class ClusterStatusProtos {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
-            e.getMessage()).setUnfinishedMessage(this);
+            e).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
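One behavioral improvement in the parsing constructor above: a caught IOException is now wrapped as new InvalidProtocolBufferException(e) rather than flattened to e.getMessage(), so the original exception survives as the cause. The general Java pattern, sketched with RuntimeException as a stand-in for the protobuf exception type:

    import java.io.IOException;

    final class WrapDemo {
      static RuntimeException wrap(IOException ioe) {
        // Passing the exception itself keeps its stack trace reachable via getCause();
        // wrapping only ioe.getMessage() would discard it.
        return new RuntimeException(ioe);
      }
    }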
@@ -1971,30 +2030,14 @@ public final class ClusterStatusProtos {
      return org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.internal_static_hbase_pb_StoreSequenceId_descriptor;
    }
-    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.internal_static_hbase_pb_StoreSequenceId_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId.class, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId.Builder.class);
    }
-    public static com.google.protobuf.Parser<StoreSequenceId> PARSER =
-        new com.google.protobuf.AbstractParser<StoreSequenceId>() {
-      public StoreSequenceId parsePartialFrom(
-          com.google.protobuf.CodedInputStream input,
-          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
-          throws com.google.protobuf.InvalidProtocolBufferException {
-        return new StoreSequenceId(input, extensionRegistry);
-      }
-    };
-
-    @java.lang.Override
-    public com.google.protobuf.Parser<StoreSequenceId> getParserForType() {
-      return PARSER;
-    }
-
    private int bitField0_;
-    // required bytes family_name = 1;
    public static final int FAMILY_NAME_FIELD_NUMBER = 1;
    private com.google.protobuf.ByteString familyName_;
    /**
@@ -2010,7 +2053,6 @@ public final class ClusterStatusProtos {
      return familyName_;
    }
-    // required uint64 sequence_id = 2;
    public static final int SEQUENCE_ID_FIELD_NUMBER = 2;
    private long sequenceId_;
    /**
@@ -2026,14 +2068,11 @@ public final class ClusterStatusProtos {
      return sequenceId_;
    }
-    private void initFields() {
-      familyName_ = com.google.protobuf.ByteString.EMPTY;
-      sequenceId_ = 0L;
-    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
-      if (isInitialized != -1) return isInitialized == 1;
+      if (isInitialized == 1) return true;
+      if (isInitialized == 0) return false;
      if (!hasFamilyName()) {
        memoizedIsInitialized = 0;
@@ -2049,19 +2088,17 @@ public final class ClusterStatusProtos {
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
-      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBytes(1, familyName_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeUInt64(2, sequenceId_);
      }
-      getUnknownFields().writeTo(output);
+      unknownFields.writeTo(output);
    }
-    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
-      int size = memoizedSerializedSize;
+      int size = memoizedSize;
      if (size != -1) return size;
      size = 0;
@@ -2073,19 +2110,13 @@ public final class ClusterStatusProtos {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(2, sequenceId_);
      }
-      size += getUnknownFields().getSerializedSize();
-      memoizedSerializedSize = size;
+      size += unknownFields.getSerializedSize();
+      memoizedSize = size;
      return size;
    }
    private static final long serialVersionUID = 0L;
    @java.lang.Override
-    protected java.lang.Object writeReplace()
-        throws java.io.ObjectStreamException {
-      return super.writeReplace();
-    }
-
-    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
@@ -2106,12 +2137,10 @@ public final class ClusterStatusProtos {
        result = result && (getSequenceId()
            == other.getSequenceId());
      }
-      result = result &&
-          getUnknownFields().equals(other.getUnknownFields());
+      result = result && unknownFields.equals(other.unknownFields);
      return result;
    }
-    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
@@ -2125,9 +2154,10 @@ public final class ClusterStatusProtos {
      }
      if (hasSequenceId()) {
        hash = (37 * hash) + SEQUENCE_ID_FIELD_NUMBER;
-        hash = (53 * hash) + hashLong(getSequenceId());
+        hash = (53 * hash) + com.google.protobuf.Internal.hashLong(
+            getSequenceId());
      }
-      hash = (29 * hash) + getUnknownFields().hashCode();
+      hash = (29 * hash) + unknownFields.hashCode();
      memoizedHashCode = hash;
      return hash;
    }
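The isInitialized() rewrite above replaces the single comparison against -1 with an explicit tri-state check: -1 means "not yet computed", 0 "known invalid", 1 "known valid". The same memoization shape in isolation; checkRequired() is a placeholder for the generated hasFamilyName()/hasSequenceId() tests:

    final class InitMemo {
      private byte memoizedIsInitialized = -1; // -1 unknown, 0 false, 1 true

      boolean isInitialized() {
        byte memo = memoizedIsInitialized;
        if (memo == 1) return true;
        if (memo == 0) return false;
        boolean ok = checkRequired();          // compute at most once
        memoizedIsInitialized = (byte) (ok ? 1 : 0);
        return ok;
      }

      private boolean checkRequired() {
        return true; // placeholder for required-field checks
      }
    }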
@@ -2155,66 +2185,78 @@ public final class ClusterStatusProtos {
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId parseFrom(java.io.InputStream input)
        throws java.io.IOException {
-      return PARSER.parseFrom(input);
+      return com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
-      return PARSER.parseFrom(input, extensionRegistry);
+      return com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
-      return PARSER.parseDelimitedFrom(input);
+      return com.google.protobuf.GeneratedMessageV3
+          .parseDelimitedWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
-      return PARSER.parseDelimitedFrom(input, extensionRegistry);
+      return com.google.protobuf.GeneratedMessageV3
+          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
-      return PARSER.parseFrom(input);
+      return com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
-      return PARSER.parseFrom(input, extensionRegistry);
+      return com.google.protobuf.GeneratedMessageV3
+          .parseWithIOException(PARSER, input, extensionRegistry);
    }
-    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
+    public static Builder newBuilder() {
+      return DEFAULT_INSTANCE.toBuilder();
+    }
    public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId prototype) {
-      return newBuilder().mergeFrom(prototype);
+      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
+    }
+    public Builder toBuilder() {
+      return this == DEFAULT_INSTANCE
+          ? new Builder() : new Builder().mergeFrom(this);
    }
-    public Builder toBuilder() { return newBuilder(this); }
    @java.lang.Override
    protected Builder newBuilderForType(
-        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
-     * Protobuf type {@code hbase.pb.StoreSequenceId}
-     *
     * <pre>
     **
     * sequence Id of a store
     * </pre>
+     *
+     * Protobuf type {@code hbase.pb.StoreSequenceId}
     */
    public static final class Builder extends
-        com.google.protobuf.GeneratedMessage.Builder<Builder>
-       implements org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceIdOrBuilder {
+        com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
+        // @@protoc_insertion_point(builder_implements:hbase.pb.StoreSequenceId)
+        org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceIdOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.internal_static_hbase_pb_StoreSequenceId_descriptor;
      }
-      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.internal_static_hbase_pb_StoreSequenceId_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
@@ -2227,18 +2269,15 @@ public final class ClusterStatusProtos {
      }
      private Builder(
-          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+          com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
-        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+        if (com.google.protobuf.GeneratedMessageV3
+                .alwaysUseFieldBuilders) {
        }
      }
-      private static Builder create() {
-        return new Builder();
-      }
-
      public Builder clear() {
        super.clear();
        familyName_ = com.google.protobuf.ByteString.EMPTY;
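The factory hunks above change how builders are minted: newBuilder() now goes through the singleton (DEFAULT_INSTANCE.toBuilder()), and toBuilder() skips the self-merge when invoked on the default instance, since merging a default instance into a fresh builder is a no-op. A stripped-down sketch of that wiring, with Msg as an illustrative stand-in:

    final class Msg {
      private static final Msg DEFAULT_INSTANCE = new Msg();

      static Msg getDefaultInstance() { return DEFAULT_INSTANCE; }

      static Builder newBuilder() {
        return DEFAULT_INSTANCE.toBuilder(); // all builders start from the singleton
      }

      Builder toBuilder() {
        // Avoid a useless self-merge when starting from the default instance.
        return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
      }

      static final class Builder {
        Builder mergeFrom(Msg other) { return this; } // placeholder field-copy
        Msg build() { return new Msg(); }
      }
    }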
@@ -2248,10 +2287,6 @@ public final class ClusterStatusProtos {
        return this;
      }
-      public Builder clone() {
-        return create().mergeFrom(buildPartial());
-      }
-
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.internal_static_hbase_pb_StoreSequenceId_descriptor;
@@ -2286,6 +2321,32 @@ public final class ClusterStatusProtos {
        return result;
      }
+      public Builder clone() {
+        return (Builder) super.clone();
+      }
+      public Builder setField(
+          com.google.protobuf.Descriptors.FieldDescriptor field,
+          Object value) {
+        return (Builder) super.setField(field, value);
+      }
+      public Builder clearField(
+          com.google.protobuf.Descriptors.FieldDescriptor field) {
+        return (Builder) super.clearField(field);
+      }
+      public Builder clearOneof(
+          com.google.protobuf.Descriptors.OneofDescriptor oneof) {
+        return (Builder) super.clearOneof(oneof);
+      }
+      public Builder setRepeatedField(
+          com.google.protobuf.Descriptors.FieldDescriptor field,
+          int index, Object value) {
+        return (Builder) super.setRepeatedField(field, index, value);
+      }
+      public Builder addRepeatedField(
+          com.google.protobuf.Descriptors.FieldDescriptor field,
+          Object value) {
+        return (Builder) super.addRepeatedField(field, value);
+      }
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId) {
          return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId)other);
@@ -2303,17 +2364,16 @@ public final class ClusterStatusProtos {
        if (other.hasSequenceId()) {
          setSequenceId(other.getSequenceId());
        }
-        this.mergeUnknownFields(other.getUnknownFields());
+        this.mergeUnknownFields(other.unknownFields);
+        onChanged();
        return this;
      }
      public final boolean isInitialized() {
        if (!hasFamilyName()) {
-
          return false;
        }
        if (!hasSequenceId()) {
-
          return false;
        }
        return true;
@@ -2328,7 +2388,7 @@ public final class ClusterStatusProtos {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId) e.getUnfinishedMessage();
-          throw e;
+          throw e.unwrapIOException();
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
@@ -2338,7 +2398,6 @@ public final class ClusterStatusProtos {
      }
      private int bitField0_;
-      // required bytes family_name = 1;
      private com.google.protobuf.ByteString familyName_ = com.google.protobuf.ByteString.EMPTY;
      /**
       * <code>required bytes family_name = 1;</code>
@@ -2374,7 +2433,6 @@ public final class ClusterStatusProtos {
        return this;
      }
-      // required uint64 sequence_id = 2;
      private long sequenceId_ ;
      /**
       * <code>required uint64 sequence_id = 2;</code>
@@ -2406,22 +2464,59 @@ public final class ClusterStatusProtos {
        onChanged();
        return this;
      }
+      public final Builder setUnknownFields(
+          final com.google.protobuf.UnknownFieldSet unknownFields) {
+        return super.setUnknownFields(unknownFields);
+      }
+
+      public final Builder mergeUnknownFields(
+          final com.google.protobuf.UnknownFieldSet unknownFields) {
+        return super.mergeUnknownFields(unknownFields);
+      }
+
+      // @@protoc_insertion_point(builder_scope:hbase.pb.StoreSequenceId)
    }
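The block of seemingly redundant overrides added above (clone(), setField(), clearField(), clearOneof(), setRepeatedField(), addRepeatedField()) exists to narrow the return type from the base builder to the concrete Builder, so fluent chains survive a reflective mutation. An illustrative sketch of the covariant-return trick, with hypothetical BaseBuilder/ConcreteBuilder classes:

    class BaseBuilder {
      BaseBuilder setField(String name, Object value) { return this; }
    }

    final class ConcreteBuilder extends BaseBuilder {
      @Override
      ConcreteBuilder setField(String name, Object value) {
        // Same behavior, narrower declared type (covariant return), exactly
        // like the generated "(Builder) super.setField(...)" above.
        return (ConcreteBuilder) super.setField(name, value);
      }

      ConcreteBuilder setSequenceId(long id) { return this; }
    }

    // Compiles only because setField() now returns ConcreteBuilder:
    //   new ConcreteBuilder().setField("sequence_id", 1L).setSequenceId(2L);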
+    // @@protoc_insertion_point(class_scope:hbase.pb.StoreSequenceId)
+    private static final org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId DEFAULT_INSTANCE;
    static {
-      defaultInstance = new StoreSequenceId(true);
-      defaultInstance.initFields();
+      DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId();
+    }
+
+    public static org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId getDefaultInstance() {
+      return DEFAULT_INSTANCE;
+    }
+
+    @java.lang.Deprecated public static final com.google.protobuf.Parser<StoreSequenceId>
+        PARSER = new com.google.protobuf.AbstractParser<StoreSequenceId>() {
+      public StoreSequenceId parsePartialFrom(
+          com.google.protobuf.CodedInputStream input,
+          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+          throws com.google.protobuf.InvalidProtocolBufferException {
+        return new StoreSequenceId(input, extensionRegistry);
+      }
+    };
+
+    public static com.google.protobuf.Parser<StoreSequenceId> parser() {
+      return PARSER;
+    }
+
+    @java.lang.Override
+    public com.google.protobuf.Parser<StoreSequenceId> getParserForType() {
+      return PARSER;
+    }
+
+    public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId getDefaultInstanceForType() {
+      return DEFAULT_INSTANCE;
    }
-    // @@protoc_insertion_point(class_scope:hbase.pb.StoreSequenceId)
  }
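With the change above, the public PARSER field is kept for source compatibility but deprecated; callers should prefer the static parser() accessor or the static parseFrom() overloads. Hypothetical call sites, assuming a serialized StoreSequenceId in a byte array and protobuf-java 3.x on the classpath:

    import com.google.protobuf.InvalidProtocolBufferException;
    import org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId;

    final class ParserMigration {
      static StoreSequenceId parse(byte[] bytes) throws InvalidProtocolBufferException {
        // Deprecated after this change: StoreSequenceId.PARSER.parseFrom(bytes)
        // Preferred:
        return StoreSequenceId.parser().parseFrom(bytes);
        // Equivalent convenience overload: StoreSequenceId.parseFrom(bytes)
      }
    }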
-  public interface RegionStoreSequenceIdsOrBuilder
-      extends com.google.protobuf.MessageOrBuilder {
+  public interface RegionStoreSequenceIdsOrBuilder extends
+      // @@protoc_insertion_point(interface_extends:hbase.pb.RegionStoreSequenceIds)
+      com.google.protobuf.MessageOrBuilder {
-    // required uint64 last_flushed_sequence_id = 1;
    /**
     * <code>required uint64 last_flushed_sequence_id = 1;</code>
     */
@@ -2431,7 +2526,6 @@ public final class ClusterStatusProtos {
     */
    long getLastFlushedSequenceId();
-    // repeated .hbase.pb.StoreSequenceId store_sequence_id = 2;
    /**
     * <code>repeated .hbase.pb.StoreSequenceId store_sequence_id = 2;</code>
     */
@@ -2457,44 +2551,37 @@ public final class ClusterStatusProtos {
        int index);
  }
  /**
-   * Protobuf type {@code hbase.pb.RegionStoreSequenceIds}
-   *
   * <pre>
   **
   * contains a sequence id of a region which should be the minimum of its store sequence ids and
   * list of sequence ids of the region's stores
   * </pre>
+   *
+   * Protobuf type {@code hbase.pb.RegionStoreSequenceIds}
   */
-  public static final class RegionStoreSequenceIds extends
-      com.google.protobuf.GeneratedMessage
-      implements RegionStoreSequenceIdsOrBuilder {
+  public static final class RegionStoreSequenceIds extends
+      com.google.protobuf.GeneratedMessageV3 implements
+      // @@protoc_insertion_point(message_implements:hbase.pb.RegionStoreSequenceIds)
+      RegionStoreSequenceIdsOrBuilder {
    // Use RegionStoreSequenceIds.newBuilder() to construct.
-    private RegionStoreSequenceIds(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+    private RegionStoreSequenceIds(com.google.pr
<TRUNCATED>