Repository: hbase
Updated Branches:
  refs/heads/HBASE-14070.HLC c90948c8e -> 7a42c38ef (forced update)
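Below, a minimal usage sketch of the API this patch generates (not part of the diff itself). It assumes the standard shaded HBaseProtos.ServerName fields (host_name, port, start_code); the host and port values are illustrative only.

import org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName;

public class ServerTimestampExample {
  public static void main(String[] args) throws Exception {
    // ServerName carries a required host_name field, so it must be set for
    // isInitialized() to pass when the enclosing message is built.
    ServerName server = ServerName.newBuilder()
        .setHostName("rs1.example.com")  // illustrative host
        .setPort(16020)
        .setStartCode(1L)
        .build();

    // ServerTimestamp records the last WAL roll time for one region server.
    ServerTimestamp st = ServerTimestamp.newBuilder()
        .setServerName(server)
        .setTimestamp(System.currentTimeMillis())
        .build();

    // Round-trip through the wire format using the generated parser.
    byte[] wire = st.toByteArray();
    ServerTimestamp parsed = ServerTimestamp.parseFrom(wire);
    assert parsed.getTimestamp() == st.getTimestamp();

    // Enum numbers map back via forNumber(); the int-valued valueOf() is
    // deprecated in this generated code.
    BackupProtos.BackupType type = BackupProtos.BackupType.forNumber(0); // FULL
    System.out.println(type + ", hasServerName=" + parsed.hasServerName());
  }
}

The generated file itself follows below and, per its header comment, should not be edited by hand; regenerate it from Backup.proto instead.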
http://git-wip-us.apache.org/repos/asf/hbase/blob/6786b2b6/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/BackupProtos.java ---------------------------------------------------------------------- diff --git a/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/BackupProtos.java b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/BackupProtos.java new file mode 100644 index 0000000..0271777 --- /dev/null +++ b/hbase-protocol-shaded/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/generated/BackupProtos.java @@ -0,0 +1,7013 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: Backup.proto + +package org.apache.hadoop.hbase.shaded.protobuf.generated; + +public final class BackupProtos { + private BackupProtos() {} + public static void registerAllExtensions( + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite registry) { + } + + public static void registerAllExtensions( + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions( + (org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite) registry); + } + /** + * Protobuf enum {@code hbase.pb.BackupType} + */ + public enum BackupType + implements org.apache.hadoop.hbase.shaded.com.google.protobuf.ProtocolMessageEnum { + /** + * <code>FULL = 0;</code> + */ + FULL(0), + /** + * <code>INCREMENTAL = 1;</code> + */ + INCREMENTAL(1), + ; + + /** + * <code>FULL = 0;</code> + */ + public static final int FULL_VALUE = 0; + /** + * <code>INCREMENTAL = 1;</code> + */ + public static final int INCREMENTAL_VALUE = 1; + + + public final int getNumber() { + return value; + } + + /** + * @deprecated Use {@link #forNumber(int)} instead. 
+ */ + @java.lang.Deprecated + public static BackupType valueOf(int value) { + return forNumber(value); + } + + public static BackupType forNumber(int value) { + switch (value) { + case 0: return FULL; + case 1: return INCREMENTAL; + default: return null; + } + } + + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<BackupType> + internalGetValueMap() { + return internalValueMap; + } + private static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap< + BackupType> internalValueMap = + new org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.EnumLiteMap<BackupType>() { + public BackupType findValueByNumber(int number) { + return BackupType.forNumber(number); + } + }; + + public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor + getValueDescriptor() { + return getDescriptor().getValues().get(ordinal()); + } + public final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor + getDescriptorForType() { + return getDescriptor(); + } + public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumDescriptor + getDescriptor() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.getDescriptor().getEnumTypes().get(0); + } + + private static final BackupType[] VALUES = values(); + + public static BackupType valueOf( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.EnumValueDescriptor desc) { + if (desc.getType() != getDescriptor()) { + throw new java.lang.IllegalArgumentException( + "EnumValueDescriptor is not for this type."); + } + return VALUES[desc.getIndex()]; + } + + private final int value; + + private BackupType(int value) { + this.value = value; + } + + // @@protoc_insertion_point(enum_scope:hbase.pb.BackupType) + } + + public interface ServerTimestampOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.ServerTimestamp) + org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { + + /** + * <code>optional .hbase.pb.ServerName server_name = 1;</code> + */ + boolean hasServerName(); + /** + * <code>optional .hbase.pb.ServerName server_name = 1;</code> + */ + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName getServerName(); + /** + * <code>optional .hbase.pb.ServerName server_name = 1;</code> + */ + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerNameOrBuilder(); + + /** + * <code>optional uint64 timestamp = 2;</code> + */ + boolean hasTimestamp(); + /** + * <code>optional uint64 timestamp = 2;</code> + */ + long getTimestamp(); + } + /** + * <pre> + ** + * ServerTimestamp keeps last WAL roll time per Region Server + * </pre> + * + * Protobuf type {@code hbase.pb.ServerTimestamp} + */ + public static final class ServerTimestamp extends + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.ServerTimestamp) + ServerTimestampOrBuilder { + // Use ServerTimestamp.newBuilder() to construct. 
+ private ServerTimestamp(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { + super(builder); + } + private ServerTimestamp() { + timestamp_ = 0L; + } + + @java.lang.Override + public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private ServerTimestamp( + org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = + org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = serverName_.toBuilder(); + } + serverName_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(serverName_); + serverName_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + case 16: { + bitField0_ |= 0x00000002; + timestamp_ = input.readUInt64(); + break; + } + } + } + } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.internal_static_hbase_pb_ServerTimestamp_descriptor; + } + + protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.internal_static_hbase_pb_ServerTimestamp_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp.class, org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp.Builder.class); + } + + private int bitField0_; + public static final int SERVER_NAME_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName serverName_; + /** + * <code>optional .hbase.pb.ServerName server_name = 1;</code> + */ + public boolean hasServerName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * <code>optional .hbase.pb.ServerName server_name = 1;</code> + */ + public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName getServerName() { + return serverName_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : serverName_; + } + /** + * <code>optional .hbase.pb.ServerName server_name = 1;</code> + */ + public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerNameOrBuilder() { + return serverName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : serverName_; + } + + public static final int TIMESTAMP_FIELD_NUMBER = 2; + private long timestamp_; + /** + * <code>optional uint64 timestamp = 2;</code> + */ + public boolean hasTimestamp() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * <code>optional uint64 timestamp = 2;</code> + */ + public long getTimestamp() { + return timestamp_; + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + if (hasServerName()) { + if (!getServerName().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, getServerName()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeUInt64(2, timestamp_); + } + unknownFields.writeTo(output); + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream + .computeMessageSize(1, getServerName()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream + .computeUInt64Size(2, timestamp_); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp other = (org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp) obj; + + boolean result = true; + result = result && (hasServerName() == other.hasServerName()); + if (hasServerName()) { + result = result && getServerName() + .equals(other.getServerName()); + } + result = result && (hasTimestamp() == other.hasTimestamp()); + if (hasTimestamp()) { + result = result && (getTimestamp() + == other.getTimestamp()); + } + result = result && unknownFields.equals(other.unknownFields); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (hasServerName()) { + hash = (37 * hash) + SERVER_NAME_FIELD_NUMBER; + hash = (53 * hash) + getServerName().hashCode(); + } + if (hasTimestamp()) { + hash = (37 * hash) + TIMESTAMP_FIELD_NUMBER; + hash = (53 * hash) + org.apache.hadoop.hbase.shaded.com.google.protobuf.Internal.hashLong( + getTimestamp()); + } + hash = (29 * hash) + unknownFields.hashCode(); + 
memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp parseFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp parseFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp parseFrom(byte[] data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp parseFrom( + byte[] data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp parseFrom(java.io.InputStream input) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp parseFrom( + java.io.InputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp parseDelimitedFrom( + java.io.InputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp parseFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp parseFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, 
input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * <pre> + ** + * ServerTimestamp keeps last WAL roll time per Region Server + * </pre> + * + * Protobuf type {@code hbase.pb.ServerTimestamp} + */ + public static final class Builder extends + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements + // @@protoc_insertion_point(builder_implements:hbase.pb.ServerTimestamp) + org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestampOrBuilder { + public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.internal_static_hbase_pb_ServerTimestamp_descriptor; + } + + protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.internal_static_hbase_pb_ServerTimestamp_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp.class, org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + getServerNameFieldBuilder(); + } + } + public Builder clear() { + super.clear(); + if (serverNameBuilder_ == null) { + serverName_ = null; + } else { + serverNameBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + timestamp_ = 0L; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.internal_static_hbase_pb_ServerTimestamp_descriptor; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp getDefaultInstanceForType() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp build() { + org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public 
org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp buildPartial() { + org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp result = new org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (serverNameBuilder_ == null) { + result.serverName_ = serverName_; + } else { + result.serverName_ = serverNameBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.timestamp_ = timestamp_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp) { + return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp other) { + if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp.getDefaultInstance()) return this; + if (other.hasServerName()) { + mergeServerName(other.getServerName()); + } + if (other.hasTimestamp()) { + setTimestamp(other.getTimestamp()); + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + public final boolean isInitialized() { + if (hasServerName()) { + if (!getServerName().isInitialized()) { + return false; + } + } + return true; + } + + public Builder mergeFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int 
bitField0_; + + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName serverName_ = null; + private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder> serverNameBuilder_; + /** + * <code>optional .hbase.pb.ServerName server_name = 1;</code> + */ + public boolean hasServerName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * <code>optional .hbase.pb.ServerName server_name = 1;</code> + */ + public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName getServerName() { + if (serverNameBuilder_ == null) { + return serverName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : serverName_; + } else { + return serverNameBuilder_.getMessage(); + } + } + /** + * <code>optional .hbase.pb.ServerName server_name = 1;</code> + */ + public Builder setServerName(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName value) { + if (serverNameBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + serverName_ = value; + onChanged(); + } else { + serverNameBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + /** + * <code>optional .hbase.pb.ServerName server_name = 1;</code> + */ + public Builder setServerName( + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { + if (serverNameBuilder_ == null) { + serverName_ = builderForValue.build(); + onChanged(); + } else { + serverNameBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + /** + * <code>optional .hbase.pb.ServerName server_name = 1;</code> + */ + public Builder mergeServerName(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName value) { + if (serverNameBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + serverName_ != null && + serverName_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) { + serverName_ = + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.newBuilder(serverName_).mergeFrom(value).buildPartial(); + } else { + serverName_ = value; + } + onChanged(); + } else { + serverNameBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + /** + * <code>optional .hbase.pb.ServerName server_name = 1;</code> + */ + public Builder clearServerName() { + if (serverNameBuilder_ == null) { + serverName_ = null; + onChanged(); + } else { + serverNameBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + /** + * <code>optional .hbase.pb.ServerName server_name = 1;</code> + */ + public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder getServerNameBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getServerNameFieldBuilder().getBuilder(); + } + /** + * <code>optional .hbase.pb.ServerName server_name = 1;</code> + */ + public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerNameOrBuilder() { + if (serverNameBuilder_ != null) { + return serverNameBuilder_.getMessageOrBuilder(); + } else { + return serverName_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance() : serverName_; + } + } + /** + * <code>optional .hbase.pb.ServerName server_name = 1;</code> + */ + private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder> + getServerNameFieldBuilder() { + if (serverNameBuilder_ == null) { + serverNameBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ServerNameOrBuilder>( + getServerName(), + getParentForChildren(), + isClean()); + serverName_ = null; + } + return serverNameBuilder_; + } + + private long timestamp_ ; + /** + * <code>optional uint64 timestamp = 2;</code> + */ + public boolean hasTimestamp() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * <code>optional uint64 timestamp = 2;</code> + */ + public long getTimestamp() { + return timestamp_; + } + /** + * <code>optional uint64 timestamp = 2;</code> + */ + public Builder setTimestamp(long value) { + bitField0_ |= 0x00000002; + timestamp_ = value; + onChanged(); + return this; + } + /** + * <code>optional uint64 timestamp = 2;</code> + */ + public Builder clearTimestamp() { + bitField0_ = (bitField0_ & ~0x00000002); + timestamp_ = 0L; + onChanged(); + return this; + } + public final Builder setUnknownFields( + final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:hbase.pb.ServerTimestamp) + } + + // @@protoc_insertion_point(class_scope:hbase.pb.ServerTimestamp) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<ServerTimestamp> + PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<ServerTimestamp>() { + public ServerTimestamp parsePartialFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return new ServerTimestamp(input, extensionRegistry); + } + }; + + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<ServerTimestamp> parser() { + return PARSER; + } + + @java.lang.Override + public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<ServerTimestamp> getParserForType() { + return PARSER; + } + + public 
org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface TableServerTimestampOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.TableServerTimestamp) + org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { + + /** + * <code>optional .hbase.pb.TableName table_name = 1;</code> + */ + boolean hasTableName(); + /** + * <code>optional .hbase.pb.TableName table_name = 1;</code> + */ + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName(); + /** + * <code>optional .hbase.pb.TableName table_name = 1;</code> + */ + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder(); + + /** + * <code>repeated .hbase.pb.ServerTimestamp server_timestamp = 2;</code> + */ + java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp> + getServerTimestampList(); + /** + * <code>repeated .hbase.pb.ServerTimestamp server_timestamp = 2;</code> + */ + org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp getServerTimestamp(int index); + /** + * <code>repeated .hbase.pb.ServerTimestamp server_timestamp = 2;</code> + */ + int getServerTimestampCount(); + /** + * <code>repeated .hbase.pb.ServerTimestamp server_timestamp = 2;</code> + */ + java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestampOrBuilder> + getServerTimestampOrBuilderList(); + /** + * <code>repeated .hbase.pb.ServerTimestamp server_timestamp = 2;</code> + */ + org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestampOrBuilder getServerTimestampOrBuilder( + int index); + } + /** + * <pre> + ** + * TableServerTimestamp keeps last WAL roll time per Region Server & Table + * Each table have different last WAL roll time stamps across cluster, on every RS + * </pre> + * + * Protobuf type {@code hbase.pb.TableServerTimestamp} + */ + public static final class TableServerTimestamp extends + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.TableServerTimestamp) + TableServerTimestampOrBuilder { + // Use TableServerTimestamp.newBuilder() to construct. 
+ private TableServerTimestamp(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { + super(builder); + } + private TableServerTimestamp() { + serverTimestamp_ = java.util.Collections.emptyList(); + } + + @java.lang.Override + public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private TableServerTimestamp( + org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = + org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = tableName_.toBuilder(); + } + tableName_ = input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(tableName_); + tableName_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + case 18: { + if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + serverTimestamp_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp>(); + mutable_bitField0_ |= 0x00000002; + } + serverTimestamp_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp.PARSER, extensionRegistry)); + break; + } + } + } + } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { + serverTimestamp_ = java.util.Collections.unmodifiableList(serverTimestamp_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.internal_static_hbase_pb_TableServerTimestamp_descriptor; + } + + protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.internal_static_hbase_pb_TableServerTimestamp_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.TableServerTimestamp.class, org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.TableServerTimestamp.Builder.class); + } + + private int bitField0_; + public static final int TABLE_NAME_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_; + /** + 
* <code>optional .hbase.pb.TableName table_name = 1;</code> + */ + public boolean hasTableName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * <code>optional .hbase.pb.TableName table_name = 1;</code> + */ + public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() { + return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; + } + /** + * <code>optional .hbase.pb.TableName table_name = 1;</code> + */ + public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { + return tableName_ == null ? org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; + } + + public static final int SERVER_TIMESTAMP_FIELD_NUMBER = 2; + private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp> serverTimestamp_; + /** + * <code>repeated .hbase.pb.ServerTimestamp server_timestamp = 2;</code> + */ + public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp> getServerTimestampList() { + return serverTimestamp_; + } + /** + * <code>repeated .hbase.pb.ServerTimestamp server_timestamp = 2;</code> + */ + public java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestampOrBuilder> + getServerTimestampOrBuilderList() { + return serverTimestamp_; + } + /** + * <code>repeated .hbase.pb.ServerTimestamp server_timestamp = 2;</code> + */ + public int getServerTimestampCount() { + return serverTimestamp_.size(); + } + /** + * <code>repeated .hbase.pb.ServerTimestamp server_timestamp = 2;</code> + */ + public org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp getServerTimestamp(int index) { + return serverTimestamp_.get(index); + } + /** + * <code>repeated .hbase.pb.ServerTimestamp server_timestamp = 2;</code> + */ + public org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestampOrBuilder getServerTimestampOrBuilder( + int index) { + return serverTimestamp_.get(index); + } + + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + if (hasTableName()) { + if (!getTableName().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + for (int i = 0; i < getServerTimestampCount(); i++) { + if (!getServerTimestamp(i).isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, getTableName()); + } + for (int i = 0; i < serverTimestamp_.size(); i++) { + output.writeMessage(2, serverTimestamp_.get(i)); + } + unknownFields.writeTo(output); + } + + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream + .computeMessageSize(1, getTableName()); + } + for (int i = 0; i < serverTimestamp_.size(); i++) { + size += org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedOutputStream + .computeMessageSize(2, 
serverTimestamp_.get(i)); + } + size += unknownFields.getSerializedSize(); + memoizedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.TableServerTimestamp)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.TableServerTimestamp other = (org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.TableServerTimestamp) obj; + + boolean result = true; + result = result && (hasTableName() == other.hasTableName()); + if (hasTableName()) { + result = result && getTableName() + .equals(other.getTableName()); + } + result = result && getServerTimestampList() + .equals(other.getServerTimestampList()); + result = result && unknownFields.equals(other.unknownFields); + return result; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + if (hasTableName()) { + hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER; + hash = (53 * hash) + getTableName().hashCode(); + } + if (getServerTimestampCount() > 0) { + hash = (37 * hash) + SERVER_TIMESTAMP_FIELD_NUMBER; + hash = (53 * hash) + getServerTimestampList().hashCode(); + } + hash = (29 * hash) + unknownFields.hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.TableServerTimestamp parseFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.TableServerTimestamp parseFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.TableServerTimestamp parseFrom(byte[] data) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.TableServerTimestamp parseFrom( + byte[] data, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.TableServerTimestamp parseFrom(java.io.InputStream input) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.TableServerTimestamp parseFrom( + java.io.InputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + 
.parseWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.TableServerTimestamp parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.TableServerTimestamp parseDelimitedFrom( + java.io.InputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.TableServerTimestamp parseFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input); + } + public static org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.TableServerTimestamp parseFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.TableServerTimestamp prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * <pre> + ** + * TableServerTimestamp keeps last WAL roll time per Region Server & Table + * Each table have different last WAL roll time stamps across cluster, on every RS + * </pre> + * + * Protobuf type {@code hbase.pb.TableServerTimestamp} + */ + public static final class Builder extends + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements + // @@protoc_insertion_point(builder_implements:hbase.pb.TableServerTimestamp) + org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.TableServerTimestampOrBuilder { + public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.internal_static_hbase_pb_TableServerTimestamp_descriptor; + } + + protected org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.internal_static_hbase_pb_TableServerTimestamp_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.TableServerTimestamp.class, org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.TableServerTimestamp.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.TableServerTimestamp.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 + .alwaysUseFieldBuilders) { + getTableNameFieldBuilder(); + getServerTimestampFieldBuilder(); + } + } + public Builder clear() { + super.clear(); + if (tableNameBuilder_ == null) { + tableName_ = null; + } else { + tableNameBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + if (serverTimestampBuilder_ == null) { + serverTimestamp_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + } else { + serverTimestampBuilder_.clear(); + } + return this; + } + + public org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.internal_static_hbase_pb_TableServerTimestamp_descriptor; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.TableServerTimestamp getDefaultInstanceForType() { + return org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.TableServerTimestamp.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.TableServerTimestamp build() { + org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.TableServerTimestamp result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.TableServerTimestamp buildPartial() { + 
org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.TableServerTimestamp result = new org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.TableServerTimestamp(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (tableNameBuilder_ == null) { + result.tableName_ = tableName_; + } else { + result.tableName_ = tableNameBuilder_.build(); + } + if (serverTimestampBuilder_ == null) { + if (((bitField0_ & 0x00000002) == 0x00000002)) { + serverTimestamp_ = java.util.Collections.unmodifiableList(serverTimestamp_); + bitField0_ = (bitField0_ & ~0x00000002); + } + result.serverTimestamp_ = serverTimestamp_; + } else { + result.serverTimestamp_ = serverTimestampBuilder_.build(); + } + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder clone() { + return (Builder) super.clone(); + } + public Builder setField( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.setField(field, value); + } + public Builder clearField( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field) { + return (Builder) super.clearField(field); + } + public Builder clearOneof( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.OneofDescriptor oneof) { + return (Builder) super.clearOneof(oneof); + } + public Builder setRepeatedField( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, + int index, Object value) { + return (Builder) super.setRepeatedField(field, index, value); + } + public Builder addRepeatedField( + org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.FieldDescriptor field, + Object value) { + return (Builder) super.addRepeatedField(field, value); + } + public Builder mergeFrom(org.apache.hadoop.hbase.shaded.com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.TableServerTimestamp) { + return mergeFrom((org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.TableServerTimestamp)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.TableServerTimestamp other) { + if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.TableServerTimestamp.getDefaultInstance()) return this; + if (other.hasTableName()) { + mergeTableName(other.getTableName()); + } + if (serverTimestampBuilder_ == null) { + if (!other.serverTimestamp_.isEmpty()) { + if (serverTimestamp_.isEmpty()) { + serverTimestamp_ = other.serverTimestamp_; + bitField0_ = (bitField0_ & ~0x00000002); + } else { + ensureServerTimestampIsMutable(); + serverTimestamp_.addAll(other.serverTimestamp_); + } + onChanged(); + } + } else { + if (!other.serverTimestamp_.isEmpty()) { + if (serverTimestampBuilder_.isEmpty()) { + serverTimestampBuilder_.dispose(); + serverTimestampBuilder_ = null; + serverTimestamp_ = other.serverTimestamp_; + bitField0_ = (bitField0_ & ~0x00000002); + serverTimestampBuilder_ = + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
+ getServerTimestampFieldBuilder() : null; + } else { + serverTimestampBuilder_.addAllMessages(other.serverTimestamp_); + } + } + } + this.mergeUnknownFields(other.unknownFields); + onChanged(); + return this; + } + + public final boolean isInitialized() { + if (hasTableName()) { + if (!getTableName().isInitialized()) { + return false; + } + } + for (int i = 0; i < getServerTimestampCount(); i++) { + if (!getServerTimestamp(i).isInitialized()) { + return false; + } + } + return true; + } + + public Builder mergeFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.TableServerTimestamp parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.TableServerTimestamp) e.getUnfinishedMessage(); + throw e.unwrapIOException(); + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + private org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName tableName_ = null; + private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_; + /** + * <code>optional .hbase.pb.TableName table_name = 1;</code> + */ + public boolean hasTableName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * <code>optional .hbase.pb.TableName table_name = 1;</code> + */ + public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableName() { + if (tableNameBuilder_ == null) { + return tableName_ == null ? 
org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; + } else { + return tableNameBuilder_.getMessage(); + } + } + /** + * <code>optional .hbase.pb.TableName table_name = 1;</code> + */ + public Builder setTableName(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName value) { + if (tableNameBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + tableName_ = value; + onChanged(); + } else { + tableNameBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + /** + * <code>optional .hbase.pb.TableName table_name = 1;</code> + */ + public Builder setTableName( + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) { + if (tableNameBuilder_ == null) { + tableName_ = builderForValue.build(); + onChanged(); + } else { + tableNameBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + /** + * <code>optional .hbase.pb.TableName table_name = 1;</code> + */ + public Builder mergeTableName(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName value) { + if (tableNameBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + tableName_ != null && + tableName_ != org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) { + tableName_ = + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial(); + } else { + tableName_ = value; + } + onChanged(); + } else { + tableNameBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + /** + * <code>optional .hbase.pb.TableName table_name = 1;</code> + */ + public Builder clearTableName() { + if (tableNameBuilder_ == null) { + tableName_ = null; + onChanged(); + } else { + tableNameBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + /** + * <code>optional .hbase.pb.TableName table_name = 1;</code> + */ + public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getTableNameFieldBuilder().getBuilder(); + } + /** + * <code>optional .hbase.pb.TableName table_name = 1;</code> + */ + public org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() { + if (tableNameBuilder_ != null) { + return tableNameBuilder_.getMessageOrBuilder(); + } else { + return tableName_ == null ? 
+ org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.getDefaultInstance() : tableName_; + } + } + /** + * <code>optional .hbase.pb.TableName table_name = 1;</code> + */ + private org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> + getTableNameFieldBuilder() { + if (tableNameBuilder_ == null) { + tableNameBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.SingleFieldBuilderV3< + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder>( + getTableName(), + getParentForChildren(), + isClean()); + tableName_ = null; + } + return tableNameBuilder_; + } + + private java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp> serverTimestamp_ = + java.util.Collections.emptyList(); + private void ensureServerTimestampIsMutable() { + if (!((bitField0_ & 0x00000002) == 0x00000002)) { + serverTimestamp_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp>(serverTimestamp_); + bitField0_ |= 0x00000002; + } + } + + private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< + org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp, org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestampOrBuilder> serverTimestampBuilder_; + + /** + * <code>repeated .hbase.pb.ServerTimestamp server_timestamp = 2;</code> + */ + public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp> getServerTimestampList() { + if (serverTimestampBuilder_ == null) { + return java.util.Collections.unmodifiableList(serverTimestamp_); + } else { + return serverTimestampBuilder_.getMessageList(); + } + } + /** + * <code>repeated .hbase.pb.ServerTimestamp server_timestamp = 2;</code> + */ + public int getServerTimestampCount() { + if (serverTimestampBuilder_ == null) { + return serverTimestamp_.size(); + } else { + return serverTimestampBuilder_.getCount(); + } + } + /** + * <code>repeated .hbase.pb.ServerTimestamp server_timestamp = 2;</code> + */ + public org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp getServerTimestamp(int index) { + if (serverTimestampBuilder_ == null) { + return serverTimestamp_.get(index); + } else { + return serverTimestampBuilder_.getMessage(index); + } + } + /** + * <code>repeated .hbase.pb.ServerTimestamp server_timestamp = 2;</code> + */ + public Builder setServerTimestamp( + int index, org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp value) { + if (serverTimestampBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureServerTimestampIsMutable(); + serverTimestamp_.set(index, value); + onChanged(); + } else { + serverTimestampBuilder_.setMessage(index, value); + } + return this; + } + /** + * <code>repeated .hbase.pb.ServerTimestamp server_timestamp = 2;</code> + */ + public Builder setServerTimestamp( + int index, 
org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp.Builder builderForValue) { + if (serverTimestampBuilder_ == null) { + ensureServerTimestampIsMutable(); + serverTimestamp_.set(index, builderForValue.build()); + onChanged(); + } else { + serverTimestampBuilder_.setMessage(index, builderForValue.build()); + } + return this; + } + /** + * <code>repeated .hbase.pb.ServerTimestamp server_timestamp = 2;</code> + */ + public Builder addServerTimestamp(org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp value) { + if (serverTimestampBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureServerTimestampIsMutable(); + serverTimestamp_.add(value); + onChanged(); + } else { + serverTimestampBuilder_.addMessage(value); + } + return this; + } + /** + * <code>repeated .hbase.pb.ServerTimestamp server_timestamp = 2;</code> + */ + public Builder addServerTimestamp( + int index, org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp value) { + if (serverTimestampBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + ensureServerTimestampIsMutable(); + serverTimestamp_.add(index, value); + onChanged(); + } else { + serverTimestampBuilder_.addMessage(index, value); + } + return this; + } + /** + * <code>repeated .hbase.pb.ServerTimestamp server_timestamp = 2;</code> + */ + public Builder addServerTimestamp( + org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp.Builder builderForValue) { + if (serverTimestampBuilder_ == null) { + ensureServerTimestampIsMutable(); + serverTimestamp_.add(builderForValue.build()); + onChanged(); + } else { + serverTimestampBuilder_.addMessage(builderForValue.build()); + } + return this; + } + /** + * <code>repeated .hbase.pb.ServerTimestamp server_timestamp = 2;</code> + */ + public Builder addServerTimestamp( + int index, org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp.Builder builderForValue) { + if (serverTimestampBuilder_ == null) { + ensureServerTimestampIsMutable(); + serverTimestamp_.add(index, builderForValue.build()); + onChanged(); + } else { + serverTimestampBuilder_.addMessage(index, builderForValue.build()); + } + return this; + } + /** + * <code>repeated .hbase.pb.ServerTimestamp server_timestamp = 2;</code> + */ + public Builder addAllServerTimestamp( + java.lang.Iterable<? 
extends org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp> values) { + if (serverTimestampBuilder_ == null) { + ensureServerTimestampIsMutable(); + org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractMessageLite.Builder.addAll( + values, serverTimestamp_); + onChanged(); + } else { + serverTimestampBuilder_.addAllMessages(values); + } + return this; + } + /** + * <code>repeated .hbase.pb.ServerTimestamp server_timestamp = 2;</code> + */ + public Builder clearServerTimestamp() { + if (serverTimestampBuilder_ == null) { + serverTimestamp_ = java.util.Collections.emptyList(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + } else { + serverTimestampBuilder_.clear(); + } + return this; + } + /** + * <code>repeated .hbase.pb.ServerTimestamp server_timestamp = 2;</code> + */ + public Builder removeServerTimestamp(int index) { + if (serverTimestampBuilder_ == null) { + ensureServerTimestampIsMutable(); + serverTimestamp_.remove(index); + onChanged(); + } else { + serverTimestampBuilder_.remove(index); + } + return this; + } + /** + * <code>repeated .hbase.pb.ServerTimestamp server_timestamp = 2;</code> + */ + public org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp.Builder getServerTimestampBuilder( + int index) { + return getServerTimestampFieldBuilder().getBuilder(index); + } + /** + * <code>repeated .hbase.pb.ServerTimestamp server_timestamp = 2;</code> + */ + public org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestampOrBuilder getServerTimestampOrBuilder( + int index) { + if (serverTimestampBuilder_ == null) { + return serverTimestamp_.get(index); } else { + return serverTimestampBuilder_.getMessageOrBuilder(index); + } + } + /** + * <code>repeated .hbase.pb.ServerTimestamp server_timestamp = 2;</code> + */ + public java.util.List<? 
extends org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestampOrBuilder> + getServerTimestampOrBuilderList() { + if (serverTimestampBuilder_ != null) { + return serverTimestampBuilder_.getMessageOrBuilderList(); + } else { + return java.util.Collections.unmodifiableList(serverTimestamp_); + } + } + /** + * <code>repeated .hbase.pb.ServerTimestamp server_timestamp = 2;</code> + */ + public org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp.Builder addServerTimestampBuilder() { + return getServerTimestampFieldBuilder().addBuilder( + org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp.getDefaultInstance()); + } + /** + * <code>repeated .hbase.pb.ServerTimestamp server_timestamp = 2;</code> + */ + public org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp.Builder addServerTimestampBuilder( + int index) { + return getServerTimestampFieldBuilder().addBuilder( + index, org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp.getDefaultInstance()); + } + /** + * <code>repeated .hbase.pb.ServerTimestamp server_timestamp = 2;</code> + */ + public java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp.Builder> + getServerTimestampBuilderList() { + return getServerTimestampFieldBuilder().getBuilderList(); + } + private org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< + org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp, org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestampOrBuilder> + getServerTimestampFieldBuilder() { + if (serverTimestampBuilder_ == null) { + serverTimestampBuilder_ = new org.apache.hadoop.hbase.shaded.com.google.protobuf.RepeatedFieldBuilderV3< + org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp, org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestamp.Builder, org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.ServerTimestampOrBuilder>( + serverTimestamp_, + ((bitField0_ & 0x00000002) == 0x00000002), + getParentForChildren(), + isClean()); + serverTimestamp_ = null; + } + return serverTimestampBuilder_; + } + public final Builder setUnknownFields( + final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { + return super.setUnknownFields(unknownFields); + } + + public final Builder mergeUnknownFields( + final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet unknownFields) { + return super.mergeUnknownFields(unknownFields); + } + + + // @@protoc_insertion_point(builder_scope:hbase.pb.TableServerTimestamp) + } + + // @@protoc_insertion_point(class_scope:hbase.pb.TableServerTimestamp) + private static final org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.TableServerTimestamp DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.TableServerTimestamp(); + } + + public static org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.TableServerTimestamp getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + @java.lang.Deprecated public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<TableServerTimestamp> + PARSER = new org.apache.hadoop.hbase.shaded.com.google.protobuf.AbstractParser<TableServerTimestamp>() { + public TableServerTimestamp 
parsePartialFrom( + org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + return new TableServerTimestamp(input, extensionRegistry); + } + }; + + public static org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<TableServerTimestamp> parser() { + return PARSER; + } + + @java.lang.Override + public org.apache.hadoop.hbase.shaded.com.google.protobuf.Parser<TableServerTimestamp> getParserForType() { + return PARSER; + } + + public org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.TableServerTimestamp getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + + } + + public interface BackupImageOrBuilder extends + // @@protoc_insertion_point(interface_extends:hbase.pb.BackupImage) + org.apache.hadoop.hbase.shaded.com.google.protobuf.MessageOrBuilder { + + /** + * <code>optional string backup_id = 1;</code> + */ + boolean hasBackupId(); + /** + * <code>optional string backup_id = 1;</code> + */ + java.lang.String getBackupId(); + /** + * <code>optional string backup_id = 1;</code> + */ + org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString + getBackupIdBytes(); + + /** + * <code>optional .hbase.pb.BackupType backup_type = 2;</code> + */ + boolean hasBackupType(); + /** + * <code>optional .hbase.pb.BackupType backup_type = 2;</code> + */ + org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.BackupType getBackupType(); + + /** + * <code>optional string backup_root_dir = 3;</code> + */ + boolean hasBackupRootDir(); + /** + * <code>optional string backup_root_dir = 3;</code> + */ + java.lang.String getBackupRootDir(); + /** + * <code>optional string backup_root_dir = 3;</code> + */ + org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString + getBackupRootDirBytes(); + + /** + * <code>repeated .hbase.pb.TableName table_list = 4;</code> + */ + java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName> + getTableListList(); + /** + * <code>repeated .hbase.pb.TableName table_list = 4;</code> + */ + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName getTableList(int index); + /** + * <code>repeated .hbase.pb.TableName table_list = 4;</code> + */ + int getTableListCount(); + /** + * <code>repeated .hbase.pb.TableName table_list = 4;</code> + */ + java.util.List<? 
extends org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder> + getTableListOrBuilderList(); + /** + * <code>repeated .hbase.pb.TableName table_list = 4;</code> + */ + org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableListOrBuilder( + int index); + + /** + * <code>optional uint64 start_ts = 5;</code> + */ + boolean hasStartTs(); + /** + * <code>optional uint64 start_ts = 5;</code> + */ + long getStartTs(); + + /** + * <code>optional uint64 complete_ts = 6;</code> + */ + boolean hasCompleteTs(); + /** + * <code>optional uint64 complete_ts = 6;</code> + */ + long getCompleteTs(); + + /** + * <code>repeated .hbase.pb.BackupImage ancestors = 7;</code> + */ + java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.BackupImage> + getAncestorsList(); + /** + * <code>repeated .hbase.pb.BackupImage ancestors = 7;</code> + */ + org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.BackupImage getAncestors(int index); + /** + * <code>repeated .hbase.pb.BackupImage ancestors = 7;</code> + */ + int getAncestorsCount(); + /** + * <code>repeated .hbase.pb.BackupImage ancestors = 7;</code> + */ + java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.BackupImageOrBuilder> + getAncestorsOrBuilderList(); + /** + * <code>repeated .hbase.pb.BackupImage ancestors = 7;</code> + */ + org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.BackupImageOrBuilder getAncestorsOrBuilder( + int index); + + /** + * <code>repeated .hbase.pb.TableServerTimestamp tst_map = 8;</code> + */ + java.util.List<org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.TableServerTimestamp> + getTstMapList(); + /** + * <code>repeated .hbase.pb.TableServerTimestamp tst_map = 8;</code> + */ + org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.TableServerTimestamp getTstMap(int index); + /** + * <code>repeated .hbase.pb.TableServerTimestamp tst_map = 8;</code> + */ + int getTstMapCount(); + /** + * <code>repeated .hbase.pb.TableServerTimestamp tst_map = 8;</code> + */ + java.util.List<? extends org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.TableServerTimestampOrBuilder> + getTstMapOrBuilderList(); + /** + * <code>repeated .hbase.pb.TableServerTimestamp tst_map = 8;</code> + */ + org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.TableServerTimestampOrBuilder getTstMapOrBuilder( + int index); + } + /** + * <pre> + ** + * Structure keeps relevant info for backup restore session + * </pre> + * + * Protobuf type {@code hbase.pb.BackupImage} + */ + public static final class BackupImage extends + org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3 implements + // @@protoc_insertion_point(message_implements:hbase.pb.BackupImage) + BackupImageOrBuilder { + // Use BackupImage.newBuilder() to construct. 
+ private BackupImage(org.apache.hadoop.hbase.shaded.com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { + super(builder); + } + private BackupImage() { + backupId_ = ""; + backupType_ = 0; + backupRootDir_ = ""; + tableList_ = java.util.Collections.emptyList(); + startTs_ = 0L; + completeTs_ = 0L; + ancestors_ = java.util.Collections.emptyList(); + tstMap_ = java.util.Collections.emptyList(); + } + + @java.lang.Override + public final org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private BackupImage( + org.apache.hadoop.hbase.shaded.com.google.protobuf.CodedInputStream input, + org.apache.hadoop.hbase.shaded.com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException { + this(); + int mutable_bitField0_ = 0; + org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.Builder unknownFields = + org.apache.hadoop.hbase.shaded.com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); + bitField0_ |= 0x00000001; + backupId_ = bs; + break; + } + case 16: { + int rawValue = input.readEnum(); + org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.BackupType value = org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.BackupType.valueOf(rawValue); + if (value == null) { + unknownFields.mergeVarintField(2, rawValue); + } else { + bitField0_ |= 0x00000002; + backupType_ = rawValue; + } + break; + } + case 26: { + org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString bs = input.readBytes(); + bitField0_ |= 0x00000004; + backupRootDir_ = bs; + break; + } + case 34: { + if (!((mutable_bitField0_ & 0x00000008) == 0x00000008)) { + tableList_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName>(); + mutable_bitField0_ |= 0x00000008; + } + tableList_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry)); + break; + } + case 40: { + bitField0_ |= 0x00000008; + startTs_ = input.readUInt64(); + break; + } + case 48: { + bitField0_ |= 0x00000010; + completeTs_ = input.readUInt64(); + break; + } + case 58: { + if (!((mutable_bitField0_ & 0x00000040) == 0x00000040)) { + ancestors_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.BackupImage>(); + mutable_bitField0_ |= 0x00000040; + } + ancestors_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.BackupImage.PARSER, extensionRegistry)); + break; + } + case 66: { + if (!((mutable_bitField0_ & 0x00000080) == 0x00000080)) { + tstMap_ = new java.util.ArrayList<org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.TableServerTimestamp>(); + mutable_bitField0_ |= 0x00000080; + } + tstMap_.add( + input.readMessage(org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.TableServerTimestamp.PARSER, extensionRegistry)); + break; + } + } + } + } catch (org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + 
throw new org.apache.hadoop.hbase.shaded.com.google.protobuf.InvalidProtocolBufferException( + e).setUnfinishedMessage(this); + } finally { + if (((mutable_bitField0_ & 0x00000008) == 0x00000008)) { + tableList_ = java.util.Collections.unmodifiableList(tableList_); + } + if (((mutable_bitField0_ & 0x00000040) == 0x00000040)) { + ancestors_ = java.util.Collections.unmodifiableList(ancestors_); + } + if (((mutable_bitField0_ & 0x00000080) == 0x00000080)) { + tstMap_ = java.util.Collections.unmodifiableList(tstMap_); + } + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final org.apache.hadoop.hbase.shaded.com.google.protobuf.Descriptors.Descr <TRUNCATED>
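For orientation, since the file above is pure protoc output driven by Backup.proto: client code is expected to use the standard generated builder API. Below is a minimal usage sketch, not part of this patch — only setTableName and addServerTimestamp appear verbatim in the (truncated) diff above; the BackupImage setter/adder names are inferred from its field list via the usual protobuf codegen conventions, and all values are illustrative.

    import org.apache.hadoop.hbase.shaded.com.google.protobuf.ByteString;
    import org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos;
    import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;

    public class BackupImageSketch {
      public static void main(String[] args) throws Exception {
        // ServerTimestamp: last WAL roll time for one region server.
        BackupProtos.ServerTimestamp st = BackupProtos.ServerTimestamp.newBuilder()
            .setServerName(HBaseProtos.ServerName.newBuilder()
                .setHostName("rs1.example.com")   // illustrative host
                .setPort(16020)
                .build())
            .setTimestamp(System.currentTimeMillis())
            .build();

        // TableServerTimestamp: pairs a table with its per-server roll times.
        HBaseProtos.TableName table = HBaseProtos.TableName.newBuilder()
            .setNamespace(ByteString.copyFromUtf8("default"))
            .setQualifier(ByteString.copyFromUtf8("t1"))
            .build();
        BackupProtos.TableServerTimestamp tst =
            BackupProtos.TableServerTimestamp.newBuilder()
                .setTableName(table)        // verbatim in the diff above
                .addServerTimestamp(st)     // verbatim in the diff above
                .build();

        // BackupImage: metadata for one backup session. A FULL image has no
        // ancestors; an INCREMENTAL image would addAncestors(...) as well.
        BackupProtos.BackupImage image = BackupProtos.BackupImage.newBuilder()
            .setBackupId("backup_001")                      // illustrative id
            .setBackupType(BackupProtos.BackupType.FULL)
            .setBackupRootDir("hdfs://cluster/backup")      // illustrative path
            .addTableList(table)
            .setStartTs(1L)
            .setCompleteTs(2L)
            .addTstMap(tst)
            .build();

        // Round-trip through the wire format with the generated parser.
        byte[] bytes = image.toByteArray();
        BackupProtos.BackupImage copy = BackupProtos.BackupImage.parseFrom(bytes);
        System.out.println(copy.getBackupId());
      }
    }

Note the shaded package prefix (org.apache.hadoop.hbase.shaded.com.google.protobuf) throughout: this module relocates protobuf so HBase's internal protobuf version cannot clash with a user's classpath, which is why the whole generated file is committed under hbase-protocol-shaded rather than produced at build time.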