http://git-wip-us.apache.org/repos/asf/hbase/blob/6786b2b6/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/example/generated/BulkDeleteProtos.java
----------------------------------------------------------------------
diff --git
a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/example/generated/BulkDeleteProtos.java
b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/example/generated/BulkDeleteProtos.java
new file mode 100644
index 0000000..373e036
--- /dev/null
+++
b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/example/generated/BulkDeleteProtos.java
@@ -0,0 +1,1792 @@
+// Generated by the protocol buffer compiler. DO NOT EDIT!
+// source: BulkDelete.proto
+
+package org.apache.hadoop.hbase.coprocessor.example.generated;
+
+public final class BulkDeleteProtos {
+ private BulkDeleteProtos() {}
+ public static void registerAllExtensions(
+ com.google.protobuf.ExtensionRegistry registry) {
+ }
+ public interface BulkDeleteRequestOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required .hbase.pb.Scan scan = 1;
+ /**
+ * <code>required .hbase.pb.Scan scan = 1;</code>
+ */
+ boolean hasScan();
+ /**
+ * <code>required .hbase.pb.Scan scan = 1;</code>
+ */
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan();
+ /**
+ * <code>required .hbase.pb.Scan scan = 1;</code>
+ */
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder();
+
+ // required .hbase.pb.BulkDeleteRequest.DeleteType deleteType = 2;
+ /**
+ * <code>required .hbase.pb.BulkDeleteRequest.DeleteType deleteType = 2;</code>
+ */
+ boolean hasDeleteType();
+ /**
+ * <code>required .hbase.pb.BulkDeleteRequest.DeleteType deleteType = 2;</code>
+ */
+ org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType getDeleteType();
+
+ // optional uint64 timestamp = 3;
+ /**
+ * <code>optional uint64 timestamp = 3;</code>
+ */
+ boolean hasTimestamp();
+ /**
+ * <code>optional uint64 timestamp = 3;</code>
+ */
+ long getTimestamp();
+
+ // required uint32 rowBatchSize = 4;
+ /**
+ * <code>required uint32 rowBatchSize = 4;</code>
+ */
+ boolean hasRowBatchSize();
+ /**
+ * <code>required uint32 rowBatchSize = 4;</code>
+ */
+ int getRowBatchSize();
+ }
+ /**
+ * Protobuf type {@code hbase.pb.BulkDeleteRequest}
+ */
+ public static final class BulkDeleteRequest extends
+ com.google.protobuf.GeneratedMessage
+ implements BulkDeleteRequestOrBuilder {
+ // Use BulkDeleteRequest.newBuilder() to construct.
+ private BulkDeleteRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private BulkDeleteRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final BulkDeleteRequest defaultInstance;
+ public static BulkDeleteRequest getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public BulkDeleteRequest getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private BulkDeleteRequest(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 10: {
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder subBuilder = null;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ subBuilder = scan_.toBuilder();
+ }
+ scan_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.PARSER, extensionRegistry);
+ if (subBuilder != null) {
+ subBuilder.mergeFrom(scan_);
+ scan_ = subBuilder.buildPartial();
+ }
+ bitField0_ |= 0x00000001;
+ break;
+ }
+ case 16: {
+ int rawValue = input.readEnum();
+ org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType value = org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType.valueOf(rawValue);
+ if (value == null) {
+ unknownFields.mergeVarintField(2, rawValue);
+ } else {
+ bitField0_ |= 0x00000002;
+ deleteType_ = value;
+ }
+ break;
+ }
+ case 24: {
+ bitField0_ |= 0x00000004;
+ timestamp_ = input.readUInt64();
+ break;
+ }
+ case 32: {
+ bitField0_ |= 0x00000008;
+ rowBatchSize_ = input.readUInt32();
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_hbase_pb_BulkDeleteRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_hbase_pb_BulkDeleteRequest_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.class, org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<BulkDeleteRequest> PARSER =
+ new com.google.protobuf.AbstractParser<BulkDeleteRequest>() {
+ public BulkDeleteRequest parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new BulkDeleteRequest(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<BulkDeleteRequest> getParserForType() {
+ return PARSER;
+ }
+
+ /**
+ * Protobuf enum {@code hbase.pb.BulkDeleteRequest.DeleteType}
+ */
+ public enum DeleteType
+ implements com.google.protobuf.ProtocolMessageEnum {
+ /**
+ * <code>ROW = 0;</code>
+ */
+ ROW(0, 0),
+ /**
+ * <code>FAMILY = 1;</code>
+ */
+ FAMILY(1, 1),
+ /**
+ * <code>COLUMN = 2;</code>
+ */
+ COLUMN(2, 2),
+ /**
+ * <code>VERSION = 3;</code>
+ */
+ VERSION(3, 3),
+ ;
+
+ /**
+ * <code>ROW = 0;</code>
+ */
+ public static final int ROW_VALUE = 0;
+ /**
+ * <code>FAMILY = 1;</code>
+ */
+ public static final int FAMILY_VALUE = 1;
+ /**
+ * <code>COLUMN = 2;</code>
+ */
+ public static final int COLUMN_VALUE = 2;
+ /**
+ * <code>VERSION = 3;</code>
+ */
+ public static final int VERSION_VALUE = 3;
+
+
+ public final int getNumber() { return value; }
+
+ public static DeleteType valueOf(int value) {
+ switch (value) {
+ case 0: return ROW;
+ case 1: return FAMILY;
+ case 2: return COLUMN;
+ case 3: return VERSION;
+ default: return null;
+ }
+ }
+
+ public static com.google.protobuf.Internal.EnumLiteMap<DeleteType>
+ internalGetValueMap() {
+ return internalValueMap;
+ }
+ private static com.google.protobuf.Internal.EnumLiteMap<DeleteType>
+ internalValueMap =
+ new com.google.protobuf.Internal.EnumLiteMap<DeleteType>() {
+ public DeleteType findValueByNumber(int number) {
+ return DeleteType.valueOf(number);
+ }
+ };
+
+ public final com.google.protobuf.Descriptors.EnumValueDescriptor
+ getValueDescriptor() {
+ return getDescriptor().getValues().get(index);
+ }
+ public final com.google.protobuf.Descriptors.EnumDescriptor
+ getDescriptorForType() {
+ return getDescriptor();
+ }
+ public static final com.google.protobuf.Descriptors.EnumDescriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.getDescriptor().getEnumTypes().get(0);
+ }
+
+ private static final DeleteType[] VALUES = values();
+
+ public static DeleteType valueOf(
+ com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
+ if (desc.getType() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "EnumValueDescriptor is not for this type.");
+ }
+ return VALUES[desc.getIndex()];
+ }
+
+ private final int index;
+ private final int value;
+
+ private DeleteType(int index, int value) {
+ this.index = index;
+ this.value = value;
+ }
+
+ // @@protoc_insertion_point(enum_scope:hbase.pb.BulkDeleteRequest.DeleteType)
+ }
+
+ private int bitField0_;
+ // required .hbase.pb.Scan scan = 1;
+ public static final int SCAN_FIELD_NUMBER = 1;
+ private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan scan_;
+ /**
+ * <code>required .hbase.pb.Scan scan = 1;</code>
+ */
+ public boolean hasScan() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * <code>required .hbase.pb.Scan scan = 1;</code>
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan() {
+ return scan_;
+ }
+ /**
+ * <code>required .hbase.pb.Scan scan = 1;</code>
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder() {
+ return scan_;
+ }
+
+ // required .hbase.pb.BulkDeleteRequest.DeleteType deleteType = 2;
+ public static final int DELETETYPE_FIELD_NUMBER = 2;
+ private org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType deleteType_;
+ /**
+ * <code>required .hbase.pb.BulkDeleteRequest.DeleteType deleteType = 2;</code>
+ */
+ public boolean hasDeleteType() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * <code>required .hbase.pb.BulkDeleteRequest.DeleteType deleteType = 2;</code>
+ */
+ public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType getDeleteType() {
+ return deleteType_;
+ }
+
+ // optional uint64 timestamp = 3;
+ public static final int TIMESTAMP_FIELD_NUMBER = 3;
+ private long timestamp_;
+ /**
+ * <code>optional uint64 timestamp = 3;</code>
+ */
+ public boolean hasTimestamp() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+ /**
+ * <code>optional uint64 timestamp = 3;</code>
+ */
+ public long getTimestamp() {
+ return timestamp_;
+ }
+
+ // required uint32 rowBatchSize = 4;
+ public static final int ROWBATCHSIZE_FIELD_NUMBER = 4;
+ private int rowBatchSize_;
+ /**
+ * <code>required uint32 rowBatchSize = 4;</code>
+ */
+ public boolean hasRowBatchSize() {
+ return ((bitField0_ & 0x00000008) == 0x00000008);
+ }
+ /**
+ * <code>required uint32 rowBatchSize = 4;</code>
+ */
+ public int getRowBatchSize() {
+ return rowBatchSize_;
+ }
+
+ private void initFields() {
+ scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance();
+ deleteType_ = org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType.ROW;
+ timestamp_ = 0L;
+ rowBatchSize_ = 0;
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasScan()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!hasDeleteType()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!hasRowBatchSize()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ if (!getScan().isInitialized()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeMessage(1, scan_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ output.writeEnum(2, deleteType_.getNumber());
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ output.writeUInt64(3, timestamp_);
+ }
+ if (((bitField0_ & 0x00000008) == 0x00000008)) {
+ output.writeUInt32(4, rowBatchSize_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(1, scan_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeEnumSize(2, deleteType_.getNumber());
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeUInt64Size(3, timestamp_);
+ }
+ if (((bitField0_ & 0x00000008) == 0x00000008)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeUInt32Size(4, rowBatchSize_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest other = (org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest) obj;
+
+ boolean result = true;
+ result = result && (hasScan() == other.hasScan());
+ if (hasScan()) {
+ result = result && getScan()
+ .equals(other.getScan());
+ }
+ result = result && (hasDeleteType() == other.hasDeleteType());
+ if (hasDeleteType()) {
+ result = result &&
+ (getDeleteType() == other.getDeleteType());
+ }
+ result = result && (hasTimestamp() == other.hasTimestamp());
+ if (hasTimestamp()) {
+ result = result && (getTimestamp()
+ == other.getTimestamp());
+ }
+ result = result && (hasRowBatchSize() == other.hasRowBatchSize());
+ if (hasRowBatchSize()) {
+ result = result && (getRowBatchSize()
+ == other.getRowBatchSize());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasScan()) {
+ hash = (37 * hash) + SCAN_FIELD_NUMBER;
+ hash = (53 * hash) + getScan().hashCode();
+ }
+ if (hasDeleteType()) {
+ hash = (37 * hash) + DELETETYPE_FIELD_NUMBER;
+ hash = (53 * hash) + hashEnum(getDeleteType());
+ }
+ if (hasTimestamp()) {
+ hash = (37 * hash) + TIMESTAMP_FIELD_NUMBER;
+ hash = (53 * hash) + hashLong(getTimestamp());
+ }
+ if (hasRowBatchSize()) {
+ hash = (37 * hash) + ROWBATCHSIZE_FIELD_NUMBER;
+ hash = (53 * hash) + getRowBatchSize();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code hbase.pb.BulkDeleteRequest}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequestOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_hbase_pb_BulkDeleteRequest_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_hbase_pb_BulkDeleteRequest_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.class, org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.Builder.class);
+ }
+
+ // Construct using org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ getScanFieldBuilder();
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ if (scanBuilder_ == null) {
+ scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance();
+ } else {
+ scanBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000001);
+ deleteType_ = org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType.ROW;
+ bitField0_ = (bitField0_ & ~0x00000002);
+ timestamp_ = 0L;
+ bitField0_ = (bitField0_ & ~0x00000004);
+ rowBatchSize_ = 0;
+ bitField0_ = (bitField0_ & ~0x00000008);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_hbase_pb_BulkDeleteRequest_descriptor;
+ }
+
+ public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest build() {
+ org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest buildPartial() {
+ org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest result = new org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ if (scanBuilder_ == null) {
+ result.scan_ = scan_;
+ } else {
+ result.scan_ = scanBuilder_.build();
+ }
+ if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+ to_bitField0_ |= 0x00000002;
+ }
+ result.deleteType_ = deleteType_;
+ if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
+ to_bitField0_ |= 0x00000004;
+ }
+ result.timestamp_ = timestamp_;
+ if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
+ to_bitField0_ |= 0x00000008;
+ }
+ result.rowBatchSize_ = rowBatchSize_;
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest) {
+ return mergeFrom((org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest other) {
+ if (other == org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.getDefaultInstance()) return this;
+ if (other.hasScan()) {
+ mergeScan(other.getScan());
+ }
+ if (other.hasDeleteType()) {
+ setDeleteType(other.getDeleteType());
+ }
+ if (other.hasTimestamp()) {
+ setTimestamp(other.getTimestamp());
+ }
+ if (other.hasRowBatchSize()) {
+ setRowBatchSize(other.getRowBatchSize());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ if (!hasScan()) {
+
+ return false;
+ }
+ if (!hasDeleteType()) {
+
+ return false;
+ }
+ if (!hasRowBatchSize()) {
+
+ return false;
+ }
+ if (!getScan().isInitialized()) {
+
+ return false;
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest) e.getUnfinishedMessage();
+ throw e;
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
+ }
+ }
+ return this;
+ }
+ private int bitField0_;
+
+ // required .hbase.pb.Scan scan = 1;
+ private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance();
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder> scanBuilder_;
+ /**
+ * <code>required .hbase.pb.Scan scan = 1;</code>
+ */
+ public boolean hasScan() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * <code>required .hbase.pb.Scan scan = 1;</code>
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan() {
+ if (scanBuilder_ == null) {
+ return scan_;
+ } else {
+ return scanBuilder_.getMessage();
+ }
+ }
+ /**
+ * <code>required .hbase.pb.Scan scan = 1;</code>
+ */
+ public Builder setScan(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan value) {
+ if (scanBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ scan_ = value;
+ onChanged();
+ } else {
+ scanBuilder_.setMessage(value);
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ /**
+ * <code>required .hbase.pb.Scan scan = 1;</code>
+ */
+ public Builder setScan(
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder builderForValue) {
+ if (scanBuilder_ == null) {
+ scan_ = builderForValue.build();
+ onChanged();
+ } else {
+ scanBuilder_.setMessage(builderForValue.build());
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ /**
+ * <code>required .hbase.pb.Scan scan = 1;</code>
+ */
+ public Builder mergeScan(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan value) {
+ if (scanBuilder_ == null) {
+ if (((bitField0_ & 0x00000001) == 0x00000001) &&
+ scan_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance()) {
+ scan_ =
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.newBuilder(scan_).mergeFrom(value).buildPartial();
+ } else {
+ scan_ = value;
+ }
+ onChanged();
+ } else {
+ scanBuilder_.mergeFrom(value);
+ }
+ bitField0_ |= 0x00000001;
+ return this;
+ }
+ /**
+ * <code>required .hbase.pb.Scan scan = 1;</code>
+ */
+ public Builder clearScan() {
+ if (scanBuilder_ == null) {
+ scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance();
+ onChanged();
+ } else {
+ scanBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000001);
+ return this;
+ }
+ /**
+ * <code>required .hbase.pb.Scan scan = 1;</code>
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder getScanBuilder() {
+ bitField0_ |= 0x00000001;
+ onChanged();
+ return getScanFieldBuilder().getBuilder();
+ }
+ /**
+ * <code>required .hbase.pb.Scan scan = 1;</code>
+ */
+ public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder() {
+ if (scanBuilder_ != null) {
+ return scanBuilder_.getMessageOrBuilder();
+ } else {
+ return scan_;
+ }
+ }
+ /**
+ * <code>required .hbase.pb.Scan scan = 1;</code>
+ */
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder>
+ getScanFieldBuilder() {
+ if (scanBuilder_ == null) {
+ scanBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder>(
+ scan_,
+ getParentForChildren(),
+ isClean());
+ scan_ = null;
+ }
+ return scanBuilder_;
+ }
+
+ // required .hbase.pb.BulkDeleteRequest.DeleteType deleteType = 2;
+ private org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType deleteType_ = org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType.ROW;
+ /**
+ * <code>required .hbase.pb.BulkDeleteRequest.DeleteType deleteType = 2;</code>
+ */
+ public boolean hasDeleteType() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * <code>required .hbase.pb.BulkDeleteRequest.DeleteType deleteType = 2;</code>
+ */
+ public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType getDeleteType() {
+ return deleteType_;
+ }
+ /**
+ * <code>required .hbase.pb.BulkDeleteRequest.DeleteType deleteType = 2;</code>
+ */
+ public Builder setDeleteType(org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000002;
+ deleteType_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>required .hbase.pb.BulkDeleteRequest.DeleteType deleteType = 2;</code>
+ */
+ public Builder clearDeleteType() {
+ bitField0_ = (bitField0_ & ~0x00000002);
+ deleteType_ = org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.DeleteType.ROW;
+ onChanged();
+ return this;
+ }
+
+ // optional uint64 timestamp = 3;
+ private long timestamp_ ;
+ /**
+ * <code>optional uint64 timestamp = 3;</code>
+ */
+ public boolean hasTimestamp() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+ /**
+ * <code>optional uint64 timestamp = 3;</code>
+ */
+ public long getTimestamp() {
+ return timestamp_;
+ }
+ /**
+ * <code>optional uint64 timestamp = 3;</code>
+ */
+ public Builder setTimestamp(long value) {
+ bitField0_ |= 0x00000004;
+ timestamp_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional uint64 timestamp = 3;</code>
+ */
+ public Builder clearTimestamp() {
+ bitField0_ = (bitField0_ & ~0x00000004);
+ timestamp_ = 0L;
+ onChanged();
+ return this;
+ }
+
+ // required uint32 rowBatchSize = 4;
+ private int rowBatchSize_ ;
+ /**
+ * <code>required uint32 rowBatchSize = 4;</code>
+ */
+ public boolean hasRowBatchSize() {
+ return ((bitField0_ & 0x00000008) == 0x00000008);
+ }
+ /**
+ * <code>required uint32 rowBatchSize = 4;</code>
+ */
+ public int getRowBatchSize() {
+ return rowBatchSize_;
+ }
+ /**
+ * <code>required uint32 rowBatchSize = 4;</code>
+ */
+ public Builder setRowBatchSize(int value) {
+ bitField0_ |= 0x00000008;
+ rowBatchSize_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>required uint32 rowBatchSize = 4;</code>
+ */
+ public Builder clearRowBatchSize() {
+ bitField0_ = (bitField0_ & ~0x00000008);
+ rowBatchSize_ = 0;
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:hbase.pb.BulkDeleteRequest)
+ }
+
+ static {
+ defaultInstance = new BulkDeleteRequest(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:hbase.pb.BulkDeleteRequest)
+ }
+
+ public interface BulkDeleteResponseOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required uint64 rowsDeleted = 1;
+ /**
+ * <code>required uint64 rowsDeleted = 1;</code>
+ */
+ boolean hasRowsDeleted();
+ /**
+ * <code>required uint64 rowsDeleted = 1;</code>
+ */
+ long getRowsDeleted();
+
+ // optional uint64 versionsDeleted = 2;
+ /**
+ * <code>optional uint64 versionsDeleted = 2;</code>
+ */
+ boolean hasVersionsDeleted();
+ /**
+ * <code>optional uint64 versionsDeleted = 2;</code>
+ */
+ long getVersionsDeleted();
+ }
+ /**
+ * Protobuf type {@code hbase.pb.BulkDeleteResponse}
+ */
+ public static final class BulkDeleteResponse extends
+ com.google.protobuf.GeneratedMessage
+ implements BulkDeleteResponseOrBuilder {
+ // Use BulkDeleteResponse.newBuilder() to construct.
+ private BulkDeleteResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+ super(builder);
+ this.unknownFields = builder.getUnknownFields();
+ }
+ private BulkDeleteResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
+
+ private static final BulkDeleteResponse defaultInstance;
+ public static BulkDeleteResponse getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public BulkDeleteResponse getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ private final com.google.protobuf.UnknownFieldSet unknownFields;
+ @java.lang.Override
+ public final com.google.protobuf.UnknownFieldSet
+ getUnknownFields() {
+ return this.unknownFields;
+ }
+ private BulkDeleteResponse(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ initFields();
+ int mutable_bitField0_ = 0;
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder();
+ try {
+ boolean done = false;
+ while (!done) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ done = true;
+ break;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ done = true;
+ }
+ break;
+ }
+ case 8: {
+ bitField0_ |= 0x00000001;
+ rowsDeleted_ = input.readUInt64();
+ break;
+ }
+ case 16: {
+ bitField0_ |= 0x00000002;
+ versionsDeleted_ = input.readUInt64();
+ break;
+ }
+ }
+ }
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw e.setUnfinishedMessage(this);
+ } catch (java.io.IOException e) {
+ throw new com.google.protobuf.InvalidProtocolBufferException(
+ e.getMessage()).setUnfinishedMessage(this);
+ } finally {
+ this.unknownFields = unknownFields.build();
+ makeExtensionsImmutable();
+ }
+ }
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_hbase_pb_BulkDeleteResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_hbase_pb_BulkDeleteResponse_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.class, org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.Builder.class);
+ }
+
+ public static com.google.protobuf.Parser<BulkDeleteResponse> PARSER =
+ new com.google.protobuf.AbstractParser<BulkDeleteResponse>() {
+ public BulkDeleteResponse parsePartialFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return new BulkDeleteResponse(input, extensionRegistry);
+ }
+ };
+
+ @java.lang.Override
+ public com.google.protobuf.Parser<BulkDeleteResponse> getParserForType() {
+ return PARSER;
+ }
+
+ private int bitField0_;
+ // required uint64 rowsDeleted = 1;
+ public static final int ROWSDELETED_FIELD_NUMBER = 1;
+ private long rowsDeleted_;
+ /**
+ * <code>required uint64 rowsDeleted = 1;</code>
+ */
+ public boolean hasRowsDeleted() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * <code>required uint64 rowsDeleted = 1;</code>
+ */
+ public long getRowsDeleted() {
+ return rowsDeleted_;
+ }
+
+ // optional uint64 versionsDeleted = 2;
+ public static final int VERSIONSDELETED_FIELD_NUMBER = 2;
+ private long versionsDeleted_;
+ /**
+ * <code>optional uint64 versionsDeleted = 2;</code>
+ */
+ public boolean hasVersionsDeleted() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * <code>optional uint64 versionsDeleted = 2;</code>
+ */
+ public long getVersionsDeleted() {
+ return versionsDeleted_;
+ }
+
+ private void initFields() {
+ rowsDeleted_ = 0L;
+ versionsDeleted_ = 0L;
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasRowsDeleted()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeUInt64(1, rowsDeleted_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ output.writeUInt64(2, versionsDeleted_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeUInt64Size(1, rowsDeleted_);
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeUInt64Size(2, versionsDeleted_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse other = (org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse) obj;
+
+ boolean result = true;
+ result = result && (hasRowsDeleted() == other.hasRowsDeleted());
+ if (hasRowsDeleted()) {
+ result = result && (getRowsDeleted()
+ == other.getRowsDeleted());
+ }
+ result = result && (hasVersionsDeleted() == other.hasVersionsDeleted());
+ if (hasVersionsDeleted()) {
+ result = result && (getVersionsDeleted()
+ == other.getVersionsDeleted());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ private int memoizedHashCode = 0;
+ @java.lang.Override
+ public int hashCode() {
+ if (memoizedHashCode != 0) {
+ return memoizedHashCode;
+ }
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasRowsDeleted()) {
+ hash = (37 * hash) + ROWSDELETED_FIELD_NUMBER;
+ hash = (53 * hash) + hashLong(getRowsDeleted());
+ }
+ if (hasVersionsDeleted()) {
+ hash = (37 * hash) + VERSIONSDELETED_FIELD_NUMBER;
+ hash = (53 * hash) + hashLong(getVersionsDeleted());
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ memoizedHashCode = hash;
+ return hash;
+ }
+
+ public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return PARSER.parseFrom(data, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseDelimitedFrom(input, extensionRegistry);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input);
+ }
+ public static org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return PARSER.parseFrom(input, extensionRegistry);
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ /**
+ * Protobuf type {@code hbase.pb.BulkDeleteResponse}
+ */
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponseOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_hbase_pb_BulkDeleteResponse_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_hbase_pb_BulkDeleteResponse_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.class, org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.Builder.class);
+ }
+
+ // Construct using org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ rowsDeleted_ = 0L;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ versionsDeleted_ = 0L;
+ bitField0_ = (bitField0_ & ~0x00000002);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.internal_static_hbase_pb_BulkDeleteResponse_descriptor;
+ }
+
+ public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse getDefaultInstanceForType() {
+ return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse build() {
+ org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse buildPartial() {
+ org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse result = new org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.rowsDeleted_ = rowsDeleted_;
+ if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+ to_bitField0_ |= 0x00000002;
+ }
+ result.versionsDeleted_ = versionsDeleted_;
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse) {
+ return mergeFrom((org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse other) {
+ if (other == org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.getDefaultInstance()) return this;
+ if (other.hasRowsDeleted()) {
+ setRowsDeleted(other.getRowsDeleted());
+ }
+ if (other.hasVersionsDeleted()) {
+ setVersionsDeleted(other.getVersionsDeleted());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ if (!hasRowsDeleted()) {
+
+ return false;
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse) e.getUnfinishedMessage();
+ throw e;
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
+ }
+ }
+ return this;
+ }
+ private int bitField0_;
+
+ // required uint64 rowsDeleted = 1;
+ private long rowsDeleted_ ;
+ /**
+ * <code>required uint64 rowsDeleted = 1;</code>
+ */
+ public boolean hasRowsDeleted() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * <code>required uint64 rowsDeleted = 1;</code>
+ */
+ public long getRowsDeleted() {
+ return rowsDeleted_;
+ }
+ /**
+ * <code>required uint64 rowsDeleted = 1;</code>
+ */
+ public Builder setRowsDeleted(long value) {
+ bitField0_ |= 0x00000001;
+ rowsDeleted_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>required uint64 rowsDeleted = 1;</code>
+ */
+ public Builder clearRowsDeleted() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ rowsDeleted_ = 0L;
+ onChanged();
+ return this;
+ }
+
+ // optional uint64 versionsDeleted = 2;
+ private long versionsDeleted_ ;
+ /**
+ * <code>optional uint64 versionsDeleted = 2;</code>
+ */
+ public boolean hasVersionsDeleted() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * <code>optional uint64 versionsDeleted = 2;</code>
+ */
+ public long getVersionsDeleted() {
+ return versionsDeleted_;
+ }
+ /**
+ * <code>optional uint64 versionsDeleted = 2;</code>
+ */
+ public Builder setVersionsDeleted(long value) {
+ bitField0_ |= 0x00000002;
+ versionsDeleted_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * <code>optional uint64 versionsDeleted = 2;</code>
+ */
+ public Builder clearVersionsDeleted() {
+ bitField0_ = (bitField0_ & ~0x00000002);
+ versionsDeleted_ = 0L;
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:hbase.pb.BulkDeleteResponse)
+ }
+
+ static {
+ defaultInstance = new BulkDeleteResponse(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:hbase.pb.BulkDeleteResponse)
+ }
+
+ /**
+ * Protobuf service {@code hbase.pb.BulkDeleteService}
+ */
+ public static abstract class BulkDeleteService
+ implements com.google.protobuf.Service {
+ protected BulkDeleteService() {}
+
+ public interface Interface {
+ /**
+ * <code>rpc delete(.hbase.pb.BulkDeleteRequest) returns (.hbase.pb.BulkDeleteResponse);</code>
+ */
+ public abstract void delete(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse> done);
+
+ }
+
+ public static com.google.protobuf.Service newReflectiveService(
+ final Interface impl) {
+ return new BulkDeleteService() {
+ @java.lang.Override
+ public void delete(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse> done) {
+ impl.delete(controller, request, done);
+ }
+
+ };
+ }
+
+ public static com.google.protobuf.BlockingService
+ newReflectiveBlockingService(final BlockingInterface impl) {
+ return new com.google.protobuf.BlockingService() {
+ public final com.google.protobuf.Descriptors.ServiceDescriptor
+ getDescriptorForType() {
+ return getDescriptor();
+ }
+
+ public final com.google.protobuf.Message callBlockingMethod(
+ com.google.protobuf.Descriptors.MethodDescriptor method,
+ com.google.protobuf.RpcController controller,
+ com.google.protobuf.Message request)
+ throws com.google.protobuf.ServiceException {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.callBlockingMethod() given method descriptor for " +
+ "wrong service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ return impl.delete(controller, (org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest)request);
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ public final com.google.protobuf.Message
+ getRequestPrototype(
+ com.google.protobuf.Descriptors.MethodDescriptor method) {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.getRequestPrototype() given method " +
+ "descriptor for wrong service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.getDefaultInstance();
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ public final com.google.protobuf.Message
+ getResponsePrototype(
+ com.google.protobuf.Descriptors.MethodDescriptor method) {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.getResponsePrototype() given method " +
+ "descriptor for wrong service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.getDefaultInstance();
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ };
+ }
+
+ /**
+ * <code>rpc delete(.hbase.pb.BulkDeleteRequest) returns (.hbase.pb.BulkDeleteResponse);</code>
+ */
+ public abstract void delete(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse> done);
+
+ public static final
+ com.google.protobuf.Descriptors.ServiceDescriptor
+ getDescriptor() {
+ return
org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.getDescriptor().getServices().get(0);
+ }
+ public final com.google.protobuf.Descriptors.ServiceDescriptor
+ getDescriptorForType() {
+ return getDescriptor();
+ }
+
+ public final void callMethod(
+ com.google.protobuf.Descriptors.MethodDescriptor method,
+ com.google.protobuf.RpcController controller,
+ com.google.protobuf.Message request,
+ com.google.protobuf.RpcCallback<
+ com.google.protobuf.Message> done) {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.callMethod() given method descriptor for wrong " +
+ "service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ this.delete(controller, (org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest)request,
+ com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse>specializeCallback(
+ done));
+ return;
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ public final com.google.protobuf.Message
+ getRequestPrototype(
+ com.google.protobuf.Descriptors.MethodDescriptor method) {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.getRequestPrototype() given method " +
+ "descriptor for wrong service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest.getDefaultInstance();
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ public final com.google.protobuf.Message
+ getResponsePrototype(
+ com.google.protobuf.Descriptors.MethodDescriptor method) {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.getResponsePrototype() given method " +
+ "descriptor for wrong service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ return org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.getDefaultInstance();
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ public static Stub newStub(
+ com.google.protobuf.RpcChannel channel) {
+ return new Stub(channel);
+ }
+
+ public static final class Stub extends org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteService implements Interface {
+ private Stub(com.google.protobuf.RpcChannel channel) {
+ this.channel = channel;
+ }
+
+ private final com.google.protobuf.RpcChannel channel;
+
+ public com.google.protobuf.RpcChannel getChannel() {
+ return channel;
+ }
+
+ public void delete(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse> done) {
+ channel.callMethod(
+ getDescriptor().getMethods().get(0),
+ controller,
+ request,
+ org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.getDefaultInstance(),
+ com.google.protobuf.RpcUtil.generalizeCallback(
+ done,
+ org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.class,
+ org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.getDefaultInstance()));
+ }
+ }
+
+ public static BlockingInterface newBlockingStub(
+ com.google.protobuf.BlockingRpcChannel channel) {
+ return new BlockingStub(channel);
+ }
+
+ public interface BlockingInterface {
+ public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse delete(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest request)
+ throws com.google.protobuf.ServiceException;
+ }
+
+ private static final class BlockingStub implements BlockingInterface {
+ private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
+ this.channel = channel;
+ }
+
+ private final com.google.protobuf.BlockingRpcChannel channel;
+
+ public org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse delete(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest request)
+ throws com.google.protobuf.ServiceException {
+ return (org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse) channel.callBlockingMethod(
+ getDescriptor().getMethods().get(0),
+ controller,
+ request,
+ org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse.getDefaultInstance());
+ }
+
+ }
+
+ // @@protoc_insertion_point(class_scope:hbase.pb.BulkDeleteService)
+ }
+
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_hbase_pb_BulkDeleteRequest_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_hbase_pb_BulkDeleteRequest_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_hbase_pb_BulkDeleteResponse_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_hbase_pb_BulkDeleteResponse_fieldAccessorTable;
+
+ public static com.google.protobuf.Descriptors.FileDescriptor
+ getDescriptor() {
+ return descriptor;
+ }
+ private static com.google.protobuf.Descriptors.FileDescriptor
+ descriptor;
+ static {
+ java.lang.String[] descriptorData = {
+ "\n\020BulkDelete.proto\022\010hbase.pb\032\014Client.pro" +
+ "to\"\322\001\n\021BulkDeleteRequest\022\034\n\004scan\030\001
\002(\0132\016" +
+ ".hbase.pb.Scan\022:\n\ndeleteType\030\002 \002(\0162&.hba" +
+ "se.pb.BulkDeleteRequest.DeleteType\022\021\n\tti" +
+ "mestamp\030\003 \001(\004\022\024\n\014rowBatchSize\030\004
\002(\r\":\n\nD" +
+ "eleteType\022\007\n\003ROW\020\000\022\n\n\006FAMILY\020\001\022\n\n\006COLUMN" +
+ "\020\002\022\013\n\007VERSION\020\003\"B\n\022BulkDeleteResponse\022\023\n" +
+ "\013rowsDeleted\030\001 \002(\004\022\027\n\017versionsDeleted\030\002
" +
+ "\001(\0042X\n\021BulkDeleteService\022C\n\006delete\022\033.hba" +
+ "se.pb.BulkDeleteRequest\032\034.hbase.pb.BulkD",
+ "eleteResponseBQ\n5org.apache.hadoop.hbase" +
+ ".coprocessor.example.generatedB\020BulkDele" +
+ "teProtosH\001\210\001\001\240\001\001"
+ };
+ com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
+ new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
+ public com.google.protobuf.ExtensionRegistry assignDescriptors(
+ com.google.protobuf.Descriptors.FileDescriptor root) {
+ descriptor = root;
+ internal_static_hbase_pb_BulkDeleteRequest_descriptor =
+ getDescriptor().getMessageTypes().get(0);
+ internal_static_hbase_pb_BulkDeleteRequest_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_hbase_pb_BulkDeleteRequest_descriptor,
+ new java.lang.String[] { "Scan", "DeleteType", "Timestamp", "RowBatchSize", });
+ internal_static_hbase_pb_BulkDeleteResponse_descriptor =
+ getDescriptor().getMessageTypes().get(1);
+ internal_static_hbase_pb_BulkDeleteResponse_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_hbase_pb_BulkDeleteResponse_descriptor,
+ new java.lang.String[] { "RowsDeleted", "VersionsDeleted", });
+ return null;
+ }
+ };
+ com.google.protobuf.Descriptors.FileDescriptor
+ .internalBuildGeneratedFileFrom(descriptorData,
+ new com.google.protobuf.Descriptors.FileDescriptor[] {
+ org.apache.hadoop.hbase.protobuf.generated.ClientProtos.getDescriptor(),
+ }, assigner);
+ }
+
+ // @@protoc_insertion_point(outer_class_scope)
+}
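
A minimal client-side sketch of how this generated endpoint is typically invoked through the HBase coprocessor round-trip (illustrative only, not part of the commit above; it assumes an HBase 1.x-style org.apache.hadoop.hbase.client.Table handle, ProtobufUtil.toScan() for the Scan conversion, and an arbitrary rowBatchSize of 500; the exact round-trip helpers vary by client version):

import java.io.IOException;
import java.util.Map;

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.coprocessor.Batch;
import org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteRequest;
import org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteResponse;
import org.apache.hadoop.hbase.coprocessor.example.generated.BulkDeleteProtos.BulkDeleteService;
import org.apache.hadoop.hbase.ipc.BlockingRpcCallback;
import org.apache.hadoop.hbase.ipc.ServerRpcController;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;

public class BulkDeleteClientSketch {
  public static long bulkDelete(Table table, final Scan scan) throws Throwable {
    // build() enforces the required fields declared above: scan, deleteType, rowBatchSize.
    final BulkDeleteRequest request = BulkDeleteRequest.newBuilder()
        .setScan(ProtobufUtil.toScan(scan))
        .setDeleteType(BulkDeleteRequest.DeleteType.ROW)
        .setRowBatchSize(500) // example value; tune to the deployment
        .build();
    // Fan the delete() RPC out to every region covered by the scan range.
    Map<byte[], BulkDeleteResponse> responses = table.coprocessorService(
        BulkDeleteService.class, scan.getStartRow(), scan.getStopRow(),
        new Batch.Call<BulkDeleteService, BulkDeleteResponse>() {
          public BulkDeleteResponse call(BulkDeleteService service) throws IOException {
            ServerRpcController controller = new ServerRpcController();
            BlockingRpcCallback<BulkDeleteResponse> callback =
                new BlockingRpcCallback<BulkDeleteResponse>();
            service.delete(controller, request, callback); // generated stub method
            return callback.get();
          }
        });
    long rowsDeleted = 0;
    for (BulkDeleteResponse response : responses.values()) {
      rowsDeleted += response.getRowsDeleted(); // required uint64 rowsDeleted = 1
    }
    return rowsDeleted;
  }
}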