HBASE-15806 An endpoint-based export tool (ChiaPing Tsai)
Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/c03ea895
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/c03ea895
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/c03ea895

Branch: refs/heads/master
Commit: c03ea895c4c9c9fec59e4b9e14280c5b867ff3eb
Parents: 7130a22
Author: tedyu <yuzhih...@gmail.com>
Authored: Mon May 23 10:51:25 2016 -0700
Committer: tedyu <yuzhih...@gmail.com>
Committed: Mon May 23 10:51:25 2016 -0700

----------------------------------------------------------------------
 hbase-protocol/pom.xml                          |    1 +
 .../hbase/protobuf/generated/ExportProtos.java  | 1990 ++++++++++++++++++
 hbase-protocol/src/main/protobuf/Export.proto   |   44 +
 .../hbase/coprocessor/ExportEndpoint.java       |  280 +++
 .../apache/hadoop/hbase/mapreduce/Export.java   |   11 +-
 .../hbase/mapreduce/TestImportExport.java       |  463 ++--
 6 files changed, 2621 insertions(+), 168 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/c03ea895/hbase-protocol/pom.xml
----------------------------------------------------------------------
diff --git a/hbase-protocol/pom.xml b/hbase-protocol/pom.xml
index 56ab13a..60ec46f 100644
--- a/hbase-protocol/pom.xml
+++ b/hbase-protocol/pom.xml
@@ -180,6 +180,7 @@
                         <include>Comparator.proto</include>
                         <include>Encryption.proto</include>
                         <include>ErrorHandling.proto</include>
+                        <include>Export.proto</include>
                         <include>FS.proto</include>
                         <include>Filter.proto</include>
                         <include>HBase.proto</include>

http://git-wip-us.apache.org/repos/asf/hbase/blob/c03ea895/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ExportProtos.java
----------------------------------------------------------------------
diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ExportProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ExportProtos.java
new file mode 100644
index 0000000..4a503fc
--- /dev/null
+++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ExportProtos.java
@@ -0,0 +1,1990 @@
+// Generated by the protocol buffer compiler. DO NOT EDIT!
+// source: Export.proto
+
+package org.apache.hadoop.hbase.protobuf.generated;
+
+public final class ExportProtos {
+  private ExportProtos() {}
+  public static void registerAllExtensions(
+      com.google.protobuf.ExtensionRegistry registry) {
+  }
+  public interface ExportRequestOrBuilder
+      extends com.google.protobuf.MessageOrBuilder {
+
+    // required .hbase.pb.Scan scan = 1;
+    /**
+     * <code>required .hbase.pb.Scan scan = 1;</code>
+     */
+    boolean hasScan();
+    /**
+     * <code>required .hbase.pb.Scan scan = 1;</code>
+     */
+    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan();
+    /**
+     * <code>required .hbase.pb.Scan scan = 1;</code>
+     */
+    org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder();
+
+    // required string outputPath = 2;
+    /**
+     * <code>required string outputPath = 2;</code>
+     */
+    boolean hasOutputPath();
+    /**
+     * <code>required string outputPath = 2;</code>
+     */
+    java.lang.String getOutputPath();
+    /**
+     * <code>required string outputPath = 2;</code>
+     */
+    com.google.protobuf.ByteString
+        getOutputPathBytes();
+
+    // optional bool compressed = 3 [default = false];
+    /**
+     * <code>optional bool compressed = 3 [default = false];</code>
+     */
+    boolean hasCompressed();
+    /**
+     * <code>optional bool compressed = 3 [default = false];</code>
+     */
+    boolean getCompressed();
+
+    // optional string compressType = 4;
+    /**
+     * <code>optional string compressType = 4;</code>
+     */
+    boolean hasCompressType();
+    /**
+     * <code>optional string compressType = 4;</code>
+     */
+    java.lang.String getCompressType();
+    /**
+     * <code>optional string compressType = 4;</code>
+     */
+    com.google.protobuf.ByteString
+        getCompressTypeBytes();
+
+    // optional string compressCodec = 5;
+    /**
+     * <code>optional string compressCodec = 5;</code>
+     */
+    boolean hasCompressCodec();
+    /**
+     * <code>optional string compressCodec = 5;</code>
+     */
+    java.lang.String getCompressCodec();
+    /**
+     * <code>optional string compressCodec = 5;</code>
+     */
+    com.google.protobuf.ByteString
+        getCompressCodecBytes();
+  }
+  /**
+   * Protobuf type {@code hbase.pb.ExportRequest}
+   */
+  public static final class ExportRequest extends
+      com.google.protobuf.GeneratedMessage
+      implements ExportRequestOrBuilder {
+    // Use ExportRequest.newBuilder() to construct.
+ private ExportRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private ExportRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final ExportRequest defaultInstance; + public static ExportRequest getDefaultInstance() { + return defaultInstance; + } + + public ExportRequest getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private ExportRequest( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder subBuilder = null; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + subBuilder = scan_.toBuilder(); + } + scan_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.PARSER, extensionRegistry); + if (subBuilder != null) { + subBuilder.mergeFrom(scan_); + scan_ = subBuilder.buildPartial(); + } + bitField0_ |= 0x00000001; + break; + } + case 18: { + bitField0_ |= 0x00000002; + outputPath_ = input.readBytes(); + break; + } + case 24: { + bitField0_ |= 0x00000004; + compressed_ = input.readBool(); + break; + } + case 34: { + bitField0_ |= 0x00000008; + compressType_ = input.readBytes(); + break; + } + case 42: { + bitField0_ |= 0x00000010; + compressCodec_ = input.readBytes(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ExportProtos.internal_static_hbase_pb_ExportRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ExportProtos.internal_static_hbase_pb_ExportRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest.class, org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest.Builder.class); + } + + public static com.google.protobuf.Parser<ExportRequest> PARSER = + new com.google.protobuf.AbstractParser<ExportRequest>() { + public ExportRequest parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ExportRequest(input, extensionRegistry); + } + }; + + @java.lang.Override + public 
com.google.protobuf.Parser<ExportRequest> getParserForType() { + return PARSER; + } + + private int bitField0_; + // required .hbase.pb.Scan scan = 1; + public static final int SCAN_FIELD_NUMBER = 1; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan scan_; + /** + * <code>required .hbase.pb.Scan scan = 1;</code> + */ + public boolean hasScan() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * <code>required .hbase.pb.Scan scan = 1;</code> + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan() { + return scan_; + } + /** + * <code>required .hbase.pb.Scan scan = 1;</code> + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder() { + return scan_; + } + + // required string outputPath = 2; + public static final int OUTPUTPATH_FIELD_NUMBER = 2; + private java.lang.Object outputPath_; + /** + * <code>required string outputPath = 2;</code> + */ + public boolean hasOutputPath() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * <code>required string outputPath = 2;</code> + */ + public java.lang.String getOutputPath() { + java.lang.Object ref = outputPath_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + outputPath_ = s; + } + return s; + } + } + /** + * <code>required string outputPath = 2;</code> + */ + public com.google.protobuf.ByteString + getOutputPathBytes() { + java.lang.Object ref = outputPath_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + outputPath_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // optional bool compressed = 3 [default = false]; + public static final int COMPRESSED_FIELD_NUMBER = 3; + private boolean compressed_; + /** + * <code>optional bool compressed = 3 [default = false];</code> + */ + public boolean hasCompressed() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + /** + * <code>optional bool compressed = 3 [default = false];</code> + */ + public boolean getCompressed() { + return compressed_; + } + + // optional string compressType = 4; + public static final int COMPRESSTYPE_FIELD_NUMBER = 4; + private java.lang.Object compressType_; + /** + * <code>optional string compressType = 4;</code> + */ + public boolean hasCompressType() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + /** + * <code>optional string compressType = 4;</code> + */ + public java.lang.String getCompressType() { + java.lang.Object ref = compressType_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + compressType_ = s; + } + return s; + } + } + /** + * <code>optional string compressType = 4;</code> + */ + public com.google.protobuf.ByteString + getCompressTypeBytes() { + java.lang.Object ref = compressType_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + compressType_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // optional string compressCodec = 5; + public static final int 
COMPRESSCODEC_FIELD_NUMBER = 5; + private java.lang.Object compressCodec_; + /** + * <code>optional string compressCodec = 5;</code> + */ + public boolean hasCompressCodec() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + /** + * <code>optional string compressCodec = 5;</code> + */ + public java.lang.String getCompressCodec() { + java.lang.Object ref = compressCodec_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + compressCodec_ = s; + } + return s; + } + } + /** + * <code>optional string compressCodec = 5;</code> + */ + public com.google.protobuf.ByteString + getCompressCodecBytes() { + java.lang.Object ref = compressCodec_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + compressCodec_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + private void initFields() { + scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); + outputPath_ = ""; + compressed_ = false; + compressType_ = ""; + compressCodec_ = ""; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasScan()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasOutputPath()) { + memoizedIsInitialized = 0; + return false; + } + if (!getScan().isInitialized()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeMessage(1, scan_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, getOutputPathBytes()); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeBool(3, compressed_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + output.writeBytes(4, getCompressTypeBytes()); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + output.writeBytes(5, getCompressCodecBytes()); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeMessageSize(1, scan_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, getOutputPathBytes()); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeBoolSize(3, compressed_); + } + if (((bitField0_ & 0x00000008) == 0x00000008)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(4, getCompressTypeBytes()); + } + if (((bitField0_ & 0x00000010) == 0x00000010)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(5, getCompressCodecBytes()); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws 
java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest other = (org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest) obj; + + boolean result = true; + result = result && (hasScan() == other.hasScan()); + if (hasScan()) { + result = result && getScan() + .equals(other.getScan()); + } + result = result && (hasOutputPath() == other.hasOutputPath()); + if (hasOutputPath()) { + result = result && getOutputPath() + .equals(other.getOutputPath()); + } + result = result && (hasCompressed() == other.hasCompressed()); + if (hasCompressed()) { + result = result && (getCompressed() + == other.getCompressed()); + } + result = result && (hasCompressType() == other.hasCompressType()); + if (hasCompressType()) { + result = result && getCompressType() + .equals(other.getCompressType()); + } + result = result && (hasCompressCodec() == other.hasCompressCodec()); + if (hasCompressCodec()) { + result = result && getCompressCodec() + .equals(other.getCompressCodec()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasScan()) { + hash = (37 * hash) + SCAN_FIELD_NUMBER; + hash = (53 * hash) + getScan().hashCode(); + } + if (hasOutputPath()) { + hash = (37 * hash) + OUTPUTPATH_FIELD_NUMBER; + hash = (53 * hash) + getOutputPath().hashCode(); + } + if (hasCompressed()) { + hash = (37 * hash) + COMPRESSED_FIELD_NUMBER; + hash = (53 * hash) + hashBoolean(getCompressed()); + } + if (hasCompressType()) { + hash = (37 * hash) + COMPRESSTYPE_FIELD_NUMBER; + hash = (53 * hash) + getCompressType().hashCode(); + } + if (hasCompressCodec()) { + hash = (37 * hash) + COMPRESSCODEC_FIELD_NUMBER; + hash = (53 * hash) + getCompressCodec().hashCode(); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest parseFrom(java.io.InputStream input) + 
throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code hbase.pb.ExportRequest} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder<Builder> + implements org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequestOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ExportProtos.internal_static_hbase_pb_ExportRequest_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ExportProtos.internal_static_hbase_pb_ExportRequest_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest.class, org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + getScanFieldBuilder(); + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + if (scanBuilder_ == null) { + scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); + } else { + scanBuilder_.clear(); + } + bitField0_ = (bitField0_ 
& ~0x00000001); + outputPath_ = ""; + bitField0_ = (bitField0_ & ~0x00000002); + compressed_ = false; + bitField0_ = (bitField0_ & ~0x00000004); + compressType_ = ""; + bitField0_ = (bitField0_ & ~0x00000008); + compressCodec_ = ""; + bitField0_ = (bitField0_ & ~0x00000010); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ExportProtos.internal_static_hbase_pb_ExportRequest_descriptor; + } + + public org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest build() { + org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest result = new org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + if (scanBuilder_ == null) { + result.scan_ = scan_; + } else { + result.scan_ = scanBuilder_.build(); + } + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.outputPath_ = outputPath_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.compressed_ = compressed_; + if (((from_bitField0_ & 0x00000008) == 0x00000008)) { + to_bitField0_ |= 0x00000008; + } + result.compressType_ = compressType_; + if (((from_bitField0_ & 0x00000010) == 0x00000010)) { + to_bitField0_ |= 0x00000010; + } + result.compressCodec_ = compressCodec_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest.getDefaultInstance()) return this; + if (other.hasScan()) { + mergeScan(other.getScan()); + } + if (other.hasOutputPath()) { + bitField0_ |= 0x00000002; + outputPath_ = other.outputPath_; + onChanged(); + } + if (other.hasCompressed()) { + setCompressed(other.getCompressed()); + } + if (other.hasCompressType()) { + bitField0_ |= 0x00000008; + compressType_ = other.compressType_; + onChanged(); + } + if (other.hasCompressCodec()) { + bitField0_ |= 0x00000010; + compressCodec_ = other.compressCodec_; + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasScan()) { + return false; + } + if (!hasOutputPath()) { + return false; + } + if (!getScan().isInitialized()) { + return false; + } + return true; + } + + public Builder mergeFrom( + 
com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // required .hbase.pb.Scan scan = 1; + private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder> scanBuilder_; + /** + * <code>required .hbase.pb.Scan scan = 1;</code> + */ + public boolean hasScan() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * <code>required .hbase.pb.Scan scan = 1;</code> + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan getScan() { + if (scanBuilder_ == null) { + return scan_; + } else { + return scanBuilder_.getMessage(); + } + } + /** + * <code>required .hbase.pb.Scan scan = 1;</code> + */ + public Builder setScan(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan value) { + if (scanBuilder_ == null) { + if (value == null) { + throw new NullPointerException(); + } + scan_ = value; + onChanged(); + } else { + scanBuilder_.setMessage(value); + } + bitField0_ |= 0x00000001; + return this; + } + /** + * <code>required .hbase.pb.Scan scan = 1;</code> + */ + public Builder setScan( + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder builderForValue) { + if (scanBuilder_ == null) { + scan_ = builderForValue.build(); + onChanged(); + } else { + scanBuilder_.setMessage(builderForValue.build()); + } + bitField0_ |= 0x00000001; + return this; + } + /** + * <code>required .hbase.pb.Scan scan = 1;</code> + */ + public Builder mergeScan(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan value) { + if (scanBuilder_ == null) { + if (((bitField0_ & 0x00000001) == 0x00000001) && + scan_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance()) { + scan_ = + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.newBuilder(scan_).mergeFrom(value).buildPartial(); + } else { + scan_ = value; + } + onChanged(); + } else { + scanBuilder_.mergeFrom(value); + } + bitField0_ |= 0x00000001; + return this; + } + /** + * <code>required .hbase.pb.Scan scan = 1;</code> + */ + public Builder clearScan() { + if (scanBuilder_ == null) { + scan_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.getDefaultInstance(); + onChanged(); + } else { + scanBuilder_.clear(); + } + bitField0_ = (bitField0_ & ~0x00000001); + return this; + } + /** + * <code>required .hbase.pb.Scan scan = 1;</code> + */ + public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder getScanBuilder() { + bitField0_ |= 0x00000001; + onChanged(); + return getScanFieldBuilder().getBuilder(); + } + /** + * <code>required .hbase.pb.Scan scan = 1;</code> + */ + public 
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder getScanOrBuilder() { + if (scanBuilder_ != null) { + return scanBuilder_.getMessageOrBuilder(); + } else { + return scan_; + } + } + /** + * <code>required .hbase.pb.Scan scan = 1;</code> + */ + private com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder> + getScanFieldBuilder() { + if (scanBuilder_ == null) { + scanBuilder_ = new com.google.protobuf.SingleFieldBuilder< + org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Scan.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ScanOrBuilder>( + scan_, + getParentForChildren(), + isClean()); + scan_ = null; + } + return scanBuilder_; + } + + // required string outputPath = 2; + private java.lang.Object outputPath_ = ""; + /** + * <code>required string outputPath = 2;</code> + */ + public boolean hasOutputPath() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * <code>required string outputPath = 2;</code> + */ + public java.lang.String getOutputPath() { + java.lang.Object ref = outputPath_; + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); + outputPath_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * <code>required string outputPath = 2;</code> + */ + public com.google.protobuf.ByteString + getOutputPathBytes() { + java.lang.Object ref = outputPath_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + outputPath_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * <code>required string outputPath = 2;</code> + */ + public Builder setOutputPath( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + outputPath_ = value; + onChanged(); + return this; + } + /** + * <code>required string outputPath = 2;</code> + */ + public Builder clearOutputPath() { + bitField0_ = (bitField0_ & ~0x00000002); + outputPath_ = getDefaultInstance().getOutputPath(); + onChanged(); + return this; + } + /** + * <code>required string outputPath = 2;</code> + */ + public Builder setOutputPathBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + outputPath_ = value; + onChanged(); + return this; + } + + // optional bool compressed = 3 [default = false]; + private boolean compressed_ ; + /** + * <code>optional bool compressed = 3 [default = false];</code> + */ + public boolean hasCompressed() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + /** + * <code>optional bool compressed = 3 [default = false];</code> + */ + public boolean getCompressed() { + return compressed_; + } + /** + * <code>optional bool compressed = 3 [default = false];</code> + */ + public Builder setCompressed(boolean value) { + bitField0_ |= 0x00000004; + compressed_ = value; + onChanged(); + return this; + } + /** + * <code>optional bool compressed = 3 [default = false];</code> + */ + public Builder clearCompressed() { + bitField0_ = (bitField0_ & ~0x00000004); + compressed_ = false; + onChanged(); + return this; + } + + // optional string 
compressType = 4; + private java.lang.Object compressType_ = ""; + /** + * <code>optional string compressType = 4;</code> + */ + public boolean hasCompressType() { + return ((bitField0_ & 0x00000008) == 0x00000008); + } + /** + * <code>optional string compressType = 4;</code> + */ + public java.lang.String getCompressType() { + java.lang.Object ref = compressType_; + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); + compressType_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * <code>optional string compressType = 4;</code> + */ + public com.google.protobuf.ByteString + getCompressTypeBytes() { + java.lang.Object ref = compressType_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + compressType_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * <code>optional string compressType = 4;</code> + */ + public Builder setCompressType( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000008; + compressType_ = value; + onChanged(); + return this; + } + /** + * <code>optional string compressType = 4;</code> + */ + public Builder clearCompressType() { + bitField0_ = (bitField0_ & ~0x00000008); + compressType_ = getDefaultInstance().getCompressType(); + onChanged(); + return this; + } + /** + * <code>optional string compressType = 4;</code> + */ + public Builder setCompressTypeBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000008; + compressType_ = value; + onChanged(); + return this; + } + + // optional string compressCodec = 5; + private java.lang.Object compressCodec_ = ""; + /** + * <code>optional string compressCodec = 5;</code> + */ + public boolean hasCompressCodec() { + return ((bitField0_ & 0x00000010) == 0x00000010); + } + /** + * <code>optional string compressCodec = 5;</code> + */ + public java.lang.String getCompressCodec() { + java.lang.Object ref = compressCodec_; + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); + compressCodec_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * <code>optional string compressCodec = 5;</code> + */ + public com.google.protobuf.ByteString + getCompressCodecBytes() { + java.lang.Object ref = compressCodec_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + compressCodec_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * <code>optional string compressCodec = 5;</code> + */ + public Builder setCompressCodec( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000010; + compressCodec_ = value; + onChanged(); + return this; + } + /** + * <code>optional string compressCodec = 5;</code> + */ + public Builder clearCompressCodec() { + bitField0_ = (bitField0_ & ~0x00000010); + compressCodec_ = getDefaultInstance().getCompressCodec(); + onChanged(); + return this; + } + /** + * <code>optional string compressCodec = 5;</code> + */ + public Builder setCompressCodecBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 
0x00000010; + compressCodec_ = value; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:hbase.pb.ExportRequest) + } + + static { + defaultInstance = new ExportRequest(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:hbase.pb.ExportRequest) + } + + public interface ExportResponseOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required uint64 rowCount = 1; + /** + * <code>required uint64 rowCount = 1;</code> + */ + boolean hasRowCount(); + /** + * <code>required uint64 rowCount = 1;</code> + */ + long getRowCount(); + + // required uint64 cellCount = 2; + /** + * <code>required uint64 cellCount = 2;</code> + */ + boolean hasCellCount(); + /** + * <code>required uint64 cellCount = 2;</code> + */ + long getCellCount(); + } + /** + * Protobuf type {@code hbase.pb.ExportResponse} + */ + public static final class ExportResponse extends + com.google.protobuf.GeneratedMessage + implements ExportResponseOrBuilder { + // Use ExportResponse.newBuilder() to construct. + private ExportResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private ExportResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final ExportResponse defaultInstance; + public static ExportResponse getDefaultInstance() { + return defaultInstance; + } + + public ExportResponse getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private ExportResponse( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 8: { + bitField0_ |= 0x00000001; + rowCount_ = input.readUInt64(); + break; + } + case 16: { + bitField0_ |= 0x00000002; + cellCount_ = input.readUInt64(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ExportProtos.internal_static_hbase_pb_ExportResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ExportProtos.internal_static_hbase_pb_ExportResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse.class, org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse.Builder.class); + } + + 
public static com.google.protobuf.Parser<ExportResponse> PARSER = + new com.google.protobuf.AbstractParser<ExportResponse>() { + public ExportResponse parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new ExportResponse(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser<ExportResponse> getParserForType() { + return PARSER; + } + + private int bitField0_; + // required uint64 rowCount = 1; + public static final int ROWCOUNT_FIELD_NUMBER = 1; + private long rowCount_; + /** + * <code>required uint64 rowCount = 1;</code> + */ + public boolean hasRowCount() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * <code>required uint64 rowCount = 1;</code> + */ + public long getRowCount() { + return rowCount_; + } + + // required uint64 cellCount = 2; + public static final int CELLCOUNT_FIELD_NUMBER = 2; + private long cellCount_; + /** + * <code>required uint64 cellCount = 2;</code> + */ + public boolean hasCellCount() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * <code>required uint64 cellCount = 2;</code> + */ + public long getCellCount() { + return cellCount_; + } + + private void initFields() { + rowCount_ = 0L; + cellCount_ = 0L; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasRowCount()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasCellCount()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeUInt64(1, rowCount_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeUInt64(2, cellCount_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(1, rowCount_); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(2, cellCount_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse)) { + return super.equals(obj); + } + org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse other = (org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse) obj; + + boolean result = true; + result = result && (hasRowCount() == other.hasRowCount()); + if (hasRowCount()) { + result = result && (getRowCount() + == other.getRowCount()); + } + result = result && (hasCellCount() == other.hasCellCount()); + if (hasCellCount()) { + result = result && 
(getCellCount() + == other.getCellCount()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasRowCount()) { + hash = (37 * hash) + ROWCOUNT_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getRowCount()); + } + if (hasCellCount()) { + hash = (37 * hash) + CELLCOUNT_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getCellCount()); + } + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse prototype) { + return newBuilder().mergeFrom(prototype); + } 
+ public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code hbase.pb.ExportResponse} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder<Builder> + implements org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponseOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.hbase.protobuf.generated.ExportProtos.internal_static_hbase_pb_ExportResponse_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.hbase.protobuf.generated.ExportProtos.internal_static_hbase_pb_ExportResponse_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse.class, org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse.Builder.class); + } + + // Construct using org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + rowCount_ = 0L; + bitField0_ = (bitField0_ & ~0x00000001); + cellCount_ = 0L; + bitField0_ = (bitField0_ & ~0x00000002); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hbase.protobuf.generated.ExportProtos.internal_static_hbase_pb_ExportResponse_descriptor; + } + + public org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse getDefaultInstanceForType() { + return org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse.getDefaultInstance(); + } + + public org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse build() { + org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse buildPartial() { + org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse result = new org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.rowCount_ = rowCount_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.cellCount_ = cellCount_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse) { + return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse)other); + } else { + 
super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse other) { + if (other == org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse.getDefaultInstance()) return this; + if (other.hasRowCount()) { + setRowCount(other.getRowCount()); + } + if (other.hasCellCount()) { + setCellCount(other.getCellCount()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasRowCount()) { + return false; + } + if (!hasCellCount()) { + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // required uint64 rowCount = 1; + private long rowCount_ ; + /** + * <code>required uint64 rowCount = 1;</code> + */ + public boolean hasRowCount() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * <code>required uint64 rowCount = 1;</code> + */ + public long getRowCount() { + return rowCount_; + } + /** + * <code>required uint64 rowCount = 1;</code> + */ + public Builder setRowCount(long value) { + bitField0_ |= 0x00000001; + rowCount_ = value; + onChanged(); + return this; + } + /** + * <code>required uint64 rowCount = 1;</code> + */ + public Builder clearRowCount() { + bitField0_ = (bitField0_ & ~0x00000001); + rowCount_ = 0L; + onChanged(); + return this; + } + + // required uint64 cellCount = 2; + private long cellCount_ ; + /** + * <code>required uint64 cellCount = 2;</code> + */ + public boolean hasCellCount() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * <code>required uint64 cellCount = 2;</code> + */ + public long getCellCount() { + return cellCount_; + } + /** + * <code>required uint64 cellCount = 2;</code> + */ + public Builder setCellCount(long value) { + bitField0_ |= 0x00000002; + cellCount_ = value; + onChanged(); + return this; + } + /** + * <code>required uint64 cellCount = 2;</code> + */ + public Builder clearCellCount() { + bitField0_ = (bitField0_ & ~0x00000002); + cellCount_ = 0L; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:hbase.pb.ExportResponse) + } + + static { + defaultInstance = new ExportResponse(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:hbase.pb.ExportResponse) + } + + /** + * Protobuf service {@code hbase.pb.ExportService} + */ + public static abstract class ExportService + implements com.google.protobuf.Service { + protected ExportService() {} + + public interface Interface { + /** + * <code>rpc export(.hbase.pb.ExportRequest) returns (.hbase.pb.ExportResponse);</code> + */ + public abstract void export( + com.google.protobuf.RpcController controller, + org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest request, + com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse> done); + + } + + 
+    public static com.google.protobuf.Service newReflectiveService(
+        final Interface impl) {
+      return new ExportService() {
+        @java.lang.Override
+        public void export(
+            com.google.protobuf.RpcController controller,
+            org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest request,
+            com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse> done) {
+          impl.export(controller, request, done);
+        }
+
+      };
+    }
+
+    public static com.google.protobuf.BlockingService
+        newReflectiveBlockingService(final BlockingInterface impl) {
+      return new com.google.protobuf.BlockingService() {
+        public final com.google.protobuf.Descriptors.ServiceDescriptor
+            getDescriptorForType() {
+          return getDescriptor();
+        }
+
+        public final com.google.protobuf.Message callBlockingMethod(
+            com.google.protobuf.Descriptors.MethodDescriptor method,
+            com.google.protobuf.RpcController controller,
+            com.google.protobuf.Message request)
+            throws com.google.protobuf.ServiceException {
+          if (method.getService() != getDescriptor()) {
+            throw new java.lang.IllegalArgumentException(
+              "Service.callBlockingMethod() given method descriptor for " +
+              "wrong service type.");
+          }
+          switch(method.getIndex()) {
+            case 0:
+              return impl.export(controller, (org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest)request);
+            default:
+              throw new java.lang.AssertionError("Can't get here.");
+          }
+        }
+
+        public final com.google.protobuf.Message
+            getRequestPrototype(
+            com.google.protobuf.Descriptors.MethodDescriptor method) {
+          if (method.getService() != getDescriptor()) {
+            throw new java.lang.IllegalArgumentException(
+              "Service.getRequestPrototype() given method " +
+              "descriptor for wrong service type.");
+          }
+          switch(method.getIndex()) {
+            case 0:
+              return org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest.getDefaultInstance();
+            default:
+              throw new java.lang.AssertionError("Can't get here.");
+          }
+        }
+
+        public final com.google.protobuf.Message
+            getResponsePrototype(
+            com.google.protobuf.Descriptors.MethodDescriptor method) {
+          if (method.getService() != getDescriptor()) {
+            throw new java.lang.IllegalArgumentException(
+              "Service.getResponsePrototype() given method " +
+              "descriptor for wrong service type.");
+          }
+          switch(method.getIndex()) {
+            case 0:
+              return org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse.getDefaultInstance();
+            default:
+              throw new java.lang.AssertionError("Can't get here.");
+          }
+        }
+
+      };
+    }
+
+    /**
+     * <code>rpc export(.hbase.pb.ExportRequest) returns (.hbase.pb.ExportResponse);</code>
+     */
+    public abstract void export(
+        com.google.protobuf.RpcController controller,
+        org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest request,
+        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse> done);
+
+    public static final
+        com.google.protobuf.Descriptors.ServiceDescriptor
+        getDescriptor() {
+      return org.apache.hadoop.hbase.protobuf.generated.ExportProtos.getDescriptor().getServices().get(0);
+    }
+    public final com.google.protobuf.Descriptors.ServiceDescriptor
+        getDescriptorForType() {
+      return getDescriptor();
+    }
+
+    public final void callMethod(
+        com.google.protobuf.Descriptors.MethodDescriptor method,
+        com.google.protobuf.RpcController controller,
+        com.google.protobuf.Message request,
+        com.google.protobuf.RpcCallback<
+          com.google.protobuf.Message> done) {
+      if (method.getService() != getDescriptor()) {
+        throw new java.lang.IllegalArgumentException(
+          "Service.callMethod() given method descriptor for wrong " +
+          "service type.");
+      }
+      switch(method.getIndex()) {
+        case 0:
+          this.export(controller, (org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest)request,
+            com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse>specializeCallback(
+              done));
+          return;
+        default:
+          throw new java.lang.AssertionError("Can't get here.");
+      }
+    }
+
+    public final com.google.protobuf.Message
+        getRequestPrototype(
+        com.google.protobuf.Descriptors.MethodDescriptor method) {
+      if (method.getService() != getDescriptor()) {
+        throw new java.lang.IllegalArgumentException(
+          "Service.getRequestPrototype() given method " +
+          "descriptor for wrong service type.");
+      }
+      switch(method.getIndex()) {
+        case 0:
+          return org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest.getDefaultInstance();
+        default:
+          throw new java.lang.AssertionError("Can't get here.");
+      }
+    }
+
+    public final com.google.protobuf.Message
+        getResponsePrototype(
+        com.google.protobuf.Descriptors.MethodDescriptor method) {
+      if (method.getService() != getDescriptor()) {
+        throw new java.lang.IllegalArgumentException(
+          "Service.getResponsePrototype() given method " +
+          "descriptor for wrong service type.");
+      }
+      switch(method.getIndex()) {
+        case 0:
+          return org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse.getDefaultInstance();
+        default:
+          throw new java.lang.AssertionError("Can't get here.");
+      }
+    }
+
+    public static Stub newStub(
+        com.google.protobuf.RpcChannel channel) {
+      return new Stub(channel);
+    }
+
+    public static final class Stub extends org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportService implements Interface {
+      private Stub(com.google.protobuf.RpcChannel channel) {
+        this.channel = channel;
+      }
+
+      private final com.google.protobuf.RpcChannel channel;
+
+      public com.google.protobuf.RpcChannel getChannel() {
+        return channel;
+      }
+
+      public void export(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest request,
+          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse> done) {
+        channel.callMethod(
+          getDescriptor().getMethods().get(0),
+          controller,
+          request,
+          org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse.getDefaultInstance(),
+          com.google.protobuf.RpcUtil.generalizeCallback(
+            done,
+            org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse.class,
+            org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse.getDefaultInstance()));
+      }
+    }
+
+    public static BlockingInterface newBlockingStub(
+        com.google.protobuf.BlockingRpcChannel channel) {
+      return new BlockingStub(channel);
+    }
+
+    public interface BlockingInterface {
+      public org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse export(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest request)
+          throws com.google.protobuf.ServiceException;
+    }
+
+    private static final class BlockingStub implements BlockingInterface {
+      private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
+        this.channel = channel;
+      }
+
+      private final com.google.protobuf.BlockingRpcChannel channel;
+
+      public org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse export(
+          com.google.protobuf.RpcController controller,
+          org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportRequest request)
+          throws com.google.protobuf.ServiceException {
+        return (org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse) channel.callBlockingMethod(
+          getDescriptor().getMethods().get(0),
+          controller,
+          request,
+          org.apache.hadoop.hbase.protobuf.generated.ExportProtos.ExportResponse.getDefaultInstance());
+      }
+
+    }
+
+    // @@protoc_insertion_point(class_scope:hbase.pb.ExportService)
+  }
+
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_hbase_pb_ExportRequest_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_hbase_pb_ExportRequest_fieldAccessorTable;
+  private static com.google.protobuf.Descriptors.Descriptor
+    internal_static_hbase_pb_ExportResponse_descriptor;
+  private static
+    com.google.protobuf.GeneratedMessage.FieldAccessorTable
+      internal_static_hbase_pb_ExportResponse_fieldAccessorTable;
+
+  public static com.google.protobuf.Descriptors.FileDescriptor
+      getDescriptor() {
+    return descriptor;
+  }
+  private static com.google.protobuf.Descriptors.FileDescriptor
+      descriptor;
+  static {
+    java.lang.String[] descriptorData = {
+      "\n\014Export.proto\022\010hbase.pb\032\014Client.proto\"\211" +
+      "\001\n\rExportRequest\022\034\n\004scan\030\001 \002(\0132\016.hbase.p" +
+      "b.Scan\022\022\n\noutputPath\030\002 \002(\t\022\031\n\ncompressed" +
+      "\030\003 \001(\010:\005false\022\024\n\014compressType\030\004 \001(\t\022\025\n\rc" +
+      "ompressCodec\030\005 \001(\t\"5\n\016ExportResponse\022\020\n\010" +
+      "rowCount\030\001 \002(\004\022\021\n\tcellCount\030\002 \002(\0042L\n\rExp" +
+      "ortService\022;\n\006export\022\027.hbase.pb.ExportRe" +
+      "quest\032\030.hbase.pb.ExportResponseBB\n*org.a" +
+      "pache.hadoop.hbase.protobuf.generatedB\014E" +
+      "xportProtosH\001\210\001\001\240\001\001"
+    };
+    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
+        new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
+          public com.google.protobuf.ExtensionRegistry assignDescriptors(
+              com.google.protobuf.Descriptors.FileDescriptor root) {
+            descriptor = root;
+            internal_static_hbase_pb_ExportRequest_descriptor =
+              getDescriptor().getMessageTypes().get(0);
+            internal_static_hbase_pb_ExportRequest_fieldAccessorTable = new
+              com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+                internal_static_hbase_pb_ExportRequest_descriptor,
+                new java.lang.String[] { "Scan", "OutputPath", "Compressed", "CompressType", "CompressCodec", });
+            internal_static_hbase_pb_ExportResponse_descriptor =
+              getDescriptor().getMessageTypes().get(1);
+            internal_static_hbase_pb_ExportResponse_fieldAccessorTable = new
+              com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+                internal_static_hbase_pb_ExportResponse_descriptor,
+                new java.lang.String[] { "RowCount", "CellCount", });
+            return null;
+          }
+        };
+    com.google.protobuf.Descriptors.FileDescriptor
+      .internalBuildGeneratedFileFrom(descriptorData,
+        new com.google.protobuf.Descriptors.FileDescriptor[] {
+          org.apache.hadoop.hbase.protobuf.generated.ClientProtos.getDescriptor(),
+        }, assigner);
+  }
+
+  // @@protoc_insertion_point(outer_class_scope)
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/c03ea895/hbase-protocol/src/main/protobuf/Export.proto
----------------------------------------------------------------------
diff --git a/hbase-protocol/src/main/protobuf/Export.proto b/hbase-protocol/src/main/protobuf/Export.proto
new file mode 100644
index 0000000..0551851
--- /dev/null
+++ b/hbase-protocol/src/main/protobuf/Export.proto
@@ -0,0 +1,44 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package hbase.pb;
+
+option java_package = "org.apache.hadoop.hbase.protobuf.generated";
+option java_outer_classname = "ExportProtos";
+option java_generate_equals_and_hash = true;
+option optimize_for = SPEED;
+option java_generic_services = true;
+
+import "Client.proto";
+
+service ExportService {
+  rpc export (ExportRequest) returns (ExportResponse);
+}
+
+message ExportRequest {
+  required Scan scan = 1;
+  required string outputPath = 2;
+  optional bool compressed = 3 [default = false];
+  optional string compressType = 4;
+  optional string compressCodec = 5;
+}
+message ExportResponse {
+  required uint64 rowCount = 1;
+  required uint64 cellCount = 2;
+}
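
For reference, below is a minimal client-side sketch of how the ExportService defined above can be driven through HBase's standard coprocessor RPC machinery. It is not part of this patch: the table name, scan, and output path are illustrative, and it assumes the endpoint (ExportEndpoint, added by this commit) is already loaded on the target table. Each region answers with its own rowCount/cellCount, which the client aggregates.

import java.io.IOException;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.coprocessor.Batch;
import org.apache.hadoop.hbase.ipc.BlockingRpcCallback;
import org.apache.hadoop.hbase.ipc.ServerRpcController;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.ExportProtos;

public class ExportClientSketch {
  public static void main(String[] args) throws Throwable {
    Configuration conf = HBaseConfiguration.create();
    try (Connection conn = ConnectionFactory.createConnection(conf);
         Table table = conn.getTable(TableName.valueOf("testtable"))) {
      // Build the ExportRequest from the proto above; the Scan is serialized
      // with the existing ClientProtos converter.
      final ExportProtos.ExportRequest request = ExportProtos.ExportRequest.newBuilder()
          .setScan(ProtobufUtil.toScan(new Scan()))
          .setOutputPath("/export/testtable")
          .build();
      // Invoke the export RPC on every region of the table (null start/end keys)
      // using the stock Batch.Call pattern for generated endpoint stubs.
      Map<byte[], ExportProtos.ExportResponse> responses = table.coprocessorService(
          ExportProtos.ExportService.class, null, null,
          new Batch.Call<ExportProtos.ExportService, ExportProtos.ExportResponse>() {
            @Override
            public ExportProtos.ExportResponse call(ExportProtos.ExportService service)
                throws IOException {
              ServerRpcController controller = new ServerRpcController();
              BlockingRpcCallback<ExportProtos.ExportResponse> callback =
                  new BlockingRpcCallback<ExportProtos.ExportResponse>();
              service.export(controller, request, callback);
              if (controller.failedOnException()) {
                throw controller.getFailedOn();
              }
              return callback.get();
            }
          });
      // Sum the per-region counters returned in each ExportResponse.
      long rows = 0, cells = 0;
      for (ExportProtos.ExportResponse r : responses.values()) {
        rows += r.getRowCount();
        cells += r.getCellCount();
      }
      System.out.println("exported rows=" + rows + ", cells=" + cells);
    }
  }
}

Because both rowCount and cellCount are required fields in ExportResponse, every region's reply carries usable counters, which is what allows this endpoint-based path to report the same totals as the MapReduce-based mapreduce.Export it complements.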