http://git-wip-us.apache.org/repos/asf/hive/blob/a310524c/metastore/src/gen/protobuf/gen-java/org/apache/hadoop/hive/metastore/hbase/HbaseMetastoreProto.java
----------------------------------------------------------------------
diff --git a/metastore/src/gen/protobuf/gen-java/org/apache/hadoop/hive/metastore/hbase/HbaseMetastoreProto.java b/metastore/src/gen/protobuf/gen-java/org/apache/hadoop/hive/metastore/hbase/HbaseMetastoreProto.java
index 314fc7f..5c5818a 100644
--- a/metastore/src/gen/protobuf/gen-java/org/apache/hadoop/hive/metastore/hbase/HbaseMetastoreProto.java
+++ b/metastore/src/gen/protobuf/gen-java/org/apache/hadoop/hive/metastore/hbase/HbaseMetastoreProto.java
@@ -10222,73 +10222,43 @@ public final class HbaseMetastoreProto {
     // @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.Database)
   }
-  public interface FieldSchemaOrBuilder
+  public interface DelegationTokenOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
-    // required string name = 1;
-    /**
-     * <code>required string name = 1;</code>
-     */
-    boolean hasName();
-    /**
-     * <code>required string name = 1;</code>
-     */
-    java.lang.String getName();
-    /**
-     * <code>required string name = 1;</code>
-     */
-    com.google.protobuf.ByteString
-        getNameBytes();
-
-    // required string type = 2;
-    /**
-     * <code>required string type = 2;</code>
-     */
-    boolean hasType();
-    /**
-     * <code>required string type = 2;</code>
-     */
-    java.lang.String getType();
-    /**
-     * <code>required string type = 2;</code>
-     */
-    com.google.protobuf.ByteString
-        getTypeBytes();
-
-    // optional string comment = 3;
+    // required string token_str = 1;
     /**
-     * <code>optional string comment = 3;</code>
+     * <code>required string token_str = 1;</code>
     */
-    boolean hasComment();
+    boolean hasTokenStr();
     /**
-     * <code>optional string comment = 3;</code>
+     * <code>required string token_str = 1;</code>
     */
-    java.lang.String getComment();
+    java.lang.String getTokenStr();
     /**
-     * <code>optional string comment = 3;</code>
+     * <code>required string token_str = 1;</code>
     */
     com.google.protobuf.ByteString
-        getCommentBytes();
+        getTokenStrBytes();
   }
   /**
-   * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.FieldSchema}
+   * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.DelegationToken}
   */
-  public static final class FieldSchema extends
+  public static final class DelegationToken extends
      com.google.protobuf.GeneratedMessage
-      implements FieldSchemaOrBuilder {
-    // Use FieldSchema.newBuilder() to construct.
-    private FieldSchema(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
+      implements DelegationTokenOrBuilder {
+    // Use DelegationToken.newBuilder() to construct.
+ private DelegationToken(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } - private FieldSchema(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private DelegationToken(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - private static final FieldSchema defaultInstance; - public static FieldSchema getDefaultInstance() { + private static final DelegationToken defaultInstance; + public static DelegationToken getDefaultInstance() { return defaultInstance; } - public FieldSchema getDefaultInstanceForType() { + public DelegationToken getDefaultInstanceForType() { return defaultInstance; } @@ -10298,7 +10268,7 @@ public final class HbaseMetastoreProto { getUnknownFields() { return this.unknownFields; } - private FieldSchema( + private DelegationToken( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { @@ -10323,17 +10293,7 @@ public final class HbaseMetastoreProto { } case 10: { bitField0_ |= 0x00000001; - name_ = input.readBytes(); - break; - } - case 18: { - bitField0_ |= 0x00000002; - type_ = input.readBytes(); - break; - } - case 26: { - bitField0_ |= 0x00000004; - comment_ = input.readBytes(); + tokenStr_ = input.readBytes(); break; } } @@ -10350,132 +10310,46 @@ public final class HbaseMetastoreProto { } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_FieldSchema_descriptor; + return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_DelegationToken_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_FieldSchema_fieldAccessorTable + return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_DelegationToken_fieldAccessorTable .ensureFieldAccessorsInitialized( - org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema.Builder.class); + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken.Builder.class); } - public static com.google.protobuf.Parser<FieldSchema> PARSER = - new com.google.protobuf.AbstractParser<FieldSchema>() { - public FieldSchema parsePartialFrom( + public static com.google.protobuf.Parser<DelegationToken> PARSER = + new com.google.protobuf.AbstractParser<DelegationToken>() { + public DelegationToken parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return new FieldSchema(input, extensionRegistry); + return new DelegationToken(input, extensionRegistry); } }; @java.lang.Override - public com.google.protobuf.Parser<FieldSchema> getParserForType() { + public com.google.protobuf.Parser<DelegationToken> getParserForType() { return PARSER; } private int bitField0_; - // required string name = 1; - public static 
final int NAME_FIELD_NUMBER = 1; - private java.lang.Object name_; + // required string token_str = 1; + public static final int TOKEN_STR_FIELD_NUMBER = 1; + private java.lang.Object tokenStr_; /** - * <code>required string name = 1;</code> + * <code>required string token_str = 1;</code> */ - public boolean hasName() { + public boolean hasTokenStr() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * <code>required string name = 1;</code> - */ - public java.lang.String getName() { - java.lang.Object ref = name_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - if (bs.isValidUtf8()) { - name_ = s; - } - return s; - } - } - /** - * <code>required string name = 1;</code> - */ - public com.google.protobuf.ByteString - getNameBytes() { - java.lang.Object ref = name_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - name_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - // required string type = 2; - public static final int TYPE_FIELD_NUMBER = 2; - private java.lang.Object type_; - /** - * <code>required string type = 2;</code> - */ - public boolean hasType() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - /** - * <code>required string type = 2;</code> - */ - public java.lang.String getType() { - java.lang.Object ref = type_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - if (bs.isValidUtf8()) { - type_ = s; - } - return s; - } - } - /** - * <code>required string type = 2;</code> - */ - public com.google.protobuf.ByteString - getTypeBytes() { - java.lang.Object ref = type_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - type_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - // optional string comment = 3; - public static final int COMMENT_FIELD_NUMBER = 3; - private java.lang.Object comment_; - /** - * <code>optional string comment = 3;</code> - */ - public boolean hasComment() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - /** - * <code>optional string comment = 3;</code> + * <code>required string token_str = 1;</code> */ - public java.lang.String getComment() { - java.lang.Object ref = comment_; + public java.lang.String getTokenStr() { + java.lang.Object ref = tokenStr_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { @@ -10483,22 +10357,22 @@ public final class HbaseMetastoreProto { (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (bs.isValidUtf8()) { - comment_ = s; + tokenStr_ = s; } return s; } } /** - * <code>optional string comment = 3;</code> + * <code>required string token_str = 1;</code> */ public com.google.protobuf.ByteString - getCommentBytes() { - java.lang.Object ref = comment_; + getTokenStrBytes() { + java.lang.Object ref = tokenStr_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); - comment_ = b; + tokenStr_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; @@ -10506,20 
+10380,14 @@ public final class HbaseMetastoreProto { } private void initFields() { - name_ = ""; - type_ = ""; - comment_ = ""; + tokenStr_ = ""; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; - if (!hasName()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasType()) { + if (!hasTokenStr()) { memoizedIsInitialized = 0; return false; } @@ -10531,13 +10399,7 @@ public final class HbaseMetastoreProto { throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeBytes(1, getNameBytes()); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, getTypeBytes()); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - output.writeBytes(3, getCommentBytes()); + output.writeBytes(1, getTokenStrBytes()); } getUnknownFields().writeTo(output); } @@ -10550,15 +10412,7 @@ public final class HbaseMetastoreProto { size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream - .computeBytesSize(1, getNameBytes()); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, getTypeBytes()); - } - if (((bitField0_ & 0x00000004) == 0x00000004)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(3, getCommentBytes()); + .computeBytesSize(1, getTokenStrBytes()); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; @@ -10572,53 +10426,53 @@ public final class HbaseMetastoreProto { return super.writeReplace(); } - public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema parseFrom( + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema parseFrom( + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema parseFrom(byte[] data) + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } - public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema parseFrom( + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } - public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema parseFrom(java.io.InputStream input) + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken parseFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } - public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema parseFrom( + public 
static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseFrom(input, extensionRegistry); } - public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema parseDelimitedFrom(java.io.InputStream input) + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return PARSER.parseDelimitedFrom(input); } - public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema parseDelimitedFrom( + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return PARSER.parseDelimitedFrom(input, extensionRegistry); } - public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema parseFrom( + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return PARSER.parseFrom(input); } - public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema parseFrom( + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { @@ -10627,7 +10481,7 @@ public final class HbaseMetastoreProto { public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema prototype) { + public static Builder newBuilder(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @@ -10639,24 +10493,24 @@ public final class HbaseMetastoreProto { return builder; } /** - * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.FieldSchema} + * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.DelegationToken} */ public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> - implements org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchemaOrBuilder { + implements org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationTokenOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_FieldSchema_descriptor; + return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_DelegationToken_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_FieldSchema_fieldAccessorTable + return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_DelegationToken_fieldAccessorTable .ensureFieldAccessorsInitialized( - 
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema.Builder.class); + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken.Builder.class); } - // Construct using org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema.newBuilder() + // Construct using org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken.newBuilder() private Builder() { maybeForceBuilderInitialization(); } @@ -10676,12 +10530,8 @@ public final class HbaseMetastoreProto { public Builder clear() { super.clear(); - name_ = ""; + tokenStr_ = ""; bitField0_ = (bitField0_ & ~0x00000001); - type_ = ""; - bitField0_ = (bitField0_ & ~0x00000002); - comment_ = ""; - bitField0_ = (bitField0_ & ~0x00000004); return this; } @@ -10691,66 +10541,48 @@ public final class HbaseMetastoreProto { public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { - return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_FieldSchema_descriptor; + return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_DelegationToken_descriptor; } - public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema getDefaultInstanceForType() { - return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema.getDefaultInstance(); + public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken getDefaultInstanceForType() { + return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken.getDefaultInstance(); } - public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema build() { - org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema result = buildPartial(); + public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken build() { + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } - public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema buildPartial() { - org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema result = new org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema(this); + public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken buildPartial() { + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken result = new org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } - result.name_ = name_; - if (((from_bitField0_ & 0x00000002) == 0x00000002)) { - to_bitField0_ |= 0x00000002; - } - result.type_ = type_; - if (((from_bitField0_ & 0x00000004) == 0x00000004)) { - to_bitField0_ |= 0x00000004; - } - result.comment_ = comment_; + result.tokenStr_ = tokenStr_; result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema) { - return 
mergeFrom((org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema)other); + if (other instanceof org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken) { + return mergeFrom((org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken)other); } else { super.mergeFrom(other); return this; } } - public Builder mergeFrom(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema other) { - if (other == org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema.getDefaultInstance()) return this; - if (other.hasName()) { + public Builder mergeFrom(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken other) { + if (other == org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken.getDefaultInstance()) return this; + if (other.hasTokenStr()) { bitField0_ |= 0x00000001; - name_ = other.name_; - onChanged(); - } - if (other.hasType()) { - bitField0_ |= 0x00000002; - type_ = other.type_; - onChanged(); - } - if (other.hasComment()) { - bitField0_ |= 0x00000004; - comment_ = other.comment_; + tokenStr_ = other.tokenStr_; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); @@ -10758,11 +10590,7 @@ public final class HbaseMetastoreProto { } public final boolean isInitialized() { - if (!hasName()) { - - return false; - } - if (!hasType()) { + if (!hasTokenStr()) { return false; } @@ -10773,11 +10601,11 @@ public final class HbaseMetastoreProto { com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { - org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema parsedMessage = null; + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { - parsedMessage = (org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema) e.getUnfinishedMessage(); + parsedMessage = (org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.DelegationToken) e.getUnfinishedMessage(); throw e; } finally { if (parsedMessage != null) { @@ -10788,346 +10616,158 @@ public final class HbaseMetastoreProto { } private int bitField0_; - // required string name = 1; - private java.lang.Object name_ = ""; + // required string token_str = 1; + private java.lang.Object tokenStr_ = ""; /** - * <code>required string name = 1;</code> + * <code>required string token_str = 1;</code> */ - public boolean hasName() { + public boolean hasTokenStr() { return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * <code>required string name = 1;</code> + * <code>required string token_str = 1;</code> */ - public java.lang.String getName() { - java.lang.Object ref = name_; + public java.lang.String getTokenStr() { + java.lang.Object ref = tokenStr_; if (!(ref instanceof java.lang.String)) { java.lang.String s = ((com.google.protobuf.ByteString) ref) .toStringUtf8(); - name_ = s; + tokenStr_ = s; return s; } else { return (java.lang.String) ref; } } /** - * <code>required string name = 1;</code> + * <code>required string token_str = 1;</code> */ public com.google.protobuf.ByteString - getNameBytes() { - java.lang.Object ref = name_; + getTokenStrBytes() { + java.lang.Object ref = tokenStr_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); - name_ = b; + tokenStr_ = b; 
return b; } else { return (com.google.protobuf.ByteString) ref; } } /** - * <code>required string name = 1;</code> + * <code>required string token_str = 1;</code> */ - public Builder setName( + public Builder setTokenStr( java.lang.String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; - name_ = value; + tokenStr_ = value; onChanged(); return this; } /** - * <code>required string name = 1;</code> + * <code>required string token_str = 1;</code> */ - public Builder clearName() { + public Builder clearTokenStr() { bitField0_ = (bitField0_ & ~0x00000001); - name_ = getDefaultInstance().getName(); + tokenStr_ = getDefaultInstance().getTokenStr(); onChanged(); return this; } /** - * <code>required string name = 1;</code> + * <code>required string token_str = 1;</code> */ - public Builder setNameBytes( + public Builder setTokenStrBytes( com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; - name_ = value; - onChanged(); - return this; - } - - // required string type = 2; - private java.lang.Object type_ = ""; - /** - * <code>required string type = 2;</code> - */ - public boolean hasType() { - return ((bitField0_ & 0x00000002) == 0x00000002); - } - /** - * <code>required string type = 2;</code> - */ - public java.lang.String getType() { - java.lang.Object ref = type_; - if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - type_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * <code>required string type = 2;</code> - */ - public com.google.protobuf.ByteString - getTypeBytes() { - java.lang.Object ref = type_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - type_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * <code>required string type = 2;</code> - */ - public Builder setType( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - type_ = value; - onChanged(); - return this; - } - /** - * <code>required string type = 2;</code> - */ - public Builder clearType() { - bitField0_ = (bitField0_ & ~0x00000002); - type_ = getDefaultInstance().getType(); - onChanged(); - return this; - } - /** - * <code>required string type = 2;</code> - */ - public Builder setTypeBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000002; - type_ = value; - onChanged(); - return this; - } - - // optional string comment = 3; - private java.lang.Object comment_ = ""; - /** - * <code>optional string comment = 3;</code> - */ - public boolean hasComment() { - return ((bitField0_ & 0x00000004) == 0x00000004); - } - /** - * <code>optional string comment = 3;</code> - */ - public java.lang.String getComment() { - java.lang.Object ref = comment_; - if (!(ref instanceof java.lang.String)) { - java.lang.String s = ((com.google.protobuf.ByteString) ref) - .toStringUtf8(); - comment_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * <code>optional string comment = 3;</code> - */ - public com.google.protobuf.ByteString - getCommentBytes() { - java.lang.Object ref = comment_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); 
- comment_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * <code>optional string comment = 3;</code> - */ - public Builder setComment( - java.lang.String value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000004; - comment_ = value; - onChanged(); - return this; - } - /** - * <code>optional string comment = 3;</code> - */ - public Builder clearComment() { - bitField0_ = (bitField0_ & ~0x00000004); - comment_ = getDefaultInstance().getComment(); - onChanged(); - return this; - } - /** - * <code>optional string comment = 3;</code> - */ - public Builder setCommentBytes( - com.google.protobuf.ByteString value) { - if (value == null) { - throw new NullPointerException(); - } - bitField0_ |= 0x00000004; - comment_ = value; + tokenStr_ = value; onChanged(); return this; } - // @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.metastore.hbase.FieldSchema) + // @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.metastore.hbase.DelegationToken) } static { - defaultInstance = new FieldSchema(true); + defaultInstance = new DelegationToken(true); defaultInstance.initFields(); } - // @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.FieldSchema) + // @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.DelegationToken) } - public interface FunctionOrBuilder + public interface FieldSchemaOrBuilder extends com.google.protobuf.MessageOrBuilder { - // optional string class_name = 1; + // required string name = 1; /** - * <code>optional string class_name = 1;</code> + * <code>required string name = 1;</code> */ - boolean hasClassName(); + boolean hasName(); /** - * <code>optional string class_name = 1;</code> + * <code>required string name = 1;</code> */ - java.lang.String getClassName(); + java.lang.String getName(); /** - * <code>optional string class_name = 1;</code> + * <code>required string name = 1;</code> */ com.google.protobuf.ByteString - getClassNameBytes(); + getNameBytes(); - // optional string owner_name = 2; + // required string type = 2; /** - * <code>optional string owner_name = 2;</code> + * <code>required string type = 2;</code> */ - boolean hasOwnerName(); + boolean hasType(); /** - * <code>optional string owner_name = 2;</code> + * <code>required string type = 2;</code> */ - java.lang.String getOwnerName(); + java.lang.String getType(); /** - * <code>optional string owner_name = 2;</code> + * <code>required string type = 2;</code> */ com.google.protobuf.ByteString - getOwnerNameBytes(); - - // optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType owner_type = 3; - /** - * <code>optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType owner_type = 3;</code> - */ - boolean hasOwnerType(); - /** - * <code>optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType owner_type = 3;</code> - */ - org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType getOwnerType(); - - // optional sint64 create_time = 4; - /** - * <code>optional sint64 create_time = 4;</code> - */ - boolean hasCreateTime(); - /** - * <code>optional sint64 create_time = 4;</code> - */ - long getCreateTime(); - - // optional .org.apache.hadoop.hive.metastore.hbase.Function.FunctionType function_type = 5; - /** - * <code>optional .org.apache.hadoop.hive.metastore.hbase.Function.FunctionType function_type = 5;</code> - */ - boolean hasFunctionType(); - /** - * <code>optional .org.apache.hadoop.hive.metastore.hbase.Function.FunctionType 
function_type = 5;</code> - */ - org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.FunctionType getFunctionType(); + getTypeBytes(); - // repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6; - /** - * <code>repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6;</code> - */ - java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri> - getResourceUrisList(); - /** - * <code>repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6;</code> - */ - org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri getResourceUris(int index); + // optional string comment = 3; /** - * <code>repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6;</code> + * <code>optional string comment = 3;</code> */ - int getResourceUrisCount(); + boolean hasComment(); /** - * <code>repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6;</code> + * <code>optional string comment = 3;</code> */ - java.util.List<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUriOrBuilder> - getResourceUrisOrBuilderList(); + java.lang.String getComment(); /** - * <code>repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6;</code> + * <code>optional string comment = 3;</code> */ - org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUriOrBuilder getResourceUrisOrBuilder( - int index); + com.google.protobuf.ByteString + getCommentBytes(); } /** - * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.Function} + * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.FieldSchema} */ - public static final class Function extends + public static final class FieldSchema extends com.google.protobuf.GeneratedMessage - implements FunctionOrBuilder { - // Use Function.newBuilder() to construct. - private Function(com.google.protobuf.GeneratedMessage.Builder<?> builder) { + implements FieldSchemaOrBuilder { + // Use FieldSchema.newBuilder() to construct. 
+ private FieldSchema(com.google.protobuf.GeneratedMessage.Builder<?> builder) { super(builder); this.unknownFields = builder.getUnknownFields(); } - private Function(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + private FieldSchema(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } - private static final Function defaultInstance; - public static Function getDefaultInstance() { + private static final FieldSchema defaultInstance; + public static FieldSchema getDefaultInstance() { return defaultInstance; } - public Function getDefaultInstanceForType() { + public FieldSchema getDefaultInstanceForType() { return defaultInstance; } @@ -11137,7 +10777,7 @@ public final class HbaseMetastoreProto { getUnknownFields() { return this.unknownFields; } - private Function( + private FieldSchema( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { @@ -11162,47 +10802,17 @@ public final class HbaseMetastoreProto { } case 10: { bitField0_ |= 0x00000001; - className_ = input.readBytes(); + name_ = input.readBytes(); break; } case 18: { bitField0_ |= 0x00000002; - ownerName_ = input.readBytes(); - break; - } - case 24: { - int rawValue = input.readEnum(); - org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType value = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType.valueOf(rawValue); - if (value == null) { - unknownFields.mergeVarintField(3, rawValue); - } else { - bitField0_ |= 0x00000004; - ownerType_ = value; - } - break; - } - case 32: { - bitField0_ |= 0x00000008; - createTime_ = input.readSInt64(); - break; - } - case 40: { - int rawValue = input.readEnum(); - org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.FunctionType value = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.FunctionType.valueOf(rawValue); - if (value == null) { - unknownFields.mergeVarintField(5, rawValue); - } else { - bitField0_ |= 0x00000010; - functionType_ = value; - } + type_ = input.readBytes(); break; } - case 50: { - if (!((mutable_bitField0_ & 0x00000020) == 0x00000020)) { - resourceUris_ = new java.util.ArrayList<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri>(); - mutable_bitField0_ |= 0x00000020; - } - resourceUris_.add(input.readMessage(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.PARSER, extensionRegistry)); + case 26: { + bitField0_ |= 0x00000004; + comment_ = input.readBytes(); break; } } @@ -11213,528 +10823,1397 @@ public final class HbaseMetastoreProto { throw new com.google.protobuf.InvalidProtocolBufferException( e.getMessage()).setUnfinishedMessage(this); } finally { - if (((mutable_bitField0_ & 0x00000020) == 0x00000020)) { - resourceUris_ = java.util.Collections.unmodifiableList(resourceUris_); - } this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_Function_descriptor; + return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_FieldSchema_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return 
org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_Function_fieldAccessorTable + return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_FieldSchema_fieldAccessorTable .ensureFieldAccessorsInitialized( - org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.Builder.class); + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema.Builder.class); } - public static com.google.protobuf.Parser<Function> PARSER = - new com.google.protobuf.AbstractParser<Function>() { - public Function parsePartialFrom( + public static com.google.protobuf.Parser<FieldSchema> PARSER = + new com.google.protobuf.AbstractParser<FieldSchema>() { + public FieldSchema parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { - return new Function(input, extensionRegistry); + return new FieldSchema(input, extensionRegistry); } }; @java.lang.Override - public com.google.protobuf.Parser<Function> getParserForType() { + public com.google.protobuf.Parser<FieldSchema> getParserForType() { return PARSER; } + private int bitField0_; + // required string name = 1; + public static final int NAME_FIELD_NUMBER = 1; + private java.lang.Object name_; /** - * Protobuf enum {@code org.apache.hadoop.hive.metastore.hbase.Function.FunctionType} + * <code>required string name = 1;</code> */ - public enum FunctionType - implements com.google.protobuf.ProtocolMessageEnum { - /** - * <code>JAVA = 1;</code> - */ - JAVA(0, 1), - ; - - /** - * <code>JAVA = 1;</code> - */ - public static final int JAVA_VALUE = 1; - - - public final int getNumber() { return value; } - - public static FunctionType valueOf(int value) { - switch (value) { - case 1: return JAVA; - default: return null; + public boolean hasName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * <code>required string name = 1;</code> + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + name_ = s; } + return s; } - - public static com.google.protobuf.Internal.EnumLiteMap<FunctionType> - internalGetValueMap() { - return internalValueMap; - } - private static com.google.protobuf.Internal.EnumLiteMap<FunctionType> - internalValueMap = - new com.google.protobuf.Internal.EnumLiteMap<FunctionType>() { - public FunctionType findValueByNumber(int number) { - return FunctionType.valueOf(number); - } - }; - - public final com.google.protobuf.Descriptors.EnumValueDescriptor - getValueDescriptor() { - return getDescriptor().getValues().get(index); - } - public final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptorForType() { - return getDescriptor(); - } - public static final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptor() { - return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.getDescriptor().getEnumTypes().get(0); + } + /** + * <code>required string name = 1;</code> + */ + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object 
ref = name_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; } + } - private static final FunctionType[] VALUES = values(); - - public static FunctionType valueOf( - com.google.protobuf.Descriptors.EnumValueDescriptor desc) { - if (desc.getType() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "EnumValueDescriptor is not for this type."); + // required string type = 2; + public static final int TYPE_FIELD_NUMBER = 2; + private java.lang.Object type_; + /** + * <code>required string type = 2;</code> + */ + public boolean hasType() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * <code>required string type = 2;</code> + */ + public java.lang.String getType() { + java.lang.Object ref = type_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + type_ = s; } - return VALUES[desc.getIndex()]; + return s; } - - private final int index; - private final int value; - - private FunctionType(int index, int value) { - this.index = index; - this.value = value; + } + /** + * <code>required string type = 2;</code> + */ + public com.google.protobuf.ByteString + getTypeBytes() { + java.lang.Object ref = type_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + type_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; } - - // @@protoc_insertion_point(enum_scope:org.apache.hadoop.hive.metastore.hbase.Function.FunctionType) } - public interface ResourceUriOrBuilder - extends com.google.protobuf.MessageOrBuilder { - - // required .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri.ResourceType resource_type = 1; - /** - * <code>required .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri.ResourceType resource_type = 1;</code> - */ - boolean hasResourceType(); - /** - * <code>required .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri.ResourceType resource_type = 1;</code> - */ - org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.ResourceType getResourceType(); - - // required string uri = 2; - /** - * <code>required string uri = 2;</code> - */ - boolean hasUri(); - /** - * <code>required string uri = 2;</code> - */ - java.lang.String getUri(); - /** - * <code>required string uri = 2;</code> - */ - com.google.protobuf.ByteString - getUriBytes(); + // optional string comment = 3; + public static final int COMMENT_FIELD_NUMBER = 3; + private java.lang.Object comment_; + /** + * <code>optional string comment = 3;</code> + */ + public boolean hasComment() { + return ((bitField0_ & 0x00000004) == 0x00000004); } /** - * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri} + * <code>optional string comment = 3;</code> */ - public static final class ResourceUri extends - com.google.protobuf.GeneratedMessage - implements ResourceUriOrBuilder { - // Use ResourceUri.newBuilder() to construct. 
- private ResourceUri(com.google.protobuf.GeneratedMessage.Builder<?> builder) { - super(builder); - this.unknownFields = builder.getUnknownFields(); + public java.lang.String getComment() { + java.lang.Object ref = comment_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + comment_ = s; + } + return s; } - private ResourceUri(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + } + /** + * <code>optional string comment = 3;</code> + */ + public com.google.protobuf.ByteString + getCommentBytes() { + java.lang.Object ref = comment_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + comment_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } - private static final ResourceUri defaultInstance; - public static ResourceUri getDefaultInstance() { - return defaultInstance; + private void initFields() { + name_ = ""; + type_ = ""; + comment_ = ""; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasName()) { + memoizedIsInitialized = 0; + return false; } + if (!hasType()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } - public ResourceUri getDefaultInstanceForType() { - return defaultInstance; + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, getNameBytes()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, getTypeBytes()); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeBytes(3, getCommentBytes()); } + getUnknownFields().writeTo(output); + } - private final com.google.protobuf.UnknownFieldSet unknownFields; - @java.lang.Override - public final com.google.protobuf.UnknownFieldSet - getUnknownFields() { - return this.unknownFields; + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, getNameBytes()); } - private ResourceUri( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - initFields(); - int mutable_bitField0_ = 0; - com.google.protobuf.UnknownFieldSet.Builder unknownFields = - com.google.protobuf.UnknownFieldSet.newBuilder(); - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - default: { - if (!parseUnknownField(input, unknownFields, - extensionRegistry, tag)) { - done = true; - } - break; - } - case 8: { - int rawValue = input.readEnum(); - org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.ResourceType value = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.ResourceType.valueOf(rawValue); - if (value == null) { - unknownFields.mergeVarintField(1, rawValue); - } 
else { - bitField0_ |= 0x00000001; - resourceType_ = value; - } - break; - } - case 18: { - bitField0_ |= 0x00000002; - uri_ = input.readBytes(); - break; - } - } - } - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(this); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException( - e.getMessage()).setUnfinishedMessage(this); - } finally { - this.unknownFields = unknownFields.build(); - makeExtensionsImmutable(); - } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, getTypeBytes()); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(3, getCommentBytes()); } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + 
throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.FieldSchema} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder<Builder> + implements org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchemaOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { - return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_Function_ResourceUri_descriptor; + return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_FieldSchema_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { - return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_Function_ResourceUri_fieldAccessorTable + return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_FieldSchema_fieldAccessorTable .ensureFieldAccessorsInitialized( - org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.Builder.class); + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema.class, org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema.Builder.class); } - public static com.google.protobuf.Parser<ResourceUri> PARSER = - new com.google.protobuf.AbstractParser<ResourceUri>() { - public ResourceUri parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return new ResourceUri(input, extensionRegistry); - } - }; - - @java.lang.Override - public com.google.protobuf.Parser<ResourceUri> getParserForType() { - return PARSER; + // Construct using org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); } - /** - * Protobuf enum {@code org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri.ResourceType} - */ - public enum ResourceType - implements com.google.protobuf.ProtocolMessageEnum { - /** - * <code>JAR = 1;</code> - */ - JAR(0, 1), - /** - * <code>FILE = 2;</code> - */ - FILE(1, 2), - /** - * <code>ARCHIVE = 3;</code> - */ - ARCHIVE(2, 3), - ; + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } - /** - * <code>JAR = 1;</code> - */ - public static final int JAR_VALUE = 1; - /** - * 
<code>FILE = 2;</code> - */ - public static final int FILE_VALUE = 2; - /** - * <code>ARCHIVE = 3;</code> - */ - public static final int ARCHIVE_VALUE = 3; + public Builder clear() { + super.clear(); + name_ = ""; + bitField0_ = (bitField0_ & ~0x00000001); + type_ = ""; + bitField0_ = (bitField0_ & ~0x00000002); + comment_ = ""; + bitField0_ = (bitField0_ & ~0x00000004); + return this; + } + public Builder clone() { + return create().mergeFrom(buildPartial()); + } - public final int getNumber() { return value; } + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.internal_static_org_apache_hadoop_hive_metastore_hbase_FieldSchema_descriptor; + } - public static ResourceType valueOf(int value) { - switch (value) { - case 1: return JAR; - case 2: return FILE; - case 3: return ARCHIVE; - default: return null; - } - } + public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema getDefaultInstanceForType() { + return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema.getDefaultInstance(); + } - public static com.google.protobuf.Internal.EnumLiteMap<ResourceType> - internalGetValueMap() { - return internalValueMap; + public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema build() { + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); } - private static com.google.protobuf.Internal.EnumLiteMap<ResourceType> - internalValueMap = - new com.google.protobuf.Internal.EnumLiteMap<ResourceType>() { - public ResourceType findValueByNumber(int number) { - return ResourceType.valueOf(number); - } - }; + return result; + } - public final com.google.protobuf.Descriptors.EnumValueDescriptor - getValueDescriptor() { - return getDescriptor().getValues().get(index); + public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema buildPartial() { + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema result = new org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; } - public final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptorForType() { - return getDescriptor(); + result.name_ = name_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; } - public static final com.google.protobuf.Descriptors.EnumDescriptor - getDescriptor() { - return org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.getDescriptor().getEnumTypes().get(0); + result.type_ = type_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; } + result.comment_ = comment_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } - private static final ResourceType[] VALUES = values(); - - public static ResourceType valueOf( - com.google.protobuf.Descriptors.EnumValueDescriptor desc) { - if (desc.getType() != getDescriptor()) { - throw new java.lang.IllegalArgumentException( - "EnumValueDescriptor is not for this type."); - } - return VALUES[desc.getIndex()]; + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema) { + return 
mergeFrom((org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema)other); + } else { + super.mergeFrom(other); + return this; } + } - private final int index; - private final int value; - - private ResourceType(int index, int value) { - this.index = index; - this.value = value; + public Builder mergeFrom(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema other) { + if (other == org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema.getDefaultInstance()) return this; + if (other.hasName()) { + bitField0_ |= 0x00000001; + name_ = other.name_; + onChanged(); } - - // @@protoc_insertion_point(enum_scope:org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri.ResourceType) + if (other.hasType()) { + bitField0_ |= 0x00000002; + type_ = other.type_; + onChanged(); + } + if (other.hasComment()) { + bitField0_ |= 0x00000004; + comment_ = other.comment_; + onChanged(); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; } - private int bitField0_; - // required .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri.ResourceType resource_type = 1; - public static final int RESOURCE_TYPE_FIELD_NUMBER = 1; - private org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.ResourceType resourceType_; - /** - * <code>required .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri.ResourceType resource_type = 1;</code> - */ - public boolean hasResourceType() { - return ((bitField0_ & 0x00000001) == 0x00000001); + public final boolean isInitialized() { + if (!hasName()) { + + return false; + } + if (!hasType()) { + + return false; + } + return true; } - /** - * <code>required .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri.ResourceType resource_type = 1;</code> - */ - public org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.ResourceType getResourceType() { - return resourceType_; + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; } + private int bitField0_; - // required string uri = 2; - public static final int URI_FIELD_NUMBER = 2; - private java.lang.Object uri_; + // required string name = 1; + private java.lang.Object name_ = ""; /** - * <code>required string uri = 2;</code> + * <code>required string name = 1;</code> */ - public boolean hasUri() { - return ((bitField0_ & 0x00000002) == 0x00000002); + public boolean hasName() { + return ((bitField0_ & 0x00000001) == 0x00000001); } /** - * <code>required string uri = 2;</code> + * <code>required string name = 1;</code> */ - public java.lang.String getUri() { - java.lang.Object ref = uri_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - if (bs.isValidUtf8()) { - uri_ = s; - } + public java.lang.String getName() { + java.lang.Object ref = name_; + if (!(ref instanceof 
java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); + name_ = s; return s; + } else { + return (java.lang.String) ref; } } /** - * <code>required string uri = 2;</code> + * <code>required string name = 1;</code> */ public com.google.protobuf.ByteString - getUriBytes() { - java.lang.Object ref = uri_; - if (ref instanceof java.lang.String) { + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8( (java.lang.String) ref); - uri_ = b; + name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } - - private void initFields() { - resourceType_ = org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri.ResourceType.JAR; - uri_ = ""; + /** + * <code>required string name = 1;</code> + */ + public Builder setName( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + name_ = value; + onChanged(); + return this; } - private byte memoizedIsInitialized = -1; - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized != -1) return isInitialized == 1; - - if (!hasResourceType()) { - memoizedIsInitialized = 0; - return false; - } - if (!hasUri()) { - memoizedIsInitialized = 0; - return false; - } - memoizedIsInitialized = 1; - return true; + /** + * <code>required string name = 1;</code> + */ + public Builder clearName() { + bitField0_ = (bitField0_ & ~0x00000001); + name_ = getDefaultInstance().getName(); + onChanged(); + return this; } - - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - getSerializedSize(); - if (((bitField0_ & 0x00000001) == 0x00000001)) { - output.writeEnum(1, resourceType_.getNumber()); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - output.writeBytes(2, getUriBytes()); - } - getUnknownFields().writeTo(output); + /** + * <code>required string name = 1;</code> + */ + public Builder setNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + name_ = value; + onChanged(); + return this; } - private int memoizedSerializedSize = -1; - public int getSerializedSize() { - int size = memoizedSerializedSize; - if (size != -1) return size; - - size = 0; - if (((bitField0_ & 0x00000001) == 0x00000001)) { - size += com.google.protobuf.CodedOutputStream - .computeEnumSize(1, resourceType_.getNumber()); - } - if (((bitField0_ & 0x00000002) == 0x00000002)) { - size += com.google.protobuf.CodedOutputStream - .computeBytesSize(2, getUriBytes()); - } - size += getUnknownFields().getSerializedSize(); - memoizedSerializedSize = size; - return size; + // required string type = 2; + private java.lang.Object type_ = ""; + /** + * <code>required string type = 2;</code> + */ + public boolean hasType() { + return ((bitField0_ & 0x00000002) == 0x00000002); } - - private static final long serialVersionUID = 0L; - @java.lang.Override - protected java.lang.Object writeReplace() - throws java.io.ObjectStreamException { - return super.writeReplace(); + /** + * <code>required string type = 2;</code> + */ + public java.lang.String getType() { + java.lang.Object ref = type_; + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); + type_ = s; + return s; + } else { + return (java.lang.String) 
ref; + } } - - public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); + /** + * <code>required string type = 2;</code> + */ + public com.google.protobuf.ByteString + getTypeBytes() { + java.lang.Object ref = type_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + type_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } } - public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); + /** + * <code>required string type = 2;</code> + */ + public Builder setType( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + type_ = value; + onChanged(); + return this; } - public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); + /** + * <code>required string type = 2;</code> + */ + public Builder clearType() { + bitField0_ = (bitField0_ & ~0x00000002); + type_ = getDefaultInstance().getType(); + onChanged(); + return this; } - public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); + /** + * <code>required string type = 2;</code> + */ + public Builder setTypeBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + type_ = value; + onChanged(); + return this; } - public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri parseFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); + + // optional string comment = 3; + private java.lang.Object comment_ = ""; + /** + * <code>optional string comment = 3;</code> + */ + public boolean hasComment() { + return ((bitField0_ & 0x00000004) == 0x00000004); } - public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + /** + * <code>optional string comment = 3;</code> + */ + public java.lang.String getComment() { + java.lang.Object ref = comment_; + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); + comment_ = s; + return s; + } else { + return (java.lang.String) ref; + } } - public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input); + /** + * <code>optional string comment = 3;</code> + */ + public com.google.protobuf.ByteString + getCommentBytes() 
{ + java.lang.Object ref = comment_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + comment_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } } - public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseDelimitedFrom(input, extensionRegistry); + /** + * <code>optional string comment = 3;</code> + */ + public Builder setComment( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000004; + comment_ = value; + onChanged(); + return this; } - public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return PARSER.parseFrom(input); + /** + * <code>optional string comment = 3;</code> + */ + public Builder clearComment() { + bitField0_ = (bitField0_ & ~0x00000004); + comment_ = getDefaultInstance().getComment(); + onChanged(); + return this; } - public static org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return PARSER.parseFrom(input, extensionRegistry); + /** + * <code>optional string comment = 3;</code> + */ + public Builder setCommentBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000004; + comment_ = value; + onChanged(); + return this; } - public static Builder newBuilder() { return Builder.create(); } - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder(org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri prototype) { - return newBuilder().mergeFrom(prototype); - } - public Builder toBuilder() { return newBuilder(this); } + // @@protoc_insertion_point(builder_scope:org.apache.hadoop.hive.metastore.hbase.FieldSchema) + } - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder<Builder> + static { + defaultInstance = new FieldSchema(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:org.apache.hadoop.hive.metastore.hbase.FieldSchema) + } + + public interface FunctionOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // optional string class_name = 1; + /** + * <code>optional string class_name = 1;</code> + */ + boolean hasClassName(); + /** + * <code>optional string class_name = 1;</code> + */ + java.lang.String getClassName(); + /** + * <code>optional string class_name = 1;</code> + */ + com.google.protobuf.ByteString + getClassNameBytes(); + + // optional string owner_name = 2; + /** + * <code>optional string owner_name = 2;</code> + */ + boolean hasOwnerName(); + /** + * <code>optional string owner_name = 2;</code> + */ + java.lang.String getOwnerName(); + /** + * <code>optional string 
owner_name = 2;</code> + */ + com.google.protobuf.ByteString + getOwnerNameBytes(); + + // optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType owner_type = 3; + /** + * <code>optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType owner_type = 3;</code> + */ + boolean hasOwnerType(); + /** + * <code>optional .org.apache.hadoop.hive.metastore.hbase.PrincipalType owner_type = 3;</code> + */ + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.PrincipalType getOwnerType(); + + // optional sint64 create_time = 4; + /** + * <code>optional sint64 create_time = 4;</code> + */ + boolean hasCreateTime(); + /** + * <code>optional sint64 create_time = 4;</code> + */ + long getCreateTime(); + + // optional .org.apache.hadoop.hive.metastore.hbase.Function.FunctionType function_type = 5; + /** + * <code>optional .org.apache.hadoop.hive.metastore.hbase.Function.FunctionType function_type = 5;</code> + */ + boolean hasFunctionType(); + /** + * <code>optional .org.apache.hadoop.hive.metastore.hbase.Function.FunctionType function_type = 5;</code> + */ + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.FunctionType getFunctionType(); + + // repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6; + /** + * <code>repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6;</code> + */ + java.util.List<org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri> + getResourceUrisList(); + /** + * <code>repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6;</code> + */ + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUri getResourceUris(int index); + /** + * <code>repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6;</code> + */ + int getResourceUrisCount(); + /** + * <code>repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6;</code> + */ + java.util.List<? extends org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUriOrBuilder> + getResourceUrisOrBuilderList(); + /** + * <code>repeated .org.apache.hadoop.hive.metastore.hbase.Function.ResourceUri resource_uris = 6;</code> + */ + org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.Function.ResourceUriOrBuilder getResourceUrisOrBuilder( + int index); + } + /** + * Protobuf type {@code org.apache.hadoop.hive.metastore.hbase.Function} + */ + public static final class Function extends + com.google.protobuf.GeneratedMessage + implements FunctionOrBuilder { + // Use Function.newBuilder() to construct. + private Function(com.google.protobuf.GeneratedMessage.Builder<?> builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private Function(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final Function defaultInstance; + public static Function getDefaultInstance() { + return defaultInstance; + } + + public Function getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + privat
<TRUNCATED>
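For readers skimming the regenerated classes above: the FieldSchema message re-emitted in these hunks keeps the usual protobuf-java 2.x surface (newBuilder()/setName()/setType()/setComment()/build(), plus the static parseFrom methods), and its isInitialized() check requires both name and type, so build() fails fast if either is missing. The following is an illustrative sketch only, not part of the commit; it uses only accessors visible in the hunks, and the literal column values are made up.

    // Illustrative only -- not part of this commit. Assumes the generated
    // protobuf-java 2.x API shown in the hunks above; literal values are made up.
    import org.apache.hadoop.hive.metastore.hbase.HbaseMetastoreProto.FieldSchema;

    public class FieldSchemaExample {
      public static void main(String[] args) throws Exception {
        // Build a column descriptor; name and type are required, comment is optional.
        FieldSchema col = FieldSchema.newBuilder()
            .setName("viewtime")             // required string name = 1
            .setType("bigint")               // required string type = 2
            .setComment("event timestamp")   // optional string comment = 3
            .build();                        // throws if a required field is unset

        // Round-trip through the protobuf wire format.
        byte[] bytes = col.toByteArray();
        FieldSchema parsed = FieldSchema.parseFrom(bytes);

        System.out.println(parsed.getName() + " " + parsed.getType()
            + (parsed.hasComment() ? " -- " + parsed.getComment() : ""));
      }
    }

The same builder/parse pattern applies to the other regenerated messages in this diff (Function, Function.ResourceUri with its JAR/FILE/ARCHIVE ResourceType enum, DelegationToken), whose exact builder setters are partly outside the truncated portion of this message.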