http://git-wip-us.apache.org/repos/asf/hive/blob/12041d39/standalone-metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java
----------------------------------------------------------------------
diff --git 
a/standalone-metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java
 
b/standalone-metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java
index 8f3b848..8c5ceaf 100644
--- 
a/standalone-metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java
+++ 
b/standalone-metastore/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/ThriftHiveMetastore.java
@@ -402,6 +402,34 @@ import org.slf4j.LoggerFactory;
 
     public WMCreateOrDropTriggerToPoolMappingResponse 
create_or_drop_wm_trigger_to_pool_mapping(WMCreateOrDropTriggerToPoolMappingRequest
 request) throws AlreadyExistsException, NoSuchObjectException, 
InvalidObjectException, MetaException, org.apache.thrift.TException;
 
+    public void create_ischema(ISchema schema) throws AlreadyExistsException, 
NoSuchObjectException, MetaException, org.apache.thrift.TException;
+
+    public void alter_ischema(AlterISchemaRequest rqst) throws 
NoSuchObjectException, MetaException, org.apache.thrift.TException;
+
+    public ISchema get_ischema(ISchemaName name) throws NoSuchObjectException, 
MetaException, org.apache.thrift.TException;
+
+    public void drop_ischema(ISchemaName name) throws NoSuchObjectException, 
InvalidOperationException, MetaException, org.apache.thrift.TException;
+
+    public void add_schema_version(SchemaVersion schemaVersion) throws 
AlreadyExistsException, NoSuchObjectException, MetaException, 
org.apache.thrift.TException;
+
+    public SchemaVersion get_schema_version(SchemaVersionDescriptor 
schemaVersion) throws NoSuchObjectException, MetaException, 
org.apache.thrift.TException;
+
+    public SchemaVersion get_schema_latest_version(ISchemaName schemaName) 
throws NoSuchObjectException, MetaException, org.apache.thrift.TException;
+
+    public List<SchemaVersion> get_schema_all_versions(ISchemaName schemaName) 
throws NoSuchObjectException, MetaException, org.apache.thrift.TException;
+
+    public void drop_schema_version(SchemaVersionDescriptor schemaVersion) 
throws NoSuchObjectException, MetaException, org.apache.thrift.TException;
+
+    public FindSchemasByColsResp get_schemas_by_cols(FindSchemasByColsRqst 
rqst) throws MetaException, org.apache.thrift.TException;
+
+    public void map_schema_version_to_serde(MapSchemaVersionToSerdeRequest 
rqst) throws NoSuchObjectException, MetaException, org.apache.thrift.TException;
+
+    public void set_schema_version_state(SetSchemaVersionStateRequest rqst) 
throws NoSuchObjectException, InvalidOperationException, MetaException, 
org.apache.thrift.TException;
+
+    public void add_serde(SerDeInfo serde) throws AlreadyExistsException, 
MetaException, org.apache.thrift.TException;
+
+    public SerDeInfo get_serde(GetSerdeRequest rqst) throws 
NoSuchObjectException, MetaException, org.apache.thrift.TException;
+
   }
 
   @org.apache.hadoop.classification.InterfaceAudience.Public 
@org.apache.hadoop.classification.InterfaceStability.Stable public interface 
AsyncIface extends com.facebook.fb303.FacebookService .AsyncIface {
@@ -766,6 +794,34 @@ import org.slf4j.LoggerFactory;
 
     public void 
create_or_drop_wm_trigger_to_pool_mapping(WMCreateOrDropTriggerToPoolMappingRequest
 request, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws 
org.apache.thrift.TException;
 
+    public void create_ischema(ISchema schema, 
org.apache.thrift.async.AsyncMethodCallback resultHandler) throws 
org.apache.thrift.TException;
+
+    public void alter_ischema(AlterISchemaRequest rqst, 
org.apache.thrift.async.AsyncMethodCallback resultHandler) throws 
org.apache.thrift.TException;
+
+    public void get_ischema(ISchemaName name, 
org.apache.thrift.async.AsyncMethodCallback resultHandler) throws 
org.apache.thrift.TException;
+
+    public void drop_ischema(ISchemaName name, 
org.apache.thrift.async.AsyncMethodCallback resultHandler) throws 
org.apache.thrift.TException;
+
+    public void add_schema_version(SchemaVersion schemaVersion, 
org.apache.thrift.async.AsyncMethodCallback resultHandler) throws 
org.apache.thrift.TException;
+
+    public void get_schema_version(SchemaVersionDescriptor schemaVersion, 
org.apache.thrift.async.AsyncMethodCallback resultHandler) throws 
org.apache.thrift.TException;
+
+    public void get_schema_latest_version(ISchemaName schemaName, 
org.apache.thrift.async.AsyncMethodCallback resultHandler) throws 
org.apache.thrift.TException;
+
+    public void get_schema_all_versions(ISchemaName schemaName, 
org.apache.thrift.async.AsyncMethodCallback resultHandler) throws 
org.apache.thrift.TException;
+
+    public void drop_schema_version(SchemaVersionDescriptor schemaVersion, 
org.apache.thrift.async.AsyncMethodCallback resultHandler) throws 
org.apache.thrift.TException;
+
+    public void get_schemas_by_cols(FindSchemasByColsRqst rqst, 
org.apache.thrift.async.AsyncMethodCallback resultHandler) throws 
org.apache.thrift.TException;
+
+    public void map_schema_version_to_serde(MapSchemaVersionToSerdeRequest 
rqst, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws 
org.apache.thrift.TException;
+
+    public void set_schema_version_state(SetSchemaVersionStateRequest rqst, 
org.apache.thrift.async.AsyncMethodCallback resultHandler) throws 
org.apache.thrift.TException;
+
+    public void add_serde(SerDeInfo serde, 
org.apache.thrift.async.AsyncMethodCallback resultHandler) throws 
org.apache.thrift.TException;
+
+    public void get_serde(GetSerdeRequest rqst, 
org.apache.thrift.async.AsyncMethodCallback resultHandler) throws 
org.apache.thrift.TException;
+
   }
 
   @org.apache.hadoop.classification.InterfaceAudience.Public 
@org.apache.hadoop.classification.InterfaceStability.Stable public static class 
Client extends com.facebook.fb303.FacebookService.Client implements Iface {
@@ -5994,6 +6050,397 @@ import org.slf4j.LoggerFactory;
       throw new 
org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT,
 "create_or_drop_wm_trigger_to_pool_mapping failed: unknown result");
     }
 
+    public void create_ischema(ISchema schema) throws AlreadyExistsException, 
NoSuchObjectException, MetaException, org.apache.thrift.TException
+    {
+      send_create_ischema(schema);
+      recv_create_ischema();
+    }
+
+    public void send_create_ischema(ISchema schema) throws 
org.apache.thrift.TException
+    {
+      create_ischema_args args = new create_ischema_args();
+      args.setSchema(schema);
+      sendBase("create_ischema", args);
+    }
+
+    public void recv_create_ischema() throws AlreadyExistsException, 
NoSuchObjectException, MetaException, org.apache.thrift.TException
+    {
+      create_ischema_result result = new create_ischema_result();
+      receiveBase(result, "create_ischema");
+      if (result.o1 != null) {
+        throw result.o1;
+      }
+      if (result.o2 != null) {
+        throw result.o2;
+      }
+      if (result.o3 != null) {
+        throw result.o3;
+      }
+      return;
+    }
+
+    public void alter_ischema(AlterISchemaRequest rqst) throws 
NoSuchObjectException, MetaException, org.apache.thrift.TException
+    {
+      send_alter_ischema(rqst);
+      recv_alter_ischema();
+    }
+
+    public void send_alter_ischema(AlterISchemaRequest rqst) throws 
org.apache.thrift.TException
+    {
+      alter_ischema_args args = new alter_ischema_args();
+      args.setRqst(rqst);
+      sendBase("alter_ischema", args);
+    }
+
+    public void recv_alter_ischema() throws NoSuchObjectException, 
MetaException, org.apache.thrift.TException
+    {
+      alter_ischema_result result = new alter_ischema_result();
+      receiveBase(result, "alter_ischema");
+      if (result.o1 != null) {
+        throw result.o1;
+      }
+      if (result.o2 != null) {
+        throw result.o2;
+      }
+      return;
+    }
+
+    public ISchema get_ischema(ISchemaName name) throws NoSuchObjectException, 
MetaException, org.apache.thrift.TException
+    {
+      send_get_ischema(name);
+      return recv_get_ischema();
+    }
+
+    public void send_get_ischema(ISchemaName name) throws 
org.apache.thrift.TException
+    {
+      get_ischema_args args = new get_ischema_args();
+      args.setName(name);
+      sendBase("get_ischema", args);
+    }
+
+    public ISchema recv_get_ischema() throws NoSuchObjectException, 
MetaException, org.apache.thrift.TException
+    {
+      get_ischema_result result = new get_ischema_result();
+      receiveBase(result, "get_ischema");
+      if (result.isSetSuccess()) {
+        return result.success;
+      }
+      if (result.o1 != null) {
+        throw result.o1;
+      }
+      if (result.o2 != null) {
+        throw result.o2;
+      }
+      throw new 
org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT,
 "get_ischema failed: unknown result");
+    }
+
+    public void drop_ischema(ISchemaName name) throws NoSuchObjectException, 
InvalidOperationException, MetaException, org.apache.thrift.TException
+    {
+      send_drop_ischema(name);
+      recv_drop_ischema();
+    }
+
+    public void send_drop_ischema(ISchemaName name) throws 
org.apache.thrift.TException
+    {
+      drop_ischema_args args = new drop_ischema_args();
+      args.setName(name);
+      sendBase("drop_ischema", args);
+    }
+
+    public void recv_drop_ischema() throws NoSuchObjectException, 
InvalidOperationException, MetaException, org.apache.thrift.TException
+    {
+      drop_ischema_result result = new drop_ischema_result();
+      receiveBase(result, "drop_ischema");
+      if (result.o1 != null) {
+        throw result.o1;
+      }
+      if (result.o2 != null) {
+        throw result.o2;
+      }
+      if (result.o3 != null) {
+        throw result.o3;
+      }
+      return;
+    }
+
+    public void add_schema_version(SchemaVersion schemaVersion) throws 
AlreadyExistsException, NoSuchObjectException, MetaException, 
org.apache.thrift.TException
+    {
+      send_add_schema_version(schemaVersion);
+      recv_add_schema_version();
+    }
+
+    public void send_add_schema_version(SchemaVersion schemaVersion) throws 
org.apache.thrift.TException
+    {
+      add_schema_version_args args = new add_schema_version_args();
+      args.setSchemaVersion(schemaVersion);
+      sendBase("add_schema_version", args);
+    }
+
+    public void recv_add_schema_version() throws AlreadyExistsException, 
NoSuchObjectException, MetaException, org.apache.thrift.TException
+    {
+      add_schema_version_result result = new add_schema_version_result();
+      receiveBase(result, "add_schema_version");
+      if (result.o1 != null) {
+        throw result.o1;
+      }
+      if (result.o2 != null) {
+        throw result.o2;
+      }
+      if (result.o3 != null) {
+        throw result.o3;
+      }
+      return;
+    }
+
+    public SchemaVersion get_schema_version(SchemaVersionDescriptor 
schemaVersion) throws NoSuchObjectException, MetaException, 
org.apache.thrift.TException
+    {
+      send_get_schema_version(schemaVersion);
+      return recv_get_schema_version();
+    }
+
+    public void send_get_schema_version(SchemaVersionDescriptor schemaVersion) 
throws org.apache.thrift.TException
+    {
+      get_schema_version_args args = new get_schema_version_args();
+      args.setSchemaVersion(schemaVersion);
+      sendBase("get_schema_version", args);
+    }
+
+    public SchemaVersion recv_get_schema_version() throws 
NoSuchObjectException, MetaException, org.apache.thrift.TException
+    {
+      get_schema_version_result result = new get_schema_version_result();
+      receiveBase(result, "get_schema_version");
+      if (result.isSetSuccess()) {
+        return result.success;
+      }
+      if (result.o1 != null) {
+        throw result.o1;
+      }
+      if (result.o2 != null) {
+        throw result.o2;
+      }
+      throw new 
org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT,
 "get_schema_version failed: unknown result");
+    }
+
+    public SchemaVersion get_schema_latest_version(ISchemaName schemaName) 
throws NoSuchObjectException, MetaException, org.apache.thrift.TException
+    {
+      send_get_schema_latest_version(schemaName);
+      return recv_get_schema_latest_version();
+    }
+
+    public void send_get_schema_latest_version(ISchemaName schemaName) throws 
org.apache.thrift.TException
+    {
+      get_schema_latest_version_args args = new 
get_schema_latest_version_args();
+      args.setSchemaName(schemaName);
+      sendBase("get_schema_latest_version", args);
+    }
+
+    public SchemaVersion recv_get_schema_latest_version() throws 
NoSuchObjectException, MetaException, org.apache.thrift.TException
+    {
+      get_schema_latest_version_result result = new 
get_schema_latest_version_result();
+      receiveBase(result, "get_schema_latest_version");
+      if (result.isSetSuccess()) {
+        return result.success;
+      }
+      if (result.o1 != null) {
+        throw result.o1;
+      }
+      if (result.o2 != null) {
+        throw result.o2;
+      }
+      throw new 
org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT,
 "get_schema_latest_version failed: unknown result");
+    }
+
+    public List<SchemaVersion> get_schema_all_versions(ISchemaName schemaName) 
throws NoSuchObjectException, MetaException, org.apache.thrift.TException
+    {
+      send_get_schema_all_versions(schemaName);
+      return recv_get_schema_all_versions();
+    }
+
+    public void send_get_schema_all_versions(ISchemaName schemaName) throws 
org.apache.thrift.TException
+    {
+      get_schema_all_versions_args args = new get_schema_all_versions_args();
+      args.setSchemaName(schemaName);
+      sendBase("get_schema_all_versions", args);
+    }
+
+    public List<SchemaVersion> recv_get_schema_all_versions() throws 
NoSuchObjectException, MetaException, org.apache.thrift.TException
+    {
+      get_schema_all_versions_result result = new 
get_schema_all_versions_result();
+      receiveBase(result, "get_schema_all_versions");
+      if (result.isSetSuccess()) {
+        return result.success;
+      }
+      if (result.o1 != null) {
+        throw result.o1;
+      }
+      if (result.o2 != null) {
+        throw result.o2;
+      }
+      throw new 
org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT,
 "get_schema_all_versions failed: unknown result");
+    }
+
+    public void drop_schema_version(SchemaVersionDescriptor schemaVersion) 
throws NoSuchObjectException, MetaException, org.apache.thrift.TException
+    {
+      send_drop_schema_version(schemaVersion);
+      recv_drop_schema_version();
+    }
+
+    public void send_drop_schema_version(SchemaVersionDescriptor 
schemaVersion) throws org.apache.thrift.TException
+    {
+      drop_schema_version_args args = new drop_schema_version_args();
+      args.setSchemaVersion(schemaVersion);
+      sendBase("drop_schema_version", args);
+    }
+
+    public void recv_drop_schema_version() throws NoSuchObjectException, 
MetaException, org.apache.thrift.TException
+    {
+      drop_schema_version_result result = new drop_schema_version_result();
+      receiveBase(result, "drop_schema_version");
+      if (result.o1 != null) {
+        throw result.o1;
+      }
+      if (result.o2 != null) {
+        throw result.o2;
+      }
+      return;
+    }
+
+    public FindSchemasByColsResp get_schemas_by_cols(FindSchemasByColsRqst 
rqst) throws MetaException, org.apache.thrift.TException
+    {
+      send_get_schemas_by_cols(rqst);
+      return recv_get_schemas_by_cols();
+    }
+
+    public void send_get_schemas_by_cols(FindSchemasByColsRqst rqst) throws 
org.apache.thrift.TException
+    {
+      get_schemas_by_cols_args args = new get_schemas_by_cols_args();
+      args.setRqst(rqst);
+      sendBase("get_schemas_by_cols", args);
+    }
+
+    public FindSchemasByColsResp recv_get_schemas_by_cols() throws 
MetaException, org.apache.thrift.TException
+    {
+      get_schemas_by_cols_result result = new get_schemas_by_cols_result();
+      receiveBase(result, "get_schemas_by_cols");
+      if (result.isSetSuccess()) {
+        return result.success;
+      }
+      if (result.o1 != null) {
+        throw result.o1;
+      }
+      throw new 
org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT,
 "get_schemas_by_cols failed: unknown result");
+    }
+
+    public void map_schema_version_to_serde(MapSchemaVersionToSerdeRequest 
rqst) throws NoSuchObjectException, MetaException, org.apache.thrift.TException
+    {
+      send_map_schema_version_to_serde(rqst);
+      recv_map_schema_version_to_serde();
+    }
+
+    public void 
send_map_schema_version_to_serde(MapSchemaVersionToSerdeRequest rqst) throws 
org.apache.thrift.TException
+    {
+      map_schema_version_to_serde_args args = new 
map_schema_version_to_serde_args();
+      args.setRqst(rqst);
+      sendBase("map_schema_version_to_serde", args);
+    }
+
+    public void recv_map_schema_version_to_serde() throws 
NoSuchObjectException, MetaException, org.apache.thrift.TException
+    {
+      map_schema_version_to_serde_result result = new 
map_schema_version_to_serde_result();
+      receiveBase(result, "map_schema_version_to_serde");
+      if (result.o1 != null) {
+        throw result.o1;
+      }
+      if (result.o2 != null) {
+        throw result.o2;
+      }
+      return;
+    }
+
+    public void set_schema_version_state(SetSchemaVersionStateRequest rqst) 
throws NoSuchObjectException, InvalidOperationException, MetaException, 
org.apache.thrift.TException
+    {
+      send_set_schema_version_state(rqst);
+      recv_set_schema_version_state();
+    }
+
+    public void send_set_schema_version_state(SetSchemaVersionStateRequest 
rqst) throws org.apache.thrift.TException
+    {
+      set_schema_version_state_args args = new set_schema_version_state_args();
+      args.setRqst(rqst);
+      sendBase("set_schema_version_state", args);
+    }
+
+    public void recv_set_schema_version_state() throws NoSuchObjectException, 
InvalidOperationException, MetaException, org.apache.thrift.TException
+    {
+      set_schema_version_state_result result = new 
set_schema_version_state_result();
+      receiveBase(result, "set_schema_version_state");
+      if (result.o1 != null) {
+        throw result.o1;
+      }
+      if (result.o2 != null) {
+        throw result.o2;
+      }
+      if (result.o3 != null) {
+        throw result.o3;
+      }
+      return;
+    }
+
+    public void add_serde(SerDeInfo serde) throws AlreadyExistsException, 
MetaException, org.apache.thrift.TException
+    {
+      send_add_serde(serde);
+      recv_add_serde();
+    }
+
+    public void send_add_serde(SerDeInfo serde) throws 
org.apache.thrift.TException
+    {
+      add_serde_args args = new add_serde_args();
+      args.setSerde(serde);
+      sendBase("add_serde", args);
+    }
+
+    public void recv_add_serde() throws AlreadyExistsException, MetaException, 
org.apache.thrift.TException
+    {
+      add_serde_result result = new add_serde_result();
+      receiveBase(result, "add_serde");
+      if (result.o1 != null) {
+        throw result.o1;
+      }
+      if (result.o2 != null) {
+        throw result.o2;
+      }
+      return;
+    }
+
+    public SerDeInfo get_serde(GetSerdeRequest rqst) throws 
NoSuchObjectException, MetaException, org.apache.thrift.TException
+    {
+      send_get_serde(rqst);
+      return recv_get_serde();
+    }
+
+    public void send_get_serde(GetSerdeRequest rqst) throws 
org.apache.thrift.TException
+    {
+      get_serde_args args = new get_serde_args();
+      args.setRqst(rqst);
+      sendBase("get_serde", args);
+    }
+
+    public SerDeInfo recv_get_serde() throws NoSuchObjectException, 
MetaException, org.apache.thrift.TException
+    {
+      get_serde_result result = new get_serde_result();
+      receiveBase(result, "get_serde");
+      if (result.isSetSuccess()) {
+        return result.success;
+      }
+      if (result.o1 != null) {
+        throw result.o1;
+      }
+      if (result.o2 != null) {
+        throw result.o2;
+      }
+      throw new 
org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT,
 "get_serde failed: unknown result");
+    }
+
   }
   @org.apache.hadoop.classification.InterfaceAudience.Public 
@org.apache.hadoop.classification.InterfaceStability.Stable public static class 
AsyncClient extends com.facebook.fb303.FacebookService.AsyncClient implements 
AsyncIface {
     @org.apache.hadoop.classification.InterfaceAudience.Public 
@org.apache.hadoop.classification.InterfaceStability.Stable public static class 
Factory implements org.apache.thrift.async.TAsyncClientFactory<AsyncClient> {
@@ -12243,6 +12690,454 @@ import org.slf4j.LoggerFactory;
       }
     }
 
+    public void create_ischema(ISchema schema, 
org.apache.thrift.async.AsyncMethodCallback resultHandler) throws 
org.apache.thrift.TException {
+      checkReady();
+      create_ischema_call method_call = new create_ischema_call(schema, 
resultHandler, this, ___protocolFactory, ___transport);
+      this.___currentMethod = method_call;
+      ___manager.call(method_call);
+    }
+
+    @org.apache.hadoop.classification.InterfaceAudience.Public 
@org.apache.hadoop.classification.InterfaceStability.Stable public static class 
create_ischema_call extends org.apache.thrift.async.TAsyncMethodCall {
+      private ISchema schema;
+      public create_ischema_call(ISchema schema, 
org.apache.thrift.async.AsyncMethodCallback resultHandler, 
org.apache.thrift.async.TAsyncClient client, 
org.apache.thrift.protocol.TProtocolFactory protocolFactory, 
org.apache.thrift.transport.TNonblockingTransport transport) throws 
org.apache.thrift.TException {
+        super(client, protocolFactory, transport, resultHandler, false);
+        this.schema = schema;
+      }
+
+      public void write_args(org.apache.thrift.protocol.TProtocol prot) throws 
org.apache.thrift.TException {
+        prot.writeMessageBegin(new 
org.apache.thrift.protocol.TMessage("create_ischema", 
org.apache.thrift.protocol.TMessageType.CALL, 0));
+        create_ischema_args args = new create_ischema_args();
+        args.setSchema(schema);
+        args.write(prot);
+        prot.writeMessageEnd();
+      }
+
+      public void getResult() throws AlreadyExistsException, 
NoSuchObjectException, MetaException, org.apache.thrift.TException {
+        if (getState() != 
org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
+          throw new IllegalStateException("Method call not finished!");
+        }
+        org.apache.thrift.transport.TMemoryInputTransport memoryTransport = 
new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
+        org.apache.thrift.protocol.TProtocol prot = 
client.getProtocolFactory().getProtocol(memoryTransport);
+        (new Client(prot)).recv_create_ischema();
+      }
+    }
+
+    public void alter_ischema(AlterISchemaRequest rqst, 
org.apache.thrift.async.AsyncMethodCallback resultHandler) throws 
org.apache.thrift.TException {
+      checkReady();
+      alter_ischema_call method_call = new alter_ischema_call(rqst, 
resultHandler, this, ___protocolFactory, ___transport);
+      this.___currentMethod = method_call;
+      ___manager.call(method_call);
+    }
+
+    @org.apache.hadoop.classification.InterfaceAudience.Public 
@org.apache.hadoop.classification.InterfaceStability.Stable public static class 
alter_ischema_call extends org.apache.thrift.async.TAsyncMethodCall {
+      private AlterISchemaRequest rqst;
+      public alter_ischema_call(AlterISchemaRequest rqst, 
org.apache.thrift.async.AsyncMethodCallback resultHandler, 
org.apache.thrift.async.TAsyncClient client, 
org.apache.thrift.protocol.TProtocolFactory protocolFactory, 
org.apache.thrift.transport.TNonblockingTransport transport) throws 
org.apache.thrift.TException {
+        super(client, protocolFactory, transport, resultHandler, false);
+        this.rqst = rqst;
+      }
+
+      public void write_args(org.apache.thrift.protocol.TProtocol prot) throws 
org.apache.thrift.TException {
+        prot.writeMessageBegin(new 
org.apache.thrift.protocol.TMessage("alter_ischema", 
org.apache.thrift.protocol.TMessageType.CALL, 0));
+        alter_ischema_args args = new alter_ischema_args();
+        args.setRqst(rqst);
+        args.write(prot);
+        prot.writeMessageEnd();
+      }
+
+      public void getResult() throws NoSuchObjectException, MetaException, 
org.apache.thrift.TException {
+        if (getState() != 
org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
+          throw new IllegalStateException("Method call not finished!");
+        }
+        org.apache.thrift.transport.TMemoryInputTransport memoryTransport = 
new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
+        org.apache.thrift.protocol.TProtocol prot = 
client.getProtocolFactory().getProtocol(memoryTransport);
+        (new Client(prot)).recv_alter_ischema();
+      }
+    }
+
+    public void get_ischema(ISchemaName name, 
org.apache.thrift.async.AsyncMethodCallback resultHandler) throws 
org.apache.thrift.TException {
+      checkReady();
+      get_ischema_call method_call = new get_ischema_call(name, resultHandler, 
this, ___protocolFactory, ___transport);
+      this.___currentMethod = method_call;
+      ___manager.call(method_call);
+    }
+
+    @org.apache.hadoop.classification.InterfaceAudience.Public 
@org.apache.hadoop.classification.InterfaceStability.Stable public static class 
get_ischema_call extends org.apache.thrift.async.TAsyncMethodCall {
+      private ISchemaName name;
+      public get_ischema_call(ISchemaName name, 
org.apache.thrift.async.AsyncMethodCallback resultHandler, 
org.apache.thrift.async.TAsyncClient client, 
org.apache.thrift.protocol.TProtocolFactory protocolFactory, 
org.apache.thrift.transport.TNonblockingTransport transport) throws 
org.apache.thrift.TException {
+        super(client, protocolFactory, transport, resultHandler, false);
+        this.name = name;
+      }
+
+      public void write_args(org.apache.thrift.protocol.TProtocol prot) throws 
org.apache.thrift.TException {
+        prot.writeMessageBegin(new 
org.apache.thrift.protocol.TMessage("get_ischema", 
org.apache.thrift.protocol.TMessageType.CALL, 0));
+        get_ischema_args args = new get_ischema_args();
+        args.setName(name);
+        args.write(prot);
+        prot.writeMessageEnd();
+      }
+
+      public ISchema getResult() throws NoSuchObjectException, MetaException, 
org.apache.thrift.TException {
+        if (getState() != 
org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
+          throw new IllegalStateException("Method call not finished!");
+        }
+        org.apache.thrift.transport.TMemoryInputTransport memoryTransport = 
new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
+        org.apache.thrift.protocol.TProtocol prot = 
client.getProtocolFactory().getProtocol(memoryTransport);
+        return (new Client(prot)).recv_get_ischema();
+      }
+    }
+
+    public void drop_ischema(ISchemaName name, 
org.apache.thrift.async.AsyncMethodCallback resultHandler) throws 
org.apache.thrift.TException {
+      checkReady();
+      drop_ischema_call method_call = new drop_ischema_call(name, 
resultHandler, this, ___protocolFactory, ___transport);
+      this.___currentMethod = method_call;
+      ___manager.call(method_call);
+    }
+
+    @org.apache.hadoop.classification.InterfaceAudience.Public 
@org.apache.hadoop.classification.InterfaceStability.Stable public static class 
drop_ischema_call extends org.apache.thrift.async.TAsyncMethodCall {
+      private ISchemaName name;
+      public drop_ischema_call(ISchemaName name, 
org.apache.thrift.async.AsyncMethodCallback resultHandler, 
org.apache.thrift.async.TAsyncClient client, 
org.apache.thrift.protocol.TProtocolFactory protocolFactory, 
org.apache.thrift.transport.TNonblockingTransport transport) throws 
org.apache.thrift.TException {
+        super(client, protocolFactory, transport, resultHandler, false);
+        this.name = name;
+      }
+
+      public void write_args(org.apache.thrift.protocol.TProtocol prot) throws 
org.apache.thrift.TException {
+        prot.writeMessageBegin(new 
org.apache.thrift.protocol.TMessage("drop_ischema", 
org.apache.thrift.protocol.TMessageType.CALL, 0));
+        drop_ischema_args args = new drop_ischema_args();
+        args.setName(name);
+        args.write(prot);
+        prot.writeMessageEnd();
+      }
+
+      public void getResult() throws NoSuchObjectException, 
InvalidOperationException, MetaException, org.apache.thrift.TException {
+        if (getState() != 
org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
+          throw new IllegalStateException("Method call not finished!");
+        }
+        org.apache.thrift.transport.TMemoryInputTransport memoryTransport = 
new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
+        org.apache.thrift.protocol.TProtocol prot = 
client.getProtocolFactory().getProtocol(memoryTransport);
+        (new Client(prot)).recv_drop_ischema();
+      }
+    }
+
+    public void add_schema_version(SchemaVersion schemaVersion, 
org.apache.thrift.async.AsyncMethodCallback resultHandler) throws 
org.apache.thrift.TException {
+      checkReady();
+      add_schema_version_call method_call = new 
add_schema_version_call(schemaVersion, resultHandler, this, ___protocolFactory, 
___transport);
+      this.___currentMethod = method_call;
+      ___manager.call(method_call);
+    }
+
+    @org.apache.hadoop.classification.InterfaceAudience.Public 
@org.apache.hadoop.classification.InterfaceStability.Stable public static class 
add_schema_version_call extends org.apache.thrift.async.TAsyncMethodCall {
+      private SchemaVersion schemaVersion;
+      public add_schema_version_call(SchemaVersion schemaVersion, 
org.apache.thrift.async.AsyncMethodCallback resultHandler, 
org.apache.thrift.async.TAsyncClient client, 
org.apache.thrift.protocol.TProtocolFactory protocolFactory, 
org.apache.thrift.transport.TNonblockingTransport transport) throws 
org.apache.thrift.TException {
+        super(client, protocolFactory, transport, resultHandler, false);
+        this.schemaVersion = schemaVersion;
+      }
+
+      public void write_args(org.apache.thrift.protocol.TProtocol prot) throws 
org.apache.thrift.TException {
+        prot.writeMessageBegin(new 
org.apache.thrift.protocol.TMessage("add_schema_version", 
org.apache.thrift.protocol.TMessageType.CALL, 0));
+        add_schema_version_args args = new add_schema_version_args();
+        args.setSchemaVersion(schemaVersion);
+        args.write(prot);
+        prot.writeMessageEnd();
+      }
+
+      public void getResult() throws AlreadyExistsException, 
NoSuchObjectException, MetaException, org.apache.thrift.TException {
+        if (getState() != 
org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
+          throw new IllegalStateException("Method call not finished!");
+        }
+        org.apache.thrift.transport.TMemoryInputTransport memoryTransport = 
new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
+        org.apache.thrift.protocol.TProtocol prot = 
client.getProtocolFactory().getProtocol(memoryTransport);
+        (new Client(prot)).recv_add_schema_version();
+      }
+    }
+
+    public void get_schema_version(SchemaVersionDescriptor schemaVersion, 
org.apache.thrift.async.AsyncMethodCallback resultHandler) throws 
org.apache.thrift.TException {
+      checkReady();
+      get_schema_version_call method_call = new 
get_schema_version_call(schemaVersion, resultHandler, this, ___protocolFactory, 
___transport);
+      this.___currentMethod = method_call;
+      ___manager.call(method_call);
+    }
+
    /** Async call object for get_schema_version: serializes the request and decodes the response. */
    @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class get_schema_version_call extends org.apache.thrift.async.TAsyncMethodCall {
      // Request payload captured at call time, written out in write_args().
      private SchemaVersionDescriptor schemaVersion;
      public get_schema_version_call(SchemaVersionDescriptor schemaVersion, org.apache.thrift.async.AsyncMethodCallback resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
        super(client, protocolFactory, transport, resultHandler, false); // false => two-way call
        this.schemaVersion = schemaVersion;
      }

      /** Serializes the get_schema_version CALL message onto {@code prot} (seqid 0). */
      public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
        prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("get_schema_version", org.apache.thrift.protocol.TMessageType.CALL, 0));
        get_schema_version_args args = new get_schema_version_args();
        args.setSchemaVersion(schemaVersion);
        args.write(prot);
        prot.writeMessageEnd();
      }

      /**
       * Decodes the buffered response; only valid once the call has reached
       * RESPONSE_READ. Delegates to the synchronous Client for parsing.
       */
      public SchemaVersion getResult() throws NoSuchObjectException, MetaException, org.apache.thrift.TException {
        if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
          throw new IllegalStateException("Method call not finished!");
        }
        org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
        org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
        return (new Client(prot)).recv_get_schema_version();
      }
    }
+
    /** Asynchronously invokes get_schema_latest_version; the outcome is delivered to {@code resultHandler}. */
    public void get_schema_latest_version(ISchemaName schemaName, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException {
      checkReady(); // only one outstanding call per async client instance
      get_schema_latest_version_call method_call = new get_schema_latest_version_call(schemaName, resultHandler, this, ___protocolFactory, ___transport);
      this.___currentMethod = method_call;
      ___manager.call(method_call);
    }
+
    /** Async call object for get_schema_latest_version: serializes the request and decodes the response. */
    @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class get_schema_latest_version_call extends org.apache.thrift.async.TAsyncMethodCall {
      // Request payload captured at call time, written out in write_args().
      private ISchemaName schemaName;
      public get_schema_latest_version_call(ISchemaName schemaName, org.apache.thrift.async.AsyncMethodCallback resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
        super(client, protocolFactory, transport, resultHandler, false); // false => two-way call
        this.schemaName = schemaName;
      }

      /** Serializes the get_schema_latest_version CALL message onto {@code prot} (seqid 0). */
      public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
        prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("get_schema_latest_version", org.apache.thrift.protocol.TMessageType.CALL, 0));
        get_schema_latest_version_args args = new get_schema_latest_version_args();
        args.setSchemaName(schemaName);
        args.write(prot);
        prot.writeMessageEnd();
      }

      /**
       * Decodes the buffered response; only valid once the call has reached
       * RESPONSE_READ. Delegates to the synchronous Client for parsing.
       */
      public SchemaVersion getResult() throws NoSuchObjectException, MetaException, org.apache.thrift.TException {
        if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
          throw new IllegalStateException("Method call not finished!");
        }
        org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
        org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
        return (new Client(prot)).recv_get_schema_latest_version();
      }
    }
+
    /** Asynchronously invokes get_schema_all_versions; the outcome is delivered to {@code resultHandler}. */
    public void get_schema_all_versions(ISchemaName schemaName, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException {
      checkReady(); // only one outstanding call per async client instance
      get_schema_all_versions_call method_call = new get_schema_all_versions_call(schemaName, resultHandler, this, ___protocolFactory, ___transport);
      this.___currentMethod = method_call;
      ___manager.call(method_call);
    }
+
    /** Async call object for get_schema_all_versions: serializes the request and decodes the response. */
    @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class get_schema_all_versions_call extends org.apache.thrift.async.TAsyncMethodCall {
      // Request payload captured at call time, written out in write_args().
      private ISchemaName schemaName;
      public get_schema_all_versions_call(ISchemaName schemaName, org.apache.thrift.async.AsyncMethodCallback resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
        super(client, protocolFactory, transport, resultHandler, false); // false => two-way call
        this.schemaName = schemaName;
      }

      /** Serializes the get_schema_all_versions CALL message onto {@code prot} (seqid 0). */
      public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
        prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("get_schema_all_versions", org.apache.thrift.protocol.TMessageType.CALL, 0));
        get_schema_all_versions_args args = new get_schema_all_versions_args();
        args.setSchemaName(schemaName);
        args.write(prot);
        prot.writeMessageEnd();
      }

      /**
       * Decodes the buffered response; only valid once the call has reached
       * RESPONSE_READ. Delegates to the synchronous Client for parsing.
       */
      public List<SchemaVersion> getResult() throws NoSuchObjectException, MetaException, org.apache.thrift.TException {
        if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
          throw new IllegalStateException("Method call not finished!");
        }
        org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
        org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
        return (new Client(prot)).recv_get_schema_all_versions();
      }
    }
+
    /** Asynchronously invokes drop_schema_version; the outcome is delivered to {@code resultHandler}. */
    public void drop_schema_version(SchemaVersionDescriptor schemaVersion, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException {
      checkReady(); // only one outstanding call per async client instance
      drop_schema_version_call method_call = new drop_schema_version_call(schemaVersion, resultHandler, this, ___protocolFactory, ___transport);
      this.___currentMethod = method_call;
      ___manager.call(method_call);
    }
+
    /** Async call object for drop_schema_version: serializes the request and decodes the (void) response. */
    @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class drop_schema_version_call extends org.apache.thrift.async.TAsyncMethodCall {
      // Request payload captured at call time, written out in write_args().
      private SchemaVersionDescriptor schemaVersion;
      public drop_schema_version_call(SchemaVersionDescriptor schemaVersion, org.apache.thrift.async.AsyncMethodCallback resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
        super(client, protocolFactory, transport, resultHandler, false); // false => two-way call
        this.schemaVersion = schemaVersion;
      }

      /** Serializes the drop_schema_version CALL message onto {@code prot} (seqid 0). */
      public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
        prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("drop_schema_version", org.apache.thrift.protocol.TMessageType.CALL, 0));
        drop_schema_version_args args = new drop_schema_version_args();
        args.setSchemaVersion(schemaVersion);
        args.write(prot);
        prot.writeMessageEnd();
      }

      /**
       * Decodes the buffered response; only valid once the call has reached
       * RESPONSE_READ. Raises any declared service exceptions via the
       * synchronous Client.
       */
      public void getResult() throws NoSuchObjectException, MetaException, org.apache.thrift.TException {
        if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
          throw new IllegalStateException("Method call not finished!");
        }
        org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
        org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
        (new Client(prot)).recv_drop_schema_version();
      }
    }
+
    /** Asynchronously invokes get_schemas_by_cols; the outcome is delivered to {@code resultHandler}. */
    public void get_schemas_by_cols(FindSchemasByColsRqst rqst, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException {
      checkReady(); // only one outstanding call per async client instance
      get_schemas_by_cols_call method_call = new get_schemas_by_cols_call(rqst, resultHandler, this, ___protocolFactory, ___transport);
      this.___currentMethod = method_call;
      ___manager.call(method_call);
    }
+
    /** Async call object for get_schemas_by_cols: serializes the request and decodes the response. */
    @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class get_schemas_by_cols_call extends org.apache.thrift.async.TAsyncMethodCall {
      // Request payload captured at call time, written out in write_args().
      private FindSchemasByColsRqst rqst;
      public get_schemas_by_cols_call(FindSchemasByColsRqst rqst, org.apache.thrift.async.AsyncMethodCallback resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
        super(client, protocolFactory, transport, resultHandler, false); // false => two-way call
        this.rqst = rqst;
      }

      /** Serializes the get_schemas_by_cols CALL message onto {@code prot} (seqid 0). */
      public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
        prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("get_schemas_by_cols", org.apache.thrift.protocol.TMessageType.CALL, 0));
        get_schemas_by_cols_args args = new get_schemas_by_cols_args();
        args.setRqst(rqst);
        args.write(prot);
        prot.writeMessageEnd();
      }

      /**
       * Decodes the buffered response; only valid once the call has reached
       * RESPONSE_READ. Delegates to the synchronous Client for parsing.
       */
      public FindSchemasByColsResp getResult() throws MetaException, org.apache.thrift.TException {
        if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
          throw new IllegalStateException("Method call not finished!");
        }
        org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
        org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
        return (new Client(prot)).recv_get_schemas_by_cols();
      }
    }
+
    /** Asynchronously invokes map_schema_version_to_serde; the outcome is delivered to {@code resultHandler}. */
    public void map_schema_version_to_serde(MapSchemaVersionToSerdeRequest rqst, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException {
      checkReady(); // only one outstanding call per async client instance
      map_schema_version_to_serde_call method_call = new map_schema_version_to_serde_call(rqst, resultHandler, this, ___protocolFactory, ___transport);
      this.___currentMethod = method_call;
      ___manager.call(method_call);
    }
+
    /** Async call object for map_schema_version_to_serde: serializes the request and decodes the (void) response. */
    @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class map_schema_version_to_serde_call extends org.apache.thrift.async.TAsyncMethodCall {
      // Request payload captured at call time, written out in write_args().
      private MapSchemaVersionToSerdeRequest rqst;
      public map_schema_version_to_serde_call(MapSchemaVersionToSerdeRequest rqst, org.apache.thrift.async.AsyncMethodCallback resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
        super(client, protocolFactory, transport, resultHandler, false); // false => two-way call
        this.rqst = rqst;
      }

      /** Serializes the map_schema_version_to_serde CALL message onto {@code prot} (seqid 0). */
      public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
        prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("map_schema_version_to_serde", org.apache.thrift.protocol.TMessageType.CALL, 0));
        map_schema_version_to_serde_args args = new map_schema_version_to_serde_args();
        args.setRqst(rqst);
        args.write(prot);
        prot.writeMessageEnd();
      }

      /**
       * Decodes the buffered response; only valid once the call has reached
       * RESPONSE_READ. Raises any declared service exceptions via the
       * synchronous Client.
       */
      public void getResult() throws NoSuchObjectException, MetaException, org.apache.thrift.TException {
        if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
          throw new IllegalStateException("Method call not finished!");
        }
        org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
        org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
        (new Client(prot)).recv_map_schema_version_to_serde();
      }
    }
+
    /** Asynchronously invokes set_schema_version_state; the outcome is delivered to {@code resultHandler}. */
    public void set_schema_version_state(SetSchemaVersionStateRequest rqst, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException {
      checkReady(); // only one outstanding call per async client instance
      set_schema_version_state_call method_call = new set_schema_version_state_call(rqst, resultHandler, this, ___protocolFactory, ___transport);
      this.___currentMethod = method_call;
      ___manager.call(method_call);
    }
+
    /** Async call object for set_schema_version_state: serializes the request and decodes the (void) response. */
    @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class set_schema_version_state_call extends org.apache.thrift.async.TAsyncMethodCall {
      // Request payload captured at call time, written out in write_args().
      private SetSchemaVersionStateRequest rqst;
      public set_schema_version_state_call(SetSchemaVersionStateRequest rqst, org.apache.thrift.async.AsyncMethodCallback resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
        super(client, protocolFactory, transport, resultHandler, false); // false => two-way call
        this.rqst = rqst;
      }

      /** Serializes the set_schema_version_state CALL message onto {@code prot} (seqid 0). */
      public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
        prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("set_schema_version_state", org.apache.thrift.protocol.TMessageType.CALL, 0));
        set_schema_version_state_args args = new set_schema_version_state_args();
        args.setRqst(rqst);
        args.write(prot);
        prot.writeMessageEnd();
      }

      /**
       * Decodes the buffered response; only valid once the call has reached
       * RESPONSE_READ. Raises any declared service exceptions via the
       * synchronous Client.
       */
      public void getResult() throws NoSuchObjectException, InvalidOperationException, MetaException, org.apache.thrift.TException {
        if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
          throw new IllegalStateException("Method call not finished!");
        }
        org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
        org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
        (new Client(prot)).recv_set_schema_version_state();
      }
    }
+
    /** Asynchronously invokes add_serde; the outcome is delivered to {@code resultHandler}. */
    public void add_serde(SerDeInfo serde, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException {
      checkReady(); // only one outstanding call per async client instance
      add_serde_call method_call = new add_serde_call(serde, resultHandler, this, ___protocolFactory, ___transport);
      this.___currentMethod = method_call;
      ___manager.call(method_call);
    }
+
    /** Async call object for add_serde: serializes the request and decodes the (void) response. */
    @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class add_serde_call extends org.apache.thrift.async.TAsyncMethodCall {
      // Request payload captured at call time, written out in write_args().
      private SerDeInfo serde;
      public add_serde_call(SerDeInfo serde, org.apache.thrift.async.AsyncMethodCallback resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
        super(client, protocolFactory, transport, resultHandler, false); // false => two-way call
        this.serde = serde;
      }

      /** Serializes the add_serde CALL message onto {@code prot} (seqid 0). */
      public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
        prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("add_serde", org.apache.thrift.protocol.TMessageType.CALL, 0));
        add_serde_args args = new add_serde_args();
        args.setSerde(serde);
        args.write(prot);
        prot.writeMessageEnd();
      }

      /**
       * Decodes the buffered response; only valid once the call has reached
       * RESPONSE_READ. Raises any declared service exceptions via the
       * synchronous Client.
       */
      public void getResult() throws AlreadyExistsException, MetaException, org.apache.thrift.TException {
        if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
          throw new IllegalStateException("Method call not finished!");
        }
        org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
        org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
        (new Client(prot)).recv_add_serde();
      }
    }
+
    /** Asynchronously invokes get_serde; the outcome is delivered to {@code resultHandler}. */
    public void get_serde(GetSerdeRequest rqst, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException {
      checkReady(); // only one outstanding call per async client instance
      get_serde_call method_call = new get_serde_call(rqst, resultHandler, this, ___protocolFactory, ___transport);
      this.___currentMethod = method_call;
      ___manager.call(method_call);
    }
+
    /** Async call object for get_serde: serializes the request and decodes the response. */
    @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class get_serde_call extends org.apache.thrift.async.TAsyncMethodCall {
      // Request payload captured at call time, written out in write_args().
      private GetSerdeRequest rqst;
      public get_serde_call(GetSerdeRequest rqst, org.apache.thrift.async.AsyncMethodCallback resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
        super(client, protocolFactory, transport, resultHandler, false); // false => two-way call
        this.rqst = rqst;
      }

      /** Serializes the get_serde CALL message onto {@code prot} (seqid 0). */
      public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
        prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("get_serde", org.apache.thrift.protocol.TMessageType.CALL, 0));
        get_serde_args args = new get_serde_args();
        args.setRqst(rqst);
        args.write(prot);
        prot.writeMessageEnd();
      }

      /**
       * Decodes the buffered response; only valid once the call has reached
       * RESPONSE_READ. Delegates to the synchronous Client for parsing.
       */
      public SerDeInfo getResult() throws NoSuchObjectException, MetaException, org.apache.thrift.TException {
        if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
          throw new IllegalStateException("Method call not finished!");
        }
        org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
        org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
        return (new Client(prot)).recv_get_serde();
      }
    }
+
   }
 
   @org.apache.hadoop.classification.InterfaceAudience.Public 
@org.apache.hadoop.classification.InterfaceStability.Stable public static class 
Processor<I extends Iface> extends 
com.facebook.fb303.FacebookService.Processor<I> implements 
org.apache.thrift.TProcessor {
@@ -12436,6 +13331,20 @@ import org.slf4j.LoggerFactory;
       processMap.put("create_or_update_wm_mapping", new 
create_or_update_wm_mapping());
       processMap.put("drop_wm_mapping", new drop_wm_mapping());
       processMap.put("create_or_drop_wm_trigger_to_pool_mapping", new 
create_or_drop_wm_trigger_to_pool_mapping());
+      processMap.put("create_ischema", new create_ischema());
+      processMap.put("alter_ischema", new alter_ischema());
+      processMap.put("get_ischema", new get_ischema());
+      processMap.put("drop_ischema", new drop_ischema());
+      processMap.put("add_schema_version", new add_schema_version());
+      processMap.put("get_schema_version", new get_schema_version());
+      processMap.put("get_schema_latest_version", new 
get_schema_latest_version());
+      processMap.put("get_schema_all_versions", new get_schema_all_versions());
+      processMap.put("drop_schema_version", new drop_schema_version());
+      processMap.put("get_schemas_by_cols", new get_schemas_by_cols());
+      processMap.put("map_schema_version_to_serde", new 
map_schema_version_to_serde());
+      processMap.put("set_schema_version_state", new 
set_schema_version_state());
+      processMap.put("add_serde", new add_serde());
+      processMap.put("get_serde", new get_serde());
       return processMap;
     }
 
@@ -17060,6 +17969,376 @@ import org.slf4j.LoggerFactory;
       }
     }
 
    /** Server-side process function for create_ischema. */
    @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class create_ischema<I extends Iface> extends org.apache.thrift.ProcessFunction<I, create_ischema_args> {
      public create_ischema() {
        super("create_ischema");
      }

      public create_ischema_args getEmptyArgsInstance() {
        return new create_ischema_args();
      }

      // Two-way call: a result (or declared exception) is written back.
      protected boolean isOneway() {
        return false;
      }

      /**
       * Invokes the handler and maps declared service exceptions onto the
       * result struct's fields; undeclared exceptions propagate as TException.
       */
      public create_ischema_result getResult(I iface, create_ischema_args args) throws org.apache.thrift.TException {
        create_ischema_result result = new create_ischema_result();
        try {
          iface.create_ischema(args.schema);
        } catch (AlreadyExistsException o1) {
          result.o1 = o1;
        } catch (NoSuchObjectException o2) {
          result.o2 = o2;
        } catch (MetaException o3) {
          result.o3 = o3;
        }
        return result;
      }
    }
+
    /** Server-side process function for alter_ischema. */
    @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class alter_ischema<I extends Iface> extends org.apache.thrift.ProcessFunction<I, alter_ischema_args> {
      public alter_ischema() {
        super("alter_ischema");
      }

      public alter_ischema_args getEmptyArgsInstance() {
        return new alter_ischema_args();
      }

      // Two-way call: a result (or declared exception) is written back.
      protected boolean isOneway() {
        return false;
      }

      /**
       * Invokes the handler and maps declared service exceptions onto the
       * result struct's fields; undeclared exceptions propagate as TException.
       */
      public alter_ischema_result getResult(I iface, alter_ischema_args args) throws org.apache.thrift.TException {
        alter_ischema_result result = new alter_ischema_result();
        try {
          iface.alter_ischema(args.rqst);
        } catch (NoSuchObjectException o1) {
          result.o1 = o1;
        } catch (MetaException o2) {
          result.o2 = o2;
        }
        return result;
      }
    }
+
    /** Server-side process function for get_ischema. */
    @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class get_ischema<I extends Iface> extends org.apache.thrift.ProcessFunction<I, get_ischema_args> {
      public get_ischema() {
        super("get_ischema");
      }

      public get_ischema_args getEmptyArgsInstance() {
        return new get_ischema_args();
      }

      // Two-way call: a result (or declared exception) is written back.
      protected boolean isOneway() {
        return false;
      }

      /**
       * Invokes the handler, storing the return value in {@code result.success};
       * declared service exceptions land in the result struct's fields.
       */
      public get_ischema_result getResult(I iface, get_ischema_args args) throws org.apache.thrift.TException {
        get_ischema_result result = new get_ischema_result();
        try {
          result.success = iface.get_ischema(args.name);
        } catch (NoSuchObjectException o1) {
          result.o1 = o1;
        } catch (MetaException o2) {
          result.o2 = o2;
        }
        return result;
      }
    }
+
    /** Server-side process function for drop_ischema. */
    @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class drop_ischema<I extends Iface> extends org.apache.thrift.ProcessFunction<I, drop_ischema_args> {
      public drop_ischema() {
        super("drop_ischema");
      }

      public drop_ischema_args getEmptyArgsInstance() {
        return new drop_ischema_args();
      }

      // Two-way call: a result (or declared exception) is written back.
      protected boolean isOneway() {
        return false;
      }

      /**
       * Invokes the handler and maps declared service exceptions onto the
       * result struct's fields; undeclared exceptions propagate as TException.
       */
      public drop_ischema_result getResult(I iface, drop_ischema_args args) throws org.apache.thrift.TException {
        drop_ischema_result result = new drop_ischema_result();
        try {
          iface.drop_ischema(args.name);
        } catch (NoSuchObjectException o1) {
          result.o1 = o1;
        } catch (InvalidOperationException o2) {
          result.o2 = o2;
        } catch (MetaException o3) {
          result.o3 = o3;
        }
        return result;
      }
    }
+
    /** Server-side process function for add_schema_version. */
    @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class add_schema_version<I extends Iface> extends org.apache.thrift.ProcessFunction<I, add_schema_version_args> {
      public add_schema_version() {
        super("add_schema_version");
      }

      public add_schema_version_args getEmptyArgsInstance() {
        return new add_schema_version_args();
      }

      // Two-way call: a result (or declared exception) is written back.
      protected boolean isOneway() {
        return false;
      }

      /**
       * Invokes the handler and maps declared service exceptions onto the
       * result struct's fields; undeclared exceptions propagate as TException.
       */
      public add_schema_version_result getResult(I iface, add_schema_version_args args) throws org.apache.thrift.TException {
        add_schema_version_result result = new add_schema_version_result();
        try {
          iface.add_schema_version(args.schemaVersion);
        } catch (AlreadyExistsException o1) {
          result.o1 = o1;
        } catch (NoSuchObjectException o2) {
          result.o2 = o2;
        } catch (MetaException o3) {
          result.o3 = o3;
        }
        return result;
      }
    }
+
    /** Server-side process function for get_schema_version. */
    @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class get_schema_version<I extends Iface> extends org.apache.thrift.ProcessFunction<I, get_schema_version_args> {
      public get_schema_version() {
        super("get_schema_version");
      }

      public get_schema_version_args getEmptyArgsInstance() {
        return new get_schema_version_args();
      }

      // Two-way call: a result (or declared exception) is written back.
      protected boolean isOneway() {
        return false;
      }

      /**
       * Invokes the handler, storing the return value in {@code result.success};
       * declared service exceptions land in the result struct's fields.
       */
      public get_schema_version_result getResult(I iface, get_schema_version_args args) throws org.apache.thrift.TException {
        get_schema_version_result result = new get_schema_version_result();
        try {
          result.success = iface.get_schema_version(args.schemaVersion);
        } catch (NoSuchObjectException o1) {
          result.o1 = o1;
        } catch (MetaException o2) {
          result.o2 = o2;
        }
        return result;
      }
    }
+
    /** Server-side process function for get_schema_latest_version. */
    @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class get_schema_latest_version<I extends Iface> extends org.apache.thrift.ProcessFunction<I, get_schema_latest_version_args> {
      public get_schema_latest_version() {
        super("get_schema_latest_version");
      }

      public get_schema_latest_version_args getEmptyArgsInstance() {
        return new get_schema_latest_version_args();
      }

      // Two-way call: a result (or declared exception) is written back.
      protected boolean isOneway() {
        return false;
      }

      /**
       * Invokes the handler, storing the return value in {@code result.success};
       * declared service exceptions land in the result struct's fields.
       */
      public get_schema_latest_version_result getResult(I iface, get_schema_latest_version_args args) throws org.apache.thrift.TException {
        get_schema_latest_version_result result = new get_schema_latest_version_result();
        try {
          result.success = iface.get_schema_latest_version(args.schemaName);
        } catch (NoSuchObjectException o1) {
          result.o1 = o1;
        } catch (MetaException o2) {
          result.o2 = o2;
        }
        return result;
      }
    }
+
    /** Server-side process function for get_schema_all_versions. */
    @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class get_schema_all_versions<I extends Iface> extends org.apache.thrift.ProcessFunction<I, get_schema_all_versions_args> {
      public get_schema_all_versions() {
        super("get_schema_all_versions");
      }

      public get_schema_all_versions_args getEmptyArgsInstance() {
        return new get_schema_all_versions_args();
      }

      // Two-way call: a result (or declared exception) is written back.
      protected boolean isOneway() {
        return false;
      }

      /**
       * Invokes the handler, storing the return value in {@code result.success};
       * declared service exceptions land in the result struct's fields.
       */
      public get_schema_all_versions_result getResult(I iface, get_schema_all_versions_args args) throws org.apache.thrift.TException {
        get_schema_all_versions_result result = new get_schema_all_versions_result();
        try {
          result.success = iface.get_schema_all_versions(args.schemaName);
        } catch (NoSuchObjectException o1) {
          result.o1 = o1;
        } catch (MetaException o2) {
          result.o2 = o2;
        }
        return result;
      }
    }
+
    /** Server-side process function for drop_schema_version. */
    @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class drop_schema_version<I extends Iface> extends org.apache.thrift.ProcessFunction<I, drop_schema_version_args> {
      public drop_schema_version() {
        super("drop_schema_version");
      }

      public drop_schema_version_args getEmptyArgsInstance() {
        return new drop_schema_version_args();
      }

      // Two-way call: a result (or declared exception) is written back.
      protected boolean isOneway() {
        return false;
      }

      /**
       * Invokes the handler and maps declared service exceptions onto the
       * result struct's fields; undeclared exceptions propagate as TException.
       */
      public drop_schema_version_result getResult(I iface, drop_schema_version_args args) throws org.apache.thrift.TException {
        drop_schema_version_result result = new drop_schema_version_result();
        try {
          iface.drop_schema_version(args.schemaVersion);
        } catch (NoSuchObjectException o1) {
          result.o1 = o1;
        } catch (MetaException o2) {
          result.o2 = o2;
        }
        return result;
      }
    }
+
    /** Server-side process function for get_schemas_by_cols. */
    @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class get_schemas_by_cols<I extends Iface> extends org.apache.thrift.ProcessFunction<I, get_schemas_by_cols_args> {
      public get_schemas_by_cols() {
        super("get_schemas_by_cols");
      }

      public get_schemas_by_cols_args getEmptyArgsInstance() {
        return new get_schemas_by_cols_args();
      }

      // Two-way call: a result (or declared exception) is written back.
      protected boolean isOneway() {
        return false;
      }

      /**
       * Invokes the handler, storing the return value in {@code result.success};
       * a declared MetaException lands in the result struct's field.
       */
      public get_schemas_by_cols_result getResult(I iface, get_schemas_by_cols_args args) throws org.apache.thrift.TException {
        get_schemas_by_cols_result result = new get_schemas_by_cols_result();
        try {
          result.success = iface.get_schemas_by_cols(args.rqst);
        } catch (MetaException o1) {
          result.o1 = o1;
        }
        return result;
      }
    }
+
    /** Server-side process function for map_schema_version_to_serde. */
    @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class map_schema_version_to_serde<I extends Iface> extends org.apache.thrift.ProcessFunction<I, map_schema_version_to_serde_args> {
      public map_schema_version_to_serde() {
        super("map_schema_version_to_serde");
      }

      public map_schema_version_to_serde_args getEmptyArgsInstance() {
        return new map_schema_version_to_serde_args();
      }

      // Two-way call: a result (or declared exception) is written back.
      protected boolean isOneway() {
        return false;
      }

      /**
       * Invokes the handler and maps declared service exceptions onto the
       * result struct's fields; undeclared exceptions propagate as TException.
       */
      public map_schema_version_to_serde_result getResult(I iface, map_schema_version_to_serde_args args) throws org.apache.thrift.TException {
        map_schema_version_to_serde_result result = new map_schema_version_to_serde_result();
        try {
          iface.map_schema_version_to_serde(args.rqst);
        } catch (NoSuchObjectException o1) {
          result.o1 = o1;
        } catch (MetaException o2) {
          result.o2 = o2;
        }
        return result;
      }
    }
+
    /** Server-side process function for set_schema_version_state. */
    @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public static class set_schema_version_state<I extends Iface> extends org.apache.thrift.ProcessFunction<I, set_schema_version_state_args> {
      public set_schema_version_state() {
        super("set_schema_version_state");
      }

      public set_schema_version_state_args getEmptyArgsInstance() {
        return new set_schema_version_state_args();
      }

      // Two-way call: a result (or declared exception) is written back.
      protected boolean isOneway() {
        return false;
      }

      /**
       * Invokes the handler and maps declared service exceptions onto the
       * result struct's fields; undeclared exceptions propagate as TException.
       */
      public set_schema_version_state_result getResult(I iface, set_schema_version_state_args args) throws org.apache.thrift.TException {
        set_schema_version_state_result result = new set_schema_version_state_result();
        try {
          iface.set_schema_version_state(args.rqst);
        } catch (NoSuchObjectException o1) {
          result.o1 = o1;
        } catch (InvalidOperationException o2) {
          result.o2 = o2;
        } catch (MetaException o3) {
          result.o3 = o3;
        }
        return result;
      }
    }
+
+    @org.apache.hadoop.classification.InterfaceAudience.Public 
@org.apache.hadoop.classification.InterfaceStability.Stable public static class 
add_serde<I extends Iface> extends org.apache.thrift.ProcessFunction<I, 
add_serde_args> {
+      public add_serde() {
+        super("add_serde");
+      }
+
+      public add_serde_args getEmptyArgsInstance() {
+        return new add_serde_args();
+      }
+
+      protected boolean isOneway() {
+        return false;
+      }
+
+      public add_serde_result getResult(I iface, add_serde_args args) throws 
org.apache.thrift.TException {
+        add_serde_result result = new add_serde_result();
+        try {
+          iface.add_serde(args.serde);
+        } catch (AlreadyExistsException o1) {
+          result.o1 = o1;
+        } catch (MetaException o2) {
+          result.o2 = o2;
+        }
+        return result;
+      }
+    }
+
+    @org.apache.hadoop.classification.InterfaceAudience.Public 
@org.apache.hadoop.classification.InterfaceStability.Stable public static class 
get_serde<I extends Iface> extends org.apache.thrift.ProcessFunction<I, 
get_serde_args> {
+      public get_serde() {
+        super("get_serde");
+      }
+
+      public get_serde_args getEmptyArgsInstance() {
+        return new get_serde_args();
+      }
+
+      protected boolean isOneway() {
+        return false;
+      }
+
+      public get_serde_result getResult(I iface, get_serde_args args) throws 
org.apache.thrift.TException {
+        get_serde_result result = new get_serde_result();
+        try {
+          result.success = iface.get_serde(args.rqst);
+        } catch (NoSuchObjectException o1) {
+          result.o1 = o1;
+        } catch (MetaException o2) {
+          result.o2 = o2;
+        }
+        return result;
+      }
+    }
+
   }
 
   @org.apache.hadoop.classification.InterfaceAudience.Public 
@org.apache.hadoop.classification.InterfaceStability.Stable public static class 
AsyncProcessor<I extends AsyncIface> extends 
com.facebook.fb303.FacebookService.AsyncProcessor<I> {
@@ -17253,6 +18532,20 @@ import org.slf4j.LoggerFactory;
       processMap.put("create_or_update_wm_mapping", new 
create_or_update_wm_mapping());
       processMap.put("drop_wm_mapping", new drop_wm_mapping());
       processMap.put("create_or_drop_wm_trigger_to_pool_mapping", new 
create_or_drop_wm_trigger_to_pool_mapping());
+      processMap.put("create_ischema", new create_ischema());
+      processMap.put("alter_ischema", new alter_ischema());
+      processMap.put("get_ischema", new get_ischema());
+      processMap.put("drop_ischema", new drop_ischema());
+      processMap.put("add_schema_version", new add_schema_version());
+      processMap.put("get_schema_version", new get_schema_version());
+      processMap.put("get_schema_latest_version", new 
get_schema_latest_version());
+      processMap.put("get_schema_all_versions", new get_schema_all_versions());
+      processMap.put("drop_schema_version", new drop_schema_version());
+      processMap.put("get_schemas_by_cols", new get_schemas_by_cols());
+      processMap.put("map_schema_version_to_serde", new 
map_schema_version_to_serde());
+      processMap.put("set_schema_version_state", new 
set_schema_version_state());
+      processMap.put("add_serde", new add_serde());
+      processMap.put("get_serde", new get_serde());
       return processMap;
     }
 
@@ -28289,6 +29582,881 @@ import org.slf4j.LoggerFactory;
       }
     }
 
+    @org.apache.hadoop.classification.InterfaceAudience.Public 
@org.apache.hadoop.classification.InterfaceStability.Stable public static class 
create_ischema<I extends AsyncIface> extends 
org.apache.thrift.AsyncProcessFunction<I, create_ischema_args, Void> {
+      public create_ischema() {
+        super("create_ischema");
+      }
+
+      public create_ischema_args getEmptyArgsInstance() {
+        return new create_ischema_args();
+      }
+
+      public AsyncMethodCallback<Void> getResultHandler(final AsyncFrameBuffer 
fb, final int seqid) {
+        final org.apache.thrift.AsyncProcessFunction fcall = this;
+        return new AsyncMethodCallback<Void>() { 
+          public void onComplete(Void o) {
+            create_ischema_result result = new create_ischema_result();
+            try {
+              fcall.sendResponse(fb,result, 
org.apache.thrift.protocol.TMessageType.REPLY,seqid);
+              return;
+            } catch (Exception e) {
+              LOGGER.error("Exception writing to internal frame buffer", e);
+            }
+            fb.close();
+          }
+          public void onError(Exception e) {
+            byte msgType = org.apache.thrift.protocol.TMessageType.REPLY;
+            org.apache.thrift.TBase msg;
+            create_ischema_result result = new create_ischema_result();
+            if (e instanceof AlreadyExistsException) {
+                        result.o1 = (AlreadyExistsException) e;
+                        result.setO1IsSet(true);
+                        msg = result;
+            }
+            else             if (e instanceof NoSuchObjectException) {
+                        result.o2 = (NoSuchObjectException) e;
+                        result.setO2IsSet(true);
+                        msg = result;
+            }
+            else             if (e instanceof MetaException) {
+                        result.o3 = (MetaException) e;
+                        result.setO3IsSet(true);
+                        msg = result;
+            }
+             else 
+            {
+              msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION;
+              msg = (org.apache.thrift.TBase)new 
org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.INTERNAL_ERROR,
 e.getMessage());
+            }
+            try {
+              fcall.sendResponse(fb,msg,msgType,seqid);
+              return;
+            } catch (Exception ex) {
+              LOGGER.error("Exception writing to internal frame buffer", ex);
+            }
+            fb.close();
+          }
+        };
+      }
+
+      protected boolean isOneway() {
+        return false;
+      }
+
+      public void start(I iface, create_ischema_args args, 
org.apache.thrift.async.AsyncMethodCallback<Void> resultHandler) throws 
TException {
+        iface.create_ischema(args.schema,resultHandler);
+      }
+    }
+
+    @org.apache.hadoop.classification.InterfaceAudience.Public 
@org.apache.hadoop.classification.InterfaceStability.Stable public static class 
alter_ischema<I extends AsyncIface> extends 
org.apache.thrift.AsyncProcessFunction<I, alter_ischema_args, Void> {
+      public alter_ischema() {
+        super("alter_ischema");
+      }
+
+      public alter_ischema_args getEmptyArgsInstance() {
+        return new alter_ischema_args();
+      }
+
+      public AsyncMethodCallback<Void> getResultHandler(final AsyncFrameBuffer 
fb, final int seqid) {
+        final org.apache.thrift.AsyncProcessFunction fcall = this;
+        return new AsyncMethodCallback<Void>() { 
+          public void onComplete(Void o) {
+            alter_ischema_result result = new alter_ischema_result();
+            try {
+              fcall.sendResponse(fb,result, 
org.apache.thrift.protocol.TMessageType.REPLY,seqid);
+              return;
+            } catch (Exception e) {
+              LOGGER.error("Exception writing to internal frame buffer", e);
+            }
+            fb.close();
+          }
+          public void onError(Exception e) {
+            byte msgType = org.apache.thrift.protocol.TMessageType.REPLY;
+            org.apache.thrift.TBase msg;
+            alter_ischema_result result = new alter_ischema_result();
+            if (e instanceof NoSuchObjectException) {
+                        result.o1 = (NoSuchObjectException) e;
+                        result.setO1IsSet(true);
+                        msg = result;
+            }
+            else             if (e instanceof MetaException) {
+                        result.o2 = (MetaException) e;
+                        result.setO2IsSet(true);
+                        msg = result;
+            }
+             else 
+            {
+              msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION;
+              msg = (org.apache.thrift.TBase)new 
org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.INTERNAL_ERROR,
 e.getMessage());
+            }
+            try {
+              fcall.sendResponse(fb,msg,msgType,seqid);
+              return;
+            } catch (Exception ex) {
+              LOGGER.error("Exception writing to internal frame buffer", ex);
+            }
+            fb.close();
+          }
+        };
+      }
+
+      protected boolean isOneway() {
+        return false;
+      }
+
+      public void start(I iface, alter_ischema_args args, 
org.apache.thrift.async.AsyncMethodCallback<Void> resultHandler) throws 
TException {
+        iface.alter_ischema(args.rqst,resultHandler);
+      }
+    }
+
+    @org.apache.hadoop.classification.InterfaceAudience.Public 
@org.apache.hadoop.classification.InterfaceStability.Stable public static class 
get_ischema<I extends AsyncIface> extends 
org.apache.thrift.AsyncProcessFunction<I, get_ischema_args, ISchema> {
+      public get_ischema() {
+        super("get_ischema");
+      }
+
+      public get_ischema_args getEmptyArgsInstance() {
+        return new get_ischema_args();
+      }
+
+      public AsyncMethodCallback<ISchema> getResultHandler(final 
AsyncFrameBuffer fb, final int seqid) {
+        final org.apache.thrift.AsyncProcessFunction fcall = this;
+        return new AsyncMethodCallback<ISchema>() { 
+          public void onComplete(ISchema o) {
+            get_ischema_result result = new get_ischema_result();
+            result.success = o;
+            try {
+              fcall.sendResponse(fb,result, 
org.apache.thrift.protocol.TMessageType.REPLY,seqid);
+              return;
+            } catch (Exception e) {
+              LOGGER.error("Exception writing to internal frame buffer", e);
+            }
+            fb.close();
+          }
+          public void onError(Exception e) {
+            byte msgType = org.apache.thrift.protocol.TMessageType.REPLY;
+            org.apache.thrift.TBase msg;
+            get_ischema_result result = new get_ischema_result();
+            if (e instanceof NoSuchObjectException) {
+                        result.o1 = (NoSuchObjectException) e;
+                        result.setO1IsSet(true);
+                        msg = result;
+            }
+            else             if (e instanceof MetaException) {
+                        result.o2 = (MetaException) e;
+                        result.setO2IsSet(true);
+                        msg = result;
+            }
+             else 
+            {
+              msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION;
+              msg = (org.apache.thrift.TBase)new 
org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.INTERNAL_ERROR,
 e.getMessage());
+            }
+            try {
+              fcall.sendResponse(fb,msg,msgType,seqid);
+              return;
+            } catch (Exception ex) {
+              LOGGER.error("Exception writing to internal frame buffer", ex);
+            }
+            fb.close();
+          }
+        };
+      }
+
+      protected boolean isOneway() {
+        return false;
+      }
+
+      public void start(I iface, get_ischema_args args, 
org.apache.thrift.async.AsyncMethodCallback<ISchema> resultHandler) throws 
TException {
+        iface.get_ischema(args.name,resultHandler);
+      }
+    }
+
+    @org.apache.hadoop.classification.InterfaceAudience.Public 
@org.apache.hadoop.classification.InterfaceStability.Stable public static class 
drop_ischema<I extends AsyncIface> extends 
org.apache.thrift.AsyncProcessFunction<I, drop_ischema_args, Void> {
+      public drop_ischema() {
+        super("drop_ischema");
+      }
+
+      public drop_ischema_args getEmptyArgsInstance() {
+        return new drop_ischema_args();
+      }
+
+      public AsyncMethodCallback<Void> getResultHandler(final AsyncFrameBuffer 
fb, final int seqid) {
+        final org.apache.thrift.AsyncProcessFunction fcall = this;
+        return new AsyncMethodCallback<Void>() { 
+          public void onComplete(Void o) {
+            drop_ischema_result result = new drop_ischema_result();
+            try {
+              fcall.sendResponse(fb,result, 
org.apache.thrift.protocol.TMessageType.REPLY,seqid);
+              return;
+            } catch (Exception e) {
+              LOGGER.error("Exception writing to internal frame buffer", e);
+            }
+            fb.close();
+          }
+          public void onError(Exception e) {
+            byte msgType = org.apache.thrift.protocol.TMessageType.REPLY;
+            org.apache.thrift.TBase msg;
+            drop_ischema_result result = new drop_ischema_result();
+            if (e instanceof NoSuchObjectException) {
+                        result.o1 = (NoSuchObjectException) e;
+                        result.setO1IsSet(true);
+                        msg = result;
+            }
+            else             if (e instanceof InvalidOperationException) {
+                        result.o2 = (InvalidOperationException) e;
+                        result.setO2IsSet(true);
+                        msg = result;
+            }
+            else             if (e instanceof MetaException) {
+                        result.o3 = (MetaException) e;
+                        result.setO3IsSet(true);
+                        msg = result;
+            }
+             else 
+            {
+              msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION;
+              msg = (org.apache.thrift.TBase)new 
org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.INTERNAL_ERROR,
 e.getMessage());
+            }
+            try {
+              fcall.sendResponse(fb,msg,msgType,seqid);
+              return;
+            } catch (Exception ex) {
+              LOGGER.error("Exception writing to internal frame buffer", ex);
+            }
+            fb.close();
+          }
+        };
+      }
+
+      protected boolean isOneway() {
+        return false;
+      }
+
+      public void start(I iface, drop_ischema_args args, 
org.apache.thrift.async.AsyncMethodCallback<Void> resultHandler) throws 
TException {
+        iface.drop_ischema(args.name,resultHandler);
+      }
+    }
+
+    @org.apache.hadoop.classification.InterfaceAudience.Public 
@org.apache.hadoop.classification.InterfaceStability.Stable public static class 
add_schema_version<I extends AsyncIface> extends 
org.apache.thrift.AsyncProcessFunction<I, add_schema_version_args, Void> {
+      public add_schema_version() {
+        super("add_schema_version");
+      }
+
+      public add_schema_version_args getEmptyArgsInstance() {
+        return new add_schema_version_args();
+      }
+
+      public AsyncMethodCallback<Void> getResultHandler(final AsyncFrameBuffer 
fb, final int seqid) {
+        final org.apache.thrift.AsyncProcessFunction fcall = this;
+        return new AsyncMethodCallback<Void>() { 
+          public void onComplete(Void o) {
+            add_schema_version_result result = new add_schema_version_result();
+            try {
+              fcall.sendResponse(fb,result, 
org.apache.thrift.protocol.TMessageType.REPLY,seqid);
+              return;
+            } catch (Exception e) {
+              LOGGER.error("Exception writing to internal frame buffer", e);
+            }
+            fb.close();
+          }
+          public void onError(Exception e) {
+            byte msgType = org.apache.thrift.protocol.TMessageType.REPLY;
+            org.apache.thrift.TBase msg;
+            add_schema_version_result result = new add_schema_version_result();
+            if (e instanceof AlreadyExistsException) {
+                        result.o1 = (AlreadyExistsException) e;
+                        result.setO1IsSet(true);
+                        msg = result;
+            }
+            else             if (e instanceof NoSuchObjectException) {
+                        result.o2 = (NoSuchObjectException) e;
+                        result.setO2IsSet(true);
+                        msg = result;
+            }
+            else             if (e instanceof MetaException) {
+                        result.o3 = (MetaException) e;
+                        result.setO3IsSet(true);
+                        msg = result;
+            }
+             else 
+            {
+              msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION;
+              msg = (org.apache.thrift.TBase)new 
org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.INTERNAL_ERROR,
 e.getMessage());
+            }
+            try {
+              fcall.sendResponse(fb,msg,msgType,seqid);
+              return;
+            } catch (Exception ex) {
+              LOGGER.error("Exception writing to internal frame buffer", ex);
+            }
+            fb.close();
+          }
+        };
+      }
+
+      protected boolean isOneway() {
+        return false;
+      }
+
+      public void start(I iface, add_schema_version_args args, 
org.apache.thrift.async.AsyncMethodCallback<Void> resultHandler) throws 
TException {
+        iface.add_schema_version(args.schemaVersion,resultHandler);
+      }
+    }
+
+    @org.apache.hadoop.classification.InterfaceAudience.Public 
@org.apache.hadoop.classification.InterfaceStability.Stable public static class 
get_schema_version<I extends AsyncIface> extends 
org.apache.thrift.AsyncProcessFunction<I, get_schema_version_args, 
SchemaVersion> {
+      public get_schema_version() {
+        super("get_schema_version");
+      }
+
+      public get_schema_version_args getEmptyArgsInstance() {
+        return new get_schema_version_args();
+      }
+
+      public AsyncMethodCallback<SchemaVersion> getResultHandler(final 
AsyncFrameBuffer fb, final int seqid) {
+        final org.apache.thrift.AsyncProcessFunction fcall = this;
+        return new AsyncMethodCallback<SchemaVersion>() { 
+          public void onComplete(SchemaVersion o) {
+            get_schema_version_result result = new get_schema_version_result();
+            result.success = o;
+            try {
+              fcall.sendResponse(fb,result, 
org.apache.thrift.protocol.TMessageType.REPLY,seqid);
+              return;
+            } catch (Exception e) {
+              LOGGER.error("Exception writing to internal frame buffer", e);
+            }
+            fb.close();
+          }
+          public void onError(Exception e) {
+            byte msgType = org.apache.thrift.protocol.TMessageType.REPLY;
+            org.apache.thrift.TBase msg;
+            get_schema_version_result result = new get_schema_version_result();
+            if (e instanceof NoSuchObjectException) {
+                        result.o1 = (NoSuchObjectException) e;
+                        result.setO1IsSet(true);
+                        msg = result;
+            }
+            else             if (e instanceof MetaException) {
+                        result.o2 = (MetaException) e;
+                        result.setO2IsSet(true);
+                        msg = result;
+            }
+             else 
+            {
+              msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION;
+              msg = (org.apache.thrift.TBase)new 
org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.INTERNAL_ERROR,
 e.getMessage());
+            }
+            try {
+              fcall.sendResponse(fb,msg,msgType,seqid);
+              return;
+            } catch (Exception ex) {
+              LOGGER.error("Exception writing to internal frame buffer", ex);
+            }
+            fb.close();
+          }
+        };
+      }
+
+      protected boolean isOneway() {
+        return false;
+      }
+
+      public void start(I iface, get_schema_version_args args, 
org.apache.thrift.async.AsyncMethodCallback<SchemaVersion> resultHandler) 
throws TException {
+        iface.get_schema_version(args.schemaVersion,resultHandler);
+      }
+    }
+
+    @org.apache.hadoop.classification.InterfaceAudience.Public 
@org.apache.hadoop.classification.InterfaceStability.Stable public static class 
get_schema_latest_version<I extends AsyncIface> extends 
org.apache.thrift.AsyncProcessFunction<I, get_schema_latest_version_args, 
SchemaVersion> {
+      public get_schema_latest_version() {
+        super("get_schema_latest_version");
+      }
+
+      public get_schema_latest_version_args getEmptyArgsInstance() {
+        return new get_schema_latest_version_args();
+      }
+
+      public AsyncMethodCallback<SchemaVersion> getResultHandler(final 
AsyncFrameBuffer fb, final int seqid) {
+        final org.apache.thrift.AsyncProcessFunction fcall = this;
+        return new AsyncMethodCallback<SchemaVersion>() { 
+          public void onComplete(SchemaVersion o) {
+            get_schema_latest_version_result result = new 
get_schema_latest_version_result();
+            result.success = o;
+            try {
+              fcall.sendResponse(fb,result, 
org.apache.thrift.protocol.TMessageType.REPLY,seqid);
+              return;
+            } catch (Exception e) {
+              LOGGER.error("Exception writing to internal frame buffer", e);
+            }
+            fb.close();
+          }
+          public void onError(Exception e) {
+            byte msgType = org.apache.thrift.protocol.TMessageType.REPLY;
+            org.apache.thrift.TBase msg;
+            get_schema_latest_version_result result = new 
get_schema_latest_version_result();
+            if (e instanceof NoSuchObjectException) {
+                        result.o1 = (NoSuchObjectException) e;
+                        result.setO1IsSet(true);
+                        msg = result;
+            }
+            else             if (e instanceof MetaException) {
+                        result.o2 = (MetaException) e;
+                        result.setO2IsSet(true);
+                        msg = result;
+            }
+             else 
+            {
+              msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION;
+              msg = (org.apache.thrift.TBase)new 
org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.INTERNAL_ERROR,
 e.getMessage());
+            }
+            try {
+              fcall.sendResponse(fb,msg,msgType,seqid);
+              return;
+            } catch (Exception ex) {
+              LOGGER.error("Exception writing to internal frame buffer", ex);
+            }
+            fb.close();
+          }
+        };
+      }
+
+      protected boolean isOneway() {
+        return false;
+      }
+
+      public void start(I iface, get_schema_latest_version_args args, 
org.apache.thrift.async.AsyncMethodCallback<SchemaVersion> resultHandler) 
throws TException {
+        iface.get_schema_latest_version(args.schemaName,resultHandler);
+      }
+    }
+
+    @org.apache.hadoop.classification.InterfaceAudience.Public 
@org.apache.hadoop.classification.InterfaceStability.Stable public static class 
get_schema_all_versions<I extends AsyncIface> extends 
org.apache.thrift.AsyncProcessFunction<I, get_schema_all_versions_args, 
List<SchemaVersion>> {
+      public get_schema_all_versions() {
+        super("get_schema_all_versions");
+      }
+
+      public get_schema_all_versions_args getEmptyArgsInstance() {
+        return new get_schema_all_versions_args();
+      }
+
+      public AsyncMethodCallback<List<SchemaVersion>> getResultHandler(final 
AsyncFrameBuffer fb, final int seqid) {
+ 

<TRUNCATED>

Reply via email to