http://git-wip-us.apache.org/repos/asf/hive/blob/12041d39/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
----------------------------------------------------------------------
diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
index c99c3af..db09407 100644
--- a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
+++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
@@ -53,6 +53,7 @@ import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.locks.Condition;
 import java.util.concurrent.locks.Lock;
 import java.util.concurrent.locks.ReentrantLock;
+import java.util.function.Consumer;
 import java.util.regex.Pattern;
 
 import javax.jdo.JDOException;
@@ -79,33 +80,47 @@ import org.apache.hadoop.hive.metastore.events.AddPartitionEvent;
 import org.apache.hadoop.hive.metastore.events.AddPrimaryKeyEvent;
 import org.apache.hadoop.hive.metastore.events.AddUniqueConstraintEvent;
 import org.apache.hadoop.hive.metastore.events.AlterDatabaseEvent;
+import org.apache.hadoop.hive.metastore.events.AlterISchemaEvent;
 import org.apache.hadoop.hive.metastore.events.AlterPartitionEvent;
+import org.apache.hadoop.hive.metastore.events.AlterSchemaVersionEvent;
 import org.apache.hadoop.hive.metastore.events.AlterTableEvent;
 import org.apache.hadoop.hive.metastore.events.ConfigChangeEvent;
 import org.apache.hadoop.hive.metastore.events.CreateDatabaseEvent;
 import org.apache.hadoop.hive.metastore.events.CreateFunctionEvent;
+import org.apache.hadoop.hive.metastore.events.CreateISchemaEvent;
+import org.apache.hadoop.hive.metastore.events.AddSchemaVersionEvent;
 import org.apache.hadoop.hive.metastore.events.CreateTableEvent;
 import org.apache.hadoop.hive.metastore.events.DropConstraintEvent;
 import org.apache.hadoop.hive.metastore.events.DropDatabaseEvent;
 import org.apache.hadoop.hive.metastore.events.DropFunctionEvent;
+import org.apache.hadoop.hive.metastore.events.DropISchemaEvent;
 import org.apache.hadoop.hive.metastore.events.DropPartitionEvent;
+import org.apache.hadoop.hive.metastore.events.DropSchemaVersionEvent;
 import org.apache.hadoop.hive.metastore.events.DropTableEvent;
 import org.apache.hadoop.hive.metastore.events.InsertEvent;
 import org.apache.hadoop.hive.metastore.events.LoadPartitionDoneEvent;
 import org.apache.hadoop.hive.metastore.events.PreAddPartitionEvent;
 import org.apache.hadoop.hive.metastore.events.PreAlterDatabaseEvent;
+import org.apache.hadoop.hive.metastore.events.PreAlterISchemaEvent;
 import org.apache.hadoop.hive.metastore.events.PreAlterPartitionEvent;
+import org.apache.hadoop.hive.metastore.events.PreAlterSchemaVersionEvent;
 import org.apache.hadoop.hive.metastore.events.PreAlterTableEvent;
 import org.apache.hadoop.hive.metastore.events.PreAuthorizationCallEvent;
 import org.apache.hadoop.hive.metastore.events.PreCreateDatabaseEvent;
+import org.apache.hadoop.hive.metastore.events.PreCreateISchemaEvent;
+import org.apache.hadoop.hive.metastore.events.PreAddSchemaVersionEvent;
 import org.apache.hadoop.hive.metastore.events.PreCreateTableEvent;
 import org.apache.hadoop.hive.metastore.events.PreDropDatabaseEvent;
+import org.apache.hadoop.hive.metastore.events.PreDropISchemaEvent;
 import org.apache.hadoop.hive.metastore.events.PreDropPartitionEvent;
+import org.apache.hadoop.hive.metastore.events.PreDropSchemaVersionEvent;
 import org.apache.hadoop.hive.metastore.events.PreDropTableEvent;
 import org.apache.hadoop.hive.metastore.events.PreEventContext;
 import org.apache.hadoop.hive.metastore.events.PreLoadPartitionDoneEvent;
 import org.apache.hadoop.hive.metastore.events.PreReadDatabaseEvent;
+import org.apache.hadoop.hive.metastore.events.PreReadISchemaEvent;
 import org.apache.hadoop.hive.metastore.events.PreReadTableEvent;
+import org.apache.hadoop.hive.metastore.events.PreReadhSchemaVersionEvent;
 import org.apache.hadoop.hive.metastore.messaging.EventMessage.EventType;
 import org.apache.hadoop.hive.metastore.metrics.JvmPauseMonitor;
 import org.apache.hadoop.hive.metastore.metrics.Metrics;
@@ -7106,7 +7121,6 @@ public class HiveMetaStore extends ThriftHiveMetastore {
       }
     }
 
-
     @Override
     public WMCreateResourcePlanResponse create_resource_plan(WMCreateResourcePlanRequest request)
         throws AlreadyExistsException, InvalidObjectException, MetaException, TException {
@@ -7254,7 +7268,8 @@ public class HiveMetaStore extends ThriftHiveMetastore {
         WMGetTriggersForResourePlanRequest request)
         throws NoSuchObjectException, MetaException, TException {
       try {
-        List<WMTrigger> triggers = getMS().getTriggersForResourcePlan(request.getResourcePlanName());
+        List<WMTrigger> triggers =
+            getMS().getTriggersForResourcePlan(request.getResourcePlanName());
         WMGetTriggersForResourePlanResponse response = new WMGetTriggersForResourePlanResponse();
         response.setTriggers(triggers);
         return response;
@@ -7264,33 +7279,31 @@ public class HiveMetaStore extends ThriftHiveMetastore {
       }
     }
 
-    @Override
-    public WMCreatePoolResponse create_wm_pool(WMCreatePoolRequest request)
+    public WMAlterPoolResponse alter_wm_pool(WMAlterPoolRequest request)
         throws AlreadyExistsException, NoSuchObjectException, InvalidObjectException, MetaException,
         TException {
       try {
-        getMS().createPool(request.getPool());
-        return new WMCreatePoolResponse();
+        getMS().alterPool(request.getPool(), request.getPoolPath());
+        return new WMAlterPoolResponse();
       } catch (MetaException e) {
-        LOG.error("Exception while trying to create WMPool", e);
+        LOG.error("Exception while trying to alter WMPool", e);
         throw e;
       }
     }
 
     @Override
-    public WMAlterPoolResponse alter_wm_pool(WMAlterPoolRequest request)
+    public WMCreatePoolResponse create_wm_pool(WMCreatePoolRequest request)
         throws AlreadyExistsException, NoSuchObjectException, InvalidObjectException, MetaException,
         TException {
       try {
-        getMS().alterPool(request.getPool(), request.getPoolPath());
-        return new WMAlterPoolResponse();
+        getMS().createPool(request.getPool());
+        return new WMCreatePoolResponse();
       } catch (MetaException e) {
-        LOG.error("Exception while trying to alter WMPool", e);
+        LOG.error("Exception while trying to create WMPool", e);
         throw e;
       }
     }
 
-    @Override
     public WMDropPoolResponse drop_wm_pool(WMDropPoolRequest request)
         throws NoSuchObjectException, InvalidOperationException, MetaException, TException {
       try {
@@ -7302,7 +7315,6 @@ public class HiveMetaStore extends ThriftHiveMetastore {
       }
     }
 
-    @Override
     public WMCreateOrUpdateMappingResponse create_or_update_wm_mapping(
         WMCreateOrUpdateMappingRequest request) throws AlreadyExistsException,
         NoSuchObjectException, InvalidObjectException, MetaException, TException {
@@ -7315,7 +7327,6 @@ public class HiveMetaStore extends ThriftHiveMetastore {
       }
     }
 
-    @Override
     public WMDropMappingResponse drop_wm_mapping(WMDropMappingRequest request)
         throws NoSuchObjectException, InvalidOperationException, MetaException, TException {
       try {
@@ -7327,7 +7338,6 @@ public class HiveMetaStore extends ThriftHiveMetastore {
       }
     }
 
-    @Override
     public WMCreateOrDropTriggerToPoolMappingResponse create_or_drop_wm_trigger_to_pool_mapping(
         WMCreateOrDropTriggerToPoolMappingRequest request) throws AlreadyExistsException,
         NoSuchObjectException, InvalidObjectException, MetaException, TException {
@@ -7345,6 +7355,438 @@ public class HiveMetaStore extends ThriftHiveMetastore {
         throw e;
       }
     }
+
+    public void create_ischema(ISchema schema) throws TException {
+      startFunction("create_ischema", ": " + schema.getName());
+      boolean success = false;
+      Exception ex = null;
+      RawStore ms = getMS();
+      try {
+        firePreEvent(new PreCreateISchemaEvent(this, schema));
+        Map<String, String> transactionalListenersResponses = Collections.emptyMap();
+        ms.openTransaction();
+        try {
+          ms.createISchema(schema);
+
+          if (!transactionalListeners.isEmpty()) {
+            transactionalListenersResponses =
+                MetaStoreListenerNotifier.notifyEvent(transactionalListeners,
+                    EventType.CREATE_ISCHEMA, new CreateISchemaEvent(true, this, schema));
+          }
+          success = ms.commitTransaction();
+        } finally {
+          if (!success) ms.rollbackTransaction();
+          if (!listeners.isEmpty()) {
+            MetaStoreListenerNotifier.notifyEvent(listeners, EventType.CREATE_ISCHEMA,
+                new CreateISchemaEvent(success, this, schema), null,
+                transactionalListenersResponses, ms);
+          }
+        }
+      } catch (MetaException|AlreadyExistsException e) {
+        LOG.error("Caught exception creating schema", e);
+        ex = e;
+        throw e;
+      } finally {
+        endFunction("create_ischema", success, ex);
+      }
+    }
+
+    @Override
+    public void alter_ischema(AlterISchemaRequest rqst) throws TException {
+      startFunction("alter_ischema", ": " + rqst);
+      boolean success = false;
+      Exception ex = null;
+      RawStore ms = getMS();
+      try {
+        ISchema oldSchema = ms.getISchema(rqst.getName());
+        if (oldSchema == null) {
+          throw new NoSuchObjectException("Could not find schema " + rqst.getName());
+        }
+        firePreEvent(new PreAlterISchemaEvent(this, oldSchema, rqst.getNewSchema()));
+        Map<String, String> transactionalListenersResponses = Collections.emptyMap();
+        ms.openTransaction();
+        try {
+          ms.alterISchema(rqst.getName(), rqst.getNewSchema());
+          if (!transactionalListeners.isEmpty()) {
+            transactionalListenersResponses =
+                MetaStoreListenerNotifier.notifyEvent(transactionalListeners,
+                    EventType.ALTER_ISCHEMA, new AlterISchemaEvent(true, this, oldSchema, rqst.getNewSchema()));
+          }
+          success = ms.commitTransaction();
+        } finally {
+          if (!success) ms.rollbackTransaction();
+          if (!listeners.isEmpty()) {
+            MetaStoreListenerNotifier.notifyEvent(listeners, EventType.ALTER_ISCHEMA,
+                new AlterISchemaEvent(success, this, oldSchema, rqst.getNewSchema()), null,
+                transactionalListenersResponses, ms);
+          }
+        }
+      } catch (MetaException|NoSuchObjectException e) {
+        LOG.error("Caught exception altering schema", e);
+        ex = e;
+        throw e;
+      } finally {
+        endFunction("alter_ischema", success, ex);
+      }
+    }
+
+    @Override
+    public ISchema get_ischema(ISchemaName schemaName) throws TException {
+      startFunction("get_ischema", ": " + schemaName);
+      Exception ex = null;
+      ISchema schema = null;
+      try {
+        schema = getMS().getISchema(schemaName);
+        if (schema == null) {
+          throw new NoSuchObjectException("No schema named " + schemaName + " exists");
+        }
+        firePreEvent(new PreReadISchemaEvent(this, schema));
+        return schema;
+      } catch (MetaException e) {
+        LOG.error("Caught exception getting schema", e);
+        ex = e;
+        throw e;
+      } finally {
+        endFunction("get_ischema", schema != null, ex);
+      }
+    }
+
+    @Override
+    public void drop_ischema(ISchemaName schemaName) throws TException {
+      startFunction("drop_ischema", ": " + schemaName);
+      Exception ex = null;
+      boolean success = false;
+      RawStore ms = getMS();
+      try {
+        // look for any valid versions.  This will also throw NoSuchObjectException if the schema
+        // itself doesn't exist, which is what we want.
+        SchemaVersion latest = ms.getLatestSchemaVersion(schemaName);
+        if (latest != null) {
+          ex = new InvalidOperationException("Schema " + schemaName + " cannot be dropped, it has" +
+              " at least one valid version");
+          throw (InvalidOperationException)ex;
+        }
+        ISchema schema = ms.getISchema(schemaName);
+        firePreEvent(new PreDropISchemaEvent(this, schema));
+        Map<String, String> transactionalListenersResponses = Collections.emptyMap();
+        ms.openTransaction();
+        try {
+          ms.dropISchema(schemaName);
+          if (!transactionalListeners.isEmpty()) {
+            transactionalListenersResponses =
+                MetaStoreListenerNotifier.notifyEvent(transactionalListeners,
+                    EventType.DROP_ISCHEMA, new DropISchemaEvent(true, this, schema));
+          }
+          success = ms.commitTransaction();
+        } finally {
+          if (!success) ms.rollbackTransaction();
+          if (!listeners.isEmpty()) {
+            MetaStoreListenerNotifier.notifyEvent(listeners, EventType.DROP_ISCHEMA,
+                new DropISchemaEvent(success, this, schema), null,
+                transactionalListenersResponses, ms);
+          }
+        }
+      } catch (MetaException|NoSuchObjectException e) {
+        LOG.error("Caught exception dropping schema", e);
+        ex = e;
+        throw e;
+      } finally {
+        endFunction("drop_ischema", success, ex);
+      }
+    }
+
+    @Override
+    public void add_schema_version(SchemaVersion schemaVersion) throws TException {
+      startFunction("add_schema_version", ": " + schemaVersion);
+      boolean success = false;
+      Exception ex = null;
+      RawStore ms = getMS();
+      try {
+        // Make sure the referenced schema exists
+        if (ms.getISchema(schemaVersion.getSchema()) == null) {
+          throw new NoSuchObjectException("No schema named " + schemaVersion.getSchema());
+        }
+        firePreEvent(new PreAddSchemaVersionEvent(this, schemaVersion));
+        Map<String, String> transactionalListenersResponses = Collections.emptyMap();
+        ms.openTransaction();
+        try {
+          ms.addSchemaVersion(schemaVersion);
+
+          if (!transactionalListeners.isEmpty()) {
+            transactionalListenersResponses =
+                MetaStoreListenerNotifier.notifyEvent(transactionalListeners,
+                    EventType.ADD_SCHEMA_VERSION, new AddSchemaVersionEvent(true, this, schemaVersion));
+          }
+          success = ms.commitTransaction();
+        } finally {
+          if (!success) ms.rollbackTransaction();
+          if (!listeners.isEmpty()) {
+            MetaStoreListenerNotifier.notifyEvent(listeners, EventType.ADD_SCHEMA_VERSION,
+                new AddSchemaVersionEvent(success, this, schemaVersion), null,
+                transactionalListenersResponses, ms);
+          }
+        }
+      } catch (MetaException|AlreadyExistsException e) {
+        LOG.error("Caught exception adding schema version", e);
+        ex = e;
+        throw e;
+      } finally {
+        endFunction("add_schema_version", success, ex);
+      }
+    }
+
+    @Override
+    public SchemaVersion get_schema_version(SchemaVersionDescriptor version) throws TException {
+      startFunction("get_schema_version", ": " + version);
+      Exception ex = null;
+      SchemaVersion schemaVersion = null;
+      try {
+        schemaVersion = getMS().getSchemaVersion(version);
+        if (schemaVersion == null) {
+          throw new NoSuchObjectException("No schema version " + version + " exists");
+        }
+        firePreEvent(new PreReadhSchemaVersionEvent(this, Collections.singletonList(schemaVersion)));
+        return schemaVersion;
+      } catch (MetaException e) {
+        LOG.error("Caught exception getting schema version", e);
+        ex = e;
+        throw e;
+      } finally {
+        endFunction("get_schema_version", schemaVersion != null, ex);
+      }
+    }
+
+    @Override
+    public SchemaVersion get_schema_latest_version(ISchemaName schemaName) throws TException {
+      startFunction("get_latest_schema_version", ": " + schemaName);
+      Exception ex = null;
+      SchemaVersion schemaVersion = null;
+      try {
+        schemaVersion = getMS().getLatestSchemaVersion(schemaName);
+        if (schemaVersion == null) {
+          throw new NoSuchObjectException("No versions of schema " + schemaName + " exist");
+        }
+        firePreEvent(new PreReadhSchemaVersionEvent(this, Collections.singletonList(schemaVersion)));
+        return schemaVersion;
+      } catch (MetaException e) {
+        LOG.error("Caught exception getting latest schema version", e);
+        ex = e;
+        throw e;
+      } finally {
+        endFunction("get_latest_schema_version", schemaVersion != null, ex);
+      }
+    }
+
+    @Override
+    public List<SchemaVersion> get_schema_all_versions(ISchemaName schemaName) throws TException {
+      startFunction("get_all_schema_versions", ": " + schemaName);
+      Exception ex = null;
+      List<SchemaVersion> schemaVersions = null;
+      try {
+        schemaVersions = getMS().getAllSchemaVersion(schemaName);
+        if (schemaVersions == null) {
+          throw new NoSuchObjectException("No versions of schema " + schemaName + " exist");
+        }
+        firePreEvent(new PreReadhSchemaVersionEvent(this, schemaVersions));
+        return schemaVersions;
+      } catch (MetaException e) {
+        LOG.error("Caught exception getting all schema versions", e);
+        ex = e;
+        throw e;
+      } finally {
+        endFunction("get_all_schema_versions", schemaVersions != null, ex);
+      }
+    }
+
+    @Override
+    public void drop_schema_version(SchemaVersionDescriptor version) throws TException {
+      startFunction("drop_schema_version", ": " + version);
+      Exception ex = null;
+      boolean success = false;
+      RawStore ms = getMS();
+      try {
+        SchemaVersion schemaVersion = ms.getSchemaVersion(version);
+        if (schemaVersion == null) {
+          throw new NoSuchObjectException("No schema version " + version);
+        }
+        firePreEvent(new PreDropSchemaVersionEvent(this, schemaVersion));
+        Map<String, String> transactionalListenersResponses = Collections.emptyMap();
+        ms.openTransaction();
+        try {
+          ms.dropSchemaVersion(version);
+          if (!transactionalListeners.isEmpty()) {
+            transactionalListenersResponses =
+                MetaStoreListenerNotifier.notifyEvent(transactionalListeners,
+                    EventType.DROP_SCHEMA_VERSION, new DropSchemaVersionEvent(true, this, schemaVersion));
+          }
+          success = ms.commitTransaction();
+        } finally {
+          if (!success) ms.rollbackTransaction();
+          if (!listeners.isEmpty()) {
+            MetaStoreListenerNotifier.notifyEvent(listeners, EventType.DROP_SCHEMA_VERSION,
+                new DropSchemaVersionEvent(success, this, schemaVersion), null,
+                transactionalListenersResponses, ms);
+          }
+        }
+      } catch (MetaException|NoSuchObjectException e) {
+        LOG.error("Caught exception dropping schema version", e);
+        ex = e;
+        throw e;
+      } finally {
+        endFunction("drop_schema_version", success, ex);
+      }
+    }
+
+    @Override
+    public FindSchemasByColsResp get_schemas_by_cols(FindSchemasByColsRqst rqst) throws TException {
+      startFunction("get_schemas_by_cols");
+      Exception ex = null;
+      List<SchemaVersion> schemaVersions = Collections.emptyList();
+      try {
+        schemaVersions = getMS().getSchemaVersionsByColumns(rqst.getColName(),
+            rqst.getColNamespace(), rqst.getType());
+        firePreEvent(new PreReadhSchemaVersionEvent(this, schemaVersions));
+        final List<SchemaVersionDescriptor> entries = new ArrayList<>(schemaVersions.size());
+        schemaVersions.forEach(schemaVersion -> entries.add(
+            new SchemaVersionDescriptor(schemaVersion.getSchema(), schemaVersion.getVersion())));
+        return new FindSchemasByColsResp(entries);
+      } catch (MetaException e) {
+        LOG.error("Caught exception doing schema version query", e);
+        ex = e;
+        throw e;
+      } finally {
+        endFunction("get_schemas_by_cols", !schemaVersions.isEmpty(), ex);
+      }
+    }
+
+    @Override
+    public void map_schema_version_to_serde(MapSchemaVersionToSerdeRequest rqst)
+        throws TException {
+      startFunction("map_schema_version_to_serde", ": " + rqst);
+      boolean success = false;
+      Exception ex = null;
+      RawStore ms = getMS();
+      try {
+        SchemaVersion oldSchemaVersion = ms.getSchemaVersion(rqst.getSchemaVersion());
+        if (oldSchemaVersion == null) {
+          throw new NoSuchObjectException("No schema version " + rqst.getSchemaVersion());
+        }
+        SerDeInfo serde = ms.getSerDeInfo(rqst.getSerdeName());
+        if (serde == null) {
+          throw new NoSuchObjectException("No SerDe named " + rqst.getSerdeName());
+        }
+        SchemaVersion newSchemaVersion = new SchemaVersion(oldSchemaVersion);
+        newSchemaVersion.setSerDe(serde);
+        firePreEvent(new PreAlterSchemaVersionEvent(this, oldSchemaVersion, newSchemaVersion));
+        Map<String, String> transactionalListenersResponses = Collections.emptyMap();
+        ms.openTransaction();
+        try {
+          ms.alterSchemaVersion(rqst.getSchemaVersion(), newSchemaVersion);
+          if (!transactionalListeners.isEmpty()) {
+            transactionalListenersResponses =
+                MetaStoreListenerNotifier.notifyEvent(transactionalListeners,
+                    EventType.ALTER_SCHEMA_VERSION, new AlterSchemaVersionEvent(true, this,
+                        oldSchemaVersion, newSchemaVersion));
+          }
+          success = ms.commitTransaction();
+        } finally {
+          if (!success) ms.rollbackTransaction();
+          if (!listeners.isEmpty()) {
+            MetaStoreListenerNotifier.notifyEvent(listeners, EventType.ALTER_SCHEMA_VERSION,
+                new AlterSchemaVersionEvent(success, this, oldSchemaVersion, newSchemaVersion), null,
+                transactionalListenersResponses, ms);
+          }
+        }
+      } catch (MetaException|NoSuchObjectException e) {
+        LOG.error("Caught exception mapping schema version to serde", e);
+        ex = e;
+        throw e;
+      } finally {
+        endFunction("map_schema_version_to_serde", success, ex);
+      }
+    }
+
+    @Override
+    public void set_schema_version_state(SetSchemaVersionStateRequest rqst) throws TException {
+      startFunction("set_schema_version_state", ": " + rqst);
+      boolean success = false;
+      Exception ex = null;
+      RawStore ms = getMS();
+      try {
+        SchemaVersion oldSchemaVersion = ms.getSchemaVersion(rqst.getSchemaVersion());
+        if (oldSchemaVersion == null) {
+          throw new NoSuchObjectException("No schema version " + rqst.getSchemaVersion());
+        }
+        SchemaVersion newSchemaVersion = new SchemaVersion(oldSchemaVersion);
+        newSchemaVersion.setState(rqst.getState());
+        firePreEvent(new PreAlterSchemaVersionEvent(this, oldSchemaVersion, newSchemaVersion));
+        Map<String, String> transactionalListenersResponses = Collections.emptyMap();
+        ms.openTransaction();
+        try {
+          ms.alterSchemaVersion(rqst.getSchemaVersion(), newSchemaVersion);
+          if (!transactionalListeners.isEmpty()) {
+            transactionalListenersResponses =
+                MetaStoreListenerNotifier.notifyEvent(transactionalListeners,
+                    EventType.ALTER_SCHEMA_VERSION, new AlterSchemaVersionEvent(true, this,
+                        oldSchemaVersion, newSchemaVersion));
+          }
+          success = ms.commitTransaction();
+        } finally {
+          if (!success) ms.rollbackTransaction();
+          if (!listeners.isEmpty()) {
+            MetaStoreListenerNotifier.notifyEvent(listeners, EventType.ALTER_SCHEMA_VERSION,
+                new AlterSchemaVersionEvent(success, this, oldSchemaVersion, newSchemaVersion), null,
+                transactionalListenersResponses, ms);
+          }
+        }
+      } catch (MetaException|NoSuchObjectException e) {
+        LOG.error("Caught exception changing schema version state", e);
+        ex = e;
+        throw e;
+      } finally {
+        endFunction("set_schema_version_state", success, ex);
+      }
+    }
+
+    @Override
+    public void add_serde(SerDeInfo serde) throws TException {
+      startFunction("create_serde", ": " + serde.getName());
+      Exception ex = null;
+      boolean success = false;
+      RawStore ms = getMS();
+      try {
+        ms.openTransaction();
+        ms.addSerde(serde);
+        success = ms.commitTransaction();
+      } catch (MetaException|AlreadyExistsException e) {
+        LOG.error("Caught exception creating serde", e);
+        ex = e;
+        throw e;
+      } finally {
+        if (!success) ms.rollbackTransaction();
+        endFunction("create_serde", success, ex);
+      }
+    }
+
+    @Override
+    public SerDeInfo get_serde(GetSerdeRequest rqst) throws TException {
+      startFunction("get_serde", ": " + rqst);
+      Exception ex = null;
+      SerDeInfo serde = null;
+      try {
+        serde = getMS().getSerDeInfo(rqst.getSerdeName());
+        if (serde == null) {
+          throw new NoSuchObjectException("No serde named " + rqst.getSerdeName() + " exists");
+        }
+        return serde;
+      } catch (MetaException e) {
+        LOG.error("Caught exception getting serde", e);
+        ex = e;
+        throw e;
+      } finally {
+        endFunction("get_serde", serde != null, ex);
+      }
+    }
   }
 
   private static IHMSHandler newRetryingHMSHandler(IHMSHandler baseHandler, Configuration conf)

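For reference, every new write endpoint added to HMSHandler above (create_ischema, alter_ischema, drop_ischema, add_schema_version, drop_schema_version, map_schema_version_to_serde, set_schema_version_state) repeats the same transactional-listener shape. A minimal sketch of that shape follows; doWrite(), EVENT_TYPE and event are placeholders standing in for the per-endpoint specifics, not names from the patch:

    // Common shape of the new write endpoints (sketch only).
    firePreEvent(preEvent);                      // authorization / validation hook
    Map<String, String> txnResponses = Collections.emptyMap();
    boolean success = false;
    ms.openTransaction();
    try {
      doWrite(ms);                               // e.g. ms.createISchema(schema)
      if (!transactionalListeners.isEmpty()) {
        // transactional listeners are notified inside the open metastore transaction
        txnResponses = MetaStoreListenerNotifier.notifyEvent(
            transactionalListeners, EVENT_TYPE, event);
      }
      success = ms.commitTransaction();
    } finally {
      if (!success) ms.rollbackTransaction();
      if (!listeners.isEmpty()) {
        // ordinary listeners hear about the outcome whether or not the commit succeeded
        MetaStoreListenerNotifier.notifyEvent(listeners, EVENT_TYPE, event, null,
            txnResponses, ms);
      }
    }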
http://git-wip-us.apache.org/repos/asf/hive/blob/12041d39/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
----------------------------------------------------------------------
diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
index 3128089..1755700b 100644
--- a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
+++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java
@@ -2794,4 +2794,76 @@ public class HiveMetaStoreClient implements IMetaStoreClient, AutoCloseable {
     client.create_or_drop_wm_trigger_to_pool_mapping(request);
   }
 
+  public void createISchema(ISchema schema) throws TException {
+    client.create_ischema(schema);
+  }
+
+  @Override
+  public void alterISchema(String dbName, String schemaName, ISchema newSchema) throws TException {
+    client.alter_ischema(new AlterISchemaRequest(new ISchemaName(dbName, schemaName), newSchema));
+  }
+
+  @Override
+  public ISchema getISchema(String dbName, String name) throws TException {
+    return client.get_ischema(new ISchemaName(dbName, name));
+  }
+
+  @Override
+  public void dropISchema(String dbName, String name) throws TException {
+    client.drop_ischema(new ISchemaName(dbName, name));
+  }
+
+  @Override
+  public void addSchemaVersion(SchemaVersion schemaVersion) throws TException {
+    client.add_schema_version(schemaVersion);
+  }
+
+  @Override
+  public SchemaVersion getSchemaVersion(String dbName, String schemaName, int version) throws TException {
+    return client.get_schema_version(new SchemaVersionDescriptor(new ISchemaName(dbName, schemaName), version));
+  }
+
+  @Override
+  public SchemaVersion getSchemaLatestVersion(String dbName, String schemaName) throws TException {
+    return client.get_schema_latest_version(new ISchemaName(dbName, schemaName));
+  }
+
+  @Override
+  public List<SchemaVersion> getSchemaAllVersions(String dbName, String schemaName) throws TException {
+    return client.get_schema_all_versions(new ISchemaName(dbName, schemaName));
+  }
+
+  @Override
+  public void dropSchemaVersion(String dbName, String schemaName, int version) throws TException {
+    client.drop_schema_version(new SchemaVersionDescriptor(new ISchemaName(dbName, schemaName), version));
+  }
+
+  @Override
+  public FindSchemasByColsResp getSchemaByCols(FindSchemasByColsRqst rqst) throws TException {
+    return client.get_schemas_by_cols(rqst);
+  }
+
+  @Override
+  public void mapSchemaVersionToSerde(String dbName, String schemaName, int version, String serdeName)
+      throws TException {
+    client.map_schema_version_to_serde(new MapSchemaVersionToSerdeRequest(
+        new SchemaVersionDescriptor(new ISchemaName(dbName, schemaName), version), serdeName));
+  }
+
+  @Override
+  public void setSchemaVersionState(String dbName, String schemaName, int version, SchemaVersionState state)
+      throws TException {
+    client.set_schema_version_state(new SetSchemaVersionStateRequest(new SchemaVersionDescriptor(
+        new ISchemaName(dbName, schemaName), version), state));
+  }
+
+  @Override
+  public void addSerDe(SerDeInfo serDeInfo) throws TException {
+    client.add_serde(serDeInfo);
+  }
+
+  @Override
+  public SerDeInfo getSerDe(String serDeName) throws TException {
+    return client.get_serde(new GetSerdeRequest(serDeName));
+  }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/12041d39/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
----------------------------------------------------------------------
diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
index 46984cb..f1d5066 100644
--- a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
+++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/IMetaStoreClient.java
@@ -49,6 +49,8 @@ import org.apache.hadoop.hive.metastore.api.DefaultConstraintsRequest;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.EnvironmentContext;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.FindSchemasByColsResp;
+import org.apache.hadoop.hive.metastore.api.FindSchemasByColsRqst;
 import org.apache.hadoop.hive.metastore.api.FireEventRequest;
 import org.apache.hadoop.hive.metastore.api.FireEventResponse;
 import org.apache.hadoop.hive.metastore.api.ForeignKeysRequest;
@@ -62,6 +64,7 @@ import org.apache.hadoop.hive.metastore.api.GetRoleGrantsForPrincipalResponse;
 import org.apache.hadoop.hive.metastore.api.HeartbeatTxnRangeResponse;
 import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
 import org.apache.hadoop.hive.metastore.api.HiveObjectRef;
+import org.apache.hadoop.hive.metastore.api.ISchema;
 import org.apache.hadoop.hive.metastore.api.InvalidInputException;
 import org.apache.hadoop.hive.metastore.api.InvalidObjectException;
 import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
@@ -94,6 +97,9 @@ import org.apache.hadoop.hive.metastore.api.SQLForeignKey;
 import org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint;
 import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey;
 import org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint;
+import org.apache.hadoop.hive.metastore.api.SchemaVersion;
+import org.apache.hadoop.hive.metastore.api.SchemaVersionState;
+import org.apache.hadoop.hive.metastore.api.SerDeInfo;
 import org.apache.hadoop.hive.metastore.api.SetPartitionsStatsRequest;
 import org.apache.hadoop.hive.metastore.api.ShowCompactResponse;
 import org.apache.hadoop.hive.metastore.api.ShowLocksRequest;
@@ -1828,4 +1834,165 @@ public interface IMetaStoreClient {
   void createOrDropTriggerToPoolMapping(String resourcePlanName, String triggerName,
       String poolPath, boolean shouldDrop) throws AlreadyExistsException, NoSuchObjectException,
       InvalidObjectException, MetaException, TException;
+
+  /**
+   * Create a new schema.  This is really a schema container, as there will be specific versions
+   * of the schema that have columns, etc.
+   * @param schema schema to create
+   * @throws AlreadyExistsException if a schema of this name already exists
+   * @throws NoSuchObjectException the database referenced by this schema does not exist
+   * @throws MetaException general metastore error
+   * @throws TException general thrift error
+   */
+  void createISchema(ISchema schema) throws TException;
+
+  /**
+   * Alter an existing schema.
+   * @param dbName database the schema is in
+   * @param schemaName name of the schema
+   * @param newSchema altered schema object
+   * @throws NoSuchObjectException no schema with this name could be found
+   * @throws MetaException general metastore error
+   * @throws TException general thrift error
+   */
+  void alterISchema(String dbName, String schemaName, ISchema newSchema) throws TException;
+
+  /**
+   * Fetch a schema.
+   * @param dbName database the schema is in
+   * @param name name of the schema
+   * @return the schema or null if no such schema
+   * @throws NoSuchObjectException no schema matching this name exists
+   * @throws MetaException general metastore error
+   * @throws TException general thrift error
+   */
+  ISchema getISchema(String dbName, String name) throws TException;
+
+  /**
+   * Drop an existing schema.  If there are schema versions of this, this call will fail.
+   * @param dbName database the schema is in
+   * @param name name of the schema to drop
+   * @throws NoSuchObjectException no schema with this name could be found
+   * @throws InvalidOperationException attempt to drop a schema that has versions
+   * @throws MetaException general metastore error
+   * @throws TException general thrift error
+   */
+  void dropISchema(String dbName, String name) throws TException;
+
+  /**
+   * Add a new version to an existing schema.
+   * @param schemaVersion version object to add
+   * @throws AlreadyExistsException a version of this schema with the same version id already exists
+   * @throws NoSuchObjectException no schema with this name could be found
+   * @throws MetaException general metastore error
+   * @throws TException general thrift error
+   */
+  void addSchemaVersion(SchemaVersion schemaVersion) throws TException;
+
+  /**
+   * Get a specific version of a schema.
+   * @param dbName database the schema is in
+   * @param schemaName name of the schema
+   * @param version version of the schema
+   * @return the schema version or null if no such schema version
+   * @throws NoSuchObjectException no schema matching this name and version exists
+   * @throws MetaException general metastore error
+   * @throws TException general thrift error
+   */
+  SchemaVersion getSchemaVersion(String dbName, String schemaName, int version) throws TException;
+
+  /**
+   * Get the latest version of a schema.
+   * @param dbName database the schema is in
+   * @param schemaName name of the schema
+   * @return latest version of the schema or null if the schema does not exist or there are no
+   * versions of the schema.
+   * @throws NoSuchObjectException no versions of schema matching this name exist
+   * @throws MetaException general metastore error
+   * @throws TException general thrift error
+   */
+  SchemaVersion getSchemaLatestVersion(String dbName, String schemaName) throws TException;
+
+  /**
+   * Get all the extant versions of a schema.
+   * @param dbName database the schema is in
+   * @param schemaName name of the schema.
+   * @return list of all the schema versions or null if this schema does not exist or has no
+   * versions.
+   * @throws NoSuchObjectException no versions of schema matching this name exist
+   * @throws MetaException general metastore error
+   * @throws TException general thrift error
+   */
+  List<SchemaVersion> getSchemaAllVersions(String dbName, String schemaName) throws TException;
+
+  /**
+   * Drop a version of a schema.  Given that versions are supposed to be immutable you should
+   * think really hard before you call this method.  It should only be used for schema versions
+   * that were added in error and never referenced any data.
+   * @param dbName database the schema is in
+   * @param schemaName name of the schema
+   * @param version version of the schema
+   * @throws NoSuchObjectException no matching version of the schema could be found
+   * @throws MetaException general metastore error
+   * @throws TException general thrift error
+   */
+  void dropSchemaVersion(String dbName, String schemaName, int version) throws TException;
+
+  /**
+   * Find all schema versions that have columns that match a query.
+   * @param rqst query, this can include column names, namespaces (actually stored in the
+   *             description field in FieldSchema), and types.
+   * @return The (possibly empty) list of schema name/version pairs that match.
+   * @throws MetaException general metastore error
+   * @throws TException general thrift error
+   */
+  FindSchemasByColsResp getSchemaByCols(FindSchemasByColsRqst rqst) throws TException;
+
+  /**
+   * Map a schema version to a serde.  This mapping is one-to-one, thus this will destroy any
+   * previous mappings for this schema version.
+   * @param dbName database the schema is in
+   * @param schemaName name of the schema
+   * @param version version of the schema
+   * @param serdeName name of the serde
+   * @throws NoSuchObjectException no matching version of the schema could be found or no serde
+   * of the provided name could be found
+   * @throws MetaException general metastore error
+   * @throws TException general thrift error
+   */
+  void mapSchemaVersionToSerde(String dbName, String schemaName, int version, String serdeName) throws TException;
+
+  /**
+   * Set the state of a schema version.
+   * @param dbName database the schema is in
+   * @param schemaName name of the schema
+   * @param version version of the schema
+   * @param state state to set the schema to
+   * @throws NoSuchObjectException no matching version of the schema could be found
+   * @throws InvalidOperationException attempt to make a state change that is not valid
+   * @throws MetaException general metastore error
+   * @throws TException general thrift error
+   */
+  void setSchemaVersionState(String dbName, String schemaName, int version, SchemaVersionState state) throws TException;
+
+  /**
+   * Add a serde.  This is primarily intended for use with SchemaRegistry objects, since serdes
+   * are automatically added when needed as part of creating and altering tables and partitions.
+   * @param serDeInfo serde to add
+   * @throws AlreadyExistsException serde of this name already exists
+   * @throws MetaException general metastore error
+   * @throws TException general thrift error
+   */
+  void addSerDe(SerDeInfo serDeInfo) throws TException;
+
+  /**
+   * Fetch a serde.  This is primarily intended for use with SchemaRegistry objects, since serdes
+   * are automatically fetched along with other information for tables and partitions.
+   * @param serDeName name of the serde
+   * @return the serde.
+   * @throws NoSuchObjectException no serde with this name exists.
+   * @throws MetaException general metastore error
+   * @throws TException general thrift error
+   */
+  SerDeInfo getSerDe(String serDeName) throws TException;
 }

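Taken together, the new IMetaStoreClient methods support a create-schema / add-version / query flow along the following lines. This is a usage sketch only: the Thrift beans are filled in through their default constructors and setters, and the enum constants used (SchemaType.AVRO, SchemaCompatibility.BACKWARD, SchemaValidation.LATEST, SchemaVersionState.ENABLED) are assumed spellings rather than values taken from this patch:

    // Sketch only; "client" is an already-connected IMetaStoreClient.
    void schemaRegistryExample(IMetaStoreClient client) throws TException {
      ISchema schema = new ISchema();
      schema.setSchemaType(SchemaType.AVRO);                 // assumed constant
      schema.setDbName("default");
      schema.setName("weblogs");
      schema.setCompatibility(SchemaCompatibility.BACKWARD); // assumed constant
      schema.setValidationLevel(SchemaValidation.LATEST);    // assumed constant
      schema.setCanEvolve(true);
      client.createISchema(schema);

      SchemaVersion v1 = new SchemaVersion();
      v1.setSchema(new ISchemaName("default", "weblogs"));
      v1.setVersion(1);
      v1.setCols(Collections.singletonList(new FieldSchema("host", "string", "")));
      client.addSchemaVersion(v1);

      // Fetch the latest version back and move it through its lifecycle.
      SchemaVersion latest = client.getSchemaLatestVersion("default", "weblogs");
      client.setSchemaVersionState("default", "weblogs", latest.getVersion(),
          SchemaVersionState.ENABLED);                       // assumed constant
    }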
http://git-wip-us.apache.org/repos/asf/hive/blob/12041d39/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/MetaStoreEventListener.java
----------------------------------------------------------------------
diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/MetaStoreEventListener.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/MetaStoreEventListener.java
index 88d22d6..67600e1 100644
--- a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/MetaStoreEventListener.java
+++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/MetaStoreEventListener.java
b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/MetaStoreEventListener.java
@@ -26,19 +26,25 @@ import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.events.AddForeignKeyEvent;
 import org.apache.hadoop.hive.metastore.events.AddNotNullConstraintEvent;
 import org.apache.hadoop.hive.metastore.events.AddPrimaryKeyEvent;
+import org.apache.hadoop.hive.metastore.events.AddSchemaVersionEvent;
 import org.apache.hadoop.hive.metastore.events.AddUniqueConstraintEvent;
 import org.apache.hadoop.hive.metastore.events.AlterDatabaseEvent;
+import org.apache.hadoop.hive.metastore.events.AlterISchemaEvent;
 import org.apache.hadoop.hive.metastore.events.AddPartitionEvent;
 import org.apache.hadoop.hive.metastore.events.AlterPartitionEvent;
+import org.apache.hadoop.hive.metastore.events.AlterSchemaVersionEvent;
 import org.apache.hadoop.hive.metastore.events.AlterTableEvent;
 import org.apache.hadoop.hive.metastore.events.ConfigChangeEvent;
 import org.apache.hadoop.hive.metastore.events.CreateDatabaseEvent;
 import org.apache.hadoop.hive.metastore.events.CreateFunctionEvent;
+import org.apache.hadoop.hive.metastore.events.CreateISchemaEvent;
 import org.apache.hadoop.hive.metastore.events.CreateTableEvent;
 import org.apache.hadoop.hive.metastore.events.DropConstraintEvent;
 import org.apache.hadoop.hive.metastore.events.DropDatabaseEvent;
 import org.apache.hadoop.hive.metastore.events.DropFunctionEvent;
+import org.apache.hadoop.hive.metastore.events.DropISchemaEvent;
 import org.apache.hadoop.hive.metastore.events.DropPartitionEvent;
+import org.apache.hadoop.hive.metastore.events.DropSchemaVersionEvent;
 import org.apache.hadoop.hive.metastore.events.DropTableEvent;
 import org.apache.hadoop.hive.metastore.events.InsertEvent;
 import org.apache.hadoop.hive.metastore.events.LoadPartitionDoneEvent;
@@ -196,6 +202,26 @@ public abstract class MetaStoreEventListener implements Configurable {
   public void onDropConstraint(DropConstraintEvent dropConstraintEvent) throws MetaException {
   }
 
+  public void onCreateISchema(CreateISchemaEvent createISchemaEvent) throws MetaException {
+  }
+
+  public void onAlterISchema(AlterISchemaEvent alterISchemaEvent) throws MetaException {
+  }
+
+  public void onDropISchema(DropISchemaEvent dropISchemaEvent) throws MetaException {
+  }
+
+  public void onAddSchemaVersion(AddSchemaVersionEvent addSchemaVersionEvent) throws MetaException {
+  }
+
+  public void onAlterSchemaVersion(AlterSchemaVersionEvent alterSchemaVersionEvent)
+      throws MetaException {
+  }
+
+  public void onDropSchemaVersion(DropSchemaVersionEvent dropSchemaVersionEvent)
+      throws MetaException {
+  }
+
   @Override
   public Configuration getConf() {
     return this.conf;

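A listener picks these events up by overriding the new no-op hooks. A minimal sketch of such a subscriber follows; the class name and the accessor names on the event objects (getSchema(), getSchemaVersion()) are assumptions for illustration, not part of this patch:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.metastore.MetaStoreEventListener;
    import org.apache.hadoop.hive.metastore.api.MetaException;
    import org.apache.hadoop.hive.metastore.events.AddSchemaVersionEvent;
    import org.apache.hadoop.hive.metastore.events.CreateISchemaEvent;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class SchemaAuditListener extends MetaStoreEventListener {
      private static final Logger LOG = LoggerFactory.getLogger(SchemaAuditListener.class);

      public SchemaAuditListener(Configuration conf) {
        super(conf);
      }

      @Override
      public void onCreateISchema(CreateISchemaEvent event) throws MetaException {
        LOG.info("ISchema created: {}", event.getSchema().getName());          // accessor assumed
      }

      @Override
      public void onAddSchemaVersion(AddSchemaVersionEvent event) throws MetaException {
        LOG.info("Schema version added: {}", event.getSchemaVersion().getVersion()); // accessor assumed
      }
    }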
http://git-wip-us.apache.org/repos/asf/hive/blob/12041d39/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/MetaStoreListenerNotifier.java
----------------------------------------------------------------------
diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/MetaStoreListenerNotifier.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/MetaStoreListenerNotifier.java
index af01e8a..f5a91b4 100644
--- a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/MetaStoreListenerNotifier.java
+++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/MetaStoreListenerNotifier.java
b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/MetaStoreListenerNotifier.java
@@ -28,16 +28,22 @@ import org.apache.hadoop.hive.metastore.events.AddForeignKeyEvent;
 import org.apache.hadoop.hive.metastore.events.AddNotNullConstraintEvent;
 import org.apache.hadoop.hive.metastore.events.AddPartitionEvent;
 import org.apache.hadoop.hive.metastore.events.AddPrimaryKeyEvent;
+import org.apache.hadoop.hive.metastore.events.AddSchemaVersionEvent;
 import org.apache.hadoop.hive.metastore.events.AddUniqueConstraintEvent;
 import org.apache.hadoop.hive.metastore.events.AlterDatabaseEvent;
+import org.apache.hadoop.hive.metastore.events.AlterISchemaEvent;
 import org.apache.hadoop.hive.metastore.events.AlterPartitionEvent;
+import org.apache.hadoop.hive.metastore.events.AlterSchemaVersionEvent;
 import org.apache.hadoop.hive.metastore.events.AlterTableEvent;
 import org.apache.hadoop.hive.metastore.events.CreateDatabaseEvent;
 import org.apache.hadoop.hive.metastore.events.CreateFunctionEvent;
+import org.apache.hadoop.hive.metastore.events.CreateISchemaEvent;
 import org.apache.hadoop.hive.metastore.events.CreateTableEvent;
 import org.apache.hadoop.hive.metastore.events.DropDatabaseEvent;
 import org.apache.hadoop.hive.metastore.events.DropFunctionEvent;
+import org.apache.hadoop.hive.metastore.events.DropISchemaEvent;
 import org.apache.hadoop.hive.metastore.events.DropPartitionEvent;
+import org.apache.hadoop.hive.metastore.events.DropSchemaVersionEvent;
 import org.apache.hadoop.hive.metastore.events.DropTableEvent;
 import org.apache.hadoop.hive.metastore.events.InsertEvent;
 import org.apache.hadoop.hive.metastore.events.ListenerEvent;
@@ -158,6 +164,42 @@ public class MetaStoreListenerNotifier {
               listener.onAddNotNullConstraint((AddNotNullConstraintEvent)event);
             }
           })
+          .put(EventType.CREATE_ISCHEMA, new EventNotifier() {
+            @Override
+            public void notify(MetaStoreEventListener listener, ListenerEvent event) throws MetaException {
+              listener.onCreateISchema((CreateISchemaEvent)event);
+            }
+          })
+          .put(EventType.ALTER_ISCHEMA, new EventNotifier() {
+            @Override
+            public void notify(MetaStoreEventListener listener, ListenerEvent event) throws MetaException {
+              listener.onAlterISchema((AlterISchemaEvent)event);
+            }
+          })
+          .put(EventType.DROP_ISCHEMA, new EventNotifier() {
+            @Override
+            public void notify(MetaStoreEventListener listener, ListenerEvent event) throws MetaException {
+              listener.onDropISchema((DropISchemaEvent)event);
+            }
+          })
+          .put(EventType.ADD_SCHEMA_VERSION, new EventNotifier() {
+            @Override
+            public void notify(MetaStoreEventListener listener, ListenerEvent event) throws MetaException {
+              listener.onAddSchemaVersion((AddSchemaVersionEvent) event);
+            }
+          })
+          .put(EventType.ALTER_SCHEMA_VERSION, new EventNotifier() {
+            @Override
+            public void notify(MetaStoreEventListener listener, ListenerEvent event) throws MetaException {
+              listener.onAlterSchemaVersion((AlterSchemaVersionEvent) event);
+            }
+          })
+          .put(EventType.DROP_SCHEMA_VERSION, new EventNotifier() {
+            @Override
+            public void notify(MetaStoreEventListener listener, ListenerEvent event) throws MetaException {
+              listener.onDropSchemaVersion((DropSchemaVersionEvent) event);
+            }
+          })
           .build()
   );
 

http://git-wip-us.apache.org/repos/asf/hive/blob/12041d39/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java
----------------------------------------------------------------------
diff --git a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java
index 3633c03..1f75105 100644
--- a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java
+++ b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java
b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/ObjectStore.java
@@ -92,6 +92,8 @@ import org.apache.hadoop.hive.metastore.api.FunctionType;
 import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
 import org.apache.hadoop.hive.metastore.api.HiveObjectRef;
 import org.apache.hadoop.hive.metastore.api.HiveObjectType;
+import org.apache.hadoop.hive.metastore.api.ISchema;
+import org.apache.hadoop.hive.metastore.api.ISchemaName;
 import org.apache.hadoop.hive.metastore.api.InvalidInputException;
 import org.apache.hadoop.hive.metastore.api.InvalidObjectException;
 import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
@@ -121,7 +123,14 @@ import org.apache.hadoop.hive.metastore.api.SQLForeignKey;
 import org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint;
 import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey;
 import org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint;
+import org.apache.hadoop.hive.metastore.api.SchemaCompatibility;
+import org.apache.hadoop.hive.metastore.api.SchemaType;
+import org.apache.hadoop.hive.metastore.api.SchemaValidation;
+import org.apache.hadoop.hive.metastore.api.SchemaVersion;
+import org.apache.hadoop.hive.metastore.api.SchemaVersionDescriptor;
+import org.apache.hadoop.hive.metastore.api.SchemaVersionState;
 import org.apache.hadoop.hive.metastore.api.SerDeInfo;
+import org.apache.hadoop.hive.metastore.api.SerdeType;
 import org.apache.hadoop.hive.metastore.api.SkewedInfo;
 import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
 import org.apache.hadoop.hive.metastore.api.Table;
@@ -155,6 +164,7 @@ import org.apache.hadoop.hive.metastore.model.MDelegationToken;
 import org.apache.hadoop.hive.metastore.model.MFieldSchema;
 import org.apache.hadoop.hive.metastore.model.MFunction;
 import org.apache.hadoop.hive.metastore.model.MGlobalPrivilege;
+import org.apache.hadoop.hive.metastore.model.MISchema;
 import org.apache.hadoop.hive.metastore.model.MMasterKey;
 import org.apache.hadoop.hive.metastore.model.MMetastoreDBProperties;
 import org.apache.hadoop.hive.metastore.model.MNotificationLog;
@@ -168,6 +178,7 @@ import org.apache.hadoop.hive.metastore.model.MPartitionPrivilege;
 import org.apache.hadoop.hive.metastore.model.MResourceUri;
 import org.apache.hadoop.hive.metastore.model.MRole;
 import org.apache.hadoop.hive.metastore.model.MRoleMap;
+import org.apache.hadoop.hive.metastore.model.MSchemaVersion;
 import org.apache.hadoop.hive.metastore.model.MSerDeInfo;
 import org.apache.hadoop.hive.metastore.model.MStorageDescriptor;
 import org.apache.hadoop.hive.metastore.model.MStringList;
@@ -1780,15 +1791,22 @@ public class ObjectStore implements RawStore, Configurable {
     if (ms == null) {
       throw new MetaException("Invalid SerDeInfo object");
     }
-    return new SerDeInfo(ms.getName(), ms.getSerializationLib(), convertMap(ms.getParameters()));
+    SerDeInfo serde =
+        new SerDeInfo(ms.getName(), ms.getSerializationLib(), convertMap(ms.getParameters()));
+    if (ms.getDescription() != null) serde.setDescription(ms.getDescription());
+    if (ms.getSerializerClass() != null) serde.setSerializerClass(ms.getSerializerClass());
+    if (ms.getDeserializerClass() != null) serde.setDeserializerClass(ms.getDeserializerClass());
+    if (ms.getSerdeType() > 0) serde.setSerdeType(SerdeType.findByValue(ms.getSerdeType()));
+    return serde;
   }
 
   private MSerDeInfo convertToMSerDeInfo(SerDeInfo ms) throws MetaException {
     if (ms == null) {
       throw new MetaException("Invalid SerDeInfo object");
     }
-    return new MSerDeInfo(ms.getName(), ms.getSerializationLib(), ms
-        .getParameters());
+    return new MSerDeInfo(ms.getName(), ms.getSerializationLib(), ms.getParameters(),
+        ms.getDescription(), ms.getSerializerClass(), ms.getDeserializerClass(),
+        ms.getSerdeType() == null ? 0 : ms.getSerdeType().getValue());
   }
 
   /**
@@ -9569,6 +9587,408 @@ public class ObjectStore implements RawStore, Configurable {
     }
   }
 
+  @Override
+  public void createISchema(ISchema schema) throws AlreadyExistsException, MetaException,
+      NoSuchObjectException {
+    boolean committed = false;
+    MISchema mSchema = convertToMISchema(schema);
+    try {
+      openTransaction();
+      if (getMISchema(schema.getDbName(), schema.getName()) != null) {
+        throw new AlreadyExistsException("Schema with name " + schema.getDbName() + "." +
+            schema.getName() + " already exists");
+      }
+      pm.makePersistent(mSchema);
+      committed = commitTransaction();
+    } finally {
+      if (!committed) rollbackTransaction();
+    }
+  }
+
+  @Override
+  public void alterISchema(ISchemaName schemaName, ISchema newSchema)
+      throws NoSuchObjectException, MetaException {
+    boolean committed = false;
+    try {
+      openTransaction();
+      MISchema oldMSchema = getMISchema(schemaName.getDbName(), schemaName.getSchemaName());
+      if (oldMSchema == null) {
+        throw new NoSuchObjectException("Schema " + schemaName + " does not exist");
+      }
+
+      // Don't support changing name or type
+      oldMSchema.setCompatibility(newSchema.getCompatibility().getValue());
+      oldMSchema.setValidationLevel(newSchema.getValidationLevel().getValue());
+      oldMSchema.setCanEvolve(newSchema.isCanEvolve());
+      if (newSchema.isSetSchemaGroup()) oldMSchema.setSchemaGroup(newSchema.getSchemaGroup());
+      if (newSchema.isSetDescription()) oldMSchema.setDescription(newSchema.getDescription());
+      committed = commitTransaction();
+    } finally {
+      if (!committed) rollbackTransaction();
+    }
+  }
+
+  @Override
+  public ISchema getISchema(ISchemaName schemaName) throws MetaException {
+    boolean committed = false;
+    try {
+      openTransaction();
+      ISchema schema = convertToISchema(getMISchema(schemaName.getDbName(), schemaName.getSchemaName()));
+      committed = commitTransaction();
+      return schema;
+    } finally {
+      if (!committed) rollbackTransaction();
+    }
+  }
+
+  private MISchema getMISchema(String dbName, String name) {
+    Query query = null;
+    try {
+      name = normalizeIdentifier(name);
+      dbName = normalizeIdentifier(dbName);
+      query = pm.newQuery(MISchema.class, "name == schemaName && db.name == dbname");
+      query.declareParameters("java.lang.String schemaName, java.lang.String dbname");
+      query.setUnique(true);
+      MISchema mSchema = (MISchema)query.execute(name, dbName);
+      pm.retrieve(mSchema);
+      return mSchema;
+    } finally {
+      if (query != null) query.closeAll();
+    }
+  }
+
+  @Override
+  public void dropISchema(ISchemaName schemaName) throws NoSuchObjectException, MetaException {
+    boolean committed = false;
+    try {
+      openTransaction();
+      MISchema mSchema = getMISchema(schemaName.getDbName(), schemaName.getSchemaName());
+      if (mSchema != null) {
+        pm.deletePersistentAll(mSchema);
+      } else {
+        throw new NoSuchObjectException("Schema " + schemaName + " does not exist");
+      }
+      committed = commitTransaction();
+    } finally {
+      if (!committed) rollbackTransaction();
+    }
+  }
+
+  @Override
+  public void addSchemaVersion(SchemaVersion schemaVersion)
+      throws AlreadyExistsException, NoSuchObjectException, MetaException {
+    boolean committed = false;
+    MSchemaVersion mSchemaVersion = convertToMSchemaVersion(schemaVersion);
+    try {
+      openTransaction();
+      // Make sure it doesn't already exist
+      if (getMSchemaVersion(schemaVersion.getSchema().getDbName(),
+          schemaVersion.getSchema().getSchemaName(), schemaVersion.getVersion()) != null) {
+        throw new AlreadyExistsException("Schema name " + schemaVersion.getSchema() +
+            " version " + schemaVersion.getVersion() + " already exists");
+      }
+      // Make sure the referenced Schema exists
+      if (getMISchema(schemaVersion.getSchema().getDbName(), schemaVersion.getSchema().getSchemaName()) == null) {
+        throw new NoSuchObjectException("Schema " + schemaVersion.getSchema() + " does not exist");
+      }
+      pm.makePersistent(mSchemaVersion);
+      committed = commitTransaction();
+    } finally {
+      if (!committed) rollbackTransaction();
+    }
+  }
+
+  @Override
+  public void alterSchemaVersion(SchemaVersionDescriptor version, SchemaVersion newVersion)
+      throws NoSuchObjectException, MetaException {
+    boolean committed = false;
+    try {
+      openTransaction();
+      MSchemaVersion oldMSchemaVersion = 
getMSchemaVersion(version.getSchema().getDbName(),
+          version.getSchema().getSchemaName(), version.getVersion());
+      if (oldMSchemaVersion == null) {
+        throw new NoSuchObjectException("No schema version " + version + " 
exists");
+      }
+
+      // We only support changing the SerDe mapping and the state.
+      if (newVersion.isSetSerDe()) 
oldMSchemaVersion.setSerDe(convertToMSerDeInfo(newVersion.getSerDe()));
+      if (newVersion.isSetState()) 
oldMSchemaVersion.setState(newVersion.getState().getValue());
+      committed = commitTransaction();
+    } finally {
+      if (!committed) rollbackTransaction();
+    }
+  }
+
+  @Override
+  public SchemaVersion getSchemaVersion(SchemaVersionDescriptor version) 
throws MetaException {
+    boolean committed = false;
+    try {
+      openTransaction();
+      SchemaVersion schemaVersion =
+          
convertToSchemaVersion(getMSchemaVersion(version.getSchema().getDbName(),
+              version.getSchema().getSchemaName(), version.getVersion()));
+      committed = commitTransaction();
+      return schemaVersion;
+    } finally {
+      if (!committed) rollbackTransaction();
+    }
+  }
+
+  private MSchemaVersion getMSchemaVersion(String dbName, String schemaName, 
int version) {
+    Query query = null;
+    try {
+      dbName = normalizeIdentifier(dbName);
+      schemaName = normalizeIdentifier(schemaName);
+      query = pm.newQuery(MSchemaVersion.class,
+          "iSchema.name == schemaName && iSchema.db.name == dbName && version 
== schemaVersion");
+      query.declareParameters(
+          "java.lang.String schemaName, java.lang.String dbName, 
java.lang.Integer schemaVersion");
+      query.setUnique(true);
+      MSchemaVersion mSchemaVersion = 
(MSchemaVersion)query.execute(schemaName, dbName, version);
+      pm.retrieve(mSchemaVersion);
+      if (mSchemaVersion != null) {
+        pm.retrieveAll(mSchemaVersion.getCols());
+        if (mSchemaVersion.getSerDe() != null) 
pm.retrieve(mSchemaVersion.getSerDe());
+      }
+      return mSchemaVersion;
+    } finally {
+      if (query != null) query.closeAll();
+    }
+  }
+
+  @Override
+  public SchemaVersion getLatestSchemaVersion(ISchemaName schemaName) throws 
MetaException {
+    boolean committed = false;
+    Query query = null;
+    try {
+      openTransaction();
+      String name = normalizeIdentifier(schemaName.getSchemaName());
+      String dbName = normalizeIdentifier(schemaName.getDbName());
+      query = pm.newQuery(MSchemaVersion.class,
+          "iSchema.name == schemaName && iSchema.db.name == dbName");
+      query.declareParameters("java.lang.String schemaName, java.lang.String 
dbName");
+      query.setUnique(true);
+      query.setOrdering("version descending");
+      query.setRange(0, 1);
+      MSchemaVersion mSchemaVersion = (MSchemaVersion)query.execute(name, 
dbName);
+      pm.retrieve(mSchemaVersion);
+      if (mSchemaVersion != null) {
+        pm.retrieveAll(mSchemaVersion.getCols());
+        if (mSchemaVersion.getSerDe() != null) 
pm.retrieve(mSchemaVersion.getSerDe());
+      }
+      SchemaVersion version = mSchemaVersion == null ? null : 
convertToSchemaVersion(mSchemaVersion);
+      committed = commitTransaction();
+      return version;
+    } finally {
+      rollbackAndCleanup(committed, query);
+    }
+  }
+
+  @Override
+  public List<SchemaVersion> getAllSchemaVersion(ISchemaName schemaName) 
throws MetaException {
+    boolean committed = false;
+    Query query = null;
+    try {
+      openTransaction();
+      String name = normalizeIdentifier(schemaName.getSchemaName());
+      String dbName = normalizeIdentifier(schemaName.getDbName());
+      query = pm.newQuery(MSchemaVersion.class,
+          "iSchema.name == schemaName && iSchema.db.name == dbName");
+      query.declareParameters("java.lang.String schemaName, java.lang.String 
dbName");
+      query.setOrdering("version descending");
+      List<MSchemaVersion> mSchemaVersions = query.setParameters(name, 
dbName).executeList();
+      pm.retrieveAll(mSchemaVersions);
+      if (mSchemaVersions == null || mSchemaVersions.isEmpty()) return null;
+      List<SchemaVersion> schemaVersions = new 
ArrayList<>(mSchemaVersions.size());
+      for (MSchemaVersion mSchemaVersion : mSchemaVersions) {
+        pm.retrieveAll(mSchemaVersion.getCols());
+        if (mSchemaVersion.getSerDe() != null) 
pm.retrieve(mSchemaVersion.getSerDe());
+        schemaVersions.add(convertToSchemaVersion(mSchemaVersion));
+      }
+      committed = commitTransaction();
+      return schemaVersions;
+    } finally {
+      rollbackAndCleanup(committed, query);
+    }
+  }
+
+  @Override
+  public List<SchemaVersion> getSchemaVersionsByColumns(String colName, String 
colNamespace,
+                                                        String type) throws 
MetaException {
+    if (colName == null && colNamespace == null) {
+      // Don't allow a query that returns everything; it could scan every schema version.
+      throw new MetaException("You must specify a column name or column namespace, " +
+          "otherwise the query may be too large");
+    }
+    boolean committed = false;
+    Query query = null;
+    try {
+      openTransaction();
+      if (colName != null) colName = normalizeIdentifier(colName);
+      if (type != null) type = normalizeIdentifier(type);
+      Map<String, String> parameters = new HashMap<>(3);
+      StringBuilder sql = new StringBuilder("select SCHEMA_VERSION_ID from " +
+          "SCHEMA_VERSION, COLUMNS_V2 where SCHEMA_VERSION.CD_ID = 
COLUMNS_V2.CD_ID ");
+      if (colName != null) {
+        sql.append("and COLUMNS_V2.COLUMN_NAME = :colName ");
+        parameters.put("colName", colName);
+      }
+      if (colNamespace != null) {
+        sql.append("and COLUMNS_V2.COMMENT = :colComment ");
+        parameters.put("colComment", colNamespace);
+      }
+      if (type != null) {
+        sql.append("and COLUMNS_V2.TYPE_NAME = :colType ");
+        parameters.put("colType", type);
+      }
+      if (LOG.isDebugEnabled()) {
+        LOG.debug("getSchemaVersionsByColumns going to execute query " + 
sql.toString());
+        LOG.debug("With parameters");
+        for (Map.Entry<String, String> p : parameters.entrySet()) {
+          LOG.debug(p.getKey() + " : " + p.getValue());
+        }
+      }
+      query = pm.newQuery("javax.jdo.query.SQL", sql.toString());
+      query.setClass(MSchemaVersion.class);
+      List<MSchemaVersion> mSchemaVersions = 
query.setNamedParameters(parameters).executeList();
+      if (mSchemaVersions == null || mSchemaVersions.isEmpty()) return 
Collections.emptyList();
+      pm.retrieveAll(mSchemaVersions);
+      List<SchemaVersion> schemaVersions = new 
ArrayList<>(mSchemaVersions.size());
+      for (MSchemaVersion mSchemaVersion : mSchemaVersions) {
+        pm.retrieveAll(mSchemaVersion.getCols());
+        if (mSchemaVersion.getSerDe() != null) 
pm.retrieve(mSchemaVersion.getSerDe());
+        schemaVersions.add(convertToSchemaVersion(mSchemaVersion));
+      }
+      committed = commitTransaction();
+      return schemaVersions;
+    } finally {
+      rollbackAndCleanup(committed, query);
+    }
+  }
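
To make the dynamic SQL above concrete, here is a hedged caller-side sketch of a search by column name and type only (namespace left null), with the statement the method composes for that combination shown in the comment; the class and method names are illustrative.

import java.util.List;
import org.apache.hadoop.hive.metastore.RawStore;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.SchemaVersion;

public class SchemaVersionColumnSearchExample {
  // Finds every SchemaVersion that carries a column named "id" of type "string".
  // With colNamespace == null, the method composes:
  //   select SCHEMA_VERSION_ID from SCHEMA_VERSION, COLUMNS_V2
  //   where SCHEMA_VERSION.CD_ID = COLUMNS_V2.CD_ID
  //     and COLUMNS_V2.COLUMN_NAME = :colName and COLUMNS_V2.TYPE_NAME = :colType
  static List<SchemaVersion> findIdColumns(RawStore store) throws MetaException {
    return store.getSchemaVersionsByColumns("id", null, "string");
  }
}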
+
+  @Override
+  public void dropSchemaVersion(SchemaVersionDescriptor version) throws 
NoSuchObjectException,
+      MetaException {
+    boolean committed = false;
+    try {
+      openTransaction();
+      MSchemaVersion mSchemaVersion = 
getMSchemaVersion(version.getSchema().getDbName(),
+          version.getSchema().getSchemaName(), version.getVersion());
+      if (mSchemaVersion != null) {
+        pm.deletePersistentAll(mSchemaVersion);
+      } else {
+        throw new NoSuchObjectException("Schema version " + version + "does 
not exist");
+      }
+      committed = commitTransaction();
+    } finally {
+      if (!committed) rollbackTransaction();
+    }
+  }
+
+  @Override
+  public SerDeInfo getSerDeInfo(String serDeName) throws 
NoSuchObjectException, MetaException {
+    boolean committed = false;
+    try {
+      openTransaction();
+      MSerDeInfo mSerDeInfo = getMSerDeInfo(serDeName);
+      if (mSerDeInfo == null) {
+        throw new NoSuchObjectException("No SerDe named " + serDeName);
+      }
+      SerDeInfo serde = convertToSerDeInfo(mSerDeInfo);
+      committed = commitTransaction();
+      return serde;
+    } finally {
+      if (!committed) rollbackTransaction();
+    }
+  }
+
+  private MSerDeInfo getMSerDeInfo(String serDeName) throws MetaException {
+    Query query = null;
+    try {
+      query = pm.newQuery(MSerDeInfo.class, "name == serDeName");
+      query.declareParameters("java.lang.String serDeName");
+      query.setUnique(true);
+      MSerDeInfo mSerDeInfo = (MSerDeInfo)query.execute(serDeName);
+      pm.retrieve(mSerDeInfo);
+      return mSerDeInfo;
+    } finally {
+      if (query != null) query.closeAll();
+    }
+  }
+
+  @Override
+  public void addSerde(SerDeInfo serde) throws AlreadyExistsException, 
MetaException {
+    boolean committed = false;
+    try {
+      openTransaction();
+      if (getMSerDeInfo(serde.getName()) != null) {
+        throw new AlreadyExistsException("Serde with name " + serde.getName() 
+ " already exists");
+      }
+      MSerDeInfo mSerde = convertToMSerDeInfo(serde);
+      pm.makePersistent(mSerde);
+      committed = commitTransaction();
+    } finally {
+      if (!committed) rollbackTransaction();
+    }
+  }
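
A small usage sketch for the serde path, assuming a RawStore handle; the serde name and the AvroSerDe class are illustrative values, not something this patch mandates.

import java.util.Collections;
import org.apache.hadoop.hive.metastore.RawStore;
import org.apache.hadoop.hive.metastore.api.SerDeInfo;

public class StandaloneSerdeExample {
  // Registers a serde that is not attached to any table, then reads it back by name.
  static SerDeInfo registerAndFetch(RawStore store) throws Exception {
    SerDeInfo serde = new SerDeInfo("schema-avro-serde",
        "org.apache.hadoop.hive.serde2.avro.AvroSerDe",
        Collections.emptyMap());
    store.addSerde(serde);                          // AlreadyExistsException on a duplicate name
    return store.getSerDeInfo("schema-avro-serde"); // NoSuchObjectException if it is missing
  }
}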
+
+  private MISchema convertToMISchema(ISchema schema) throws 
NoSuchObjectException {
+    return new MISchema(schema.getSchemaType().getValue(),
+                        normalizeIdentifier(schema.getName()),
+                        getMDatabase(schema.getDbName()),
+                        schema.getCompatibility().getValue(),
+                        schema.getValidationLevel().getValue(),
+                        schema.isCanEvolve(),
+                        schema.isSetSchemaGroup() ? schema.getSchemaGroup() : 
null,
+                        schema.isSetDescription() ? schema.getDescription() : 
null);
+  }
+
+  private ISchema convertToISchema(MISchema mSchema) {
+    if (mSchema == null) return null;
+    ISchema schema = new 
ISchema(SchemaType.findByValue(mSchema.getSchemaType()),
+                                 mSchema.getName(),
+                                 mSchema.getDb().getName(),
+                                 
SchemaCompatibility.findByValue(mSchema.getCompatibility()),
+                                 
SchemaValidation.findByValue(mSchema.getValidationLevel()),
+                                 mSchema.getCanEvolve());
+    if (mSchema.getDescription() != null) 
schema.setDescription(mSchema.getDescription());
+    if (mSchema.getSchemaGroup() != null) 
schema.setSchemaGroup(mSchema.getSchemaGroup());
+    return schema;
+  }
+
+  private MSchemaVersion convertToMSchemaVersion(SchemaVersion schemaVersion) 
throws MetaException {
+    return new 
MSchemaVersion(getMISchema(normalizeIdentifier(schemaVersion.getSchema().getDbName()),
+          normalizeIdentifier(schemaVersion.getSchema().getSchemaName())),
+        schemaVersion.getVersion(),
+        schemaVersion.getCreatedAt(),
+        
createNewMColumnDescriptor(convertToMFieldSchemas(schemaVersion.getCols())),
+        schemaVersion.isSetState() ? schemaVersion.getState().getValue() : 0,
+        schemaVersion.isSetDescription() ? schemaVersion.getDescription() : 
null,
+        schemaVersion.isSetSchemaText() ? schemaVersion.getSchemaText() : null,
+        schemaVersion.isSetFingerprint() ? schemaVersion.getFingerprint() : 
null,
+        schemaVersion.isSetName() ? schemaVersion.getName() : null,
+        schemaVersion.isSetSerDe() ? 
convertToMSerDeInfo(schemaVersion.getSerDe()) : null);
+  }
+
+  private SchemaVersion convertToSchemaVersion(MSchemaVersion mSchemaVersion) 
throws MetaException {
+    if (mSchemaVersion == null) return null;
+    SchemaVersion schemaVersion = new SchemaVersion(
+        new ISchemaName(mSchemaVersion.getiSchema().getDb().getName(),
+            mSchemaVersion.getiSchema().getName()),
+        mSchemaVersion.getVersion(),
+        mSchemaVersion.getCreatedAt(),
+        convertToFieldSchemas(mSchemaVersion.getCols().getCols()));
+    if (mSchemaVersion.getState() > 0) 
schemaVersion.setState(SchemaVersionState.findByValue(mSchemaVersion.getState()));
+    if (mSchemaVersion.getDescription() != null) 
schemaVersion.setDescription(mSchemaVersion.getDescription());
+    if (mSchemaVersion.getSchemaText() != null) 
schemaVersion.setSchemaText(mSchemaVersion.getSchemaText());
+    if (mSchemaVersion.getFingerprint() != null) 
schemaVersion.setFingerprint(mSchemaVersion.getFingerprint());
+    if (mSchemaVersion.getName() != null) 
schemaVersion.setName(mSchemaVersion.getName());
+    if (mSchemaVersion.getSerDe() != null) 
schemaVersion.setSerDe(convertToSerDeInfo(mSchemaVersion.getSerDe()));
+    return schemaVersion;
+  }
+
   /**
    * This is a cleanup method which is used to rollback a active transaction
    * if the success flag is false and close the associated Query object. This 
method is used

http://git-wip-us.apache.org/repos/asf/hive/blob/12041d39/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/RawStore.java
----------------------------------------------------------------------
diff --git 
a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/RawStore.java
 
b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/RawStore.java
index 40a7497..b079f8b 100644
--- 
a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/RawStore.java
+++ 
b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/RawStore.java
@@ -18,7 +18,10 @@
 
 package org.apache.hadoop.hive.metastore;
 
+import org.apache.hadoop.hive.metastore.api.AlterISchemaRequest;
 import org.apache.hadoop.hive.metastore.api.CreationMetadata;
+import org.apache.hadoop.hive.metastore.api.ISchemaName;
+import org.apache.hadoop.hive.metastore.api.SchemaVersionDescriptor;
 import org.apache.hadoop.hive.metastore.api.WMFullResourcePlan;
 
 import java.lang.annotation.ElementType;
@@ -40,6 +43,7 @@ import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.FileMetadataExprType;
 import org.apache.hadoop.hive.metastore.api.Function;
 import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
+import org.apache.hadoop.hive.metastore.api.ISchema;
 import org.apache.hadoop.hive.metastore.api.InvalidInputException;
 import org.apache.hadoop.hive.metastore.api.InvalidObjectException;
 import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
@@ -69,6 +73,8 @@ import org.apache.hadoop.hive.metastore.api.SQLForeignKey;
 import org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint;
 import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey;
 import org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint;
+import org.apache.hadoop.hive.metastore.api.SchemaVersion;
+import org.apache.hadoop.hive.metastore.api.SerDeInfo;
 import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.hadoop.hive.metastore.api.TableMeta;
 import org.apache.hadoop.hive.metastore.api.Type;
@@ -805,4 +811,133 @@ public interface RawStore extends Configurable {
 
   void dropWMTriggerToPoolMapping(String resourcePlanName, String triggerName, 
String poolPath)
       throws NoSuchObjectException, InvalidOperationException, MetaException;
+
+  /**
+   * Create a new ISchema.
+   * @param schema schema to create
+   * @throws AlreadyExistsException there's already a schema with this name
+   * @throws NoSuchObjectException the database this schema is to be stored in does not exist
+   * @throws MetaException general database exception
+   */
+  void createISchema(ISchema schema) throws AlreadyExistsException, 
MetaException,
+      NoSuchObjectException;
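
A hedged sketch of creating a schema through this call, using the ISchemaBuilder added later in this patch; the AVRO constant and the schema name are assumptions for illustration.

import org.apache.hadoop.hive.metastore.RawStore;
import org.apache.hadoop.hive.metastore.api.ISchema;
import org.apache.hadoop.hive.metastore.api.SchemaType;
import org.apache.hadoop.hive.metastore.client.builder.ISchemaBuilder;

public class CreateISchemaExample {
  // Creates an Avro schema named "events" in the default database, relying on the
  // builder defaults (BACKWARD compatibility, ALL validation, canEvolve = true).
  static void createEventsSchema(RawStore store) throws Exception {
    ISchema schema = new ISchemaBuilder()
        .setSchemaType(SchemaType.AVRO)   // assumed enum constant
        .setName("events")
        .build();
    store.createISchema(schema);          // NoSuchObjectException if the database is missing
  }
}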
+
+  /**
+   * Alter an existing ISchema.  This assumes the caller has already checked 
that such a schema
+   * exists.
+   * @param schemaName name of the schema
+   * @param newSchema new schema object
+   * @throws NoSuchObjectException no schema with this name exists
+   * @throws MetaException general database exception
+   */
+  void alterISchema(ISchemaName schemaName, ISchema newSchema) throws 
NoSuchObjectException, MetaException;
+
+  /**
+   * Get an ISchema by name.
+   * @param schemaName schema descriptor
+   * @return the ISchema, or null if there is no schema of that name
+   * @throws MetaException general database exception
+   */
+  ISchema getISchema(ISchemaName schemaName) throws MetaException;
+
+  /**
+   * Drop an ISchema.  This does not check whether there are valid versions of the schema in
+   * existence; it assumes the caller has already done that.
+   * @param schemaName schema descriptor
+   * @throws NoSuchObjectException no schema of this name exists
+   * @throws MetaException general database exception
+   */
+  void dropISchema(ISchemaName schemaName) throws NoSuchObjectException, 
MetaException;
+
+  /**
+   * Create a new version of an existing schema.
+   * @param schemaVersion the new schema version to store
+   * @throws AlreadyExistsException a version of the schema with the same 
version number already
+   * exists.
+   * @throws InvalidObjectException the passed in SchemaVersion object has 
problems.
+   * @throws NoSuchObjectException no schema with the passed in name exists.
+   * @throws MetaException general database exception
+   */
+  void addSchemaVersion(SchemaVersion schemaVersion)
+      throws AlreadyExistsException, InvalidObjectException, 
NoSuchObjectException, MetaException;
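
A sketch of registering a first version, assuming the four-argument SchemaVersion constructor used by the ObjectStore conversion code earlier in this patch (schema name, version number, creation time, column list); the database, schema, and column names are illustrative.

import java.util.Collections;
import org.apache.hadoop.hive.metastore.RawStore;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.ISchemaName;
import org.apache.hadoop.hive.metastore.api.SchemaVersion;

public class AddSchemaVersionExample {
  // Registers version 1 of default.events with a single string column.  The schema
  // itself must already exist or the store throws NoSuchObjectException.
  static void addFirstVersion(RawStore store) throws Exception {
    SchemaVersion v1 = new SchemaVersion(
        new ISchemaName("default", "events"),
        1,                                          // version number
        (int) (System.currentTimeMillis() / 1000),  // createdAt, seconds since the epoch
        Collections.singletonList(new FieldSchema("id", "string", null)));
    store.addSchemaVersion(v1);
  }
}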
+
+  /**
+   * Alter a schema version.  Note that the Thrift interface only supports changing the serde
+   * mapping and the state.  This method does not guarantee it will check any more than that.  This
+   * method does not understand the state transitions and just assumes that 
the new state it is
+   * passed is reasonable.
+   * @param version version descriptor for the schema
+   * @param newVersion altered SchemaVersion
+   * @throws NoSuchObjectException no such version of the named schema exists
+   * @throws MetaException general database exception
+   */
+  void alterSchemaVersion(SchemaVersionDescriptor version, SchemaVersion 
newVersion)
+      throws NoSuchObjectException, MetaException;
+
+  /**
+   * Get a specific schema version.
+   * @param version version descriptor for the schema
+   * @return the SchemaVersion
+   * @throws MetaException general database exception
+   */
+  SchemaVersion getSchemaVersion(SchemaVersionDescriptor version) throws 
MetaException;
+
+  /**
+   * Get the latest version of a schema.
+   * @param schemaName name of the schema
+   * @return latest version of the schema, or null if the schema has no versions
+   * @throws MetaException general database exception
+   */
+  SchemaVersion getLatestSchemaVersion(ISchemaName schemaName) throws 
MetaException;
+
+  /**
+   * Get all of the versions of a schema
+   * @param schemaName name of the schema
+   * @return all versions of the schema
+   * @throws MetaException general database exception
+   */
+  List<SchemaVersion> getAllSchemaVersion(ISchemaName schemaName) throws 
MetaException;
+
+  /**
+   * Find all SchemaVersion objects that match a query.  The query matches on every argument
+   * that is non-null.  That is, if colName='name', colNamespace=null, type='string' are passed
+   * in, then all schema versions that have a column named 'name' of type 'string' will be
+   * returned.
+   * @param colName column name.  Null is ok, which will cause this field to 
not be used in the
+   *                query.
+   * @param colNamespace column namespace.   Null is ok, which will cause this 
field to not be
+   *                     used in the query.
+   * @param type column type.   Null is ok, which will cause this field to not 
be used in the
+   *             query.
+   * @return List of all SchemaVersions that match.  Note that there is no 
expectation that these
+   * SchemaVersions derive from the same ISchema.  The list will be empty if 
there are no
+   * matching SchemaVersions.
+   * @throws MetaException general database exception
+   */
+  List<SchemaVersion> getSchemaVersionsByColumns(String colName, String 
colNamespace, String type)
+      throws MetaException;
+
+  /**
+   * Drop a version of the schema.
+   * @param version version descriptor for the schema
+   * @throws NoSuchObjectException no such version of the named schema exists
+   * @throws MetaException general database exception
+   */
+  void dropSchemaVersion(SchemaVersionDescriptor version) throws 
NoSuchObjectException, MetaException;
+
+  /**
+   * Get serde information
+   * @param serDeName name of the SerDe
+   * @return the SerDe
+   * @throws NoSuchObjectException no serde with this name exists
+   * @throws MetaException general database exception
+   */
+  SerDeInfo getSerDeInfo(String serDeName) throws NoSuchObjectException, 
MetaException;
+
+  /**
+   * Add a serde
+   * @param serde serde to add
+   * @throws AlreadyExistsException a serde of this name already exists
+   * @throws MetaException general database exception
+   */
+  void addSerde(SerDeInfo serde) throws AlreadyExistsException, MetaException;
 }
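
Pulling the read-side calls together, a hedged end-to-end sketch against any RawStore implementation; the two-argument SchemaVersionDescriptor constructor is assumed from the getSchema()/getVersion() accessors used above, and all names and numbers are illustrative.

import java.util.List;
import org.apache.hadoop.hive.metastore.RawStore;
import org.apache.hadoop.hive.metastore.api.ISchemaName;
import org.apache.hadoop.hive.metastore.api.SchemaVersion;
import org.apache.hadoop.hive.metastore.api.SchemaVersionDescriptor;

public class SchemaVersionLookupExample {
  // Reads the newest version of default.events, lists all versions (newest first),
  // and drops version 1 if more than one version remains.
  static void inspectAndPrune(RawStore store) throws Exception {
    ISchemaName name = new ISchemaName("default", "events");
    SchemaVersion latest = store.getLatestSchemaVersion(name);  // null if no versions exist
    List<SchemaVersion> all = store.getAllSchemaVersion(name);  // null if no versions exist
    if (latest != null && all != null && all.size() > 1) {
      store.dropSchemaVersion(new SchemaVersionDescriptor(name, 1));
    }
  }
}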

http://git-wip-us.apache.org/repos/asf/hive/blob/12041d39/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/cache/CachedStore.java
----------------------------------------------------------------------
diff --git 
a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/cache/CachedStore.java
 
b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/cache/CachedStore.java
index f58ba04..d28b196 100644
--- 
a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/cache/CachedStore.java
+++ 
b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/cache/CachedStore.java
@@ -18,6 +18,8 @@
 package org.apache.hadoop.hive.metastore.cache;
 
 import org.apache.hadoop.hive.metastore.api.CreationMetadata;
+import org.apache.hadoop.hive.metastore.api.ISchemaName;
+import org.apache.hadoop.hive.metastore.api.SchemaVersionDescriptor;
 import org.apache.hadoop.hive.metastore.api.WMFullResourcePlan;
 
 import java.nio.ByteBuffer;
@@ -59,6 +61,7 @@ import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.FileMetadataExprType;
 import org.apache.hadoop.hive.metastore.api.Function;
 import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
+import org.apache.hadoop.hive.metastore.api.ISchema;
 import org.apache.hadoop.hive.metastore.api.InvalidInputException;
 import org.apache.hadoop.hive.metastore.api.InvalidObjectException;
 import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
@@ -91,6 +94,8 @@ import org.apache.hadoop.hive.metastore.api.SQLForeignKey;
 import org.apache.hadoop.hive.metastore.api.SQLNotNullConstraint;
 import org.apache.hadoop.hive.metastore.api.SQLPrimaryKey;
 import org.apache.hadoop.hive.metastore.api.SQLUniqueConstraint;
+import org.apache.hadoop.hive.metastore.api.SchemaVersion;
+import org.apache.hadoop.hive.metastore.api.SerDeInfo;
 import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
 import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.hadoop.hive.metastore.api.TableMeta;
@@ -2500,6 +2505,78 @@ public class CachedStore implements RawStore, 
Configurable {
     return rawStore.getPartitionColStatsForDatabase(dbName);
   }
 
+  // TODO - not clear if we should cache these or not.  For now, don't bother
+  @Override
+  public void createISchema(ISchema schema)
+      throws AlreadyExistsException, NoSuchObjectException, MetaException {
+    rawStore.createISchema(schema);
+  }
+
+  @Override
+  public void alterISchema(ISchemaName schemaName, ISchema newSchema)
+      throws NoSuchObjectException, MetaException {
+    rawStore.alterISchema(schemaName, newSchema);
+  }
+
+  @Override
+  public ISchema getISchema(ISchemaName schemaName) throws MetaException {
+    return rawStore.getISchema(schemaName);
+  }
+
+  @Override
+  public void dropISchema(ISchemaName schemaName) throws 
NoSuchObjectException, MetaException {
+    rawStore.dropISchema(schemaName);
+  }
+
+  @Override
+  public void addSchemaVersion(SchemaVersion schemaVersion) throws
+      AlreadyExistsException, InvalidObjectException, NoSuchObjectException, 
MetaException {
+    rawStore.addSchemaVersion(schemaVersion);
+  }
+
+  @Override
+  public void alterSchemaVersion(SchemaVersionDescriptor version, 
SchemaVersion newVersion) throws
+      NoSuchObjectException, MetaException {
+    rawStore.alterSchemaVersion(version, newVersion);
+  }
+
+  @Override
+  public SchemaVersion getSchemaVersion(SchemaVersionDescriptor version) 
throws MetaException {
+    return rawStore.getSchemaVersion(version);
+  }
+
+  @Override
+  public SchemaVersion getLatestSchemaVersion(ISchemaName schemaName) throws 
MetaException {
+    return rawStore.getLatestSchemaVersion(schemaName);
+  }
+
+  @Override
+  public List<SchemaVersion> getAllSchemaVersion(ISchemaName schemaName) 
throws MetaException {
+    return rawStore.getAllSchemaVersion(schemaName);
+  }
+
+  @Override
+  public List<SchemaVersion> getSchemaVersionsByColumns(String colName, String 
colNamespace,
+                                                        String type) throws 
MetaException {
+    return rawStore.getSchemaVersionsByColumns(colName, colNamespace, type);
+  }
+
+  @Override
+  public void dropSchemaVersion(SchemaVersionDescriptor version) throws 
NoSuchObjectException,
+      MetaException {
+    rawStore.dropSchemaVersion(version);
+  }
+
+  @Override
+  public SerDeInfo getSerDeInfo(String serDeName) throws 
NoSuchObjectException, MetaException {
+    return rawStore.getSerDeInfo(serDeName);
+  }
+
+  @Override
+  public void addSerde(SerDeInfo serde) throws AlreadyExistsException, 
MetaException {
+    rawStore.addSerde(serde);
+  }
+
   public RawStore getRawStore() {
     return rawStore;
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/12041d39/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/client/builder/DatabaseBuilder.java
----------------------------------------------------------------------
diff --git 
a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/client/builder/DatabaseBuilder.java
 
b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/client/builder/DatabaseBuilder.java
index 7627d89..01693ec 100644
--- 
a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/client/builder/DatabaseBuilder.java
+++ 
b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/client/builder/DatabaseBuilder.java
@@ -73,7 +73,7 @@ public class DatabaseBuilder {
     return this;
   }
 
-  public Database build() throws TException {
+  public Database build() throws MetaException {
     if (name == null) throw new MetaException("You must name the database");
     Database db = new Database(name, description, location, params);
     try {
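
The only change here narrows the declared exception from TException to MetaException, so callers no longer need a Thrift-level catch block; a minimal sketch (the database name is illustrative, and setName is assumed from the existing builder API).

import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder;

public class DatabaseBuilderExample {
  static Database buildOrNull() {
    try {
      return new DatabaseBuilder().setName("schema_test_db").build();
    } catch (MetaException e) {
      return null;  // thrown when, for example, no name was set
    }
  }
}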

http://git-wip-us.apache.org/repos/asf/hive/blob/12041d39/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/client/builder/ISchemaBuilder.java
----------------------------------------------------------------------
diff --git 
a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/client/builder/ISchemaBuilder.java
 
b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/client/builder/ISchemaBuilder.java
new file mode 100644
index 0000000..32a84ac
--- /dev/null
+++ 
b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/client/builder/ISchemaBuilder.java
@@ -0,0 +1,94 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.metastore.client.builder;
+
+import org.apache.hadoop.hive.metastore.Warehouse;
+import org.apache.hadoop.hive.metastore.api.ISchema;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.SchemaCompatibility;
+import org.apache.hadoop.hive.metastore.api.SchemaType;
+import org.apache.hadoop.hive.metastore.api.SchemaValidation;
+
+public class ISchemaBuilder {
+  private SchemaType schemaType; // required
+  private String name; // required
+  private String dbName; // required
+  private SchemaCompatibility compatibility; // required
+  private SchemaValidation validationLevel; // required
+  private boolean canEvolve; // required
+  private String schemaGroup; // optional
+  private String description; // optional
+
+  public ISchemaBuilder() {
+    compatibility = SchemaCompatibility.BACKWARD;
+    validationLevel = SchemaValidation.ALL;
+    canEvolve = true;
+    dbName = Warehouse.DEFAULT_DATABASE_NAME;
+  }
+
+  public ISchemaBuilder setSchemaType(SchemaType schemaType) {
+    this.schemaType = schemaType;
+    return this;
+  }
+
+  public ISchemaBuilder setName(String name) {
+    this.name = name;
+    return this;
+  }
+
+  public ISchemaBuilder setDbName(String dbName) {
+    this.dbName = dbName;
+    return this;
+  }
+
+  public ISchemaBuilder setCompatibility(SchemaCompatibility compatibility) {
+    this.compatibility = compatibility;
+    return this;
+  }
+
+  public ISchemaBuilder setValidationLevel(SchemaValidation validationLevel) {
+    this.validationLevel = validationLevel;
+    return this;
+  }
+
+  public ISchemaBuilder setCanEvolve(boolean canEvolve) {
+    this.canEvolve = canEvolve;
+    return this;
+  }
+
+  public ISchemaBuilder setSchemaGroup(String schemaGroup) {
+    this.schemaGroup = schemaGroup;
+    return this;
+  }
+
+  public ISchemaBuilder setDescription(String description) {
+    this.description = description;
+    return this;
+  }
+
+  public ISchema build() throws MetaException {
+    if (schemaType == null || name == null) {
+      throw new MetaException("You must provide a schemaType and name");
+    }
+    ISchema iSchema =
+        new ISchema(schemaType, name, dbName, compatibility, validationLevel, 
canEvolve);
+    if (schemaGroup != null) iSchema.setSchemaGroup(schemaGroup);
+    if (description != null) iSchema.setDescription(description);
+    return iSchema;
+  }
+}
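
A usage sketch for the new builder, contrasting the minimal required fields with a fully specified schema; the AVRO constant and all names are illustrative.

import org.apache.hadoop.hive.metastore.api.ISchema;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.SchemaCompatibility;
import org.apache.hadoop.hive.metastore.api.SchemaType;
import org.apache.hadoop.hive.metastore.api.SchemaValidation;
import org.apache.hadoop.hive.metastore.client.builder.ISchemaBuilder;

public class ISchemaBuilderExample {
  // Only schemaType and name are required; dbName defaults to "default",
  // compatibility to BACKWARD, validationLevel to ALL, and canEvolve to true.
  static ISchema minimal() throws MetaException {
    return new ISchemaBuilder()
        .setSchemaType(SchemaType.AVRO)   // assumed enum constant
        .setName("clicks")
        .build();
  }

  // Every optional field can also be set explicitly.
  static ISchema explicit() throws MetaException {
    return new ISchemaBuilder()
        .setSchemaType(SchemaType.AVRO)
        .setName("clicks_strict")
        .setDbName("web")
        .setCompatibility(SchemaCompatibility.BACKWARD)
        .setValidationLevel(SchemaValidation.ALL)
        .setCanEvolve(false)
        .setDescription("click stream records")
        .build();
  }
}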
