This is an automated email from the ASF dual-hosted git repository.

daijy pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
     new 10b6d70  HIVE-21753: HiveMetastore authorization to enable use of HiveAuthorizer implementation (Ramesh Mani, reviewed by Daniel Dai)
10b6d70 is described below

commit 10b6d70da1442cccf533bc97f56a622ec9f39661
Author: Daniel Dai <dai...@gmail.com>
AuthorDate: Sun Jun 2 14:29:43 2019 -0700

    HIVE-21753: HiveMetastore authorization to enable use of HiveAuthorizer implementation (Ramesh Mani, reviewed by Daniel Dai)
---
 .../plugin/HiveAuthzSessionContext.java            |   2 +-
 .../plugin/fallback/FallbackHiveAuthorizer.java    |   2 +-
 .../metastore/HiveMetaStoreAuthorizableEvent.java  |  70 +++++
 .../plugin/metastore/HiveMetaStoreAuthorizer.java  | 316 +++++++++++++++++++++
 .../plugin/metastore/HiveMetaStoreAuthzInfo.java   | 107 +++++++
 .../plugin/metastore/events/AddPartitionEvent.java | 112 ++++++++
 .../metastore/events/AlterDatabaseEvent.java       | 111 ++++++++
 .../metastore/events/AlterPartitionEvent.java      | 111 ++++++++
 .../plugin/metastore/events/AlterTableEvent.java   | 119 ++++++++
 .../metastore/events/CreateDatabaseEvent.java      |  97 +++++++
 .../plugin/metastore/events/CreateTableEvent.java  |  95 +++++++
 .../plugin/metastore/events/DropDatabaseEvent.java |  90 ++++++
 .../metastore/events/DropPartitionEvent.java       |  98 +++++++
 .../plugin/metastore/events/DropTableEvent.java    |  86 ++++++
 .../metastore/events/LoadPartitionDoneEvent.java   |  74 +++++
 .../plugin/metastore/DummyHiveAuthorizer.java      |  72 +++++
 .../metastore/DummyHiveAuthorizerFactory.java      |  40 +++
 .../metastore/TestHiveMetaStoreAuthorizer.java     | 266 +++++++++++++++++
 18 files changed, 1866 insertions(+), 2 deletions(-)

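For context on how the pieces below fit together: HiveMetaStoreAuthorizer is a MetaStorePreEventListener that maps each metastore pre-event to HivePrivilegeObjects and hands them to whatever HiveAuthorizer the configured factory creates, so the metastore can enforce the same plugin policies as HiveServer2. A minimal wiring sketch, assuming the standard hive-site.xml keys (the factory and authenticator classes named here are pre-existing implementations chosen for illustration, not part of this patch):

    import org.apache.hadoop.conf.Configuration;

    public class MetastoreAuthzWiringSketch {
      public static void main(String[] args) {
        // These keys would normally live in hive-site.xml on the metastore host.
        Configuration conf = new Configuration();
        // Register the pre-event listener added by this patch.
        conf.set("hive.metastore.pre.event.listeners",
            "org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.HiveMetaStoreAuthorizer");
        // Any HiveAuthorizerFactory works; SQL-standard authorization is one existing choice.
        conf.set("hive.security.authorization.manager",
            "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
        // Authenticator the listener instantiates on the metastore side.
        conf.set("hive.security.metastore.authenticator.manager",
            "org.apache.hadoop.hive.ql.security.HadoopDefaultMetastoreAuthenticator");
        System.out.println(conf.get("hive.metastore.pre.event.listeners"));
      }
    }
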
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthzSessionContext.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthzSessionContext.java
index a26febf..30b069e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthzSessionContext.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthzSessionContext.java
@@ -29,7 +29,7 @@ import org.apache.hadoop.hive.common.classification.InterfaceStability.Evolving;
 public final class HiveAuthzSessionContext {
 
   public enum CLIENT_TYPE {
-    HIVESERVER2, HIVECLI
+    HIVESERVER2, HIVECLI, HIVEMETASTORE, OTHER
   };
 
   public static class Builder {
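
The two new CLIENT_TYPE values let an authorizer distinguish metastore-initiated checks from HiveServer2 or CLI sessions. A hypothetical branch inside a custom HiveAuthorizer, where sessionCtx is the HiveAuthzSessionContext handed to the factory:

    // Sketch only: react to metastore-originated authorization requests.
    boolean fromMetastore =
        sessionCtx.getClientType() == HiveAuthzSessionContext.CLIENT_TYPE.HIVEMETASTORE;
    if (fromMetastore) {
      // e.g. skip SQL-session-only checks that do not apply to direct HMS calls
    }
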
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/fallback/FallbackHiveAuthorizer.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/fallback/FallbackHiveAuthorizer.java
index 10cf4d4..744241f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/fallback/FallbackHiveAuthorizer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/fallback/FallbackHiveAuthorizer.java
@@ -51,7 +51,7 @@ public class FallbackHiveAuthorizer extends AbstractHiveAuthorizer {
   private final HiveAuthenticationProvider authenticator;
   private String[] admins = null;
 
-  FallbackHiveAuthorizer(HiveConf hiveConf, HiveAuthenticationProvider hiveAuthenticator,
+  public FallbackHiveAuthorizer(HiveConf hiveConf, HiveAuthenticationProvider hiveAuthenticator,
                                 HiveAuthzSessionContext ctx) {
     this.authenticator = hiveAuthenticator;
     this.sessionCtx = applyTestSettings(ctx, hiveConf);
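
Widening the constructor to public lets the class be instantiated from outside its package, for example by an authorizer factory named in hive.security.authorization.manager. A minimal hypothetical factory (Hive's own FallbackHiveAuthorizerFactory plays the same role):

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
    import org.apache.hadoop.hive.ql.security.authorization.plugin.*;
    import org.apache.hadoop.hive.ql.security.authorization.plugin.fallback.FallbackHiveAuthorizer;

    public class MyFallbackAuthorizerFactory implements HiveAuthorizerFactory {
      @Override
      public HiveAuthorizer createHiveAuthorizer(HiveMetastoreClientFactory metastoreClientFactory,
          HiveConf conf, HiveAuthenticationProvider authenticator, HiveAuthzSessionContext ctx)
          throws HiveAuthzPluginException {
        // Now possible from any package, since the constructor is public.
        return new FallbackHiveAuthorizer(conf, authenticator, ctx);
      }
    }
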
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/HiveMetaStoreAuthorizableEvent.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/HiveMetaStoreAuthorizableEvent.java
new file mode 100644
index 0000000..d3d475a
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/HiveMetaStoreAuthorizableEvent.java
@@ -0,0 +1,70 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.hadoop.hive.ql.security.authorization.plugin.metastore;
+
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.metastore.events.PreEventContext;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
+
+import java.util.Collections;
+import java.util.List;
+
+/*
+HiveMetaStoreAuthorizableEvent: Abstract class for getting the MetaStore Event context for HiveMetaStore Authorization
+ */
+
+public abstract class HiveMetaStoreAuthorizableEvent {
+  protected final PreEventContext preEventContext;
+
+  protected HiveMetaStoreAuthorizableEvent(PreEventContext preEventContext) {
+    this.preEventContext = preEventContext;
+  }
+
+  public abstract HiveMetaStoreAuthzInfo getAuthzContext();
+
+  protected String getSdLocation(StorageDescriptor sd) {
+    return sd == null ? "" : sd.getLocation();
+  }
+
+  protected List<String> getCommandParams(String cmdStr, String objectName) {
+    String commandString = (objectName != null) ? cmdStr + " " + objectName : cmdStr;
+
+    return Collections.singletonList(commandString);
+  }
+
+  protected HivePrivilegeObject getHivePrivilegeObject(Database database) {
+    return new HivePrivilegeObject(HivePrivilegeObject.HivePrivilegeObjectType.DATABASE, database.getName(), null);
+  }
+
+  protected HivePrivilegeObject getHivePrivilegeObject(Table table) {
+    return new HivePrivilegeObject(HivePrivilegeObject.HivePrivilegeObjectType.TABLE_OR_VIEW, table.getDbName(), table.getTableName());
+  }
+
+  protected HivePrivilegeObject getHivePrivilegeObjectDfsUri(String uri) {
+    return new HivePrivilegeObject(HivePrivilegeObject.HivePrivilegeObjectType.DFS_URI, null, uri);
+  }
+
+  protected HivePrivilegeObject getHivePrivilegeObjectLocalUri(String uri) {
+    return new HivePrivilegeObject(HivePrivilegeObject.HivePrivilegeObjectType.LOCAL_URI, null, uri);
+  }
+
+}
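
The concrete event classes added below all follow the same pattern over this base class. A hypothetical skeleton (operation type, privilege objects, and command string are placeholders; imports as in the file above, plus HiveOperationType and HiveMetaStoreAuthzInfo):

    // Sketch of a concrete subclass; compare AddPartitionEvent and friends below.
    public class SomeOperationEvent extends HiveMetaStoreAuthorizableEvent {
      public SomeOperationEvent(PreEventContext preEventContext) {
        super(preEventContext);
      }

      @Override
      public HiveMetaStoreAuthzInfo getAuthzContext() {
        // Derive inputs/outputs from the concrete PreEventContext subtype.
        List<HivePrivilegeObject> inputs  = Collections.emptyList();
        List<HivePrivilegeObject> outputs = Collections.emptyList();
        return new HiveMetaStoreAuthzInfo(preEventContext, HiveOperationType.QUERY,
            inputs, outputs, "some command");
      }
    }
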
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/HiveMetaStoreAuthorizer.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/HiveMetaStoreAuthorizer.java
new file mode 100644
index 0000000..50c7fc6
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/HiveMetaStoreAuthorizer.java
@@ -0,0 +1,316 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.hadoop.hive.ql.security.authorization.plugin.metastore;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.HiveMetaStore;
+import org.apache.hadoop.hive.metastore.MetaStorePreEventListener;
+import org.apache.hadoop.hive.metastore.TableType;
+import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.metastore.events.PreAlterTableEvent;
+import org.apache.hadoop.hive.metastore.events.PreCreateTableEvent;
+import org.apache.hadoop.hive.metastore.events.PreDropTableEvent;
+import org.apache.hadoop.hive.metastore.events.PreEventContext;
+import org.apache.hadoop.hive.metastore.utils.MetaStoreServerUtils;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.HiveUtils;
+import org.apache.hadoop.hive.ql.security.HiveMetastoreAuthenticationProvider;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.events.*;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizerFactory;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzSessionContext;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveMetastoreClientFactoryImpl;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
+import org.apache.hadoop.security.UserGroupInformation;
+
+import java.io.IOException;
+import java.util.List;
+
+/**
+ * HiveMetaStoreAuthorizer : Do authorization checks on MetaStore Events in MetaStorePreEventListener
+ */
+
+public class HiveMetaStoreAuthorizer extends MetaStorePreEventListener {
+  private static final Log    LOG              = LogFactory.getLog(HiveMetaStoreAuthorizer.class);
+
+  private static final ThreadLocal<Configuration> tConfig = new ThreadLocal<Configuration>() {
+    @Override
+    protected Configuration initialValue() {
+      return new HiveConf(HiveMetaStoreAuthorizer.class);
+    }
+  };
+
+  private static final ThreadLocal<HiveMetastoreAuthenticationProvider> tAuthenticator = new ThreadLocal<HiveMetastoreAuthenticationProvider>() {
+    @Override
+    protected HiveMetastoreAuthenticationProvider initialValue() {
+      try {
+        return (HiveMetastoreAuthenticationProvider) HiveUtils.getAuthenticator(tConfig.get(), HiveConf.ConfVars.HIVE_METASTORE_AUTHENTICATOR_MANAGER);
+      } catch (HiveException excp) {
+        throw new IllegalStateException("Authentication provider instantiation failure", excp);
+      }
+    }
+  };
+
+  public HiveMetaStoreAuthorizer(Configuration config) {
+    super(config);
+  }
+
+  @Override
+  public final void onEvent(PreEventContext preEventContext) throws MetaException, NoSuchObjectException, InvalidOperationException {
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("==> HiveMetaStoreAuthorizer.onEvent(): EventType=" + preEventContext.getEventType());
+    }
+
+    HiveMetaStoreAuthzInfo authzContext = buildAuthzContext(preEventContext);
+
+    if (!skipAuthorization(authzContext)) {
+      try {
+        HiveConf              hiveConf          = new HiveConf(super.getConf(), HiveConf.class);
+        HiveAuthorizerFactory authorizerFactory = HiveUtils.getAuthorizerFactory(hiveConf, HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER);
+
+        if (authorizerFactory != null) {
+          HiveMetastoreAuthenticationProvider authenticator = tAuthenticator.get();
+
+          authenticator.setConf(hiveConf);
+
+          HiveAuthzSessionContext.Builder authzContextBuilder = new HiveAuthzSessionContext.Builder();
+
+          authzContextBuilder.setClientType(HiveAuthzSessionContext.CLIENT_TYPE.HIVEMETASTORE);
+          authzContextBuilder.setSessionString("HiveMetaStore");
+
+          HiveAuthzSessionContext authzSessionContext = authzContextBuilder.build();
+
+          HiveAuthorizer hiveAuthorizer = authorizerFactory.createHiveAuthorizer(new HiveMetastoreClientFactoryImpl(), hiveConf, authenticator, authzSessionContext);
+
+          checkPrivileges(authzContext, hiveAuthorizer);
+        }
+      } catch (Exception e) {
+        LOG.error("HiveMetaStoreAuthorizer.onEvent(): failed", e);
+        throw new MetaException(e.getMessage());
+      }
+    }
+
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("<== HiveMetaStoreAuthorizer.onEvent(): EventType=" + 
preEventContext.getEventType());
+    }
+  }
+
+  HiveMetaStoreAuthzInfo buildAuthzContext(PreEventContext preEventContext) throws MetaException {
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("==> HiveMetaStoreAuthorizer.buildAuthzContext(): EventType=" + preEventContext.getEventType());
+    }
+
+    HiveMetaStoreAuthorizableEvent authzEvent = null;
+
+    if (preEventContext != null) {
+
+      switch (preEventContext.getEventType()) {
+        case CREATE_DATABASE:
+          authzEvent = new CreateDatabaseEvent(preEventContext);
+          break;
+        case ALTER_DATABASE:
+          authzEvent = new AlterDatabaseEvent(preEventContext);
+          break;
+        case DROP_DATABASE:
+          authzEvent = new DropDatabaseEvent(preEventContext);
+          break;
+        case CREATE_TABLE:
+          authzEvent = new CreateTableEvent(preEventContext);
+          if (isViewOperation(preEventContext) && (!isSuperUser(getCurrentUser(authzEvent)))) {
+            throw new MetaException(getErrorMessage("CREATE_VIEW", getCurrentUser(authzEvent)));
+          }
+          break;
+        case ALTER_TABLE:
+          authzEvent = new AlterTableEvent(preEventContext);
+          if (isViewOperation(preEventContext) && (!isSuperUser(getCurrentUser(authzEvent)))) {
+            throw new MetaException(getErrorMessage("ALTER_VIEW", getCurrentUser(authzEvent)));
+          }
+          break;
+        case DROP_TABLE:
+          authzEvent = new DropTableEvent(preEventContext);
+          if (isViewOperation(preEventContext) && (!isSuperUser(getCurrentUser(authzEvent)))) {
+            throw new MetaException(getErrorMessage("DROP_VIEW", getCurrentUser(authzEvent)));
+          }
+          break;
+        case ADD_PARTITION:
+          authzEvent = new AddPartitionEvent(preEventContext);
+          break;
+        case ALTER_PARTITION:
+          authzEvent = new AlterPartitionEvent(preEventContext);
+          break;
+        case LOAD_PARTITION_DONE:
+          authzEvent = new LoadPartitionDoneEvent(preEventContext);
+          break;
+        case DROP_PARTITION:
+          authzEvent = new DropPartitionEvent(preEventContext);
+          break;
+        case AUTHORIZATION_API_CALL:
+        case READ_ISCHEMA:
+        case CREATE_ISCHEMA:
+        case DROP_ISCHEMA:
+        case ALTER_ISCHEMA:
+        case ADD_SCHEMA_VERSION:
+        case ALTER_SCHEMA_VERSION:
+        case DROP_SCHEMA_VERSION:
+        case READ_SCHEMA_VERSION:
+        case CREATE_CATALOG:
+        case ALTER_CATALOG:
+        case DROP_CATALOG:
+          if (!isSuperUser(getCurrentUser())) {
+            throw new MetaException(getErrorMessage(preEventContext, getCurrentUser()));
+          }
+          break;
+        default:
+          break;
+      }
+    }
+
+    HiveMetaStoreAuthzInfo ret = authzEvent != null ? authzEvent.getAuthzContext() : null;
+
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("<== HiveMetaStoreAuthorizer.buildAuthzContext(): EventType=" 
+ preEventContext.getEventType() + "; ret=" + ret);
+    }
+
+    return ret;
+  }
+
+  boolean isSuperUser(String userName) {
+    Configuration conf      = getConf();
+    String        ipAddress = HiveMetaStore.HMSHandler.getIPAddress();
+    return (MetaStoreServerUtils.checkUserHasHostProxyPrivileges(userName, conf, ipAddress));
+  }
+
+  boolean isViewOperation(PreEventContext preEventContext) {
+    boolean ret = false;
+
+    PreEventContext.PreEventType  preEventType = preEventContext.getEventType();
+
+    switch (preEventType) {
+      case CREATE_TABLE:
+        PreCreateTableEvent preCreateTableEvent = (PreCreateTableEvent) preEventContext;
+        Table table = preCreateTableEvent.getTable();
+        ret         = isViewType(table);
+        break;
+      case ALTER_TABLE:
+        PreAlterTableEvent preAlterTableEvent  = (PreAlterTableEvent) preEventContext;
+        Table inTable  = preAlterTableEvent.getOldTable();
+        Table outTable = preAlterTableEvent.getNewTable();
+        ret            = (isViewType(inTable) || isViewType(outTable));
+        break;
+      case DROP_TABLE:
+        PreDropTableEvent preDropTableEvent = (PreDropTableEvent) preEventContext;
+        Table droppedTable = preDropTableEvent.getTable();
+        ret                = isViewType(droppedTable);
+        break;
+    }
+
+    return ret;
+  }
+
+  private void checkPrivileges(final HiveMetaStoreAuthzInfo authzContext, HiveAuthorizer authorizer) throws MetaException {
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("==> HiveMetaStoreAuthorizer.checkPrivileges(): authzContext=" + authzContext + ", authorizer=" + authorizer);
+    }
+
+    HiveOperationType         hiveOpType       = authzContext.getOperationType();
+    List<HivePrivilegeObject> inputHObjs       = authzContext.getInputHObjs();
+    List<HivePrivilegeObject> outputHObjs      = authzContext.getOutputHObjs();
+    HiveAuthzContext          hiveAuthzContext = authzContext.getHiveAuthzContext();
+
+    try {
+      authorizer.checkPrivileges(hiveOpType, inputHObjs, outputHObjs, hiveAuthzContext);
+    } catch (Exception e) {
+      throw new MetaException(e.getMessage());
+    }
+
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("<== HiveMetaStoreAuthorizer.checkPrivileges(): authzContext=" 
+ authzContext + ", authorizer=" + authorizer);
+    }
+  }
+
+  private boolean skipAuthorization(HiveMetaStoreAuthzInfo authzContext) {
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("==> HiveMetaStoreAuthorizer.skipAuthorization(): 
authzContext=" + authzContext);
+    }
+
+    boolean ret = false;
+
+    if (authzContext == null) {
+      ret = true;
+    } else {
+
+      UserGroupInformation ugi = authzContext.getUGI();
+
+      if (ugi == null) {
+        ret = true;
+      } else {
+        ret = isSuperUser(ugi.getShortUserName());
+      }
+    }
+
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("<== HiveMetaStoreAuthorizer.skipAuthorization(): 
authzContext=" + authzContext + "; ret=" + ret);
+    }
+
+    return ret;
+  }
+
+  private boolean isViewType(Table table) {
+    boolean ret = false;
+
+    String tableType = table.getTableType();
+
+    if (TableType.MATERIALIZED_VIEW.name().equals(tableType) || TableType.VIRTUAL_VIEW.name().equals(tableType)) {
+      ret = true;
+    }
+
+    return ret;
+  }
+
+  private String getErrorMessage(PreEventContext preEventContext, String user) {
+    String err = "Operation type " + preEventContext.getEventType().name() + " not allowed for user:" + user;
+    return err;
+  }
+
+  private String getErrorMessage(String eventType, String user) {
+    String err = "Operation type " + eventType + " not allowed for user:" + 
user;
+    return err;
+  }
+
+  private String getCurrentUser() {
+    try {
+      return UserGroupInformation.getCurrentUser().getShortUserName();
+    } catch (IOException excp) {
+    }
+    return null;
+  }
+
+  private String getCurrentUser(HiveMetaStoreAuthorizableEvent authorizableEvent) {
+    return authorizableEvent.getAuthzContext().getUGI().getShortUserName();
+  }
+}
+
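
Note that isSuperUser() delegates to MetaStoreServerUtils.checkUserHasHostProxyPrivileges(), so a "superuser" here is a user holding Hadoop proxy privileges for the calling host; skipAuthorization() lets such users bypass the authorizer entirely. A sketch of granting that to a hypothetical service user named hive (the equivalent entries normally live in core-site.xml):

    import org.apache.hadoop.conf.Configuration;

    public class ProxyUserSketch {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        conf.set("hadoop.proxyuser.hive.hosts", "*");   // hosts 'hive' may act from
        conf.set("hadoop.proxyuser.hive.groups", "*");  // groups 'hive' may impersonate
        System.out.println(conf.get("hadoop.proxyuser.hive.hosts"));
      }
    }
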
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/HiveMetaStoreAuthzInfo.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/HiveMetaStoreAuthzInfo.java
new file mode 100644
index 0000000..a372c78
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/HiveMetaStoreAuthzInfo.java
@@ -0,0 +1,107 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.hadoop.hive.ql.security.authorization.plugin.metastore;
+
+import org.apache.hadoop.hive.metastore.HiveMetaStore;
+import org.apache.hadoop.hive.metastore.events.PreEventContext;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
+import org.apache.hadoop.security.UserGroupInformation;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+/*
+HiveMetaStoreAuthzInfo : Context for HiveMetaStore authorization done by HiveMetaStoreAuthorizer
+ */
+
+public class HiveMetaStoreAuthzInfo {
+  private final PreEventContext           preEventContext;
+  private final HiveOperationType         operationType;
+  private final List<HivePrivilegeObject> inputHObjs;
+  private final List<HivePrivilegeObject> outputHObjs;
+  private final String                    commandString;
+  private final HiveAuthzContext          hiveAuthzContext;
+
+  public HiveMetaStoreAuthzInfo(PreEventContext preEventContext, HiveOperationType operationType, List<HivePrivilegeObject> inputHObjs, List<HivePrivilegeObject> outputHObjs, String commandString) {
+    this.preEventContext  = preEventContext;
+    this.operationType    = operationType;
+    this.inputHObjs       = inputHObjs;
+    this.outputHObjs      = outputHObjs;
+    this.commandString    = commandString;
+    this.hiveAuthzContext = createHiveAuthzContext();
+  }
+
+  public HiveOperationType getOperationType() {
+    return operationType;
+  }
+
+  public List<HivePrivilegeObject> getInputHObjs() { return inputHObjs; }
+
+  public List<HivePrivilegeObject> getOutputHObjs() { return outputHObjs; }
+
+  public String getCommandString() {
+    return commandString;
+  }
+
+  public HiveAuthzContext getHiveAuthzContext() { return hiveAuthzContext; }
+
+  public PreEventContext getPreEventContext() {
+    return preEventContext;
+  }
+
+  public UserGroupInformation getUGI() {
+    try {
+      return UserGroupInformation.getCurrentUser();
+    } catch (IOException excp) {
+    }
+    return null;
+  }
+
+  @Override
+  public String toString() {
+    StringBuilder sb = new StringBuilder();
+    sb.append("HiveMetaStoreAuthzInfo= ").append("{");
+    sb.append("eventType=").append(preEventContext.getEventType().name());
+    sb.append(", operationType=").append(operationType.name());
+    sb.append(", commandString=").append(commandString);
+    sb.append(", inputHObjs=").append(inputHObjs);
+    sb.append(", outputHObjs=").append(outputHObjs);
+    sb.append(" }");
+    return sb.toString();
+  }
+
+  private HiveAuthzContext createHiveAuthzContext() {
+    HiveAuthzContext.Builder builder = new HiveAuthzContext.Builder();
+    builder.setCommandString(commandString);
+
+    // TODO: refer to SessionManager/HiveSessionImpl for details on getting ipAddress and forwardedAddresses
+    builder.setForwardedAddresses(new ArrayList<>());
+
+    String ipAddress = HiveMetaStore.HMSHandler.getIPAddress();
+
+    builder.setUserIpAddress(ipAddress);
+
+    HiveAuthzContext ret = builder.build();
+
+    return ret;
+  }
+}
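
Each event class below ultimately produces one of these objects. A construction sketch, as a unit test might do it (ctx stands for some PreEventContext, and testdb is a made-up name):

    // Sketch: assemble the payload the listener hands to HiveAuthorizer.checkPrivileges().
    List<HivePrivilegeObject> outputs = new ArrayList<>();
    outputs.add(new HivePrivilegeObject(
        HivePrivilegeObject.HivePrivilegeObjectType.DATABASE, "testdb", null));

    HiveMetaStoreAuthzInfo info = new HiveMetaStoreAuthzInfo(ctx,
        HiveOperationType.CREATEDATABASE, Collections.emptyList(), outputs,
        "create database testdb");
    // The HiveAuthzContext (command string plus caller IP) is built eagerly in the constructor.
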
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/events/AddPartitionEvent.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/events/AddPartitionEvent.java
new file mode 100644
index 0000000..1cb33c0
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/events/AddPartitionEvent.java
@@ -0,0 +1,112 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.events;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.Partition;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.metastore.events.PreAddPartitionEvent;
+import org.apache.hadoop.hive.metastore.events.PreEventContext;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.HiveMetaStoreAuthorizableEvent;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.HiveMetaStoreAuthzInfo;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+/*
+ Authorizable Event for HiveMetaStore operation AddPartition
+ */
+
+public class AddPartitionEvent extends HiveMetaStoreAuthorizableEvent {
+  private static final Log LOG = LogFactory.getLog(AddPartitionEvent.class);
+
+  private String COMMAND_STR = "alter table %s add partition %s";
+
+  public AddPartitionEvent(PreEventContext preEventContext) {
+    super(preEventContext);
+  }
+
+  @Override
+  public HiveMetaStoreAuthzInfo getAuthzContext() {
+    HiveMetaStoreAuthzInfo ret = new HiveMetaStoreAuthzInfo(preEventContext, HiveOperationType.ALTERTABLE_ADDPARTS, getInputHObjs(), getOutputHObjs(), COMMAND_STR);
+
+    return ret;
+  }
+
+  private List<HivePrivilegeObject> getInputHObjs() {
+    return Collections.emptyList();
+  }
+
+  private List<HivePrivilegeObject> getOutputHObjs() {
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("==> AddPartitionEvent.getOutputHObjs()");
+    }
+
+    List<HivePrivilegeObject> ret   = new ArrayList<>();
+    PreAddPartitionEvent      event = (PreAddPartitionEvent) preEventContext;
+    Table                     table = event.getTable();
+
+    ret.add(getHivePrivilegeObject(table));
+
+    List<Partition> partitions = event.getPartitions();
+
+    if (partitions != null) {
+      for (Partition partition : partitions) {
+        String uri = getSdLocation(partition.getSd());
+
+        if (StringUtils.isNotEmpty(uri)) {
+          ret.add(getHivePrivilegeObjectDfsUri(uri));
+        }
+      }
+    }
+
+    COMMAND_STR = buildCommandString(COMMAND_STR, table);
+
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("<== AddPartitionEvent.getOutputHObjs(): ret=" + ret );
+    }
+
+    return ret;
+  }
+
+  private String buildCommandString(String cmdStr, Table tbl) {
+    String ret = cmdStr;
+
+    if (tbl != null) {
+      String tblName     = (StringUtils.isNotEmpty(tbl.getTableName()) ? " " + tbl.getTableName() : "");
+
+      StringBuilder partitions  = new StringBuilder();
+      List<FieldSchema> fieldSchemas = tbl.getPartitionKeys();
+      for (FieldSchema fieldSchema : fieldSchemas) {
+         partitions.append(" ");
+         partitions.append(fieldSchema.getName());
+      }
+
+      ret = String.format(cmdStr, tblName, partitions.toString());
+    }
+    return ret;
+  }
+}
\ No newline at end of file
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/events/AlterDatabaseEvent.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/events/AlterDatabaseEvent.java
new file mode 100644
index 0000000..e78f27b
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/events/AlterDatabaseEvent.java
@@ -0,0 +1,111 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.events;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.events.PreAlterDatabaseEvent;
+import org.apache.hadoop.hive.metastore.events.PreEventContext;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.HiveMetaStoreAuthorizableEvent;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.HiveMetaStoreAuthzInfo;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+/*
+ Authorizable Event for HiveMetaStore operation AlterDatabase
+ */
+
+public class AlterDatabaseEvent extends HiveMetaStoreAuthorizableEvent {
+  private static final Log LOG = LogFactory.getLog(AlterDatabaseEvent.class);
+
+  private String COMMAND_STR = "alter database";
+
+  public AlterDatabaseEvent(PreEventContext preEventContext) {
+    super(preEventContext);
+  }
+
+  @Override
+  public HiveMetaStoreAuthzInfo getAuthzContext() {
+    HiveMetaStoreAuthzInfo ret = new HiveMetaStoreAuthzInfo(preEventContext, getOperationType(), getInputHObjs(), getOutputHObjs(), COMMAND_STR);
+
+    return ret;
+  }
+
+  private HiveOperationType getOperationType() {
+    PreAlterDatabaseEvent event = (PreAlterDatabaseEvent) preEventContext;
+
+    Database database    = event.getNewDatabase();
+    Database oldDatabase = event.getOldDatabase();
+    String   newUri      = (database != null) ? database.getLocationUri() : "";
+    String   oldUri      = (oldDatabase != null) ? oldDatabase.getLocationUri() : "";
+
+    return StringUtils.equals(oldUri, newUri) ? HiveOperationType.ALTERDATABASE : HiveOperationType.ALTERDATABASE_LOCATION;
+  }
+
+  private List<HivePrivilegeObject> getInputHObjs() {
+    return Collections.emptyList();
+  }
+
+  private List<HivePrivilegeObject> getOutputHObjs() {
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("==> AlterDatabaseEvent.getOutputHObjs()");
+    }
+
+    List<HivePrivilegeObject> ret           = new ArrayList<>();
+    PreAlterDatabaseEvent     event         = (PreAlterDatabaseEvent) preEventContext;
+    Database                  database      = event.getNewDatabase();
+
+    if (database != null) {
+      ret.add(getHivePrivilegeObject(database));
+
+      String newUri = (database != null) ? database.getLocationUri() : "";
+
+      if (StringUtils.isNotEmpty(newUri)) {
+        ret.add(getHivePrivilegeObjectDfsUri(newUri));
+      }
+
+      COMMAND_STR = buildCommandString(COMMAND_STR, database);
+
+      if (LOG.isDebugEnabled()) {
+        LOG.debug("<== AlterDatabaseEvent.getOutputHObjs(): ret=" + ret);
+      }
+    }
+
+    return ret;
+  }
+
+  private String buildCommandString(String cmdStr, Database db) {
+    String ret = cmdStr;
+
+    if (db != null) {
+      String dbName = db.getName();
+      ret           = ret + (StringUtils.isNotEmpty(dbName) ? " " + dbName : "");
+    }
+
+    return ret;
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/events/AlterPartitionEvent.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/events/AlterPartitionEvent.java
new file mode 100644
index 0000000..f83a737
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/events/AlterPartitionEvent.java
@@ -0,0 +1,111 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.events;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.metastore.api.Partition;
+import org.apache.hadoop.hive.metastore.events.PreAlterPartitionEvent;
+import org.apache.hadoop.hive.metastore.events.PreEventContext;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.HiveMetaStoreAuthorizableEvent;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.HiveMetaStoreAuthzInfo;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/*
+ Authorizable Event for HiveMetaStore operation AlterPartition
+ */
+
+public class AlterPartitionEvent extends HiveMetaStoreAuthorizableEvent {
+  private static final Log LOG = LogFactory.getLog(AlterPartitionEvent.class);
+
+  private String COMMAND_STR = "alter table %s partition %s";
+
+  public AlterPartitionEvent(PreEventContext preEventContext) {
+    super(preEventContext);
+  }
+
+  @Override
+  public HiveMetaStoreAuthzInfo getAuthzContext() {
+    HiveMetaStoreAuthzInfo ret = new HiveMetaStoreAuthzInfo(preEventContext, HiveOperationType.ALTERPARTITION_FILEFORMAT, getInputHObjs(), getOutputHObjs(), COMMAND_STR);
+
+    return ret;
+  }
+
+  private List<HivePrivilegeObject> getInputHObjs() {
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("==> AlterPartitionEvent.getInputHObjs()");
+    }
+
+    List<HivePrivilegeObject> ret   = new ArrayList<>();
+    PreAlterPartitionEvent    event = (PreAlterPartitionEvent) preEventContext;
+
+    ret.add(new HivePrivilegeObject(HivePrivilegeObjectType.TABLE_OR_VIEW, event.getDbName(), event.getTableName()));
+
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("<== AlterPartitionEvent.getInputHObjs()" + ret);
+    }
+
+    return ret;
+  }
+
+  private List<HivePrivilegeObject> getOutputHObjs() {
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("==> AlterPartitionEvent.getOutputHObjs()");
+    }
+
+    List<HivePrivilegeObject> ret   = new ArrayList<>();
+    PreAlterPartitionEvent    event = (PreAlterPartitionEvent) preEventContext;
+
+    ret.add(new HivePrivilegeObject(HivePrivilegeObjectType.TABLE_OR_VIEW, event.getDbName(), event.getTableName()));
+
+    Partition newPartition = event.getNewPartition();
+    String    newUri       = (newPartition != null) ? getSdLocation(newPartition.getSd()) : "";
+
+    if (StringUtils.isNotEmpty(newUri)) {
+      ret.add(getHivePrivilegeObjectDfsUri(newUri));
+    }
+
+    COMMAND_STR = buildCommandString(COMMAND_STR, event.getTableName(), newPartition);
+
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("<== AlterPartitionEvent.getOutputHObjs()" + ret );
+    }
+
+    return ret;
+  }
+
+  private String buildCommandString(String cmdStr, String tbl, Partition partition) {
+    String ret = cmdStr;
+
+    if (tbl != null) {
+      String tblName    = (StringUtils.isNotEmpty(tbl) ? " " + tbl : "");
+      String partitionStr = (partition != null) ? partition.toString() : "";
+      ret                 = String.format(cmdStr, tblName, partitionStr);
+    }
+
+    return ret;
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/events/AlterTableEvent.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/events/AlterTableEvent.java
new file mode 100644
index 0000000..bb688d6
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/events/AlterTableEvent.java
@@ -0,0 +1,119 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.events;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.metastore.events.PreAlterTableEvent;
+import org.apache.hadoop.hive.metastore.events.PreEventContext;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.HiveMetaStoreAuthorizableEvent;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.HiveMetaStoreAuthzInfo;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/*
+ Authorizable Event for HiveMetaStore operation AlterTable
+ */
+
+public class AlterTableEvent extends HiveMetaStoreAuthorizableEvent {
+  private static final Log LOG = LogFactory.getLog(AlterTableEvent.class);
+
+  private String COMMAND_STR = "alter table";
+
+  public AlterTableEvent(PreEventContext preEventContext) {
+    super(preEventContext);
+  }
+
+  @Override
+  public HiveMetaStoreAuthzInfo getAuthzContext() {
+    HiveMetaStoreAuthzInfo ret = new HiveMetaStoreAuthzInfo(preEventContext, getOperationType(), getInputHObjs(), getOutputHObjs(), COMMAND_STR);
+
+    return ret;
+  }
+
+  private HiveOperationType getOperationType() {
+    PreAlterTableEvent event    = (PreAlterTableEvent) preEventContext;
+    Table              table    = event.getNewTable();
+    Table              oldTable = event.getOldTable();
+    String             newUri   = (table != null) ? getSdLocation(table.getSd()) : "";
+    String             oldUri   = (oldTable != null) ? getSdLocation(oldTable.getSd()) : "";
+
+    return StringUtils.equals(oldUri, newUri) ? HiveOperationType.ALTERTABLE_ADDCOLS : HiveOperationType.ALTERTABLE_LOCATION;
+  }
+
+  private List<HivePrivilegeObject> getInputHObjs() {
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("==> AlterTableEvent.getInputHObjs()");
+    }
+
+    List<HivePrivilegeObject> ret      = new ArrayList<>();
+    PreAlterTableEvent        event    = (PreAlterTableEvent) preEventContext;
+    Table                     oldTable = event.getOldTable();
+
+    ret.add(getHivePrivilegeObject(oldTable));
+
+    COMMAND_STR = buildCommandString(COMMAND_STR, oldTable);
+
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("<== AlterTableEvent.getInputHObjs(): ret=" + ret);
+    }
+
+    return ret;
+  }
+
+  private List<HivePrivilegeObject> getOutputHObjs() {
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("==> AlterTableEvent.getOutputHObjs()");
+    }
+
+    List<HivePrivilegeObject> ret      = new ArrayList<>();
+    PreAlterTableEvent        event    = (PreAlterTableEvent) preEventContext;
+    Table                     newTable = event.getNewTable();
+
+    ret.add(getHivePrivilegeObject(newTable));
+    Table oldTable = event.getOldTable();
+    String oldUri   = (oldTable != null) ? getSdLocation(oldTable.getSd()) : "";
+    String newUri   = getSdLocation(newTable.getSd());
+
+    if (!StringUtils.equals(oldUri, newUri)) {
+      ret.add(getHivePrivilegeObjectDfsUri(newUri));
+    }
+
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("<== AlterTableEvent.getOutputHObjs(): ret=" + ret);
+    }
+
+    return ret;
+  }
+
+  private String buildCommandString(String cmdStr, Table tbl) {
+    String ret = cmdStr;
+    if (tbl != null) {
+      String tblName = tbl.getTableName();
+      ret            = ret + (StringUtils.isNotEmpty(tblName) ? " " + tblName : "");
+    }
+    return ret;
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/events/CreateDatabaseEvent.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/events/CreateDatabaseEvent.java
new file mode 100644
index 0000000..969740c
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/events/CreateDatabaseEvent.java
@@ -0,0 +1,97 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.events;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.events.PreCreateDatabaseEvent;
+import org.apache.hadoop.hive.metastore.events.PreEventContext;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.HiveMetaStoreAuthorizableEvent;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.HiveMetaStoreAuthzInfo;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+/*
+ Authorizable Event for HiveMetaStore operation CreateDatabase
+ */
+
+public class CreateDatabaseEvent extends HiveMetaStoreAuthorizableEvent {
+  private static final Log LOG = LogFactory.getLog(CreateDatabaseEvent.class);
+
+  private String COMMAND_STR = "create database";
+
+  public CreateDatabaseEvent(PreEventContext preEventContext) {
+    super(preEventContext);
+  }
+
+  @Override
+  public HiveMetaStoreAuthzInfo getAuthzContext() {
+    HiveMetaStoreAuthzInfo ret = new HiveMetaStoreAuthzInfo(preEventContext, HiveOperationType.CREATEDATABASE, getInputHObjs(), getOutputHObjs(), COMMAND_STR);
+
+    return ret;
+  }
+
+  private List<HivePrivilegeObject> getInputHObjs() { return Collections.emptyList(); }
+
+  private List<HivePrivilegeObject> getOutputHObjs() {
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("==> CreateDatabaseEvent.getOutputHObjs()");
+    }
+
+    List<HivePrivilegeObject> ret      = new ArrayList<>();
+    PreCreateDatabaseEvent    event    = (PreCreateDatabaseEvent) preEventContext;
+    Database                  database = event.getDatabase();
+    String                    uri      = (database != null) ? database.getLocationUri() : "";
+
+    if (database != null) {
+      ret.add(getHivePrivilegeObject(database));
+      if (StringUtils.isNotEmpty(uri)) {
+        ret.add(getHivePrivilegeObjectDfsUri(uri));
+      }
+
+      COMMAND_STR = buildCommandString(COMMAND_STR, database);
+
+      if (LOG.isDebugEnabled()) {
+        LOG.debug("<== CreateDatabaseEvent.getOutputHObjs(): ret=" + ret);
+      }
+    }
+
+    return ret;
+  }
+
+  private String buildCommandString(String cmdStr, Database db) {
+    String ret = cmdStr;
+
+    if (db != null) {
+      String dbName = db.getName();
+      ret           = ret + (StringUtils.isNotEmpty(dbName) ? " " + dbName : "");
+    }
+
+    return ret;
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/events/CreateTableEvent.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/events/CreateTableEvent.java
new file mode 100644
index 0000000..cb5d617
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/events/CreateTableEvent.java
@@ -0,0 +1,95 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.events;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.metastore.events.PreCreateTableEvent;
+import org.apache.hadoop.hive.metastore.events.PreEventContext;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.HiveMetaStoreAuthorizableEvent;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.HiveMetaStoreAuthzInfo;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+/*
+ Authorizable Event for HiveMetaStore operation CreateTable
+ */
+
+public class CreateTableEvent extends HiveMetaStoreAuthorizableEvent {
+  private static final Log LOG = LogFactory.getLog(CreateTableEvent.class);
+
+  private String COMMAND_STR = "create table";
+
+
+  public CreateTableEvent(PreEventContext preEventContext) {
+      super(preEventContext);
+  }
+
+  @Override
+  public HiveMetaStoreAuthzInfo getAuthzContext() {
+    HiveMetaStoreAuthzInfo ret = new HiveMetaStoreAuthzInfo(preEventContext, HiveOperationType.CREATETABLE, getInputHObjs(), getOutputHObjs(), COMMAND_STR);
+
+    return ret;
+  }
+
+  private List<HivePrivilegeObject> getInputHObjs() { return Collections.emptyList(); }
+
+  private List<HivePrivilegeObject> getOutputHObjs() {
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("==> CreateTableEvent.getOutputHObjs()");
+    }
+
+    List<HivePrivilegeObject> ret   = new ArrayList<>();
+    PreCreateTableEvent       event = (PreCreateTableEvent) preEventContext;
+    Table                     table = event.getTable();
+    String                    uri   = getSdLocation(table.getSd());
+
+    ret.add(new HivePrivilegeObject(HivePrivilegeObjectType.DATABASE, table.getDbName(), null));
+    ret.add(getHivePrivilegeObject(table));
+
+    if (StringUtils.isNotEmpty(uri)) {
+      ret.add(new HivePrivilegeObject(HivePrivilegeObjectType.DFS_URI, null, uri));
+    }
+
+    COMMAND_STR = buildCommandString(COMMAND_STR, table);
+
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("<== CreateTableEvent.getOutputHObjs(): ret=" + ret);
+    }
+
+    return ret;
+  }
+
+  private String buildCommandString(String cmdStr, Table tbl) {
+    String ret = cmdStr;
+    if (tbl != null) {
+      String tblName = tbl.getTableName();
+      ret            = ret + (StringUtils.isNotEmpty(tblName) ? " " + tblName : "");
+    }
+    return ret;
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/events/DropDatabaseEvent.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/events/DropDatabaseEvent.java
new file mode 100644
index 0000000..c642378
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/events/DropDatabaseEvent.java
@@ -0,0 +1,90 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.events;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.events.PreDropDatabaseEvent;
+import org.apache.hadoop.hive.metastore.events.PreEventContext;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.HiveMetaStoreAuthorizableEvent;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.HiveMetaStoreAuthzInfo;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/*
+ Authorizable Event for HiveMetaStore operation DropDatabase
+ */
+
+public class DropDatabaseEvent extends HiveMetaStoreAuthorizableEvent {
+  private static final Log  LOG = LogFactory.getLog(DropDatabaseEvent.class);
+
+  private String COMMAND_STR = "drop database";
+
+  public DropDatabaseEvent(PreEventContext preEventContext) {
+    super(preEventContext);
+  }
+
+  @Override
+  public HiveMetaStoreAuthzInfo getAuthzContext() {
+    HiveMetaStoreAuthzInfo ret = new HiveMetaStoreAuthzInfo(preEventContext, HiveOperationType.DROPDATABASE, getInputHObjs(), getOutputHObjs(), COMMAND_STR);
+
+    return ret;
+  }
+
+  private List<HivePrivilegeObject> getInputHObjs() {
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("==> DropDatabaseEvent.getInputHObjs()");
+    }
+
+    List<HivePrivilegeObject> ret      = new ArrayList<>();
+    PreDropDatabaseEvent      event    = (PreDropDatabaseEvent) preEventContext;
+    Database                  database = event.getDatabase();
+
+    ret.add(getHivePrivilegeObject(database));
+
+    COMMAND_STR = buildCommandString(COMMAND_STR, database);
+
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("<== DropDatabaseEvent.getInputHObjs(): ret=" + ret);
+    }
+
+    return ret;
+  }
+
+  private List<HivePrivilegeObject> getOutputHObjs() {
+    return getInputHObjs(); // same as inputs
+  }
+
+  private String buildCommandString(String cmdStr, Database db) {
+    String ret = cmdStr;
+
+    if (db != null) {
+      String dbName = db.getName();
+      ret           = ret + (StringUtils.isNotEmpty(dbName) ? " " + dbName : "");
+    }
+
+    return ret;
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/events/DropPartitionEvent.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/events/DropPartitionEvent.java
new file mode 100644
index 0000000..c249887
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/events/DropPartitionEvent.java
@@ -0,0 +1,98 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.events;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.metastore.events.PreDropPartitionEvent;
+import org.apache.hadoop.hive.metastore.events.PreEventContext;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.HiveMetaStoreAuthorizableEvent;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.HiveMetaStoreAuthzInfo;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/*
+ Authorizable Event for HiveMetaStore operation DropPartition
+ */
+
+public class DropPartitionEvent extends HiveMetaStoreAuthorizableEvent {
+  private static final Log LOG = LogFactory.getLog(DropPartitionEvent.class);
+
+  private String COMMAND_STR = "alter table %s drop partition %s";
+
+  public DropPartitionEvent(PreEventContext preEventContext) {
+    super(preEventContext);
+  }
+
+  @Override
+  public HiveMetaStoreAuthzInfo getAuthzContext() {
+    HiveMetaStoreAuthzInfo ret = new HiveMetaStoreAuthzInfo(preEventContext, HiveOperationType.ALTERTABLE_DROPPARTS, getInputHObjs(), getOutputHObjs(), COMMAND_STR);
+
+    return ret;
+  }
+
+  private List<HivePrivilegeObject> getInputHObjs() {
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("==> DropPartitionEvent.getInputHObjs()");
+    }
+
+    List<HivePrivilegeObject> ret   = new ArrayList<>();
+    PreDropPartitionEvent     event = (PreDropPartitionEvent) preEventContext;
+    Table                     table = event.getTable();
+
+    ret.add(getHivePrivilegeObject(table));
+
+    COMMAND_STR = buildCommandString(COMMAND_STR, table);
+
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("<== DropPartitionEvent.getInputHObjs(): ret=" + ret);
+    }
+
+    return ret;
+  }
+
+  private List<HivePrivilegeObject> getOutputHObjs() {
+    return getInputHObjs(); // same as inputs
+  }
+
+  private String buildCommandString(String cmdStr, Table tbl) {
+    String ret = cmdStr;
+
+    if (tbl != null) {
+      // use the bare table name; the template already provides the spacing
+      String tblName = StringUtils.isNotEmpty(tbl.getTableName()) ? tbl.getTableName() : "";
+
+      StringBuilder partitions = new StringBuilder();
+      List<FieldSchema> fieldSchemas = tbl.getPartitionKeys();
+      for (FieldSchema fieldSchema : fieldSchemas) {
+        if (partitions.length() > 0) {
+          partitions.append(" ");
+        }
+        partitions.append(fieldSchema.getName());
+      }
+
+      ret = String.format(cmdStr, tblName, partitions.toString());
+    }
+    return ret;
+  }
+}
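
The command string above is only an audit-friendly rendering of the operation, not executable SQL. A quick illustration, assuming a hypothetical table tmptbl partitioned by a single dt key:

    // Sketch only: how buildCommandString() renders the template above.
    String template = "alter table %s drop partition %s";
    String rendered = String.format(template, "tmptbl", "dt");
    // rendered -> "alter table tmptbl drop partition dt"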
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/events/DropTableEvent.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/events/DropTableEvent.java
new file mode 100644
index 0000000..393a0ce
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/events/DropTableEvent.java
@@ -0,0 +1,86 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.events;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.metastore.events.PreDropTableEvent;
+import org.apache.hadoop.hive.metastore.events.PreEventContext;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.HiveMetaStoreAuthorizableEvent;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.HiveMetaStoreAuthzInfo;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+/*
+ Authorizable Event for HiveMetaStore operation DropTable
+ */
+
+public class DropTableEvent extends HiveMetaStoreAuthorizableEvent {
+  private static final Log LOG = LogFactory.getLog(DropTableEvent.class);
+
+  private String COMMAND_STR = "Drop table";
+
+  public DropTableEvent(PreEventContext preEventContext) {
+    super(preEventContext);
+  }
+
+  @Override
+  public HiveMetaStoreAuthzInfo getAuthzContext() {
+    HiveMetaStoreAuthzInfo ret = new HiveMetaStoreAuthzInfo(preEventContext, HiveOperationType.DROPTABLE, getInputHObjs(), getOutputHObjs(), COMMAND_STR);
+
+    return ret;
+  }
+
+  private List<HivePrivilegeObject> getInputHObjs() {
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("==> DropTableEvent.getInputHObjs()");
+    }
+
+    List<HivePrivilegeObject> ret   = new ArrayList<>();
+    PreDropTableEvent         event = (PreDropTableEvent) preEventContext;
+    Table                     table = event.getTable();
+    ret.add(getHivePrivilegeObject(table));
+
+    COMMAND_STR = buildCommandString(COMMAND_STR, table);
+
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("<== DropTableEvent.getInputHObjs(): ret=" + ret);
+    }
+
+    return ret;
+  }
+
+  private List<HivePrivilegeObject> getOutputHObjs() { return Collections.emptyList(); }
+
+  private String buildCommandString(String cmdStr, Table tbl) {
+    String ret = cmdStr;
+    if (tbl != null) {
+      String tblName = tbl.getTableName();
+      ret            = ret + (StringUtils.isNotEmpty(tblName) ? " " + tblName : "");
+    }
+    return ret;
+  }
+}
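
Note the convention so far: DropDatabaseEvent reuses its inputs as outputs, while DropTableEvent reports empty outputs. Either way, the two lists ultimately feed the standard authorizer entry point, whose signature appears in DummyHiveAuthorizer later in this patch. A hedged sketch (the authorizer and authzContext variables are placeholders):

    // Sketch only: the privilege objects computed above reach the plugged-in
    // HiveAuthorizer through checkPrivileges(), which throws on denial.
    List<HivePrivilegeObject> inputs = new ArrayList<>();
    inputs.add(new HivePrivilegeObject(HivePrivilegeObjectType.TABLE_OR_VIEW, "test", "tmptbl"));
    authorizer.checkPrivileges(HiveOperationType.DROPTABLE,
        inputs, Collections.emptyList(), authzContext);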
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/events/LoadPartitionDoneEvent.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/events/LoadPartitionDoneEvent.java
new file mode 100644
index 0000000..aded0e9
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/events/LoadPartitionDoneEvent.java
@@ -0,0 +1,74 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.events;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hive.metastore.events.PreEventContext;
+import org.apache.hadoop.hive.metastore.events.PreLoadPartitionDoneEvent;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.HiveMetaStoreAuthorizableEvent;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.metastore.HiveMetaStoreAuthzInfo;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+/*
+ Authorizable Event for HiveMetaStore operation LoadPartitionDone
+ */
+
+public class LoadPartitionDoneEvent extends HiveMetaStoreAuthorizableEvent {
+  private static final Log LOG = LogFactory.getLog(LoadPartitionDoneEvent.class);
+
+  private String COMMAND_STR = "alter table load partition";
+
+  public LoadPartitionDoneEvent(PreEventContext preEventContext) {
+    super(preEventContext);
+  }
+
+  @Override
+  public HiveMetaStoreAuthzInfo getAuthzContext() {
+    HiveMetaStoreAuthzInfo ret = new HiveMetaStoreAuthzInfo(preEventContext, HiveOperationType.ALTERTABLE_ADDPARTS, getInputHObjs(), getOutputHObjs(), COMMAND_STR);
+
+    return ret;
+  }
+
+  private List<HivePrivilegeObject> getInputHObjs() { return Collections.emptyList(); }
+
+  private List<HivePrivilegeObject> getOutputHObjs() {
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("==> LoadPartitionDoneEvent.getOutputHObjs()");
+    }
+
+    List<HivePrivilegeObject> ret   = new ArrayList<>();
+    PreLoadPartitionDoneEvent event = (PreLoadPartitionDoneEvent) preEventContext;
+
+    ret.add(new HivePrivilegeObject(HivePrivilegeObjectType.TABLE_OR_VIEW, event.getDbName(), event.getTableName()));
+
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("<== LoadPartitionDoneEvent.getOutputHObjs(): ret=" + ret);
+    }
+
+    return ret;
+  }
+}
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/DummyHiveAuthorizer.java b/ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/DummyHiveAuthorizer.java
new file mode 100644
index 0000000..a46f7dd
--- /dev/null
+++ b/ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/DummyHiveAuthorizer.java
@@ -0,0 +1,72 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.hadoop.hive.ql.security.authorization.plugin.metastore;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAccessControlException;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzSessionContext;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.fallback.FallbackHiveAuthorizer;
+import org.apache.hadoop.security.UserGroupInformation;
+
+import java.util.Arrays;
+import java.util.List;
+
+/**
+ * Test HiveAuthorizer that exercises the checkPrivileges method on every
+ * authorization call. Only the users sam and rob are authorized.
+ */
+public class DummyHiveAuthorizer extends FallbackHiveAuthorizer {
+
+  static final List<String> allowedUsers = Arrays.asList("sam", "rob");
+
+  DummyHiveAuthorizer(HiveConf hiveConf, HiveAuthenticationProvider hiveAuthenticator,
+                      HiveAuthzSessionContext ctx) {
+    super(hiveConf, hiveAuthenticator, ctx);
+  }
+
+  @Override
+  public void checkPrivileges(HiveOperationType hiveOpType, List<HivePrivilegeObject> inputHObjs,
+                              List<HivePrivilegeObject> outputHObjs, HiveAuthzContext context)
+          throws HiveAuthzPluginException, HiveAccessControlException {
+
+    String user = null;
+    try {
+      user = UserGroupInformation.getLoginUser().getShortUserName();
+    } catch (Exception e) {
+      throw new HiveAuthzPluginException("Unable to get UserGroupInformation: " + e.getMessage());
+    }
+
+    if (!isOperationAllowed(user)) {
+      String errorMessage = "Operation type " + hiveOpType + " not allowed for user:" + user;
+      throw new HiveAuthzPluginException(errorMessage);
+    }
+  }
+
+  private boolean isOperationAllowed(String user) {
+    return allowedUsers.contains(user);
+  }
+
+}
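
In use, DummyHiveAuthorizer simply allow-lists by login user. A short sketch of the expected behavior (the authorizer variable stands for an instance produced by the factory below; a null HiveAuthzContext is acceptable here because this implementation ignores it):

    // Sketch only: "sam" is on the allow-list, so this returns normally;
    // with "bob" as the login user the same call throws HiveAuthzPluginException.
    UserGroupInformation.setLoginUser(UserGroupInformation.createRemoteUser("sam"));
    authorizer.checkPrivileges(HiveOperationType.CREATEDATABASE,
        Collections.emptyList(), Collections.emptyList(), null);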
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/DummyHiveAuthorizerFactory.java b/ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/DummyHiveAuthorizerFactory.java
new file mode 100644
index 0000000..a09d5f4
--- /dev/null
+++ b/ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/DummyHiveAuthorizerFactory.java
@@ -0,0 +1,40 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.security.authorization.plugin.metastore;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizerFactory;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzSessionContext;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveMetastoreClientFactory;
+
+/*
+ Test factory that supplies DummyHiveAuthorizer instances.
+ */
+
+public class DummyHiveAuthorizerFactory implements HiveAuthorizerFactory {
+  @Override
+  public HiveAuthorizer createHiveAuthorizer(HiveMetastoreClientFactory metastoreClientFactory,
+                                             HiveConf conf, HiveAuthenticationProvider hiveAuthenticator,
+                                             HiveAuthzSessionContext ctx) throws HiveAuthzPluginException {
+    return new DummyHiveAuthorizer(conf, hiveAuthenticator, ctx);
+  }
+}
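
Wiring the factory into a metastore takes two settings: the authorization manager (this factory) and the pre-event listener (HiveMetaStoreAuthorizer). A minimal sketch, mirroring the setUp() of the test that follows; the same keys can equally be set in the metastore site configuration:

    // Sketch only: programmatic wiring of metastore-side authorization.
    Configuration conf = MetastoreConf.newMetastoreConf();
    MetastoreConf.setVar(conf, ConfVars.HIVE_AUTHORIZATION_MANAGER,
        DummyHiveAuthorizerFactory.class.getName());
    MetastoreConf.setVar(conf, ConfVars.PRE_EVENT_LISTENERS,
        HiveMetaStoreAuthorizer.class.getName());
    MetastoreConf.setVar(conf, ConfVars.HIVE_METASTORE_AUTHENTICATOR_MANAGER,
        HadoopDefaultMetastoreAuthenticator.class.getName());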
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/TestHiveMetaStoreAuthorizer.java b/ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/TestHiveMetaStoreAuthorizer.java
new file mode 100644
index 0000000..9bbc70e
--- /dev/null
+++ b/ql/src/test/org/apache/hadoop/hive/ql/security/authorization/plugin/metastore/TestHiveMetaStoreAuthorizer.java
@@ -0,0 +1,266 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.security.authorization.plugin.metastore;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.common.FileUtils;
+import org.apache.hadoop.hive.metastore.*;
+import org.apache.hadoop.hive.metastore.api.*;
+import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
+import org.apache.hadoop.hive.metastore.conf.MetastoreConf.ConfVars;
+import org.apache.hadoop.hive.metastore.client.builder.*;
+import org.apache.hadoop.hive.metastore.events.*;
+import org.apache.hadoop.hive.ql.security.HadoopDefaultMetastoreAuthenticator;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.junit.FixMethodOrder;
+import org.junit.runners.MethodSorters;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.io.File;
+
+import static org.junit.Assert.assertEquals;
+
+/*
+ Tests that the configured HiveAuthorizer is triggered for MetaStore operations
+ and that HiveMetaStoreAuthorizer builds the expected HiveMetaStoreAuthzInfo.
+ */
+@FixMethodOrder(MethodSorters.NAME_ASCENDING)
+public class TestHiveMetaStoreAuthorizer {
+  private static final String dbName           = "test";
+  private static final String tblName          = "tmptbl";
+  private static final String viewName         = "tmpview";
+  private static final String roleName         = "tmpRole";
+  private static final String catalogName      = "testCatalog";
+  private static final String unAuthorizedUser = "bob";
+  private static final String authorizedUser   = "sam";
+  private static final String superUser        = "hive";
+
+  private static final String metaConfVal = "";
+
+  private static final String TEST_DATA_DIR = new File("file:///testdata").getPath();
+  private RawStore rawStore;
+  private Configuration conf;
+  private HiveMetaStore.HMSHandler hmsHandler;
+
+  @Before
+  public void setUp() throws Exception {
+    conf = MetastoreConf.newMetastoreConf();
+    MetastoreConf.setBoolVar(conf, ConfVars.HIVE_IN_TEST, true);
+    MetastoreConf.setBoolVar(conf, ConfVars.HIVE_TXN_STATS_ENABLED, true);
+    MetastoreConf.setBoolVar(conf, ConfVars.AGGREGATE_STATS_CACHE_ENABLED, false);
+    MetastoreConf.setVar(conf, ConfVars.PARTITION_NAME_WHITELIST_PATTERN, metaConfVal);
+    MetastoreConf.setLongVar(conf, ConfVars.THRIFT_CONNECTION_RETRIES, 3);
+    MetastoreConf.setBoolVar(conf, ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
+    MetastoreConf.setVar(conf, ConfVars.HIVE_AUTHORIZATION_MANAGER, DummyHiveAuthorizerFactory.class.getName());
+    MetastoreConf.setVar(conf, ConfVars.PRE_EVENT_LISTENERS, HiveMetaStoreAuthorizer.class.getName());
+    MetastoreConf.setVar(conf, ConfVars.HIVE_METASTORE_AUTHENTICATOR_MANAGER, HadoopDefaultMetastoreAuthenticator.class.getName());
+    conf.set("hadoop.proxyuser.hive.groups", "*");
+    conf.set("hadoop.proxyuser.hive.hosts", "*");
+    conf.set("hadoop.proxyuser.hive.users", "*");
+
+    MetaStoreTestUtils.setConfForStandloneMode(conf);
+
+    hmsHandler = new HiveMetaStore.HMSHandler("test", conf, true);
+    rawStore   = new ObjectStore();
+    rawStore.setConf(hmsHandler.getConf());
+    // Create the 'hive' catalog with new warehouse directory
+    HiveMetaStore.HMSHandler.createDefaultCatalog(rawStore, new Warehouse(conf));
+    try {
+      hmsHandler.drop_table(dbName, tblName, true);
+      hmsHandler.drop_database(dbName, true, false);
+      hmsHandler.drop_catalog(new DropCatalogRequest(catalogName));
+      FileUtils.deleteDirectory(new File(TEST_DATA_DIR));
+    } catch (Exception e) {
+      // NoSuchObjectException is ignored when the objects to drop do not exist yet
+    }
+  }
+
+  @Test
+  public void testA_CreateDatabase_unAuthorizedUser() throws Exception {
+    UserGroupInformation.setLoginUser(UserGroupInformation.createRemoteUser(unAuthorizedUser));
+    try {
+      Database db = new DatabaseBuilder()
+              .setName(dbName)
+              .build(conf);
+      hmsHandler.create_database(db);
+    } catch (Exception e) {
+      String err = e.getMessage();
+      String expected = "Operation type " + HiveOperationType.CREATEDATABASE+ 
" not allowed for user:" + unAuthorizedUser;
+      assertEquals(expected, err);
+    }
+  }
+
+  @Test
+  public void testB_CreateTable_unAuthorizedUser() throws Exception {
+    UserGroupInformation.setLoginUser(UserGroupInformation.createRemoteUser(unAuthorizedUser));
+    try {
+      Table table = new TableBuilder()
+              .setTableName(tblName)
+              .addCol("name", ColumnType.STRING_TYPE_NAME)
+              .setOwner(unAuthorizedUser)
+              .build(conf);
+      hmsHandler.create_table(table);
+    } catch (Exception e) {
+      String err = e.getMessage();
+      String expected = "Operation type " + HiveOperationType.CREATETABLE + " not allowed for user:" + unAuthorizedUser;
+      assertEquals(expected, err);
+    }
+  }
+
+  @Test
+  public void testC_CreateView_anyUser() throws Exception {
+    UserGroupInformation.setLoginUser(UserGroupInformation.createRemoteUser(authorizedUser));
+    try {
+      Table viewObj = new TableBuilder()
+              .setTableName(viewName)
+              .setType(TableType.VIRTUAL_VIEW.name())
+              .addCol("name", ColumnType.STRING_TYPE_NAME)
+              .setOwner(authorizedUser)
+              .build(conf);
+      hmsHandler.create_table(viewObj);
+    } catch (Exception e) {
+      String err = e.getMessage();
+      String expected = "Operation type CREATE_VIEW not allowed for user:" + authorizedUser;
+      assertEquals(expected, err);
+    }
+  }
+
+  @Test
+  public void testD_CreateView_SuperUser() throws Exception {
+    UserGroupInformation.setLoginUser(UserGroupInformation.createRemoteUser(superUser));
+    try {
+      Table viewObj = new TableBuilder()
+              .setTableName(viewName)
+              .setType(TableType.VIRTUAL_VIEW.name())
+              .addCol("name", ColumnType.STRING_TYPE_NAME)
+              .build(conf);
+      hmsHandler.create_table(viewObj);
+    } catch (Exception e) {
+      // no exception expected: the superuser "hive" is allowed the CREATE_VIEW operation
+    }
+  }
+
+  @Test
+  public void testE_CreateRole_anyUser() throws Exception {
+    UserGroupInformation.setLoginUser(UserGroupInformation.createRemoteUser(authorizedUser));
+    try {
+      Role role = new RoleBuilder()
+              .setRoleName(roleName)
+              .setOwnerName(authorizedUser)
+              .build();
+      hmsHandler.create_role(role);
+    } catch (Exception e) {
+      String err = e.getMessage();
+      String expected = "Operation type " + PreEventContext.PreEventType.AUTHORIZATION_API_CALL.name() + " not allowed for user:" + authorizedUser;
+      assertEquals(expected, err);
+    }
+  }
+
+  @Test
+  public void testF_CreateCatalog_anyUser() throws Exception {
+    UserGroupInformation.setLoginUser(UserGroupInformation.createRemoteUser(authorizedUser));
+    try {
+      Catalog catalog = new CatalogBuilder()
+              .setName(catalogName)
+              .setLocation(TEST_DATA_DIR)
+              .build();
+      hmsHandler.create_catalog(new CreateCatalogRequest(catalog));
+    } catch (Exception e) {
+      String err = e.getMessage();
+      String expected = "Operation type " + PreEventContext.PreEventType.CREATE_CATALOG.name() + " not allowed for user:" + authorizedUser;
+      assertEquals(expected, err);
+    }
+  }
+
+  @Test
+  public void testG_CreateCatalog_SuperUser() throws Exception {
+    UserGroupInformation.setLoginUser(UserGroupInformation.createRemoteUser(superUser));
+    try {
+      Catalog catalog = new CatalogBuilder()
+              .setName(catalogName)
+              .setLocation(TEST_DATA_DIR)
+              .build();
+      hmsHandler.create_catalog(new CreateCatalogRequest(catalog));
+    } catch (Exception e) {
+      // no exception expected: the superuser "hive" is allowed the CREATE_CATALOG operation
+    }
+  }
+
+  @Test
+  public void testH_CreateDatabase_authorizedUser() throws Exception {
+    UserGroupInformation.setLoginUser(UserGroupInformation.createRemoteUser(authorizedUser));
+    try {
+      Database db = new DatabaseBuilder()
+              .setName(dbName)
+              .build(conf);
+      hmsHandler.create_database(db);
+    } catch (Exception e) {
+      // No Exception for create database for authorized user
+    }
+  }
+
+  @Test
+  public void testI_CreateTable_authorizedUser() throws Exception {
+    UserGroupInformation.setLoginUser(UserGroupInformation.createRemoteUser(authorizedUser));
+    try {
+      Table table = new TableBuilder()
+              .setTableName(tblName)
+              .addCol("name", ColumnType.STRING_TYPE_NAME)
+              .setOwner(authorizedUser)
+              .build(conf);
+      hmsHandler.create_table(table);
+    } catch (Exception e) {
+      // No Exception for create table for authorized user
+    }
+  }
+
+  @Test
+  public void testJ_DropTable_authorizedUser() throws Exception {
+    UserGroupInformation.setLoginUser(UserGroupInformation.createRemoteUser(authorizedUser));
+    try {
+      hmsHandler.drop_table(dbName, tblName, true);
+    } catch (Exception e) {
+      // no exception expected for drop table by an authorized user
+    }
+  }
+
+  @Test
+  public void testK_DropDatabase_authorizedUser() throws Exception {
+    UserGroupInformation.setLoginUser(UserGroupInformation.createRemoteUser(authorizedUser));
+    try {
+      hmsHandler.drop_database(dbName, true, true);
+    } catch (Exception e) {
+      // No Exception for dropDatabase for authorized user
+    }
+  }
+
+  @Test
+  public void testL_DropCatalog_SuperUser() throws Exception {
+    UserGroupInformation.setLoginUser(UserGroupInformation.createRemoteUser(superUser));
+    try {
+      hmsHandler.drop_catalog(new DropCatalogRequest(catalogName));
+    } catch (Exception e) {
+      // no exception expected: the superuser "hive" is allowed the DROP_CATALOG operation
+    }
+  }
+}
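
The suite relies on @FixMethodOrder(MethodSorters.NAME_ASCENDING) so the create and drop cases run in a workable order; it should be runnable on its own with the usual surefire selector, e.g. mvn test -Dtest=TestHiveMetaStoreAuthorizer from the ql module.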
