Author: thejas Date: Wed Feb 18 01:42:47 2015 New Revision: 1660556 URL: http://svn.apache.org/r1660556 Log: HIVE-9350 : Add ability for HiveAuthorizer implementations to filter out results of 'show tables', 'show databases' (Thejas Nair, reviewed by Jason Dere)
Added: hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerShowFilters.java hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/AuthorizationMetaStoreFilterHook.java Modified: hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestFilterHooks.java hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/DefaultMetaStoreFilterHookImpl.java hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreFilterHook.java hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAccessControlException.java hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizationValidator.java hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizer.java hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizerImpl.java hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveV1Authorizer.java hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/DummyHiveAuthorizationValidator.java hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java hive/trunk/service/src/java/org/apache/hive/service/cli/CLIService.java Modified: hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java URL: http://svn.apache.org/viewvc/hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java?rev=1660556&r1=1660555&r2=1660556&view=diff ============================================================================== --- hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (original) +++ hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java 
Wed Feb 18 01:42:47 2015 @@ -149,6 +149,7 @@ public class HiveConf extends Configurat HiveConf.ConfVars.METASTORE_EVENT_LISTENERS, HiveConf.ConfVars.METASTORE_EVENT_CLEAN_FREQ, HiveConf.ConfVars.METASTORE_EVENT_EXPIRY_DURATION, + HiveConf.ConfVars.METASTORE_FILTER_HOOK, HiveConf.ConfVars.METASTORE_RAW_STORE_IMPL, HiveConf.ConfVars.METASTORE_END_FUNCTION_LISTENERS, HiveConf.ConfVars.METASTORE_PART_INHERIT_TBL_PROPS, @@ -592,7 +593,8 @@ public class HiveConf extends Configurat "List of comma separated keys occurring in table properties which will get inherited to newly created partitions. \n" + "* implies all the keys will get inherited."), METASTORE_FILTER_HOOK("hive.metastore.filter.hook", "org.apache.hadoop.hive.metastore.DefaultMetaStoreFilterHookImpl", - "Metastore hook class for filtering the metadata read results"), + "Metastore hook class for filtering the metadata read results. If hive.security.authorization.manager" + + " is set to instance of HiveAuthorizerFactory, then this value is ignored."), FIRE_EVENTS_FOR_DML("hive.metastore.dml.events", false, "If true, the metastore will be asked" + " to fire events for DML operations"), Modified: hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestFilterHooks.java URL: http://svn.apache.org/viewvc/hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestFilterHooks.java?rev=1660556&r1=1660555&r2=1660556&view=diff ============================================================================== --- hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestFilterHooks.java (original) +++ hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestFilterHooks.java Wed Feb 18 01:42:47 2015 @@ -17,19 +17,19 @@ */ package org.apache.hadoop.hive.metastore; -import static org.junit.Assert.*; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.fail; import
java.util.ArrayList; import java.util.List; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.hive.cli.CliSessionState; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.metastore.api.Index; +import org.apache.hadoop.hive.metastore.api.MetaException; import org.apache.hadoop.hive.metastore.api.NoSuchObjectException; import org.apache.hadoop.hive.metastore.api.Partition; import org.apache.hadoop.hive.metastore.api.PartitionSpec; @@ -53,7 +53,7 @@ public class TestFilterHooks { } @Override - public List<String> filterDatabases(List<String> dbList) { + public List<String> filterDatabases(List<String> dbList) throws MetaException { if (blockResults) { return new ArrayList<String>(); } @@ -69,7 +69,7 @@ public class TestFilterHooks { } @Override - public List<String> filterTableNames(String dbName, List<String> tableList) { + public List<String> filterTableNames(String dbName, List<String> tableList) throws MetaException { if (blockResults) { return new ArrayList<String>(); } @@ -85,7 +85,7 @@ public class TestFilterHooks { } @Override - public List<Table> filterTables(List<Table> tableList) { + public List<Table> filterTables(List<Table> tableList) throws MetaException { if (blockResults) { return new ArrayList<Table>(); } @@ -93,7 +93,7 @@ public class TestFilterHooks { } @Override - public List<Partition> filterPartitions(List<Partition> partitionList) { + public List<Partition> filterPartitions(List<Partition> partitionList) throws MetaException { if (blockResults) { return new ArrayList<Partition>(); } @@ -102,7 +102,7 @@ public class TestFilterHooks { @Override public List<PartitionSpec> filterPartitionSpecs( - List<PartitionSpec> partitionSpecList) { + List<PartitionSpec> partitionSpecList) throws MetaException { if 
(blockResults) { return new ArrayList<PartitionSpec>(); } @@ -119,7 +119,7 @@ public class TestFilterHooks { @Override public List<String> filterPartitionNames(String dbName, String tblName, - List<String> partitionNames) { + List<String> partitionNames) throws MetaException { if (blockResults) { return new ArrayList<String>(); } @@ -136,7 +136,7 @@ public class TestFilterHooks { @Override public List<String> filterIndexNames(String dbName, String tblName, - List<String> indexList) { + List<String> indexList) throws MetaException { if (blockResults) { return new ArrayList<String>(); } @@ -144,7 +144,7 @@ public class TestFilterHooks { } @Override - public List<Index> filterIndexes(List<Index> indexeList) { + public List<Index> filterIndexes(List<Index> indexeList) throws MetaException { if (blockResults) { return new ArrayList<Index>(); } Added: hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerShowFilters.java URL: http://svn.apache.org/viewvc/hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerShowFilters.java?rev=1660556&view=auto ============================================================================== --- hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerShowFilters.java (added) +++ hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/security/authorization/plugin/TestHiveAuthorizerShowFilters.java Wed Feb 18 01:42:47 2015 @@ -0,0 +1,275 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.hive.ql.security.authorization.plugin; + +import static org.junit.Assert.assertEquals; +import static org.mockito.Matchers.any; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; + +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.conf.HiveConf.ConfVars; +import org.apache.hadoop.hive.ql.CommandNeedRetryException; +import org.apache.hadoop.hive.ql.Driver; +import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse; +import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider; +import org.apache.hadoop.hive.ql.security.SessionStateUserAuthenticator; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType; +import org.apache.hadoop.hive.ql.session.SessionState; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; +import org.mockito.Mockito; + +/** + * Test HiveAuthorizer api invocation + */ +public class TestHiveAuthorizerShowFilters { + protected static HiveConf conf; + protected static Driver driver; + private static final String tableName1 = (TestHiveAuthorizerShowFilters.class.getSimpleName() + "table1") + .toLowerCase(); + private static final String tableName2 = (TestHiveAuthorizerShowFilters.class.getSimpleName() + "table2") + .toLowerCase(); + private static final String dbName1 = (TestHiveAuthorizerShowFilters.class.getSimpleName() + "db1") + .toLowerCase(); + private 
static final String dbName2 = (TestHiveAuthorizerShowFilters.class.getSimpleName() + "db2") + .toLowerCase(); + + static HiveAuthorizer mockedAuthorizer; + + static final List<String> AllTables = getSortedList(tableName1, tableName2); + static final List<String> AllDbs = getSortedList("default", dbName1, dbName2); + + private static List<HivePrivilegeObject> filterArguments = null; + private static List<HivePrivilegeObject> filteredResults = new ArrayList<HivePrivilegeObject>(); + + /** + * This factory creates a mocked HiveAuthorizer class. The mocked class is + * used to capture the argument passed to HiveAuthorizer.filterListCmdObjects. + * It returns the filteredResults object for call to + * HiveAuthorizer.filterListCmdObjects, and stores the list argument in + * filterArguments + */ + static class MockedHiveAuthorizerFactory implements HiveAuthorizerFactory { + @Override + public HiveAuthorizer createHiveAuthorizer(HiveMetastoreClientFactory metastoreClientFactory, + HiveConf conf, HiveAuthenticationProvider authenticator, HiveAuthzSessionContext ctx) { + Mockito.validateMockitoUsage(); + + abstract class AuthorizerWithFilterCmdImpl implements HiveAuthorizer { + @Override + public List<HivePrivilegeObject> filterListCmdObjects(List<HivePrivilegeObject> listObjs, + HiveAuthzContext context) throws HiveAuthzPluginException, HiveAccessControlException { + // capture arguments in static + filterArguments = listObjs; + // return static variable with results, if it is set to some set of + // values + // otherwise return the arguments + if (filteredResults.size() == 0) { + return filterArguments; + } + return filteredResults; + } + } + + mockedAuthorizer = Mockito.mock(AuthorizerWithFilterCmdImpl.class, Mockito.withSettings() + .verboseLogging()); + + try { + Mockito.when( + mockedAuthorizer.filterListCmdObjects((List<HivePrivilegeObject>) any(), + (HiveAuthzContext) any())).thenCallRealMethod(); + } catch (Exception e) { + org.junit.Assert.fail("Caught exception " + e);
+ } + return mockedAuthorizer; + } + + } + + @BeforeClass + public static void beforeTest() throws Exception { + conf = new HiveConf(); + + // Turn on mocked authorization + conf.setVar(ConfVars.HIVE_AUTHORIZATION_MANAGER, MockedHiveAuthorizerFactory.class.getName()); + conf.setVar(ConfVars.HIVE_AUTHENTICATOR_MANAGER, SessionStateUserAuthenticator.class.getName()); + conf.setBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED, true); + conf.setBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS, false); + conf.setBoolVar(ConfVars.HIVE_SUPPORT_CONCURRENCY, false); + + SessionState.start(conf); + driver = new Driver(conf); + runCmd("create table " + tableName1 + + " (i int, j int, k string) partitioned by (city string, date string) "); + runCmd("create table " + tableName2 + "(i int)"); + + runCmd("create database " + dbName1); + runCmd("create database " + dbName2); + + } + + @Before + public void setup() { + filterArguments = null; + filteredResults.clear(); + } + + @AfterClass + public static void afterTests() throws Exception { + // Drop the tables when we're done. 
This makes the test work inside an IDE + runCmd("drop table if exists " + tableName1); + runCmd("drop table if exists " + tableName2); + runCmd("drop database if exists " + dbName1); + runCmd("drop database if exists " + dbName2); + driver.close(); + } + + @Test + public void testShowDatabasesAll() throws HiveAuthzPluginException, HiveAccessControlException, + CommandNeedRetryException, IOException { + runShowDbTest(AllDbs); + } + + @Test + public void testShowDatabasesSelected() throws HiveAuthzPluginException, + HiveAccessControlException, CommandNeedRetryException, IOException { + setFilteredResults(HivePrivilegeObjectType.DATABASE, dbName2); + runShowDbTest(Arrays.asList(dbName2)); + } + + private void runShowDbTest(List<String> expectedDbList) throws HiveAuthzPluginException, + HiveAccessControlException, CommandNeedRetryException, IOException { + runCmd("show databases"); + verifyAllDb(); + assertEquals("filtered result check ", expectedDbList, getSortedResults()); + } + + @Test + public void testShowTablesAll() throws HiveAuthzPluginException, HiveAccessControlException, + CommandNeedRetryException, IOException { + runShowTablesTest(AllTables); + } + + @Test + public void testShowTablesSelected() throws HiveAuthzPluginException, HiveAccessControlException, + CommandNeedRetryException, IOException { + setFilteredResults(HivePrivilegeObjectType.TABLE_OR_VIEW, tableName2); + runShowTablesTest(Arrays.asList(tableName2)); + } + + private void runShowTablesTest(List<String> expectedTabs) throws IOException, + CommandNeedRetryException, HiveAuthzPluginException, HiveAccessControlException { + runCmd("show tables"); + verifyAllTables(); + assertEquals("filtered result check ", expectedTabs, getSortedResults()); + } + + private List<String> getSortedResults() throws IOException, CommandNeedRetryException { + List<String> res = new ArrayList<String>(); + // set results to be returned + driver.getResults(res); + Collections.sort(res); + return res; + } + + /** + * 
Verify that arguments to call to HiveAuthorizer.filterListCmdObjects are of + * type DATABASE and contain all databases. + * + * @throws HiveAccessControlException + * @throws HiveAuthzPluginException + */ + private void verifyAllDb() throws HiveAuthzPluginException, HiveAccessControlException { + List<HivePrivilegeObject> privObjs = filterArguments; + + // get the db names out + List<String> dbArgs = new ArrayList<String>(); + for (HivePrivilegeObject privObj : privObjs) { + assertEquals("Priv object type should be db", HivePrivilegeObjectType.DATABASE, + privObj.getType()); + dbArgs.add(privObj.getDbname()); + } + + // sort before comparing with expected results + Collections.sort(dbArgs); + assertEquals("All db should be passed as arguments", AllDbs, dbArgs); + } + + /** + * Verify that arguments to call to HiveAuthorizer.filterListCmdObjects are of + * type TABLE and contain all tables. + * + * @throws HiveAccessControlException + * @throws HiveAuthzPluginException + */ + private void verifyAllTables() throws HiveAuthzPluginException, HiveAccessControlException { + List<HivePrivilegeObject> privObjs = filterArguments; + + // get the table names out + List<String> tables = new ArrayList<String>(); + for (HivePrivilegeObject privObj : privObjs) { + assertEquals("Priv object type should be table", HivePrivilegeObjectType.TABLE_OR_VIEW, + privObj.getType()); + assertEquals("Database name", "default", privObj.getDbname()); + tables.add(privObj.getObjectName()); + } + + // sort before comparing with expected results + Collections.sort(tables); + assertEquals("All tables should be passed as arguments", AllTables, tables); + } + + private static void setFilteredResults(HivePrivilegeObjectType type, String...
objs) { + filteredResults.clear(); + for (String obj : objs) { + String dbname; + String tabname = null; + if (type == HivePrivilegeObjectType.DATABASE) { + dbname = obj; + } else { + dbname = "default"; + tabname = obj; + } + filteredResults.add(new HivePrivilegeObject(type, dbname, tabname)); + } + } + + private static void runCmd(String cmd) throws CommandNeedRetryException { + CommandProcessorResponse resp = driver.run(cmd); + assertEquals(0, resp.getResponseCode()); + } + + private static List<String> getSortedList(String... strings) { + return getSortedList(Arrays.asList(strings)); + } + + private static List<String> getSortedList(List<String> columns) { + List<String> sortedCols = new ArrayList<String>(columns); + Collections.sort(sortedCols); + return sortedCols; + } + +} Modified: hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/DefaultMetaStoreFilterHookImpl.java URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/DefaultMetaStoreFilterHookImpl.java?rev=1660556&r1=1660555&r2=1660556&view=diff ============================================================================== --- hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/DefaultMetaStoreFilterHookImpl.java (original) +++ hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/DefaultMetaStoreFilterHookImpl.java Wed Feb 18 01:42:47 2015 @@ -23,6 +23,7 @@ import java.util.List; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.metastore.api.Index; +import org.apache.hadoop.hive.metastore.api.MetaException; import org.apache.hadoop.hive.metastore.api.NoSuchObjectException; import org.apache.hadoop.hive.metastore.api.Partition; import org.apache.hadoop.hive.metastore.api.PartitionSpec; @@ -37,7 +38,7 @@ public class DefaultMetaStoreFilterHookI } @Override - public List<String> filterDatabases(List<String> dbList) { + public List<String> 
filterDatabases(List<String> dbList) throws MetaException { return dbList; } @@ -47,7 +48,7 @@ public class DefaultMetaStoreFilterHookI } @Override - public List<String> filterTableNames(String dbName, List<String> tableList) { + public List<String> filterTableNames(String dbName, List<String> tableList) throws MetaException { return tableList; } @@ -57,18 +58,18 @@ public class DefaultMetaStoreFilterHookI } @Override - public List<Table> filterTables(List<Table> tableList) { + public List<Table> filterTables(List<Table> tableList) throws MetaException { return tableList; } @Override - public List<Partition> filterPartitions(List<Partition> partitionList) { + public List<Partition> filterPartitions(List<Partition> partitionList) throws MetaException { return partitionList; } @Override public List<PartitionSpec> filterPartitionSpecs( - List<PartitionSpec> partitionSpecList) { + List<PartitionSpec> partitionSpecList) throws MetaException { return partitionSpecList; } @@ -79,7 +80,7 @@ public class DefaultMetaStoreFilterHookI @Override public List<String> filterPartitionNames(String dbName, String tblName, - List<String> partitionNames) { + List<String> partitionNames) throws MetaException { return partitionNames; } @@ -90,12 +91,12 @@ public class DefaultMetaStoreFilterHookI @Override public List<String> filterIndexNames(String dbName, String tblName, - List<String> indexList) { + List<String> indexList) throws MetaException { return indexList; } @Override - public List<Index> filterIndexes(List<Index> indexeList) { + public List<Index> filterIndexes(List<Index> indexeList) throws MetaException { return indexeList; } } Modified: hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreFilterHook.java URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreFilterHook.java?rev=1660556&r1=1660555&r2=1660556&view=diff ============================================================================== --- 
hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreFilterHook.java (original) +++ hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreFilterHook.java Wed Feb 18 01:42:47 2015 @@ -24,6 +24,7 @@ import org.apache.hadoop.hive.common.cla import org.apache.hadoop.hive.common.classification.InterfaceStability.Evolving; import org.apache.hadoop.hive.metastore.api.Database; import org.apache.hadoop.hive.metastore.api.Index; +import org.apache.hadoop.hive.metastore.api.MetaException; import org.apache.hadoop.hive.metastore.api.NoSuchObjectException; import org.apache.hadoop.hive.metastore.api.Partition; import org.apache.hadoop.hive.metastore.api.PartitionSpec; @@ -43,7 +44,7 @@ public interface MetaStoreFilterHook { * @param dbList * @return List of filtered Db names */ - public List<String> filterDatabases(List<String> dbList); + public List<String> filterDatabases(List<String> dbList) throws MetaException; /** * filter to given database object if applicable @@ -51,7 +52,7 @@ public interface MetaStoreFilterHook { * @return the same database if it's not filtered out * @throws NoSuchObjectException */ - public Database filterDatabase(Database dataBase) throws NoSuchObjectException; + public Database filterDatabase(Database dataBase) throws MetaException, NoSuchObjectException; /** * Filter given list of tables @@ -59,7 +60,7 @@ public interface MetaStoreFilterHook { * @param tableList * @returnList of filtered table names */ - public List<String> filterTableNames(String dbName, List<String> tableList); + public List<String> filterTableNames(String dbName, List<String> tableList) throws MetaException; /** * filter to given table object if applicable @@ -67,7 +68,7 @@ public interface MetaStoreFilterHook { * @return the same table if it's not filtered out * @throws NoSuchObjectException */ - public Table filterTable(Table table) throws NoSuchObjectException; + public Table filterTable(Table table) throws MetaException, 
NoSuchObjectException; /** * Filter given list of tables @@ -75,21 +76,22 @@ public interface MetaStoreFilterHook { * @param tableList * @returnList of filtered table names */ - public List<Table> filterTables(List<Table> tableList); + public List<Table> filterTables(List<Table> tableList) throws MetaException; /** * Filter given list of partitions * @param partitionList * @return */ - public List<Partition> filterPartitions(List<Partition> partitionList); + public List<Partition> filterPartitions(List<Partition> partitionList) throws MetaException; /** * Filter given list of partition specs * @param partitionSpecList * @return */ - public List<PartitionSpec> filterPartitionSpecs(List<PartitionSpec> partitionSpecList); + public List<PartitionSpec> filterPartitionSpecs(List<PartitionSpec> partitionSpecList) + throws MetaException; /** * filter to given partition object if applicable @@ -97,7 +99,7 @@ public interface MetaStoreFilterHook { * @return the same partition object if it's not filtered out * @throws NoSuchObjectException */ - public Partition filterPartition(Partition partition) throws NoSuchObjectException; + public Partition filterPartition(Partition partition) throws MetaException, NoSuchObjectException; /** * Filter given list of partition names @@ -107,9 +109,9 @@ public interface MetaStoreFilterHook { * @return */ public List<String> filterPartitionNames(String dbName, String tblName, - List<String> partitionNames); + List<String> partitionNames) throws MetaException; - public Index filterIndex(Index index) throws NoSuchObjectException; + public Index filterIndex(Index index) throws MetaException, NoSuchObjectException; /** * Filter given list of index names @@ -119,13 +121,13 @@ public interface MetaStoreFilterHook { * @return */ public List<String> filterIndexNames(String dbName, String tblName, - List<String> indexList); + List<String> indexList) throws MetaException; /** * Filter given list of index objects * @param indexeList * @return */ - 
public List<Index> filterIndexes(List<Index> indexeList); + public List<Index> filterIndexes(List<Index> indexeList) throws MetaException; } Added: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/AuthorizationMetaStoreFilterHook.java URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/AuthorizationMetaStoreFilterHook.java?rev=1660556&view=auto ============================================================================== --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/AuthorizationMetaStoreFilterHook.java (added) +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/AuthorizationMetaStoreFilterHook.java Wed Feb 18 01:42:47 2015 @@ -0,0 +1,108 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hive.ql.security.authorization.plugin; + +import java.util.ArrayList; +import java.util.List; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.hive.common.classification.InterfaceAudience.Private; +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.metastore.DefaultMetaStoreFilterHookImpl; +import org.apache.hadoop.hive.metastore.api.MetaException; +import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType; +import org.apache.hadoop.hive.ql.session.SessionState; + +/** + * Metastore filter hook for filtering out the list of objects that the current authorization + * implementation does not allow user to see + */ +@Private +public class AuthorizationMetaStoreFilterHook extends DefaultMetaStoreFilterHookImpl { + + public static final Log LOG = LogFactory.getLog(AuthorizationMetaStoreFilterHook.class); + + public AuthorizationMetaStoreFilterHook(HiveConf conf) { + super(conf); + } + + @Override + public List<String> filterTableNames(String dbName, List<String> tableList) throws MetaException { + List<HivePrivilegeObject> listObjs = getHivePrivObjects(dbName, tableList); + return getTableNames(getFilteredObjects(listObjs)); + } + + @Override + public List<String> filterDatabases(List<String> dbList) throws MetaException { + List<HivePrivilegeObject> listObjs = getHivePrivObjects(dbList); + return getDbNames(getFilteredObjects(listObjs)); + } + + private List<HivePrivilegeObject> getHivePrivObjects(List<String> dbList) { + List<HivePrivilegeObject> objs = new ArrayList<HivePrivilegeObject>(); + for(String dbname : dbList) { + objs.add(new HivePrivilegeObject(HivePrivilegeObjectType.DATABASE, dbname, dbname)); + } + return objs; + } + + private List<String> getDbNames(List<HivePrivilegeObject> filteredObjects) { + List<String> tnames = new ArrayList<String>(); + for(HivePrivilegeObject obj : 
filteredObjects) { + tnames.add(obj.getDbname()); + } + return tnames; + } + + private List<String> getTableNames(List<HivePrivilegeObject> filteredObjects) { + List<String> tnames = new ArrayList<String>(); + for(HivePrivilegeObject obj : filteredObjects) { + tnames.add(obj.getObjectName()); + } + return tnames; + } + + private List<HivePrivilegeObject> getFilteredObjects(List<HivePrivilegeObject> listObjs) throws MetaException { + SessionState ss = SessionState.get(); + HiveAuthzContext.Builder authzContextBuilder = new HiveAuthzContext.Builder(); + authzContextBuilder.setUserIpAddress(ss.getUserIpAddress()); + try { + return ss.getAuthorizerV2().filterListCmdObjects(listObjs, authzContextBuilder.build()); + } catch (HiveAuthzPluginException e) { + LOG.error(e); + throw new MetaException(e.getMessage()); + } catch (HiveAccessControlException e) { + // authorization error is not really expected in a filter call + // the impl should have just filtered out everything. A checkPrivileges call + // would have already been made to authorize this action + LOG.error(e); + throw new MetaException(e.getMessage()); + } + } + + private List<HivePrivilegeObject> getHivePrivObjects(String dbName, List<String> tableList) { + List<HivePrivilegeObject> objs = new ArrayList<HivePrivilegeObject>(); + for(String tname : tableList) { + objs.add(new HivePrivilegeObject(HivePrivilegeObjectType.TABLE_OR_VIEW, dbName, tname)); + } + return objs; + } + +} + Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAccessControlException.java URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAccessControlException.java?rev=1660556&r1=1660555&r2=1660556&view=diff ============================================================================== --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAccessControlException.java (original) +++ 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAccessControlException.java Wed Feb 18 01:42:47 2015 @@ -24,8 +24,7 @@ import org.apache.hadoop.hive.ql.metadat /** * Exception thrown by the Authorization plugin api (v2). Indicates - * an error while performing authorization, and not a authorization being - * denied. + * an authorization check denying permissions for an action. */ @LimitedPrivate(value = { "Apache Argus (incubating)" }) @Evolving Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizationValidator.java URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizationValidator.java?rev=1660556&r1=1660555&r2=1660556&view=diff ============================================================================== --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizationValidator.java (original) +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizationValidator.java Wed Feb 18 01:42:47 2015 @@ -29,17 +29,15 @@ import org.apache.hadoop.classification.
public interface HiveAuthorizationValidator { /** - * Check if current user has privileges to perform given operation type - * hiveOpType on the given input and output objects - * - * @param hiveOpType - * @param inputHObjs - * @param outputHObjs - * @param context - * @throws HiveAuthzPluginException - * @throws HiveAccessControlException + * see HiveAuthorizer.checkPrivileges */ void checkPrivileges(HiveOperationType hiveOpType, List<HivePrivilegeObject> inputHObjs, List<HivePrivilegeObject> outputHObjs, HiveAuthzContext context) throws HiveAuthzPluginException, HiveAccessControlException; + /** + * see HiveAuthorizer.filterListCmdObjects + */ + List<HivePrivilegeObject> filterListCmdObjects(List<HivePrivilegeObject> listObjs, + HiveAuthzContext context); + } Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizer.java URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizer.java?rev=1660556&r1=1660555&r2=1660556&view=diff ============================================================================== --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizer.java (original) +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizer.java Wed Feb 18 01:42:47 2015 @@ -154,6 +154,21 @@ public interface HiveAuthorizer { List<HivePrivilegeObject> outputHObjs, HiveAuthzContext context) throws HiveAuthzPluginException, HiveAccessControlException; + + /** + * Filter out any objects that should not be shown to the user, from the list of + * tables or databases coming from a 'show tables' or 'show databases' command + * @param listObjs List of all objects obtained as result of a show command + * @param context + * @return filtered list of objects that will be returned to the user invoking the command + * @throws HiveAuthzPluginException + * @throws HiveAccessControlException + */ 
+ List<HivePrivilegeObject> filterListCmdObjects(List<HivePrivilegeObject> listObjs, + HiveAuthzContext context) + throws HiveAuthzPluginException, HiveAccessControlException; + + /** * @return all existing roles * @throws HiveAuthzPluginException Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizerImpl.java URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizerImpl.java?rev=1660556&r1=1660555&r2=1660556&view=diff ============================================================================== --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizerImpl.java (original) +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveAuthorizerImpl.java Wed Feb 18 01:42:47 2015 @@ -85,6 +85,13 @@ public class HiveAuthorizerImpl implemen authValidator.checkPrivileges(hiveOpType, inputHObjs, outputHObjs, context); } + + @Override + public List<HivePrivilegeObject> filterListCmdObjects(List<HivePrivilegeObject> listObjs, + HiveAuthzContext context) throws HiveAuthzPluginException, HiveAccessControlException { + return authValidator.filterListCmdObjects(listObjs, context); + } + @Override public List<String> getAllRoles() throws HiveAuthzPluginException, HiveAccessControlException { return accessController.getAllRoles(); Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveV1Authorizer.java URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveV1Authorizer.java?rev=1660556&r1=1660555&r2=1660556&view=diff ============================================================================== --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveV1Authorizer.java (original) +++ 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveV1Authorizer.java Wed Feb 18 01:42:47 2015 @@ -21,7 +21,6 @@ package org.apache.hadoop.hive.ql.securi import java.util.ArrayList; import java.util.List; import java.util.Map; -import java.util.Set; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.Warehouse; @@ -372,4 +371,11 @@ public class HiveV1Authorizer implements @Override public void applyAuthorizationConfigPolicy(HiveConf hiveConf) { } + + @Override + public List<HivePrivilegeObject> filterListCmdObjects(List<HivePrivilegeObject> listObjs, + HiveAuthzContext context) throws HiveAuthzPluginException, HiveAccessControlException { + // do no filtering in old authorizer + return listObjs; + } } Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/DummyHiveAuthorizationValidator.java URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/DummyHiveAuthorizationValidator.java?rev=1660556&r1=1660555&r2=1660556&view=diff ============================================================================== --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/DummyHiveAuthorizationValidator.java (original) +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/DummyHiveAuthorizationValidator.java Wed Feb 18 01:42:47 2015 @@ -42,4 +42,10 @@ public class DummyHiveAuthorizationValid // no-op } + @Override + public List<HivePrivilegeObject> filterListCmdObjects(List<HivePrivilegeObject> listObjs, + HiveAuthzContext context) { + return listObjs; + } + } Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java?rev=1660556&r1=1660555&r2=1660556&view=diff ============================================================================== --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java (original) +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/SQLStdHiveAuthorizationValidator.java Wed Feb 18 01:42:47 2015 @@ -149,4 +149,10 @@ public class SQLStdHiveAuthorizationVali } } + @Override + public List<HivePrivilegeObject> filterListCmdObjects(List<HivePrivilegeObject> listObjs, + HiveAuthzContext context) { + return listObjs; + } + } Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java?rev=1660556&r1=1660555&r2=1660556&view=diff ============================================================================== --- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java (original) +++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java Wed Feb 18 01:42:47 2015 @@ -419,7 +419,7 @@ public class SessionState { return hdfsEncryptionShim; } - // SessionState is not available in runtime and Hive.get().getConf() is not safe to call + // SessionState is not available in runtime and Hive.get().getConf() is not safe to call private static class SessionStates { private SessionState state; private HiveConf conf; @@ -435,7 +435,7 @@ public class SessionState { } } } - + /** * Singleton Session object per thread. 
* @@ -705,7 +705,7 @@ public class SessionState { clsStr, authenticator, true); if (authorizer == null) { - // if it was null, the new authorization plugin must be specified in + // if it was null, the new (V2) authorization plugin must be specified in // config HiveAuthorizerFactory authorizerFactory = HiveUtils.getAuthorizerFactory(conf, HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER); @@ -717,13 +717,14 @@ public class SessionState { authorizerV2 = authorizerFactory.createHiveAuthorizer(new HiveMetastoreClientFactoryImpl(), conf, authenticator, authzContextBuilder.build()); + setAuthorizerV2Config(); - authorizerV2.applyAuthorizationConfigPolicy(conf); } // create the create table grants with new config createTableGrants = CreateTableAutomaticGrant.create(conf); } catch (HiveException e) { + LOG.error("Error setting up authorization: " + e.getMessage(), e); throw new RuntimeException(e); } @@ -734,6 +735,28 @@ public class SessionState { return; } + private void setAuthorizerV2Config() throws HiveException { + // avoid processing the same config multiple times, check marker + if (conf.get(CONFIG_AUTHZ_SETTINGS_APPLIED_MARKER, "").equals(Boolean.TRUE.toString())) { + return; + } + conf.setVar(ConfVars.METASTORE_FILTER_HOOK, + "org.apache.hadoop.hive.ql.security.authorization.plugin.AuthorizationMetaStoreFilterHook"); + + authorizerV2.applyAuthorizationConfigPolicy(conf); + // update config in Hive thread local as well and init the metastore client + try { + Hive.get(conf).getMSC(); + } catch (Exception e) { + // catch-all due to some exec time dependencies on session state + // that would cause ClassNotFoundException otherwise + throw new HiveException(e.getMessage(), e); + } + + // set a marker that this conf has been processed. + conf.set(CONFIG_AUTHZ_SETTINGS_APPLIED_MARKER, Boolean.TRUE.toString()); + } + public Object getActiveAuthorizer() { return getAuthorizationMode() == AuthorizationMode.V1 ?
getAuthorizer() : getAuthorizerV2(); @@ -1416,20 +1439,7 @@ public class SessionState { * any security configuration changes. */ public void applyAuthorizationPolicy() throws HiveException { - if(!isAuthorizationModeV2()){ - // auth v1 interface does not have this functionality - return; - } - - // avoid processing the same config multiple times, check marker - if (conf.get(CONFIG_AUTHZ_SETTINGS_APPLIED_MARKER, "").equals(Boolean.TRUE.toString())) { - return; - } - - authorizerV2.applyAuthorizationConfigPolicy(conf); - // set a marker that this conf has been processed. - conf.set(CONFIG_AUTHZ_SETTINGS_APPLIED_MARKER, Boolean.TRUE.toString()); - + setupAuth(); } public Map<String, Map<String, Table>> getTempTables() { Modified: hive/trunk/service/src/java/org/apache/hive/service/cli/CLIService.java URL: http://svn.apache.org/viewvc/hive/trunk/service/src/java/org/apache/hive/service/cli/CLIService.java?rev=1660556&r1=1660555&r2=1660556&view=diff ============================================================================== --- hive/trunk/service/src/java/org/apache/hive/service/cli/CLIService.java (original) +++ hive/trunk/service/src/java/org/apache/hive/service/cli/CLIService.java Wed Feb 18 01:42:47 2015 @@ -34,6 +34,7 @@ import org.apache.hadoop.hive.conf.HiveC import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.metastore.HiveMetaStoreClient; import org.apache.hadoop.hive.metastore.IMetaStoreClient; +import org.apache.hadoop.hive.metastore.api.MetaException; import org.apache.hadoop.hive.ql.exec.FunctionRegistry; import org.apache.hadoop.hive.ql.metadata.Hive; import org.apache.hadoop.hive.ql.metadata.HiveException; @@ -109,14 +110,16 @@ public class CLIService extends Composit // creates connection to HMS and thus *must* occur after kerberos login above try { applyAuthorizationConfigPolicy(hiveConf); - } catch (HiveException e) { - throw new RuntimeException("Error applying authorization policy on hive configuration", e); + } 
catch (Exception e) { + throw new RuntimeException("Error applying authorization policy on hive configuration: " + + e.getMessage(), e); } setupBlockedUdfs(); super.init(hiveConf); } - private void applyAuthorizationConfigPolicy(HiveConf newHiveConf) throws HiveException { + private void applyAuthorizationConfigPolicy(HiveConf newHiveConf) throws HiveException, + MetaException { // authorization setup using SessionState should be revisited eventually, as // authorization and authentication are not session specific settings SessionState ss = new SessionState(newHiveConf);