HBASE-19634 Add permission check for executeProcedures in AccessController
Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/ac1ff887
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/ac1ff887
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/ac1ff887

Branch: refs/heads/HBASE-19397-branch-2
Commit: ac1ff887d0cede4260461687b04347b3acc820fc
Parents: 9f0a40b
Author: zhangduo <zhang...@apache.org>
Authored: Thu Jan 4 16:18:21 2018 +0800
Committer: zhangduo <zhang...@apache.org>
Committed: Tue Jan 30 09:27:47 2018 +0800

----------------------------------------------------------------------
 .../hbase/coprocessor/RegionServerObserver.java  | 14 +++++
 .../hbase/regionserver/RSRpcServices.java        | 54 +++++++++++---------
 .../RegionServerCoprocessorHost.java             | 18 +++++++
 .../hbase/security/access/AccessController.java  | 30 ++++++-----
 .../hadoop/hbase/TestJMXConnectorServer.java     |  7 +++
 .../security/access/TestAccessController.java    | 18 +++--
 6 files changed, 101 insertions(+), 40 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/ac1ff887/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionServerObserver.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionServerObserver.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionServerObserver.java
index c1af3fb..5b751df 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionServerObserver.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionServerObserver.java
@@ -126,4 +126,18 @@ public interface RegionServerObserver {
 
   default void postClearCompactionQueues(
       final ObserverContext<RegionServerCoprocessorEnvironment> ctx) throws IOException {}
+
+  /**
+   * This will be called before executing procedures
+   * @param ctx the environment to interact with the framework and region server.
+   */
+  default void preExecuteProcedures(ObserverContext<RegionServerCoprocessorEnvironment> ctx)
+      throws IOException {}
+
+  /**
+   * This will be called after executing procedures
+   * @param ctx the environment to interact with the framework and region server.
+   */
+  default void postExecuteProcedures(ObserverContext<RegionServerCoprocessorEnvironment> ctx)
+      throws IOException {}
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/ac1ff887/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
index 5391a82..e540464 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RSRpcServices.java
@@ -41,7 +41,6 @@ import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicLong;
 import java.util.concurrent.atomic.LongAdder;
-
 import org.apache.commons.lang3.mutable.MutableObject;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
@@ -142,6 +141,7 @@ import org.apache.hbase.thirdparty.com.google.protobuf.RpcController;
 import org.apache.hbase.thirdparty.com.google.protobuf.ServiceException;
 import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;
 import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;
+
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
 import org.apache.hadoop.hbase.shaded.protobuf.ResponseConverter;
@@ -3454,36 +3454,40 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
   }
 
   @Override
+  @QosPriority(priority = HConstants.ADMIN_QOS)
   public ExecuteProceduresResponse executeProcedures(RpcController controller,
       ExecuteProceduresRequest request) throws ServiceException {
-    if (request.getOpenRegionCount() > 0) {
-      for (OpenRegionRequest req : request.getOpenRegionList()) {
-        openRegion(controller, req);
+    try {
+      checkOpen();
+      regionServer.getRegionServerCoprocessorHost().preExecuteProcedures();
+      if (request.getOpenRegionCount() > 0) {
+        for (OpenRegionRequest req : request.getOpenRegionList()) {
+          openRegion(controller, req);
+        }
       }
-    }
-    if (request.getCloseRegionCount() > 0) {
-      for (CloseRegionRequest req : request.getCloseRegionList()) {
-        closeRegion(controller, req);
+      if (request.getCloseRegionCount() > 0) {
+        for (CloseRegionRequest req : request.getCloseRegionList()) {
+          closeRegion(controller, req);
+        }
       }
-    }
-    if (request.getProcCount() > 0) {
-      for (RemoteProcedureRequest req : request.getProcList()) {
-        RSProcedureCallable callable;
-        try {
-          callable =
-            Class.forName(req.getProcClass()).asSubclass(RSProcedureCallable.class).newInstance();
-        } catch (Exception e) {
-          // here we just ignore the error as this should not happen and we do not provide a general
-          // way to report errors for all types of remote procedure. The procedure will hang at
-          // master side but after you solve the problem and restart master it will be executed
-          // again and pass.
-          LOG.warn("create procedure of type " + req.getProcClass() + " failed, give up", e);
-          continue;
+      if (request.getProcCount() > 0) {
+        for (RemoteProcedureRequest req : request.getProcList()) {
+          RSProcedureCallable callable;
+          try {
+            callable =
+              Class.forName(req.getProcClass()).asSubclass(RSProcedureCallable.class).newInstance();
+          } catch (Exception e) {
+            regionServer.remoteProcedureComplete(req.getProcId(), e);
+            continue;
+          }
+          callable.init(req.getProcData().toByteArray(), regionServer);
+          regionServer.executeProcedure(req.getProcId(), callable);
         }
-        callable.init(req.getProcData().toByteArray(), regionServer);
-        regionServer.executeProcedure(req.getProcId(), callable);
       }
+      regionServer.getRegionServerCoprocessorHost().postExecuteProcedures();
+      return ExecuteProceduresResponse.getDefaultInstance();
+    } catch (IOException e) {
+      throw new ServiceException(e);
     }
-    return ExecuteProceduresResponse.getDefaultInstance();
   }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/ac1ff887/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerCoprocessorHost.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerCoprocessorHost.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerCoprocessorHost.java
index 1986668..f4122ce 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerCoprocessorHost.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionServerCoprocessorHost.java
@@ -205,6 +205,24 @@ public class RegionServerCoprocessorHost extends
     });
   }
 
+  public void preExecuteProcedures() throws IOException {
+    execOperation(coprocEnvironments.isEmpty() ? null : new RegionServerObserverOperation() {
+      @Override
+      public void call(RegionServerObserver observer) throws IOException {
+        observer.preExecuteProcedures(this);
+      }
+    });
+  }
+
+  public void postExecuteProcedures() throws IOException {
+    execOperation(coprocEnvironments.isEmpty() ? null : new RegionServerObserverOperation() {
+      @Override
+      public void call(RegionServerObserver observer) throws IOException {
+        observer.postExecuteProcedures(this);
+      }
+    });
+  }
+
   /**
    * Coprocessor environment extension providing access to region server
    * related services.

http://git-wip-us.apache.org/repos/asf/hbase/blob/ac1ff887/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
index f191c9d..6acc133 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
@@ -1,4 +1,4 @@
-/*
+/**
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -117,13 +117,6 @@ import org.apache.hadoop.hbase.security.Superusers;
 import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.security.UserProvider;
 import org.apache.hadoop.hbase.security.access.Permission.Action;
-import org.apache.hbase.thirdparty.com.google.common.collect.ArrayListMultimap;
-import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableSet;
-import org.apache.hbase.thirdparty.com.google.common.collect.ListMultimap;
-import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
-import org.apache.hbase.thirdparty.com.google.common.collect.MapMaker;
-import org.apache.hbase.thirdparty.com.google.common.collect.Maps;
-import org.apache.hbase.thirdparty.com.google.common.collect.Sets;
 import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;
 import org.apache.hadoop.hbase.util.ByteRange;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -136,6 +129,14 @@ import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import org.apache.hbase.thirdparty.com.google.common.collect.ArrayListMultimap;
+import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableSet;
+import org.apache.hbase.thirdparty.com.google.common.collect.ListMultimap;
+import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
+import org.apache.hbase.thirdparty.com.google.common.collect.MapMaker;
+import org.apache.hbase.thirdparty.com.google.common.collect.Maps;
+import org.apache.hbase.thirdparty.com.google.common.collect.Sets;
+
 /**
  * Provides basic authorization checks for data access and administrative
  * operations.
@@ -2455,7 +2456,7 @@ public class AccessController implements MasterCoprocessor, RegionCoprocessor,
       throws IOException {
     requirePermission(ctx, "replicateLogEntries", Action.WRITE);
   }
-  
+
   @Override
   public void preClearCompactionQueues(ObserverContext<RegionServerCoprocessorEnvironment> ctx)
       throws IOException {
@@ -2507,8 +2508,7 @@ public class AccessController implements MasterCoprocessor, RegionCoprocessor,
 
   @Override
   public void preRequestLock(ObserverContext<MasterCoprocessorEnvironment> ctx, String namespace,
-      TableName tableName, RegionInfo[] regionInfos, String description)
-      throws IOException {
+      TableName tableName, RegionInfo[] regionInfos, String description) throws IOException {
     // There are operations in the CREATE and ADMIN domain which may require lock, READ
     // or WRITE. So for any lock request, we check for these two perms irrespective of lock type.
     String reason = String.format("Description=%s", description);
@@ -2521,12 +2521,18 @@ public class AccessController implements MasterCoprocessor, RegionCoprocessor,
     checkLockPermissions(ctx, null, tableName, null, description);
   }
 
+  @Override
+  public void preExecuteProcedures(ObserverContext<RegionServerCoprocessorEnvironment> ctx)
+      throws IOException {
+    checkSystemOrSuperUser(getActiveUser(ctx));
+  }
+
   /**
    * Returns the active user to which authorization checks should be applied.
    * If we are in the context of an RPC call, the remote user is used,
    * otherwise the currently logged in user is used.
    */
-  public User getActiveUser(ObserverContext<?> ctx) throws IOException {
+  private User getActiveUser(ObserverContext<?> ctx) throws IOException {
     // for non-rpc handling, fallback to system user
     Optional<User> optionalUser = ctx.getCaller();
     if (optionalUser.isPresent()) {

http://git-wip-us.apache.org/repos/asf/hbase/blob/ac1ff887/hbase-server/src/test/java/org/apache/hadoop/hbase/TestJMXConnectorServer.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestJMXConnectorServer.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestJMXConnectorServer.java
index fee1439..6a2a4e0 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestJMXConnectorServer.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestJMXConnectorServer.java
@@ -213,5 +213,12 @@ public class TestJMXConnectorServer {
         throw new AccessDeniedException("Insufficient permissions to shut down cluster.");
       }
     }
+
+    @Override
+    public void preExecuteProcedures(ObserverContext<RegionServerCoprocessorEnvironment> ctx)
+        throws IOException {
+      // FIXME: ignore the procedure permission check since in our UT framework master is neither
+      // the systemuser nor the superuser so we can not call executeProcedures...
+    }
   }
 }

http://git-wip-us.apache.org/repos/asf/hbase/blob/ac1ff887/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
index abb0ddd..cb9ccdd 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestAccessController.java
@@ -1,4 +1,4 @@
-/*
+/**
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -30,7 +30,6 @@ import com.google.protobuf.RpcCallback;
 import com.google.protobuf.RpcController;
 import com.google.protobuf.Service;
 import com.google.protobuf.ServiceException;
-
 import java.io.IOException;
 import java.security.PrivilegedAction;
 import java.util.ArrayList;
@@ -38,7 +37,6 @@ import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.List;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -3074,4 +3072,18 @@ public class TestAccessController extends SecureTestUtil {
     verifyAllowed(
       action, SUPERUSER, USER_ADMIN, USER_CREATE, USER_RW, USER_RO, USER_NONE, USER_OWNER);
   }
+
+  @Test
+  public void testExecuteProcedures() throws Exception {
+    AccessTestAction action = new AccessTestAction() {
+      @Override
+      public Object run() throws Exception {
+        ACCESS_CONTROLLER.preExecuteProcedures(ObserverContextImpl.createAndPrepare(RSCP_ENV));
+        return null;
+      }
+    };
+
+    verifyAllowed(action, SUPERUSER);
+    verifyDenied(action, USER_CREATE, USER_RW, USER_RO, USER_NONE, USER_OWNER, USER_ADMIN);
+  }
 }
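
For readers following this change, the sketch below shows how a third-party RegionServerObserver could plug into the two new hooks. It is illustrative only and not part of the commit: the class name ProcedureGateObserver is hypothetical, and the superuser check is a simplified stand-in for the checkSystemOrSuperUser() call that AccessController performs in its own preExecuteProcedures().

import java.io.IOException;
import java.util.Optional;

import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.coprocessor.RegionServerCoprocessor;
import org.apache.hadoop.hbase.coprocessor.RegionServerCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.RegionServerObserver;
import org.apache.hadoop.hbase.security.AccessDeniedException;
import org.apache.hadoop.hbase.security.Superusers;
import org.apache.hadoop.hbase.security.User;

/**
 * Hypothetical observer illustrating the pre/postExecuteProcedures hooks added above.
 */
public class ProcedureGateObserver implements RegionServerCoprocessor, RegionServerObserver {

  @Override
  public Optional<RegionServerObserver> getRegionServerObserver() {
    return Optional.of(this);
  }

  @Override
  public void preExecuteProcedures(ObserverContext<RegionServerCoprocessorEnvironment> ctx)
      throws IOException {
    // Called by RegionServerCoprocessorHost before RSRpcServices dispatches any of the
    // open-region, close-region or remote-procedure requests in an executeProcedures RPC.
    // Throwing here makes RSRpcServices fail the whole RPC with a ServiceException.
    Optional<User> caller = ctx.getCaller();
    if (!caller.isPresent() || !Superusers.isSuperUser(caller.get())) {
      throw new AccessDeniedException("executeProcedures is restricted to superusers");
    }
  }

  @Override
  public void postExecuteProcedures(ObserverContext<RegionServerCoprocessorEnvironment> ctx)
      throws IOException {
    // Called after all requests in the RPC have been dispatched; useful for auditing.
  }
}

Such an observer would typically be registered on region servers through the hbase.coprocessor.regionserver.classes property in hbase-site.xml, the same loading path used for other RegionServerObserver implementations.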