HBASE-19634 Add permission check for executeProcedures in AccessController
commit 1a89793ee8 (parent 8eeedf0403)
@@ -126,4 +126,18 @@ public interface RegionServerObserver {
   default void postClearCompactionQueues(
       final ObserverContext<RegionServerCoprocessorEnvironment> ctx)
       throws IOException {}
+
+  /**
+   * This will be called before executing procedures
+   * @param ctx the environment to interact with the framework and region server.
+   */
+  default void preExecuteProcedures(ObserverContext<RegionServerCoprocessorEnvironment> ctx)
+      throws IOException {}
+
+  /**
+   * This will be called after executing procedures
+   * @param ctx the environment to interact with the framework and region server.
+   */
+  default void postExecuteProcedures(ObserverContext<RegionServerCoprocessorEnvironment> ctx)
+      throws IOException {}
 }
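For orientation, a minimal region server coprocessor wired into these new hooks could look like the sketch below. The class name and comments are illustrative only; just the two overridden methods come from the interface added above.

import java.io.IOException;
import java.util.Optional;

import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.coprocessor.RegionServerCoprocessor;
import org.apache.hadoop.hbase.coprocessor.RegionServerCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.RegionServerObserver;

// Illustrative coprocessor (not part of the patch) that observes the new hooks.
public class ExecuteProceduresWatcher implements RegionServerCoprocessor, RegionServerObserver {

  @Override
  public Optional<RegionServerObserver> getRegionServerObserver() {
    return Optional.of(this);
  }

  @Override
  public void preExecuteProcedures(ObserverContext<RegionServerCoprocessorEnvironment> ctx)
      throws IOException {
    // Runs before the region server executes remote procedures sent by the master.
    // Throwing an IOException here (e.g. an AccessDeniedException) rejects the call.
  }

  @Override
  public void postExecuteProcedures(ObserverContext<RegionServerCoprocessorEnvironment> ctx)
      throws IOException {
    // Runs after all procedures in the request have been submitted for execution.
  }
}

Such an observer would be loaded through the usual hbase.coprocessor.regionserver.classes configuration, the same way AccessController is registered.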
@@ -41,7 +41,6 @@ import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicLong;
 import java.util.concurrent.atomic.LongAdder;
-
 import org.apache.commons.lang3.mutable.MutableObject;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
@@ -142,6 +141,7 @@ import org.apache.hbase.thirdparty.com.google.protobuf.RpcController;
 import org.apache.hbase.thirdparty.com.google.protobuf.ServiceException;
 import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;
 import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;
+
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.RequestConverter;
 import org.apache.hadoop.hbase.shaded.protobuf.ResponseConverter;
@@ -3454,36 +3454,40 @@ public class RSRpcServices implements HBaseRPCErrorHandler,
   }
 
   @Override
   @QosPriority(priority = HConstants.ADMIN_QOS)
   public ExecuteProceduresResponse executeProcedures(RpcController controller,
       ExecuteProceduresRequest request) throws ServiceException {
-    if (request.getOpenRegionCount() > 0) {
-      for (OpenRegionRequest req : request.getOpenRegionList()) {
-        openRegion(controller, req);
-      }
-    }
-    if (request.getCloseRegionCount() > 0) {
-      for (CloseRegionRequest req : request.getCloseRegionList()) {
-        closeRegion(controller, req);
-      }
-    }
-    if (request.getProcCount() > 0) {
-      for (RemoteProcedureRequest req : request.getProcList()) {
-        RSProcedureCallable callable;
-        try {
-          callable =
-            Class.forName(req.getProcClass()).asSubclass(RSProcedureCallable.class).newInstance();
-        } catch (Exception e) {
-          // here we just ignore the error as this should not happen and we do not provide a general
-          // way to report errors for all types of remote procedure. The procedure will hang at
-          // master side but after you solve the problem and restart master it will be executed
-          // again and pass.
-          LOG.warn("create procedure of type " + req.getProcClass() + " failed, give up", e);
-          continue;
-        }
-        callable.init(req.getProcData().toByteArray(), regionServer);
-        regionServer.executeProcedure(req.getProcId(), callable);
-      }
-    }
-    return ExecuteProceduresResponse.getDefaultInstance();
+    try {
+      checkOpen();
+      regionServer.getRegionServerCoprocessorHost().preExecuteProcedures();
+      if (request.getOpenRegionCount() > 0) {
+        for (OpenRegionRequest req : request.getOpenRegionList()) {
+          openRegion(controller, req);
+        }
+      }
+      if (request.getCloseRegionCount() > 0) {
+        for (CloseRegionRequest req : request.getCloseRegionList()) {
+          closeRegion(controller, req);
+        }
+      }
+      if (request.getProcCount() > 0) {
+        for (RemoteProcedureRequest req : request.getProcList()) {
+          RSProcedureCallable callable;
+          try {
+            callable =
+              Class.forName(req.getProcClass()).asSubclass(RSProcedureCallable.class).newInstance();
+          } catch (Exception e) {
+            regionServer.remoteProcedureComplete(req.getProcId(), e);
+            continue;
+          }
+          callable.init(req.getProcData().toByteArray(), regionServer);
+          regionServer.executeProcedure(req.getProcId(), callable);
+        }
+      }
+      regionServer.getRegionServerCoprocessorHost().postExecuteProcedures();
+      return ExecuteProceduresResponse.getDefaultInstance();
+    } catch (IOException e) {
+      throw new ServiceException(e);
+    }
   }
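The procedure callables are instantiated reflectively from the class name carried in the request. A standalone sketch of that idiom is below; the helper name is made up for illustration, and it uses getDeclaredConstructor().newInstance() where the patched code calls the older newInstance() directly.

// Hypothetical helper showing the same reflective-instantiation idiom as the diff above.
public final class ReflectiveFactory {

  // Resolve a class by name, verify it is a subtype of the expected type, then
  // instantiate it through its no-arg constructor.
  static <T> T newInstance(String className, Class<T> expectedType) throws Exception {
    return Class.forName(className)
        .asSubclass(expectedType)           // fails fast with ClassCastException on a wrong type
        .getDeclaredConstructor()           // non-deprecated form of Class.newInstance()
        .newInstance();
  }

  public static void main(String[] args) throws Exception {
    // Instantiate an ArrayList through its interface type, the same way the RPC layer
    // instantiates RSProcedureCallable implementations named in the request.
    java.util.List<?> list = newInstance("java.util.ArrayList", java.util.List.class);
    System.out.println(list.getClass().getName());  // prints java.util.ArrayList
  }
}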
@@ -205,6 +205,24 @@ public class RegionServerCoprocessorHost extends
     });
   }
 
+  public void preExecuteProcedures() throws IOException {
+    execOperation(coprocEnvironments.isEmpty() ? null : new RegionServerObserverOperation() {
+      @Override
+      public void call(RegionServerObserver observer) throws IOException {
+        observer.preExecuteProcedures(this);
+      }
+    });
+  }
+
+  public void postExecuteProcedures() throws IOException {
+    execOperation(coprocEnvironments.isEmpty() ? null : new RegionServerObserverOperation() {
+      @Override
+      public void call(RegionServerObserver observer) throws IOException {
+        observer.postExecuteProcedures(this);
+      }
+    });
+  }
+
   /**
    * Coprocessor environment extension providing access to region server
    * related services.
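The host methods follow the usual coprocessor-host dispatch pattern: skip the call entirely when no coprocessors are loaded, otherwise run the given operation against every registered observer. A simplified, framework-free sketch of that pattern follows; the class and method names are illustrative, not the HBase implementation.

import java.io.IOException;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;

// Simplified stand-in for a coprocessor host, for illustration only.
public class MiniObserverHost<O> {

  // One operation per hook; call() is invoked once for each loaded observer.
  public interface ObserverOperation<O> {
    void call(O observer) throws IOException;
  }

  private final List<O> observers = new CopyOnWriteArrayList<>();

  public void register(O observer) {
    observers.add(observer);
  }

  // Mirrors execOperation(coprocEnvironments.isEmpty() ? null : ...): a null
  // operation means nothing is loaded, so there is nothing to do.
  public void execOperation(ObserverOperation<O> op) throws IOException {
    if (op == null) {
      return;
    }
    for (O observer : observers) {
      op.call(observer);  // any IOException aborts the chain and propagates to the caller
    }
  }
}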
@@ -1,4 +1,4 @@
-/*
+/**
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements. See the NOTICE file
  * distributed with this work for additional information
@@ -117,13 +117,6 @@ import org.apache.hadoop.hbase.security.Superusers;
 import org.apache.hadoop.hbase.security.User;
 import org.apache.hadoop.hbase.security.UserProvider;
 import org.apache.hadoop.hbase.security.access.Permission.Action;
-import org.apache.hbase.thirdparty.com.google.common.collect.ArrayListMultimap;
-import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableSet;
-import org.apache.hbase.thirdparty.com.google.common.collect.ListMultimap;
-import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
-import org.apache.hbase.thirdparty.com.google.common.collect.MapMaker;
-import org.apache.hbase.thirdparty.com.google.common.collect.Maps;
-import org.apache.hbase.thirdparty.com.google.common.collect.Sets;
 import org.apache.hadoop.hbase.snapshot.SnapshotDescriptionUtils;
 import org.apache.hadoop.hbase.util.ByteRange;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -136,6 +129,14 @@ import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import org.apache.hbase.thirdparty.com.google.common.collect.ArrayListMultimap;
+import org.apache.hbase.thirdparty.com.google.common.collect.ImmutableSet;
+import org.apache.hbase.thirdparty.com.google.common.collect.ListMultimap;
+import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
+import org.apache.hbase.thirdparty.com.google.common.collect.MapMaker;
+import org.apache.hbase.thirdparty.com.google.common.collect.Maps;
+import org.apache.hbase.thirdparty.com.google.common.collect.Sets;
+
 /**
  * Provides basic authorization checks for data access and administrative
  * operations.
@@ -2507,8 +2508,7 @@ public class AccessController implements MasterCoprocessor, RegionCoprocessor,
 
   @Override
   public void preRequestLock(ObserverContext<MasterCoprocessorEnvironment> ctx, String namespace,
-      TableName tableName, RegionInfo[] regionInfos, String description)
-      throws IOException {
+      TableName tableName, RegionInfo[] regionInfos, String description) throws IOException {
     // There are operations in the CREATE and ADMIN domain which may require lock, READ
     // or WRITE. So for any lock request, we check for these two perms irrespective of lock type.
     String reason = String.format("Description=%s", description);
@@ -2521,12 +2521,18 @@ public class AccessController implements MasterCoprocessor, RegionCoprocessor,
     checkLockPermissions(ctx, null, tableName, null, description);
   }
 
+  @Override
+  public void preExecuteProcedures(ObserverContext<RegionServerCoprocessorEnvironment> ctx)
+      throws IOException {
+    checkSystemOrSuperUser(getActiveUser(ctx));
+  }
+
   /**
    * Returns the active user to which authorization checks should be applied.
    * If we are in the context of an RPC call, the remote user is used,
    * otherwise the currently logged in user is used.
    */
-  public User getActiveUser(ObserverContext<?> ctx) throws IOException {
+  private User getActiveUser(ObserverContext<?> ctx) throws IOException {
     // for non-rpc handling, fallback to system user
     Optional<User> optionalUser = ctx.getCaller();
     if (optionalUser.isPresent()) {
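The new hook rejects executeProcedures calls from anyone who is not the system user or a configured superuser. A rough, standalone sketch of that kind of gate is shown below; it is not the actual checkSystemOrSuperUser implementation, which delegates to HBase's Superusers handling, and the class and field names are invented for illustration.

import java.io.IOException;
import java.util.Set;

import org.apache.hadoop.hbase.security.AccessDeniedException;

// Simplified illustration of a "system or superuser only" gate.
public class SuperUserGate {

  private final Set<String> superUsers;   // e.g. the names listed under hbase.superuser
  private final String systemUserName;    // the user the region server process runs as

  public SuperUserGate(Set<String> superUsers, String systemUserName) {
    this.superUsers = superUsers;
    this.systemUserName = systemUserName;
  }

  public void check(String activeUserName) throws IOException {
    if (systemUserName.equals(activeUserName) || superUsers.contains(activeUserName)) {
      return;  // allowed: system user or superuser
    }
    throw new AccessDeniedException(
        "User '" + activeUserName + "' is not system or super user.");
  }
}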
@@ -202,5 +202,12 @@ public class TestJMXConnectorServer {
         throw new AccessDeniedException("Insufficient permissions to shut down cluster.");
       }
     }
+
+    @Override
+    public void preExecuteProcedures(ObserverContext<RegionServerCoprocessorEnvironment> ctx)
+        throws IOException {
+      // FIXME: ignore the procedure permission check since in our UT framework master is neither
+      // the systemuser nor the superuser so we can not call executeProcedures...
+    }
   }
 }
@@ -1,4 +1,4 @@
-/*
+/**
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements. See the NOTICE file
  * distributed with this work for additional information
@@ -30,7 +30,6 @@ import com.google.protobuf.RpcCallback;
 import com.google.protobuf.RpcController;
 import com.google.protobuf.Service;
 import com.google.protobuf.ServiceException;
-
 import java.io.IOException;
 import java.security.PrivilegedAction;
 import java.util.ArrayList;
@@ -38,7 +37,6 @@ import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.List;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -3064,4 +3062,18 @@ public class TestAccessController extends SecureTestUtil {
     verifyAllowed(
       action, SUPERUSER, USER_ADMIN, USER_CREATE, USER_RW, USER_RO, USER_NONE, USER_OWNER);
   }
+
+  @Test
+  public void testExecuteProcedures() throws Exception {
+    AccessTestAction action = new AccessTestAction() {
+      @Override
+      public Object run() throws Exception {
+        ACCESS_CONTROLLER.preExecuteProcedures(ObserverContextImpl.createAndPrepare(RSCP_ENV));
+        return null;
+      }
+    };
+
+    verifyAllowed(action, SUPERUSER);
+    verifyDenied(action, USER_CREATE, USER_RW, USER_RO, USER_NONE, USER_OWNER, USER_ADMIN);
+  }
 }
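The test invokes the coprocessor hook directly against a prepared observer context and then asserts who may and may not call it. Conceptually, the allowed/denied assertions boil down to the sketch below; the real SecureTestUtil helpers additionally run the action as each named test user, so the class and method names here are illustrative only.

import static org.junit.Assert.fail;

import java.util.concurrent.Callable;

import org.apache.hadoop.hbase.security.AccessDeniedException;

// Conceptual sketch of what verifyAllowed/verifyDenied assert for a single action.
public final class AccessAssertions {

  public static void assertAllowed(Callable<?> action) throws Exception {
    action.call();  // must complete without an AccessDeniedException
  }

  public static void assertDenied(Callable<?> action) throws Exception {
    try {
      action.call();
      fail("Expected the coprocessor hook to reject the call");
    } catch (AccessDeniedException expected) {
      // expected: preExecuteProcedures refused a non-system, non-super user
    }
  }
}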