HBASE-11589 AccessControlException should be a not retriable exception (Qiang Tian)

Amending-Author: Andrew Purtell <apurtell@apache.org>
Author: Andrew Purtell
Date:   2014-08-08 19:10:56 -07:00
parent 2df6b0562f
commit b8f2f67d4c
4 changed files with 26 additions and 21 deletions
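
Why this matters: AccessDeniedException extends DoNotRetryIOException (first file below), so every throw site converted in this commit fails the client fast instead of consuming its retry budget on a permission error that cannot succeed. The following is a minimal sketch of that client-side contract, a simplified stand-in for HBase's retrying caller rather than the actual implementation:

import java.io.IOException;
import org.apache.hadoop.hbase.DoNotRetryIOException;

public class RetrySketch {
  interface Call<T> { T call() throws IOException; }

  // Retries transient IOExceptions, but rethrows DoNotRetryIOException
  // subclasses (such as AccessDeniedException) on the first failure.
  // Assumes maxAttempts >= 1.
  static <T> T callWithRetries(Call<T> call, int maxAttempts) throws IOException {
    IOException last = null;
    for (int attempt = 1; attempt <= maxAttempts; attempt++) {
      try {
        return call.call();
      } catch (DoNotRetryIOException e) {
        throw e;   // permission errors cannot succeed on retry
      } catch (IOException e) {
        last = e;  // assume transient; try again
      }
    }
    throw last;
  }
}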

org/apache/hadoop/hbase/security/AccessDeniedException.java

@@ -41,4 +41,9 @@ public class AccessDeniedException extends DoNotRetryIOException {
   public AccessDeniedException(String s) {
     super(s);
   }
+
+  public AccessDeniedException(Throwable cause) {
+    super(cause);
+  }
 }
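
The cause-taking constructor added above is what allows call sites to convert a Hadoop security exception into the non-retriable HBase type without losing the original stack trace; RpcServer.authorizeConnection below does exactly this. A hypothetical helper illustrating the pattern (toNonRetriable is an invented name, not part of the patch):

import org.apache.hadoop.hbase.security.AccessDeniedException;
import org.apache.hadoop.security.authorize.AuthorizationException;

public class WrapSketch {
  // Wrap a Hadoop-side authorization failure so HBase clients fail fast;
  // getCause() still returns the original AuthorizationException.
  static AccessDeniedException toNonRetriable(AuthorizationException ae) {
    return new AccessDeniedException(ae);
  }
}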

org/apache/hadoop/hbase/ipc/RpcServer.java

@@ -19,6 +19,7 @@
 package org.apache.hadoop.hbase.ipc;
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHORIZATION;
+import io.netty.util.internal.ConcurrentSet;
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
@@ -63,8 +64,6 @@ import javax.security.sasl.Sasl;
 import javax.security.sasl.SaslException;
 import javax.security.sasl.SaslServer;
-import io.netty.util.internal.ConcurrentSet;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -91,6 +90,7 @@ import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.RequestHeader;
 import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.ResponseHeader;
 import org.apache.hadoop.hbase.protobuf.generated.RPCProtos.UserInformation;
 import org.apache.hadoop.hbase.regionserver.HRegionServer;
+import org.apache.hadoop.hbase.security.AccessDeniedException;
 import org.apache.hadoop.hbase.security.AuthMethod;
 import org.apache.hadoop.hbase.security.HBasePolicyProvider;
 import org.apache.hadoop.hbase.security.HBaseSaslRpcServer;
@@ -108,7 +108,6 @@ import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableUtils;
 import org.apache.hadoop.io.compress.CompressionCodec;
-import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
 import org.apache.hadoop.security.authorize.AuthorizationException;
@@ -119,8 +118,8 @@ import org.apache.hadoop.security.token.SecretManager;
 import org.apache.hadoop.security.token.SecretManager.InvalidToken;
 import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.util.StringUtils;
-import org.htrace.TraceInfo;
 import org.codehaus.jackson.map.ObjectMapper;
+import org.htrace.TraceInfo;
 import com.google.common.util.concurrent.ThreadFactoryBuilder;
 import com.google.protobuf.BlockingService;
@@ -1235,7 +1234,7 @@ public class RpcServer implements RpcServerInterface {
           secretManager);
       UserGroupInformation ugi = tokenId.getUser();
       if (ugi == null) {
-        throw new AccessControlException(
+        throw new AccessDeniedException(
             "Can't retrieve username from tokenIdentifier.");
       }
       ugi.addTokenIdentifier(tokenId);
@@ -1265,7 +1264,7 @@
       switch (authMethod) {
         case DIGEST:
           if (secretManager == null) {
-            throw new AccessControlException(
+            throw new AccessDeniedException(
                 "Server is not configured to do DIGEST authentication.");
           }
           saslServer = Sasl.createSaslServer(AuthMethod.DIGEST
@@ -1282,7 +1281,7 @@
           }
           final String names[] = SaslUtil.splitKerberosName(fullName);
           if (names.length != 3) {
-            throw new AccessControlException(
+            throw new AccessDeniedException(
                 "Kerberos principal name does NOT have the expected "
                     + "hostname part: " + fullName);
           }
@@ -1297,7 +1296,7 @@
         });
       }
       if (saslServer == null)
-        throw new AccessControlException(
+        throw new AccessDeniedException(
            "Unable to find SASL server implementation for "
                + authMethod.getMechanismName());
       if (LOG.isDebugEnabled()) {
@@ -1421,7 +1420,7 @@
         return doBadPreambleHandling(msg, new BadAuthException(msg));
       }
       if (isSecurityEnabled && authMethod == AuthMethod.SIMPLE) {
-        AccessControlException ae = new AccessControlException("Authentication is required");
+        AccessDeniedException ae = new AccessDeniedException("Authentication is required");
         setupResponse(authFailedResponse, authFailedCall, ae, ae.getMessage());
         responder.doRespond(authFailedCall);
         throw ae;
@@ -1581,7 +1580,7 @@
           && (!protocolUser.getUserName().equals(user.getUserName()))) {
         if (authMethod == AuthMethod.DIGEST) {
           // Not allowed to doAs if token authentication is used
-          throw new AccessControlException("Authenticated user (" + user
+          throw new AccessDeniedException("Authenticated user (" + user
               + ") doesn't match what the client claims to be ("
               + protocolUser + ")");
         } else {
@@ -1669,7 +1668,7 @@
       if (!authorizeConnection()) {
         // Throw FatalConnectionException wrapping ACE so client does right thing and closes
         // down the connection instead of trying to read non-existent retun.
-        throw new AccessControlException("Connection from " + this + " for service " +
+        throw new AccessDeniedException("Connection from " + this + " for service " +
            connectionHeader.getServiceName() + " is unauthorized for user: " + user);
       }
     }
@@ -1778,7 +1777,8 @@
         LOG.debug("Connection authorization failed: " + ae.getMessage(), ae);
       }
       metrics.authorizationFailure();
-      setupResponse(authFailedResponse, authFailedCall, ae, ae.getMessage());
+      setupResponse(authFailedResponse, authFailedCall,
+          new AccessDeniedException(ae), ae.getMessage());
       responder.doRespond(authFailedCall);
       return false;
     }
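
All of the RpcServer hunks above follow one pattern: authentication, SASL, and connection-authorization failures that previously threw Hadoop's AccessControlException now throw AccessDeniedException, so a misconfigured client (for example one attempting SIMPLE auth against a secured server) errors out once rather than retrying a failure that can never succeed. A condensed sketch of the preamble check, using simplified names and a stand-in enum rather than the real RpcServer code:

import org.apache.hadoop.hbase.security.AccessDeniedException;

public class AuthCheckSketch {
  enum AuthMethod { SIMPLE, KERBEROS, DIGEST }  // simplified stand-in enum

  // Mirrors the check above: SIMPLE auth against a secured server is
  // rejected with a non-retriable exception.
  static void checkAuth(boolean isSecurityEnabled, AuthMethod authMethod)
      throws AccessDeniedException {
    if (isSecurityEnabled && authMethod == AuthMethod.SIMPLE) {
      throw new AccessDeniedException("Authentication is required");
    }
  }
}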

org/apache/hadoop/hbase/util/FSUtils.java

@@ -66,6 +66,7 @@ import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.fs.HFileSystem;
 import org.apache.hadoop.hbase.master.HMaster;
 import org.apache.hadoop.hbase.master.RegionPlacementMaintainer;
+import org.apache.hadoop.hbase.security.AccessDeniedException;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.protobuf.generated.FSProtos;
 import org.apache.hadoop.hbase.regionserver.HRegion;
@@ -73,7 +74,6 @@ import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.protocol.FSConstants;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.SequenceFile;
-import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.Progressable;
 import org.apache.hadoop.util.ReflectionUtils;
@@ -1669,7 +1669,7 @@
    * the action
    */
   public static void checkAccess(UserGroupInformation ugi, FileStatus file,
-      FsAction action) throws AccessControlException {
+      FsAction action) throws AccessDeniedException {
     if (ugi.getShortUserName().equals(file.getOwner())) {
       if (file.getPermission().getUserAction().implies(action)) {
         return;
@@ -1681,7 +1681,7 @@
     } else if (file.getPermission().getOtherAction().implies(action)) {
       return;
     }
-    throw new AccessControlException("Permission denied:" + " action=" + action
+    throw new AccessDeniedException("Permission denied:" + " action=" + action
       + " path=" + file.getPath() + " user=" + ugi.getShortUserName());
   }
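
FSUtils.checkAccess probes a FileStatus's owner, group, and other permission bits and, after this change, declares the non-retriable exception type. A hypothetical caller, assuming the HBase and Hadoop client jars are on the classpath (the /hbase path is only an example):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.hbase.security.AccessDeniedException;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.security.UserGroupInformation;

public class CheckAccessSketch {
  public static void main(String[] args) throws Exception {
    FileSystem fs = FileSystem.get(new Configuration());
    FileStatus status = fs.getFileStatus(new Path("/hbase"));  // example path
    UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
    try {
      FSUtils.checkAccess(ugi, status, FsAction.WRITE);
      System.out.println("write access confirmed");
    } catch (AccessDeniedException e) {
      // non-retriable: report and bail out instead of retrying
      System.err.println(e.getMessage());
    }
  }
}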

org/apache/hadoop/hbase/util/HBaseFsck.java

@@ -106,8 +106,8 @@ import org.apache.hadoop.hbase.util.hbck.TableLockChecker;
 import org.apache.hadoop.hbase.zookeeper.MetaTableLocator;
 import org.apache.hadoop.hbase.zookeeper.ZKTableStateClientSideReader;
 import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
+import org.apache.hadoop.hbase.security.AccessDeniedException;
 import org.apache.hadoop.io.IOUtils;
-import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.Tool;
@@ -1565,7 +1565,7 @@
     }
   }
-  private void preCheckPermission() throws IOException, AccessControlException {
+  private void preCheckPermission() throws IOException, AccessDeniedException {
     if (shouldIgnorePreCheckPermission()) {
       return;
     }
@@ -1578,12 +1578,12 @@
     for (FileStatus file : files) {
       try {
         FSUtils.checkAccess(ugi, file, FsAction.WRITE);
-      } catch (AccessControlException ace) {
-        LOG.warn("Got AccessControlException when preCheckPermission ", ace);
+      } catch (AccessDeniedException ace) {
+        LOG.warn("Got AccessDeniedException when preCheckPermission ", ace);
         errors.reportError(ERROR_CODE.WRONG_USAGE, "Current user " + ugi.getUserName()
           + " does not have write perms to " + file.getPath()
           + ". Please rerun hbck as hdfs user " + file.getOwner());
-        throw new AccessControlException(ace);
+        throw ace;
       }
     }
   }
@@ -4014,7 +4014,7 @@
       // pre-check current user has FS write permission or not
       try {
         preCheckPermission();
-      } catch (AccessControlException ace) {
+      } catch (AccessDeniedException ace) {
         Runtime.getRuntime().exit(-1);
       } catch (IOException ioe) {
         Runtime.getRuntime().exit(-1);