HADOOP-6586. Log authentication and authorization failures and successes for RPC

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@916779 13f79535-47bb-0310-9956-ffa450edef68
Boris Shkolnik 2010-02-26 18:35:54 +00:00
parent ea650d8d6c
commit c4c122a0de
6 changed files with 59 additions and 24 deletions

CHANGES.txt

@@ -65,6 +65,8 @@ Trunk (unreleased changes)
 HADOOP-6568. Adds authorization for the default servlets.
 (Vinod Kumar Vavilapalli via ddas)
+HADOOP-6586. Log authentication and authorization failures and successes
+for RPC (boryas)
 IMPROVEMENTS
 HADOOP-6283. Improve the exception messages thrown by

conf/log4j.properties

@@ -57,6 +57,19 @@ log4j.appender.TLA.totalLogFileSize=${hadoop.tasklog.totalLogFileSize}
 log4j.appender.TLA.layout=org.apache.log4j.PatternLayout
 log4j.appender.TLA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
+#
+#Security appender
+#
+hadoop.security.log.file=SecurityAuth.audit
+log4j.appender.DRFAS=org.apache.log4j.DailyRollingFileAppender
+log4j.appender.DRFAS.File=${hadoop.log.dir}/${hadoop.security.log.file}
+log4j.appender.DRFAS.layout=org.apache.log4j.PatternLayout
+log4j.appender.DRFAS.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
+#new logger
+log4j.category.SecurityLogger=INFO,DRFAS
 #
 # Rolling File Appender
 #
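Note: the DRFAS appender above is attached to the log4j category SecurityLogger, so any logger whose name begins with "SecurityLogger." inherits it through log4j's hierarchical naming and writes to ${hadoop.log.dir}/SecurityAuth.audit at INFO. A minimal sketch of how code picks up this category; the class below is illustrative, not part of this patch:

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

public class AuditLogExample {                      // hypothetical class, for illustration only
  // Prefixing the class name with "SecurityLogger." places this logger under the
  // SecurityLogger category, so log4j routes it to the DRFAS audit appender at INFO.
  private static final Log AUDIT =
      LogFactory.getLog("SecurityLogger." + AuditLogExample.class.getName());

  public static void main(String[] args) {
    AUDIT.info("Auth successfull for example-user"); // lands in ${hadoop.log.dir}/SecurityAuth.audit
  }
}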

org/apache/hadoop/ipc/Server.java

@@ -103,6 +103,10 @@ public abstract class Server {
 static int INITIAL_RESP_BUF_SIZE = 10240;
 public static final Log LOG = LogFactory.getLog(Server.class);
+public static final Log auditLOG =
+LogFactory.getLog("SecurityLogger."+Server.class.getName());
+private static final String AUTH_FAILED_FOR = "Auth failed for ";
+private static final String AUTH_SUCCESSFULL_FOR = "Auth successfull for ";
 private static final ThreadLocal<Server> SERVER = new ThreadLocal<Server>();
@@ -718,7 +722,7 @@
 }
 /** Reads calls from a connection and queues them for handling. */
-private class Connection {
+public class Connection {
 private boolean rpcHeaderRead = false; // if initial rpc header is read
 private boolean headerRead = false; //if the connection header that
 //follows version is read.
@@ -748,6 +752,7 @@
 private ByteBuffer unwrappedDataLengthBuffer;
 UserGroupInformation user = null;
+public UserGroupInformation attemptingUser = null; // user name before auth
 // Fake 'call' for failed authorization response
 private static final int AUTHROIZATION_FAILED_CALLID = -1;
@@ -844,7 +849,7 @@
 saslServer = Sasl.createSaslServer(AuthMethod.DIGEST
 .getMechanismName(), null, SaslRpcServer.SASL_DEFAULT_REALM,
 SaslRpcServer.SASL_PROPS, new SaslDigestCallbackHandler(
-secretManager));
+secretManager, this));
 break;
 default:
 UserGroupInformation current = UserGroupInformation
@@ -884,6 +889,9 @@
 replyToken = saslServer.evaluateResponse(saslToken);
 } catch (SaslException se) {
 rpcMetrics.authenticationFailures.inc();
+String clientIP = this.toString();
+// attempting user could be null
+auditLOG.warn(AUTH_FAILED_FOR + clientIP + ":" + attemptingUser, se);
 throw se;
 }
 if (replyToken != null) {
@@ -905,6 +913,8 @@
 }
 user = getAuthorizedUgi(saslServer.getAuthorizationID());
 LOG.info("SASL server successfully authenticated client: " + user);
+rpcMetrics.authenticationSuccesses.inc();
+auditLOG.info(AUTH_SUCCESSFULL_FOR + user);
 saslContextEstablished = true;
 }
 } else {
@@ -1103,7 +1113,6 @@
 private void processOneRpc(byte[] buf) throws IOException,
 InterruptedException {
-rpcMetrics.authenticationSuccesses.inc();
 if (headerRead) {
 processData(buf);
 } else {
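Note: the Server changes above follow one pattern: on a SaslException the authenticationFailures metric is incremented and a WARN naming the connection and the attempting user goes to the audit log before the exception is rethrown; when the SASL exchange completes, authenticationSuccesses is incremented and an INFO is written, and the old increment in processOneRpc is dropped so the counter only reflects real authentications. A self-contained sketch of that pattern, with an AtomicLong standing in for RpcMetrics and a small interface standing in for the SASL server object; all names below are illustrative, not the patch itself:

import java.util.concurrent.atomic.AtomicLong;
import javax.security.sasl.SaslException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

public class SaslAuditSketch {
  private static final Log AUDIT =
      LogFactory.getLog("SecurityLogger." + SaslAuditSketch.class.getName());
  private static final String AUTH_FAILED_FOR = "Auth failed for ";
  private static final String AUTH_SUCCESSFULL_FOR = "Auth successfull for ";

  private final AtomicLong authenticationFailures = new AtomicLong();  // stands in for rpcMetrics counters
  private final AtomicLong authenticationSuccesses = new AtomicLong();

  byte[] evaluate(SaslStep sasl, byte[] token, String clientAddress, String attemptingUser)
      throws SaslException {
    final byte[] reply;
    try {
      reply = sasl.evaluateResponse(token);
    } catch (SaslException se) {
      // Count the failure and audit it before rethrowing; attemptingUser may be null
      // if the failure happened before the client identity was seen.
      authenticationFailures.incrementAndGet();
      AUDIT.warn(AUTH_FAILED_FOR + clientAddress + ":" + attemptingUser, se);
      throw se;
    }
    if (sasl.isComplete()) {
      // Only count and audit a success once the SASL context is fully established.
      authenticationSuccesses.incrementAndGet();
      AUDIT.info(AUTH_SUCCESSFULL_FOR + sasl.getAuthorizationID());
    }
    return reply;
  }

  /** Minimal stand-in for the server-side SASL object used above. */
  interface SaslStep {
    byte[] evaluateResponse(byte[] response) throws SaslException;
    boolean isComplete();
    String getAuthorizationID();
  }
}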

org/apache/hadoop/security/SaslRpcServer.java

@@ -38,6 +38,7 @@ import javax.security.sasl.Sasl;
 import org.apache.commons.codec.binary.Base64;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.ipc.Server;
 import org.apache.hadoop.security.token.SecretManager;
 import org.apache.hadoop.security.token.TokenIdentifier;
@@ -125,10 +126,13 @@
 /** CallbackHandler for SASL DIGEST-MD5 mechanism */
 public static class SaslDigestCallbackHandler implements CallbackHandler {
 private SecretManager<TokenIdentifier> secretManager;
+private Server.Connection connection;
 public SaslDigestCallbackHandler(
-SecretManager<TokenIdentifier> secretManager) {
+SecretManager<TokenIdentifier> secretManager,
+Server.Connection connection) {
 this.secretManager = secretManager;
+this.connection = connection;
 }
 private char[] getPassword(TokenIdentifier tokenid) throws IOException {
@@ -159,6 +163,10 @@
 if (pc != null) {
 TokenIdentifier tokenIdentifier = getIdentifier(nc.getDefaultName(), secretManager);
 char[] password = getPassword(tokenIdentifier);
+UserGroupInformation user = null;
+user = tokenIdentifier.getUser(); // may throw exception
+connection.attemptingUser = user;
 if (LOG.isDebugEnabled()) {
 LOG.debug("SASL server DIGEST-MD5 callback: setting password "
 + "for client: " + tokenIdentifier.getUser());

org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java

@@ -20,6 +20,8 @@ package org.apache.hadoop.security.authorize;
 import java.util.IdentityHashMap;
 import java.util.Map;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.security.UserGroupInformation;
@@ -45,6 +47,13 @@
 public static final String SERVICE_AUTHORIZATION_CONFIG =
 "hadoop.security.authorization";
+public static final Log auditLOG =
+LogFactory.getLog("SecurityLogger."+ServiceAuthorizationManager.class.getName());
+private static final String AUTHZ_SUCCESSFULL_FOR = "Authorization successfull for ";
+private static final String AUTHZ_FAILED_FOR = "Authorization failed for ";
 /**
 * Authorize the user to access the protocol being used.
 *
@@ -61,10 +70,12 @@
 " is not known.");
 }
 if (!acl.isUserAllowed(user)) {
-throw new AuthorizationException("User " + user.toString() +
+auditLOG.warn(AUTHZ_FAILED_FOR + user + " for protocol="+protocol);
+throw new AuthorizationException("User " + user +
 " is not authorized for protocol " +
 protocol);
 }
+auditLOG.info(AUTHZ_SUCCESSFULL_FOR + user + " for protocol="+protocol);
 }
 public static synchronized void refresh(Configuration conf,
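Note: authorize() now audits both outcomes on the same SecurityLogger category: a WARN naming the user and protocol immediately before the AuthorizationException is thrown, and an INFO once the ACL check has passed. A self-contained sketch of that check-then-audit shape; the Set-based ACL and the SecurityException are stand-ins for Hadoop's AccessControlList and AuthorizationException:

import java.util.Set;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

public class AuthorizeSketch {                              // illustrative only
  private static final Log AUDIT =
      LogFactory.getLog("SecurityLogger." + AuthorizeSketch.class.getName());
  private static final String AUTHZ_SUCCESSFULL_FOR = "Authorization successfull for ";
  private static final String AUTHZ_FAILED_FOR = "Authorization failed for ";

  /** Stand-in for Hadoop's AccessControlList. */
  private final Set<String> allowedUsers;

  public AuthorizeSketch(Set<String> allowedUsers) {
    this.allowedUsers = allowedUsers;
  }

  public void authorize(String user, String protocol) {
    if (!allowedUsers.contains(user)) {
      // Audit the denial first, then fail the call.
      AUDIT.warn(AUTHZ_FAILED_FOR + user + " for protocol=" + protocol);
      throw new SecurityException(
          "User " + user + " is not authorized for protocol " + protocol);
    }
    // Only reached when the ACL check passed.
    AUDIT.info(AUTHZ_SUCCESSFULL_FOR + user + " for protocol=" + protocol);
  }
}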

org/apache/hadoop/ipc/TestRPC.java

@@ -370,30 +370,22 @@ public class TestRPC extends TestCase {
 RPC.stopProxy(proxy);
 }
 if (expectFailure) {
-assertTrue("Expected 1 but got " +
-server.getRpcMetrics().authorizationFailures
-.getCurrentIntervalValue(),
-server.getRpcMetrics().authorizationFailures
-.getCurrentIntervalValue() == 1);
+assertEquals("Wrong number of authorizationFailures ", 1,
+server.getRpcMetrics().authorizationFailures
+.getCurrentIntervalValue());
 } else {
-assertTrue("Expected 1 but got " +
-server.getRpcMetrics().authorizationSuccesses
-.getCurrentIntervalValue(),
-server.getRpcMetrics().authorizationSuccesses
-.getCurrentIntervalValue() == 1);
+assertEquals("Wrong number of authorizationSuccesses ", 1,
+server.getRpcMetrics().authorizationSuccesses
+.getCurrentIntervalValue());
 }
 //since we don't have authentication turned ON, we should see
-// >0 for the authentication successes and 0 for failure
-assertTrue("Expected 0 but got " +
-server.getRpcMetrics().authenticationFailures
-.getCurrentIntervalValue(),
-server.getRpcMetrics().authenticationFailures
-.getCurrentIntervalValue() == 0);
-assertTrue("Expected greater than 0 but got " +
-server.getRpcMetrics().authenticationSuccesses
-.getCurrentIntervalValue(),
-server.getRpcMetrics().authenticationSuccesses
-.getCurrentIntervalValue() > 0);
+// 0 for the authentication successes and 0 for failure
+assertEquals("Wrong number of authenticationFailures ", 0,
+server.getRpcMetrics().authenticationFailures
+.getCurrentIntervalValue());
+assertEquals("Wrong number of authenticationSuccesses ", 0,
+server.getRpcMetrics().authenticationSuccesses
+.getCurrentIntervalValue());
 }
 }
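Note: the test now uses JUnit's assertEquals(message, expected, actual), which reports both values on failure, instead of assertTrue with a hand-built message, and it expects authenticationSuccesses to be 0 because the counter is now incremented only on an actual SASL authentication, which this unauthenticated test never performs. A small illustration of the two assertion styles; the metric accessor below is a stand-in:

import junit.framework.TestCase;

/** Illustrative only: shows why the patch prefers assertEquals over assertTrue here. */
public class MetricsAssertionExample extends TestCase {

  private int currentAuthorizationFailures() {
    return 1; // stand-in for server.getRpcMetrics().authorizationFailures.getCurrentIntervalValue()
  }

  public void testStyles() {
    int failures = currentAuthorizationFailures();

    // Old style: the expected/actual values only show up if you build the message yourself.
    assertTrue("Expected 1 but got " + failures, failures == 1);

    // New style: JUnit prints expected and actual automatically on mismatch.
    assertEquals("Wrong number of authorizationFailures ", 1, failures);
  }
}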