HADOOP-6586. Log authentication and authorization failures and successes for RPC

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@916779 13f79535-47bb-0310-9956-ffa450edef68
Boris Shkolnik 2010-02-26 18:35:54 +00:00
parent ea650d8d6c
commit c4c122a0de
6 changed files with 59 additions and 24 deletions

View File

@@ -65,6 +65,8 @@ Trunk (unreleased changes)
 HADOOP-6568. Adds authorization for the default servlets.
 (Vinod Kumar Vavilapalli via ddas)
+HADOOP-6586. Log authentication and authorization failures and successes
+for RPC (boryas)
 IMPROVEMENTS
 HADOOP-6283. Improve the exception messages thrown by

View File

@@ -57,6 +57,19 @@ log4j.appender.TLA.totalLogFileSize=${hadoop.tasklog.totalLogFileSize}
 log4j.appender.TLA.layout=org.apache.log4j.PatternLayout
 log4j.appender.TLA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
+#
+#Security appender
+#
+hadoop.security.log.file=SecurityAuth.audit
+log4j.appender.DRFAS=org.apache.log4j.DailyRollingFileAppender
+log4j.appender.DRFAS.File=${hadoop.log.dir}/${hadoop.security.log.file}
+log4j.appender.DRFAS.layout=org.apache.log4j.PatternLayout
+log4j.appender.DRFAS.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
+#new logger
+log4j.category.SecurityLogger=INFO,DRFAS
 #
 # Rolling File Appender
 #
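
Everything logged under the SecurityLogger category now lands in ${hadoop.log.dir}/SecurityAuth.audit, rolled daily by the DRFAS appender. A minimal sketch of how code attaches to this category is below; MySecuredService is a hypothetical class used only for illustration, not part of this patch.

    // Any logger whose name starts with "SecurityLogger." inherits the INFO
    // threshold and the DRFAS appender configured in log4j.properties above.
    import org.apache.commons.logging.Log;
    import org.apache.commons.logging.LogFactory;

    class MySecuredService {  // hypothetical example class
      private static final Log AUDITLOG =
          LogFactory.getLog("SecurityLogger." + MySecuredService.class.getName());

      void recordSuccess(String user) {
        // With the ConversionPattern above this renders roughly as:
        // 2010-02-26 18:35:54,123 INFO SecurityLogger.MySecuredService: Auth successfull for hdfs
        AUDITLOG.info("Auth successfull for " + user);
      }
    }

This is exactly the hook the Server and ServiceAuthorizationManager changes below rely on: they obtain their audit loggers with a "SecurityLogger." prefix so the new appender picks them up without touching the regular LOG output.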

View File

@@ -103,7 +103,11 @@ public abstract class Server {
 static int INITIAL_RESP_BUF_SIZE = 10240;
 public static final Log LOG = LogFactory.getLog(Server.class);
+public static final Log auditLOG =
+  LogFactory.getLog("SecurityLogger."+Server.class.getName());
+private static final String AUTH_FAILED_FOR = "Auth failed for ";
+private static final String AUTH_SUCCESSFULL_FOR = "Auth successfull for ";
 private static final ThreadLocal<Server> SERVER = new ThreadLocal<Server>();
 private static final Map<String, Class<?>> PROTOCOL_CACHE =

@@ -718,7 +722,7 @@ private synchronized void waitPending() throws InterruptedException {
 }
 /** Reads calls from a connection and queues them for handling. */
-private class Connection {
+public class Connection {
 private boolean rpcHeaderRead = false; // if initial rpc header is read
 private boolean headerRead = false; //if the connection header that
 //follows version is read.

@@ -748,6 +752,7 @@ private class Connection {
 private ByteBuffer unwrappedDataLengthBuffer;
 UserGroupInformation user = null;
+public UserGroupInformation attemptingUser = null; // user name before auth
 // Fake 'call' for failed authorization response
 private static final int AUTHROIZATION_FAILED_CALLID = -1;

@@ -844,7 +849,7 @@ private void saslReadAndProcess(byte[] saslToken) throws IOException,
 saslServer = Sasl.createSaslServer(AuthMethod.DIGEST
 .getMechanismName(), null, SaslRpcServer.SASL_DEFAULT_REALM,
 SaslRpcServer.SASL_PROPS, new SaslDigestCallbackHandler(
-secretManager));
+secretManager, this));
 break;
 default:
 UserGroupInformation current = UserGroupInformation

@@ -884,6 +889,9 @@ public Object run() throws IOException {
 replyToken = saslServer.evaluateResponse(saslToken);
 } catch (SaslException se) {
 rpcMetrics.authenticationFailures.inc();
+String clientIP = this.toString();
+// attempting user could be null
+auditLOG.warn(AUTH_FAILED_FOR + clientIP + ":" + attemptingUser, se);
 throw se;
 }
 if (replyToken != null) {

@@ -905,6 +913,8 @@ public Object run() throws IOException {
 }
 user = getAuthorizedUgi(saslServer.getAuthorizationID());
 LOG.info("SASL server successfully authenticated client: " + user);
+rpcMetrics.authenticationSuccesses.inc();
+auditLOG.info(AUTH_SUCCESSFULL_FOR + user);
 saslContextEstablished = true;
 }
 } else {

@@ -1103,7 +1113,6 @@ private void processUnwrappedData(byte[] inBuf) throws IOException,
 private void processOneRpc(byte[] buf) throws IOException,
 InterruptedException {
-rpcMetrics.authenticationSuccesses.inc();
 if (headerRead) {
 processData(buf);
 } else {
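
The Server changes audit both outcomes of the SASL handshake: a SaslException from evaluateResponse is logged with the connection (client address) and the attemptingUser captured by the callback handler, while a completed handshake bumps rpcMetrics.authenticationSuccesses and logs the authorized user. The authenticationSuccesses counter also moves out of processOneRpc, so it now counts actual authentications rather than every RPC. A condensed sketch of the pattern, assuming a hypothetical doSaslStep() in place of the surrounding Server.Connection code:

    import javax.security.sasl.SaslException;
    import javax.security.sasl.SaslServer;
    import org.apache.commons.logging.Log;
    import org.apache.commons.logging.LogFactory;

    class SaslAuditSketch {  // illustrative only, not the real Server internals
      private static final Log AUDITLOG =
          LogFactory.getLog("SecurityLogger." + SaslAuditSketch.class.getName());
      private static final String AUTH_FAILED_FOR = "Auth failed for ";
      private static final String AUTH_SUCCESSFULL_FOR = "Auth successfull for ";

      byte[] doSaslStep(SaslServer saslServer, byte[] token,
                        String clientAddress, String attemptingUser) throws SaslException {
        byte[] reply;
        try {
          reply = saslServer.evaluateResponse(token);
        } catch (SaslException se) {
          // failure: name the client address and whoever was attempting to authenticate
          AUDITLOG.warn(AUTH_FAILED_FOR + clientAddress + ":" + attemptingUser, se);
          throw se;
        }
        if (saslServer.isComplete()) {
          // success: the authorization ID is the authenticated principal
          AUDITLOG.info(AUTH_SUCCESSFULL_FOR + saslServer.getAuthorizationID());
        }
        return reply;
      }
    }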

View File

@@ -38,6 +38,7 @@
 import org.apache.commons.codec.binary.Base64;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.ipc.Server;
 import org.apache.hadoop.security.token.SecretManager;
 import org.apache.hadoop.security.token.TokenIdentifier;

@@ -125,10 +126,13 @@ public void write(DataOutput out) throws IOException {
 /** CallbackHandler for SASL DIGEST-MD5 mechanism */
 public static class SaslDigestCallbackHandler implements CallbackHandler {
 private SecretManager<TokenIdentifier> secretManager;
+private Server.Connection connection;
 public SaslDigestCallbackHandler(
-SecretManager<TokenIdentifier> secretManager) {
+SecretManager<TokenIdentifier> secretManager,
+Server.Connection connection) {
 this.secretManager = secretManager;
+this.connection = connection;
 }
 private char[] getPassword(TokenIdentifier tokenid) throws IOException {

@@ -159,6 +163,10 @@ public void handle(Callback[] callbacks) throws IOException,
 if (pc != null) {
 TokenIdentifier tokenIdentifier = getIdentifier(nc.getDefaultName(), secretManager);
 char[] password = getPassword(tokenIdentifier);
+UserGroupInformation user = null;
+user = tokenIdentifier.getUser(); // may throw exception
+connection.attemptingUser = user;
 if (LOG.isDebugEnabled()) {
 LOG.debug("SASL server DIGEST-MD5 callback: setting password "
 + "for client: " + tokenIdentifier.getUser());

View File

@@ -20,6 +20,8 @@
 import java.util.IdentityHashMap;
 import java.util.Map;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.security.UserGroupInformation;

@@ -45,6 +47,13 @@ public class ServiceAuthorizationManager {
 public static final String SERVICE_AUTHORIZATION_CONFIG =
 "hadoop.security.authorization";
+public static final Log auditLOG =
+  LogFactory.getLog("SecurityLogger."+ServiceAuthorizationManager.class.getName());
+private static final String AUTHZ_SUCCESSFULL_FOR = "Authorization successfull for ";
+private static final String AUTHZ_FAILED_FOR = "Authorization failed for ";
 /**
 * Authorize the user to access the protocol being used.
 *

@@ -61,10 +70,12 @@ public static void authorize(UserGroupInformation user,
 " is not known.");
 }
 if (!acl.isUserAllowed(user)) {
-throw new AuthorizationException("User " + user.toString() +
+auditLOG.warn(AUTHZ_FAILED_FOR + user + " for protocol="+protocol);
+throw new AuthorizationException("User " + user +
 " is not authorized for protocol " +
 protocol);
 }
+auditLOG.info(AUTHZ_SUCCESSFULL_FOR + user + " for protocol="+protocol);
 }
 public static synchronized void refresh(Configuration conf,
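
ServiceAuthorizationManager now publishes authorization decisions under the same SecurityLogger category: a warn line is written before the AuthorizationException is thrown and an info line on success, each tagged with the protocol. A generic sketch of this audit-both-paths idea, using plain Java types rather than Hadoop APIs:

    import java.util.Set;
    import org.apache.commons.logging.Log;
    import org.apache.commons.logging.LogFactory;

    class AuthzAuditSketch {  // illustrative only
      private static final Log AUDITLOG =
          LogFactory.getLog("SecurityLogger." + AuthzAuditSketch.class.getName());

      void checkAccess(String user, String protocol, Set<String> allowedUsers) {
        if (!allowedUsers.contains(user)) {
          // log before throwing, so the audit trail survives even if the
          // caller swallows or rewraps the exception
          AUDITLOG.warn("Authorization failed for " + user + " for protocol=" + protocol);
          throw new SecurityException("User " + user
              + " is not authorized for protocol " + protocol);
        }
        AUDITLOG.info("Authorization successfull for " + user + " for protocol=" + protocol);
      }
    }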

View File

@@ -370,30 +370,22 @@ private void doRPCs(Configuration conf, boolean expectFailure) throws Exception
 RPC.stopProxy(proxy);
 }
 if (expectFailure) {
-assertTrue("Expected 1 but got " +
-server.getRpcMetrics().authorizationFailures
-.getCurrentIntervalValue(),
-server.getRpcMetrics().authorizationFailures
-.getCurrentIntervalValue() == 1);
+assertEquals("Wrong number of authorizationFailures ", 1,
+server.getRpcMetrics().authorizationFailures
+.getCurrentIntervalValue());
 } else {
-assertTrue("Expected 1 but got " +
-server.getRpcMetrics().authorizationSuccesses
-.getCurrentIntervalValue(),
-server.getRpcMetrics().authorizationSuccesses
-.getCurrentIntervalValue() == 1);
+assertEquals("Wrong number of authorizationSuccesses ", 1,
+server.getRpcMetrics().authorizationSuccesses
+.getCurrentIntervalValue());
 }
 //since we don't have authentication turned ON, we should see
-// >0 for the authentication successes and 0 for failure
-assertTrue("Expected 0 but got " +
-server.getRpcMetrics().authenticationFailures
-.getCurrentIntervalValue(),
-server.getRpcMetrics().authenticationFailures
-.getCurrentIntervalValue() == 0);
-assertTrue("Expected greater than 0 but got " +
-server.getRpcMetrics().authenticationSuccesses
-.getCurrentIntervalValue(),
-server.getRpcMetrics().authenticationSuccesses
-.getCurrentIntervalValue() > 0);
+// 0 for the authentication successes and 0 for failure
+assertEquals("Wrong number of authenticationFailures ", 0,
+server.getRpcMetrics().authenticationFailures
+.getCurrentIntervalValue());
+assertEquals("Wrong number of authenticationSuccesses ", 0,
+server.getRpcMetrics().authenticationSuccesses
+.getCurrentIntervalValue());
 }
 }
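
The TestRPC assertions switch from assertTrue with hand-built messages to assertEquals, which reports both expected and actual values on failure, and the expected authenticationSuccesses count drops to 0: with security off this test never performs a SASL handshake, and the counter is now incremented only on a real authentication. A minimal JUnit sketch of the new style, with a plain int standing in for the metric:

    import static org.junit.Assert.assertEquals;
    import org.junit.Test;

    public class MetricsAssertionStyleTest {  // hypothetical, mirrors the test change
      private int authenticationSuccesses() {
        return 0;  // security off: no SASL handshake, so the counter never moves
      }

      @Test
      public void authenticationCountersStayAtZeroWithoutSecurity() {
        // assertEquals prints expected vs. actual automatically, unlike the old
        // assertTrue("Expected 0 but got " + v, v == 0) idiom this patch removes
        assertEquals("Wrong number of authenticationSuccesses ", 0,
            authenticationSuccesses());
      }
    }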