diff --git a/CHANGES.txt b/CHANGES.txt
index b5f8466d4c9..bd586abebaf 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -65,6 +65,8 @@ Trunk (unreleased changes)
     HADOOP-6568. Adds authorization for the default servlets.
     (Vinod Kumar Vavilapalli via ddas)
+    HADOOP-6586. Log authentication and authorization failures and successes
+    for RPC (boryas)
 
   IMPROVEMENTS
 
     HADOOP-6283. Improve the exception messages thrown by
diff --git a/conf/log4j.properties b/conf/log4j.properties
index d797df6dabe..2783524fefd 100644
--- a/conf/log4j.properties
+++ b/conf/log4j.properties
@@ -57,6 +57,19 @@
 log4j.appender.TLA.totalLogFileSize=${hadoop.tasklog.totalLogFileSize}
 log4j.appender.TLA.layout=org.apache.log4j.PatternLayout
 log4j.appender.TLA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
+
+#
+#Security appender
+#
+hadoop.security.log.file=SecurityAuth.audit
+log4j.appender.DRFAS=org.apache.log4j.DailyRollingFileAppender
+log4j.appender.DRFAS.File=${hadoop.log.dir}/${hadoop.security.log.file}
+
+log4j.appender.DRFAS.layout=org.apache.log4j.PatternLayout
+log4j.appender.DRFAS.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
+#new logger
+log4j.category.SecurityLogger=INFO,DRFAS
+
 #
 # Rolling File Appender
 #
diff --git a/src/java/org/apache/hadoop/ipc/Server.java b/src/java/org/apache/hadoop/ipc/Server.java
index 999a3802bb2..6d7e154922a 100644
--- a/src/java/org/apache/hadoop/ipc/Server.java
+++ b/src/java/org/apache/hadoop/ipc/Server.java
@@ -103,7 +103,11 @@ public abstract class Server {
   static int INITIAL_RESP_BUF_SIZE = 10240;
 
   public static final Log LOG = LogFactory.getLog(Server.class);
-
+  public static final Log auditLOG =
+    LogFactory.getLog("SecurityLogger."+Server.class.getName());
+  private static final String AUTH_FAILED_FOR = "Auth failed for ";
+  private static final String AUTH_SUCCESSFUL_FOR = "Auth successful for ";
+
   private static final ThreadLocal<Server> SERVER = new ThreadLocal<Server>();
 
   private static final Map<String, Class<?>> PROTOCOL_CACHE =
@@ -718,7 +722,7 @@ public abstract class Server {
   }
 
   /** Reads calls from a connection and queues them for handling. */
-  private class Connection {
+  public class Connection {
     private boolean rpcHeaderRead = false; // if initial rpc header is read
     private boolean headerRead = false;  //if the connection header that
                                          //follows version is read.
@@ -748,6 +752,7 @@ public abstract class Server {
     private ByteBuffer unwrappedDataLengthBuffer;
 
     UserGroupInformation user = null;
+    public UserGroupInformation attemptingUser = null; // user name before auth
 
     // Fake 'call' for failed authorization response
     private static final int AUTHROIZATION_FAILED_CALLID = -1;
@@ -844,7 +849,7 @@
           saslServer = Sasl.createSaslServer(AuthMethod.DIGEST
               .getMechanismName(), null, SaslRpcServer.SASL_DEFAULT_REALM,
               SaslRpcServer.SASL_PROPS, new SaslDigestCallbackHandler(
-                  secretManager));
+                  secretManager, this));
           break;
         default:
           UserGroupInformation current = UserGroupInformation
@@ -884,6 +889,9 @@
             replyToken = saslServer.evaluateResponse(saslToken);
           } catch (SaslException se) {
             rpcMetrics.authenticationFailures.inc();
+            String clientIP = this.toString();
+            // attempting user could be null
+            auditLOG.warn(AUTH_FAILED_FOR + clientIP + ":" + attemptingUser, se);
             throw se;
           }
           if (replyToken != null) {
@@ -905,6 +913,8 @@
           }
           user = getAuthorizedUgi(saslServer.getAuthorizationID());
           LOG.info("SASL server successfully authenticated client: " + user);
+          rpcMetrics.authenticationSuccesses.inc();
+          auditLOG.info(AUTH_SUCCESSFUL_FOR + user);
           saslContextEstablished = true;
         }
       } else {
@@ -1103,7 +1113,6 @@
 
     private void processOneRpc(byte[] buf) throws IOException,
         InterruptedException {
-      rpcMetrics.authenticationSuccesses.inc();
       if (headerRead) {
         processData(buf);
       } else {
diff --git a/src/java/org/apache/hadoop/security/SaslRpcServer.java b/src/java/org/apache/hadoop/security/SaslRpcServer.java
index e8ed9d78223..7b69476a6af 100644
--- a/src/java/org/apache/hadoop/security/SaslRpcServer.java
+++ b/src/java/org/apache/hadoop/security/SaslRpcServer.java
@@ -38,6 +38,7 @@
 import javax.security.sasl.Sasl;
 import org.apache.commons.codec.binary.Base64;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.ipc.Server;
 import org.apache.hadoop.security.token.SecretManager;
 import org.apache.hadoop.security.token.TokenIdentifier;
@@ -125,10 +126,13 @@
   /** CallbackHandler for SASL DIGEST-MD5 mechanism */
   public static class SaslDigestCallbackHandler implements CallbackHandler {
     private SecretManager<TokenIdentifier> secretManager;
-
+    private Server.Connection connection;
+
     public SaslDigestCallbackHandler(
-        SecretManager<TokenIdentifier> secretManager) {
+        SecretManager<TokenIdentifier> secretManager,
+        Server.Connection connection) {
       this.secretManager = secretManager;
+      this.connection = connection;
     }
 
     private char[] getPassword(TokenIdentifier tokenid) throws IOException {
@@ -159,6 +163,10 @@
         if (pc != null) {
           TokenIdentifier tokenIdentifier = getIdentifier(nc.getDefaultName(), secretManager);
           char[] password = getPassword(tokenIdentifier);
+          UserGroupInformation user = null;
+          user = tokenIdentifier.getUser(); // may throw exception
+          connection.attemptingUser = user;
+
           if (LOG.isDebugEnabled()) {
             LOG.debug("SASL server DIGEST-MD5 callback: setting password "
                 + "for client: " + tokenIdentifier.getUser());
diff --git a/src/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java b/src/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java
index 6cf9a4007dd..3cf04b99450 100644
--- a/src/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java
+++ b/src/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java
@@ -20,6 +20,8 @@ package org.apache.hadoop.security.authorize;
 import java.util.IdentityHashMap;
 import java.util.Map;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.security.UserGroupInformation;
@@ -45,6 +47,13 @@ public class ServiceAuthorizationManager {
   public static final String SERVICE_AUTHORIZATION_CONFIG =
     "hadoop.security.authorization";
 
+  public static final Log auditLOG =
+    LogFactory.getLog("SecurityLogger."+ServiceAuthorizationManager.class.getName());
+
+  private static final String AUTHZ_SUCCESSFUL_FOR = "Authorization successful for ";
+  private static final String AUTHZ_FAILED_FOR = "Authorization failed for ";
+
+
   /**
    * Authorize the user to access the protocol being used.
    *
@@ -61,10 +70,12 @@ public class ServiceAuthorizationManager {
                                        " is not known.");
     }
     if (!acl.isUserAllowed(user)) {
-      throw new AuthorizationException("User " + user.toString() +
+      auditLOG.warn(AUTHZ_FAILED_FOR + user + " for protocol="+protocol);
+      throw new AuthorizationException("User " + user +
                                        " is not authorized for protocol " +
                                        protocol);
     }
+    auditLOG.info(AUTHZ_SUCCESSFUL_FOR + user + " for protocol="+protocol);
   }
 
   public static synchronized void refresh(Configuration conf,
diff --git a/src/test/core/org/apache/hadoop/ipc/TestRPC.java b/src/test/core/org/apache/hadoop/ipc/TestRPC.java
index 55ab115a5f4..c7b41a4ed43 100644
--- a/src/test/core/org/apache/hadoop/ipc/TestRPC.java
+++ b/src/test/core/org/apache/hadoop/ipc/TestRPC.java
@@ -370,30 +370,22 @@ public class TestRPC extends TestCase {
       RPC.stopProxy(proxy);
     }
     if (expectFailure) {
-      assertTrue("Expected 1 but got " +
+      assertEquals("Wrong number of authorizationFailures ", 1,
           server.getRpcMetrics().authorizationFailures
-          .getCurrentIntervalValue(),
-          server.getRpcMetrics().authorizationFailures
-          .getCurrentIntervalValue() == 1);
+          .getCurrentIntervalValue());
     } else {
-      assertTrue("Expected 1 but got " +
+      assertEquals("Wrong number of authorizationSuccesses ", 1,
           server.getRpcMetrics().authorizationSuccesses
-          .getCurrentIntervalValue(),
-          server.getRpcMetrics().authorizationSuccesses
-          .getCurrentIntervalValue() == 1);
+          .getCurrentIntervalValue());
     }
 
     //since we don't have authentication turned ON, we should see
-    // >0 for the authentication successes and 0 for failure
-    assertTrue("Expected 0 but got " +
+    // 0 for the authentication successes and 0 for failure
+    assertEquals("Wrong number of authenticationFailures ", 0,
         server.getRpcMetrics().authenticationFailures
-        .getCurrentIntervalValue(),
-        server.getRpcMetrics().authenticationFailures
-        .getCurrentIntervalValue() == 0);
-    assertTrue("Expected greater than 0 but got " +
+        .getCurrentIntervalValue());
+    assertEquals("Wrong number of authenticationSuccesses ", 0,
         server.getRpcMetrics().authenticationSuccesses
-        .getCurrentIntervalValue(),
-        server.getRpcMetrics().authenticationSuccesses
-        .getCurrentIntervalValue() > 0);
+        .getCurrentIntervalValue());
   }
 }
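
For illustration, a minimal sketch of the "SecurityLogger" naming pattern the patch relies on in Server.java and ServiceAuthorizationManager.java: the log4j change routes any logger whose name starts with SecurityLogger to the DRFAS appender, i.e. to ${hadoop.log.dir}/SecurityAuth.audit. MyRpcService is a hypothetical class used only for this sketch; the only assumptions are commons-logging on the classpath and the log4j configuration shown in the patch.

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

// Hypothetical class, used only to show the logger-naming convention:
// prefixing the class name with "SecurityLogger." makes the messages match
// log4j.category.SecurityLogger=INFO,DRFAS, so they are written to
// ${hadoop.log.dir}/SecurityAuth.audit instead of the regular log.
public class MyRpcService {

  // Ordinary diagnostic logger, handled by the default log4j configuration.
  private static final Log LOG = LogFactory.getLog(MyRpcService.class);

  // Security audit logger, picked up by the DRFAS appender configured above.
  private static final Log AUDIT_LOG =
      LogFactory.getLog("SecurityLogger." + MyRpcService.class.getName());

  public void handleRequest(String user) {
    LOG.debug("handling request for " + user);
    AUDIT_LOG.info("Auth successful for " + user);  // one line per audit event
  }
}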
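The attemptingUser field exists so that a SASL DIGEST-MD5 failure can still be attributed to the identity the client claimed: the callback handler records the token's user on the connection before authentication completes, and the catch block around evaluateResponse() includes it in the "Auth failed for" audit line. Below is a simplified, self-contained sketch of that flow; the classes are stand-ins invented for illustration, not the real Server or SaslRpcServer types.

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

// Simplified stand-ins (not the real Hadoop classes) showing why the patch
// records the claimed identity on the connection before SASL auth finishes.
public class AuthAuditSketch {

  private static final Log AUDIT_LOG =
      LogFactory.getLog("SecurityLogger." + AuthAuditSketch.class.getName());

  // Stand-in for Server.Connection: remembers who is trying to authenticate.
  static class Connection {
    final String remoteAddress;
    String attemptingUser;                 // user name before auth completes
    Connection(String remoteAddress) { this.remoteAddress = remoteAddress; }
  }

  // Stand-in for the DIGEST-MD5 name callback step: the handler learns the
  // claimed user from the token and stores it on the connection.
  static void onNameCallback(Connection conn, String claimedUser) {
    conn.attemptingUser = claimedUser;
  }

  // Stand-in for the server-side SASL step: on failure the audit log can
  // still name the claimed identity, mirroring the "Auth failed for" line.
  static void evaluateResponse(Connection conn, boolean passwordOk) {
    if (!passwordOk) {
      AUDIT_LOG.warn("Auth failed for " + conn.remoteAddress + ":" + conn.attemptingUser);
      return;
    }
    AUDIT_LOG.info("Auth successful for " + conn.attemptingUser);
  }

  public static void main(String[] args) {
    Connection conn = new Connection("10.0.0.1:50010");
    onNameCallback(conn, "client@EXAMPLE.COM");
    evaluateResponse(conn, false);  // audit: Auth failed for 10.0.0.1:50010:client@EXAMPLE.COM
  }
}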
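The TestRPC change replaces assertTrue(message, expression == expected) with assertEquals(message, expected, actual), which reports the actual counter value on failure without hand-built messages, and it now expects 0 authentication successes because the success counter is only incremented once SASL authentication actually completes. A small illustrative test in the same style follows; the Counter class is a stand-in for the real RPC metrics objects, which are not reproduced here, though they expose getCurrentIntervalValue() in the same way.

import static org.junit.Assert.assertEquals;

import org.junit.Test;

// Illustrative only: the assertion style TestRPC switches to. assertEquals
// reports the expected and actual values itself, so the hand-built
// "Expected 1 but got ..." messages are no longer needed.
public class MetricAssertionStyleTest {

  // Stand-in for a metrics counter such as rpcMetrics.authorizationFailures.
  static class Counter {
    private int value;
    void inc() { value++; }
    int getCurrentIntervalValue() { return value; }
  }

  @Test
  public void authorizationFailureIsCounted() {
    Counter authorizationFailures = new Counter();
    authorizationFailures.inc();  // pretend one call was rejected
    assertEquals("Wrong number of authorizationFailures", 1,
        authorizationFailures.getCurrentIntervalValue());
  }
}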