HADOOP-6693. Add metrics to track kerberos login activity. Contributed by Suresh Srinivas.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@986469 13f79535-47bb-0310-9956-ffa450edef68
Suresh Srinivas 2010-08-17 20:03:39 +00:00
parent 9950db1817
commit 6fe92180fa
5 changed files with 99 additions and 2 deletions

CHANGES.txt

@@ -120,6 +120,8 @@ Trunk (unreleased changes)
    HADOOP-6905. Better logging messages when a delegation token is invalid.
    (Kan Zhang via jghoman)

    HADOOP-6693. Add metrics to track kerberos login activity. (suresh)

  OPTIMIZATIONS

  BUG FIXES

hadoop-metrics.properties

@@ -51,3 +51,18 @@ rpc.class=org.apache.hadoop.metrics.spi.NullContext
# rpc.class=org.apache.hadoop.metrics.ganglia.GangliaContext
# rpc.period=10
# rpc.servers=localhost:8649

# Configuration of the "ugi" context for null
ugi.class=org.apache.hadoop.metrics.spi.NullContext

# Configuration of the "ugi" context for file
#ugi.class=org.apache.hadoop.metrics.file.FileContext
#ugi.period=10
#ugi.fileName=/tmp/ugimetrics.log

# Configuration of the "ugi" context for ganglia
# ugi.class=org.apache.hadoop.metrics.ganglia.GangliaContext
# ugi.period=10
# ugi.servers=localhost:8649
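The new "ugi" keys above are resolved at runtime through the legacy org.apache.hadoop.metrics API. A minimal sketch, not part of this commit (the class name UgiContextProbe is hypothetical), assuming a hadoop-metrics.properties like the one above is on the classpath:

import org.apache.hadoop.metrics.MetricsContext;
import org.apache.hadoop.metrics.MetricsUtil;

/** Hypothetical probe: shows how the "ugi" section above is resolved. */
public class UgiContextProbe {
  public static void main(String[] args) {
    // Reads ugi.class / ugi.period from hadoop-metrics.properties on the
    // classpath; with the default above this is the no-op NullContext.
    MetricsContext context = MetricsUtil.getContext("ugi");
    System.out.println("ugi context: " + context.getClass().getName());
    System.out.println("update period (seconds): " + context.getPeriod());
  }
}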

UserGroupInformation.java

@@ -52,6 +52,13 @@ import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.metrics.MetricsContext;
import org.apache.hadoop.metrics.MetricsRecord;
import org.apache.hadoop.metrics.MetricsUtil;
import org.apache.hadoop.metrics.Updater;
import org.apache.hadoop.metrics.util.MetricsBase;
import org.apache.hadoop.metrics.util.MetricsRegistry;
import org.apache.hadoop.metrics.util.MetricsTimeVaryingRate;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;
@@ -76,6 +83,41 @@ public class UserGroupInformation {
   */
  private static final float TICKET_RENEW_WINDOW = 0.80f;

  /**
   * UgiMetrics maintains UGI activity statistics
   * and publishes them through the metrics interfaces.
   */
  static class UgiMetrics implements Updater {
    final MetricsTimeVaryingRate loginSuccess;
    final MetricsTimeVaryingRate loginFailure;
    private final MetricsRecord metricsRecord;
    private final MetricsRegistry registry;

    UgiMetrics() {
      registry = new MetricsRegistry();
      loginSuccess = new MetricsTimeVaryingRate("loginSuccess", registry,
        "Rate of successful kerberos logins and time taken in milliseconds");
      loginFailure = new MetricsTimeVaryingRate("loginFailure", registry,
        "Rate of failed kerberos logins and time taken in milliseconds");
      final MetricsContext metricsContext = MetricsUtil.getContext("ugi");
      metricsRecord = MetricsUtil.createRecord(metricsContext, "ugi");
      metricsContext.registerUpdater(this);
    }

    /**
     * Push the metrics to the monitoring subsystem on doUpdate() call.
     */
    @Override
    public void doUpdates(final MetricsContext context) {
      synchronized (this) {
        for (MetricsBase m : registry.getMetricsList()) {
          m.pushMetric(metricsRecord);
        }
      }
      metricsRecord.update();
    }
  }

  /**
   * A login module that looks at the Kerberos, Unix, or Windows principal and
   * adds the corresponding UserName.
@@ -137,6 +179,8 @@ public class UserGroupInformation {
    }
  }

  /** Metrics to track UGI activity */
  static UgiMetrics metrics = new UgiMetrics();
  /** Are the static variables that depend on configuration initialized? */
  private static boolean isInitialized = false;
  /** Should we use Kerberos configuration? */
@@ -525,7 +569,7 @@
  }

  /**
   * Log a user in from a keytab file. Loads a user identity from a keytab
   * file and login them in. They become the currently logged-in user.
   * file and logs them in. They become the currently logged-in user.
   * @param user the principal name to load from the keytab
   * @param path the path to the keytab file
   * @throws IOException if the keytab file can't be read
@@ -541,14 +585,20 @@
    keytabPrincipal = user;
    Subject subject = new Subject();
    LoginContext login;
    long start = 0;
    try {
      login =
        new LoginContext(HadoopConfiguration.KEYTAB_KERBEROS_CONFIG_NAME, subject);
      start = System.currentTimeMillis();
      login.login();
      metrics.loginSuccess.inc(System.currentTimeMillis() - start);
      loginUser = new UserGroupInformation(subject);
      loginUser.setLogin(login);
      loginUser.setAuthenticationMethod(AuthenticationMethod.KERBEROS);
    } catch (LoginException le) {
      if (start > 0) {
        metrics.loginFailure.inc(System.currentTimeMillis() - start);
      }
      throw new IOException("Login failure for " + user + " from keytab " +
                            path, le);
    }
@@ -558,7 +608,7 @@
  /**
   * Re-Login a user in from a keytab file. Loads a user identity from a keytab
   * file and login them in. They become the currently logged-in user. This
   * file and logs them in. They become the currently logged-in user. This
   * method assumes that {@link #loginUserFromKeytab(String, String)} had
   * happened already.
   * The Subject field of this UserGroupInformation object is updated to have
@@ -579,6 +629,7 @@
    if (!hasSufficientTimeElapsed(now)) {
      return;
    }
    long start = 0;
    // register most recent relogin attempt
    user.setLastLogin(now);
    try {
@@ -593,9 +644,14 @@
        new LoginContext(HadoopConfiguration.KEYTAB_KERBEROS_CONFIG_NAME,
            getSubject());
      LOG.info("Initiating re-login for " + keytabPrincipal);
      start = System.currentTimeMillis();
      login.login();
      metrics.loginSuccess.inc(System.currentTimeMillis() - start);
      setLogin(login);
    } catch (LoginException le) {
      if (start > 0) {
        metrics.loginFailure.inc(System.currentTimeMillis() - start);
      }
      throw new IOException("Login failure for " + keytabPrincipal +
          " from keytab " + keytabFile, le);
    }
@@ -661,22 +717,29 @@
    String oldKeytabFile = null;
    String oldKeytabPrincipal = null;
    long start = 0;
    try {
      oldKeytabFile = keytabFile;
      oldKeytabPrincipal = keytabPrincipal;
      keytabFile = path;
      keytabPrincipal = user;
      Subject subject = new Subject();
      LoginContext login =
        new LoginContext(HadoopConfiguration.KEYTAB_KERBEROS_CONFIG_NAME, subject);
      start = System.currentTimeMillis();
      login.login();
      metrics.loginSuccess.inc(System.currentTimeMillis() - start);
      UserGroupInformation newLoginUser = new UserGroupInformation(subject);
      newLoginUser.setLogin(login);
      newLoginUser.setAuthenticationMethod(AuthenticationMethod.KERBEROS);
      return newLoginUser;
    } catch (LoginException le) {
      if (start > 0) {
        metrics.loginFailure.inc(System.currentTimeMillis() - start);
      }
      throw new IOException("Login failure for " + user + " from keytab " +
                            path, le);
    } finally {
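The instrumentation above follows the usual pattern of the legacy org.apache.hadoop.metrics API: time the login() call, record the elapsed milliseconds with MetricsTimeVaryingRate.inc(), and let the registered Updater push accumulated values to the configured context. A self-contained sketch of that pattern, not part of this commit (the KerberosLoginTimer class and its metric names are hypothetical):

import org.apache.hadoop.metrics.MetricsContext;
import org.apache.hadoop.metrics.MetricsRecord;
import org.apache.hadoop.metrics.MetricsUtil;
import org.apache.hadoop.metrics.Updater;
import org.apache.hadoop.metrics.util.MetricsBase;
import org.apache.hadoop.metrics.util.MetricsRegistry;
import org.apache.hadoop.metrics.util.MetricsTimeVaryingRate;

/** Hypothetical illustration of the Updater pattern used by UgiMetrics. */
public class KerberosLoginTimer implements Updater {
  private final MetricsRegistry registry = new MetricsRegistry();
  private final MetricsRecord record;
  final MetricsTimeVaryingRate loginRate = new MetricsTimeVaryingRate(
      "exampleLogin", registry, "Rate of example logins and time in milliseconds");

  public KerberosLoginTimer() {
    // "ugi" resolves to whatever context hadoop-metrics.properties configures.
    MetricsContext context = MetricsUtil.getContext("ugi");
    record = MetricsUtil.createRecord(context, "example");
    context.registerUpdater(this);   // doUpdates() now runs every ugi.period seconds
  }

  /** Time an operation and feed the duration into the varying-rate metric. */
  public void timed(Runnable operation) {
    long start = System.currentTimeMillis();
    operation.run();
    loginRate.inc(System.currentTimeMillis() - start);
  }

  /** Copy accumulated values into the record and hand it to the context. */
  @Override
  public synchronized void doUpdates(MetricsContext unused) {
    for (MetricsBase metric : registry.getMetricsList()) {
      metric.pushMetric(record);
    }
    record.update();
  }
}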

TestSaslRPC.java

@@ -49,6 +49,7 @@ import org.apache.hadoop.security.SaslInputStream;
import org.apache.hadoop.security.SaslRpcClient;
import org.apache.hadoop.security.SaslRpcServer;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.TestUserGroupInformation;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
@@ -253,6 +254,7 @@ public class TestSaslRPC {
    newConf.set(SERVER_PRINCIPAL_KEY, principal);
    newConf.set(SERVER_KEYTAB_KEY, keytab);
    SecurityUtil.login(newConf, SERVER_KEYTAB_KEY, SERVER_PRINCIPAL_KEY);
    TestUserGroupInformation.verifyLoginMetrics(1, 0);
    UserGroupInformation current = UserGroupInformation.getCurrentUser();
    System.out.println("UGI: " + current);

TestUserGroupInformation.java

@@ -331,4 +331,19 @@ public class TestUserGroupInformation {
    // user1 and user2 must be same instances.
    Assert.assertTrue(user1 == user2);
  }

  public static void verifyLoginMetrics(int success, int failure)
      throws IOException {
    // Ensure metrics related to kerberos login are updated.
    UserGroupInformation.UgiMetrics metrics = UserGroupInformation.metrics;
    metrics.doUpdates(null);
    if (success > 0) {
      assertEquals(success, metrics.loginSuccess.getPreviousIntervalNumOps());
      assertTrue(metrics.loginSuccess.getPreviousIntervalAverageTime() > 0);
    }
    if (failure > 0) {
      assertEquals(failure, metrics.loginFailure.getPreviousIntervalNumOps());
      assertTrue(metrics.loginFailure.getPreviousIntervalAverageTime() > 0);
    }
  }
}
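For completeness, a hypothetical end-to-end use of the new helper outside TestSaslRPC, assuming JUnit and the test classes are on the classpath and a test principal and keytab are available (neither the class below nor the principal/keytab values are part of this commit):

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.TestUserGroupInformation;
import org.apache.hadoop.security.UserGroupInformation;

/** Hypothetical driver: log in from a keytab and check the login metrics. */
public class LoginMetricsExample {
  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    conf.set("hadoop.security.authentication", "kerberos");
    UserGroupInformation.setConfiguration(conf);

    // Assumed principal/keytab; replace with values from a real KDC.
    UserGroupInformation.loginUserFromKeytab("test/localhost@EXAMPLE.COM",
        "/tmp/test.keytab");

    // One successful login should now show up in the previous interval.
    TestUserGroupInformation.verifyLoginMetrics(1, 0);
  }
}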