YARN-2397. Avoided loading two authentication filters for RM and TS web interfaces. Contributed by Varun Vasudev.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1618054 13f79535-47bb-0310-9956-ffa450edef68
Zhijie Shen 2014-08-14 21:17:20 +00:00
parent 399e428deb
commit a9023c2736
7 changed files with 190 additions and 33 deletions

hadoop-yarn-project/CHANGES.txt

@@ -193,6 +193,9 @@ Release 2.6.0 - UNRELEASED
YARN-2070. Made DistributedShell publish the short user name to the timeline
server. (Robert Kanter via zjshen)
YARN-2397. Avoided loading two authentication filters for RM and TS web
interfaces. (Varun Vasudev via zjshen)
Release 2.5.0 - UNRELEASED
INCOMPATIBLE CHANGES

ApplicationHistoryServer.java

@@ -20,6 +20,7 @@
import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.ArrayList;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -27,6 +28,7 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
import org.apache.hadoop.metrics2.source.JvmMetrics;
import org.apache.hadoop.security.AuthenticationFilterInitializer;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.service.CompositeService;
import org.apache.hadoop.service.Service;
@@ -195,14 +197,31 @@ private void startWebApp() {
// the customized filter will be loaded by the timeline server to do Kerberos
// + DT authentication.
String initializers = conf.get("hadoop.http.filter.initializers");
initializers =
initializers == null || initializers.length() == 0 ? "" : ","
+ initializers;
if (!initializers.contains(
TimelineAuthenticationFilterInitializer.class.getName())) {
conf.set("hadoop.http.filter.initializers",
TimelineAuthenticationFilterInitializer.class.getName()
+ initializers);
initializers == null || initializers.length() == 0 ? "" : initializers;
if (!initializers.contains(TimelineAuthenticationFilterInitializer.class
.getName())) {
initializers =
TimelineAuthenticationFilterInitializer.class.getName() + ","
+ initializers;
}
String[] parts = initializers.split(",");
ArrayList<String> target = new ArrayList<String>();
for (String filterInitializer : parts) {
filterInitializer = filterInitializer.trim();
if (filterInitializer.equals(AuthenticationFilterInitializer.class
.getName())) {
continue;
}
target.add(filterInitializer);
}
String actualInitializers =
org.apache.commons.lang.StringUtils.join(target, ",");
if (!actualInitializers.equals(initializers)) {
conf.set("hadoop.http.filter.initializers", actualInitializers);
}
String bindAddress = WebAppUtils.getWebAppBindURL(conf,
YarnConfiguration.TIMELINE_SERVICE_BIND_HOST,
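
For readers skimming the hunk above: the timeline server now guarantees that exactly one authentication filter initializer is loaded, by prepending its own TimelineAuthenticationFilterInitializer when the admin has not listed it and by dropping the stock AuthenticationFilterInitializer. A minimal standalone sketch of that rewrite follows; the wrapper class and helper-method names are mine, while the property key and class names match the patch (unlike the patch, the sketch also skips empty entries left by stray commas).

import java.util.ArrayList;
import java.util.List;

import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.AuthenticationFilterInitializer;
import org.apache.hadoop.yarn.server.timeline.security.TimelineAuthenticationFilterInitializer;

public class TimelineFilterInitializerRewriteSketch {

  // Hypothetical helper mirroring the logic added to startWebApp() above.
  static void ensureSingleAuthFilter(Configuration conf) {
    final String key = "hadoop.http.filter.initializers";
    String initializers = conf.get(key);
    initializers =
        initializers == null || initializers.isEmpty() ? "" : initializers;

    // Prepend the timeline initializer if it is not already configured.
    if (!initializers.contains(
        TimelineAuthenticationFilterInitializer.class.getName())) {
      initializers = TimelineAuthenticationFilterInitializer.class.getName()
          + "," + initializers;
    }

    // Drop the stock hadoop-auth initializer (and empty entries) so the web
    // app does not end up with two authentication filters.
    List<String> target = new ArrayList<String>();
    for (String name : initializers.split(",")) {
      name = name.trim();
      if (name.isEmpty()
          || name.equals(AuthenticationFilterInitializer.class.getName())) {
        continue;
      }
      target.add(name);
    }

    // Only write the property back if the rewrite actually changed it.
    String actual = StringUtils.join(target, ",");
    if (!actual.equals(initializers)) {
      conf.set(key, actual);
    }
  }
}

The new test in TestApplicationHistoryServer below exercises exactly this collapse: whichever combination of the two initializers is configured, the property ends up naming only the timeline initializer.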

TestApplicationHistoryServer.java

@@ -23,11 +23,14 @@
import static org.junit.Assert.fail;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.AuthenticationFilterInitializer;
import org.apache.hadoop.service.Service.STATE;
import org.apache.hadoop.util.ExitUtil;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.applicationhistoryservice.webapp.AHSWebApp;
import org.apache.hadoop.yarn.server.timeline.security.TimelineAuthenticationFilterInitializer;
import org.junit.After;
import org.junit.Assert;
import org.junit.Test;
public class TestApplicationHistoryServer {
@@ -69,6 +72,31 @@ public void testLaunch() throws Exception {
}
}
@Test(timeout = 50000)
public void testFilterOverrides() throws Exception {
String[] filterInitializers =
{
AuthenticationFilterInitializer.class.getName(),
TimelineAuthenticationFilterInitializer.class.getName(),
AuthenticationFilterInitializer.class.getName() + ","
+ TimelineAuthenticationFilterInitializer.class.getName(),
AuthenticationFilterInitializer.class.getName() + ", "
+ TimelineAuthenticationFilterInitializer.class.getName() };
for (String filterInitializer : filterInitializers) {
historyServer = new ApplicationHistoryServer();
Configuration config = new YarnConfiguration();
config.set("hadoop.http.filter.initializers", filterInitializer);
historyServer.init(config);
historyServer.start();
Configuration tmp = historyServer.getConfig();
assertEquals(TimelineAuthenticationFilterInitializer.class.getName(),
tmp.get("hadoop.http.filter.initializers"));
historyServer.stop();
AHSWebApp.resetInstance();
}
}
@After
public void stop() {
if (historyServer != null) {

RMAuthenticationFilterInitializer.java

@@ -114,7 +114,7 @@ protected Map<String, String> createFilterConfig(Configuration conf) {
public void initFilter(FilterContainer container, Configuration conf) {
Map<String, String> filterConfig = createFilterConfig(conf);
container.addFilter("YARNAuthenticationFilter",
container.addFilter("RMAuthenticationFilter",
RMAuthenticationFilter.class.getName(), filterConfig);
}

ResourceManager.java

@@ -22,6 +22,7 @@
import java.io.InputStream;
import java.net.InetSocketAddress;
import java.security.PrivilegedExceptionAction;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
@@ -35,6 +36,7 @@
import org.apache.hadoop.http.lib.StaticUserWebFilter;
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
import org.apache.hadoop.metrics2.source.JvmMetrics;
import org.apache.hadoop.security.AuthenticationFilterInitializer;
import org.apache.hadoop.security.Groups;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
@@ -799,10 +801,11 @@ protected void startWepApp() {
// Use the customized yarn filter instead of the standard kerberos filter to
// allow users to authenticate using delegation tokens
// 3 conditions need to be satisfied -
// 4 conditions need to be satisfied -
// 1. security is enabled
// 2. http auth type is set to kerberos
// 3. "yarn.resourcemanager.webapp.use-yarn-filter" override is set to true
// 4. hadoop.http.filter.initializers contains AuthenticationFilterInitializer
Configuration conf = getConfig();
boolean useYarnAuthenticationFilter =
@@ -811,41 +814,66 @@ protected void startWepApp() {
YarnConfiguration.DEFAULT_RM_WEBAPP_DELEGATION_TOKEN_AUTH_FILTER);
String authPrefix = "hadoop.http.authentication.";
String authTypeKey = authPrefix + "type";
String initializers = conf.get("hadoop.http.filter.initializers");
if (UserGroupInformation.isSecurityEnabled()
&& useYarnAuthenticationFilter
&& conf.get(authTypeKey, "").equalsIgnoreCase(
KerberosAuthenticationHandler.TYPE)) {
LOG.info("Using RM authentication filter(kerberos/delegation-token)"
+ " for RM webapp authentication");
RMAuthenticationHandler
.setSecretManager(getClientRMService().rmDTSecretManager);
String yarnAuthKey =
authPrefix + RMAuthenticationFilter.AUTH_HANDLER_PROPERTY;
conf.setStrings(yarnAuthKey, RMAuthenticationHandler.class.getName());
String filterInitializerConfKey = "hadoop.http.filter.initializers";
String actualInitializers = "";
Class<?>[] initializersClasses =
conf.getClasses(filterInitializerConfKey);
initializers =
initializers == null || initializers.isEmpty() ? "" : ","
+ initializers;
if (!initializers.contains(RMAuthenticationFilterInitializer.class
.getName())) {
conf.set("hadoop.http.filter.initializers",
RMAuthenticationFilterInitializer.class.getName() + initializers);
boolean hasHadoopAuthFilterInitializer = false;
boolean hasRMAuthFilterInitializer = false;
if (initializersClasses != null) {
for (Class<?> initializer : initializersClasses) {
if (initializer.getName().equals(
AuthenticationFilterInitializer.class.getName())) {
hasHadoopAuthFilterInitializer = true;
}
if (initializer.getName().equals(
RMAuthenticationFilterInitializer.class.getName())) {
hasRMAuthFilterInitializer = true;
}
}
if (UserGroupInformation.isSecurityEnabled()
&& useYarnAuthenticationFilter
&& hasHadoopAuthFilterInitializer
&& conf.get(authTypeKey, "").equals(
KerberosAuthenticationHandler.TYPE)) {
ArrayList<String> target = new ArrayList<String>();
for (Class<?> filterInitializer : initializersClasses) {
if (filterInitializer.getName().equals(
AuthenticationFilterInitializer.class.getName())) {
if (hasRMAuthFilterInitializer == false) {
target.add(RMAuthenticationFilterInitializer.class.getName());
}
continue;
}
target.add(filterInitializer.getName());
}
actualInitializers = StringUtils.join(",", target);
LOG.info("Using RM authentication filter(kerberos/delegation-token)"
+ " for RM webapp authentication");
RMAuthenticationHandler
.setSecretManager(getClientRMService().rmDTSecretManager);
String yarnAuthKey =
authPrefix + RMAuthenticationFilter.AUTH_HANDLER_PROPERTY;
conf.setStrings(yarnAuthKey, RMAuthenticationHandler.class.getName());
conf.set(filterInitializerConfKey, actualInitializers);
}
}
// if security is not enabled and the default filter initializer has been
// set, set the initializer to include the
// if security is not enabled and the default filter initializer has not
// been set, set the initializer to include the
// RMAuthenticationFilterInitializer which in turn will set up the simple
// auth filter.
String initializers = conf.get(filterInitializerConfKey);
if (!UserGroupInformation.isSecurityEnabled()) {
if (initializers == null || initializers.isEmpty()) {
conf.set("hadoop.http.filter.initializers",
if (initializersClasses == null || initializersClasses.length == 0) {
conf.set(filterInitializerConfKey,
RMAuthenticationFilterInitializer.class.getName());
conf.set(authTypeKey, "simple");
} else if (initializers.equals(StaticUserWebFilter.class.getName())) {
conf.set("hadoop.http.filter.initializers",
conf.set(filterInitializerConfKey,
RMAuthenticationFilterInitializer.class.getName() + ","
+ initializers);
conf.set(authTypeKey, "simple");
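
Condensed for reference, the RM-side selection from the hunk above as a standalone sketch. The wrapper class and method names are mine, and the delegation-token secret-manager wiring (RMAuthenticationHandler.setSecretManager and the auth-handler property) is omitted; the property keys, constants, and class names follow the patch. In the secure Kerberos path the stock AuthenticationFilterInitializer is swapped for RMAuthenticationFilterInitializer; in the non-secure path the RM initializer is used when nothing, or only StaticUserWebFilter, is configured.

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.http.lib.StaticUserWebFilter;
import org.apache.hadoop.security.AuthenticationFilterInitializer;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.security.http.RMAuthenticationFilterInitializer;

public class RMWebAppFilterSelectionSketch {

  // Hypothetical helper condensing the filter selection performed in
  // ResourceManager.startWepApp() above.
  static void selectAuthFilter(Configuration conf) {
    final String key = "hadoop.http.filter.initializers";
    boolean useYarnFilter = conf.getBoolean(
        YarnConfiguration.RM_WEBAPP_DELEGATION_TOKEN_AUTH_FILTER,
        YarnConfiguration.DEFAULT_RM_WEBAPP_DELEGATION_TOKEN_AUTH_FILTER);
    String authType = conf.get("hadoop.http.authentication.type", "");
    Class<?>[] classes = conf.getClasses(key);

    boolean hasHadoopAuthInit = false;
    boolean hasRMAuthInit = false;
    if (classes != null) {
      for (Class<?> c : classes) {
        hasHadoopAuthInit |= c.getName().equals(
            AuthenticationFilterInitializer.class.getName());
        hasRMAuthInit |= c.getName().equals(
            RMAuthenticationFilterInitializer.class.getName());
      }
    }

    if (UserGroupInformation.isSecurityEnabled() && useYarnFilter
        && hasHadoopAuthInit
        && authType.equals(KerberosAuthenticationHandler.TYPE)) {
      // Secure Kerberos path: replace the stock hadoop-auth initializer with
      // the RM one so only a single authentication filter gets loaded.
      List<String> target = new ArrayList<String>();
      for (Class<?> c : classes) {
        if (c.getName().equals(
            AuthenticationFilterInitializer.class.getName())) {
          if (!hasRMAuthInit) {
            target.add(RMAuthenticationFilterInitializer.class.getName());
          }
          continue;
        }
        target.add(c.getName());
      }
      conf.set(key, StringUtils.join(",", target));
    } else if (!UserGroupInformation.isSecurityEnabled()) {
      // Non-secure path: fall back to the RM initializer (simple auth) when
      // nothing, or only the static user filter, is configured.
      String configured = conf.get(key);
      if (classes == null || classes.length == 0) {
        conf.set(key, RMAuthenticationFilterInitializer.class.getName());
        conf.set("hadoop.http.authentication.type", "simple");
      } else if (StaticUserWebFilter.class.getName().equals(configured)) {
        conf.set(key, RMAuthenticationFilterInitializer.class.getName()
            + "," + configured);
        conf.set("hadoop.http.authentication.type", "simple");
      }
    }
  }
}

As in the patch, the Kerberos branch also requires the configured initializer list to already contain AuthenticationFilterInitializer, so an operator who has set up a fully custom filter chain is left untouched.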

TestResourceTrackerService.java

@@ -27,7 +27,10 @@
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.http.lib.StaticUserWebFilter;
import org.apache.hadoop.net.NetworkTopology;
import org.apache.hadoop.security.AuthenticationFilterInitializer;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.yarn.api.records.Priority;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.api.records.ResourceRequest;
@@ -39,8 +42,10 @@
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.AppAttemptRemovedSchedulerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.NodeAddedSchedulerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.NodeUpdateSchedulerEvent;
import org.apache.hadoop.yarn.server.security.http.RMAuthenticationFilterInitializer;
import org.apache.hadoop.yarn.util.resource.Resources;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
@@ -235,4 +240,75 @@ public void testNMExpiryAndHeartbeatIntervalsValidation() throws Exception {
}
}
@Test(timeout = 50000)
public void testFilterOverrides() throws Exception {
String filterInitializerConfKey = "hadoop.http.filter.initializers";
String[] filterInitializers =
{
AuthenticationFilterInitializer.class.getName(),
RMAuthenticationFilterInitializer.class.getName(),
AuthenticationFilterInitializer.class.getName() + ","
+ RMAuthenticationFilterInitializer.class.getName(),
AuthenticationFilterInitializer.class.getName() + ", "
+ RMAuthenticationFilterInitializer.class.getName(),
AuthenticationFilterInitializer.class.getName() + ", "
+ this.getClass().getName() };
for (String filterInitializer : filterInitializers) {
resourceManager = new ResourceManager();
Configuration conf = new YarnConfiguration();
conf.set(filterInitializerConfKey, filterInitializer);
conf.set("hadoop.security.authentication", "kerberos");
conf.set("hadoop.http.authentication.type", "kerberos");
try {
try {
UserGroupInformation.setConfiguration(conf);
} catch (Exception e) {
// ignore we just care about getting true for
// isSecurityEnabled()
LOG.info("Got expected exception");
}
resourceManager.init(conf);
resourceManager.startWepApp();
} catch (RuntimeException e) {
// Exceptions are expected because we didn't setup everything
// just want to test filter settings
String tmp = resourceManager.getConfig().get(filterInitializerConfKey);
if (filterInitializer.contains(this.getClass().getName())) {
Assert.assertEquals(RMAuthenticationFilterInitializer.class.getName()
+ "," + this.getClass().getName(), tmp);
} else {
Assert.assertEquals(
RMAuthenticationFilterInitializer.class.getName(), tmp);
}
resourceManager.stop();
}
}
// simple mode overrides
String[] simpleFilterInitializers =
{ "", StaticUserWebFilter.class.getName() };
for (String filterInitializer : simpleFilterInitializers) {
resourceManager = new ResourceManager();
Configuration conf = new YarnConfiguration();
conf.set(filterInitializerConfKey, filterInitializer);
try {
UserGroupInformation.setConfiguration(conf);
resourceManager.init(conf);
resourceManager.startWepApp();
} catch (RuntimeException e) {
// Exceptions are expected because we didn't setup everything
// just want to test filter settings
String tmp = resourceManager.getConfig().get(filterInitializerConfKey);
if (filterInitializer.equals(StaticUserWebFilter.class.getName())) {
Assert.assertEquals(RMAuthenticationFilterInitializer.class.getName()
+ "," + StaticUserWebFilter.class.getName(), tmp);
} else {
Assert.assertEquals(
RMAuthenticationFilterInitializer.class.getName(), tmp);
}
resourceManager.stop();
}
}
}
}

TestRMWebServicesDelegationTokens.java

@@ -41,6 +41,7 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.minikdc.MiniKdc;
import org.apache.hadoop.security.AuthenticationFilterInitializer;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authentication.KerberosTestUtils;
import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
@@ -122,6 +123,8 @@ private static void setupAndStartRM() throws Exception {
"kerberos");
rmconf.setBoolean(YarnConfiguration.RM_WEBAPP_DELEGATION_TOKEN_AUTH_FILTER,
true);
rmconf.set("hadoop.http.filter.initializers",
AuthenticationFilterInitializer.class.getName());
rmconf.set(YarnConfiguration.RM_WEBAPP_SPNEGO_USER_NAME_KEY,
httpSpnegoPrincipal);
rmconf.set(YarnConfiguration.RM_KEYTAB,