YARN-2397. Avoided loading two authentication filters for RM and TS web interfaces. Contributed by Varun Vasudev.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1618054 13f79535-47bb-0310-9956-ffa450edef68
Zhijie Shen 2014-08-14 21:17:20 +00:00
parent 399e428deb
commit a9023c2736
7 changed files with 190 additions and 33 deletions


@ -193,6 +193,9 @@ Release 2.6.0 - UNRELEASED
YARN-2070. Made DistributedShell publish the short user name to the timeline YARN-2070. Made DistributedShell publish the short user name to the timeline
server. (Robert Kanter via zjshen) server. (Robert Kanter via zjshen)
YARN-2397. Avoided loading two authentication filters for RM and TS web
interfaces. (Varun Vasudev via zjshen)
Release 2.5.0 - UNRELEASED Release 2.5.0 - UNRELEASED
INCOMPATIBLE CHANGES INCOMPATIBLE CHANGES


@ -20,6 +20,7 @@
import java.io.IOException; import java.io.IOException;
import java.net.InetSocketAddress; import java.net.InetSocketAddress;
import java.util.ArrayList;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
@ -27,6 +28,7 @@
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem; import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
import org.apache.hadoop.metrics2.source.JvmMetrics; import org.apache.hadoop.metrics2.source.JvmMetrics;
import org.apache.hadoop.security.AuthenticationFilterInitializer;
import org.apache.hadoop.security.SecurityUtil; import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.service.CompositeService; import org.apache.hadoop.service.CompositeService;
import org.apache.hadoop.service.Service; import org.apache.hadoop.service.Service;
@ -195,14 +197,31 @@ private void startWebApp() {
// the customized filter will be loaded by the timeline server to do Kerberos // the customized filter will be loaded by the timeline server to do Kerberos
// + DT authentication. // + DT authentication.
String initializers = conf.get("hadoop.http.filter.initializers"); String initializers = conf.get("hadoop.http.filter.initializers");
initializers = initializers =
initializers == null || initializers.length() == 0 ? "" : "," initializers == null || initializers.length() == 0 ? "" : initializers;
+ initializers;
if (!initializers.contains( if (!initializers.contains(TimelineAuthenticationFilterInitializer.class
TimelineAuthenticationFilterInitializer.class.getName())) { .getName())) {
conf.set("hadoop.http.filter.initializers", initializers =
TimelineAuthenticationFilterInitializer.class.getName() TimelineAuthenticationFilterInitializer.class.getName() + ","
+ initializers); + initializers;
}
String[] parts = initializers.split(",");
ArrayList<String> target = new ArrayList<String>();
for (String filterInitializer : parts) {
filterInitializer = filterInitializer.trim();
if (filterInitializer.equals(AuthenticationFilterInitializer.class
.getName())) {
continue;
}
target.add(filterInitializer);
}
String actualInitializers =
org.apache.commons.lang.StringUtils.join(target, ",");
if (!actualInitializers.equals(initializers)) {
conf.set("hadoop.http.filter.initializers", actualInitializers);
} }
String bindAddress = WebAppUtils.getWebAppBindURL(conf, String bindAddress = WebAppUtils.getWebAppBindURL(conf,
YarnConfiguration.TIMELINE_SERVICE_BIND_HOST, YarnConfiguration.TIMELINE_SERVICE_BIND_HOST,
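Review bot: The final `if (!actualInitializers.equals(initializers))` in startWebApp compares against the local string that already had the timeline initializer prepended, not against the value read from the configuration, so a list that names other initializers but neither authentication one is never written back. In that case `TimelineAuthenticationFilterInitializer` is added only to the local copy, nothing is filtered out or trimmed, the two strings are equal and `conf.set` is skipped, which appears to leave the timeline web interface without its authentication filter. Writing the result unconditionally, `conf.set("hadoop.http.filter.initializers", actualInitializers);`, or comparing with the original `conf.get(...)` value, closes the gap. The `initializers.contains(...)` test above it is a substring match and could be made exact by checking the trimmed entries after the split. startWebApp begins and ends outside this hunk.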


@ -23,11 +23,14 @@
import static org.junit.Assert.fail; import static org.junit.Assert.fail;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.AuthenticationFilterInitializer;
import org.apache.hadoop.service.Service.STATE; import org.apache.hadoop.service.Service.STATE;
import org.apache.hadoop.util.ExitUtil; import org.apache.hadoop.util.ExitUtil;
import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.applicationhistoryservice.webapp.AHSWebApp; import org.apache.hadoop.yarn.server.applicationhistoryservice.webapp.AHSWebApp;
import org.apache.hadoop.yarn.server.timeline.security.TimelineAuthenticationFilterInitializer;
import org.junit.After; import org.junit.After;
import org.junit.Assert;
import org.junit.Test; import org.junit.Test;
public class TestApplicationHistoryServer { public class TestApplicationHistoryServer {
@ -69,6 +72,31 @@ public void testLaunch() throws Exception {
} }
} }
@Test(timeout = 50000)
public void testFilteOverrides() throws Exception {
String[] filterInitializers =
{
AuthenticationFilterInitializer.class.getName(),
TimelineAuthenticationFilterInitializer.class.getName(),
AuthenticationFilterInitializer.class.getName() + ","
+ TimelineAuthenticationFilterInitializer.class.getName(),
AuthenticationFilterInitializer.class.getName() + ", "
+ TimelineAuthenticationFilterInitializer.class.getName() };
for (String filterInitializer : filterInitializers) {
historyServer = new ApplicationHistoryServer();
Configuration config = new YarnConfiguration();
config.set("hadoop.http.filter.initializers", filterInitializer);
historyServer.init(config);
historyServer.start();
Configuration tmp = historyServer.getConfig();
assertEquals(TimelineAuthenticationFilterInitializer.class.getName(),
tmp.get("hadoop.http.filter.initializers"));
historyServer.stop();
AHSWebApp.resetInstance();
}
}
@After @After
public void stop() { public void stop() {
if (historyServer != null) { if (historyServer != null) {
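Review bot: None of the four inputs in `testFilteOverrides` contains an initializer other than the two authentication ones, so the path where the timeline initializer has to be prepended to an existing list while nothing is removed is not exercised. Adding an entry such as `org.apache.hadoop.http.lib.StaticUserWebFilter` and asserting that the resulting value still starts with `TimelineAuthenticationFilterInitializer.class.getName()` would cover it. The method name looks like a typo for `testFilterOverrides`, and `historyServer.stop()` together with `AHSWebApp.resetInstance()` would be safer in a `finally` so that a failed assertion does not skip the reset.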


@ -114,7 +114,7 @@ protected Map<String, String> createFilterConfig(Configuration conf) {
public void initFilter(FilterContainer container, Configuration conf) { public void initFilter(FilterContainer container, Configuration conf) {
Map<String, String> filterConfig = createFilterConfig(conf); Map<String, String> filterConfig = createFilterConfig(conf);
container.addFilter("YARNAuthenticationFilter", container.addFilter("RMAuthenticationFilter",
RMAuthenticationFilter.class.getName(), filterConfig); RMAuthenticationFilter.class.getName(), filterConfig);
} }


@ -22,6 +22,7 @@
import java.io.InputStream; import java.io.InputStream;
import java.net.InetSocketAddress; import java.net.InetSocketAddress;
import java.security.PrivilegedExceptionAction; import java.security.PrivilegedExceptionAction;
import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.concurrent.BlockingQueue; import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.LinkedBlockingQueue;
@ -35,6 +36,7 @@
import org.apache.hadoop.http.lib.StaticUserWebFilter; import org.apache.hadoop.http.lib.StaticUserWebFilter;
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem; import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
import org.apache.hadoop.metrics2.source.JvmMetrics; import org.apache.hadoop.metrics2.source.JvmMetrics;
import org.apache.hadoop.security.AuthenticationFilterInitializer;
import org.apache.hadoop.security.Groups; import org.apache.hadoop.security.Groups;
import org.apache.hadoop.security.SecurityUtil; import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.UserGroupInformation;
@ -799,10 +801,11 @@ protected void startWepApp() {
// Use the customized yarn filter instead of the standard kerberos filter to // Use the customized yarn filter instead of the standard kerberos filter to
// allow users to authenticate using delegation tokens // allow users to authenticate using delegation tokens
// 3 conditions need to be satisfied - // 4 conditions need to be satisfied -
// 1. security is enabled // 1. security is enabled
// 2. http auth type is set to kerberos // 2. http auth type is set to kerberos
// 3. "yarn.resourcemanager.webapp.use-yarn-filter" override is set to true // 3. "yarn.resourcemanager.webapp.use-yarn-filter" override is set to true
// 4. hadoop.http.filter.initializers container AuthenticationFilterInitializer
Configuration conf = getConfig(); Configuration conf = getConfig();
boolean useYarnAuthenticationFilter = boolean useYarnAuthenticationFilter =
@ -811,41 +814,66 @@ protected void startWepApp() {
YarnConfiguration.DEFAULT_RM_WEBAPP_DELEGATION_TOKEN_AUTH_FILTER); YarnConfiguration.DEFAULT_RM_WEBAPP_DELEGATION_TOKEN_AUTH_FILTER);
String authPrefix = "hadoop.http.authentication."; String authPrefix = "hadoop.http.authentication.";
String authTypeKey = authPrefix + "type"; String authTypeKey = authPrefix + "type";
String initializers = conf.get("hadoop.http.filter.initializers"); String filterInitializerConfKey = "hadoop.http.filter.initializers";
if (UserGroupInformation.isSecurityEnabled() String actualInitializers = "";
&& useYarnAuthenticationFilter Class<?>[] initializersClasses =
&& conf.get(authTypeKey, "").equalsIgnoreCase( conf.getClasses(filterInitializerConfKey);
KerberosAuthenticationHandler.TYPE)) {
LOG.info("Using RM authentication filter(kerberos/delegation-token)"
+ " for RM webapp authentication");
RMAuthenticationHandler
.setSecretManager(getClientRMService().rmDTSecretManager);
String yarnAuthKey =
authPrefix + RMAuthenticationFilter.AUTH_HANDLER_PROPERTY;
conf.setStrings(yarnAuthKey, RMAuthenticationHandler.class.getName());
initializers = boolean hasHadoopAuthFilterInitializer = false;
initializers == null || initializers.isEmpty() ? "" : "," boolean hasRMAuthFilterInitializer = false;
+ initializers; if (initializersClasses != null) {
if (!initializers.contains(RMAuthenticationFilterInitializer.class for (Class<?> initializer : initializersClasses) {
.getName())) { if (initializer.getName().equals(
conf.set("hadoop.http.filter.initializers", AuthenticationFilterInitializer.class.getName())) {
RMAuthenticationFilterInitializer.class.getName() + initializers); hasHadoopAuthFilterInitializer = true;
}
if (initializer.getName().equals(
RMAuthenticationFilterInitializer.class.getName())) {
hasRMAuthFilterInitializer = true;
}
}
if (UserGroupInformation.isSecurityEnabled()
&& useYarnAuthenticationFilter
&& hasHadoopAuthFilterInitializer
&& conf.get(authTypeKey, "").equals(
KerberosAuthenticationHandler.TYPE)) {
ArrayList<String> target = new ArrayList<String>();
for (Class<?> filterInitializer : initializersClasses) {
if (filterInitializer.getName().equals(
AuthenticationFilterInitializer.class.getName())) {
if (hasRMAuthFilterInitializer == false) {
target.add(RMAuthenticationFilterInitializer.class.getName());
}
continue;
}
target.add(filterInitializer.getName());
}
actualInitializers = StringUtils.join(",", target);
LOG.info("Using RM authentication filter(kerberos/delegation-token)"
+ " for RM webapp authentication");
RMAuthenticationHandler
.setSecretManager(getClientRMService().rmDTSecretManager);
String yarnAuthKey =
authPrefix + RMAuthenticationFilter.AUTH_HANDLER_PROPERTY;
conf.setStrings(yarnAuthKey, RMAuthenticationHandler.class.getName());
conf.set(filterInitializerConfKey, actualInitializers);
} }
} }
// if security is not enabled and the default filter initializer has been // if security is not enabled and the default filter initializer has not
// set, set the initializer to include the // been set, set the initializer to include the
// RMAuthenticationFilterInitializer which in turn will set up the simple // RMAuthenticationFilterInitializer which in turn will set up the simple
// auth filter. // auth filter.
String initializers = conf.get(filterInitializerConfKey);
if (!UserGroupInformation.isSecurityEnabled()) { if (!UserGroupInformation.isSecurityEnabled()) {
if (initializers == null || initializers.isEmpty()) { if (initializersClasses == null || initializersClasses.length == 0) {
conf.set("hadoop.http.filter.initializers", conf.set(filterInitializerConfKey,
RMAuthenticationFilterInitializer.class.getName()); RMAuthenticationFilterInitializer.class.getName());
conf.set(authTypeKey, "simple"); conf.set(authTypeKey, "simple");
} else if (initializers.equals(StaticUserWebFilter.class.getName())) { } else if (initializers.equals(StaticUserWebFilter.class.getName())) {
conf.set("hadoop.http.filter.initializers", conf.set(filterInitializerConfKey,
RMAuthenticationFilterInitializer.class.getName() + "," RMAuthenticationFilterInitializer.class.getName() + ","
+ initializers); + initializers);
conf.set(authTypeKey, "simple"); conf.set(authTypeKey, "simple");
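Review bot: In the second startWepApp hunk the Kerberos branch is now gated on `hasHadoopAuthFilterInitializer`, so a list that already names `RMAuthenticationFilterInitializer` without the stock `AuthenticationFilterInitializer` skips `RMAuthenticationHandler.setSecretManager(...)` and the `conf.setStrings(yarnAuthKey, ...)` call as well as the list rewrite. The loop tracks `hasRMAuthFilterInitializer` for exactly that configuration, so if the RM filter is meant to work there the gate should probably read `&& (hasHadoopAuthFilterInitializer || hasRMAuthFilterInitializer)`; if skipping is intended, a comment saying so would help. The type check also changed from `equalsIgnoreCase` to `equals`, which makes a differently-cased value such as `Kerberos` fall through silently, and the new comment line should read `contains AuthenticationFilterInitializer` rather than `container`. startWepApp starts and ends outside these hunks.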


@ -27,7 +27,10 @@
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.http.lib.StaticUserWebFilter;
import org.apache.hadoop.net.NetworkTopology; import org.apache.hadoop.net.NetworkTopology;
import org.apache.hadoop.security.AuthenticationFilterInitializer;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.yarn.api.records.Priority; import org.apache.hadoop.yarn.api.records.Priority;
import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.api.records.ResourceRequest; import org.apache.hadoop.yarn.api.records.ResourceRequest;
@ -39,8 +42,10 @@
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.AppAttemptRemovedSchedulerEvent; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.AppAttemptRemovedSchedulerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.NodeAddedSchedulerEvent; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.NodeAddedSchedulerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.NodeUpdateSchedulerEvent; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.NodeUpdateSchedulerEvent;
import org.apache.hadoop.yarn.server.security.http.RMAuthenticationFilterInitializer;
import org.apache.hadoop.yarn.util.resource.Resources; import org.apache.hadoop.yarn.util.resource.Resources;
import org.junit.After; import org.junit.After;
import org.junit.Assert;
import org.junit.Before; import org.junit.Before;
import org.junit.Test; import org.junit.Test;
@ -235,4 +240,75 @@ public void testNMExpiryAndHeartbeatIntervalsValidation() throws Exception {
} }
} }
@Test(timeout = 50000)
public void testFilterOverrides() throws Exception {
String filterInitializerConfKey = "hadoop.http.filter.initializers";
String[] filterInitializers =
{
AuthenticationFilterInitializer.class.getName(),
RMAuthenticationFilterInitializer.class.getName(),
AuthenticationFilterInitializer.class.getName() + ","
+ RMAuthenticationFilterInitializer.class.getName(),
AuthenticationFilterInitializer.class.getName() + ", "
+ RMAuthenticationFilterInitializer.class.getName(),
AuthenticationFilterInitializer.class.getName() + ", "
+ this.getClass().getName() };
for (String filterInitializer : filterInitializers) {
resourceManager = new ResourceManager();
Configuration conf = new YarnConfiguration();
conf.set(filterInitializerConfKey, filterInitializer);
conf.set("hadoop.security.authentication", "kerberos");
conf.set("hadoop.http.authentication.type", "kerberos");
try {
try {
UserGroupInformation.setConfiguration(conf);
} catch (Exception e) {
// ignore we just care about getting true for
// isSecurityEnabled()
LOG.info("Got expected exception");
}
resourceManager.init(conf);
resourceManager.startWepApp();
} catch (RuntimeException e) {
// Exceptions are expected because we didn't setup everything
// just want to test filter settings
String tmp = resourceManager.getConfig().get(filterInitializerConfKey);
if (filterInitializer.contains(this.getClass().getName())) {
Assert.assertEquals(RMAuthenticationFilterInitializer.class.getName()
+ "," + this.getClass().getName(), tmp);
} else {
Assert.assertEquals(
RMAuthenticationFilterInitializer.class.getName(), tmp);
}
resourceManager.stop();
}
}
// simple mode overrides
String[] simpleFilterInitializers =
{ "", StaticUserWebFilter.class.getName() };
for (String filterInitializer : simpleFilterInitializers) {
resourceManager = new ResourceManager();
Configuration conf = new YarnConfiguration();
conf.set(filterInitializerConfKey, filterInitializer);
try {
UserGroupInformation.setConfiguration(conf);
resourceManager.init(conf);
resourceManager.startWepApp();
} catch (RuntimeException e) {
// Exceptions are expected because we didn't setup everything
// just want to test filter settings
String tmp = resourceManager.getConfig().get(filterInitializerConfKey);
if (filterInitializer.equals(StaticUserWebFilter.class.getName())) {
Assert.assertEquals(RMAuthenticationFilterInitializer.class.getName()
+ "," + StaticUserWebFilter.class.getName(), tmp);
} else {
Assert.assertEquals(
RMAuthenticationFilterInitializer.class.getName(), tmp);
}
resourceManager.stop();
}
}
}
} }
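Review bot: Both loops in `testFilterOverrides` make their assertions only inside `catch (RuntimeException e)`, so if `init` or `startWepApp` ever stops throwing, the test passes without checking the filter list at all. Moving the `tmp` lookup, the `Assert.assertEquals` calls and `resourceManager.stop()` into a `finally` block would make the check unconditional and always release the ResourceManager. The inner `catch (Exception e)` around `UserGroupInformation.setConfiguration(conf)` logs a fixed message and drops `e`; passing `e` to `LOG.info` would keep the cause visible when the failure is not the anticipated one.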


@ -41,6 +41,7 @@
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic; import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.minikdc.MiniKdc; import org.apache.hadoop.minikdc.MiniKdc;
import org.apache.hadoop.security.AuthenticationFilterInitializer;
import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authentication.KerberosTestUtils; import org.apache.hadoop.security.authentication.KerberosTestUtils;
import org.apache.hadoop.security.authentication.server.AuthenticationFilter; import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
@ -122,6 +123,8 @@ private static void setupAndStartRM() throws Exception {
"kerberos"); "kerberos");
rmconf.setBoolean(YarnConfiguration.RM_WEBAPP_DELEGATION_TOKEN_AUTH_FILTER, rmconf.setBoolean(YarnConfiguration.RM_WEBAPP_DELEGATION_TOKEN_AUTH_FILTER,
true); true);
rmconf.set("hadoop.http.filter.initializers",
AuthenticationFilterInitializer.class.getName());
rmconf.set(YarnConfiguration.RM_WEBAPP_SPNEGO_USER_NAME_KEY, rmconf.set(YarnConfiguration.RM_WEBAPP_SPNEGO_USER_NAME_KEY,
httpSpnegoPrincipal); httpSpnegoPrincipal);
rmconf.set(YarnConfiguration.RM_KEYTAB, rmconf.set(YarnConfiguration.RM_KEYTAB,