diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryClientService.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryClientService.java
index f27124cdf23..7bdb7dbd431 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryClientService.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryClientService.java
@@ -88,6 +88,8 @@ import org.apache.hadoop.yarn.util.Records;
 import org.apache.hadoop.yarn.webapp.WebApp;
 import org.apache.hadoop.yarn.webapp.WebApps;
 
+import com.google.common.annotations.VisibleForTesting;
+
 /**
  * This module is responsible for talking to the
  * JobClient (user facing).
@@ -142,7 +144,8 @@ public class HistoryClientService extends AbstractService {
     super.serviceStart();
   }
 
-  private void initializeWebApp(Configuration conf) {
+  @VisibleForTesting
+  protected void initializeWebApp(Configuration conf) {
     webApp = new HsWebApp(history);
     InetSocketAddress bindAddress = MRWebAppUtil.getJHSWebBindAddress(conf);
     // NOTE: there should be a .at(InetSocketAddress)
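The hunk above widens initializeWebApp from private to protected (and marks it @VisibleForTesting) so a test can substitute a no-op and avoid binding an HTTP port or needing SPNEGO keytabs. A minimal sketch of such a test-only subclass, assuming only the two-argument constructor visible in this patch; the class name below is hypothetical and not part of the change:

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.v2.hs.HistoryClientService;
import org.apache.hadoop.mapreduce.v2.hs.HistoryContext;
import org.apache.hadoop.mapreduce.v2.hs.JHSDelegationTokenSecretManager;

/** Hypothetical test helper: a HistoryClientService that never starts its web app. */
public class NoWebAppHistoryClientService extends HistoryClientService {

  public NoWebAppHistoryClientService(HistoryContext context,
      JHSDelegationTokenSecretManager secretManager) {
    super(context, secretManager);
  }

  @Override
  protected void initializeWebApp(Configuration conf) {
    // Skip the JHS web app entirely: no HTTP port, no SPNEGO principal/keytab.
  }
}
```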
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistoryServer.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistoryServer.java
index 4fc84c96fae..9c92bed6acb 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistoryServer.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/JobHistoryServer.java
@@ -45,6 +45,8 @@ import org.apache.hadoop.yarn.event.Dispatcher;
 import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
 import org.apache.hadoop.yarn.logaggregation.AggregatedLogDeletionService;
 
+import com.google.common.annotations.VisibleForTesting;
+
 /******************************************************************
  * {@link JobHistoryServer} is responsible for servicing all job history
  * related requests from client.
@@ -60,10 +62,10 @@ public class JobHistoryServer extends CompositeService {
   public static final long historyServerTimeStamp = System.currentTimeMillis();
 
   private static final Log LOG = LogFactory.getLog(JobHistoryServer.class);
-  private HistoryContext historyContext;
+  protected HistoryContext historyContext;
   private HistoryClientService clientService;
   private JobHistory jobHistoryService;
-  private JHSDelegationTokenSecretManager jhsDTSecretManager;
+  protected JHSDelegationTokenSecretManager jhsDTSecretManager;
   private AggregatedLogDeletionService aggLogDelService;
   private HSAdminServer hsAdminServer;
   private HistoryServerStateStoreService stateStore;
@@ -129,8 +131,7 @@ public class JobHistoryServer extends CompositeService {
     historyContext = (HistoryContext)jobHistoryService;
     stateStore = createStateStore(conf);
     this.jhsDTSecretManager = createJHSSecretManager(conf, stateStore);
-    clientService = new HistoryClientService(historyContext,
-        this.jhsDTSecretManager);
+    clientService = createHistoryClientService();
     aggLogDelService = new AggregatedLogDeletionService();
     hsAdminServer = new HSAdminServer(aggLogDelService, jobHistoryService);
     addService(stateStore);
@@ -142,6 +143,12 @@ public class JobHistoryServer extends CompositeService {
     super.serviceInit(config);
   }
 
+  @VisibleForTesting
+  protected HistoryClientService createHistoryClientService() {
+    return new HistoryClientService(historyContext,
+        this.jhsDTSecretManager);
+  }
+
   protected JHSDelegationTokenSecretManager createJHSSecretManager(
       Configuration conf, HistoryServerStateStoreService store) {
     long secretKeyInterval =
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/security/TestJHSSecurity.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/security/TestJHSSecurity.java
index f3b9821f85d..e159652653c 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/security/TestJHSSecurity.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/security/TestJHSSecurity.java
@@ -39,6 +39,7 @@ import org.apache.hadoop.mapreduce.v2.api.protocolrecords.CancelDelegationTokenR
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDelegationTokenRequest;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetJobReportRequest;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.RenewDelegationTokenRequest;
+import org.apache.hadoop.mapreduce.v2.hs.HistoryClientService;
 import org.apache.hadoop.mapreduce.v2.hs.HistoryServerStateStoreService;
 import org.apache.hadoop.mapreduce.v2.hs.JHSDelegationTokenSecretManager;
 import org.apache.hadoop.mapreduce.v2.hs.JobHistoryServer;
@@ -94,6 +95,17 @@ public class TestJHSSecurity {
           return new JHSDelegationTokenSecretManager(initialInterval,
               maxLifetime, renewInterval, 3600000, store);
         }
+
+        @Override
+        protected HistoryClientService createHistoryClientService() {
+          return new HistoryClientService(historyContext,
+              this.jhsDTSecretManager) {
+            @Override
+            protected void initializeWebApp(Configuration conf) {
+              // Don't need it, skip.;
+            }
+          };
+        }
       };
 //      final JobHistoryServer jobHistoryServer = jhServer;
       jobHistoryServer.init(conf);
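JobHistoryServer now exposes createHistoryClientService() as a protected, @VisibleForTesting factory and widens historyContext and jhsDTSecretManager to protected; that is the seam TestJHSSecurity plugs into above. A hedged sketch of the same wiring using the hypothetical NoWebAppHistoryClientService from the previous example, to show a history server coming up with only its RPC side:

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.v2.hs.HistoryClientService;
import org.apache.hadoop.mapreduce.v2.hs.JobHistoryServer;

public class NoWebAppJobHistoryServerSketch {
  public static void main(String[] args) {
    // historyContext and jhsDTSecretManager are reachable from the anonymous
    // subclass because the patch widened them to protected.
    JobHistoryServer jhs = new JobHistoryServer() {
      @Override
      protected HistoryClientService createHistoryClientService() {
        return new NoWebAppHistoryClientService(historyContext, jhsDTSecretManager);
      }
    };
    jhs.init(new Configuration());
    jhs.start();
    try {
      // ... exercise the RPC / delegation-token side of the history server ...
    } finally {
      jhs.stop();
    }
  }
}
```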
diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index f593ac8a895..491277dd275 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -483,6 +483,9 @@ Release 2.2.0 - 2013-10-13
     YARN-1278. Fixed NodeManager to not delete local resources for apps on
     resync command from RM - a bug caused by YARN-1149. (Hitesh Shah via vinodkv)
 
+    YARN-1463. Tests should avoid starting http-server where possible or creates
+    spnego keytab/principals (vinodkv via kasha)
+
 Release 2.1.1-beta - 2013-09-23
 
   INCOMPATIBLE CHANGES
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApps.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApps.java
index 7f70d98ded8..e0a37ea6dac 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApps.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApps.java
@@ -225,8 +225,8 @@ public class WebApps {
       boolean hasSpnegoConf = spnegoPrincipalKey != null
           && spnegoKeytabKey != null;
       if (hasSpnegoConf) {
-        builder.setUsernameConfKey(conf.get(spnegoPrincipalKey))
-            .setKeytabConfKey(conf.get(spnegoKeytabKey))
+        builder.setUsernameConfKey(spnegoPrincipalKey)
+            .setKeytabConfKey(spnegoKeytabKey)
             .setSecurityEnabled(UserGroupInformation.isSecurityEnabled());
       }
       HttpServer server = builder.build();
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/pom.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/pom.xml
index 600c647f9f7..330a5685c1e 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/pom.xml
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/pom.xml
@@ -50,6 +50,11 @@
       <type>test-jar</type>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-minikdc</artifactId>
+      <scope>test</scope>
+    </dependency>
   </dependencies>
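The WebApps change above corrects which argument the HTTP server builder receives: setUsernameConfKey/setKeytabConfKey expect the name of a configuration key, which the server later resolves against the Configuration itself, whereas the old code handed them the already-resolved conf.get(...) value. A small self-contained illustration of the difference; the key name is the real YarnConfiguration constant, the principal value is illustrative:

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.conf.YarnConfiguration;

public class SpnegoConfKeyExample {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    conf.set(YarnConfiguration.RM_WEBAPP_SPNEGO_USER_NAME_KEY,
        "HTTP/localhost@EXAMPLE.COM");

    // What the patched WebApps hands to the builder: the key *name*.
    String confKey = YarnConfiguration.RM_WEBAPP_SPNEGO_USER_NAME_KEY;
    // What the old code handed over: the already-resolved value.
    String resolvedValue = conf.get(confKey);

    // The HTTP server performs its own conf.get(...) on whatever it was given,
    // so only the key name leads back to the principal.
    System.out.println(conf.get(confKey));        // HTTP/localhost@EXAMPLE.COM
    System.out.println(conf.get(resolvedValue));  // null
  }
}
```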
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/MiniYARNCluster.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/MiniYARNCluster.java
index 78bbea43852..60f1e079f5b 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/MiniYARNCluster.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/MiniYARNCluster.java
@@ -26,7 +26,6 @@ import java.util.Collection;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ConcurrentMap;
 
-import com.google.common.annotations.VisibleForTesting;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -67,7 +66,7 @@ import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.event.RMAppAt
 import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.event.RMAppAttemptUnregistrationEvent;
 import org.apache.hadoop.yarn.webapp.util.WebAppUtils;
 
-import static org.junit.Assert.fail;
+import com.google.common.annotations.VisibleForTesting;
 
 /**
  * Embedded Yarn minicluster for testcases that need to interact with a cluster.
@@ -213,7 +212,12 @@ public class MiniYARNCluster extends CompositeService {
     }
 
     for (int i = 0; i < resourceManagers.length; i++) {
-      resourceManagers[i] = new ResourceManager();
+      resourceManagers[i] = new ResourceManager() {
+        @Override
+        protected void doSecureLogin() throws IOException {
+          // Don't try to login using keytab in the testcases.
+        }
+      };
       addService(new ResourceManagerWrapper(i));
     }
     for(int index = 0; index < nodeManagers.length; index++) {
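With doSecureLogin() stubbed out in the anonymous ResourceManager above, a test can turn on Kerberos in the Configuration without provisioning RM/NM service keytabs; only the SPNEGO principal/keytab for the web endpoints is still required, which is exactly what the TestContainerManagerSecurity changes below supply. A usage sketch under those assumptions; the principal and keytab path are placeholders that would normally come from MiniKdc (see the sketch after the last file below):

```java
import java.io.File;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.MiniYARNCluster;

public class SecureMiniYarnClusterSketch {
  public static void main(String[] args) throws Exception {
    // Placeholders: the test above creates this keytab with MiniKdc.
    String spnegoPrincipal = "HTTP/localhost@EXAMPLE.COM";
    File spnegoKeytab = new File("target", "httpSpnegoKeytabFile.keytab");

    Configuration conf = new Configuration();
    conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION,
        "kerberos");
    // The web endpoints still authenticate via SPNEGO, so point them at the
    // HTTP principal/keytab; no RM/NM *service* keytab is needed because
    // doSecureLogin() is now a no-op inside MiniYARNCluster.
    conf.set(YarnConfiguration.RM_WEBAPP_SPNEGO_USER_NAME_KEY, spnegoPrincipal);
    conf.set(YarnConfiguration.RM_WEBAPP_SPNEGO_KEYTAB_FILE_KEY,
        spnegoKeytab.getAbsolutePath());
    conf.set(YarnConfiguration.NM_WEBAPP_SPNEGO_USER_NAME_KEY, spnegoPrincipal);
    conf.set(YarnConfiguration.NM_WEBAPP_SPNEGO_KEYTAB_FILE_KEY,
        spnegoKeytab.getAbsolutePath());
    UserGroupInformation.setConfiguration(conf);

    MiniYARNCluster cluster =
        new MiniYARNCluster("secure-test", 1, 1, 1); // name, NMs, local dirs, log dirs
    cluster.init(conf);
    cluster.start();
    try {
      // ... exercise container-token / NM-token code paths here ...
    } finally {
      cluster.stop();
    }
  }
}
```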
- "kerberos"); + CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos"); + configurationWithSecurity.set( + YarnConfiguration.RM_WEBAPP_SPNEGO_USER_NAME_KEY, httpSpnegoPrincipal); + configurationWithSecurity.set( + YarnConfiguration.RM_WEBAPP_SPNEGO_KEYTAB_FILE_KEY, + httpSpnegoKeytabFile.getAbsolutePath()); + configurationWithSecurity.set( + YarnConfiguration.NM_WEBAPP_SPNEGO_USER_NAME_KEY, httpSpnegoPrincipal); + configurationWithSecurity.set( + YarnConfiguration.NM_WEBAPP_SPNEGO_KEYTAB_FILE_KEY, + httpSpnegoKeytabFile.getAbsolutePath()); + return Arrays.asList(new Object[][] { { configurationWithoutSecurity }, { configurationWithSecurity } }); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/TestRMNMSecretKeys.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/TestRMNMSecretKeys.java index 56784a54dc5..dbeb49d8f6a 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/TestRMNMSecretKeys.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/TestRMNMSecretKeys.java @@ -64,6 +64,10 @@ public class TestRMNMSecretKeys { protected Dispatcher createDispatcher() { return dispatcher; } + @Override + protected void startWepApp() { + // Don't need it, skip. + } }; rm.init(conf); rm.start();