YARN-1463. Tests should avoid starting http-server where possible or creates spnego keytab/principals (vinodkv via kasha)
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1553790 13f79535-47bb-0310-9956-ffa450edef68
parent 8f0bf54d34
commit 1152e23ed0
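Most of the hunks below follow one pattern: the production services gain protected, @VisibleForTesting hooks for the pieces that are expensive or impossible in a unit test (starting the embedded HTTP server, doing a keytab login), and the tests either override those hooks or create real SPNEGO credentials with MiniKdc. A minimal sketch of that override pattern, using illustrative class names rather than the actual Hadoop services:

import org.apache.hadoop.conf.Configuration;

// Stand-in for a production service such as HistoryClientService or ResourceManager;
// the class and method names here are illustrative, not the real Hadoop code.
public class WebAppOverrideSketch {

  static class ExampleService {
    public void serviceStart(Configuration conf) {
      doSecureLogin();
      initializeWebApp(conf);
    }
    // Protected hooks that test subclasses can replace.
    protected void doSecureLogin() {
      System.out.println("logging in from a keytab");
    }
    protected void initializeWebApp(Configuration conf) {
      System.out.println("starting the embedded HTTP server");
    }
  }

  public static void main(String[] args) {
    // A test instantiates the service with the expensive hooks stubbed out,
    // mirroring the anonymous subclasses added in TestJHSSecurity,
    // MiniYARNCluster and TestRMNMSecretKeys below.
    ExampleService service = new ExampleService() {
      @Override
      protected void doSecureLogin() {
        // Don't try to login using keytab in the testcases.
      }
      @Override
      protected void initializeWebApp(Configuration conf) {
        // Don't need it, skip.
      }
    };
    service.serviceStart(new Configuration());
  }
}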
HistoryClientService.java

@@ -88,6 +88,8 @@ import org.apache.hadoop.yarn.util.Records;
 import org.apache.hadoop.yarn.webapp.WebApp;
 import org.apache.hadoop.yarn.webapp.WebApps;
 
+import com.google.common.annotations.VisibleForTesting;
+
 /**
  * This module is responsible for talking to the
  * JobClient (user facing).
@@ -142,7 +144,8 @@ public class HistoryClientService extends AbstractService {
     super.serviceStart();
   }
 
-  private void initializeWebApp(Configuration conf) {
+  @VisibleForTesting
+  protected void initializeWebApp(Configuration conf) {
     webApp = new HsWebApp(history);
     InetSocketAddress bindAddress = MRWebAppUtil.getJHSWebBindAddress(conf);
     // NOTE: there should be a .at(InetSocketAddress)

JobHistoryServer.java

@@ -45,6 +45,8 @@ import org.apache.hadoop.yarn.event.Dispatcher;
 import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
 import org.apache.hadoop.yarn.logaggregation.AggregatedLogDeletionService;
 
+import com.google.common.annotations.VisibleForTesting;
+
 /******************************************************************
  * {@link JobHistoryServer} is responsible for servicing all job history
  * related requests from client.
@@ -60,10 +62,10 @@ public class JobHistoryServer extends CompositeService {
   public static final long historyServerTimeStamp = System.currentTimeMillis();
 
   private static final Log LOG = LogFactory.getLog(JobHistoryServer.class);
-  private HistoryContext historyContext;
+  protected HistoryContext historyContext;
   private HistoryClientService clientService;
   private JobHistory jobHistoryService;
-  private JHSDelegationTokenSecretManager jhsDTSecretManager;
+  protected JHSDelegationTokenSecretManager jhsDTSecretManager;
   private AggregatedLogDeletionService aggLogDelService;
   private HSAdminServer hsAdminServer;
   private HistoryServerStateStoreService stateStore;
@@ -129,8 +131,7 @@ public class JobHistoryServer extends CompositeService {
     historyContext = (HistoryContext)jobHistoryService;
     stateStore = createStateStore(conf);
     this.jhsDTSecretManager = createJHSSecretManager(conf, stateStore);
-    clientService = new HistoryClientService(historyContext,
-        this.jhsDTSecretManager);
+    clientService = createHistoryClientService();
     aggLogDelService = new AggregatedLogDeletionService();
     hsAdminServer = new HSAdminServer(aggLogDelService, jobHistoryService);
     addService(stateStore);
@@ -142,6 +143,12 @@ public class JobHistoryServer extends CompositeService {
     super.serviceInit(config);
   }
 
+  @VisibleForTesting
+  protected HistoryClientService createHistoryClientService() {
+    return new HistoryClientService(historyContext,
+        this.jhsDTSecretManager);
+  }
+
   protected JHSDelegationTokenSecretManager createJHSSecretManager(
       Configuration conf, HistoryServerStateStoreService store) {
     long secretKeyInterval =

TestJHSSecurity.java

@@ -39,6 +39,7 @@ import org.apache.hadoop.mapreduce.v2.api.protocolrecords.CancelDelegationTokenR
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDelegationTokenRequest;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetJobReportRequest;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.RenewDelegationTokenRequest;
+import org.apache.hadoop.mapreduce.v2.hs.HistoryClientService;
 import org.apache.hadoop.mapreduce.v2.hs.HistoryServerStateStoreService;
 import org.apache.hadoop.mapreduce.v2.hs.JHSDelegationTokenSecretManager;
 import org.apache.hadoop.mapreduce.v2.hs.JobHistoryServer;
@@ -94,6 +95,17 @@ public class TestJHSSecurity {
         return new JHSDelegationTokenSecretManager(initialInterval,
             maxLifetime, renewInterval, 3600000, store);
       }
+
+      @Override
+      protected HistoryClientService createHistoryClientService() {
+        return new HistoryClientService(historyContext,
+          this.jhsDTSecretManager) {
+          @Override
+          protected void initializeWebApp(Configuration conf) {
+            // Don't need it, skip.;
+          }
+        };
+      }
     };
 //    final JobHistoryServer jobHistoryServer = jhServer;
     jobHistoryServer.init(conf);

CHANGES.txt

@@ -483,6 +483,9 @@ Release 2.2.0 - 2013-10-13
     YARN-1278. Fixed NodeManager to not delete local resources for apps on resync
     command from RM - a bug caused by YARN-1149. (Hitesh Shah via vinodkv)
 
+    YARN-1463. Tests should avoid starting http-server where possible or creates
+    spnego keytab/principals (vinodkv via kasha)
+
 Release 2.1.1-beta - 2013-09-23
 
   INCOMPATIBLE CHANGES

WebApps.java

@@ -225,8 +225,8 @@ public class WebApps {
       boolean hasSpnegoConf = spnegoPrincipalKey != null
           && spnegoKeytabKey != null;
       if (hasSpnegoConf) {
-        builder.setUsernameConfKey(conf.get(spnegoPrincipalKey))
-            .setKeytabConfKey(conf.get(spnegoKeytabKey))
+        builder.setUsernameConfKey(spnegoPrincipalKey)
+            .setKeytabConfKey(spnegoKeytabKey)
             .setSecurityEnabled(UserGroupInformation.isSecurityEnabled());
       }
       HttpServer server = builder.build();

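The WebApps change above fixes an argument mix-up: the builder's setUsernameConfKey/setKeytabConfKey take the names of configuration keys (as the method names suggest) and resolve them against the configuration later, so passing conf.get(key) hands the builder an already-resolved value where a key is expected, and the later lookup finds nothing. A minimal, self-contained sketch of that failure mode; the helper method and the key name below are illustrative, not the real HttpServer builder API:

import org.apache.hadoop.conf.Configuration;

public class ConfKeyVsValueSketch {

  // Mimics what the real builder does internally: it receives a key name and
  // resolves the principal/keytab from the configuration itself.
  static String resolve(Configuration conf, String confKey) {
    return conf.get(confKey);
  }

  public static void main(String[] args) {
    final String key = "yarn.resourcemanager.webapp.spnego-principal"; // illustrative key
    Configuration conf = new Configuration();
    conf.set(key, "HTTP/localhost@EXAMPLE.COM");

    // Correct (what the patch does): pass the key name, let the builder resolve it.
    System.out.println(resolve(conf, key));            // HTTP/localhost@EXAMPLE.COM

    // Broken (what the old code did): pass the resolved value as if it were a key.
    System.out.println(resolve(conf, conf.get(key)));  // null
  }
}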
pom.xml

@@ -50,6 +50,11 @@
       <type>test-jar</type>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-minikdc</artifactId>
+      <scope>test</scope>
+    </dependency>
   </dependencies>
 
   <build>

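The new test-scoped hadoop-minikdc dependency is what lets the tests below create real Kerberos/SPNEGO credentials instead of requiring a pre-provisioned keytab. A minimal sketch of standalone MiniKdc usage, under the assumption of the stock MiniKdc API; the tests in this patch get the same thing by extending KerberosSecurityTestcase and calling getKdc(). The work directory and principal below are illustrative:

import java.io.File;
import java.util.Properties;
import org.apache.hadoop.minikdc.MiniKdc;

public class MiniKdcSketch {
  public static void main(String[] args) throws Exception {
    File workDir = new File("target", "minikdc-work");   // illustrative location
    workDir.mkdirs();

    Properties kdcConf = MiniKdc.createConf();
    MiniKdc kdc = new MiniKdc(kdcConf, workDir);
    kdc.start();
    try {
      // Generate a keytab for the SPNEGO principal the web apps will use.
      File keytab = new File(workDir, "spnego.keytab");
      kdc.createPrincipal(keytab, "HTTP/localhost");
      System.out.println("KDC realm: " + kdc.getRealm() + ", keytab: " + keytab);
    } finally {
      kdc.stop();
    }
  }
}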
MiniYARNCluster.java

@@ -26,7 +26,6 @@ import java.util.Collection;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ConcurrentMap;
 
-import com.google.common.annotations.VisibleForTesting;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -67,7 +66,7 @@ import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.event.RMAppAt
 import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.event.RMAppAttemptUnregistrationEvent;
 import org.apache.hadoop.yarn.webapp.util.WebAppUtils;
 
-import static org.junit.Assert.fail;
+import com.google.common.annotations.VisibleForTesting;
 
 /**
  * Embedded Yarn minicluster for testcases that need to interact with a cluster.
@@ -213,7 +212,12 @@ public class MiniYARNCluster extends CompositeService {
     }
 
     for (int i = 0; i < resourceManagers.length; i++) {
-      resourceManagers[i] = new ResourceManager();
+      resourceManagers[i] = new ResourceManager() {
+        @Override
+        protected void doSecureLogin() throws IOException {
+          // Don't try to login using keytab in the testcases.
+        }
+      };
       addService(new ResourceManagerWrapper(i));
     }
     for(int index = 0; index < nodeManagers.length; index++) {

TestContainerManagerSecurity.java

@@ -20,6 +20,7 @@ package org.apache.hadoop.yarn.server;
 
 import static org.junit.Assert.fail;
 
+import java.io.File;
 import java.io.IOException;
 import java.net.InetSocketAddress;
 import java.security.PrivilegedAction;
@@ -34,6 +35,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
+import org.apache.hadoop.minikdc.KerberosSecurityTestcase;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.SecretManager.InvalidToken;
@@ -66,21 +68,40 @@ import org.apache.hadoop.yarn.server.resourcemanager.security.NMTokenSecretManag
 import org.apache.hadoop.yarn.server.resourcemanager.security.RMContainerTokenSecretManager;
 import org.apache.hadoop.yarn.util.ConverterUtils;
 import org.apache.hadoop.yarn.util.Records;
+import org.junit.After;
+import org.junit.Before;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
 import org.junit.runners.Parameterized.Parameters;
 
 @RunWith(Parameterized.class)
-public class TestContainerManagerSecurity {
+public class TestContainerManagerSecurity extends KerberosSecurityTestcase {
 
   static Log LOG = LogFactory.getLog(TestContainerManagerSecurity.class);
   static final RecordFactory recordFactory = RecordFactoryProvider
       .getRecordFactory(null);
   private static MiniYARNCluster yarnCluster;
+  private static final File testRootDir = new File("target",
+      TestContainerManagerSecurity.class.getName() + "-root");
+  private static File httpSpnegoKeytabFile = new File(testRootDir,
+      "httpSpnegoKeytabFile.keytab");
+  private static String httpSpnegoPrincipal = "HTTP/localhost@EXAMPLE.COM";
 
   private Configuration conf;
 
+  @Before
+  public void setUp() throws Exception {
+    testRootDir.mkdirs();
+    httpSpnegoKeytabFile.deleteOnExit();
+    getKdc().createPrincipal(httpSpnegoKeytabFile, httpSpnegoPrincipal);
+  }
+
+  @After
+  public void tearDown() {
+    testRootDir.delete();
+  }
+
   @Parameters
   public static Collection<Object[]> configs() {
     Configuration configurationWithoutSecurity = new Configuration();
@@ -89,8 +110,18 @@ public class TestContainerManagerSecurity {
 
     Configuration configurationWithSecurity = new Configuration();
     configurationWithSecurity.set(
-        CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION,
-        "kerberos");
+        CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
+    configurationWithSecurity.set(
+        YarnConfiguration.RM_WEBAPP_SPNEGO_USER_NAME_KEY, httpSpnegoPrincipal);
+    configurationWithSecurity.set(
+        YarnConfiguration.RM_WEBAPP_SPNEGO_KEYTAB_FILE_KEY,
+        httpSpnegoKeytabFile.getAbsolutePath());
+    configurationWithSecurity.set(
+        YarnConfiguration.NM_WEBAPP_SPNEGO_USER_NAME_KEY, httpSpnegoPrincipal);
+    configurationWithSecurity.set(
+        YarnConfiguration.NM_WEBAPP_SPNEGO_KEYTAB_FILE_KEY,
+        httpSpnegoKeytabFile.getAbsolutePath());
+
     return Arrays.asList(new Object[][] { { configurationWithoutSecurity },
         { configurationWithSecurity } });
   }

TestRMNMSecretKeys.java

@@ -64,6 +64,10 @@ public class TestRMNMSecretKeys {
       protected Dispatcher createDispatcher() {
         return dispatcher;
       }
+      @Override
+      protected void startWepApp() {
+        // Don't need it, skip.
+      }
     };
     rm.init(conf);
     rm.start();