YARN-2894. Fixed a bug regarding application view acl when RM fails over. Contributed by Rohith Sharmaks

Jian He 2014-12-02 17:16:20 -08:00
parent 75a326aaff
commit 392c3aaea8
19 changed files with 30 additions and 99 deletions

@@ -169,6 +169,9 @@ Release 2.7.0 - UNRELEASED
YARN-2890. MiniYARNCluster should start the timeline server based on the
configuration. (Mit Desai via zjshen)
YARN-2894. Fixed a bug regarding application view acl when RM fails over.
(Rohith Sharmaks via jianhe)
Release 2.6.0 - 2014-11-18
INCOMPATIBLE CHANGES

@@ -40,10 +40,8 @@ import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppMetrics;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttempt;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptMetrics;
import org.apache.hadoop.yarn.server.resourcemanager.security.QueueACLsManager;
import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.AppAttemptInfo;
import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.AppInfo;
import org.apache.hadoop.yarn.server.security.ApplicationACLsManager;
import org.apache.hadoop.yarn.util.Apps;
import org.apache.hadoop.yarn.util.Times;
import org.apache.hadoop.yarn.util.resource.Resources;
@@ -58,18 +56,14 @@ import com.google.inject.Inject;
public class AppBlock extends HtmlBlock {
private ApplicationACLsManager aclsManager;
private QueueACLsManager queueACLsManager;
private final Configuration conf;
private final ResourceManager rm;
@Inject
AppBlock(ResourceManager rm, ViewContext ctx,
ApplicationACLsManager aclsManager, QueueACLsManager queueACLsManager,
Configuration conf) {
AppBlock(ResourceManager rm, ViewContext ctx, Configuration conf) {
super(ctx);
this.aclsManager = aclsManager;
this.queueACLsManager = queueACLsManager;
this.conf = conf;
this.rm = rm;
}
@Override
@@ -88,7 +82,7 @@ public class AppBlock extends HtmlBlock {
return;
}
RMContext context = getInstance(RMContext.class);
RMContext context = this.rm.getRMContext();
RMApp rmApp = context.getRMApps().get(appID);
if (rmApp == null) {
puts("Application not found: "+ aid);
@@ -103,9 +97,9 @@ public class AppBlock extends HtmlBlock {
callerUGI = UserGroupInformation.createRemoteUser(remoteUser);
}
if (callerUGI != null
&& !(this.aclsManager.checkAccess(callerUGI,
ApplicationAccessType.VIEW_APP, app.getUser(), appID) ||
this.queueACLsManager.checkAccess(callerUGI,
&& !(this.rm.getApplicationACLsManager().checkAccess(callerUGI,
ApplicationAccessType.VIEW_APP, app.getUser(), appID) || this.rm
.getQueueACLsManager().checkAccess(callerUGI,
QueueACL.ADMINISTER_QUEUE, app.getQueue()))) {
puts("You (User " + remoteUser
+ ") are not authorized to view application " + appID);

@@ -31,7 +31,7 @@ import org.apache.commons.lang.StringEscapeUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.server.resourcemanager.RMContext;
import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp;
import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.AppInfo;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
@@ -46,9 +46,10 @@ class AppsBlock extends HtmlBlock {
final ConcurrentMap<ApplicationId, RMApp> apps;
private final Configuration conf;
@Inject AppsBlock(RMContext rmContext, ViewContext ctx, Configuration conf) {
@Inject
AppsBlock(ResourceManager rm, ViewContext ctx, Configuration conf) {
super(ctx);
apps = rmContext.getRMApps();
apps = rm.getRMContext().getRMApps();
this.conf = conf;
}

@@ -20,7 +20,6 @@ package org.apache.hadoop.yarn.server.resourcemanager.webapp;
import static org.apache.hadoop.yarn.util.StringHelper.join;
import org.apache.hadoop.yarn.server.resourcemanager.RMContext;
import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fifo.FifoScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.FifoSchedulerInfo;
@@ -44,7 +43,8 @@ class DefaultSchedulerPage extends RmView {
static class QueueInfoBlock extends HtmlBlock {
final FifoSchedulerInfo sinfo;
@Inject QueueInfoBlock(RMContext context, ViewContext ctx, ResourceManager rm) {
@Inject
QueueInfoBlock(ViewContext ctx, ResourceManager rm) {
super(ctx);
sinfo = new FifoSchedulerInfo(rm);
}

@@ -34,7 +34,6 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.server.resourcemanager.RMContext;
import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppState;
@@ -58,13 +57,15 @@ public class FairSchedulerAppsBlock extends HtmlBlock {
final FairSchedulerInfo fsinfo;
final Configuration conf;
@Inject public FairSchedulerAppsBlock(RMContext rmContext,
ResourceManager rm, ViewContext ctx, Configuration conf) {
@Inject
public FairSchedulerAppsBlock(ResourceManager rm, ViewContext ctx,
Configuration conf) {
super(ctx);
FairScheduler scheduler = (FairScheduler) rm.getResourceScheduler();
fsinfo = new FairSchedulerInfo(scheduler);
apps = new ConcurrentHashMap<ApplicationId, RMApp>();
for (Map.Entry<ApplicationId, RMApp> entry : rmContext.getRMApps().entrySet()) {
for (Map.Entry<ApplicationId, RMApp> entry : rm.getRMContext().getRMApps()
.entrySet()) {
if (!(RMAppState.NEW.equals(entry.getValue().getState())
|| RMAppState.NEW_SAVING.equals(entry.getValue().getState())
|| RMAppState.SUBMITTED.equals(entry.getValue().getState()))) {

@@ -19,7 +19,6 @@
package org.apache.hadoop.yarn.server.resourcemanager.webapp;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.yarn.server.resourcemanager.RMContext;
import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.ClusterMetricsInfo;
import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.UserMetricsInfo;
@@ -38,13 +37,11 @@ import com.google.inject.Inject;
public class MetricsOverviewTable extends HtmlBlock {
private static final long BYTES_IN_MB = 1024 * 1024;
private final RMContext rmContext;
private final ResourceManager rm;
@Inject
MetricsOverviewTable(RMContext context, ResourceManager rm, ViewContext ctx) {
MetricsOverviewTable(ResourceManager rm, ViewContext ctx) {
super(ctx);
this.rmContext = context;
this.rm = rm;
}
@@ -56,7 +53,7 @@ public class MetricsOverviewTable extends HtmlBlock {
html.style(".metrics {margin-bottom:5px}");
ClusterMetricsInfo clusterMetrics =
new ClusterMetricsInfo(this.rm, this.rmContext);
new ClusterMetricsInfo(this.rm);
DIV<Hamlet> div = html.div().$class("metrics");
@@ -110,7 +107,7 @@ public class MetricsOverviewTable extends HtmlBlock {
String user = request().getRemoteUser();
if (user != null) {
UserMetricsInfo userMetrics = new UserMetricsInfo(this.rm, this.rmContext, user);
UserMetricsInfo userMetrics = new UserMetricsInfo(this.rm, user);
if (userMetrics.metricsAvailable()) {
div.h3("User Metrics for " + user).
table("#usermetricsoverview").

@@ -28,7 +28,6 @@ import java.util.Collection;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.yarn.api.records.NodeState;
import org.apache.hadoop.yarn.server.resourcemanager.RMContext;
import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNode;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceScheduler;
@@ -46,14 +45,12 @@ import com.google.inject.Inject;
class NodesPage extends RmView {
static class NodesBlock extends HtmlBlock {
final RMContext rmContext;
final ResourceManager rm;
private static final long BYTES_IN_MB = 1024 * 1024;
@Inject
NodesBlock(RMContext context, ResourceManager rm, ViewContext ctx) {
NodesBlock(ResourceManager rm, ViewContext ctx) {
super(ctx);
this.rmContext = context;
this.rm = rm;
}
@@ -85,14 +82,14 @@ class NodesPage extends RmView {
if(type != null && !type.isEmpty()) {
stateFilter = NodeState.valueOf(type.toUpperCase());
}
Collection<RMNode> rmNodes = this.rmContext.getRMNodes().values();
Collection<RMNode> rmNodes = this.rm.getRMContext().getRMNodes().values();
boolean isInactive = false;
if (stateFilter != null) {
switch (stateFilter) {
case DECOMMISSIONED:
case LOST:
case REBOOTED:
rmNodes = this.rmContext.getInactiveRMNodes().values();
rmNodes = this.rm.getRMContext().getInactiveRMNodes().values();
isInactive = true;
break;
}

@@ -24,11 +24,8 @@ import java.net.InetSocketAddress;
import org.apache.hadoop.ha.HAServiceProtocol.HAServiceState;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.resourcemanager.RMContext;
import org.apache.hadoop.yarn.util.RMHAUtils;
import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
import org.apache.hadoop.yarn.server.resourcemanager.security.QueueACLsManager;
import org.apache.hadoop.yarn.server.security.ApplicationACLsManager;
import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
import org.apache.hadoop.yarn.webapp.WebApp;
import org.apache.hadoop.yarn.webapp.YarnWebParams;
@@ -56,10 +53,6 @@ public class RMWebApp extends WebApp implements YarnWebParams {
if (rm != null) {
bind(ResourceManager.class).toInstance(rm);
bind(RMContext.class).toInstance(rm.getRMContext());
bind(ApplicationACLsManager.class).toInstance(
rm.getApplicationACLsManager());
bind(QueueACLsManager.class).toInstance(rm.getQueueACLsManager());
}
route("/", RmController.class);
route(pajoin("/nodes", NODE_STATE), RmController.class, "nodes");
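
With the blocks dereferencing the ResourceManager directly, RMWebApp no longer binds ApplicationACLsManager or QueueACLsManager into the injector; web code reaches them through rm.getApplicationACLsManager() and rm.getQueueACLsManager() instead. A standalone Guice module with the binding the updated blocks strictly need might look like the sketch below (the module class is hypothetical; in the real code the bind() call lives inside RMWebApp#setup()):

```java
import com.google.inject.AbstractModule;
import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;

/** Hypothetical module mirroring the trimmed-down web bindings. */
class RmWebBindings extends AbstractModule {
  private final ResourceManager rm;

  RmWebBindings(ResourceManager rm) {
    this.rm = rm;
  }

  @Override
  protected void configure() {
    // Only the RM handle is shared with the web layer; anything it owns
    // (RMContext, ACL managers, scheduler) is resolved from it per request.
    bind(ResourceManager.class).toInstance(rm);
  }
}
```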

@@ -205,7 +205,7 @@ public class RMWebServices {
@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
public ClusterMetricsInfo getClusterMetricsInfo() {
init();
return new ClusterMetricsInfo(this.rm, this.rm.getRMContext());
return new ClusterMetricsInfo(this.rm);
}
@GET

@@ -22,7 +22,6 @@ import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlRootElement;
import org.apache.hadoop.yarn.server.resourcemanager.ClusterMetrics;
import org.apache.hadoop.yarn.server.resourcemanager.RMContext;
import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.QueueMetrics;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceScheduler;
@@ -62,7 +61,7 @@ public class ClusterMetricsInfo {
public ClusterMetricsInfo() {
} // JAXB needs this
public ClusterMetricsInfo(final ResourceManager rm, final RMContext rmContext) {
public ClusterMetricsInfo(final ResourceManager rm) {
ResourceScheduler rs = rm.getResourceScheduler();
QueueMetrics metrics = rs.getRootQueueMetrics();
ClusterMetrics clusterMetrics = ClusterMetrics.getMetrics();

@@ -53,8 +53,7 @@ public class UserMetricsInfo {
public UserMetricsInfo() {
} // JAXB needs this
public UserMetricsInfo(final ResourceManager rm, final RMContext rmContext,
final String user) {
public UserMetricsInfo(final ResourceManager rm, final String user) {
ResourceScheduler rs = rm.getResourceScheduler();
QueueMetrics metrics = rs.getRootQueueMetrics();
QueueMetrics userMetrics = metrics.getUserMetrics(user);
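
Both DAOs now take only the ResourceManager, matching the call sites in MetricsOverviewTable and RMWebServices shown above. A small usage sketch (the wrapper class is illustrative only):

```java
import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.ClusterMetricsInfo;
import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.UserMetricsInfo;

final class MetricsDaoUsage {
  private MetricsDaoUsage() {}

  // The RMContext argument is gone from both constructors; the DAOs derive
  // whatever they need from the ResourceManager handle itself.
  static ClusterMetricsInfo clusterMetrics(ResourceManager rm) {
    return new ClusterMetricsInfo(rm);
  }

  static UserMetricsInfo userMetrics(ResourceManager rm, String user) {
    return new UserMetricsInfo(rm, user);
  }
}
```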

@@ -29,20 +29,16 @@ import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.ha.HAServiceProtocol;
import org.apache.hadoop.service.Service.STATE;
import org.apache.hadoop.util.VersionInfo;
import org.apache.hadoop.yarn.api.records.QueueState;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.resourcemanager.ClusterMetrics;
import org.apache.hadoop.yarn.server.resourcemanager.MockRM;
import org.apache.hadoop.yarn.server.resourcemanager.RMContext;
import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.QueueMetrics;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fifo.FifoScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.security.QueueACLsManager;
import org.apache.hadoop.yarn.server.security.ApplicationACLsManager;
import org.apache.hadoop.yarn.util.YarnVersionInfo;
import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
import org.apache.hadoop.yarn.webapp.WebServicesTestUtils;
@@ -83,10 +79,6 @@ public class TestRMWebServices extends JerseyTest {
ResourceScheduler.class);
rm = new MockRM(conf);
bind(ResourceManager.class).toInstance(rm);
bind(RMContext.class).toInstance(rm.getRMContext());
bind(ApplicationACLsManager.class).toInstance(
rm.getApplicationACLsManager());
bind(QueueACLsManager.class).toInstance(rm.getQueueACLsManager());
serve("/*").with(GuiceContainer.class);
}
});
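
The Jersey test modules follow suit: with the RMContext and ACL-manager bindings dropped, the injector only needs the MockRM bound as ResourceManager before serving the web services through GuiceContainer. A sketch of that wiring as a named module (the tests actually declare it inline as an anonymous ServletModule; the class name here is made up):

```java
import com.google.inject.servlet.ServletModule;
import com.sun.jersey.guice.spi.container.servlet.GuiceContainer;
import org.apache.hadoop.yarn.server.resourcemanager.MockRM;
import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;

/** Hypothetical module showing the minimal test wiring after this change. */
class RmWebServicesTestModule extends ServletModule {
  private final MockRM rm;

  RmWebServicesTestModule(MockRM rm) {
    this.rm = rm;
  }

  @Override
  protected void configureServlets() {
    // One binding is enough: every block and DAO resolves its collaborators
    // through the ResourceManager.
    bind(ResourceManager.class).toInstance(rm);
    serve("/*").with(GuiceContainer.class);
  }
}
```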

@@ -38,7 +38,6 @@ import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.resourcemanager.MockAM;
import org.apache.hadoop.yarn.server.resourcemanager.MockNM;
import org.apache.hadoop.yarn.server.resourcemanager.MockRM;
import org.apache.hadoop.yarn.server.resourcemanager.RMContext;
import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppState;
@@ -46,8 +45,6 @@ import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttempt;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptState;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fifo.FifoScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.security.QueueACLsManager;
import org.apache.hadoop.yarn.server.security.ApplicationACLsManager;
import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
import org.apache.hadoop.yarn.webapp.WebServicesTestUtils;
import org.codehaus.jettison.json.JSONArray;
@@ -92,10 +89,6 @@ public class TestRMWebServicesApps extends JerseyTest {
ResourceScheduler.class);
rm = new MockRM(conf);
bind(ResourceManager.class).toInstance(rm);
bind(RMContext.class).toInstance(rm.getRMContext());
bind(ApplicationACLsManager.class).toInstance(
rm.getApplicationACLsManager());
bind(QueueACLsManager.class).toInstance(rm.getQueueACLsManager());
serve("/*").with(GuiceContainer.class);
}
});

@@ -61,7 +61,6 @@ import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.resourcemanager.MockNM;
import org.apache.hadoop.yarn.server.resourcemanager.MockRM;
import org.apache.hadoop.yarn.server.resourcemanager.RMContext;
import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppState;
@@ -69,12 +68,10 @@ import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.Capacity
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacitySchedulerConfiguration;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair.FairScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair.FairSchedulerConfiguration;
import org.apache.hadoop.yarn.server.resourcemanager.security.QueueACLsManager;
import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.AppState;
import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.ApplicationSubmissionContextInfo;
import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.CredentialsInfo;
import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.LocalResourceInfo;
import org.apache.hadoop.yarn.server.security.ApplicationACLsManager;
import org.apache.hadoop.yarn.util.ConverterUtils;
import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
import org.apache.hadoop.yarn.webapp.WebServicesTestUtils;
@@ -175,10 +172,6 @@ public class TestRMWebServicesAppsModification extends JerseyTest {
YarnConfiguration.DEFAULT_RM_AM_MAX_ATTEMPTS);
rm = new MockRM(conf);
bind(ResourceManager.class).toInstance(rm);
bind(RMContext.class).toInstance(rm.getRMContext());
bind(ApplicationACLsManager.class).toInstance(
rm.getApplicationACLsManager());
bind(QueueACLsManager.class).toInstance(rm.getQueueACLsManager());
if (setAuthFilter) {
filter("/*").through(TestRMCustomAuthFilter.class);
}

@@ -30,13 +30,10 @@ import javax.xml.parsers.DocumentBuilderFactory;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.resourcemanager.MockRM;
import org.apache.hadoop.yarn.server.resourcemanager.RMContext;
import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacityScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacitySchedulerConfiguration;
import org.apache.hadoop.yarn.server.resourcemanager.security.QueueACLsManager;
import org.apache.hadoop.yarn.server.security.ApplicationACLsManager;
import org.apache.hadoop.yarn.util.resource.Resources;
import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
import org.apache.hadoop.yarn.webapp.WebServicesTestUtils;
@@ -104,10 +101,6 @@ public class TestRMWebServicesCapacitySched extends JerseyTest {
ResourceScheduler.class);
rm = new MockRM(conf);
bind(ResourceManager.class).toInstance(rm);
bind(RMContext.class).toInstance(rm.getRMContext());
bind(ApplicationACLsManager.class).toInstance(
rm.getApplicationACLsManager());
bind(QueueACLsManager.class).toInstance(rm.getQueueACLsManager());
serve("/*").with(GuiceContainer.class);
}
});

@@ -48,13 +48,10 @@ import org.apache.hadoop.util.Time;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.security.client.RMDelegationTokenIdentifier;
import org.apache.hadoop.yarn.server.resourcemanager.MockRM;
import org.apache.hadoop.yarn.server.resourcemanager.RMContext;
import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fifo.FifoScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.security.QueueACLsManager;
import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.DelegationToken;
import org.apache.hadoop.yarn.server.security.ApplicationACLsManager;
import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
import org.apache.hadoop.yarn.webapp.WebServicesTestUtils;
import org.codehaus.jettison.json.JSONException;
@@ -158,10 +155,6 @@ public class TestRMWebServicesDelegationTokens extends JerseyTest {
rmconf.setBoolean(YarnConfiguration.YARN_ACL_ENABLE, true);
rm = new MockRM(rmconf);
bind(ResourceManager.class).toInstance(rm);
bind(RMContext.class).toInstance(rm.getRMContext());
bind(ApplicationACLsManager.class).toInstance(
rm.getApplicationACLsManager());
bind(QueueACLsManager.class).toInstance(rm.getQueueACLsManager());
if (isKerberosAuth == true) {
filter("/*").through(TestKerberosAuthFilter.class);
} else {

@@ -24,12 +24,9 @@ import javax.ws.rs.core.MediaType;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.resourcemanager.MockRM;
import org.apache.hadoop.yarn.server.resourcemanager.RMContext;
import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair.FairScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.security.QueueACLsManager;
import org.apache.hadoop.yarn.server.security.ApplicationACLsManager;
import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
@@ -60,10 +57,6 @@ public class TestRMWebServicesFairScheduler extends JerseyTest {
ResourceScheduler.class);
rm = new MockRM(conf);
bind(ResourceManager.class).toInstance(rm);
bind(RMContext.class).toInstance(rm.getRMContext());
bind(ApplicationACLsManager.class).toInstance(
rm.getApplicationACLsManager());
bind(QueueACLsManager.class).toInstance(rm.getQueueACLsManager());
serve("/*").with(GuiceContainer.class);
}
});

@@ -19,7 +19,6 @@
package org.apache.hadoop.yarn.server.resourcemanager.webapp;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
@@ -33,7 +32,6 @@ import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.resourcemanager.MockRM;
import org.apache.hadoop.yarn.server.resourcemanager.RMContext;
import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.NodeLabelsInfo;
import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.NodeToLabelsInfo;
@@ -84,7 +82,6 @@ public class TestRMWebServicesNodeLabels extends JerseyTest {
conf.set(YarnConfiguration.YARN_ADMIN_ACL, userName);
rm = new MockRM(conf);
bind(ResourceManager.class).toInstance(rm);
bind(RMContext.class).toInstance(rm.getRMContext());
filter("/*").through(
TestRMWebServicesAppsModification.TestRMCustomAuthFilter.class);
serve("/*").with(GuiceContainer.class);

@@ -36,15 +36,12 @@ import org.apache.hadoop.yarn.api.records.NodeState;
import org.apache.hadoop.yarn.server.api.records.NodeHealthStatus;
import org.apache.hadoop.yarn.server.resourcemanager.MockNM;
import org.apache.hadoop.yarn.server.resourcemanager.MockRM;
import org.apache.hadoop.yarn.server.resourcemanager.RMContext;
import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNode;
import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNodeImpl;
import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNodeStatusEvent;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerNodeReport;
import org.apache.hadoop.yarn.server.resourcemanager.security.QueueACLsManager;
import org.apache.hadoop.yarn.server.security.ApplicationACLsManager;
import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
import org.apache.hadoop.yarn.webapp.WebServicesTestUtils;
import org.codehaus.jettison.json.JSONArray;
@@ -84,10 +81,6 @@ public class TestRMWebServicesNodes extends JerseyTest {
rm.getRMContext().getContainerTokenSecretManager().rollMasterKey();
rm.getRMContext().getNMTokenSecretManager().rollMasterKey();
bind(ResourceManager.class).toInstance(rm);
bind(RMContext.class).toInstance(rm.getRMContext());
bind(ApplicationACLsManager.class).toInstance(
rm.getApplicationACLsManager());
bind(QueueACLsManager.class).toInstance(rm.getQueueACLsManager());
serve("/*").with(GuiceContainer.class);
}
});