YARN-748. Moved BuilderUtils from yarn-common to yarn-server-common for eventual retirement. Contributed by Jian He.

MAPREDUCE-5297. Updated MR App since BuilderUtils is no longer public after YARN-748. Contributed by Jian He.
svn merge --ignore-ancestry -c 1489257 ../../trunk/


git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1489262 13f79535-47bb-0310-9956-ffa450edef68
Vinod Kumar Vavilapalli 2013-06-04 00:56:26 +00:00
parent eb7af141e4
commit cafa5d8bb5
119 changed files with 467 additions and 334 deletions
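The migration is mechanical throughout this commit: call sites that used the retired BuilderUtils helpers now call static newInstance() factories on the record classes themselves. A minimal before/after sketch of the pattern (a standalone illustration; the class name and values below are hypothetical, not taken from any file in this diff):

    import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
    import org.apache.hadoop.yarn.api.records.ApplicationId;
    import org.apache.hadoop.yarn.api.records.ContainerId;

    public class MigrationSketch {
      public static void main(String[] args) {
        // Before YARN-748 (BuilderUtils is no longer public):
        //   ApplicationId appId = BuilderUtils.newApplicationId(1234567890L, 42);
        // After: each record class carries its own factory method.
        ApplicationId appId = ApplicationId.newInstance(1234567890L, 42);
        ApplicationAttemptId attemptId =
            ApplicationAttemptId.newInstance(appId, 1);
        ContainerId containerId = ContainerId.newInstance(attemptId, 1);
        System.out.println(containerId);
      }
    }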

View File

@@ -356,6 +356,9 @@ Release 2.1.0-beta - UNRELEASED
     MAPREDUCE-5245. Added back constants to JobConf to fix incompatibilities.
     (Zhijie Shen via acmurthy)
 
+    MAPREDUCE-5297. Updated MR App since BuilderUtils is no longer public
+    after YARN-748. (Jian He via vinodkv)
+
   BREAKDOWN OF HADOOP-8562 SUBTASKS
 
     MAPREDUCE-4739. Some MapReduce tests fail to find winutils.

View File

@@ -45,7 +45,6 @@ import org.apache.hadoop.yarn.api.records.ResourceRequest;
 import org.apache.hadoop.yarn.event.EventHandler;
 import org.apache.hadoop.yarn.factories.RecordFactory;
 import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
-import org.apache.hadoop.yarn.util.BuilderUtils;
 
 /**
  * Allocates containers locally. Doesn't allocate a real container;
@@ -94,7 +93,7 @@ public class LocalContainerAllocator extends RMCommunicator
   @SuppressWarnings("unchecked")
   @Override
   protected synchronized void heartbeat() throws Exception {
-    AllocateRequest allocateRequest = BuilderUtils.newAllocateRequest(
+    AllocateRequest allocateRequest = AllocateRequest.newInstance(
         this.applicationAttemptId, this.lastResponseID, super
             .getApplicationProgress(), new ArrayList<ResourceRequest>(),
         new ArrayList<ContainerId>());

View File

@@ -46,7 +46,6 @@ import org.apache.hadoop.yarn.api.records.ResourceRequest;
 import org.apache.hadoop.yarn.exceptions.YarnRemoteException;
 import org.apache.hadoop.yarn.factories.RecordFactory;
 import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
-import org.apache.hadoop.yarn.util.BuilderUtils;
 
 /**
@@ -74,7 +73,7 @@ public abstract class RMContainerRequestor extends RMCommunicator {
   // use custom comparator to make sure ResourceRequest objects differing only in
   // numContainers dont end up as duplicates
   private final Set<ResourceRequest> ask = new TreeSet<ResourceRequest>(
-      new org.apache.hadoop.yarn.util.BuilderUtils.ResourceRequestComparator());
+      new org.apache.hadoop.yarn.api.records.ResourceRequest.ResourceRequestComparator());
   private final Set<ContainerId> release = new TreeSet<ContainerId>();
 
   private boolean nodeBlacklistingEnabled;
@@ -146,7 +145,7 @@ public abstract class RMContainerRequestor extends RMCommunicator {
   }
 
   protected AllocateResponse makeRemoteRequest() throws IOException {
-    AllocateRequest allocateRequest = BuilderUtils.newAllocateRequest(
+    AllocateRequest allocateRequest = AllocateRequest.newInstance(
         applicationAttemptId, lastResponseID, super.getApplicationProgress(),
         new ArrayList<ResourceRequest>(ask), new ArrayList<ContainerId>(
             release));

View File

@@ -18,9 +18,13 @@
 
 package org.apache.hadoop.mapreduce.jobhistory;
 
-import static junit.framework.Assert.*;
-import static org.mockito.Matchers.*;
-import static org.mockito.Mockito.*;
+import static junit.framework.Assert.assertTrue;
+import static org.mockito.Matchers.any;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.when;
 
 import java.io.File;
 import java.io.IOException;
@@ -44,7 +48,6 @@ import org.apache.hadoop.yarn.YarnException;
 import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ContainerId;
-import org.apache.hadoop.yarn.util.BuilderUtils;
 import org.junit.Test;
 import org.mockito.Mockito;
@@ -270,7 +273,7 @@ public class TestJobHistoryEventHandler {
 
   private class TestParams {
     String workDir = setupTestWorkDir();
-    ApplicationId appId = BuilderUtils.newApplicationId(200, 1);
+    ApplicationId appId = ApplicationId.newInstance(200, 1);
     ApplicationAttemptId appAttemptId =
         ApplicationAttemptId.newInstance(appId, 1);
     ContainerId containerId = ContainerId.newInstance(appAttemptId, 1);

View File

@@ -32,6 +32,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileContext;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.WrappedJvmID;
 import org.apache.hadoop.mapreduce.JobContext;
 import org.apache.hadoop.mapreduce.JobID;
@@ -81,6 +82,7 @@ import org.apache.hadoop.mapreduce.v2.util.MRApps;
 import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.Credentials;
+import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.yarn.Clock;
 import org.apache.hadoop.yarn.ClusterInfo;
@@ -90,15 +92,14 @@ import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.Container;
 import org.apache.hadoop.yarn.api.records.ContainerId;
-import org.apache.hadoop.yarn.api.records.Token;
 import org.apache.hadoop.yarn.api.records.NodeId;
 import org.apache.hadoop.yarn.api.records.Resource;
+import org.apache.hadoop.yarn.api.records.Token;
 import org.apache.hadoop.yarn.event.EventHandler;
 import org.apache.hadoop.yarn.security.ContainerTokenIdentifier;
 import org.apache.hadoop.yarn.service.Service;
 import org.apache.hadoop.yarn.state.StateMachine;
 import org.apache.hadoop.yarn.state.StateMachineFactory;
-import org.apache.hadoop.yarn.util.BuilderUtils;
 
 /**
@@ -516,8 +517,7 @@ public class MRApp extends MRAppMaster {
       ContainerTokenIdentifier containerTokenIdentifier =
           new ContainerTokenIdentifier(cId, nodeId.toString(), "user",
             resource, System.currentTimeMillis() + 10000, 42, 42);
-      Token containerToken =
-          BuilderUtils.newContainerToken(nodeId, "password".getBytes(),
+      Token containerToken = newContainerToken(nodeId, "password".getBytes(),
           containerTokenIdentifier);
       Container container = Container.newInstance(cId, nodeId,
           NM_HOST + ":" + NM_HTTP_PORT, resource, null, containerToken);
@@ -690,5 +690,37 @@ public class MRApp extends MRAppMaster {
     }
   }
 
+  public static Token newContainerToken(NodeId nodeId,
+      byte[] password, ContainerTokenIdentifier tokenIdentifier) {
+    // RPC layer client expects ip:port as service for tokens
+    InetSocketAddress addr =
+        NetUtils.createSocketAddrForHost(nodeId.getHost(), nodeId.getPort());
+    // NOTE: use SecurityUtil.setTokenService if this becomes a "real" token
+    Token containerToken =
+        Token.newInstance(tokenIdentifier.getBytes(),
+          ContainerTokenIdentifier.KIND.toString(), password, SecurityUtil
+            .buildTokenService(addr).toString());
+    return containerToken;
+  }
+
+  public static ContainerId newContainerId(int appId, int appAttemptId,
+      long timestamp, int containerId) {
+    ApplicationId applicationId = ApplicationId.newInstance(timestamp, appId);
+    ApplicationAttemptId applicationAttemptId =
+        ApplicationAttemptId.newInstance(applicationId, appAttemptId);
+    return ContainerId.newInstance(applicationAttemptId, containerId);
+  }
+
+  public static ContainerTokenIdentifier newContainerTokenIdentifier(
+      Token containerToken) throws IOException {
+    org.apache.hadoop.security.token.Token<ContainerTokenIdentifier> token =
+        new org.apache.hadoop.security.token.Token<ContainerTokenIdentifier>(
+            containerToken.getIdentifier().array(), containerToken
+                .getPassword().array(), new Text(containerToken.getKind()),
+            new Text(containerToken.getService()));
+    return token.decodeIdentifier();
+  }
 }
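The three helpers above are test-side stand-ins for the retired BuilderUtils methods. A rough round-trip sketch of how a test could use them, reusing the imports already present in MRApp (the values are arbitrary test inputs mirroring the calls in this diff):

    // Build a ContainerId and a token identifier the way MRApp does.
    ContainerId cId = MRApp.newContainerId(1, 1, System.currentTimeMillis(), 1);
    NodeId nodeId = NodeId.newInstance("127.0.0.1", 1234);
    ContainerTokenIdentifier identifier =
        new ContainerTokenIdentifier(cId, nodeId.toString(), "user",
            Resource.newInstance(1024, 1),
            System.currentTimeMillis() + 10000, 42, 42);
    // Wrap the identifier in a records Token, then decode it back out.
    Token token =
        MRApp.newContainerToken(nodeId, "password".getBytes(), identifier);
    ContainerTokenIdentifier decoded = MRApp.newContainerTokenIdentifier(token);
    assert decoded.getContainerID().equals(cId);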

View File

@@ -66,7 +66,6 @@ import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ContainerId;
 import org.apache.hadoop.yarn.api.records.NodeId;
-import org.apache.hadoop.yarn.util.BuilderUtils;
 import org.apache.hadoop.yarn.util.Records;
 
 import com.google.common.collect.Iterators;
@@ -621,7 +620,7 @@ public class MockJobs extends MockApps {
   private static AMInfo createAMInfo(int attempt) {
     ApplicationAttemptId appAttemptId = ApplicationAttemptId.newInstance(
-        BuilderUtils.newApplicationId(100, 1), attempt);
+        ApplicationId.newInstance(100, 1), attempt);
     ContainerId containerId = ContainerId.newInstance(appAttemptId, 1);
     return MRBuilderUtils.newAMInfo(appAttemptId, System.currentTimeMillis(),
         containerId, NM_HOST, NM_PORT, NM_HTTP_PORT);

View File

@@ -63,7 +63,6 @@ import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ContainerId;
 import org.apache.hadoop.yarn.event.EventHandler;
-import org.apache.hadoop.yarn.util.BuilderUtils;
 import org.apache.hadoop.yarn.util.ConverterUtils;
 import org.apache.log4j.Level;
 import org.apache.log4j.LogManager;
@@ -359,7 +358,7 @@ public class TestMRAppMaster {
     setNewEnvironmentHack(newEnv);
     credentials.writeTokenStorageFile(tokenFilePath, conf);
 
-    ApplicationId appId = BuilderUtils.newApplicationId(12345, 56);
+    ApplicationId appId = ApplicationId.newInstance(12345, 56);
     ApplicationAttemptId applicationAttemptId =
         ApplicationAttemptId.newInstance(appId, 1);
     ContainerId containerId =

View File

@@ -98,7 +98,6 @@ import org.apache.hadoop.yarn.server.resourcemanager.scheduler.Allocation;
 import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceScheduler;
 import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.SchedulerEvent;
 import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fifo.FifoScheduler;
-import org.apache.hadoop.yarn.util.BuilderUtils;
 import org.junit.After;
 import org.junit.Test;
@@ -1596,7 +1595,7 @@ public class TestRMContainerAllocator {
     AppContext appContext = mock(AppContext.class);
     when(appContext.getClock()).thenReturn(clock);
     when(appContext.getApplicationID()).thenReturn(
-        BuilderUtils.newApplicationId(1, 1));
+        ApplicationId.newInstance(1, 1));
     RMContainerAllocator allocator = new RMContainerAllocator(
         mock(ClientService.class), appContext) {
@@ -1654,7 +1653,7 @@ public class TestRMContainerAllocator {
     TaskAttemptId attemptId = MRBuilderUtils.newTaskAttemptId(
         MRBuilderUtils.newTaskId(
             MRBuilderUtils.newJobId(1, 1, 1), 1, TaskType.MAP), 1);
-    ApplicationId applicationId = BuilderUtils.newApplicationId(1, 1);
+    ApplicationId applicationId = ApplicationId.newInstance(1, 1);
     ApplicationAttemptId applicationAttemptId = ApplicationAttemptId.newInstance(
         applicationId, 1);
     ContainerId containerId = ContainerId.newInstance(applicationAttemptId, 1);

View File

@@ -99,7 +99,6 @@ import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ContainerId;
 import org.apache.hadoop.yarn.api.records.Resource;
 import org.apache.hadoop.yarn.event.EventHandler;
-import org.apache.hadoop.yarn.util.BuilderUtils;
 import org.junit.Test;
 import org.mockito.ArgumentCaptor;
@@ -1407,7 +1406,7 @@ public class TestRecovery {
   private MapTaskImpl getMockMapTask(long clusterTimestamp, EventHandler eh) {
 
-    ApplicationId appId = BuilderUtils.newApplicationId(clusterTimestamp, 1);
+    ApplicationId appId = ApplicationId.newInstance(clusterTimestamp, 1);
     JobId jobId = MRBuilderUtils.newJobId(appId, 1);
 
     int partitions = 2;

View File

@@ -18,6 +18,11 @@
 
 package org.apache.hadoop.mapreduce.v2.app;
 
+import static org.mockito.Matchers.any;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
@@ -30,11 +35,8 @@ import org.apache.hadoop.yarn.SystemClock;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.event.Event;
 import org.apache.hadoop.yarn.event.EventHandler;
-import org.apache.hadoop.yarn.util.BuilderUtils;
 import org.junit.Test;
 
-import static org.mockito.Mockito.*;
-
 public class TestTaskHeartbeatHandler {
@@ -53,7 +55,7 @@ public class TestTaskHeartbeatHandler {
     hb.init(conf);
     hb.start();
     try {
-      ApplicationId appId = BuilderUtils.newApplicationId(0l, 5);
+      ApplicationId appId = ApplicationId.newInstance(0l, 5);
       JobId jobId = MRBuilderUtils.newJobId(appId, 4);
       TaskId tid = MRBuilderUtils.newTaskId(jobId, 3, TaskType.MAP);
       TaskAttemptId taid = MRBuilderUtils.newTaskAttemptId(tid, 2);

View File

@@ -84,7 +84,6 @@ import org.apache.hadoop.yarn.api.records.NodeId;
 import org.apache.hadoop.yarn.api.records.Resource;
 import org.apache.hadoop.yarn.event.Event;
 import org.apache.hadoop.yarn.event.EventHandler;
-import org.apache.hadoop.yarn.util.BuilderUtils;
 import org.junit.Test;
 import org.mockito.ArgumentCaptor;
@@ -248,7 +247,7 @@ public class TestTaskAttempt{
   private TaskAttemptImpl createMapTaskAttemptImplForTest(
       EventHandler eventHandler, TaskSplitMetaInfo taskSplitMetaInfo, Clock clock) {
-    ApplicationId appId = BuilderUtils.newApplicationId(1, 1);
+    ApplicationId appId = ApplicationId.newInstance(1, 1);
     JobId jobId = MRBuilderUtils.newJobId(appId, 1);
     TaskId taskId = MRBuilderUtils.newTaskId(jobId, 1, TaskType.MAP);
     TaskAttemptListener taListener = mock(TaskAttemptListener.class);
@@ -318,7 +317,7 @@ public class TestTaskAttempt{
   @Test
   public void testLaunchFailedWhileKilling() throws Exception {
-    ApplicationId appId = BuilderUtils.newApplicationId(1, 2);
+    ApplicationId appId = ApplicationId.newInstance(1, 2);
     ApplicationAttemptId appAttemptId =
       ApplicationAttemptId.newInstance(appId, 0);
     JobId jobId = MRBuilderUtils.newJobId(appId, 1);
@@ -368,7 +367,7 @@ public class TestTaskAttempt{
   @Test
   public void testContainerCleanedWhileRunning() throws Exception {
-    ApplicationId appId = BuilderUtils.newApplicationId(1, 2);
+    ApplicationId appId = ApplicationId.newInstance(1, 2);
     ApplicationAttemptId appAttemptId =
       ApplicationAttemptId.newInstance(appId, 0);
     JobId jobId = MRBuilderUtils.newJobId(appId, 1);
@@ -426,7 +425,7 @@ public class TestTaskAttempt{
   @Test
   public void testContainerCleanedWhileCommitting() throws Exception {
-    ApplicationId appId = BuilderUtils.newApplicationId(1, 2);
+    ApplicationId appId = ApplicationId.newInstance(1, 2);
     ApplicationAttemptId appAttemptId =
       ApplicationAttemptId.newInstance(appId, 0);
     JobId jobId = MRBuilderUtils.newJobId(appId, 1);
@@ -487,7 +486,7 @@ public class TestTaskAttempt{
   @Test
   public void testDoubleTooManyFetchFailure() throws Exception {
-    ApplicationId appId = BuilderUtils.newApplicationId(1, 2);
+    ApplicationId appId = ApplicationId.newInstance(1, 2);
     ApplicationAttemptId appAttemptId =
       ApplicationAttemptId.newInstance(appId, 0);
     JobId jobId = MRBuilderUtils.newJobId(appId, 1);
@@ -554,7 +553,7 @@ public class TestTaskAttempt{
   @Test
   public void testAppDiognosticEventOnUnassignedTask() throws Exception {
-    ApplicationId appId = BuilderUtils.newApplicationId(1, 2);
+    ApplicationId appId = ApplicationId.newInstance(1, 2);
     ApplicationAttemptId appAttemptId = ApplicationAttemptId.newInstance(
         appId, 0);
     JobId jobId = MRBuilderUtils.newJobId(appId, 1);
@@ -604,7 +603,7 @@ public class TestTaskAttempt{
   @Test
   public void testAppDiognosticEventOnNewTask() throws Exception {
-    ApplicationId appId = BuilderUtils.newApplicationId(1, 2);
+    ApplicationId appId = ApplicationId.newInstance(1, 2);
     ApplicationAttemptId appAttemptId = ApplicationAttemptId.newInstance(
         appId, 0);
     JobId jobId = MRBuilderUtils.newJobId(appId, 1);

View File

@@ -55,11 +55,8 @@ import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.yarn.SystemClock;
 import org.apache.hadoop.yarn.api.records.ApplicationAccessType;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.api.records.ContainerId;
 import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
-import org.apache.hadoop.yarn.api.records.Resource;
 import org.apache.hadoop.yarn.event.EventHandler;
-import org.apache.hadoop.yarn.util.BuilderUtils;
 import org.junit.Test;
 
 @SuppressWarnings({"rawtypes"})
@@ -79,7 +76,7 @@ public class TestTaskAttemptContainerRequest {
     Map<ApplicationAccessType, String> acls =
         new HashMap<ApplicationAccessType, String>(1);
     acls.put(ApplicationAccessType.VIEW_APP, "otheruser");
-    ApplicationId appId = BuilderUtils.newApplicationId(1, 1);
+    ApplicationId appId = ApplicationId.newInstance(1, 1);
     JobId jobId = MRBuilderUtils.newJobId(appId, 1);
     TaskId taskId = MRBuilderUtils.newTaskId(jobId, 1, TaskType.MAP);
     Path jobFile = mock(Path.class);
@@ -110,7 +107,6 @@ public class TestTaskAttemptContainerRequest {
         new SystemClock(), null);
 
     jobConf.set(MRJobConfig.APPLICATION_ATTEMPT_ID, taImpl.getID().toString());
-    ContainerId containerId = BuilderUtils.newContainerId(1, 1, 1, 1);
 
     ContainerLaunchContext launchCtx =
         TaskAttemptImpl.createContainerLaunchContext(acls,

View File

@@ -68,7 +68,6 @@ import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
 import org.apache.hadoop.yarn.ipc.HadoopYarnProtoRPC;
 import org.apache.hadoop.yarn.ipc.YarnRPC;
 import org.apache.hadoop.yarn.security.ContainerTokenIdentifier;
-import org.apache.hadoop.yarn.util.BuilderUtils;
 import org.junit.Test;
 
 public class TestContainerLauncher {
@@ -83,7 +82,7 @@ public class TestContainerLauncher {
   @Test
   public void testPoolSize() throws InterruptedException {
-    ApplicationId appId = BuilderUtils.newApplicationId(12345, 67);
+    ApplicationId appId = ApplicationId.newInstance(12345, 67);
     ApplicationAttemptId appAttemptId = ApplicationAttemptId.newInstance(
         appId, 3);
     JobId jobId = MRBuilderUtils.newJobId(appId, 8);
@@ -158,7 +157,7 @@ public class TestContainerLauncher {
   @Test
   public void testPoolLimits() throws InterruptedException {
-    ApplicationId appId = BuilderUtils.newApplicationId(12345, 67);
+    ApplicationId appId = ApplicationId.newInstance(12345, 67);
     ApplicationAttemptId appAttemptId = ApplicationAttemptId.newInstance(
         appId, 3);
     JobId jobId = MRBuilderUtils.newJobId(appId, 8);
@@ -378,7 +377,7 @@ public class TestContainerLauncher {
         throws IOException {
       ContainerTokenIdentifier containerTokenIdentifier =
-          BuilderUtils.newContainerTokenIdentifier(request.getContainerToken());
+          MRApp.newContainerTokenIdentifier(request.getContainerToken());
 
       // Validate that the container is what RM is giving.
       Assert.assertEquals(MRApp.NM_HOST + ":" + MRApp.NM_PORT,

View File

@@ -42,6 +42,7 @@ import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
 import org.apache.hadoop.mapreduce.v2.app.AppContext;
+import org.apache.hadoop.mapreduce.v2.app.MRApp;
 import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEventType;
 import org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncher.EventType;
 import org.apache.hadoop.mapreduce.v2.util.MRBuilderUtils;
@@ -65,7 +66,6 @@ import org.apache.hadoop.yarn.factories.RecordFactory;
 import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
 import org.apache.hadoop.yarn.ipc.YarnRPC;
 import org.apache.hadoop.yarn.security.ContainerTokenIdentifier;
-import org.apache.hadoop.yarn.util.BuilderUtils;
 import org.junit.Before;
 import org.junit.Test;
 import org.mockito.ArgumentCaptor;
@@ -127,12 +127,12 @@ public class TestContainerLauncherImpl {
       int id) {
     return ContainerId.newInstance(
         ApplicationAttemptId.newInstance(
-            BuilderUtils.newApplicationId(ts, appId), attemptId), id);
+            ApplicationId.newInstance(ts, appId), attemptId), id);
   }
 
   public static TaskAttemptId makeTaskAttemptId(long ts, int appId, int taskId,
       TaskType taskType, int id) {
-    ApplicationId aID = BuilderUtils.newApplicationId(ts, appId);
+    ApplicationId aID = ApplicationId.newInstance(ts, appId);
     JobId jID = MRBuilderUtils.newJobId(aID, id);
     TaskId tID = MRBuilderUtils.newTaskId(jID, taskId, taskType);
     return MRBuilderUtils.newTaskAttemptId(tID, id);
@@ -410,7 +410,7 @@ public class TestContainerLauncherImpl {
   private Token createNewContainerToken(ContainerId contId,
       String containerManagerAddr) {
     long currentTime = System.currentTimeMillis();
-    return BuilderUtils.newContainerToken(NodeId.newInstance("127.0.0.1",
+    return MRApp.newContainerToken(NodeId.newInstance("127.0.0.1",
         1234), "password".getBytes(), new ContainerTokenIdentifier(
         contId, containerManagerAddr, "user",
         Resource.newInstance(1024, 1),

View File

@@ -39,7 +39,6 @@ import org.apache.hadoop.yarn.api.records.Resource;
 import org.apache.hadoop.yarn.event.EventHandler;
 import org.apache.hadoop.yarn.exceptions.YarnRemoteException;
 import org.apache.hadoop.yarn.ipc.RPCUtil;
-import org.apache.hadoop.yarn.util.BuilderUtils;
 import org.junit.Assert;
 import org.junit.Test;
@@ -107,7 +106,7 @@ public class TestLocalContainerAllocator {
   }
 
   private static AppContext createAppContext() {
-    ApplicationId appId = BuilderUtils.newApplicationId(1, 1);
+    ApplicationId appId = ApplicationId.newInstance(1, 1);
     ApplicationAttemptId attemptId =
         ApplicationAttemptId.newInstance(appId, 1);
     Job job = mock(Job.class);

View File

@@ -37,7 +37,6 @@ import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenRenewer;
 import org.apache.hadoop.yarn.ipc.YarnRPC;
-import org.apache.hadoop.yarn.util.BuilderUtils;
 import org.apache.hadoop.yarn.util.Records;
 
 @InterfaceAudience.Private
@@ -55,9 +54,10 @@ public class MRDelegationTokenRenewer extends TokenRenewer {
   public long renew(Token<?> token, Configuration conf) throws IOException,
       InterruptedException {
 
-    org.apache.hadoop.yarn.api.records.Token dToken = BuilderUtils.newDelegationToken(
-        token.getIdentifier(), token.getKind().toString(), token.getPassword(),
-        token.getService().toString());
+    org.apache.hadoop.yarn.api.records.Token dToken =
+        org.apache.hadoop.yarn.api.records.Token.newInstance(
+            token.getIdentifier(), token.getKind().toString(),
+            token.getPassword(), token.getService().toString());
 
     MRClientProtocol histProxy = instantiateHistoryProxy(conf,
         SecurityUtil.getTokenServiceAddr(token));
@@ -76,9 +76,10 @@ public class MRDelegationTokenRenewer extends TokenRenewer {
   public void cancel(Token<?> token, Configuration conf) throws IOException,
       InterruptedException {
 
-    org.apache.hadoop.yarn.api.records.Token dToken = BuilderUtils.newDelegationToken(
-        token.getIdentifier(), token.getKind().toString(), token.getPassword(),
-        token.getService().toString());
+    org.apache.hadoop.yarn.api.records.Token dToken =
+        org.apache.hadoop.yarn.api.records.Token.newInstance(
+            token.getIdentifier(), token.getKind().toString(),
+            token.getPassword(), token.getService().toString());
 
     MRClientProtocol histProxy = instantiateHistoryProxy(conf,
         SecurityUtil.getTokenServiceAddr(token));
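renew() and cancel() now repeat the same four-argument conversion inline. If that duplication grows, it could be hoisted into a small private helper, e.g. (a sketch, not part of this commit):

    private static org.apache.hadoop.yarn.api.records.Token toRecordToken(
        Token<?> token) {
      // Wrap a common-security token into a YARN records Token,
      // mirroring what renew() and cancel() do above.
      return org.apache.hadoop.yarn.api.records.Token.newInstance(
          token.getIdentifier(), token.getKind().toString(),
          token.getPassword(), token.getService().toString());
    }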

View File

@@ -30,7 +30,6 @@ import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
 import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ContainerId;
-import org.apache.hadoop.yarn.util.BuilderUtils;
 import org.apache.hadoop.yarn.util.Records;
 
 public class MRBuilderUtils {
@@ -43,7 +42,7 @@ public class MRBuilderUtils {
   }
 
   public static JobId newJobId(long clusterTs, int appIdInt, int id) {
-    ApplicationId appId = BuilderUtils.newApplicationId(clusterTs, appIdInt);
+    ApplicationId appId = ApplicationId.newInstance(clusterTs, appIdInt);
     return MRBuilderUtils.newJobId(appId, id);
   }

View File

@@ -24,7 +24,7 @@ import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 
 import org.apache.hadoop.mapreduce.v2.util.MRBuilderUtils;
-import org.apache.hadoop.yarn.util.BuilderUtils;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.junit.Test;
 
 public class TestIds {
@@ -121,7 +121,7 @@ public class TestIds {
   private JobId createJobId(long clusterTimestamp, int idInt) {
     return MRBuilderUtils.newJobId(
-        BuilderUtils.newApplicationId(clusterTimestamp, idInt), idInt);
+        ApplicationId.newInstance(clusterTimestamp, idInt), idInt);
   }
 
   private TaskId createTaskId(long clusterTimestamp, int jobIdInt,

View File

@@ -18,7 +18,7 @@
 
 package org.apache.hadoop.mapreduce.v2.hs.webapp;
 
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
 
 import java.net.URI;
 import java.net.URISyntaxException;
@@ -26,7 +26,6 @@ import java.net.URISyntaxException;
 import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.hadoop.yarn.util.BuilderUtils;
 import org.junit.Test;
 
 public class TestMapReduceTrackingUriPlugin {
@@ -37,7 +36,7 @@ public class TestMapReduceTrackingUriPlugin {
     conf.set(JHAdminConfig.MR_HISTORY_WEBAPP_ADDRESS, historyAddress);
     MapReduceTrackingUriPlugin plugin = new MapReduceTrackingUriPlugin();
     plugin.setConf(conf);
-    ApplicationId id = BuilderUtils.newApplicationId(6384623l, 5);
+    ApplicationId id = ApplicationId.newInstance(6384623l, 5);
     String jobSuffix = id.toString().replaceFirst("^application_", "job_");
     URI expected =
         new URI("http://" + historyAddress + "/jobhistory/job/" + jobSuffix);

View File

@@ -83,7 +83,6 @@ import org.apache.hadoop.yarn.factories.RecordFactory;
 import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
 import org.apache.hadoop.yarn.ipc.YarnRPC;
 import org.apache.hadoop.yarn.service.AbstractService;
-import org.apache.hadoop.yarn.util.BuilderUtils;
 import org.apache.hadoop.yarn.util.Records;
 import org.apache.hadoop.yarn.webapp.WebApp;
 import org.apache.hadoop.yarn.webapp.WebApps;
@@ -343,9 +342,10 @@ public class HistoryClientService extends AbstractService {
       Token<MRDelegationTokenIdentifier> realJHSToken =
          new Token<MRDelegationTokenIdentifier>(tokenIdentifier,
               jhsDTSecretManager);
-      org.apache.hadoop.yarn.api.records.Token mrDToken = BuilderUtils.newDelegationToken(
-          realJHSToken.getIdentifier(), realJHSToken.getKind().toString(),
-          realJHSToken.getPassword(), realJHSToken.getService().toString());
+      org.apache.hadoop.yarn.api.records.Token mrDToken =
+          org.apache.hadoop.yarn.api.records.Token.newInstance(
+              realJHSToken.getIdentifier(), realJHSToken.getKind().toString(),
+              realJHSToken.getPassword(), realJHSToken.getService().toString());
       response.setDelegationToken(mrDToken);
       return response;
     }

View File

@@ -43,7 +43,6 @@ import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
 import org.apache.hadoop.yarn.api.records.ContainerId;
 import org.apache.hadoop.yarn.event.EventHandler;
 import org.apache.hadoop.yarn.service.Service;
-import org.apache.hadoop.yarn.util.BuilderUtils;
 import org.junit.Test;
 
 public class TestJobHistoryEvents {
@@ -169,7 +168,7 @@ public class TestJobHistoryEvents {
         TaskAttemptState.SUCCEEDED, attempt.getState());
     Assert.assertNotNull(attempt.getAssignedContainerID());
     //Verify the wrong ctor is not being used. Remove after mrv1 is removed.
-    ContainerId fakeCid = BuilderUtils.newContainerId(-1, -1, -1, -1);
+    ContainerId fakeCid = MRApp.newContainerId(-1, -1, -1, -1);
     Assert.assertFalse(attempt.getAssignedContainerID().equals(fakeCid));
     //Verify complete contianerManagerAddress
     Assert.assertEquals(MRApp.NM_HOST + ":" + MRApp.NM_PORT,

View File

@@ -69,15 +69,14 @@ import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent;
 import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEventType;
 import org.apache.hadoop.mapreduce.v2.hs.HistoryFileManager.HistoryFileInfo;
 import org.apache.hadoop.mapreduce.v2.hs.TestJobHistoryEvents.MRAppWithHistory;
+import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobsInfo;
 import org.apache.hadoop.mapreduce.v2.jobhistory.FileNameIndexUtils;
 import org.apache.hadoop.mapreduce.v2.jobhistory.JobHistoryUtils;
 import org.apache.hadoop.mapreduce.v2.jobhistory.JobIndexInfo;
-import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobsInfo;
 import org.apache.hadoop.net.DNSToSwitchMapping;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.yarn.api.records.ContainerId;
 import org.apache.hadoop.yarn.service.Service;
-import org.apache.hadoop.yarn.util.BuilderUtils;
 import org.apache.hadoop.yarn.util.RackResolver;
 import org.junit.Test;
 import org.mockito.Mockito;
@@ -299,7 +298,7 @@ public class TestJobHistoryParsing {
     Assert.assertTrue(amInfo.getStartTime() <= System.currentTimeMillis()
         && amInfo.getStartTime() >= amStartTimeEst);
 
-    ContainerId fakeCid = BuilderUtils.newContainerId(-1, -1, -1, -1);
+    ContainerId fakeCid = MRApp.newContainerId(-1, -1, -1, -1);
     // Assert at taskAttempt level
     for (TaskInfo taskInfo : allTasks.values()) {
       int taskAttemptCount = taskInfo.getAllTaskAttempts().size();

View File

@@ -40,6 +40,7 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
 import org.apache.hadoop.mapreduce.v2.app.AppContext;
+import org.apache.hadoop.mapreduce.v2.app.MRApp;
 import org.apache.hadoop.mapreduce.v2.app.MockJobs;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
 import org.apache.hadoop.mapreduce.v2.app.webapp.TestAMWebApp;
@@ -50,7 +51,6 @@ import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.NodeId;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.event.EventHandler;
-import org.apache.hadoop.yarn.util.BuilderUtils;
 import org.apache.hadoop.yarn.webapp.log.AggregatedLogsPage;
 import org.apache.hadoop.yarn.webapp.test.WebAppTests;
 import org.junit.Test;
@@ -241,7 +241,7 @@ public class TestHSWebApp {
     TestAppContext ctx = new TestAppContext();
     Map<String, String> params = new HashMap<String, String>();
 
-    params.put(CONTAINER_ID, BuilderUtils.newContainerId(1, 1, 333, 1)
+    params.put(CONTAINER_ID, MRApp.newContainerId(1, 1, 333, 1)
         .toString());
     params.put(NM_NODENAME,
         NodeId.newInstance(MockJobs.NM_HOST, MockJobs.NM_PORT).toString());
@@ -269,7 +269,7 @@ public class TestHSWebApp {
     params.put("start", "-2048");
     params.put("end", "-1024");
     params.put(CONTAINER_LOG_TYPE, "syslog");
-    params.put(CONTAINER_ID, BuilderUtils.newContainerId(1, 1, 333, 1)
+    params.put(CONTAINER_ID, MRApp.newContainerId(1, 1, 333, 1)
        .toString());
     params.put(NM_NODENAME,
         NodeId.newInstance(MockJobs.NM_HOST, MockJobs.NM_PORT).toString());
@@ -300,7 +300,7 @@ public class TestHSWebApp {
     params.put("start", "foo");
     params.put("end", "bar");
-    params.put(CONTAINER_ID, BuilderUtils.newContainerId(1, 1, 333, 1)
+    params.put(CONTAINER_ID, MRApp.newContainerId(1, 1, 333, 1)
        .toString());
     params.put(NM_NODENAME,
         NodeId.newInstance(MockJobs.NM_HOST, MockJobs.NM_PORT).toString());

View File

@@ -69,7 +69,6 @@ import org.apache.hadoop.yarn.api.records.YarnApplicationState;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.factories.RecordFactory;
 import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
-import org.apache.hadoop.yarn.util.BuilderUtils;
 
 public class NotRunningJob implements MRClientProtocol {

View File

@@ -57,7 +57,6 @@ import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
 import org.apache.hadoop.yarn.api.records.YarnApplicationState;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.exceptions.YarnRemoteException;
-import org.apache.hadoop.yarn.util.BuilderUtils;
 import org.apache.hadoop.yarn.util.Records;
 import org.junit.Test;
 import org.junit.runner.RunWith;
@@ -425,7 +424,7 @@ public class TestClientServiceDelegate {
   }
 
   private ApplicationReport getFinishedApplicationReport() {
-    ApplicationId appId = BuilderUtils.newApplicationId(1234, 5);
+    ApplicationId appId = ApplicationId.newInstance(1234, 5);
     ApplicationAttemptId attemptId = ApplicationAttemptId.newInstance(
         appId, 0);
     return ApplicationReport.newInstance(appId, attemptId, "user", "queue",
@@ -435,7 +434,7 @@ public class TestClientServiceDelegate {
   }
 
   private ApplicationReport getRunningApplicationReport(String host, int port) {
-    ApplicationId appId = BuilderUtils.newApplicationId(1234, 5);
+    ApplicationId appId = ApplicationId.newInstance(1234, 5);
     ApplicationAttemptId attemptId = ApplicationAttemptId.newInstance(
         appId, 0);
     return ApplicationReport.newInstance(appId, attemptId, "user", "queue",

View File

@@ -86,7 +86,6 @@ import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.factories.RecordFactory;
 import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
 import org.apache.hadoop.yarn.security.client.RMDelegationTokenIdentifier;
-import org.apache.hadoop.yarn.util.BuilderUtils;
 import org.apache.hadoop.yarn.util.Records;
 import org.apache.log4j.Appender;
 import org.apache.log4j.Layout;
@@ -282,11 +281,12 @@ public class TestYARNRunner extends TestCase {
     token.setKind(RMDelegationTokenIdentifier.KIND_NAME);
 
     // Setup mock history token
-    org.apache.hadoop.yarn.api.records.Token historyToken = BuilderUtils.newDelegationToken(
-        new byte[0], MRDelegationTokenIdentifier.KIND_NAME.toString(),
-        new byte[0], hsTokenSevice.toString());
-    GetDelegationTokenResponse getDtResponse = Records
-        .newRecord(GetDelegationTokenResponse.class);
+    org.apache.hadoop.yarn.api.records.Token historyToken =
+        org.apache.hadoop.yarn.api.records.Token.newInstance(new byte[0],
+            MRDelegationTokenIdentifier.KIND_NAME.toString(), new byte[0],
+            hsTokenSevice.toString());
+    GetDelegationTokenResponse getDtResponse =
+        Records.newRecord(GetDelegationTokenResponse.class);
     getDtResponse.setDelegationToken(historyToken);
 
     // mock services

View File

@@ -65,7 +65,6 @@ import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.ContainerLocalizer;
-import org.apache.hadoop.yarn.util.BuilderUtils;
 import org.jboss.netty.channel.Channel;
 import org.jboss.netty.channel.ChannelFuture;
 import org.jboss.netty.channel.ChannelHandlerContext;
@@ -285,7 +284,7 @@ public class TestShuffleHandler {
     File absLogDir = new File("target",
         TestShuffleHandler.class.getSimpleName() + "LocDir").getAbsoluteFile();
     conf.set(YarnConfiguration.NM_LOCAL_DIRS, absLogDir.getAbsolutePath());
-    ApplicationId appId = BuilderUtils.newApplicationId(12345, 1);
+    ApplicationId appId = ApplicationId.newInstance(12345, 1);
     System.out.println(appId.toString());
     String appAttemptId = "attempt_12345_1_m_1_0";
     String user = "randomUser";

View File

@ -67,6 +67,9 @@ Release 2.1.0-beta - UNRELEASED
YARN-720. container-log4j.properties should not refer to mapreduce YARN-720. container-log4j.properties should not refer to mapreduce
property names. (Zhijie Shen via sseth) property names. (Zhijie Shen via sseth)
YARN-748. Moved BuilderUtils from yarn-common to yarn-server-common for
eventual retirement. (Jian He via vinodkv)
NEW FEATURES NEW FEATURES
YARN-482. FS: Extend SchedulingMode to intermediate queues. YARN-482. FS: Extend SchedulingMode to intermediate queues.

View File

@ -27,6 +27,7 @@ import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.Container; import org.apache.hadoop.yarn.api.records.Container;
import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ResourceRequest; import org.apache.hadoop.yarn.api.records.ResourceRequest;
import org.apache.hadoop.yarn.util.Records;
/** /**
* <p>The core request sent by the <code>ApplicationMaster</code> to the * <p>The core request sent by the <code>ApplicationMaster</code> to the
@ -55,7 +56,20 @@ import org.apache.hadoop.yarn.api.records.ResourceRequest;
*/ */
@Public @Public
@Stable @Stable
public interface AllocateRequest { public abstract class AllocateRequest {
public static AllocateRequest newInstance(
ApplicationAttemptId applicationAttemptId, int responseID,
float appProgress, List<ResourceRequest> resourceAsk,
List<ContainerId> containersToBeReleased) {
AllocateRequest allocateRequest = Records.newRecord(AllocateRequest.class);
allocateRequest.setApplicationAttemptId(applicationAttemptId);
allocateRequest.setResponseId(responseID);
allocateRequest.setProgress(appProgress);
allocateRequest.setAskList(resourceAsk);
allocateRequest.setReleaseList(containersToBeReleased);
return allocateRequest;
}
/** /**
* Get the <code>ApplicationAttemptId</code> being managed by the * Get the <code>ApplicationAttemptId</code> being managed by the
@ -65,7 +79,7 @@ public interface AllocateRequest {
*/ */
@Public @Public
@Stable @Stable
ApplicationAttemptId getApplicationAttemptId(); public abstract ApplicationAttemptId getApplicationAttemptId();
/** /**
* Set the <code>ApplicationAttemptId</code> being managed by the * Set the <code>ApplicationAttemptId</code> being managed by the
@ -75,7 +89,7 @@ public interface AllocateRequest {
*/ */
@Public @Public
@Stable @Stable
void setApplicationAttemptId(ApplicationAttemptId applicationAttemptId); public abstract void setApplicationAttemptId(ApplicationAttemptId applicationAttemptId);
/** /**
* Get the <em>response id</em> used to track duplicate responses. * Get the <em>response id</em> used to track duplicate responses.
@ -83,7 +97,7 @@ public interface AllocateRequest {
*/ */
@Public @Public
@Stable @Stable
int getResponseId(); public abstract int getResponseId();
/** /**
* Set the <em>response id</em> used to track duplicate responses. * Set the <em>response id</em> used to track duplicate responses.
@ -91,7 +105,7 @@ public interface AllocateRequest {
*/ */
@Public @Public
@Stable @Stable
void setResponseId(int id); public abstract void setResponseId(int id);
/** /**
* Get the <em>current progress</em> of application. * Get the <em>current progress</em> of application.
@ -99,7 +113,7 @@ public interface AllocateRequest {
*/ */
@Public @Public
@Stable @Stable
float getProgress(); public abstract float getProgress();
/** /**
* Set the <em>current progress</em> of application * Set the <em>current progress</em> of application
@ -107,7 +121,7 @@ public interface AllocateRequest {
*/ */
@Public @Public
@Stable @Stable
void setProgress(float progress); public abstract void setProgress(float progress);
/** /**
* Get the list of <code>ResourceRequest</code> to update the * Get the list of <code>ResourceRequest</code> to update the
@ -116,7 +130,7 @@ public interface AllocateRequest {
*/ */
@Public @Public
@Stable @Stable
List<ResourceRequest> getAskList(); public abstract List<ResourceRequest> getAskList();
/** /**
* Set list of <code>ResourceRequest</code> to update the * Set list of <code>ResourceRequest</code> to update the
@ -127,7 +141,7 @@ public interface AllocateRequest {
*/ */
@Public @Public
@Stable @Stable
void setAskList(List<ResourceRequest> resourceRequests); public abstract void setAskList(List<ResourceRequest> resourceRequests);
/** /**
* Get the list of <code>ContainerId</code> of containers being * Get the list of <code>ContainerId</code> of containers being
@ -137,7 +151,7 @@ public interface AllocateRequest {
*/ */
@Public @Public
@Stable @Stable
List<ContainerId> getReleaseList(); public abstract List<ContainerId> getReleaseList();
/** /**
* Set the list of <code>ContainerId</code> of containers being * Set the list of <code>ContainerId</code> of containers being
@ -148,5 +162,5 @@ public interface AllocateRequest {
*/ */
@Public @Public
@Stable @Stable
void setReleaseList(List<ContainerId> releaseContainers); public abstract void setReleaseList(List<ContainerId> releaseContainers);
} }
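
With AllocateRequest now an abstract class, the static factory above replaces the removed BuilderUtils.newAllocateRequest helper. A minimal caller-side sketch, assuming the attempt id and last response id come from the AM's registration and previous heartbeat (the helper name is hypothetical, not part of the patch):

import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ResourceRequest;

public class AllocateRequestSketch {
  // Builds the next heartbeat request; empty ask/release lists mean
  // "no new asks, nothing to release".
  static AllocateRequest nextHeartbeat(ApplicationAttemptId attemptId,
      int lastResponseId, float progress) {
    return AllocateRequest.newInstance(attemptId, lastResponseId, progress,
        new ArrayList<ResourceRequest>(), new ArrayList<ContainerId>());
  }
}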

View File

@ -31,6 +31,7 @@ import org.apache.hadoop.yarn.api.records.ContainerStatus;
import org.apache.hadoop.yarn.api.records.NodeReport; import org.apache.hadoop.yarn.api.records.NodeReport;
import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.api.records.Token; import org.apache.hadoop.yarn.api.records.Token;
import org.apache.hadoop.yarn.util.Records;
/** /**
 * <p>The response sent by the <code>ResourceManager</code> to the * <p>The response sent by the <code>ResourceManager</code> to the
@ -58,7 +59,25 @@ import org.apache.hadoop.yarn.api.records.Token;
*/ */
@Public @Public
@Stable @Stable
public interface AllocateResponse { public abstract class AllocateResponse {
public static AllocateResponse newInstance(int responseId,
List<ContainerStatus> completedContainers,
List<Container> allocatedContainers, List<NodeReport> updatedNodes,
Resource availResources, boolean reboot, int numClusterNodes,
PreemptionMessage preempt) {
AllocateResponse response = Records.newRecord(AllocateResponse.class);
response.setNumClusterNodes(numClusterNodes);
response.setResponseId(responseId);
response.setCompletedContainersStatuses(completedContainers);
response.setAllocatedContainers(allocatedContainers);
response.setUpdatedNodes(updatedNodes);
response.setAvailableResources(availResources);
response.setReboot(reboot);
response.setPreemptionMessage(preempt);
return response;
}
/** /**
* Should the <code>ApplicationMaster</code> reboot for being horribly * Should the <code>ApplicationMaster</code> reboot for being horribly
 * out-of-sync with the <code>ResourceManager</code> as deemed by * out-of-sync with the <code>ResourceManager</code> as deemed by
@ -69,11 +88,11 @@ public interface AllocateResponse {
*/ */
@Public @Public
@Stable @Stable
public boolean getReboot(); public abstract boolean getReboot();
@Private @Private
@Unstable @Unstable
public void setReboot(boolean reboot); public abstract void setReboot(boolean reboot);
/** /**
* Get the <em>last response id</em>. * Get the <em>last response id</em>.
@ -81,11 +100,11 @@ public interface AllocateResponse {
*/ */
@Public @Public
@Stable @Stable
public int getResponseId(); public abstract int getResponseId();
@Private @Private
@Unstable @Unstable
public void setResponseId(int responseId); public abstract void setResponseId(int responseId);
/** /**
* Get the list of <em>newly allocated</em> <code>Container</code> by the * Get the list of <em>newly allocated</em> <code>Container</code> by the
@ -94,7 +113,7 @@ public interface AllocateResponse {
*/ */
@Public @Public
@Stable @Stable
public List<Container> getAllocatedContainers(); public abstract List<Container> getAllocatedContainers();
/** /**
* Set the list of <em>newly allocated</em> <code>Container</code> by the * Set the list of <em>newly allocated</em> <code>Container</code> by the
@ -103,7 +122,7 @@ public interface AllocateResponse {
*/ */
@Public @Public
@Stable @Stable
public void setAllocatedContainers(List<Container> containers); public abstract void setAllocatedContainers(List<Container> containers);
/** /**
* Get the <em>available headroom</em> for resources in the cluster for the * Get the <em>available headroom</em> for resources in the cluster for the
@ -113,11 +132,11 @@ public interface AllocateResponse {
*/ */
@Public @Public
@Stable @Stable
public Resource getAvailableResources(); public abstract Resource getAvailableResources();
@Private @Private
@Unstable @Unstable
public void setAvailableResources(Resource limit); public abstract void setAvailableResources(Resource limit);
/** /**
* Get the list of <em>completed containers' statuses</em>. * Get the list of <em>completed containers' statuses</em>.
@ -125,11 +144,11 @@ public interface AllocateResponse {
*/ */
@Public @Public
@Stable @Stable
public List<ContainerStatus> getCompletedContainersStatuses(); public abstract List<ContainerStatus> getCompletedContainersStatuses();
@Private @Private
@Unstable @Unstable
public void setCompletedContainersStatuses(List<ContainerStatus> containers); public abstract void setCompletedContainersStatuses(List<ContainerStatus> containers);
/** /**
* Get the list of <em>updated <code>NodeReport</code>s</em>. Updates could * Get the list of <em>updated <code>NodeReport</code>s</em>. Updates could
@ -138,11 +157,11 @@ public interface AllocateResponse {
*/ */
@Public @Public
@Unstable @Unstable
public List<NodeReport> getUpdatedNodes(); public abstract List<NodeReport> getUpdatedNodes();
@Private @Private
@Unstable @Unstable
public void setUpdatedNodes(final List<NodeReport> updatedNodes); public abstract void setUpdatedNodes(final List<NodeReport> updatedNodes);
/** /**
* Get the number of hosts available on the cluster. * Get the number of hosts available on the cluster.
@ -150,11 +169,11 @@ public interface AllocateResponse {
*/ */
@Public @Public
@Stable @Stable
public int getNumClusterNodes(); public abstract int getNumClusterNodes();
@Private @Private
@Unstable @Unstable
public void setNumClusterNodes(int numNodes); public abstract void setNumClusterNodes(int numNodes);
/** /**
* Get the description of containers owned by the AM, but requested back by * Get the description of containers owned by the AM, but requested back by
@ -172,15 +191,15 @@ public interface AllocateResponse {
*/ */
@Public @Public
@Evolving @Evolving
public PreemptionMessage getPreemptionMessage(); public abstract PreemptionMessage getPreemptionMessage();
@Private @Private
@Unstable @Unstable
public void setPreemptionMessage(PreemptionMessage request); public abstract void setPreemptionMessage(PreemptionMessage request);
@Public @Public
@Stable @Stable
public void setNMTokens(List<Token> nmTokens); public abstract void setNMTokens(List<Token> nmTokens);
/** /**
* Get the list of NMTokens required for communicating with NM. New NMTokens * Get the list of NMTokens required for communicating with NM. New NMTokens
@ -195,6 +214,6 @@ public interface AllocateResponse {
*/ */
@Public @Public
@Stable @Stable
public List<Token> getNMTokens(); public abstract List<Token> getNMTokens();
} }
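
The response record gets the same treatment, so tests that used BuilderUtils.newAllocateResponse can stub one out through the factory instead. A sketch of an "empty" heartbeat answer, with the argument order taken from newInstance above (emptyResponse is a hypothetical name):

import java.util.ArrayList;
import org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse;
import org.apache.hadoop.yarn.api.records.Container;
import org.apache.hadoop.yarn.api.records.ContainerStatus;
import org.apache.hadoop.yarn.api.records.NodeReport;
import org.apache.hadoop.yarn.api.records.Resource;

public class AllocateResponseSketch {
  // No completed or newly allocated containers, no node updates, zero
  // headroom, no reboot, one cluster node, no preemption message.
  static AllocateResponse emptyResponse(int responseId) {
    return AllocateResponse.newInstance(responseId,
        new ArrayList<ContainerStatus>(), new ArrayList<Container>(),
        new ArrayList<NodeReport>(), Resource.newInstance(0, 0),
        false, 1, null);
  }
}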

View File

@ -26,7 +26,6 @@ import java.util.List;
import org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest; import org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ProtoBase;
import org.apache.hadoop.yarn.api.records.ResourceRequest; import org.apache.hadoop.yarn.api.records.ResourceRequest;
import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationAttemptIdPBImpl; import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationAttemptIdPBImpl;
import org.apache.hadoop.yarn.api.records.impl.pb.ContainerIdPBImpl; import org.apache.hadoop.yarn.api.records.impl.pb.ContainerIdPBImpl;
@ -39,7 +38,7 @@ import org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateRequestProtoOrBuil
public class AllocateRequestPBImpl extends ProtoBase<AllocateRequestProto> implements AllocateRequest { public class AllocateRequestPBImpl extends AllocateRequest {
AllocateRequestProto proto = AllocateRequestProto.getDefaultInstance(); AllocateRequestProto proto = AllocateRequestProto.getDefaultInstance();
AllocateRequestProto.Builder builder = null; AllocateRequestProto.Builder builder = null;
boolean viaProto = false; boolean viaProto = false;
@ -65,6 +64,26 @@ public class AllocateRequestPBImpl extends ProtoBase<AllocateRequestProto> imple
return proto; return proto;
} }
@Override
public int hashCode() {
return getProto().hashCode();
}
@Override
public boolean equals(Object other) {
if (other == null)
return false;
if (other.getClass().isAssignableFrom(this.getClass())) {
return this.getProto().equals(this.getClass().cast(other).getProto());
}
return false;
}
@Override
public String toString() {
return getProto().toString().replaceAll("\\n", ", ").replaceAll("\\s+", " ");
}
private void mergeLocalToBuilder() { private void mergeLocalToBuilder() {
if (this.applicationAttemptID != null) { if (this.applicationAttemptID != null) {
builder.setApplicationAttemptId(convertToProtoFormat(this.applicationAttemptID)); builder.setApplicationAttemptId(convertToProtoFormat(this.applicationAttemptID));
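
Since equals(), hashCode(), and toString() now live on the PBImpl and delegate to the underlying protobuf, two records compare equal exactly when their serialized protos do, regardless of how each instance was produced. A sketch of the resulting behaviour (attemptId is an assumed ApplicationAttemptId in scope):

// Same fields => same proto => equal records with matching hash codes.
AllocateRequest r1 = AllocateRequest.newInstance(attemptId, 1, 0f,
    new ArrayList<ResourceRequest>(), new ArrayList<ContainerId>());
AllocateRequest r2 = AllocateRequest.newInstance(attemptId, 1, 0f,
    new ArrayList<ResourceRequest>(), new ArrayList<ContainerId>());
assert r1.equals(r2) && r1.hashCode() == r2.hashCode();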

View File

@ -29,7 +29,6 @@ import org.apache.hadoop.yarn.api.protocolrecords.PreemptionMessage;
import org.apache.hadoop.yarn.api.records.Container; import org.apache.hadoop.yarn.api.records.Container;
import org.apache.hadoop.yarn.api.records.ContainerStatus; import org.apache.hadoop.yarn.api.records.ContainerStatus;
import org.apache.hadoop.yarn.api.records.NodeReport; import org.apache.hadoop.yarn.api.records.NodeReport;
import org.apache.hadoop.yarn.api.records.ProtoBase;
import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.api.records.Token; import org.apache.hadoop.yarn.api.records.Token;
import org.apache.hadoop.yarn.api.records.impl.pb.ContainerPBImpl; import org.apache.hadoop.yarn.api.records.impl.pb.ContainerPBImpl;
@ -46,8 +45,7 @@ import org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProtoOrBui
import org.apache.hadoop.yarn.proto.YarnServiceProtos.PreemptionMessageProto; import org.apache.hadoop.yarn.proto.YarnServiceProtos.PreemptionMessageProto;
public class AllocateResponsePBImpl extends ProtoBase<AllocateResponseProto> public class AllocateResponsePBImpl extends AllocateResponse {
implements AllocateResponse {
AllocateResponseProto proto = AllocateResponseProto.getDefaultInstance(); AllocateResponseProto proto = AllocateResponseProto.getDefaultInstance();
AllocateResponseProto.Builder builder = null; AllocateResponseProto.Builder builder = null;
boolean viaProto = false; boolean viaProto = false;
@ -78,6 +76,26 @@ public class AllocateResponsePBImpl extends ProtoBase<AllocateResponseProto>
return proto; return proto;
} }
@Override
public int hashCode() {
return getProto().hashCode();
}
@Override
public boolean equals(Object other) {
if (other == null)
return false;
if (other.getClass().isAssignableFrom(this.getClass())) {
return this.getProto().equals(this.getClass().cast(other).getProto());
}
return false;
}
@Override
public String toString() {
return getProto().toString().replaceAll("\\n", ", ").replaceAll("\\s+", " ");
}
private synchronized void mergeLocalToBuilder() { private synchronized void mergeLocalToBuilder() {
if (this.allocatedContainers != null) { if (this.allocatedContainers != null) {
builder.clearAllocatedContainers(); builder.clearAllocatedContainers();
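
The toString() override here flattens protobuf's newline-separated text format onto one line, which keeps heartbeat responses readable in single-line log statements. The normalization in isolation:

// What the two replaceAll calls do to protobuf text format.
String text = "response_id: 1\nnum_cluster_nodes: 1";
String oneLine = text.replaceAll("\\n", ", ").replaceAll("\\s+", " ");
// oneLine -> "response_id: 1, num_cluster_nodes: 1"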

View File

@ -18,6 +18,8 @@
package org.apache.hadoop.yarn.api.records; package org.apache.hadoop.yarn.api.records;
import java.io.Serializable;
import org.apache.hadoop.classification.InterfaceAudience.Public; import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Stable; import org.apache.hadoop.classification.InterfaceStability.Stable;
import org.apache.hadoop.yarn.api.AMRMProtocol; import org.apache.hadoop.yarn.api.AMRMProtocol;
@ -63,6 +65,25 @@ public abstract class ResourceRequest implements Comparable<ResourceRequest> {
return request; return request;
} }
public static class ResourceRequestComparator implements
java.util.Comparator<ResourceRequest>, Serializable {
@Override
public int compare(ResourceRequest r1, ResourceRequest r2) {
// Compare priority, host and capability
int ret = r1.getPriority().compareTo(r2.getPriority());
if (ret == 0) {
String h1 = r1.getResourceName();
String h2 = r2.getResourceName();
ret = h1.compareTo(h2);
}
if (ret == 0) {
ret = r1.getCapability().compareTo(r2.getCapability());
}
return ret;
}
}
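
The comparator, moved here from BuilderUtils, deliberately ignores numContainers, so a TreeSet keyed on it holds at most one outstanding request per (priority, resourceName, capability) triple. A sketch, assuming priority and capability values are in scope:

// The second add() is rejected as a duplicate: it differs only in
// numContainers, which the comparator does not inspect.
Set<ResourceRequest> ask = new TreeSet<ResourceRequest>(
    new ResourceRequest.ResourceRequestComparator());
ask.add(ResourceRequest.newInstance(priority, "*", capability, 1));
ask.add(ResourceRequest.newInstance(priority, "*", capability, 5));
assert ask.size() == 1;
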
/** /**
* The constant string representing no locality. * The constant string representing no locality.
* It should be used by all references that want to pass an arbitrary host * It should be used by all references that want to pass an arbitrary host

View File

@ -60,7 +60,6 @@ import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
import org.apache.hadoop.yarn.ipc.YarnRPC; import org.apache.hadoop.yarn.ipc.YarnRPC;
import org.apache.hadoop.yarn.service.AbstractService; import org.apache.hadoop.yarn.service.AbstractService;
import org.apache.hadoop.yarn.util.BuilderUtils;
// TODO check inputs for null etc. YARN-654 // TODO check inputs for null etc. YARN-654
@ -86,7 +85,7 @@ public class AMRMClientImpl<T extends ContainerRequest>
ResourceRequestInfo(Priority priority, String resourceName, ResourceRequestInfo(Priority priority, String resourceName,
Resource capability) { Resource capability) {
remoteRequest = BuilderUtils.newResourceRequest(priority, resourceName, remoteRequest = ResourceRequest.newInstance(priority, resourceName,
capability, 0); capability, 0);
containerRequests = new LinkedHashSet<T>(); containerRequests = new LinkedHashSet<T>();
} }
@ -143,7 +142,7 @@ public class AMRMClientImpl<T extends ContainerRequest>
new TreeMap<Priority, Map<String, TreeMap<Resource, ResourceRequestInfo>>>(); new TreeMap<Priority, Map<String, TreeMap<Resource, ResourceRequestInfo>>>();
protected final Set<ResourceRequest> ask = new TreeSet<ResourceRequest>( protected final Set<ResourceRequest> ask = new TreeSet<ResourceRequest>(
new org.apache.hadoop.yarn.util.BuilderUtils.ResourceRequestComparator()); new org.apache.hadoop.yarn.api.records.ResourceRequest.ResourceRequestComparator());
protected final Set<ContainerId> release = new TreeSet<ContainerId>(); protected final Set<ContainerId> release = new TreeSet<ContainerId>();
public AMRMClientImpl(ApplicationAttemptId appAttemptId) { public AMRMClientImpl(ApplicationAttemptId appAttemptId) {
@ -227,9 +226,9 @@ public class AMRMClientImpl<T extends ContainerRequest>
// optimistically clear this collection assuming no RPC failure // optimistically clear this collection assuming no RPC failure
ask.clear(); ask.clear();
release.clear(); release.clear();
allocateRequest = BuilderUtils allocateRequest =
.newAllocateRequest(appAttemptId, lastResponseId, progressIndicator, AllocateRequest.newInstance(appAttemptId, lastResponseId,
askList, releaseList); progressIndicator, askList, releaseList);
} }
allocateResponse = rmClient.allocate(allocateRequest); allocateResponse = rmClient.allocate(allocateRequest);

View File

@ -56,7 +56,6 @@ import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.YarnRemoteException; import org.apache.hadoop.yarn.exceptions.YarnRemoteException;
import org.apache.hadoop.yarn.server.MiniYARNCluster; import org.apache.hadoop.yarn.server.MiniYARNCluster;
import org.apache.hadoop.yarn.service.Service.STATE; import org.apache.hadoop.yarn.service.Service.STATE;
import org.apache.hadoop.yarn.util.BuilderUtils;
import org.apache.hadoop.yarn.util.Records; import org.apache.hadoop.yarn.util.Records;
import org.junit.After; import org.junit.After;
import org.junit.AfterClass; import org.junit.AfterClass;
@ -97,8 +96,9 @@ public class TestAMRMClient {
// get node info // get node info
nodeReports = yarnClient.getNodeReports(); nodeReports = yarnClient.getNodeReports();
priority = BuilderUtils.newPriority(1); priority = Priority.newInstance(1);
capability = BuilderUtils.newResource(1024, 1); capability = Resource.newInstance(1024, 1);
node = nodeReports.get(0).getNodeId().getHost(); node = nodeReports.get(0).getNodeId().getHost();
rack = nodeReports.get(0).getRackName(); rack = nodeReports.get(0).getRackName();
nodes = new String[]{ node }; nodes = new String[]{ node };
@ -171,12 +171,12 @@ public class TestAMRMClient {
amClient.start(); amClient.start();
amClient.registerApplicationMaster("Host", 10000, ""); amClient.registerApplicationMaster("Host", 10000, "");
Resource capability1 = BuilderUtils.newResource(1024, 2); Resource capability1 = Resource.newInstance(1024, 2);
Resource capability2 = BuilderUtils.newResource(1024, 1); Resource capability2 = Resource.newInstance(1024, 1);
Resource capability3 = BuilderUtils.newResource(1000, 2); Resource capability3 = Resource.newInstance(1000, 2);
Resource capability4 = BuilderUtils.newResource(2000, 1); Resource capability4 = Resource.newInstance(2000, 1);
Resource capability5 = BuilderUtils.newResource(1000, 3); Resource capability5 = Resource.newInstance(1000, 3);
Resource capability6 = BuilderUtils.newResource(2000, 1); Resource capability6 = Resource.newInstance(2000, 1);
StoredContainerRequest storedContainer1 = StoredContainerRequest storedContainer1 =
new StoredContainerRequest(capability1, nodes, racks, priority); new StoredContainerRequest(capability1, nodes, racks, priority);
@ -201,7 +201,7 @@ public class TestAMRMClient {
List<? extends Collection<StoredContainerRequest>> matches; List<? extends Collection<StoredContainerRequest>> matches;
StoredContainerRequest storedRequest; StoredContainerRequest storedRequest;
// exact match // exact match
Resource testCapability1 = BuilderUtils.newResource(1024, 2); Resource testCapability1 = Resource.newInstance(1024, 2);
matches = amClient.getMatchingRequests(priority, node, testCapability1); matches = amClient.getMatchingRequests(priority, node, testCapability1);
verifyMatches(matches, 1); verifyMatches(matches, 1);
storedRequest = matches.get(0).iterator().next(); storedRequest = matches.get(0).iterator().next();
@ -209,7 +209,7 @@ public class TestAMRMClient {
amClient.removeContainerRequest(storedContainer1); amClient.removeContainerRequest(storedContainer1);
// exact matching with order maintained // exact matching with order maintained
Resource testCapability2 = BuilderUtils.newResource(2000, 1); Resource testCapability2 = Resource.newInstance(2000, 1);
matches = amClient.getMatchingRequests(priority, node, testCapability2); matches = amClient.getMatchingRequests(priority, node, testCapability2);
verifyMatches(matches, 2); verifyMatches(matches, 2);
// must be returned in the order they were made // must be returned in the order they were made
@ -224,11 +224,11 @@ public class TestAMRMClient {
amClient.removeContainerRequest(storedContainer6); amClient.removeContainerRequest(storedContainer6);
// matching with larger container. all requests returned // matching with larger container. all requests returned
Resource testCapability3 = BuilderUtils.newResource(4000, 4); Resource testCapability3 = Resource.newInstance(4000, 4);
matches = amClient.getMatchingRequests(priority, node, testCapability3); matches = amClient.getMatchingRequests(priority, node, testCapability3);
assert(matches.size() == 4); assert(matches.size() == 4);
Resource testCapability4 = BuilderUtils.newResource(1024, 2); Resource testCapability4 = Resource.newInstance(1024, 2);
matches = amClient.getMatchingRequests(priority, node, testCapability4); matches = amClient.getMatchingRequests(priority, node, testCapability4);
assert(matches.size() == 2); assert(matches.size() == 2);
// verify non-fitting containers are not returned and fitting ones are // verify non-fitting containers are not returned and fitting ones are
@ -241,7 +241,7 @@ public class TestAMRMClient {
testRequest == storedContainer3); testRequest == storedContainer3);
} }
Resource testCapability5 = BuilderUtils.newResource(512, 4); Resource testCapability5 = Resource.newInstance(512, 4);
matches = amClient.getMatchingRequests(priority, node, testCapability5); matches = amClient.getMatchingRequests(priority, node, testCapability5);
assert(matches.size() == 0); assert(matches.size() == 0);

View File

@ -36,14 +36,16 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse; import org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.Container; import org.apache.hadoop.yarn.api.records.Container;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ContainerState; import org.apache.hadoop.yarn.api.records.ContainerState;
import org.apache.hadoop.yarn.api.records.ContainerStatus; import org.apache.hadoop.yarn.api.records.ContainerStatus;
import org.apache.hadoop.yarn.api.records.NodeReport; import org.apache.hadoop.yarn.api.records.NodeReport;
import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.client.AMRMClient.ContainerRequest; import org.apache.hadoop.yarn.client.AMRMClient.ContainerRequest;
import org.apache.hadoop.yarn.exceptions.YarnRemoteException; import org.apache.hadoop.yarn.exceptions.YarnRemoteException;
import org.apache.hadoop.yarn.util.BuilderUtils;
import org.junit.Test; import org.junit.Test;
import org.mockito.invocation.InvocationOnMock; import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer; import org.mockito.stubbing.Answer;
@ -58,11 +60,10 @@ public class TestAMRMClientAsync {
Configuration conf = new Configuration(); Configuration conf = new Configuration();
final AtomicBoolean heartbeatBlock = new AtomicBoolean(true); final AtomicBoolean heartbeatBlock = new AtomicBoolean(true);
List<ContainerStatus> completed1 = Arrays.asList( List<ContainerStatus> completed1 = Arrays.asList(
BuilderUtils.newContainerStatus( ContainerStatus.newInstance(newContainerId(0, 0, 0, 0),
BuilderUtils.newContainerId(0, 0, 0, 0),
ContainerState.COMPLETE, "", 0)); ContainerState.COMPLETE, "", 0));
List<Container> allocated1 = Arrays.asList( List<Container> allocated1 = Arrays.asList(
BuilderUtils.newContainer(null, null, null, null, null, null)); Container.newInstance(null, null, null, null, null, null));
final AllocateResponse response1 = createAllocateResponse( final AllocateResponse response1 = createAllocateResponse(
new ArrayList<ContainerStatus>(), allocated1); new ArrayList<ContainerStatus>(), allocated1);
final AllocateResponse response2 = createAllocateResponse(completed1, final AllocateResponse response2 = createAllocateResponse(completed1,
@ -214,11 +215,19 @@ public class TestAMRMClientAsync {
private AllocateResponse createAllocateResponse( private AllocateResponse createAllocateResponse(
List<ContainerStatus> completed, List<Container> allocated) { List<ContainerStatus> completed, List<Container> allocated) {
AllocateResponse response = BuilderUtils.newAllocateResponse(0, completed, allocated, AllocateResponse response = AllocateResponse.newInstance(0, completed, allocated,
new ArrayList<NodeReport>(), null, false, 1, null); new ArrayList<NodeReport>(), null, false, 1, null);
return response; return response;
} }
public static ContainerId newContainerId(int appId, int appAttemptId,
long timestamp, int containerId) {
ApplicationId applicationId = ApplicationId.newInstance(timestamp, appId);
ApplicationAttemptId applicationAttemptId =
ApplicationAttemptId.newInstance(applicationId, appAttemptId);
return ContainerId.newInstance(applicationAttemptId, containerId);
}
private class TestCallbackHandler implements AMRMClientAsync.CallbackHandler { private class TestCallbackHandler implements AMRMClientAsync.CallbackHandler {
private volatile List<ContainerStatus> completedContainers; private volatile List<ContainerStatus> completedContainers;
private volatile List<Container> allocatedContainers; private volatile List<Container> allocatedContainers;

View File

@ -45,14 +45,13 @@ import org.apache.hadoop.yarn.api.records.Container;
import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ContainerLaunchContext; import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
import org.apache.hadoop.yarn.api.records.ContainerStatus; import org.apache.hadoop.yarn.api.records.ContainerStatus;
import org.apache.hadoop.yarn.api.records.Token;
import org.apache.hadoop.yarn.api.records.NodeId; import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.api.records.Token;
import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.YarnRemoteException; import org.apache.hadoop.yarn.exceptions.YarnRemoteException;
import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
import org.apache.hadoop.yarn.ipc.RPCUtil; import org.apache.hadoop.yarn.ipc.RPCUtil;
import org.apache.hadoop.yarn.util.BuilderUtils;
import org.junit.Test; import org.junit.Test;
@ -528,14 +527,14 @@ public class TestNMClientAsync {
private Container mockContainer(int i) { private Container mockContainer(int i) {
ApplicationId appId = ApplicationId appId =
BuilderUtils.newApplicationId(System.currentTimeMillis(), 1); ApplicationId.newInstance(System.currentTimeMillis(), 1);
ApplicationAttemptId attemptId = ApplicationAttemptId attemptId =
ApplicationAttemptId.newInstance(appId, 1); ApplicationAttemptId.newInstance(appId, 1);
ContainerId containerId = ContainerId.newInstance(attemptId, i); ContainerId containerId = ContainerId.newInstance(attemptId, i);
nodeId = NodeId.newInstance("localhost", 0); nodeId = NodeId.newInstance("localhost", 0);
// Create an empty record // Create an empty record
containerToken = recordFactory.newRecordInstance(Token.class); containerToken = recordFactory.newRecordInstance(Token.class);
return BuilderUtils.newContainer(containerId, nodeId, null, null, null, return Container.newInstance(containerId, nodeId, null, null, null,
containerToken); containerToken);
} }
} }

View File

@ -37,6 +37,7 @@ import java.util.List;
import junit.framework.Assert; import junit.framework.Assert;
import org.apache.commons.lang.time.DateFormatUtils; import org.apache.commons.lang.time.DateFormatUtils;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationReport; import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus; import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
@ -47,7 +48,6 @@ import org.apache.hadoop.yarn.api.records.NodeState;
import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.api.records.YarnApplicationState; import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.client.YarnClient; import org.apache.hadoop.yarn.client.YarnClient;
import org.apache.hadoop.yarn.util.BuilderUtils;
import org.apache.hadoop.yarn.util.Records; import org.apache.hadoop.yarn.util.Records;
import org.junit.Before; import org.junit.Before;
import org.junit.Test; import org.junit.Test;
@ -71,9 +71,9 @@ public class TestYarnCLI {
@Test @Test
public void testGetApplicationReport() throws Exception { public void testGetApplicationReport() throws Exception {
ApplicationCLI cli = createAndGetAppCLI(); ApplicationCLI cli = createAndGetAppCLI();
ApplicationId applicationId = BuilderUtils.newApplicationId(1234, 5); ApplicationId applicationId = ApplicationId.newInstance(1234, 5);
ApplicationReport newApplicationReport = BuilderUtils.newApplicationReport( ApplicationReport newApplicationReport = ApplicationReport.newInstance(
applicationId, BuilderUtils.newApplicationAttemptId(applicationId, 1), applicationId, ApplicationAttemptId.newInstance(applicationId, 1),
"user", "queue", "appname", "host", 124, null, "user", "queue", "appname", "host", 124, null,
YarnApplicationState.FINISHED, "diagnostics", "url", 0, 0, YarnApplicationState.FINISHED, "diagnostics", "url", 0, 0,
FinalApplicationStatus.SUCCEEDED, null, "N/A", 0.53789f, "YARN"); FinalApplicationStatus.SUCCEEDED, null, "N/A", 0.53789f, "YARN");
@ -108,9 +108,9 @@ public class TestYarnCLI {
@Test @Test
public void testGetAllApplications() throws Exception { public void testGetAllApplications() throws Exception {
ApplicationCLI cli = createAndGetAppCLI(); ApplicationCLI cli = createAndGetAppCLI();
ApplicationId applicationId = BuilderUtils.newApplicationId(1234, 5); ApplicationId applicationId = ApplicationId.newInstance(1234, 5);
ApplicationReport newApplicationReport = BuilderUtils.newApplicationReport( ApplicationReport newApplicationReport = ApplicationReport.newInstance(
applicationId, BuilderUtils.newApplicationAttemptId(applicationId, 1), applicationId, ApplicationAttemptId.newInstance(applicationId, 1),
"user", "queue", "appname", "host", 124, null, "user", "queue", "appname", "host", 124, null,
YarnApplicationState.FINISHED, "diagnostics", "url", 0, 0, YarnApplicationState.FINISHED, "diagnostics", "url", 0, 0,
FinalApplicationStatus.SUCCEEDED, null, "N/A", 0.53789f, "YARN"); FinalApplicationStatus.SUCCEEDED, null, "N/A", 0.53789f, "YARN");
@ -143,7 +143,7 @@ public class TestYarnCLI {
@Test @Test
public void testKillApplication() throws Exception { public void testKillApplication() throws Exception {
ApplicationCLI cli = createAndGetAppCLI(); ApplicationCLI cli = createAndGetAppCLI();
ApplicationId applicationId = BuilderUtils.newApplicationId(1234, 5); ApplicationId applicationId = ApplicationId.newInstance(1234, 5);
int result = cli.run(new String[] { "-kill", applicationId.toString() }); int result = cli.run(new String[] { "-kill", applicationId.toString() });
assertEquals(0, result); assertEquals(0, result);
verify(client).killApplication(any(ApplicationId.class)); verify(client).killApplication(any(ApplicationId.class));
@ -178,7 +178,7 @@ public class TestYarnCLI {
@Test @Test
public void testNodeStatus() throws Exception { public void testNodeStatus() throws Exception {
NodeId nodeId = BuilderUtils.newNodeId("host0", 0); NodeId nodeId = NodeId.newInstance("host0", 0);
NodeCLI cli = new NodeCLI(); NodeCLI cli = new NodeCLI();
when(client.getNodeReports()).thenReturn(getNodeReports(3)); when(client.getNodeReports()).thenReturn(getNodeReports(3));
cli.setClient(client); cli.setClient(client);
@ -209,7 +209,7 @@ public class TestYarnCLI {
@Test @Test
public void testAbsentNodeStatus() throws Exception { public void testAbsentNodeStatus() throws Exception {
NodeId nodeId = BuilderUtils.newNodeId("Absenthost0", 0); NodeId nodeId = NodeId.newInstance("Absenthost0", 0);
NodeCLI cli = new NodeCLI(); NodeCLI cli = new NodeCLI();
when(client.getNodeReports()).thenReturn(getNodeReports(0)); when(client.getNodeReports()).thenReturn(getNodeReports(0));
cli.setClient(client); cli.setClient(client);
@ -243,8 +243,8 @@ public class TestYarnCLI {
List<NodeReport> nodeReports = new ArrayList<NodeReport>(); List<NodeReport> nodeReports = new ArrayList<NodeReport>();
for (int i = 0; i < noOfNodes; i++) { for (int i = 0; i < noOfNodes; i++) {
NodeReport nodeReport = BuilderUtils.newNodeReport(BuilderUtils NodeReport nodeReport = NodeReport.newInstance(NodeId
.newNodeId("host" + i, 0), NodeState.RUNNING, "host" + 1 + ":8888", .newInstance("host" + i, 0), NodeState.RUNNING, "host" + 1 + ":8888",
"rack1", Records.newRecord(Resource.class), Records "rack1", Records.newRecord(Resource.class), Records
.newRecord(Resource.class), 0, Records .newRecord(Resource.class), 0, Records
.newRecord(NodeHealthStatus.class)); .newRecord(NodeHealthStatus.class));

View File

@ -30,7 +30,6 @@ import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier; import org.apache.hadoop.security.token.TokenIdentifier;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.util.BuilderUtils;
/** /**
* ApplicationTokenIdentifier is the TokenIdentifier to be used by * ApplicationTokenIdentifier is the TokenIdentifier to be used by
@ -69,9 +68,9 @@ public class ApplicationTokenIdentifier extends TokenIdentifier {
int appId = in.readInt(); int appId = in.readInt();
int attemptId = in.readInt(); int attemptId = in.readInt();
ApplicationId applicationId = ApplicationId applicationId =
BuilderUtils.newApplicationId(clusterTimeStamp, appId); ApplicationId.newInstance(clusterTimeStamp, appId);
this.applicationAttemptId = this.applicationAttemptId =
BuilderUtils.newApplicationAttemptId(applicationId, attemptId); ApplicationAttemptId.newInstance(applicationId, attemptId);
} }
@Override @Override

View File

@ -33,7 +33,6 @@ import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.util.BuilderUtils;
/** /**
* TokenIdentifier for a container. Encodes {@link ContainerId}, * TokenIdentifier for a container. Encodes {@link ContainerId},
@ -125,17 +124,17 @@ public class ContainerTokenIdentifier extends TokenIdentifier {
@Override @Override
public void readFields(DataInput in) throws IOException { public void readFields(DataInput in) throws IOException {
ApplicationId applicationId = BuilderUtils.newApplicationId( ApplicationId applicationId =
in.readLong(), in.readInt()); ApplicationId.newInstance(in.readLong(), in.readInt());
ApplicationAttemptId applicationAttemptId = BuilderUtils ApplicationAttemptId applicationAttemptId =
.newApplicationAttemptId(applicationId, in.readInt()); ApplicationAttemptId.newInstance(applicationId, in.readInt());
this.containerId = BuilderUtils.newContainerId(applicationAttemptId, in this.containerId =
.readInt()); ContainerId.newInstance(applicationAttemptId, in.readInt());
this.nmHostAddr = in.readUTF(); this.nmHostAddr = in.readUTF();
this.appSubmitter = in.readUTF(); this.appSubmitter = in.readUTF();
int memory = in.readInt(); int memory = in.readInt();
int vCores = in.readInt(); int vCores = in.readInt();
this.resource = BuilderUtils.newResource(memory, vCores); this.resource = Resource.newInstance(memory, vCores);
this.expiryTimeStamp = in.readLong(); this.expiryTimeStamp = in.readLong();
this.masterKeyId = in.readInt(); this.masterKeyId = in.readInt();
this.rmIdentifier = in.readLong(); this.rmIdentifier = in.readLong();
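
readFields() pins down the wire layout, so any writer has to emit fields in exactly this order. A reconstruction of the matching write side, inferred from the reads above rather than copied from the class:

public void write(DataOutput out) throws IOException {
  // Mirror of readFields(): timestamp/appId/attemptId/containerId first...
  ApplicationAttemptId attemptId = containerId.getApplicationAttemptId();
  out.writeLong(attemptId.getApplicationId().getClusterTimestamp());
  out.writeInt(attemptId.getApplicationId().getId());
  out.writeInt(attemptId.getAttemptId());
  out.writeInt(containerId.getId());
  // ...then the NM address, submitter, resource, expiry and key material.
  out.writeUTF(nmHostAddr);
  out.writeUTF(appSubmitter);
  out.writeInt(resource.getMemory());
  out.writeInt(resource.getVirtualCores());
  out.writeLong(expiryTimeStamp);
  out.writeInt(masterKeyId);
  out.writeLong(rmIdentifier);
}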

View File

@ -28,7 +28,7 @@ import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier; import org.apache.hadoop.security.token.TokenIdentifier;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.util.BuilderUtils; import org.apache.hadoop.yarn.api.records.ApplicationId;
public class ClientTokenIdentifier extends TokenIdentifier { public class ClientTokenIdentifier extends TokenIdentifier {
@ -62,9 +62,8 @@ public class ClientTokenIdentifier extends TokenIdentifier {
@Override @Override
public void readFields(DataInput in) throws IOException { public void readFields(DataInput in) throws IOException {
this.applicationAttemptId = this.applicationAttemptId =
BuilderUtils.newApplicationAttemptId( ApplicationAttemptId.newInstance(
BuilderUtils.newApplicationId(in.readLong(), in.readInt()), ApplicationId.newInstance(in.readLong(), in.readInt()), in.readInt());
in.readInt());
} }
@Override @Override

View File

@ -39,7 +39,6 @@ import org.apache.hadoop.yarn.api.protocolrecords.CancelDelegationTokenRequest;
import org.apache.hadoop.yarn.api.protocolrecords.RenewDelegationTokenRequest; import org.apache.hadoop.yarn.api.protocolrecords.RenewDelegationTokenRequest;
import org.apache.hadoop.yarn.exceptions.YarnRemoteException; import org.apache.hadoop.yarn.exceptions.YarnRemoteException;
import org.apache.hadoop.yarn.ipc.YarnRPC; import org.apache.hadoop.yarn.ipc.YarnRPC;
import org.apache.hadoop.yarn.util.BuilderUtils;
import org.apache.hadoop.yarn.util.Records; import org.apache.hadoop.yarn.util.Records;
/** /**
@ -165,9 +164,9 @@ public class RMDelegationTokenIdentifier extends AbstractDelegationTokenIdentifi
private static org.apache.hadoop.yarn.api.records.Token private static org.apache.hadoop.yarn.api.records.Token
convertToProtoToken(Token<?> token) { convertToProtoToken(Token<?> token) {
return BuilderUtils.newDelegationToken( return org.apache.hadoop.yarn.api.records.Token.newInstance(
token.getIdentifier(), token.getKind().toString(), token.getIdentifier(), token.getKind().toString(), token.getPassword(),
token.getPassword(), token.getService().toString()); token.getService().toString());
} }
} }
} }

View File

@ -147,7 +147,7 @@ public class ConverterUtils {
} }
try { try {
NodeId nodeId = NodeId nodeId =
BuilderUtils.newNodeId(parts[0], Integer.parseInt(parts[1])); NodeId.newInstance(parts[0], Integer.parseInt(parts[1]));
return nodeId; return nodeId;
} catch (NumberFormatException e) { } catch (NumberFormatException e) {
throw new IllegalArgumentException("Invalid port: " + parts[1], e); throw new IllegalArgumentException("Invalid port: " + parts[1], e);
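
This hunk (in what appears to be ConverterUtils#toNodeId) builds a NodeId from a host:port string and surfaces a bad port as IllegalArgumentException rather than a bare NumberFormatException. Expected behaviour, assuming the toNodeId name:

NodeId ok = ConverterUtils.toNodeId("host0:8041");      // NodeId("host0", 8041)
NodeId bad = ConverterUtils.toNodeId("host0:notaport"); // throws IllegalArgumentException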

View File

@ -52,7 +52,6 @@ import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
import org.apache.hadoop.yarn.ipc.HadoopYarnProtoRPC; import org.apache.hadoop.yarn.ipc.HadoopYarnProtoRPC;
import org.apache.hadoop.yarn.ipc.YarnRPC; import org.apache.hadoop.yarn.ipc.YarnRPC;
import org.apache.hadoop.yarn.security.ContainerTokenIdentifier; import org.apache.hadoop.yarn.security.ContainerTokenIdentifier;
import org.apache.hadoop.yarn.util.BuilderUtils;
import org.junit.Test; import org.junit.Test;
/* /*
@ -102,7 +101,7 @@ public class TestContainerLaunchRPC {
new ContainerTokenIdentifier(containerId, "localhost", "user", new ContainerTokenIdentifier(containerId, "localhost", "user",
resource, System.currentTimeMillis() + 10000, 42, 42); resource, System.currentTimeMillis() + 10000, 42, 42);
Token containerToken = Token containerToken =
BuilderUtils.newContainerToken(nodeId, "password".getBytes(), TestRPC.newContainerToken(nodeId, "password".getBytes(),
containerTokenIdentifier); containerTokenIdentifier);
StartContainerRequest scRequest = recordFactory StartContainerRequest scRequest = recordFactory

View File

@ -24,10 +24,12 @@ import java.net.InetSocketAddress;
import junit.framework.Assert; import junit.framework.Assert;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.ipc.ProtobufRpcEngine; import org.apache.hadoop.ipc.ProtobufRpcEngine;
import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.ipc.Server; import org.apache.hadoop.ipc.Server;
import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.yarn.api.ClientRMProtocol; import org.apache.hadoop.yarn.api.ClientRMProtocol;
import org.apache.hadoop.yarn.api.ContainerManager; import org.apache.hadoop.yarn.api.ContainerManager;
import org.apache.hadoop.yarn.api.ContainerManagerPB; import org.apache.hadoop.yarn.api.ContainerManagerPB;
@ -55,7 +57,6 @@ import org.apache.hadoop.yarn.ipc.HadoopYarnProtoRPC;
import org.apache.hadoop.yarn.ipc.RPCUtil; import org.apache.hadoop.yarn.ipc.RPCUtil;
import org.apache.hadoop.yarn.ipc.YarnRPC; import org.apache.hadoop.yarn.ipc.YarnRPC;
import org.apache.hadoop.yarn.security.ContainerTokenIdentifier; import org.apache.hadoop.yarn.security.ContainerTokenIdentifier;
import org.apache.hadoop.yarn.util.BuilderUtils;
import org.apache.hadoop.yarn.util.Records; import org.apache.hadoop.yarn.util.Records;
import org.junit.Test; import org.junit.Test;
@ -129,8 +130,7 @@ public class TestRPC {
ContainerTokenIdentifier containerTokenIdentifier = ContainerTokenIdentifier containerTokenIdentifier =
new ContainerTokenIdentifier(containerId, "localhost", "user", new ContainerTokenIdentifier(containerId, "localhost", "user",
resource, System.currentTimeMillis() + 10000, 42, 42); resource, System.currentTimeMillis() + 10000, 42, 42);
Token containerToken = Token containerToken = newContainerToken(nodeId, "password".getBytes(),
BuilderUtils.newContainerToken(nodeId, "password".getBytes(),
containerTokenIdentifier); containerTokenIdentifier);
scRequest.setContainerToken(containerToken); scRequest.setContainerToken(containerToken);
proxy.startContainer(scRequest); proxy.startContainer(scRequest);
@ -183,7 +183,8 @@ public class TestRPC {
ContainerTokenIdentifier tokenId = null; ContainerTokenIdentifier tokenId = null;
try { try {
tokenId = BuilderUtils.newContainerTokenIdentifier(containerToken); tokenId = newContainerTokenIdentifier(containerToken);
tokenId = new ContainerTokenIdentifier();
} catch (IOException e) { } catch (IOException e) {
throw RPCUtil.getRemoteException(e); throw RPCUtil.getRemoteException(e);
} }
@ -204,4 +205,28 @@ public class TestRPC {
throw new YarnRemoteException(e); throw new YarnRemoteException(e);
} }
} }
public static ContainerTokenIdentifier newContainerTokenIdentifier(
Token containerToken) throws IOException {
org.apache.hadoop.security.token.Token<ContainerTokenIdentifier> token =
new org.apache.hadoop.security.token.Token<ContainerTokenIdentifier>(
containerToken.getIdentifier()
.array(), containerToken.getPassword().array(), new Text(
containerToken.getKind()),
new Text(containerToken.getService()));
return token.decodeIdentifier();
}
public static Token newContainerToken(NodeId nodeId, byte[] password,
ContainerTokenIdentifier tokenIdentifier) {
// RPC layer client expects ip:port as service for tokens
InetSocketAddress addr =
NetUtils.createSocketAddrForHost(nodeId.getHost(), nodeId.getPort());
// NOTE: use SecurityUtil.setTokenService if this becomes a "real" token
Token containerToken =
Token.newInstance(tokenIdentifier.getBytes(),
ContainerTokenIdentifier.KIND.toString(), password, SecurityUtil
.buildTokenService(addr).toString());
return containerToken;
}
} }
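
newContainerToken and newContainerTokenIdentifier are inverses over the token's byte representation, which is what lets the dummy container manager above recover the identifier it was sent. A round-trip sketch reusing the constructor arguments from the test (nodeId, containerId, and resource assumed in scope):

ContainerTokenIdentifier original = new ContainerTokenIdentifier(containerId,
    "localhost", "user", resource, System.currentTimeMillis() + 10000, 42, 42);
Token token = newContainerToken(nodeId, "password".getBytes(), original);
ContainerTokenIdentifier decoded = newContainerTokenIdentifier(token);
// The decoded identifier describes the same container as the original.
assert decoded.getContainerID().equals(original.getContainerID());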

View File

@ -23,7 +23,6 @@ import junit.framework.Assert;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.util.BuilderUtils;
import org.junit.Test; import org.junit.Test;
public class TestApplicationAttemptId { public class TestApplicationAttemptId {
@ -59,8 +58,8 @@ public class TestApplicationAttemptId {
private ApplicationAttemptId createAppAttemptId( private ApplicationAttemptId createAppAttemptId(
long clusterTimeStamp, int id, int attemptId) { long clusterTimeStamp, int id, int attemptId) {
ApplicationId appId = BuilderUtils.newApplicationId(clusterTimeStamp, id); ApplicationId appId = ApplicationId.newInstance(clusterTimeStamp, id);
return BuilderUtils.newApplicationAttemptId(appId, attemptId); return ApplicationAttemptId.newInstance(appId, attemptId);
} }
public static void main(String[] args) throws Exception { public static void main(String[] args) throws Exception {

View File

@ -21,17 +21,16 @@ package org.apache.hadoop.yarn.api;
import junit.framework.Assert; import junit.framework.Assert;
import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.util.BuilderUtils;
import org.junit.Test; import org.junit.Test;
public class TestApplicationId { public class TestApplicationId {
@Test @Test
public void testApplicationId() { public void testApplicationId() {
ApplicationId a1 = BuilderUtils.newApplicationId(10l, 1); ApplicationId a1 = ApplicationId.newInstance(10l, 1);
ApplicationId a2 = BuilderUtils.newApplicationId(10l, 2); ApplicationId a2 = ApplicationId.newInstance(10l, 2);
ApplicationId a3 = BuilderUtils.newApplicationId(10l, 1); ApplicationId a3 = ApplicationId.newInstance(10l, 1);
ApplicationId a4 = BuilderUtils.newApplicationId(8l, 3); ApplicationId a4 = ApplicationId.newInstance(8l, 3);
Assert.assertFalse(a1.equals(a2)); Assert.assertFalse(a1.equals(a2));
Assert.assertFalse(a1.equals(a4)); Assert.assertFalse(a1.equals(a4));
@ -46,8 +45,7 @@ public class TestApplicationId {
Assert.assertFalse(a2.hashCode() == a4.hashCode()); Assert.assertFalse(a2.hashCode() == a4.hashCode());
long ts = System.currentTimeMillis(); long ts = System.currentTimeMillis();
ApplicationId a5 = ApplicationId a5 = ApplicationId.newInstance(ts, 45436343);
BuilderUtils.newApplicationId(ts, 45436343);
Assert.assertEquals("application_10_0001", a1.toString()); Assert.assertEquals("application_10_0001", a1.toString());
Assert.assertEquals("application_" + ts + "_45436343", a5.toString()); Assert.assertEquals("application_" + ts + "_45436343", a5.toString());
} }

View File

@ -24,7 +24,6 @@ import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus; import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.api.records.YarnApplicationState; import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.util.BuilderUtils;
import org.junit.Assert; import org.junit.Assert;
import org.junit.Test; import org.junit.Test;
@ -51,11 +50,11 @@ public class TestApplicatonReport {
protected static ApplicationReport createApplicationReport( protected static ApplicationReport createApplicationReport(
int appIdInt, int appAttemptIdInt, long timestamp) { int appIdInt, int appAttemptIdInt, long timestamp) {
ApplicationId appId = BuilderUtils.newApplicationId(timestamp, appIdInt); ApplicationId appId = ApplicationId.newInstance(timestamp, appIdInt);
ApplicationAttemptId appAttemptId = ApplicationAttemptId appAttemptId =
BuilderUtils.newApplicationAttemptId(appId, appAttemptIdInt); ApplicationAttemptId.newInstance(appId, appAttemptIdInt);
ApplicationReport appReport = ApplicationReport appReport =
BuilderUtils.newApplicationReport(appId, appAttemptId, "user", "queue", ApplicationReport.newInstance(appId, appAttemptId, "user", "queue",
"appname", "host", 124, null, YarnApplicationState.FINISHED, "appname", "host", 124, null, YarnApplicationState.FINISHED,
"diagnostics", "url", 0, 0, FinalApplicationStatus.SUCCEEDED, null, "diagnostics", "url", 0, 0, FinalApplicationStatus.SUCCEEDED, null,
"N/A", 0.53789f, YarnConfiguration.DEFAULT_APPLICATION_TYPE); "N/A", 0.53789f, YarnConfiguration.DEFAULT_APPLICATION_TYPE);

View File

@ -21,19 +21,20 @@ package org.apache.hadoop.yarn.api;
import junit.framework.Assert; import junit.framework.Assert;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.util.BuilderUtils;
import org.junit.Test; import org.junit.Test;
public class TestContainerId { public class TestContainerId {
@Test @Test
public void testContainerId() { public void testContainerId() {
ContainerId c1 = BuilderUtils.newContainerId(1, 1, 10l, 1); ContainerId c1 = newContainerId(1, 1, 10l, 1);
ContainerId c2 = BuilderUtils.newContainerId(1, 1, 10l, 2); ContainerId c2 = newContainerId(1, 1, 10l, 2);
ContainerId c3 = BuilderUtils.newContainerId(1, 1, 10l, 1); ContainerId c3 = newContainerId(1, 1, 10l, 1);
ContainerId c4 = BuilderUtils.newContainerId(1, 3, 10l, 1); ContainerId c4 = newContainerId(1, 3, 10l, 1);
ContainerId c5 = BuilderUtils.newContainerId(1, 3, 8l, 1); ContainerId c5 = newContainerId(1, 3, 8l, 1);
Assert.assertTrue(c1.equals(c3)); Assert.assertTrue(c1.equals(c3));
Assert.assertFalse(c1.equals(c2)); Assert.assertFalse(c1.equals(c2));
@ -51,10 +52,17 @@ public class TestContainerId {
Assert.assertFalse(c1.hashCode() == c5.hashCode()); Assert.assertFalse(c1.hashCode() == c5.hashCode());
long ts = System.currentTimeMillis(); long ts = System.currentTimeMillis();
ContainerId c6 = BuilderUtils.newContainerId(36473, 4365472, ts, 25645811); ContainerId c6 = newContainerId(36473, 4365472, ts, 25645811);
Assert.assertEquals("container_10_0001_01_000001", c1.toString()); Assert.assertEquals("container_10_0001_01_000001", c1.toString());
Assert.assertEquals("container_" + ts + "_36473_4365472_25645811", Assert.assertEquals("container_" + ts + "_36473_4365472_25645811",
c6.toString()); c6.toString());
} }
public static ContainerId newContainerId(int appId, int appAttemptId,
long timestamp, int containerId) {
ApplicationId applicationId = ApplicationId.newInstance(timestamp, appId);
ApplicationAttemptId applicationAttemptId =
ApplicationAttemptId.newInstance(applicationId, appAttemptId);
return ContainerId.newInstance(applicationAttemptId, containerId);
}
} }

View File

@ -21,16 +21,15 @@ package org.apache.hadoop.yarn.api;
import junit.framework.Assert; import junit.framework.Assert;
import org.apache.hadoop.yarn.api.records.NodeId; import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.util.BuilderUtils;
import org.junit.Test; import org.junit.Test;
public class TestNodeId { public class TestNodeId {
@Test @Test
public void testNodeId() { public void testNodeId() {
NodeId nodeId1 = BuilderUtils.newNodeId("10.18.52.124", 8041); NodeId nodeId1 = NodeId.newInstance("10.18.52.124", 8041);
NodeId nodeId2 = BuilderUtils.newNodeId("10.18.52.125", 8038); NodeId nodeId2 = NodeId.newInstance("10.18.52.125", 8038);
NodeId nodeId3 = BuilderUtils.newNodeId("10.18.52.124", 8041); NodeId nodeId3 = NodeId.newInstance("10.18.52.124", 8041);
NodeId nodeId4 = BuilderUtils.newNodeId("10.18.52.124", 8039); NodeId nodeId4 = NodeId.newInstance("10.18.52.124", 8039);
Assert.assertTrue(nodeId1.equals(nodeId3)); Assert.assertTrue(nodeId1.equals(nodeId3));
Assert.assertFalse(nodeId1.equals(nodeId2)); Assert.assertFalse(nodeId1.equals(nodeId2));

View File

@ -48,12 +48,14 @@ import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.io.nativeio.NativeIO; import org.apache.hadoop.io.nativeio.NativeIO;
import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.yarn.api.TestContainerId;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogKey; import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogKey;
import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogReader; import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogReader;
import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogValue; import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogValue;
import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogWriter; import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogWriter;
import org.apache.hadoop.yarn.util.BuilderUtils;
import org.junit.After; import org.junit.After;
import org.junit.Assume; import org.junit.Assume;
import org.junit.Before; import org.junit.Before;
@ -93,7 +95,7 @@ public class TestAggregatedLogFormat {
Path remoteAppLogFile = Path remoteAppLogFile =
new Path(workDir.getAbsolutePath(), "aggregatedLogFile"); new Path(workDir.getAbsolutePath(), "aggregatedLogFile");
Path srcFileRoot = new Path(workDir.getAbsolutePath(), "srcFiles"); Path srcFileRoot = new Path(workDir.getAbsolutePath(), "srcFiles");
ContainerId testContainerId = BuilderUtils.newContainerId(1, 1, 1, 1); ContainerId testContainerId = TestContainerId.newContainerId(1, 1, 1, 1);
Path t = Path t =
new Path(srcFileRoot, testContainerId.getApplicationAttemptId() new Path(srcFileRoot, testContainerId.getApplicationAttemptId()
.getApplicationId().toString()); .getApplicationId().toString());
@ -160,7 +162,11 @@ public class TestAggregatedLogFormat {
String data = "Log File content for container : "; String data = "Log File content for container : ";
// Creating files for container1. Log aggregator will try to read log files // Creating files for container1. Log aggregator will try to read log files
// with illegal user. // with illegal user.
ContainerId testContainerId1 = BuilderUtils.newContainerId(1, 1, 1, 1); ApplicationId applicationId = ApplicationId.newInstance(1, 1);
ApplicationAttemptId applicationAttemptId =
ApplicationAttemptId.newInstance(applicationId, 1);
ContainerId testContainerId1 =
ContainerId.newInstance(applicationAttemptId, 1);
Path appDir = Path appDir =
new Path(srcFileRoot, testContainerId1.getApplicationAttemptId() new Path(srcFileRoot, testContainerId1.getApplicationAttemptId()
.getApplicationId().toString()); .getApplicationId().toString());

View File

@ -17,11 +17,13 @@
*/ */
package org.apache.hadoop.yarn.util; package org.apache.hadoop.yarn.util;
import static org.junit.Assert.*; import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import java.net.URISyntaxException; import java.net.URISyntaxException;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
import org.apache.hadoop.yarn.api.TestContainerId;
import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.URL; import org.apache.hadoop.yarn.api.records.URL;
import org.junit.Test; import org.junit.Test;
@ -38,7 +40,7 @@ public class TestConverterUtils {
@Test @Test
public void testContainerId() throws URISyntaxException { public void testContainerId() throws URISyntaxException {
ContainerId id = BuilderUtils.newContainerId(0, 0, 0, 0); ContainerId id = TestContainerId.newContainerId(0, 0, 0, 0);
String cid = ConverterUtils.toString(id); String cid = ConverterUtils.toString(id);
assertEquals("container_0_0000_00_000000", cid); assertEquals("container_0_0000_00_000000", cid);
ContainerId gen = ConverterUtils.toContainerId(cid); ContainerId gen = ConverterUtils.toContainerId(cid);

View File

@ -16,9 +16,10 @@
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.yarn.util; package org.apache.hadoop.yarn.server.utils;
import java.io.IOException; import java.io.IOException;
import java.io.Serializable;
import java.net.InetSocketAddress; import java.net.InetSocketAddress;
import java.net.URI; import java.net.URI;
import java.nio.ByteBuffer; import java.nio.ByteBuffer;
@ -60,6 +61,7 @@ import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.factories.RecordFactory; import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
import org.apache.hadoop.yarn.security.ContainerTokenIdentifier; import org.apache.hadoop.yarn.security.ContainerTokenIdentifier;
import org.apache.hadoop.yarn.util.ConverterUtils;
/** /**
* Builder utilities to construct various objects. * Builder utilities to construct various objects.
@@ -71,7 +73,7 @@ public class BuilderUtils {
.getRecordFactory(null); .getRecordFactory(null);
public static class ApplicationIdComparator implements public static class ApplicationIdComparator implements
Comparator<ApplicationId> { Comparator<ApplicationId>, Serializable {
@Override @Override
public int compare(ApplicationId a1, ApplicationId a2) { public int compare(ApplicationId a1, ApplicationId a2) {
return a1.compareTo(a2); return a1.compareTo(a2);
@@ -79,7 +81,7 @@ public class BuilderUtils {
} }
public static class ContainerIdComparator implements public static class ContainerIdComparator implements
java.util.Comparator<ContainerId> { java.util.Comparator<ContainerId>, Serializable {
@Override @Override
public int compare(ContainerId c1, public int compare(ContainerId c1,
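Both comparators above gain Serializable because a TreeMap or TreeSet serializes its comparator along with the collection, so a non-Serializable comparator makes every collection built on it unserializable. A sketch of the general rule using a hypothetical comparator (not a YARN class):

    import java.io.Serializable;
    import java.util.Comparator;
    import java.util.TreeMap;

    class LengthOrder implements Comparator<String>, Serializable {
        private static final long serialVersionUID = 1L;
        @Override
        public int compare(String a, String b) {
            return Integer.compare(a.length(), b.length());
        }
    }
    // ObjectOutputStream.writeObject(new TreeMap<String, Integer>(new LengthOrder()))
    // would throw java.io.NotSerializableException without the Serializable marker.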
@@ -88,26 +90,6 @@ public class BuilderUtils {
} }
} }
public static class ResourceRequestComparator
implements java.util.Comparator<org.apache.hadoop.yarn.api.records.ResourceRequest> {
@Override
public int compare(org.apache.hadoop.yarn.api.records.ResourceRequest r1,
org.apache.hadoop.yarn.api.records.ResourceRequest r2) {
// Compare priority, host and capability
int ret = r1.getPriority().compareTo(r2.getPriority());
if (ret == 0) {
String h1 = r1.getResourceName();
String h2 = r2.getResourceName();
ret = h1.compareTo(h2);
}
if (ret == 0) {
ret = r1.getCapability().compareTo(r2.getCapability());
}
return ret;
}
}
public static LocalResource newLocalResource(URL url, LocalResourceType type, public static LocalResource newLocalResource(URL url, LocalResourceType type,
LocalResourceVisibility visibility, long size, long timestamp) { LocalResourceVisibility visibility, long size, long timestamp) {
LocalResource resource = LocalResource resource =
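The comparator removed in this hunk is not dropped outright: as the call sites in this commit show, it now lives on the public record as ResourceRequest.ResourceRequestComparator, ordering requests by priority, then resource name, then capability. Callers swap accordingly, e.g.:

    Set<ResourceRequest> ask = new TreeSet<ResourceRequest>(
        new org.apache.hadoop.yarn.api.records.ResourceRequest.ResourceRequestComparator());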
@@ -99,10 +99,10 @@ import org.apache.hadoop.yarn.server.nodemanager.containermanager.monitor.Contai
import org.apache.hadoop.yarn.server.nodemanager.metrics.NodeManagerMetrics; import org.apache.hadoop.yarn.server.nodemanager.metrics.NodeManagerMetrics;
import org.apache.hadoop.yarn.server.nodemanager.security.authorize.NMPolicyProvider; import org.apache.hadoop.yarn.server.nodemanager.security.authorize.NMPolicyProvider;
import org.apache.hadoop.yarn.server.security.ApplicationACLsManager; import org.apache.hadoop.yarn.server.security.ApplicationACLsManager;
import org.apache.hadoop.yarn.server.utils.BuilderUtils;
import org.apache.hadoop.yarn.service.CompositeService; import org.apache.hadoop.yarn.service.CompositeService;
import org.apache.hadoop.yarn.service.Service; import org.apache.hadoop.yarn.service.Service;
import org.apache.hadoop.yarn.service.ServiceStateChangeListener; import org.apache.hadoop.yarn.service.ServiceStateChangeListener;
import org.apache.hadoop.yarn.util.BuilderUtils;
import com.google.common.annotations.VisibleForTesting; import com.google.common.annotations.VisibleForTesting;
@@ -62,12 +62,12 @@ import org.apache.hadoop.yarn.server.nodemanager.containermanager.loghandler.eve
import org.apache.hadoop.yarn.server.nodemanager.containermanager.monitor.ContainerStartMonitoringEvent; import org.apache.hadoop.yarn.server.nodemanager.containermanager.monitor.ContainerStartMonitoringEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.monitor.ContainerStopMonitoringEvent; import org.apache.hadoop.yarn.server.nodemanager.containermanager.monitor.ContainerStopMonitoringEvent;
import org.apache.hadoop.yarn.server.nodemanager.metrics.NodeManagerMetrics; import org.apache.hadoop.yarn.server.nodemanager.metrics.NodeManagerMetrics;
import org.apache.hadoop.yarn.server.utils.BuilderUtils;
import org.apache.hadoop.yarn.state.InvalidStateTransitonException; import org.apache.hadoop.yarn.state.InvalidStateTransitonException;
import org.apache.hadoop.yarn.state.MultipleArcTransition; import org.apache.hadoop.yarn.state.MultipleArcTransition;
import org.apache.hadoop.yarn.state.SingleArcTransition; import org.apache.hadoop.yarn.state.SingleArcTransition;
import org.apache.hadoop.yarn.state.StateMachine; import org.apache.hadoop.yarn.state.StateMachine;
import org.apache.hadoop.yarn.state.StateMachineFactory; import org.apache.hadoop.yarn.state.StateMachineFactory;
import org.apache.hadoop.yarn.util.BuilderUtils;
import org.apache.hadoop.yarn.util.ConverterUtils; import org.apache.hadoop.yarn.util.ConverterUtils;
public class ContainerImpl implements Container { public class ContainerImpl implements Container {
@@ -44,7 +44,7 @@ import org.apache.hadoop.yarn.server.nodemanager.containermanager.BaseContainerM
import org.apache.hadoop.yarn.server.nodemanager.metrics.NodeManagerMetrics; import org.apache.hadoop.yarn.server.nodemanager.metrics.NodeManagerMetrics;
import org.apache.hadoop.yarn.server.nodemanager.security.NMContainerTokenSecretManager; import org.apache.hadoop.yarn.server.nodemanager.security.NMContainerTokenSecretManager;
import org.apache.hadoop.yarn.server.security.ApplicationACLsManager; import org.apache.hadoop.yarn.server.security.ApplicationACLsManager;
import org.apache.hadoop.yarn.util.BuilderUtils; import org.apache.hadoop.yarn.server.utils.BuilderUtils;
import org.junit.Test; import org.junit.Test;
@@ -60,7 +60,7 @@ import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Cont
import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerState; import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerState;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.ContainerLocalizer; import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.ContainerLocalizer;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.ResourceLocalizationService; import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.ResourceLocalizationService;
import org.apache.hadoop.yarn.util.BuilderUtils; import org.apache.hadoop.yarn.server.utils.BuilderUtils;
import org.apache.hadoop.yarn.util.ConverterUtils; import org.apache.hadoop.yarn.util.ConverterUtils;
import org.apache.hadoop.yarn.util.Records; import org.apache.hadoop.yarn.util.Records;
import org.junit.After; import org.junit.After;
@@ -62,7 +62,7 @@ import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider; import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
import org.apache.hadoop.yarn.ipc.YarnRPC; import org.apache.hadoop.yarn.ipc.YarnRPC;
import org.apache.hadoop.yarn.server.api.records.MasterKey; import org.apache.hadoop.yarn.server.api.records.MasterKey;
import org.apache.hadoop.yarn.util.BuilderUtils; import org.apache.hadoop.yarn.server.utils.BuilderUtils;
import org.apache.hadoop.yarn.util.ConverterUtils; import org.apache.hadoop.yarn.util.ConverterUtils;
import org.junit.After; import org.junit.After;
import org.junit.Before; import org.junit.Before;
@@ -76,10 +76,10 @@ import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Cont
import org.apache.hadoop.yarn.server.nodemanager.metrics.NodeManagerMetrics; import org.apache.hadoop.yarn.server.nodemanager.metrics.NodeManagerMetrics;
import org.apache.hadoop.yarn.server.nodemanager.security.NMContainerTokenSecretManager; import org.apache.hadoop.yarn.server.nodemanager.security.NMContainerTokenSecretManager;
import org.apache.hadoop.yarn.server.security.ApplicationACLsManager; import org.apache.hadoop.yarn.server.security.ApplicationACLsManager;
import org.apache.hadoop.yarn.server.utils.BuilderUtils;
import org.apache.hadoop.yarn.server.utils.YarnServerBuilderUtils; import org.apache.hadoop.yarn.server.utils.YarnServerBuilderUtils;
import org.apache.hadoop.yarn.service.Service; import org.apache.hadoop.yarn.service.Service;
import org.apache.hadoop.yarn.service.Service.STATE; import org.apache.hadoop.yarn.service.Service.STATE;
import org.apache.hadoop.yarn.util.BuilderUtils;
import org.junit.After; import org.junit.After;
import org.junit.Assert; import org.junit.Assert;
import org.junit.Before; import org.junit.Before;
@@ -59,7 +59,7 @@ import org.apache.hadoop.yarn.server.nodemanager.DeletionService;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.ApplicationState; import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.ApplicationState;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.ContainerLocalizer; import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.ContainerLocalizer;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.ResourceLocalizationService; import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.ResourceLocalizationService;
import org.apache.hadoop.yarn.util.BuilderUtils; import org.apache.hadoop.yarn.server.utils.BuilderUtils;
import org.apache.hadoop.yarn.util.ConverterUtils; import org.apache.hadoop.yarn.util.ConverterUtils;
import org.junit.Test; import org.junit.Test;
@@ -62,7 +62,7 @@ import org.apache.hadoop.yarn.server.nodemanager.containermanager.monitor.Contai
import org.apache.hadoop.yarn.server.nodemanager.containermanager.monitor.ContainersMonitorEventType; import org.apache.hadoop.yarn.server.nodemanager.containermanager.monitor.ContainersMonitorEventType;
import org.apache.hadoop.yarn.server.nodemanager.security.NMContainerTokenSecretManager; import org.apache.hadoop.yarn.server.nodemanager.security.NMContainerTokenSecretManager;
import org.apache.hadoop.yarn.server.security.ApplicationACLsManager; import org.apache.hadoop.yarn.server.security.ApplicationACLsManager;
import org.apache.hadoop.yarn.util.BuilderUtils; import org.apache.hadoop.yarn.server.utils.BuilderUtils;
import org.junit.Test; import org.junit.Test;
import org.mockito.ArgumentMatcher; import org.mockito.ArgumentMatcher;
@@ -72,7 +72,7 @@ import org.apache.hadoop.yarn.server.nodemanager.containermanager.loghandler.eve
import org.apache.hadoop.yarn.server.nodemanager.containermanager.monitor.ContainersMonitorEvent; import org.apache.hadoop.yarn.server.nodemanager.containermanager.monitor.ContainersMonitorEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.monitor.ContainersMonitorEventType; import org.apache.hadoop.yarn.server.nodemanager.containermanager.monitor.ContainersMonitorEventType;
import org.apache.hadoop.yarn.server.nodemanager.metrics.NodeManagerMetrics; import org.apache.hadoop.yarn.server.nodemanager.metrics.NodeManagerMetrics;
import org.apache.hadoop.yarn.util.BuilderUtils; import org.apache.hadoop.yarn.server.utils.BuilderUtils;
import org.junit.Test; import org.junit.Test;
import org.mockito.ArgumentMatcher; import org.mockito.ArgumentMatcher;
@@ -62,7 +62,7 @@ import org.apache.hadoop.yarn.server.nodemanager.DefaultContainerExecutor;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.BaseContainerManagerTest; import org.apache.hadoop.yarn.server.nodemanager.containermanager.BaseContainerManagerTest;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.launcher.ContainerLaunch; import org.apache.hadoop.yarn.server.nodemanager.containermanager.launcher.ContainerLaunch;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.ContainerLocalizer; import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.ContainerLocalizer;
import org.apache.hadoop.yarn.util.BuilderUtils; import org.apache.hadoop.yarn.server.utils.BuilderUtils;
import org.apache.hadoop.yarn.util.ConverterUtils; import org.apache.hadoop.yarn.util.ConverterUtils;
import org.apache.hadoop.yarn.util.LinuxResourceCalculatorPlugin; import org.apache.hadoop.yarn.util.LinuxResourceCalculatorPlugin;
import org.apache.hadoop.yarn.util.ResourceCalculatorPlugin; import org.apache.hadoop.yarn.util.ResourceCalculatorPlugin;
@@ -54,7 +54,7 @@ import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.even
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.ResourceLocalizedEvent; import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.ResourceLocalizedEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.ResourceReleaseEvent; import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.ResourceReleaseEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.ResourceRequestEvent; import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.ResourceRequestEvent;
import org.apache.hadoop.yarn.util.BuilderUtils; import org.apache.hadoop.yarn.server.utils.BuilderUtils;
import org.junit.Test; import org.junit.Test;
public class TestLocalResourcesTrackerImpl { public class TestLocalResourcesTrackerImpl {
@@ -117,8 +117,8 @@ import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.even
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.LocalizerEventType; import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.LocalizerEventType;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.LocalizerResourceRequestEvent; import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.LocalizerResourceRequestEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.ResourceFailedLocalizationEvent; import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.ResourceFailedLocalizationEvent;
import org.apache.hadoop.yarn.server.utils.BuilderUtils;
import org.apache.hadoop.yarn.server.utils.YarnServerBuilderUtils; import org.apache.hadoop.yarn.server.utils.YarnServerBuilderUtils;
import org.apache.hadoop.yarn.util.BuilderUtils;
import org.apache.hadoop.yarn.util.ConverterUtils; import org.apache.hadoop.yarn.util.ConverterUtils;
import org.junit.BeforeClass; import org.junit.BeforeClass;
import org.junit.Test; import org.junit.Test;
@@ -95,7 +95,7 @@ import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.Ap
import org.apache.hadoop.yarn.server.nodemanager.containermanager.loghandler.event.LogHandlerAppFinishedEvent; import org.apache.hadoop.yarn.server.nodemanager.containermanager.loghandler.event.LogHandlerAppFinishedEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.loghandler.event.LogHandlerAppStartedEvent; import org.apache.hadoop.yarn.server.nodemanager.containermanager.loghandler.event.LogHandlerAppStartedEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.loghandler.event.LogHandlerContainerFinishedEvent; import org.apache.hadoop.yarn.server.nodemanager.containermanager.loghandler.event.LogHandlerContainerFinishedEvent;
import org.apache.hadoop.yarn.util.BuilderUtils; import org.apache.hadoop.yarn.server.utils.BuilderUtils;
import org.apache.hadoop.yarn.util.ConverterUtils; import org.apache.hadoop.yarn.util.ConverterUtils;
import org.junit.Test; import org.junit.Test;
import org.mockito.ArgumentCaptor; import org.mockito.ArgumentCaptor;
@@ -43,7 +43,7 @@ import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.Ap
import org.apache.hadoop.yarn.server.nodemanager.containermanager.loghandler.event.LogHandlerAppFinishedEvent; import org.apache.hadoop.yarn.server.nodemanager.containermanager.loghandler.event.LogHandlerAppFinishedEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.loghandler.event.LogHandlerAppStartedEvent; import org.apache.hadoop.yarn.server.nodemanager.containermanager.loghandler.event.LogHandlerAppStartedEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.loghandler.event.LogHandlerContainerFinishedEvent; import org.apache.hadoop.yarn.server.nodemanager.containermanager.loghandler.event.LogHandlerContainerFinishedEvent;
import org.apache.hadoop.yarn.util.BuilderUtils; import org.apache.hadoop.yarn.server.utils.BuilderUtils;
import org.junit.Test; import org.junit.Test;
import org.mockito.exceptions.verification.WantedButNotInvoked; import org.mockito.exceptions.verification.WantedButNotInvoked;
@@ -62,7 +62,7 @@ import org.apache.hadoop.yarn.server.nodemanager.ContainerExecutor.ExitCode;
import org.apache.hadoop.yarn.server.nodemanager.ContainerExecutor.Signal; import org.apache.hadoop.yarn.server.nodemanager.ContainerExecutor.Signal;
import org.apache.hadoop.yarn.server.nodemanager.Context; import org.apache.hadoop.yarn.server.nodemanager.Context;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.BaseContainerManagerTest; import org.apache.hadoop.yarn.server.nodemanager.containermanager.BaseContainerManagerTest;
import org.apache.hadoop.yarn.util.BuilderUtils; import org.apache.hadoop.yarn.server.utils.BuilderUtils;
import org.apache.hadoop.yarn.util.ConverterUtils; import org.apache.hadoop.yarn.util.ConverterUtils;
import org.apache.hadoop.yarn.util.LinuxResourceCalculatorPlugin; import org.apache.hadoop.yarn.util.LinuxResourceCalculatorPlugin;
import org.apache.hadoop.yarn.util.ProcfsBasedProcessTree; import org.apache.hadoop.yarn.util.ProcfsBasedProcessTree;
@@ -30,7 +30,7 @@ import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.Ap
import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.ApplicationEvent; import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.ApplicationEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.ApplicationState; import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.ApplicationState;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container; import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container;
import org.apache.hadoop.yarn.util.BuilderUtils; import org.apache.hadoop.yarn.server.utils.BuilderUtils;
public class MockApp implements Application { public class MockApp implements Application {
@@ -39,7 +39,7 @@ import org.apache.hadoop.yarn.security.ContainerTokenIdentifier;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container; import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerEvent; import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerState; import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerState;
import org.apache.hadoop.yarn.util.BuilderUtils; import org.apache.hadoop.yarn.server.utils.BuilderUtils;
public class MockContainer implements Container { public class MockContainer implements Container {
@@ -51,7 +51,7 @@ import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.Ap
import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container; import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container;
import org.apache.hadoop.yarn.server.nodemanager.webapp.ContainerLogsPage.ContainersLogsBlock; import org.apache.hadoop.yarn.server.nodemanager.webapp.ContainerLogsPage.ContainersLogsBlock;
import org.apache.hadoop.yarn.server.security.ApplicationACLsManager; import org.apache.hadoop.yarn.server.security.ApplicationACLsManager;
import org.apache.hadoop.yarn.util.BuilderUtils; import org.apache.hadoop.yarn.server.utils.BuilderUtils;
import org.apache.hadoop.yarn.webapp.YarnWebParams; import org.apache.hadoop.yarn.webapp.YarnWebParams;
import org.apache.hadoop.yarn.webapp.test.WebAppTests; import org.apache.hadoop.yarn.webapp.test.WebAppTests;
import org.junit.Assert; import org.junit.Assert;
@@ -49,7 +49,7 @@ import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Cont
import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerState; import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerState;
import org.apache.hadoop.yarn.server.nodemanager.metrics.NodeManagerMetrics; import org.apache.hadoop.yarn.server.nodemanager.metrics.NodeManagerMetrics;
import org.apache.hadoop.yarn.server.security.ApplicationACLsManager; import org.apache.hadoop.yarn.server.security.ApplicationACLsManager;
import org.apache.hadoop.yarn.util.BuilderUtils; import org.apache.hadoop.yarn.server.utils.BuilderUtils;
import org.apache.hadoop.yarn.util.ConverterUtils; import org.apache.hadoop.yarn.util.ConverterUtils;
import org.junit.After; import org.junit.After;
import org.junit.Assert; import org.junit.Assert;
@@ -48,7 +48,7 @@ import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.Ap
import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container; import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container;
import org.apache.hadoop.yarn.server.nodemanager.webapp.WebServer.NMWebApp; import org.apache.hadoop.yarn.server.nodemanager.webapp.WebServer.NMWebApp;
import org.apache.hadoop.yarn.server.security.ApplicationACLsManager; import org.apache.hadoop.yarn.server.security.ApplicationACLsManager;
import org.apache.hadoop.yarn.util.BuilderUtils; import org.apache.hadoop.yarn.server.utils.BuilderUtils;
import org.apache.hadoop.yarn.webapp.GenericExceptionHandler; import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
import org.apache.hadoop.yarn.webapp.WebApp; import org.apache.hadoop.yarn.webapp.WebApp;
import org.apache.hadoop.yarn.webapp.WebServicesTestUtils; import org.apache.hadoop.yarn.webapp.WebServicesTestUtils;
@@ -48,7 +48,7 @@ import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.Ap
import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container; import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container;
import org.apache.hadoop.yarn.server.nodemanager.webapp.WebServer.NMWebApp; import org.apache.hadoop.yarn.server.nodemanager.webapp.WebServer.NMWebApp;
import org.apache.hadoop.yarn.server.security.ApplicationACLsManager; import org.apache.hadoop.yarn.server.security.ApplicationACLsManager;
import org.apache.hadoop.yarn.util.BuilderUtils; import org.apache.hadoop.yarn.server.utils.BuilderUtils;
import org.apache.hadoop.yarn.util.ConverterUtils; import org.apache.hadoop.yarn.util.ConverterUtils;
import org.apache.hadoop.yarn.webapp.GenericExceptionHandler; import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
import org.apache.hadoop.yarn.webapp.WebApp; import org.apache.hadoop.yarn.webapp.WebApp;
@@ -74,8 +74,8 @@ import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerNodeRepo
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerUtils; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerUtils;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.YarnScheduler; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.YarnScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.security.authorize.RMPolicyProvider; import org.apache.hadoop.yarn.server.resourcemanager.security.authorize.RMPolicyProvider;
import org.apache.hadoop.yarn.server.utils.BuilderUtils;
import org.apache.hadoop.yarn.service.AbstractService; import org.apache.hadoop.yarn.service.AbstractService;
import org.apache.hadoop.yarn.util.BuilderUtils;
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
@Private @Private
@@ -88,8 +88,8 @@ import org.apache.hadoop.yarn.server.resourcemanager.scheduler.YarnScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.security.RMDelegationTokenSecretManager; import org.apache.hadoop.yarn.server.resourcemanager.security.RMDelegationTokenSecretManager;
import org.apache.hadoop.yarn.server.resourcemanager.security.authorize.RMPolicyProvider; import org.apache.hadoop.yarn.server.resourcemanager.security.authorize.RMPolicyProvider;
import org.apache.hadoop.yarn.server.security.ApplicationACLsManager; import org.apache.hadoop.yarn.server.security.ApplicationACLsManager;
import org.apache.hadoop.yarn.server.utils.BuilderUtils;
import org.apache.hadoop.yarn.service.AbstractService; import org.apache.hadoop.yarn.service.AbstractService;
import org.apache.hadoop.yarn.util.BuilderUtils;
import org.apache.hadoop.yarn.util.Records; import org.apache.hadoop.yarn.util.Records;
@@ -195,7 +195,7 @@ public class ClientRMService extends AbstractService implements
} }
ApplicationId getNewApplicationId() { ApplicationId getNewApplicationId() {
ApplicationId applicationId = org.apache.hadoop.yarn.util.BuilderUtils ApplicationId applicationId = org.apache.hadoop.yarn.server.utils.BuilderUtils
.newApplicationId(recordFactory, ResourceManager.clusterTimeStamp, .newApplicationId(recordFactory, ResourceManager.clusterTimeStamp,
applicationCounter.incrementAndGet()); applicationCounter.incrementAndGet());
LOG.info("Allocated new applicationId: " + applicationId.getId()); LOG.info("Allocated new applicationId: " + applicationId.getId());
@@ -52,7 +52,7 @@ import org.apache.hadoop.yarn.server.resourcemanager.scheduler.InvalidResourceRe
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerUtils; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerUtils;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.YarnScheduler; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.YarnScheduler;
import org.apache.hadoop.yarn.server.security.ApplicationACLsManager; import org.apache.hadoop.yarn.server.security.ApplicationACLsManager;
import org.apache.hadoop.yarn.util.BuilderUtils; import org.apache.hadoop.yarn.server.utils.BuilderUtils;
/** /**
* This class manages the list of applications for the resource manager. * This class manages the list of applications for the resource manager.
@@ -64,12 +64,12 @@ import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptI
import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNode; import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNode;
import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNodeCleanAppEvent; import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNodeCleanAppEvent;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.YarnScheduler; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.YarnScheduler;
import org.apache.hadoop.yarn.server.utils.BuilderUtils;
import org.apache.hadoop.yarn.state.InvalidStateTransitonException; import org.apache.hadoop.yarn.state.InvalidStateTransitonException;
import org.apache.hadoop.yarn.state.MultipleArcTransition; import org.apache.hadoop.yarn.state.MultipleArcTransition;
import org.apache.hadoop.yarn.state.SingleArcTransition; import org.apache.hadoop.yarn.state.SingleArcTransition;
import org.apache.hadoop.yarn.state.StateMachine; import org.apache.hadoop.yarn.state.StateMachine;
import org.apache.hadoop.yarn.state.StateMachineFactory; import org.apache.hadoop.yarn.state.StateMachineFactory;
import org.apache.hadoop.yarn.util.BuilderUtils;
public class RMAppImpl implements RMApp, Recoverable { public class RMAppImpl implements RMApp, Recoverable {
@@ -94,13 +94,13 @@ import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerAppRepor
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.YarnScheduler; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.YarnScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.AppAddedSchedulerEvent; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.AppAddedSchedulerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.AppRemovedSchedulerEvent; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.AppRemovedSchedulerEvent;
import org.apache.hadoop.yarn.server.utils.BuilderUtils;
import org.apache.hadoop.yarn.server.webproxy.ProxyUriUtils; import org.apache.hadoop.yarn.server.webproxy.ProxyUriUtils;
import org.apache.hadoop.yarn.state.InvalidStateTransitonException; import org.apache.hadoop.yarn.state.InvalidStateTransitonException;
import org.apache.hadoop.yarn.state.MultipleArcTransition; import org.apache.hadoop.yarn.state.MultipleArcTransition;
import org.apache.hadoop.yarn.state.SingleArcTransition; import org.apache.hadoop.yarn.state.SingleArcTransition;
import org.apache.hadoop.yarn.state.StateMachine; import org.apache.hadoop.yarn.state.StateMachine;
import org.apache.hadoop.yarn.state.StateMachineFactory; import org.apache.hadoop.yarn.state.StateMachineFactory;
import org.apache.hadoop.yarn.util.BuilderUtils;
@SuppressWarnings({"unchecked", "rawtypes"}) @SuppressWarnings({"unchecked", "rawtypes"})
public class RMAppAttemptImpl implements RMAppAttempt, Recoverable { public class RMAppAttemptImpl implements RMAppAttempt, Recoverable {
@@ -54,12 +54,12 @@ import org.apache.hadoop.yarn.server.resourcemanager.RMContext;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.NodeAddedSchedulerEvent; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.NodeAddedSchedulerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.NodeRemovedSchedulerEvent; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.NodeRemovedSchedulerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.NodeUpdateSchedulerEvent; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.NodeUpdateSchedulerEvent;
import org.apache.hadoop.yarn.server.utils.BuilderUtils.ContainerIdComparator;
import org.apache.hadoop.yarn.state.InvalidStateTransitonException; import org.apache.hadoop.yarn.state.InvalidStateTransitonException;
import org.apache.hadoop.yarn.state.MultipleArcTransition; import org.apache.hadoop.yarn.state.MultipleArcTransition;
import org.apache.hadoop.yarn.state.SingleArcTransition; import org.apache.hadoop.yarn.state.SingleArcTransition;
import org.apache.hadoop.yarn.state.StateMachine; import org.apache.hadoop.yarn.state.StateMachine;
import org.apache.hadoop.yarn.state.StateMachineFactory; import org.apache.hadoop.yarn.state.StateMachineFactory;
import org.apache.hadoop.yarn.util.BuilderUtils.ContainerIdComparator;
import com.google.common.annotations.VisibleForTesting; import com.google.common.annotations.VisibleForTesting;
@@ -43,7 +43,7 @@ import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.resourcemanager.resource.Resources; import org.apache.hadoop.yarn.server.resourcemanager.resource.Resources;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptState; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptState;
import org.apache.hadoop.yarn.util.BuilderUtils; import org.apache.hadoop.yarn.server.utils.BuilderUtils;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
@@ -64,7 +64,7 @@ import org.apache.hadoop.yarn.server.resourcemanager.scheduler.QueueMetrics;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.common.fica.FiCaSchedulerApp; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.common.fica.FiCaSchedulerApp;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.common.fica.FiCaSchedulerNode; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.common.fica.FiCaSchedulerNode;
import org.apache.hadoop.yarn.server.resourcemanager.security.RMContainerTokenSecretManager; import org.apache.hadoop.yarn.server.resourcemanager.security.RMContainerTokenSecretManager;
import org.apache.hadoop.yarn.util.BuilderUtils; import org.apache.hadoop.yarn.server.utils.BuilderUtils;
@Private @Private
@Unstable @Unstable
@@ -40,7 +40,7 @@ import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainer;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.NodeType; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.NodeType;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.QueueMetrics; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.QueueMetrics;
import org.apache.hadoop.yarn.server.resourcemanager.security.RMContainerTokenSecretManager; import org.apache.hadoop.yarn.server.resourcemanager.security.RMContainerTokenSecretManager;
import org.apache.hadoop.yarn.util.BuilderUtils; import org.apache.hadoop.yarn.server.utils.BuilderUtils;
@Private @Private
@Unstable @Unstable
@@ -27,7 +27,7 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.resourcemanager.resource.Resources; import org.apache.hadoop.yarn.server.resourcemanager.resource.Resources;
import org.apache.hadoop.yarn.util.BuilderUtils; import org.apache.hadoop.yarn.server.utils.BuilderUtils;
@Private @Private
@Evolving @Evolving
@@ -86,7 +86,7 @@ import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.NodeAddedSc
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.NodeRemovedSchedulerEvent; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.NodeRemovedSchedulerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.NodeUpdateSchedulerEvent; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.NodeUpdateSchedulerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.SchedulerEvent; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.SchedulerEvent;
import org.apache.hadoop.yarn.util.BuilderUtils; import org.apache.hadoop.yarn.server.utils.BuilderUtils;
@LimitedPrivate("yarn") @LimitedPrivate("yarn")
@Evolving @Evolving
@@ -34,7 +34,7 @@ import org.apache.hadoop.yarn.security.ContainerTokenIdentifier;
import org.apache.hadoop.yarn.server.api.records.MasterKey; import org.apache.hadoop.yarn.server.api.records.MasterKey;
import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager; import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
import org.apache.hadoop.yarn.server.security.BaseContainerTokenSecretManager; import org.apache.hadoop.yarn.server.security.BaseContainerTokenSecretManager;
import org.apache.hadoop.yarn.util.BuilderUtils; import org.apache.hadoop.yarn.server.utils.BuilderUtils;
/** /**
* SecretManager for ContainerTokens. This is RM-specific and rolls the * SecretManager for ContainerTokens. This is RM-specific and rolls the
@@ -81,7 +81,7 @@ public class Application {
final private Set<ResourceRequest> ask = final private Set<ResourceRequest> ask =
new TreeSet<ResourceRequest>( new TreeSet<ResourceRequest>(
new org.apache.hadoop.yarn.util.BuilderUtils.ResourceRequestComparator()); new org.apache.hadoop.yarn.api.records.ResourceRequest.ResourceRequestComparator());
final private Map<String, NodeManager> nodes = final private Map<String, NodeManager> nodes =
new HashMap<String, NodeManager>(); new HashMap<String, NodeManager>();
@@ -230,7 +230,7 @@ public class Application {
ResourceRequest request = requests.get(resourceName); ResourceRequest request = requests.get(resourceName);
if (request == null) { if (request == null) {
request = request =
org.apache.hadoop.yarn.util.BuilderUtils.newResourceRequest( org.apache.hadoop.yarn.server.utils.BuilderUtils.newResourceRequest(
priority, resourceName, capability, 1); priority, resourceName, capability, 1);
requests.put(resourceName, request); requests.put(resourceName, request);
} else { } else {
@@ -240,7 +240,7 @@ public class Application {
// Note this down for next interaction with ResourceManager // Note this down for next interaction with ResourceManager
ask.remove(request); ask.remove(request);
ask.add( ask.add(
org.apache.hadoop.yarn.util.BuilderUtils.newResourceRequest( org.apache.hadoop.yarn.server.utils.BuilderUtils.newResourceRequest(
request)); // clone to ensure the RM doesn't manipulate the same obj request)); // clone to ensure the RM doesn't manipulate the same obj
if(LOG.isDebugEnabled()) { if(LOG.isDebugEnabled()) {
@@ -388,7 +388,7 @@ public class Application {
// Note this for next interaction with ResourceManager // Note this for next interaction with ResourceManager
ask.remove(request); ask.remove(request);
ask.add( ask.add(
org.apache.hadoop.yarn.util.BuilderUtils.newResourceRequest( org.apache.hadoop.yarn.server.utils.BuilderUtils.newResourceRequest(
request)); // clone to ensure the RM doesn't manipulate the same obj request)); // clone to ensure the RM doesn't manipulate the same obj
if(LOG.isDebugEnabled()) { if(LOG.isDebugEnabled()) {
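Both hunks above follow the same update pattern: because the comparator deliberately ignores numContainers, remove() still matches the stale entry, and the re-added element is a clone so the RM never shares one mutable ResourceRequest with this bookkeeping. A sketch of the pattern, with names assumed from the surrounding class:

    // bump the count, then refresh the entry in 'ask' with a defensive copy
    request.setNumContainers(request.getNumContainers() + 1);
    ask.remove(request);   // matches despite the changed count
    ask.add(org.apache.hadoop.yarn.server.utils.BuilderUtils.newResourceRequest(request));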
@@ -29,15 +29,14 @@ import org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse;
import org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterRequest; import org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterRequest;
import org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterRequest; import org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterRequest;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId; import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.api.records.Priority; import org.apache.hadoop.yarn.api.records.Priority;
import org.apache.hadoop.yarn.api.records.Resource; import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.api.records.ResourceRequest; import org.apache.hadoop.yarn.api.records.ResourceRequest;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttempt; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttempt;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptState; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptState;
import org.apache.hadoop.yarn.util.BuilderUtils;
import org.apache.hadoop.yarn.util.Records; import org.apache.hadoop.yarn.util.Records;
public class MockAM { public class MockAM {
@@ -144,7 +143,7 @@ public class MockAM {
public AllocateResponse allocate( public AllocateResponse allocate(
List<ResourceRequest> resourceRequest, List<ContainerId> releases) List<ResourceRequest> resourceRequest, List<ContainerId> releases)
throws Exception { throws Exception {
AllocateRequest req = BuilderUtils.newAllocateRequest(attemptId, AllocateRequest req = AllocateRequest.newInstance(attemptId,
++responseId, 0F, resourceRequest, releases); ++responseId, 0F, resourceRequest, releases);
return amRMProtocol.allocate(req); return amRMProtocol.allocate(req);
} }
@@ -37,7 +37,7 @@ import org.apache.hadoop.yarn.server.api.protocolrecords.RegisterNodeManagerRequ
import org.apache.hadoop.yarn.server.api.protocolrecords.RegisterNodeManagerResponse; import org.apache.hadoop.yarn.server.api.protocolrecords.RegisterNodeManagerResponse;
import org.apache.hadoop.yarn.server.api.records.MasterKey; import org.apache.hadoop.yarn.server.api.records.MasterKey;
import org.apache.hadoop.yarn.server.api.records.NodeStatus; import org.apache.hadoop.yarn.server.api.records.NodeStatus;
import org.apache.hadoop.yarn.util.BuilderUtils; import org.apache.hadoop.yarn.server.utils.BuilderUtils;
import org.apache.hadoop.yarn.util.Records; import org.apache.hadoop.yarn.util.Records;
public class MockNM { public class MockNM {
@@ -57,7 +57,7 @@ import org.apache.hadoop.yarn.server.api.protocolrecords.RegisterNodeManagerRequ
import org.apache.hadoop.yarn.server.api.records.NodeStatus; import org.apache.hadoop.yarn.server.api.records.NodeStatus;
import org.apache.hadoop.yarn.server.resourcemanager.resource.Resources; import org.apache.hadoop.yarn.server.resourcemanager.resource.Resources;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.common.fica.FiCaSchedulerNode; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.common.fica.FiCaSchedulerNode;
import org.apache.hadoop.yarn.util.BuilderUtils; import org.apache.hadoop.yarn.server.utils.BuilderUtils;
@Private @Private
public class NodeManager implements ContainerManager { public class NodeManager implements ContainerManager {
@@ -50,7 +50,7 @@ import org.apache.hadoop.yarn.ipc.YarnRPC;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttempt; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttempt;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptState; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptState;
import org.apache.hadoop.yarn.util.BuilderUtils; import org.apache.hadoop.yarn.server.utils.BuilderUtils;
import org.apache.hadoop.yarn.util.Records; import org.apache.hadoop.yarn.util.Records;
import org.junit.Assert; import org.junit.Assert;
import org.junit.Test; import org.junit.Test;
@@ -53,8 +53,8 @@ import org.apache.hadoop.yarn.ipc.YarnRPC;
import org.apache.hadoop.yarn.server.resourcemanager.recovery.RMStateStore; import org.apache.hadoop.yarn.server.resourcemanager.recovery.RMStateStore;
import org.apache.hadoop.yarn.server.resourcemanager.recovery.RMStateStoreFactory; import org.apache.hadoop.yarn.server.resourcemanager.recovery.RMStateStoreFactory;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppState; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppState;
import org.apache.hadoop.yarn.server.utils.BuilderUtils;
import org.apache.hadoop.yarn.service.Service.STATE; import org.apache.hadoop.yarn.service.Service.STATE;
import org.apache.hadoop.yarn.util.BuilderUtils;
import org.junit.AfterClass; import org.junit.AfterClass;
import org.junit.BeforeClass; import org.junit.BeforeClass;
import org.junit.Test; import org.junit.Test;
@@ -41,7 +41,7 @@ import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttempt; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttempt;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptState; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptState;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.SchedulerEvent; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.SchedulerEvent;
import org.apache.hadoop.yarn.util.BuilderUtils; import org.apache.hadoop.yarn.server.utils.BuilderUtils;
import org.apache.log4j.Level; import org.apache.log4j.Level;
import org.apache.log4j.LogManager; import org.apache.log4j.LogManager;
import org.apache.log4j.Logger; import org.apache.log4j.Logger;
@@ -42,7 +42,7 @@ import org.apache.hadoop.yarn.security.ContainerTokenIdentifier;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttempt; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttempt;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptState; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptState;
import org.apache.hadoop.yarn.util.BuilderUtils; import org.apache.hadoop.yarn.server.utils.BuilderUtils;
import org.apache.log4j.Level; import org.apache.log4j.Level;
import org.apache.log4j.LogManager; import org.apache.log4j.LogManager;
import org.apache.log4j.Logger; import org.apache.log4j.Logger;
@@ -73,7 +73,7 @@ import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppImpl;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.YarnScheduler; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.YarnScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.security.RMDelegationTokenSecretManager; import org.apache.hadoop.yarn.server.resourcemanager.security.RMDelegationTokenSecretManager;
import org.apache.hadoop.yarn.server.security.ApplicationACLsManager; import org.apache.hadoop.yarn.server.security.ApplicationACLsManager;
import org.apache.hadoop.yarn.util.BuilderUtils; import org.apache.hadoop.yarn.server.utils.BuilderUtils;
import org.apache.hadoop.yarn.util.Records; import org.apache.hadoop.yarn.util.Records;
import org.junit.AfterClass; import org.junit.AfterClass;
import org.junit.BeforeClass; import org.junit.BeforeClass;
@@ -65,7 +65,7 @@ import org.apache.hadoop.yarn.server.resourcemanager.recovery.NullRMStateStore;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceScheduler; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.security.RMDelegationTokenSecretManager; import org.apache.hadoop.yarn.server.resourcemanager.security.RMDelegationTokenSecretManager;
import org.apache.hadoop.yarn.server.security.ApplicationACLsManager; import org.apache.hadoop.yarn.server.security.ApplicationACLsManager;
import org.apache.hadoop.yarn.util.BuilderUtils; import org.apache.hadoop.yarn.server.utils.BuilderUtils;
import org.apache.hadoop.yarn.util.ProtoUtils; import org.apache.hadoop.yarn.util.ProtoUtils;
import org.apache.hadoop.yarn.util.Records; import org.apache.hadoop.yarn.util.Records;
import org.junit.Before; import org.junit.Before;
@@ -48,7 +48,7 @@ import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.NodeRemoved
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.NodeUpdateSchedulerEvent; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.NodeUpdateSchedulerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.SchedulerEvent; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.SchedulerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fifo.FifoScheduler; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fifo.FifoScheduler;
import org.apache.hadoop.yarn.util.BuilderUtils; import org.apache.hadoop.yarn.server.utils.BuilderUtils;
import org.apache.log4j.Level; import org.apache.log4j.Level;
import org.apache.log4j.LogManager; import org.apache.log4j.LogManager;
import org.apache.log4j.Logger; import org.apache.log4j.Logger;
@@ -52,7 +52,7 @@ import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.NodeUpdateS
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.SchedulerEvent; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.SchedulerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.SchedulerEventType; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.SchedulerEventType;
import org.apache.hadoop.yarn.server.resourcemanager.security.DelegationTokenRenewer; import org.apache.hadoop.yarn.server.resourcemanager.security.DelegationTokenRenewer;
import org.apache.hadoop.yarn.util.BuilderUtils; import org.apache.hadoop.yarn.server.utils.BuilderUtils;
import org.apache.hadoop.yarn.util.Records; import org.apache.hadoop.yarn.util.Records;
import org.junit.After; import org.junit.After;
import org.junit.Before; import org.junit.Before;
@@ -64,7 +64,7 @@ import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttempt;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptState; import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptState;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair.FairScheduler; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair.FairScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.security.DelegationTokenRenewer; import org.apache.hadoop.yarn.server.resourcemanager.security.DelegationTokenRenewer;
import org.apache.hadoop.yarn.util.BuilderUtils; import org.apache.hadoop.yarn.server.utils.BuilderUtils;
import org.apache.hadoop.yarn.util.ProtoUtils; import org.apache.hadoop.yarn.util.ProtoUtils;
import org.apache.log4j.Level; import org.apache.log4j.Level;
import org.apache.log4j.LogManager; import org.apache.log4j.LogManager;
@@ -43,7 +43,7 @@ import org.apache.hadoop.yarn.server.api.protocolrecords.RegisterNodeManagerResp
import org.apache.hadoop.yarn.server.api.records.NodeAction; import org.apache.hadoop.yarn.server.api.records.NodeAction;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.QueueMetrics; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.QueueMetrics;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.SchedulerEvent; import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.SchedulerEvent;
import org.apache.hadoop.yarn.util.BuilderUtils; import org.apache.hadoop.yarn.server.utils.BuilderUtils;
import org.apache.hadoop.yarn.util.Records; import org.apache.hadoop.yarn.util.Records;
import org.junit.After; import org.junit.After;
import org.junit.Test; import org.junit.Test;
Some files were not shown because too many files have changed in this diff.