MAPREDUCE-3098. svn merge -c r1177633 --ignore-ancestry ../../trunk/

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-0.23@1177639 13f79535-47bb-0310-9956-ffa450edef68
Vinod Kumar Vavilapalli 2011-09-30 12:54:53 +00:00
parent be51b71dee
commit 908f1d5d01
45 changed files with 864 additions and 580 deletions

View File

@ -295,6 +295,9 @@ Release 0.23.0 - Unreleased
MAPREDUCE-3001. Added task-specific counters to AppMaster and JobHistory
web-UIs. (Robert Joseph Evans via vinodkv)
MAPREDUCE-3098. Fixed RM and MR AM to report YarnApplicationState and
application's FinalStatus separately. (Hitesh Shah via vinodkv)
OPTIMIZATIONS
MAPREDUCE-2026. Make JobTracker.getJobCounters() and
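The core of MAPREDUCE-3098 is that an application's generic lifecycle state (YarnApplicationState) and the outcome it reports on unregistration (FinalApplicationStatus) are now two separate fields of ApplicationReport. A minimal client-side sketch of reading both, using only the accessors introduced in this patch; the helper class and method names are illustrative, not part of the change:

import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;

public class AppOutcomeSketch {
  // Illustrative helper: summarize a report using both of the new fields.
  public static String summarize(ApplicationReport report) {
    YarnApplicationState state = report.getYarnApplicationState();
    if (state != YarnApplicationState.FINISHED) {
      // Still NEW/SUBMITTED/RUNNING, or terminated by the RM (FAILED/KILLED).
      return "state=" + state;
    }
    // FINISHED only means the application completed; the outcome the AM
    // reported when unregistering is carried separately.
    FinalApplicationStatus outcome = report.getFinalApplicationStatus();
    return "state=FINISHED, finalStatus=" + outcome;
  }
}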

View File

@ -46,6 +46,7 @@ import org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterRequest
import org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterRequest;
import org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
@ -169,14 +170,14 @@ public abstract class RMCommunicator extends AbstractService {
protected void unregister() {
try {
String finalState = "RUNNING";
FinalApplicationStatus finishState = FinalApplicationStatus.UNDEFINED;
if (job.getState() == JobState.SUCCEEDED) {
finalState = "SUCCEEDED";
finishState = FinalApplicationStatus.SUCCEEDED;
} else if (job.getState() == JobState.KILLED) {
finalState = "KILLED";
finishState = FinalApplicationStatus.KILLED;
} else if (job.getState() == JobState.FAILED
|| job.getState() == JobState.ERROR) {
finalState = "FAILED";
finishState = FinalApplicationStatus.FAILED;
}
StringBuffer sb = new StringBuffer();
for (String s : job.getDiagnostics()) {
@ -191,7 +192,7 @@ public abstract class RMCommunicator extends AbstractService {
FinishApplicationMasterRequest request =
recordFactory.newRecordInstance(FinishApplicationMasterRequest.class);
request.setAppAttemptId(this.applicationAttemptId);
request.setFinalState(finalState.toString());
request.setFinishApplicationStatus(finishState);
request.setDiagnostics(sb.toString());
request.setTrackingUrl(historyUrl);
scheduler.finishApplicationMaster(request);
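As the hunk above shows, unregistration now carries a typed FinalApplicationStatus instead of a free-form string. A minimal sketch of the new call sequence for an ApplicationMaster, using only the record factory and setters exercised elsewhere in this patch (see MockAM below); the surrounding class, method, and parameter names are illustrative:

import org.apache.hadoop.yarn.api.AMRMProtocol;
import org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterRequest;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.util.Records;

public class UnregisterSketch {
  // Illustrative: report the application's outcome to the RM on shutdown.
  public static void unregister(AMRMProtocol scheduler,
      ApplicationAttemptId attemptId, boolean succeeded) throws Exception {
    FinishApplicationMasterRequest request =
        Records.newRecord(FinishApplicationMasterRequest.class);
    request.setAppAttemptId(attemptId);
    // Typed enum replaces the old setFinalState(String) call.
    request.setFinishApplicationStatus(succeeded
        ? FinalApplicationStatus.SUCCEEDED
        : FinalApplicationStatus.FAILED);
    request.setDiagnostics("");
    request.setTrackingUrl("");
    scheduler.finishApplicationMaster(request);
  }
}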

View File

@ -45,7 +45,7 @@ import org.apache.hadoop.mapreduce.v2.util.MRApps;
import org.apache.hadoop.yarn.YarnException;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.api.records.ApplicationState;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.api.records.NodeReport;
import org.apache.hadoop.yarn.api.records.QueueACL;
import org.apache.hadoop.yarn.api.records.QueueState;
@ -378,13 +378,13 @@ public class TypeConverter {
return reports;
}
public static JobStatus.State fromYarn(ApplicationState state) {
public static JobStatus.State fromYarn(YarnApplicationState state) {
switch (state) {
case SUBMITTED:
return State.PREP;
case RUNNING:
return State.RUNNING;
case SUCCEEDED:
case FINISHED:
return State.SUCCEEDED;
case FAILED:
return State.FAILED;
@ -417,7 +417,7 @@ public class TypeConverter {
new JobStatus(
TypeConverter.fromYarn(application.getApplicationId()),
0.0f, 0.0f, 0.0f, 0.0f,
TypeConverter.fromYarn(application.getState()),
TypeConverter.fromYarn(application.getYarnApplicationState()),
org.apache.hadoop.mapreduce.JobPriority.NORMAL,
application.getUser(), application.getName(),
application.getQueue(), jobFile, trackingUrl

View File

@ -21,7 +21,7 @@ import junit.framework.Assert;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationState;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationIdPBImpl;
import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationReportPBImpl;
@ -35,11 +35,11 @@ public class TestTypeConverter {
@Test
public void testFromYarn() throws Exception {
int appStartTime = 612354;
ApplicationState state = ApplicationState.RUNNING;
YarnApplicationState state = YarnApplicationState.RUNNING;
ApplicationId applicationId = new ApplicationIdPBImpl();
ApplicationReportPBImpl applicationReport = new ApplicationReportPBImpl();
applicationReport.setApplicationId(applicationId);
applicationReport.setState(state);
applicationReport.setYarnApplicationState(state);
applicationReport.setStartTime(appStartTime);
applicationReport.setUser("TestTypeConverter-user");
JobStatus jobStatus = TypeConverter.fromYarn(applicationReport, "dummy-jobfile");
@ -56,7 +56,7 @@ public class TestTypeConverter {
ApplicationReport mockReport = mock(ApplicationReport.class);
when(mockReport.getTrackingUrl()).thenReturn("dummy-tracking-url");
when(mockReport.getApplicationId()).thenReturn(mockAppId);
when(mockReport.getState()).thenReturn(ApplicationState.KILLED);
when(mockReport.getYarnApplicationState()).thenReturn(YarnApplicationState.KILLED);
when(mockReport.getUser()).thenReturn("dummy-user");
when(mockReport.getQueue()).thenReturn("dummy-queue");
String jobFile = "dummy-path/job.xml";

View File

@ -61,7 +61,7 @@ import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.yarn.YarnException;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.api.records.ApplicationState;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.YarnRemoteException;
import org.apache.hadoop.yarn.factories.RecordFactory;
@ -135,7 +135,7 @@ public class ClientServiceDelegate {
trackingUrl = application.getTrackingUrl();
}
String serviceAddr = null;
while (application == null || ApplicationState.RUNNING.equals(application.getState())) {
while (application == null || YarnApplicationState.RUNNING.equals(application.getYarnApplicationState())) {
if (application == null) {
LOG.info("Could not get Job info from RM for job " + jobId
+ ". Redirecting to job history server.");
@ -146,7 +146,7 @@ public class ClientServiceDelegate {
LOG.debug("AM not assigned to Job. Waiting to get the AM ...");
Thread.sleep(2000);
LOG.debug("Application state is " + application.getState());
LOG.debug("Application state is " + application.getYarnApplicationState());
application = rm.getApplicationReport(appId);
continue;
}
@ -197,27 +197,28 @@ public class ClientServiceDelegate {
if (user == null) {
throw RPCUtil.getRemoteException("User is not set in the application report");
}
if (application.getState() == ApplicationState.NEW ||
application.getState() == ApplicationState.SUBMITTED) {
if (application.getYarnApplicationState() == YarnApplicationState.NEW ||
application.getYarnApplicationState() == YarnApplicationState.SUBMITTED) {
realProxy = null;
return getNotRunningJob(application, JobState.NEW);
}
if (application.getState() == ApplicationState.FAILED) {
if (application.getYarnApplicationState() == YarnApplicationState.FAILED) {
realProxy = null;
return getNotRunningJob(application, JobState.FAILED);
}
if (application.getState() == ApplicationState.KILLED) {
if (application.getYarnApplicationState() == YarnApplicationState.KILLED) {
realProxy = null;
return getNotRunningJob(application, JobState.KILLED);
}
//History server can serve a job only if application
//succeeded.
if (application.getState() == ApplicationState.SUCCEEDED) {
LOG.info("Application state is completed. " +
"Redirecting to job history server");
if (application.getYarnApplicationState() == YarnApplicationState.FINISHED) {
LOG.info("Application state is completed. FinalApplicationStatus="
+ application.getFinalApplicationStatus().toString()
+ ". Redirecting to job history server");
realProxy = checkAndGetHSProxy(application, JobState.SUCCEEDED);
}
return realProxy;
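The chain of checks above amounts to a routing decision keyed on YarnApplicationState. A condensed, hypothetical restatement (the enum and method below are illustrative; the real getProxy() additionally connects to the live AM for the RUNNING case):

import org.apache.hadoop.yarn.api.records.YarnApplicationState;

class ProxyRoutingSketch {
  enum Target { NOT_RUNNING_JOB, HISTORY_SERVER, APPLICATION_MASTER }

  // Illustrative mapping of the checks in getProxy() above.
  static Target routeFor(YarnApplicationState state) {
    switch (state) {
      case NEW:
      case SUBMITTED:
      case FAILED:
      case KILLED:
        // Served by a stub NotRunningJob carrying the matching JobState.
        return Target.NOT_RUNNING_JOB;
      case FINISHED:
        // Only a finished application is redirected to the job history server.
        return Target.HISTORY_SERVER;
      default:
        // RUNNING: talk to the live MR ApplicationMaster.
        return Target.APPLICATION_MASTER;
    }
  }
}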

View File

@ -22,8 +22,6 @@ import java.util.ArrayList;
import java.util.HashMap;
import org.apache.commons.lang.NotImplementedException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.mapreduce.v2.api.MRClientProtocol;
import org.apache.hadoop.mapreduce.v2.api.protocolrecords.FailTaskAttemptRequest;
import org.apache.hadoop.mapreduce.v2.api.protocolrecords.FailTaskAttemptResponse;
@ -55,15 +53,17 @@ import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptCompletionEvent;
import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskReport;
import org.apache.hadoop.mapreduce.v2.api.records.TaskState;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.exceptions.YarnRemoteException;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
import org.apache.hadoop.yarn.util.BuilderUtils;
public class NotRunningJob implements MRClientProtocol {
private static final Log LOG = LogFactory.getLog(NotRunningJob.class);
private RecordFactory recordFactory =
RecordFactoryProvider.getRecordFactory(null);
@ -72,18 +72,12 @@ public class NotRunningJob implements MRClientProtocol {
private ApplicationReport getUnknownApplicationReport() {
ApplicationReport unknown =
recordFactory.newRecordInstance(ApplicationReport.class);
unknown.setUser("N/A");
unknown.setHost("N/A");
unknown.setName("N/A");
unknown.setQueue("N/A");
unknown.setStartTime(0);
unknown.setFinishTime(0);
unknown.setTrackingUrl("N/A");
unknown.setDiagnostics("N/A");
LOG.info("getUnknownApplicationReport");
return unknown;
ApplicationId unknownAppId = recordFactory.newRecordInstance(ApplicationId.class);
// Setting AppState to NEW and finalStatus to UNDEFINED, as they are never used
// for a non-running job.
return BuilderUtils.newApplicationReport(unknownAppId, "N/A", "N/A", "N/A", "N/A", 0, "",
YarnApplicationState.NEW, "N/A", "N/A", 0, 0, FinalApplicationStatus.UNDEFINED);
}
NotRunningJob(ApplicationReport applicationReport, JobState jobState) {

View File

@ -62,7 +62,6 @@ import org.apache.hadoop.yarn.api.ApplicationConstants;
import org.apache.hadoop.yarn.api.ApplicationConstants.Environment;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.api.records.ApplicationState;
import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
import org.apache.hadoop.yarn.api.records.LocalResource;
@ -70,6 +69,7 @@ import org.apache.hadoop.yarn.api.records.LocalResourceType;
import org.apache.hadoop.yarn.api.records.LocalResourceVisibility;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.api.records.URL;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
@ -241,8 +241,8 @@ public class YARNRunner implements ClientProtocol {
String diagnostics =
(appMaster == null ?
"application report is null" : appMaster.getDiagnostics());
if (appMaster == null || appMaster.getState() == ApplicationState.FAILED
|| appMaster.getState() == ApplicationState.KILLED) {
if (appMaster == null || appMaster.getYarnApplicationState() == YarnApplicationState.FAILED
|| appMaster.getYarnApplicationState() == YarnApplicationState.KILLED) {
throw new IOException("Failed to run job : " +
diagnostics);
}

View File

@ -88,7 +88,8 @@ import org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationRequest;
import org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationResponse;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.api.records.ApplicationState;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.YarnRemoteException;
import org.apache.hadoop.yarn.factories.RecordFactory;
@ -256,12 +257,14 @@ public class TestClientRedirect {
ApplicationReport application = recordFactory
.newRecordInstance(ApplicationReport.class);
application.setApplicationId(applicationId);
application.setFinalApplicationStatus(FinalApplicationStatus.UNDEFINED);
if (amRunning) {
application.setState(ApplicationState.RUNNING);
application.setYarnApplicationState(YarnApplicationState.RUNNING);
} else if (amRestarting) {
application.setState(ApplicationState.SUBMITTED);
application.setYarnApplicationState(YarnApplicationState.SUBMITTED);
} else {
application.setState(ApplicationState.SUCCEEDED);
application.setYarnApplicationState(YarnApplicationState.FINISHED);
application.setFinalApplicationStatus(FinalApplicationStatus.SUCCEEDED);
}
String[] split = AMHOSTADDRESS.split(":");
application.setHost(split[0]);

View File

@ -32,8 +32,9 @@ import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetJobReportRequest;
import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetJobReportResponse;
import org.apache.hadoop.mapreduce.v2.api.records.JobReport;
import org.apache.hadoop.mapreduce.v2.api.records.JobState;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.api.records.ApplicationState;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.YarnRemoteException;
import org.apache.hadoop.yarn.ipc.RPCUtil;
@ -163,7 +164,7 @@ public class TestClientServiceDelegate {
private ApplicationReport getApplicationReport() {
ApplicationReport applicationReport = Records
.newRecord(ApplicationReport.class);
applicationReport.setState(ApplicationState.SUCCEEDED);
applicationReport.setYarnApplicationState(YarnApplicationState.FINISHED);
applicationReport.setUser("root");
applicationReport.setHost("N/A");
applicationReport.setName("N/A");
@ -172,6 +173,7 @@ public class TestClientServiceDelegate {
applicationReport.setFinishTime(0);
applicationReport.setTrackingUrl("N/A");
applicationReport.setDiagnostics("N/A");
applicationReport.setFinalApplicationStatus(FinalApplicationStatus.SUCCEEDED);
return applicationReport;
}

View File

@ -36,7 +36,6 @@ import org.apache.hadoop.mapreduce.Counters;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.TypeConverter;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationState;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppState;
import org.junit.Before;
import org.junit.After;

View File

@ -64,10 +64,10 @@ import org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoResponse;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.api.records.ApplicationState;
import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
import org.apache.hadoop.yarn.api.records.QueueInfo;
import org.apache.hadoop.yarn.api.records.YarnClusterMetrics;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
@ -159,7 +159,7 @@ public class TestYARNRunner extends TestCase {
ApplicationReport report = mock(ApplicationReport.class);
when(report.getApplicationId()).thenReturn(appId);
when(report.getDiagnostics()).thenReturn(failString);
when(report.getState()).thenReturn(ApplicationState.FAILED);
when(report.getYarnApplicationState()).thenReturn(YarnApplicationState.FAILED);
when(resourceMgrDelegate.getApplicationReport(appId)).thenReturn(report);
Credentials credentials = new Credentials();
File jobxml = new File(testWorkDir, "job.xml");

View File

@ -22,6 +22,7 @@ import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Stable;
import org.apache.hadoop.yarn.api.AMRMProtocol;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
/**
* <p>The finalization request sent by the <code>ApplicationMaster</code> to
@ -72,15 +73,15 @@ public interface FinishApplicationMasterRequest {
*/
@Public
@Stable
String getFinalState();
FinalApplicationStatus getFinalApplicationStatus();
/**
* Set <em>final state</em> of the <code>ApplicationMaster</code>
* @param finalState <em>final state</em> of the <code>ApplicationMaster</code>
* Set the <em>finish state</em> of the <code>ApplicationMaster</code>
* @param finishState <em>finish state</em> of the <code>ApplicationMaster</code>
*/
@Public
@Stable
void setFinalState(String finalState);
void setFinishApplicationStatus(FinalApplicationStatus finishState);
/**
* Get <em>diagnostic information</em> on application failure.

View File

@ -21,13 +21,14 @@ package org.apache.hadoop.yarn.api.protocolrecords.impl.pb;
import org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterRequest;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.api.records.ProtoBase;
import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationAttemptIdPBImpl;
import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto;
import org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProtoOrBuilder;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProtoOrBuilder;
import org.apache.hadoop.yarn.util.ProtoUtils;
public class FinishApplicationMasterRequestPBImpl extends ProtoBase<FinishApplicationMasterRequestProto> implements FinishApplicationMasterRequest {
@ -75,7 +76,6 @@ public class FinishApplicationMasterRequestPBImpl extends ProtoBase<FinishApplic
viaProto = false;
}
@Override
public ApplicationAttemptId getApplicationAttemptId() {
FinishApplicationMasterRequestProtoOrBuilder p = viaProto ? proto : builder;
@ -122,15 +122,22 @@ public class FinishApplicationMasterRequestPBImpl extends ProtoBase<FinishApplic
}
@Override
public String getFinalState() {
public FinalApplicationStatus getFinalApplicationStatus() {
FinishApplicationMasterRequestProtoOrBuilder p = viaProto ? proto : builder;
return p.getFinalState();
if (!p.hasFinalApplicationStatus()) {
return null;
}
return convertFromProtoFormat(p.getFinalApplicationStatus());
}
@Override
public void setFinalState(String state) {
public void setFinishApplicationStatus(FinalApplicationStatus finishState) {
maybeInitBuilder();
builder.setFinalState(state);
if (finishState == null) {
builder.clearFinalApplicationStatus();
return;
}
builder.setFinalApplicationStatus(convertToProtoFormat(finishState));
}
private ApplicationAttemptIdPBImpl convertFromProtoFormat(ApplicationAttemptIdProto p) {
@ -141,6 +148,13 @@ public class FinishApplicationMasterRequestPBImpl extends ProtoBase<FinishApplic
return ((ApplicationAttemptIdPBImpl)t).getProto();
}
private FinalApplicationStatus convertFromProtoFormat(FinalApplicationStatusProto s) {
return ProtoUtils.convertFromProtoFormat(s);
}
private FinalApplicationStatusProto convertToProtoFormat(FinalApplicationStatus s) {
return ProtoUtils.convertToProtoFormat(s);
}
}

View File

@ -23,13 +23,11 @@ import org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterRespo
import org.apache.hadoop.yarn.api.records.ProtoBase;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.api.records.impl.pb.ResourcePBImpl;
import org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto;
import org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProtoOrBuilder;
public class RegisterApplicationMasterResponsePBImpl
extends ProtoBase<RegisterApplicationMasterResponseProto>
implements RegisterApplicationMasterResponse {

View File

@ -42,8 +42,8 @@ public interface ApplicationMaster {
ApplicationStatus getStatus();
void setStatus(ApplicationStatus status);
ApplicationState getState();
void setState(ApplicationState state);
YarnApplicationState getState();
void setState(YarnApplicationState state);
String getClientToken();
void setClientToken(String clientToken);

View File

@ -36,7 +36,7 @@ import org.apache.hadoop.yarn.api.ClientRMProtocol;
* <li>Host on which the <code>ApplicationMaster</code>is running.</li>
* <li>RPC port of the <code>ApplicationMaster</code>.</li>
* <li>Tracking URL.</li>
* <li>{@link ApplicationState} of the application.</li>
* <li>{@link YarnApplicationState} of the application.</li>
* <li>Diagnostic information in case of errors.</li>
* <li>Start time of the application.</li>
* <li>Client token of the application (if security is enabled).</li>
@ -138,16 +138,16 @@ public interface ApplicationReport {
void setClientToken(String clientToken);
/**
* Get the <code>ApplicationState</code> of the application.
* @return <code>ApplicationState</code> of the application
* Get the <code>YarnApplicationState</code> of the application.
* @return <code>YarnApplicationState</code> of the application
*/
@Public
@Stable
ApplicationState getState();
YarnApplicationState getYarnApplicationState();
@Private
@Unstable
void setState(ApplicationState state);
void setYarnApplicationState(YarnApplicationState state);
/**
* Get the <em>diagnositic information</em> of the application in case of
@ -198,4 +198,17 @@ public interface ApplicationReport {
@Private
@Unstable
void setFinishTime(long finishTime);
/**
* Get the <em>final finish status</em> of the application.
*/
@Public
@Stable
FinalApplicationStatus getFinalApplicationStatus();
@Private
@Unstable
void setFinalApplicationStatus(FinalApplicationStatus finishState);
}

View File

@ -0,0 +1,42 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.api.records;
import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Stable;
/**
* Enumeration of various final states of an <code>Application</code>.
*/
@Public
@Stable
public enum FinalApplicationStatus {
/** Undefined state when the application has not yet finished. */
UNDEFINED,
/** Application which finished successfully. */
SUCCEEDED,
/** Application which failed. */
FAILED,
/** Application which was terminated by a user or admin. */
KILLED
}

View File

@ -22,11 +22,11 @@ import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Stable;
/**
* Enumeration of various states of an <code>Application</code>.
* Enumeration of various states of an <code>ApplicationMaster</code>.
*/
@Public
@Stable
public enum ApplicationState {
public enum YarnApplicationState {
/** Application which was just created. */
NEW,
@ -36,8 +36,8 @@ public enum ApplicationState {
/** Application which is currently running. */
RUNNING,
/** Application which completed successfully. */
SUCCEEDED,
/** Application which finished successfully. */
FINISHED,
/** Application which failed. */
FAILED,

View File

@ -19,22 +19,19 @@
package org.apache.hadoop.yarn.api.records.impl.pb;
import java.util.List;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationMaster;
import org.apache.hadoop.yarn.api.records.ApplicationState;
import org.apache.hadoop.yarn.api.records.ApplicationStatus;
import org.apache.hadoop.yarn.api.records.ProtoBase;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto;
import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationMasterProto;
import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationMasterProtoOrBuilder;
import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationStateProto;
import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationStatusProto;
import org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto;
import org.apache.hadoop.yarn.util.ProtoUtils;
public class ApplicationMasterPBImpl extends ProtoBase<ApplicationMasterProto> implements ApplicationMaster {
ApplicationMasterProto proto = ApplicationMasterProto.getDefaultInstance();
ApplicationMasterProto.Builder builder = null;
@ -89,7 +86,7 @@ public class ApplicationMasterPBImpl extends ProtoBase<ApplicationMasterProto> i
@Override
public ApplicationState getState() {
public YarnApplicationState getState() {
ApplicationMasterProtoOrBuilder p = viaProto ? proto : builder;
if (!p.hasState()) {
return null;
@ -98,7 +95,7 @@ public class ApplicationMasterPBImpl extends ProtoBase<ApplicationMasterProto> i
}
@Override
public void setState(ApplicationState state) {
public void setState(YarnApplicationState state) {
maybeInitBuilder();
if (state == null) {
builder.clearState();
@ -250,11 +247,11 @@ public class ApplicationMasterPBImpl extends ProtoBase<ApplicationMasterProto> i
builder.setDiagnostics(diagnostics);
}
private ApplicationStateProto convertToProtoFormat(ApplicationState e) {
private YarnApplicationStateProto convertToProtoFormat(YarnApplicationState e) {
return ProtoUtils.convertToProtoFormat(e);
}
private ApplicationState convertFromProtoFormat(ApplicationStateProto e) {
private YarnApplicationState convertFromProtoFormat(YarnApplicationStateProto e) {
return ProtoUtils.convertFromProtoFormat(e);
}

View File

@ -18,14 +18,16 @@
package org.apache.hadoop.yarn.api.records.impl.pb;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.api.records.ApplicationState;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.api.records.ProtoBase;
import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto;
import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto;
import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProtoOrBuilder;
import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationStateProto;
import org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto;
import org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto;
import org.apache.hadoop.yarn.util.ProtoUtils;
public class ApplicationReportPBImpl extends ProtoBase<ApplicationReportProto>
@ -87,12 +89,12 @@ implements ApplicationReport {
}
@Override
public ApplicationState getState() {
public YarnApplicationState getYarnApplicationState() {
ApplicationReportProtoOrBuilder p = viaProto ? proto : builder;
if (!p.hasState()) {
if (!p.hasYarnApplicationState()) {
return null;
}
return convertFromProtoFormat(p.getState());
return convertFromProtoFormat(p.getYarnApplicationState());
}
@Override
@ -138,6 +140,27 @@ implements ApplicationReport {
return p.getDiagnostics();
}
@Override
public long getStartTime() {
ApplicationReportProtoOrBuilder p = viaProto ? proto : builder;
return p.getStartTime();
}
@Override
public long getFinishTime() {
ApplicationReportProtoOrBuilder p = viaProto ? proto : builder;
return p.getFinishTime();
}
@Override
public FinalApplicationStatus getFinalApplicationStatus() {
ApplicationReportProtoOrBuilder p = viaProto ? proto : builder;
if (!p.hasFinalApplicationStatus()) {
return null;
}
return convertFromProtoFormat(p.getFinalApplicationStatus());
}
@Override
public void setApplicationId(ApplicationId applicationId) {
maybeInitBuilder();
@ -177,13 +200,13 @@ implements ApplicationReport {
}
@Override
public void setState(ApplicationState state) {
public void setYarnApplicationState(YarnApplicationState state) {
maybeInitBuilder();
if (state == null) {
builder.clearState();
builder.clearYarnApplicationState();
return;
}
builder.setState(convertToProtoFormat(state));
builder.setYarnApplicationState(convertToProtoFormat(state));
}
@Override
@ -232,38 +255,36 @@ implements ApplicationReport {
builder.setDiagnostics(diagnostics);
}
@Override
public ApplicationReportProto getProto() {
mergeLocalToProto();
proto = viaProto ? proto : builder.build();
viaProto = true;
return proto;
}
@Override
public long getStartTime() {
ApplicationReportProtoOrBuilder p = viaProto ? proto : builder;
return p.getStartTime();
}
@Override
public void setStartTime(long startTime) {
maybeInitBuilder();
builder.setStartTime(startTime);
}
@Override
public long getFinishTime() {
ApplicationReportProtoOrBuilder p = viaProto ? proto : builder;
return p.getFinishTime();
}
@Override
public void setFinishTime(long finishTime) {
maybeInitBuilder();
builder.setFinishTime(finishTime);
}
@Override
public void setFinalApplicationStatus(FinalApplicationStatus finishState) {
maybeInitBuilder();
if (finishState == null) {
builder.clearFinalApplicationStatus();
return;
}
builder.setFinalApplicationStatus(convertToProtoFormat(finishState));
}
@Override
public ApplicationReportProto getProto() {
mergeLocalToProto();
proto = viaProto ? proto : builder.build();
viaProto = true;
return proto;
}
private void mergeLocalToBuilder() {
if (this.applicationId != null
&& !((ApplicationIdPBImpl) this.applicationId).getProto().equals(
@ -291,16 +312,25 @@ implements ApplicationReport {
return ((ApplicationIdPBImpl) t).getProto();
}
private ApplicationState convertFromProtoFormat(ApplicationStateProto s) {
return ProtoUtils.convertFromProtoFormat(s);
}
private ApplicationStateProto convertToProtoFormat(ApplicationState s) {
return ProtoUtils.convertToProtoFormat(s);
}
private ApplicationIdPBImpl convertFromProtoFormat(
ApplicationIdProto applicationId) {
return new ApplicationIdPBImpl(applicationId);
}
private YarnApplicationState convertFromProtoFormat(YarnApplicationStateProto s) {
return ProtoUtils.convertFromProtoFormat(s);
}
private YarnApplicationStateProto convertToProtoFormat(YarnApplicationState s) {
return ProtoUtils.convertToProtoFormat(s);
}
private FinalApplicationStatus convertFromProtoFormat(FinalApplicationStatusProto s) {
return ProtoUtils.convertFromProtoFormat(s);
}
private FinalApplicationStatusProto convertToProtoFormat(FinalApplicationStatus s) {
return ProtoUtils.convertToProtoFormat(s);
}
}

View File

@ -20,18 +20,20 @@ package org.apache.hadoop.yarn.util;
import java.nio.ByteBuffer;
import org.apache.hadoop.yarn.api.records.ApplicationState;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.api.records.ContainerState;
import org.apache.hadoop.yarn.api.records.LocalResourceType;
import org.apache.hadoop.yarn.api.records.LocalResourceVisibility;
import org.apache.hadoop.yarn.api.records.QueueACL;
import org.apache.hadoop.yarn.api.records.QueueState;
import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationStateProto;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto;
import org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto;
import org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceTypeProto;
import org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceVisibilityProto;
import org.apache.hadoop.yarn.proto.YarnProtos.QueueACLProto;
import org.apache.hadoop.yarn.proto.YarnProtos.QueueStateProto;
import org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto;
import com.google.protobuf.ByteString;
@ -51,13 +53,24 @@ public class ProtoUtils {
/*
* ApplicationState
* YarnApplicationState
*/
public static ApplicationStateProto convertToProtoFormat(ApplicationState e) {
return ApplicationStateProto.valueOf(e.name());
public static YarnApplicationStateProto convertToProtoFormat(YarnApplicationState e) {
return YarnApplicationStateProto.valueOf(e.name());
}
public static ApplicationState convertFromProtoFormat(ApplicationStateProto e) {
return ApplicationState.valueOf(e.name());
public static YarnApplicationState convertFromProtoFormat(YarnApplicationStateProto e) {
return YarnApplicationState.valueOf(e.name());
}
/*
* FinalApplicationStatus
*/
private static String FINAL_APPLICATION_STATUS_PREFIX = "APP_";
public static FinalApplicationStatusProto convertToProtoFormat(FinalApplicationStatus e) {
return FinalApplicationStatusProto.valueOf(FINAL_APPLICATION_STATUS_PREFIX + e.name());
}
public static FinalApplicationStatus convertFromProtoFormat(FinalApplicationStatusProto e) {
return FinalApplicationStatus.valueOf(e.name().replace(FINAL_APPLICATION_STATUS_PREFIX, ""));
}
/*
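Proto2 enum constants share the enclosing file's namespace, so FinalApplicationStatusProto values carry an APP_ prefix on the wire (FAILED and KILLED would otherwise clash with YarnApplicationStateProto defined in the same yarn_protos.proto); the conversion methods above add and strip that prefix. A quick round-trip sketch, assuming the generated YarnProtos classes are on the classpath:

import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto;
import org.apache.hadoop.yarn.util.ProtoUtils;

public class FinalStatusRoundTrip {
  public static void main(String[] args) {
    // Java enum -> proto enum: SUCCEEDED is written as APP_SUCCEEDED.
    FinalApplicationStatusProto proto =
        ProtoUtils.convertToProtoFormat(FinalApplicationStatus.SUCCEEDED);
    // Proto enum -> Java enum: the APP_ prefix is stripped again.
    FinalApplicationStatus status = ProtoUtils.convertFromProtoFormat(proto);
    System.out.println(proto.name() + " -> " + status.name());
  }
}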

View File

@ -76,14 +76,20 @@ message ContainerProto {
optional ContainerStatusProto container_status = 8;
}
enum ApplicationStateProto {
enum YarnApplicationStateProto {
NEW = 1;
SUBMITTED = 2;
RUNNING = 3;
RESTARTING = 4;
SUCCEEDED = 5;
FAILED = 6;
KILLED = 7;
FINISHED = 4;
FAILED = 5;
KILLED = 6;
}
enum FinalApplicationStatusProto {
APP_UNDEFINED = 0;
APP_SUCCEEDED = 1;
APP_FAILED = 2;
APP_KILLED = 3;
}
message ApplicationStatusProto {
@ -98,7 +104,7 @@ message ApplicationMasterProto {
optional int32 rpc_port = 3;
optional string trackingUrl = 4;
optional ApplicationStatusProto status = 5;
optional ApplicationStateProto state = 6;
optional YarnApplicationStateProto state = 6;
optional string client_token = 7;
optional int32 containerCount = 8;
optional int32 amFailCount = 9;
@ -140,12 +146,13 @@ message ApplicationReportProto {
optional int32 rpc_port = 6;
optional string client_token = 7;
optional ApplicationStatusProto status = 8;
optional ApplicationStateProto state = 9;
optional YarnApplicationStateProto yarn_application_state = 9;
optional ContainerProto masterContainer = 10;
optional string trackingUrl = 11;
optional string diagnostics = 12 [default = "N/A"];
optional int64 startTime = 13;
optional int64 finishTime = 14;
optional FinalApplicationStatusProto final_application_status = 15;
}
message NodeIdProto {

View File

@ -42,7 +42,7 @@ message FinishApplicationMasterRequestProto {
optional ApplicationAttemptIdProto application_attempt_id = 1;
optional string diagnostics = 2;
optional string tracking_url = 3;
optional string final_state = 4;
optional FinalApplicationStatusProto final_application_status = 4;
}
message FinishApplicationMasterResponseProto {

View File

@ -24,9 +24,10 @@ import java.util.List;
import org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.api.records.ApplicationState;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.api.records.Container;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ContainerState;
@ -242,8 +243,9 @@ public class BuilderUtils {
public static ApplicationReport newApplicationReport(
ApplicationId applicationId, String user, String queue, String name,
String host, int rpcPort, String clientToken, ApplicationState state,
String diagnostics, String url, long startTime, long finishTime) {
String host, int rpcPort, String clientToken, YarnApplicationState state,
String diagnostics, String url, long startTime, long finishTime,
FinalApplicationStatus finalStatus) {
ApplicationReport report = recordFactory
.newRecordInstance(ApplicationReport.class);
report.setApplicationId(applicationId);
@ -253,11 +255,12 @@ public class BuilderUtils {
report.setHost(host);
report.setRpcPort(rpcPort);
report.setClientToken(clientToken);
report.setState(state);
report.setYarnApplicationState(state);
report.setDiagnostics(diagnostics);
report.setTrackingUrl(url);
report.setStartTime(startTime);
report.setFinishTime(finishTime);
report.setFinalApplicationStatus(finalStatus);
return report;
}
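A call site of the widened factory method now supplies the final status as the trailing argument; a sketch with illustrative values only:

import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.util.BuilderUtils;

public class ReportFactorySketch {
  // Illustrative: build a report for an application that finished successfully.
  public static ApplicationReport finishedReport(ApplicationId appId) {
    return BuilderUtils.newApplicationReport(appId, "someuser", "default",
        "sleep-job", "host1", 8030, "", YarnApplicationState.FINISHED,
        "", "http://host1:8088/", 0L, 0L, FinalApplicationStatus.SUCCEEDED);
  }
}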

View File

@ -33,7 +33,9 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.api.records.URL;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
@ -181,4 +183,5 @@ public class ConverterUtils {
+ applicationAttmeptIdStr, n);
}
}
}

View File

@ -24,7 +24,8 @@ import java.util.List;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.api.records.ApplicationState;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.util.Records;
import com.google.common.collect.Iterators;
@ -39,8 +40,8 @@ public class MockApps {
"I18nApp<☯>");
static final Iterator<String> USERS = Iterators.cycle("dorothy", "tinman",
"scarecrow", "glinda", "nikko", "toto", "winkie", "zeke", "gulch");
static final Iterator<ApplicationState> STATES = Iterators.cycle(
ApplicationState.values());
static final Iterator<YarnApplicationState> STATES = Iterators.cycle(
YarnApplicationState.values());
static final Iterator<String> QUEUES = Iterators.cycle("a.a1", "a.a2",
"b.b1", "b.b2", "b.b3", "c.c1.c11", "c.c1.c12", "c.c1.c13",
"c.c2", "c.c3", "c.c4");
@ -74,18 +75,19 @@ public class MockApps {
public static ApplicationReport newApp(int i) {
final ApplicationId id = newAppID(i);
final ApplicationState state = newAppState();
final YarnApplicationState state = newAppState();
final String user = newUserName();
final String name = newAppName();
final String queue = newQueue();
final FinalApplicationStatus finishState = FinalApplicationStatus.UNDEFINED;
return new ApplicationReport() {
@Override public ApplicationId getApplicationId() { return id; }
@Override public String getUser() { return user; }
@Override public String getName() { return name; }
@Override public ApplicationState getState() { return state; }
@Override public YarnApplicationState getYarnApplicationState() { return state; }
@Override public String getQueue() { return queue; }
@Override public String getTrackingUrl() { return ""; }
@Override
@Override public FinalApplicationStatus getFinalApplicationStatus() { return finishState; }
public void setApplicationId(ApplicationId applicationId) {
// TODO Auto-generated method stub
@ -106,7 +108,7 @@ public class MockApps {
}
@Override
public void setState(ApplicationState state) {
public void setYarnApplicationState(YarnApplicationState state) {
// TODO Auto-generated method stub
}
@ -155,7 +157,6 @@ public class MockApps {
// TODO Auto-generated method stub
}
@Override
public long getStartTime() {
// TODO Auto-generated method stub
@ -177,6 +178,10 @@ public class MockApps {
// TODO Auto-generated method stub
}
@Override
public void setFinalApplicationStatus(FinalApplicationStatus finishState) {
// TODO Auto-generated method stub
}
};
}
@ -194,9 +199,10 @@ public class MockApps {
return id;
}
public static ApplicationState newAppState() {
public static YarnApplicationState newAppState() {
synchronized(STATES) {
return STATES.next();
}
}
}

View File

@ -176,7 +176,7 @@ public class ApplicationMasterService extends AbstractService implements
rmContext.getDispatcher().getEventHandler().handle(
new RMAppAttemptUnregistrationEvent(applicationAttemptId, request
.getTrackingUrl(), request.getFinalState(), request
.getTrackingUrl(), request.getFinalApplicationStatus(), request
.getDiagnostics()));
FinishApplicationMasterResponse response = recordFactory

View File

@ -20,6 +20,7 @@ package org.apache.hadoop.yarn.server.resourcemanager.rmapp;
import org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterRequest;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
@ -127,10 +128,10 @@ public interface RMApp extends EventHandler<RMAppEvent> {
StringBuilder getDiagnostics();
/**
* The final state of the AM when unregistering as in
* {@link FinishApplicationMasterRequest#setFinalState(String)}.
* @return the final state of the AM as set in
* {@link FinishApplicationMasterRequest#setFinalState(String)}.
* The final finish state of the AM when unregistering as in
* {@link FinishApplicationMasterRequest#setFinishApplicationStatus(FinalApplicationStatus)}.
* @return the final finish state of the AM as set in
* {@link FinishApplicationMasterRequest#setFinishApplicationStatus(FinalApplicationStatus)}.
*/
String getAMFinalState();
FinalApplicationStatus getFinalApplicationStatus();
}

View File

@ -32,9 +32,10 @@ import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.YarnException;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.api.records.ApplicationState;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
@ -194,13 +195,18 @@ public class RMAppImpl implements RMApp {
}
@Override
public String getAMFinalState() {
public FinalApplicationStatus getFinalApplicationStatus() {
this.readLock.lock();
try {
if (currentAttempt != null) {
return currentAttempt.getAMFinalState();
// The finish status is derived from the state machine's current state
// as a fall-back when the application has not unregistered itself
// (or never does so) by the time the report is requested.
if (currentAttempt != null
&& currentAttempt.getFinalApplicationStatus() != null) {
return currentAttempt.getFinalApplicationStatus();
}
return "UNKNOWN";
return createFinalApplicationStatus(this.stateMachine.getCurrentState());
} finally {
this.readLock.unlock();
}
@ -273,25 +279,43 @@ public class RMAppImpl implements RMApp {
return this.appStore;
}
private ApplicationState createApplicationState(RMAppState rmAppState) {
private YarnApplicationState createApplicationState(RMAppState rmAppState) {
switch(rmAppState) {
case NEW:
return ApplicationState.NEW;
return YarnApplicationState.NEW;
case SUBMITTED:
case ACCEPTED:
return ApplicationState.SUBMITTED;
return YarnApplicationState.SUBMITTED;
case RUNNING:
return ApplicationState.RUNNING;
return YarnApplicationState.RUNNING;
case FINISHED:
return ApplicationState.SUCCEEDED;
return YarnApplicationState.FINISHED;
case KILLED:
return ApplicationState.KILLED;
return YarnApplicationState.KILLED;
case FAILED:
return ApplicationState.FAILED;
return YarnApplicationState.FAILED;
}
throw new YarnException("Unknown state passed!");
}
private FinalApplicationStatus createFinalApplicationStatus(RMAppState state) {
switch(state) {
case NEW:
case SUBMITTED:
case ACCEPTED:
case RUNNING:
return FinalApplicationStatus.UNDEFINED;
// finished without a proper final state is the same as failed
case FINISHED:
case FAILED:
return FinalApplicationStatus.FAILED;
case KILLED:
return FinalApplicationStatus.KILLED;
}
throw new YarnException("Unknown state passed!");
}
@Override
public ApplicationReport createAndGetApplicationReport() {
this.readLock.lock();
@ -301,6 +325,7 @@ public class RMAppImpl implements RMApp {
String trackingUrl = "N/A";
String host = "N/A";
int rpcPort = -1;
FinalApplicationStatus finishState = getFinalApplicationStatus();
if (this.currentAttempt != null) {
trackingUrl = this.currentAttempt.getTrackingUrl();
clientToken = this.currentAttempt.getClientToken();
@ -311,7 +336,7 @@ public class RMAppImpl implements RMApp {
this.queue, this.name, host, rpcPort, clientToken,
createApplicationState(this.stateMachine.getCurrentState()),
this.diagnostics.toString(), trackingUrl,
this.startTime, this.finishTime);
this.startTime, this.finishTime, finishState);
} finally {
this.readLock.unlock();
}

View File

@ -22,6 +22,7 @@ import java.util.List;
import java.util.Set;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
import org.apache.hadoop.yarn.api.records.Container;
import org.apache.hadoop.yarn.api.records.ContainerStatus;
@ -88,10 +89,11 @@ public interface RMAppAttempt extends EventHandler<RMAppAttemptEvent> {
float getProgress();
/**
* The final state set by the AM.
* @return the final state that is set by the AM when unregistering itself.
* The final status set by the AM.
* @return the final status that is set by the AM when unregistering itself. May return null
* if the AM has not unregistered itself.
*/
String getAMFinalState();
FinalApplicationStatus getFinalApplicationStatus();
/**
* Nodes on which the containers for this {@link RMAppAttempt} ran.

View File

@ -31,6 +31,7 @@ import java.util.concurrent.locks.ReentrantReadWriteLock.WriteLock;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
import org.apache.hadoop.yarn.api.records.Container;
@ -109,7 +110,9 @@ public class RMAppAttemptImpl implements RMAppAttempt {
private String host = "N/A";
private int rpcPort;
private String trackingUrl = "N/A";
private String finalState = "N/A";
// Set to null initially. Will eventually get set
// if an RMAppAttemptUnregistrationEvent occurs
private FinalApplicationStatus finalStatus = null;
private final StringBuilder diagnostics = new StringBuilder();
private static final StateMachineFactory<RMAppAttemptImpl,
@ -268,10 +271,10 @@ public class RMAppAttemptImpl implements RMAppAttempt {
}
@Override
public String getAMFinalState() {
public FinalApplicationStatus getFinalApplicationStatus() {
this.readLock.lock();
try {
return this.finalState;
return this.finalStatus;
} finally {
this.readLock.unlock();
}
@ -730,10 +733,9 @@ public class RMAppAttemptImpl implements RMAppAttempt {
RMAppAttemptUnregistrationEvent unregisterEvent
= (RMAppAttemptUnregistrationEvent) event;
unregisterEvent.getFinalState();
appAttempt.diagnostics.append(unregisterEvent.getDiagnostics());
appAttempt.trackingUrl = unregisterEvent.getTrackingUrl();
appAttempt.finalState = unregisterEvent.getFinalState();
appAttempt.finalStatus = unregisterEvent.getFinalApplicationStatus();
// Tell the app and the scheduler
super.transition(appAttempt, event);

View File

@ -19,20 +19,21 @@
package org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.event;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptEvent;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptEventType;
public class RMAppAttemptUnregistrationEvent extends RMAppAttemptEvent {
private final String trackingUrl;
private final String finalState;
private final FinalApplicationStatus finalStatus;
private final String diagnostics;
public RMAppAttemptUnregistrationEvent(ApplicationAttemptId appAttemptId,
String trackingUrl, String finalState, String diagnostics) {
String trackingUrl, FinalApplicationStatus finalStatus, String diagnostics) {
super(appAttemptId, RMAppAttemptEventType.UNREGISTERED);
this.trackingUrl = trackingUrl;
this.finalState = finalState;
this.finalStatus = finalStatus;
this.diagnostics = diagnostics;
}
@ -40,8 +41,8 @@ public class RMAppAttemptUnregistrationEvent extends RMAppAttemptEvent {
return this.trackingUrl;
}
public String getFinalState() {
return this.finalState;
public FinalApplicationStatus getFinalApplicationStatus() {
return this.finalStatus;
}
public String getDiagnostics() {

View File

@ -50,6 +50,7 @@ class AppsBlock extends HtmlBlock {
th(".name", "Name").
th(".queue", "Queue").
th(".state", "State").
th(".finalstatus", "FinalStatus").
th(".progress", "Progress").
th(".ui", "Tracking UI").
th(".note", "Note")._()._().
@ -70,8 +71,8 @@ class AppsBlock extends HtmlBlock {
td(app.getUser().toString()).
td(app.getName().toString()).
td(app.getQueue().toString()).
td(app.getState() == RMAppState.FINISHED ? app.getAMFinalState() :
app.getState().toString()).
td(app.getState().toString()).
td(app.getFinalApplicationStatus().toString()).
td().
br().$title(percent)._(). // for sorting
div(_PROGRESSBAR).

View File

@ -89,9 +89,8 @@ public class RmController extends Controller {
ResponseInfo info = info("Application Overview").
_("User:", app.getUser()).
_("Name:", app.getName()).
_("State:", (app.getState() == RMAppState.FINISHED ?
app.getAMFinalState() : app.getState().toString())
).
_("State:", app.getState().toString()).
_("FinalStatus:", app.getFinalApplicationStatus().toString()).
_("Started:", Times.format(app.getStartTime())).
_("Elapsed:", StringUtils.formatTime(
Times.elapsed(app.getStartTime(), app.getFinishTime()))).

View File

@ -30,7 +30,7 @@ import org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterRequest
import org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterRequest;
import org.apache.hadoop.yarn.api.records.AMResponse;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.Container;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.Priority;
import org.apache.hadoop.yarn.api.records.Resource;
@ -100,7 +100,7 @@ public class MockAM {
public AMResponse allocate(
String host, int memory, int numContainers,
List<ContainerId> releases) throws Exception {
List reqs = createReq(new String[]{host}, memory, 1, numContainers);
List<ResourceRequest> reqs = createReq(new String[]{host}, memory, 1, numContainers);
return allocate(reqs, releases);
}
@ -151,7 +151,7 @@ public class MockAM {
FinishApplicationMasterRequest req = Records.newRecord(FinishApplicationMasterRequest.class);
req.setAppAttemptId(attemptId);
req.setDiagnostics("");
req.setFinalState("");
req.setFinishApplicationStatus(FinalApplicationStatus.SUCCEEDED);
req.setTrackingUrl("");
amRMProtocol.finishApplicationMaster(req);
}

View File

@ -22,10 +22,11 @@ import java.util.List;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.yarn.MockApps;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationMaster;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.api.records.ApplicationState;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.api.records.ApplicationStatus;
import org.apache.hadoop.yarn.api.records.Container;
import org.apache.hadoop.yarn.api.records.ContainerId;
@ -69,7 +70,7 @@ public abstract class MockAsm extends MockApps {
}
@Override
public ApplicationState getState() {
public YarnApplicationState getState() {
throw new UnsupportedOperationException("Not supported yet.");
}
@ -119,7 +120,7 @@ public abstract class MockAsm extends MockApps {
}
@Override
public void setState(ApplicationState state) {
public void setState(YarnApplicationState state) {
throw new UnsupportedOperationException("Not supported yet.");
}
@ -211,7 +212,7 @@ public abstract class MockAsm extends MockApps {
}
@Override
public String getAMFinalState() {
public FinalApplicationStatus getFinalApplicationStatus() {
throw new UnsupportedOperationException("Not supported yet.");
}
}
@ -274,6 +275,11 @@ public abstract class MockAsm extends MockApps {
public float getProgress() {
return (float)Math.random();
}
@Override
public FinalApplicationStatus getFinalApplicationStatus() {
return FinalApplicationStatus.UNDEFINED;
}
};
}

View File

@ -32,7 +32,7 @@ import org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationRequest;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationMaster;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.api.records.ApplicationState;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
import org.apache.hadoop.yarn.api.records.Container;
import org.apache.hadoop.yarn.api.records.Priority;

View File

@ -33,7 +33,7 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationRequest;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationMaster;
import org.apache.hadoop.yarn.api.records.ApplicationState;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
import org.apache.hadoop.yarn.api.records.Container;
import org.apache.hadoop.yarn.api.records.ContainerId;

View File

@ -26,7 +26,7 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationState;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.api.records.ApplicationStatus;
import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
import org.apache.hadoop.yarn.event.EventHandler;

View File

@ -26,7 +26,7 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationState;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.event.EventHandler;

View File

@ -28,7 +28,7 @@ import junit.framework.Assert;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationMaster;
import org.apache.hadoop.yarn.api.records.ApplicationState;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
import org.apache.hadoop.yarn.api.records.Container;
import org.apache.hadoop.yarn.api.records.ContainerId;

View File

@@ -19,6 +19,7 @@
package org.apache.hadoop.yarn.server.resourcemanager.rmapp;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.MockApps;
@@ -167,8 +168,8 @@ public class MockRMApp implements RMApp {
}
@Override
public String getAMFinalState() {
return "UNKNOWN";
public FinalApplicationStatus getFinalApplicationStatus() {
return FinalApplicationStatus.UNDEFINED;
};
}

View File

@@ -31,6 +31,7 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.MockApps;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.event.AsyncDispatcher;
import org.apache.hadoop.yarn.event.EventHandler;
@@ -192,10 +193,15 @@ public class TestRMAppTransitions {
}
private static void assertAppState(RMAppState state, RMApp application) {
Assert.assertEquals("application state should have been" + state,
Assert.assertEquals("application state should have been " + state,
state, application.getState());
}
private static void assertFinalAppStatus(FinalApplicationStatus status, RMApp application) {
Assert.assertEquals("Final application status should have been " + status,
status, application.getFinalApplicationStatus());
}
// test to make sure times are set when app finishes
private static void assertTimesAtFinish(RMApp application) {
assertStartTimeSet(application);
@@ -208,6 +214,7 @@
private static void assertKilled(RMApp application) {
assertTimesAtFinish(application);
assertAppState(RMAppState.KILLED, application);
assertFinalAppStatus(FinalApplicationStatus.KILLED, application);
StringBuilder diag = application.getDiagnostics();
Assert.assertEquals("application diagnostics is not correct",
"Application killed by user.", diag.toString());
@@ -224,6 +231,7 @@
private static void assertFailed(RMApp application, String regex) {
assertTimesAtFinish(application);
assertAppState(RMAppState.FAILED, application);
assertFinalAppStatus(FinalApplicationStatus.FAILED, application);
StringBuilder diag = application.getDiagnostics();
Assert.assertTrue("application diagnostics is not correct",
diag.toString().matches(regex));
@@ -261,6 +269,7 @@
application.handle(event);
assertStartTimeSet(application);
assertAppState(RMAppState.RUNNING, application);
assertFinalAppStatus(FinalApplicationStatus.UNDEFINED, application);
return application;
}
@@ -273,6 +282,8 @@
application.handle(event);
assertAppState(RMAppState.FINISHED, application);
assertTimesAtFinish(application);
// finished without a proper unregister implies failed
assertFinalAppStatus(FinalApplicationStatus.FAILED, application);
return application;
}
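The new assertion above encodes a policy worth spelling out: an application that reaches FINISHED without its AM having unregistered with an explicit status is reported as FAILED. Below is a minimal sketch of that rule as a standalone helper; the class and method are hypothetical and are not the RMAppImpl code this test exercises.
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
// Hypothetical helper, not taken from this patch: derive the final status to
// report once an application reaches the FINISHED state.
public final class FinalStatusRule {
  private FinalStatusRule() {
  }
  public static FinalApplicationStatus onFinish(FinalApplicationStatus reported) {
    // An AM that completed a proper unregister supplies SUCCEEDED, FAILED or
    // KILLED; anything else means the outcome was never reported.
    if (reported == null || reported == FinalApplicationStatus.UNDEFINED) {
      return FinalApplicationStatus.FAILED;
    }
    return reported;
  }
}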

View File

@@ -34,6 +34,7 @@ import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
import org.apache.hadoop.yarn.api.records.Container;
import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.event.AsyncDispatcher;
import org.apache.hadoop.yarn.event.EventHandler;
@@ -52,7 +53,9 @@ import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppFailedAttemptEve
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppRejectedEvent;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.event.RMAppAttemptContainerAllocatedEvent;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.event.RMAppAttemptLaunchFailedEvent;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.event.RMAppAttemptRegistrationEvent;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.event.RMAppAttemptRejectedEvent;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.event.RMAppAttemptUnregistrationEvent;
import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.ContainerAllocationExpirer;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.Allocation;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.YarnScheduler;
@@ -199,6 +202,7 @@ public class TestRMAppAttemptTransitions {
assertNull(applicationAttempt.getMasterContainer());
assertEquals(0.0, (double)applicationAttempt.getProgress(), 0.0001);
assertEquals(0, applicationAttempt.getRanNodes().size());
assertNull(applicationAttempt.getFinalApplicationStatus());
}
/**
@@ -212,6 +216,7 @@
assertNull(applicationAttempt.getMasterContainer());
assertEquals(0.0, (double)applicationAttempt.getProgress(), 0.0001);
assertEquals(0, applicationAttempt.getRanNodes().size());
assertNull(applicationAttempt.getFinalApplicationStatus());
// Check events
verify(masterService).
@@ -230,6 +235,7 @@
assertNull(applicationAttempt.getMasterContainer());
assertEquals(0.0, (double)applicationAttempt.getProgress(), 0.0001);
assertEquals(0, applicationAttempt.getRanNodes().size());
assertNull(applicationAttempt.getFinalApplicationStatus());
// Check events
verify(application).handle(any(RMAppRejectedEvent.class));
@@ -247,6 +253,7 @@
assertEquals(amContainer, applicationAttempt.getMasterContainer());
assertEquals(0.0, (double)applicationAttempt.getProgress(), 0.0001);
assertEquals(0, applicationAttempt.getRanNodes().size());
assertNull(applicationAttempt.getFinalApplicationStatus());
}
/**
@@ -259,6 +266,7 @@
assertNull(applicationAttempt.getMasterContainer());
assertEquals(0.0, (double)applicationAttempt.getProgress(), 0.0001);
assertEquals(0, applicationAttempt.getRanNodes().size());
assertNull(applicationAttempt.getFinalApplicationStatus());
// Check events
verify(application).handle(any(RMAppEvent.class));
@@ -299,6 +307,49 @@
verify(application, times(2)).handle(any(RMAppFailedAttemptEvent.class));
}
/**
* {@link RMAppAttemptState#LAUNCHED}
*/
private void testAppAttemptLaunchedState(Container container) {
assertEquals(RMAppAttemptState.LAUNCHED,
applicationAttempt.getAppAttemptState());
assertEquals(container, applicationAttempt.getMasterContainer());
// TODO - need to add more checks relevant to this state
}
/**
* {@link RMAppAttemptState#RUNNING}
*/
private void testAppAttemptRunningState(Container container,
String host, int rpcPort, String trackingUrl) {
assertEquals(RMAppAttemptState.RUNNING,
applicationAttempt.getAppAttemptState());
assertEquals(container, applicationAttempt.getMasterContainer());
assertEquals(host, applicationAttempt.getHost());
assertEquals(rpcPort, applicationAttempt.getRpcPort());
assertEquals(trackingUrl, applicationAttempt.getTrackingUrl());
// TODO - need to add more checks relevant to this state
}
/**
* {@link RMAppAttemptState#FINISHED}
*/
private void testAppAttemptFinishedState(Container container,
FinalApplicationStatus finalStatus,
String trackingUrl,
String diagnostics) {
assertEquals(RMAppAttemptState.FINISHED,
applicationAttempt.getAppAttemptState());
assertEquals(diagnostics, applicationAttempt.getDiagnostics());
assertEquals(trackingUrl, applicationAttempt.getTrackingUrl());
assertEquals(0, applicationAttempt.getJustFinishedContainers().size());
assertEquals(container, applicationAttempt.getMasterContainer());
assertEquals(finalStatus, applicationAttempt.getFinalApplicationStatus());
}
private void submitApplicationAttempt() {
ApplicationAttemptId appAttemptId = applicationAttempt.getAppAttemptId();
applicationAttempt.handle(
@@ -340,6 +391,27 @@
return container;
}
private void launchApplicationAttempt(Container container) {
applicationAttempt.handle(
new RMAppAttemptEvent(applicationAttempt.getAppAttemptId(),
RMAppAttemptEventType.LAUNCHED));
testAppAttemptLaunchedState(container);
}
private void runApplicationAttempt(Container container,
String host,
int rpcPort,
String trackingUrl) {
applicationAttempt.handle(
new RMAppAttemptRegistrationEvent(
applicationAttempt.getAppAttemptId(),
host, rpcPort, trackingUrl));
testAppAttemptRunningState(container, host, rpcPort, trackingUrl);
}
@Test
public void testNewToKilled() {
applicationAttempt.handle(
@@ -400,4 +472,37 @@
testAppAttemptFailedState(amContainer, diagnostics);
}
@Test
public void testUnregisterToKilledFinish() {
Container amContainer = allocateApplicationAttempt();
launchApplicationAttempt(amContainer);
runApplicationAttempt(amContainer, "host", 9999, "oldtrackingurl");
String trackingUrl = "newtrackingurl";
String diagnostics = "Killed by user";
FinalApplicationStatus finalStatus = FinalApplicationStatus.KILLED;
applicationAttempt.handle(
new RMAppAttemptUnregistrationEvent(
applicationAttempt.getAppAttemptId(),
trackingUrl, finalStatus, diagnostics));
testAppAttemptFinishedState(amContainer, finalStatus,
trackingUrl, diagnostics);
}
@Test
public void testUnregisterToSuccessfulFinish() {
Container amContainer = allocateApplicationAttempt();
launchApplicationAttempt(amContainer);
runApplicationAttempt(amContainer, "host", 9999, "oldtrackingurl");
String trackingUrl = "mytrackingurl";
String diagnostics = "Successful";
FinalApplicationStatus finalStatus = FinalApplicationStatus.SUCCEEDED;
applicationAttempt.handle(
new RMAppAttemptUnregistrationEvent(
applicationAttempt.getAppAttemptId(),
trackingUrl, finalStatus, diagnostics));
testAppAttemptFinishedState(amContainer, finalStatus,
trackingUrl, diagnostics);
}
}
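The two tests above exercise KILLED and SUCCEEDED outcomes through the unregister path. A FAILED outcome would follow the same pattern; the sketch below is illustrative only, written against the helpers introduced in this test class (allocateApplicationAttempt, launchApplicationAttempt, runApplicationAttempt, testAppAttemptFinishedState), and is not part of the patch.
// Illustrative sketch, not part of this patch: a third unregister scenario
// inside TestRMAppAttemptTransitions where the AM reports a FAILED outcome.
@Test
public void testUnregisterToFailedFinish() {
  Container amContainer = allocateApplicationAttempt();
  launchApplicationAttempt(amContainer);
  runApplicationAttempt(amContainer, "host", 9999, "oldtrackingurl");
  String trackingUrl = "failedtrackingurl";
  String diagnostics = "Application failed during execution";
  FinalApplicationStatus finalStatus = FinalApplicationStatus.FAILED;
  applicationAttempt.handle(
      new RMAppAttemptUnregistrationEvent(
          applicationAttempt.getAppAttemptId(),
          trackingUrl, finalStatus, diagnostics));
  testAppAttemptFinishedState(amContainer, finalStatus,
      trackingUrl, diagnostics);
}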

View File

@@ -25,7 +25,7 @@ import org.apache.hadoop.metrics2.MetricsRecordBuilder;
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
import static org.apache.hadoop.test.MetricsAsserts.*;
import static org.apache.hadoop.test.MockitoMaker.*;
import org.apache.hadoop.yarn.api.records.ApplicationState;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.server.resourcemanager.resource.Resource;
import org.apache.hadoop.yarn.server.resourcemanager.resource.Resources;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptState;