MAPREDUCE-5729. mapred job -list throws NPE (kasha)
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1559811 13f79535-47bb-0310-9956-ffa450edef68
Parent: 602f71a8da
Commit: c5a241f1dd
CHANGES.txt:

@@ -280,6 +280,8 @@ Release 2.4.0 - UNRELEASED
     MAPREDUCE-5724. JobHistoryServer does not start if HDFS is not running.
     (tucu)
 
+    MAPREDUCE-5729. mapred job -list throws NPE (kasha)
+
 Release 2.3.0 - UNRELEASED
 
   INCOMPATIBLE CHANGES
TypeConverter.java:

@@ -43,6 +43,7 @@ import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ApplicationReport;
+import org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport;
 import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
 import org.apache.hadoop.yarn.api.records.NodeReport;
 import org.apache.hadoop.yarn.api.records.QueueACL;
@@ -445,11 +446,18 @@ public class TypeConverter {
     jobStatus.setStartTime(application.getStartTime());
     jobStatus.setFinishTime(application.getFinishTime());
     jobStatus.setFailureInfo(application.getDiagnostics());
-    jobStatus.setNeededMem(application.getApplicationResourceUsageReport().getNeededResources().getMemory());
-    jobStatus.setNumReservedSlots(application.getApplicationResourceUsageReport().getNumReservedContainers());
-    jobStatus.setNumUsedSlots(application.getApplicationResourceUsageReport().getNumUsedContainers());
-    jobStatus.setReservedMem(application.getApplicationResourceUsageReport().getReservedResources().getMemory());
-    jobStatus.setUsedMem(application.getApplicationResourceUsageReport().getUsedResources().getMemory());
+    ApplicationResourceUsageReport resourceUsageReport =
+        application.getApplicationResourceUsageReport();
+    if (resourceUsageReport != null) {
+      jobStatus.setNeededMem(
+          resourceUsageReport.getNeededResources().getMemory());
+      jobStatus.setNumReservedSlots(
+          resourceUsageReport.getNumReservedContainers());
+      jobStatus.setNumUsedSlots(resourceUsageReport.getNumUsedContainers());
+      jobStatus.setReservedMem(
+          resourceUsageReport.getReservedResources().getMemory());
+      jobStatus.setUsedMem(resourceUsageReport.getUsedResources().getMemory());
+    }
     return jobStatus;
   }
 
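Note on the change above: the old code chained getters through application.getApplicationResourceUsageReport() five separate times, and that report can come back null on an ApplicationReport, which is what made `mapred job -list` throw the NPE. The fix fetches the report once and guards the whole block on a null check. Below is a minimal, self-contained sketch of the same guard pattern; the Report, Usage, and Status classes are illustrative stand-ins, not Hadoop types.

// Sketch of the guard applied above: fetch the usage data once and
// null-check it, instead of chaining getters that NPE when it is absent.
// Report, Usage, and Status are illustrative stand-ins, not Hadoop classes.
class Usage {
  int usedContainers = 3;
  int usedMemoryMb = 2048;
}

class Report {
  Usage usage;                        // may be null: no usage data attached
  Usage getUsage() { return usage; }
}

class Status {
  int numUsedSlots;                   // stays 0 when no usage data is available
  int usedMem;
}

public class NullGuardSketch {
  static Status fromReport(Report report) {
    Status status = new Status();
    Usage usage = report.getUsage();  // single fetch
    if (usage != null) {              // guard the whole block
      status.numUsedSlots = usage.usedContainers;
      status.usedMem = usage.usedMemoryMb;
    }
    return status;
  }

  public static void main(String[] args) {
    Status s = fromReport(new Report());  // usage is null: no NPE, defaults kept
    System.out.println(s.numUsedSlots + " slots, " + s.usedMem + " MB");
  }
}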
TestTypeConverter.java:

@@ -23,8 +23,6 @@ import static org.mockito.Mockito.when;
 import java.util.ArrayList;
 import java.util.List;
 
-import junit.framework.Assert;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.JobStatus.State;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
@@ -40,6 +38,7 @@ import org.apache.hadoop.yarn.api.records.QueueState;
 import org.apache.hadoop.yarn.api.records.Resource;
 import org.apache.hadoop.yarn.api.records.YarnApplicationState;
 import org.apache.hadoop.yarn.util.Records;
+import org.junit.Assert;
 import org.junit.Test;
 import org.mockito.Mockito;
 
@@ -112,6 +111,14 @@ public class TestTypeConverter {
     when(mockReport.getUser()).thenReturn("dummy-user");
     when(mockReport.getQueue()).thenReturn("dummy-queue");
     String jobFile = "dummy-path/job.xml";
+
+    try {
+      JobStatus status = TypeConverter.fromYarn(mockReport, jobFile);
+    } catch (NullPointerException npe) {
+      Assert.fail("Type converstion from YARN fails for jobs without " +
+          "ApplicationUsageReport");
+    }
+
     ApplicationResourceUsageReport appUsageRpt = Records
         .newRecord(ApplicationResourceUsageReport.class);
     Resource r = Records.newRecord(Resource.class);
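Note on the test above: at the point of the try/catch, getApplicationResourceUsageReport() has not been stubbed on mockReport, so Mockito's default answer is null, which is exactly the condition that used to trigger the NPE. Below is a standalone sketch of the same testing idea with illustrative interfaces in place of the YARN records; it assumes JUnit 4 and Mockito on the classpath.

// Standalone sketch of the regression-test idea: an unstubbed Mockito getter
// returns null, and the converter must tolerate that instead of throwing.
// Report and Usage are illustrative interfaces, not YARN types.
import static org.mockito.Mockito.mock;

import org.junit.Assert;
import org.junit.Test;

public class NullUsageReportTest {

  interface Usage { int getUsedContainers(); }
  interface Report { Usage getUsage(); }

  // Converter under test, guarded the same way fromYarn() now is.
  static int usedSlots(Report report) {
    Usage usage = report.getUsage();
    return usage != null ? usage.getUsedContainers() : 0;
  }

  @Test
  public void conversionToleratesMissingUsageReport() {
    Report mockReport = mock(Report.class);  // getUsage() unstubbed -> returns null
    try {
      Assert.assertEquals(0, usedSlots(mockReport));
    } catch (NullPointerException npe) {
      Assert.fail("conversion must not NPE when the usage report is missing");
    }
  }
}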