MAPREDUCE-5729. mapred job -list throws NPE (kasha)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1559812 13f79535-47bb-0310-9956-ffa450edef68
Karthik Kambatla 2014-01-20 19:30:12 +00:00
parent f1756982de
commit 57adfd42d4
3 changed files with 24 additions and 7 deletions
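The fix itself is a plain null guard in TypeConverter.fromYarn: the ApplicationResourceUsageReport is fetched once, and the memory/slot fields are only populated when a report is actually present. As a minimal, self-contained sketch of that pattern (UsageReport, JobSummary, and NullGuardSketch below are hypothetical stand-ins, not the Hadoop classes):

// Illustrative sketch only, not the Hadoop code: UsageReport and JobSummary are
// hypothetical stand-ins for YARN's ApplicationResourceUsageReport and
// MapReduce's JobStatus.
class UsageReport {
  int neededMem() { return 2048; }
  int usedMem()   { return 1024; }
}

class JobSummary {
  int neededMem; // stays 0 when never set
  int usedMem;
}

public class NullGuardSketch {

  // Pre-fix shape: the report is dereferenced unconditionally, so a null
  // report (no usage data for the application) throws a NullPointerException.
  static JobSummary convertUnsafe(UsageReport report) {
    JobSummary s = new JobSummary();
    s.neededMem = report.neededMem(); // NPE when report == null
    s.usedMem = report.usedMem();
    return s;
  }

  // Post-fix shape: check the report once and populate the resource fields
  // only when it is present, otherwise leave the defaults in place.
  static JobSummary convertGuarded(UsageReport report) {
    JobSummary s = new JobSummary();
    if (report != null) {
      s.neededMem = report.neededMem();
      s.usedMem = report.usedMem();
    }
    return s;
  }

  public static void main(String[] args) {
    System.out.println(convertGuarded(null).usedMem);              // 0, no NPE
    System.out.println(convertGuarded(new UsageReport()).usedMem); // 1024
  }
}

With the guard in place, an application without a usage report is converted with its resource fields left at the JobStatus defaults instead of the conversion throwing, which is what the new TestTypeConverter case further down exercises.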


@@ -132,6 +132,8 @@ Release 2.4.0 - UNRELEASED
     MAPREDUCE-5724. JobHistoryServer does not start if HDFS is not running.
     (tucu)
 
+    MAPREDUCE-5729. mapred job -list throws NPE (kasha)
+
 Release 2.3.0 - UNRELEASED
 
   INCOMPATIBLE CHANGES


@@ -43,6 +43,7 @@ import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ApplicationReport;
+import org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport;
 import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
 import org.apache.hadoop.yarn.api.records.NodeReport;
 import org.apache.hadoop.yarn.api.records.QueueACL;
@@ -445,11 +446,18 @@ public class TypeConverter {
     jobStatus.setStartTime(application.getStartTime());
     jobStatus.setFinishTime(application.getFinishTime());
     jobStatus.setFailureInfo(application.getDiagnostics());
-    jobStatus.setNeededMem(application.getApplicationResourceUsageReport().getNeededResources().getMemory());
-    jobStatus.setNumReservedSlots(application.getApplicationResourceUsageReport().getNumReservedContainers());
-    jobStatus.setNumUsedSlots(application.getApplicationResourceUsageReport().getNumUsedContainers());
-    jobStatus.setReservedMem(application.getApplicationResourceUsageReport().getReservedResources().getMemory());
-    jobStatus.setUsedMem(application.getApplicationResourceUsageReport().getUsedResources().getMemory());
+    ApplicationResourceUsageReport resourceUsageReport =
+        application.getApplicationResourceUsageReport();
+    if (resourceUsageReport != null) {
+      jobStatus.setNeededMem(
+          resourceUsageReport.getNeededResources().getMemory());
+      jobStatus.setNumReservedSlots(
+          resourceUsageReport.getNumReservedContainers());
+      jobStatus.setNumUsedSlots(resourceUsageReport.getNumUsedContainers());
+      jobStatus.setReservedMem(
+          resourceUsageReport.getReservedResources().getMemory());
+      jobStatus.setUsedMem(resourceUsageReport.getUsedResources().getMemory());
+    }
     return jobStatus;
   }


@@ -23,8 +23,6 @@ import static org.mockito.Mockito.when;
 import java.util.ArrayList;
 import java.util.List;
 
-import junit.framework.Assert;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.JobStatus.State;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
@@ -40,6 +38,7 @@ import org.apache.hadoop.yarn.api.records.QueueState;
 import org.apache.hadoop.yarn.api.records.Resource;
 import org.apache.hadoop.yarn.api.records.YarnApplicationState;
 import org.apache.hadoop.yarn.util.Records;
+import org.junit.Assert;
 import org.junit.Test;
 import org.mockito.Mockito;
@@ -112,6 +111,14 @@ public class TestTypeConverter {
     when(mockReport.getUser()).thenReturn("dummy-user");
     when(mockReport.getQueue()).thenReturn("dummy-queue");
     String jobFile = "dummy-path/job.xml";
+
+    try {
+      JobStatus status = TypeConverter.fromYarn(mockReport, jobFile);
+    } catch (NullPointerException npe) {
+      Assert.fail("Type conversion from YARN fails for jobs without " +
+          "ApplicationUsageReport");
+    }
+
     ApplicationResourceUsageReport appUsageRpt = Records
         .newRecord(ApplicationResourceUsageReport.class);
     Resource r = Records.newRecord(Resource.class);