MAPREDUCE-4927. Historyserver 500 error due to NPE when accessing specific counters page for failed job. Contributed by Ashwin Shankar

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1483974 13f79535-47bb-0310-9956-ffa450edef68
Jason Darrell Lowe 2013-05-17 20:19:48 +00:00
parent 0b668c21b2
commit ca2265b581
3 changed files with 14 additions and 2 deletions


@@ -426,6 +426,9 @@ Release 2.0.5-beta - UNRELEASED
    MAPREDUCE-5244. Two functions changed their visibility in JobStatus.
    (zjshen via tucu)
+
+    MAPREDUCE-4927. Historyserver 500 error due to NPE when accessing specific
+    counters page for failed job. (Ashwin Shankar via jlowe)

Release 2.0.4-alpha - 2013-04-25

  INCOMPATIBLE CHANGES

@@ -991,6 +994,9 @@ Release 0.23.8 - UNRELEASED
    MAPREDUCE-5147. Maven build should create
    hadoop-mapreduce-client-app-VERSION.jar directly (Robert Parker via tgraves)
+
+    MAPREDUCE-4927. Historyserver 500 error due to NPE when accessing specific
+    counters page for failed job. (Ashwin Shankar via jlowe)

Release 0.23.7 - UNRELEASED

  INCOMPATIBLE CHANGES


@@ -143,8 +143,9 @@ private void populateMembers(AppContext ctx) {
      Map<TaskId, Task> tasks = job.getTasks();
      for(Map.Entry<TaskId, Task> entry : tasks.entrySet()) {
        long value = 0;
-       CounterGroup group = entry.getValue().getCounters()
-         .getGroup($(COUNTER_GROUP));
+       Counters counters = entry.getValue().getCounters();
+       CounterGroup group = (counters != null) ? counters
+         .getGroup($(COUNTER_GROUP)) : null;
        if(group != null) {
          Counter c = group.findCounter($(COUNTER_NAME));
          if(c != null) {
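
The guard above replaces an unconditional getGroup() call: a task that never ran (for example a failed task) can return null from getCounters(), and the old code dereferenced that result directly. A minimal standalone sketch of the same pattern, using simplified stand-in types rather than the real Hadoop Counters/CounterGroup classes:

import java.util.HashMap;
import java.util.Map;

public class NullSafeCounterLookup {

    // Simplified stand-ins for the Hadoop counter classes; illustration only,
    // not the real org.apache.hadoop.mapreduce API.
    static class CounterGroupStub {
        private final Map<String, Long> counters = new HashMap<>();
        Long findCounter(String name) { return counters.get(name); }
    }

    static class CountersStub {
        private final Map<String, CounterGroupStub> groups = new HashMap<>();
        CounterGroupStub getGroup(String name) { return groups.get(name); }
    }

    // Mirrors the patched logic: tolerate tasks whose counters are null
    // instead of dereferencing them unconditionally.
    static long counterValueOrZero(CountersStub counters, String group, String name) {
        long value = 0; // same default as in populateMembers
        CounterGroupStub g = (counters != null) ? counters.getGroup(group) : null;
        if (g != null) {
            Long c = g.findCounter(name);
            if (c != null) {
                value = c;
            }
        }
        return value;
    }

    public static void main(String[] args) {
        // A task with no counters now falls through to the default value
        // instead of throwing the NullPointerException behind the 500 page.
        System.out.println(counterValueOrZero(null, "FileSystemCounter", "HDFS_BYTES_READ")); // prints 0
    }
}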


@@ -182,6 +182,11 @@ public static Map<String, String> getTaskParams(AppContext appContext) {
  @Test public void testSingleCounterView() {
    AppContext appContext = new TestAppContext();
+   Job job = appContext.getAllJobs().values().iterator().next();
+   // add a failed task to the job without any counters
+   Task failedTask = MockJobs.newTask(job.getID(), 2, 1, true);
+   Map<TaskId,Task> tasks = job.getTasks();
+   tasks.put(failedTask.getID(), failedTask);
    Map<String, String> params = getJobParams(appContext);
    params.put(AMParams.COUNTER_GROUP,
      "org.apache.hadoop.mapreduce.FileSystemCounter");