From 8dc0d5af432cc4fc34630c0dcfada6823b1abd5c Mon Sep 17 00:00:00 2001
From: Jason Darrell Lowe
Date: Thu, 13 Jun 2013 21:01:37 +0000
Subject: [PATCH] MAPREDUCE-4019. -list-attempt-ids is not working. Contributed by Ashwin Shankar, Devaraj K, and B Anil Kumar

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1492868 13f79535-47bb-0310-9956-ffa450edef68
---
 hadoop-mapreduce-project/CHANGES.txt       |  6 ++
 .../apache/hadoop/mapreduce/tools/CLI.java | 41 ++++----
 .../hadoop/mapreduce/tools/TestCLI.java    | 95 +++++++++++++++++++
 3 files changed, 123 insertions(+), 19 deletions(-)
 create mode 100644 hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/tools/TestCLI.java

diff --git a/hadoop-mapreduce-project/CHANGES.txt b/hadoop-mapreduce-project/CHANGES.txt
index 26adf9eccc5..f33be9860ff 100644
--- a/hadoop-mapreduce-project/CHANGES.txt
+++ b/hadoop-mapreduce-project/CHANGES.txt
@@ -514,6 +514,9 @@ Release 2.1.0-beta - UNRELEASED
     MAPREDUCE-5259. TestTaskLog fails on Windows because of path separators
     missmatch. (Ivan Mitic via cnauroth)
 
+    MAPREDUCE-4019. -list-attempt-ids is not working (Ashwin Shankar,
+    Devaraj K, and B Anil Kumar via jlowe)
+
   BREAKDOWN OF HADOOP-8562 SUBTASKS
 
     MAPREDUCE-4739. Some MapReduce tests fail to find winutils.
@@ -1132,6 +1135,9 @@ Release 0.23.9 - UNRELEASED
     MAPREDUCE-5315. DistCp reports success even on failure. (mithun and jlowe
     via daryn)
 
+    MAPREDUCE-4019. -list-attempt-ids is not working (Ashwin Shankar,
+    Devaraj K, and B Anil Kumar via jlowe)
+
 Release 0.23.8 - 2013-06-05
 
   INCOMPATIBLE CHANGES
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java
index 37412e018d5..0d6a68ae331 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java
@@ -26,6 +26,7 @@ import java.util.Set;
 import java.util.HashSet;
 import java.util.Arrays;
 
+import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -67,7 +68,9 @@ public class CLI extends Configured implements Tool {
   protected Cluster cluster;
   private final Set<String> taskStates = new HashSet<String>(
       Arrays.asList("pending", "running", "completed", "failed", "killed"));
-  
+  private static final Set<String> taskTypes = new HashSet<String>(
+      Arrays.asList("MAP", "REDUCE"));
+
   public CLI() {
   }
 
@@ -219,6 +222,11 @@ public class CLI extends Configured implements Tool {
       taskType = argv[2];
       taskState = argv[3];
       displayTasks = true;
+      if (!taskTypes.contains(taskType.toUpperCase())) {
+        System.out.println("Error: Invalid task-type: "+taskType);
+        displayUsage(cmd);
+        return exitCode;
+      }
     } else if ("-logs".equals(cmd)) {
       if (argv.length == 2 || argv.length ==3) {
         logs = true;
@@ -238,7 +246,7 @@
     }
 
     // initialize cluster
-    cluster = new Cluster(getConf());
+    cluster = createCluster();
 
     // Submit the request
     try {
@@ -371,6 +379,10 @@
     return exitCode;
   }
 
+  Cluster createCluster() throws IOException {
+    return new Cluster(getConf());
+  }
+
   private String getJobPriorityNames() {
     StringBuffer sb = new StringBuffer();
     for (JobPriority p : JobPriority.values()) {
@@ -379,22 +391,18 @@
     return sb.substring(0, sb.length()-1);
   }
 
-  private String getTaskTypess() {
-    StringBuffer sb = new StringBuffer();
-    for (TaskType t : TaskType.values()) {
-      sb.append(t.name()).append(" ");
-    }
-    return sb.substring(0, sb.length()-1);
+  private String getTaskTypes() {
+    return StringUtils.join(taskTypes, " ");
   }
-  
+
   /**
    * Display usage of the command-line tool and terminate execution.
    */
   private void displayUsage(String cmd) {
     String prefix = "Usage: CLI ";
     String jobPriorityValues = getJobPriorityNames();
-    String taskTypes = getTaskTypess();
     String taskStates = "running, completed";
+
     if ("-submit".equals(cmd)) {
       System.err.println(prefix + "[" + cmd + " <job-file>]");
     } else if ("-status".equals(cmd) || "-kill".equals(cmd)) {
@@ -422,7 +430,7 @@
     } else if ("-list-attempt-ids".equals(cmd)) {
       System.err.println(prefix + "[" + cmd +
          " <job-id> <task-type> <task-state>]. " +
-         "Valid values for <task-type> are " + taskTypes + ". " +
+         "Valid values for <task-type> are " + getTaskTypes() + ". " +
          "Valid values for <task-state> are " + taskStates);
     } else if ("-logs".equals(cmd)) {
       System.err.println(prefix + "[" + cmd +
@@ -443,7 +451,7 @@
       System.err.printf("\t[-list-blacklisted-trackers]%n");
       System.err.println("\t[-list-attempt-ids <job-id> <task-type> " +
         "<task-state>]. " +
-        "Valid values for <task-type> are " + taskTypes + ". " +
+        "Valid values for <task-type> are " + getTaskTypes() + ". " +
         "Valid values for <task-state> are " + taskStates);
       System.err.printf("\t[-kill-task <task-attempt-id>]%n");
       System.err.printf("\t[-fail-task <task-attempt-id>]%n");
@@ -563,18 +571,13 @@ public class CLI extends Configured implements Tool {
    */
   protected void displayTasks(Job job, String type, String state)
   throws IOException, InterruptedException {
-    
+
     if (!taskStates.contains(state)) {
       throw new java.lang.IllegalArgumentException("Invalid state: " + state +
           ". Valid states for task are: pending, running, completed, failed, killed.");
     }
     TaskReport[] reports=null;
-    try{
-      reports = job.getTaskReports(TaskType.valueOf(type));
-    }catch(IllegalArgumentException e){
-      throw new IllegalArgumentException("Invalid type: " + type +
-          ". Valid types for task are: MAP, REDUCE, JOB_SETUP, JOB_CLEANUP, TASK_CLEANUP.");
-    }
+    reports = job.getTaskReports(TaskType.valueOf(type.toUpperCase()));
     for (TaskReport report : reports) {
       TIPStatus status = report.getCurrentStatus();
       if ((state.equals("pending") && status ==TIPStatus.PENDING) ||
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/tools/TestCLI.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/tools/TestCLI.java
new file mode 100644
index 00000000000..8693defd1a2
--- /dev/null
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/tools/TestCLI.java
@@ -0,0 +1,95 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.mapreduce.tools;
+
+import static org.junit.Assert.*;
+
+import org.apache.hadoop.mapreduce.Cluster;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.JobID;
+import org.apache.hadoop.mapreduce.TaskReport;
+import org.apache.hadoop.mapreduce.TaskType;
+import org.junit.Test;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.spy;
+import static org.mockito.Mockito.doReturn;
+
+public class TestCLI {
+  private static String jobIdStr = "job_1015298225799_0015";
+
+  @Test
+  public void testListAttemptIdsWithValidInput() throws Exception {
+    JobID jobId = JobID.forName(jobIdStr);
+    Cluster mockCluster = mock(Cluster.class);
+    Job job = mock(Job.class);
+    CLI cli = spy(new CLI());
+
+    doReturn(mockCluster).when(cli).createCluster();
+    when(job.getTaskReports(TaskType.MAP)).thenReturn(
+        getTaskReports(jobId, TaskType.MAP));
+    when(job.getTaskReports(TaskType.REDUCE)).thenReturn(
+        getTaskReports(jobId, TaskType.REDUCE));
+    when(mockCluster.getJob(jobId)).thenReturn(job);
+
+    int retCode_MAP = cli.run(new String[] { "-list-attempt-ids", jobIdStr,
+        "MAP", "running" });
+    // testing case insensitive behavior
+    int retCode_map = cli.run(new String[] { "-list-attempt-ids", jobIdStr,
+        "map", "running" });
+
+    int retCode_REDUCE = cli.run(new String[] { "-list-attempt-ids", jobIdStr,
+        "REDUCE", "running" });
+
+    assertEquals("MAP is a valid input,exit code should be 0", 0, retCode_MAP);
+    assertEquals("map is a valid input,exit code should be 0", 0, retCode_map);
+    assertEquals("REDUCE is a valid input,exit code should be 0", 0,
+        retCode_REDUCE);
+
+    verify(job, times(2)).getTaskReports(TaskType.MAP);
+    verify(job, times(1)).getTaskReports(TaskType.REDUCE);
+  }
+
+  @Test
+  public void testListAttemptIdsWithInvalidInputs() throws Exception {
+    JobID jobId = JobID.forName(jobIdStr);
+    Cluster mockCluster = mock(Cluster.class);
+    Job job = mock(Job.class);
+    CLI cli = spy(new CLI());
+
+    doReturn(mockCluster).when(cli).createCluster();
+    when(mockCluster.getJob(jobId)).thenReturn(job);
+
+    int retCode_JOB_SETUP = cli.run(new String[] { "-list-attempt-ids",
+        jobIdStr, "JOB_SETUP", "running" });
+    int retCode_JOB_CLEANUP = cli.run(new String[] { "-list-attempt-ids",
+        jobIdStr, "JOB_CLEANUP", "running" });
+
+    assertEquals("JOB_SETUP is a invalid input,exit code should be -1", -1,
+        retCode_JOB_SETUP);
+    assertEquals("JOB_CLEANUP is a invalid input,exit code should be -1", -1,
+        retCode_JOB_CLEANUP);
+
+  }
+
+  private TaskReport[] getTaskReports(JobID jobId, TaskType type) {
+    return new TaskReport[] { new TaskReport(), new TaskReport() };
+  }
+}
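
Note (not part of the patch): a minimal sketch of exercising the fixed command programmatically, mirroring what TestCLI drives through mocks. It assumes a cluster reachable through the default Configuration, and the job id shown is only a placeholder; the class name ListAttemptIdsExample is hypothetical. The equivalent shell form is "mapred job -list-attempt-ids <job-id> <task-type> <task-state>", where MAP/map/REDUCE now pass the task-type check and values such as JOB_SETUP are rejected with exit code -1.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.tools.CLI;
import org.apache.hadoop.util.ToolRunner;

public class ListAttemptIdsExample {
  public static void main(String[] args) throws Exception {
    // Placeholder job id; substitute a real id from the target cluster.
    // Task type is case-insensitive after this change ("map" == "MAP").
    int exitCode = ToolRunner.run(new Configuration(), new CLI(),
        new String[] { "-list-attempt-ids", "job_1015298225799_0015",
            "map", "running" });
    System.exit(exitCode);
  }
}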