MAPREDUCE-3518. mapred queue -info <queue> -showJobs throws NPE. (Jonathan Eagles via mahadev) - Merging r1213464 from trunk
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-0.23@1213493 13f79535-47bb-0310-9956-ffa450edef68
commit 62edad08e8
parent c24b4809de
@@ -75,6 +75,9 @@ Release 0.23.1 - Unreleased
     MAPREDUCE-3369. Migrate MR1 tests to run on MR2 using the new interfaces
     introduced in MAPREDUCE-3169. (Ahmed Radwan via tomwhite)
 
+    MAPREDUCE-3518. mapred queue -info <queue> -showJobs throws NPE.
+    (Jonathan Eagles via mahadev)
+
   OPTIMIZATIONS
 
   BUG FIXES
@@ -432,7 +432,6 @@ public class JobClient extends CLI {
 
   }
 
-  Cluster cluster;
   /**
   * Ugi of the client. We store this ugi when the client is created and
   * then make sure that the same ugi is used to run the various protocols.
@@ -55,7 +55,7 @@ import org.apache.hadoop.yarn.logaggregation.LogDumper;
 @InterfaceStability.Stable
 public class CLI extends Configured implements Tool {
   private static final Log LOG = LogFactory.getLog(CLI.class);
-  private Cluster cluster;
+  protected Cluster cluster;
 
   public CLI() {
   }
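The two Java hunks above work together: JobClient drops its own Cluster field and instead inherits CLI's now-protected one. As a minimal, standalone sketch of the duplicate-field pattern being removed (the class and field names below are illustrative, not from the patch), a private field in a base class and a same-named field in a subclass are two distinct fields, so base-class code can still observe null even after the subclass has populated its own copy:

// Hypothetical illustration of the duplicate-field pattern; not Hadoop code.
class BaseCli {
  private Object cluster;            // analogous to the formerly private CLI.cluster

  void show() {
    // Base-class code reads its own field, which was never assigned, so this throws NPE.
    System.out.println(cluster.toString());
  }
}

class SubClient extends BaseCli {
  Object cluster = new Object();     // analogous to JobClient's removed "Cluster cluster;" field
}

public class ShadowingDemo {
  public static void main(String[] args) {
    new SubClient().show();          // throws NullPointerException
  }
}

Making the base field protected and deleting the subclass duplicate leaves a single field that both classes share, which is consistent with the NPE reported for "mapred queue -info <queue> -showJobs".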
@@ -19,21 +19,41 @@
 package org.apache.hadoop.mapred;
 
 import static org.junit.Assert.assertEquals;
+import static org.mockito.Matchers.isA;
+import static org.mockito.Mockito.atLeastOnce;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.verify;
 import static org.mockito.Mockito.when;
 
+import java.io.IOException;
+import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapreduce.Cluster;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.JobPriority;
+import org.apache.hadoop.mapreduce.JobStatus;
+import org.apache.hadoop.mapreduce.TaskType;
+import org.apache.hadoop.mapreduce.TaskReport;
 import org.junit.Test;
 
 public class JobClientUnitTest {
 
+  public class TestJobClient extends JobClient {
+
+    TestJobClient(JobConf jobConf) throws IOException {
+      super(jobConf);
+    }
+
+    void setCluster(Cluster cluster) {
+      this.cluster = cluster;
+    }
+  }
+
   @SuppressWarnings("deprecation")
   @Test
   public void testMapTaskReportsWithNullJob() throws Exception {
-    JobClient client = new JobClient();
+    TestJobClient client = new TestJobClient(new JobConf());
     Cluster mockCluster = mock(Cluster.class);
-    client.cluster = mockCluster;
+    client.setCluster(mockCluster);
     JobID id = new JobID("test",0);
 
     when(mockCluster.getJob(id)).thenReturn(null);
@@ -47,9 +67,9 @@ public class JobClientUnitTest {
   @SuppressWarnings("deprecation")
   @Test
   public void testReduceTaskReportsWithNullJob() throws Exception {
-    JobClient client = new JobClient();
+    TestJobClient client = new TestJobClient(new JobConf());
     Cluster mockCluster = mock(Cluster.class);
-    client.cluster = mockCluster;
+    client.setCluster(mockCluster);
     JobID id = new JobID("test",0);
 
     when(mockCluster.getJob(id)).thenReturn(null);
@@ -63,9 +83,9 @@ public class JobClientUnitTest {
   @SuppressWarnings("deprecation")
   @Test
   public void testSetupTaskReportsWithNullJob() throws Exception {
-    JobClient client = new JobClient();
+    TestJobClient client = new TestJobClient(new JobConf());
     Cluster mockCluster = mock(Cluster.class);
-    client.cluster = mockCluster;
+    client.setCluster(mockCluster);
     JobID id = new JobID("test",0);
 
     when(mockCluster.getJob(id)).thenReturn(null);
@@ -79,9 +99,9 @@ public class JobClientUnitTest {
   @SuppressWarnings("deprecation")
   @Test
   public void testCleanupTaskReportsWithNullJob() throws Exception {
-    JobClient client = new JobClient();
+    TestJobClient client = new TestJobClient(new JobConf());
     Cluster mockCluster = mock(Cluster.class);
-    client.cluster = mockCluster;
+    client.setCluster(mockCluster);
     JobID id = new JobID("test",0);
 
     when(mockCluster.getJob(id)).thenReturn(null);
@@ -91,4 +111,49 @@ public class JobClientUnitTest {
 
     verify(mockCluster).getJob(id);
   }
+
+  @Test
+  public void testShowJob() throws Exception {
+    TestJobClient client = new TestJobClient(new JobConf());
+    JobID jobID = new JobID("test", 0);
+
+    JobStatus mockJobStatus = mock(JobStatus.class);
+    when(mockJobStatus.getJobID()).thenReturn(jobID);
+    when(mockJobStatus.getState()).thenReturn(JobStatus.State.RUNNING);
+    when(mockJobStatus.getStartTime()).thenReturn(0L);
+    when(mockJobStatus.getUsername()).thenReturn("mockuser");
+    when(mockJobStatus.getQueue()).thenReturn("mockqueue");
+    when(mockJobStatus.getPriority()).thenReturn(JobPriority.NORMAL);
+    when(mockJobStatus.getNumUsedSlots()).thenReturn(1);
+    when(mockJobStatus.getNumReservedSlots()).thenReturn(1);
+    when(mockJobStatus.getUsedMem()).thenReturn(1024);
+    when(mockJobStatus.getReservedMem()).thenReturn(512);
+    when(mockJobStatus.getNeededMem()).thenReturn(2048);
+    when(mockJobStatus.getSchedulingInfo()).thenReturn("NA");
+
+    Job mockJob = mock(Job.class);
+    when(mockJob.getTaskReports(isA(TaskType.class))).thenReturn(new TaskReport[0]);
+
+    Cluster mockCluster = mock(Cluster.class);
+    when(mockCluster.getJob(jobID)).thenReturn(mockJob);
+
+    client.setCluster(mockCluster);
+
+
+    client.displayJobList(new JobStatus[] {mockJobStatus});
+    verify(mockJobStatus, atLeastOnce()).getJobID();
+    verify(mockJob, atLeastOnce()).getTaskReports(isA(TaskType.class));
+    verify(mockCluster, atLeastOnce()).getJob(jobID);
+    verify(mockJobStatus).getState();
+    verify(mockJobStatus).getStartTime();
+    verify(mockJobStatus).getUsername();
+    verify(mockJobStatus).getQueue();
+    verify(mockJobStatus).getPriority();
+    verify(mockJobStatus).getNumUsedSlots();
+    verify(mockJobStatus).getNumReservedSlots();
+    verify(mockJobStatus).getUsedMem();
+    verify(mockJobStatus).getReservedMem();
+    verify(mockJobStatus).getNeededMem();
+    verify(mockJobStatus).getSchedulingInfo();
+  }
 }
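As a follow-up usage note, here is a hedged sketch of driving the repaired display path programmatically. It assumes the branch-0.23 JobClient API: the JobConf-based constructor and getAllJobs() are long-standing JobClient methods, and displayJobList(JobStatus[]) is the method the new testShowJob above exercises; the class name ListJobsSketch and the empty configuration are placeholders, not part of the patch.

// Hypothetical driver, not part of the commit; a rough sketch only.
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.JobStatus;

public class ListJobsSketch {
  public static void main(String[] args) throws Exception {
    // Placeholder configuration; a real run would point at an actual cluster.
    JobConf conf = new JobConf();
    JobClient client = new JobClient(conf);

    // Fetch the known job statuses and render them with the same listing code
    // that testShowJob drives against a mocked JobStatus.
    JobStatus[] jobs = client.getAllJobs();
    client.displayJobList(jobs);
  }
}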