merge MAPREDUCE-3944 from trunk
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-0.23@1297673 13f79535-47bb-0310-9956-ffa450edef68
Commit: 084f085b5f
Parent: 74360b15d3
@@ -91,6 +91,10 @@ Release 0.23.2 - UNRELEASED

    MAPREDUCE-2855. Passing a cached class-loader to ResourceBundle creator to
    minimize counter names lookup time. (Siddarth Seth via vinodkv)

    MAPREDUCE-3944. Change the history jobs/ webservice to return partial job
    info for a significant performance improvement. (Robert Joseph Evans via
    sseth)

  BUG FIXES

    MAPREDUCE-3918 proc_historyserver no longer in command line arguments for
@@ -287,7 +287,7 @@ public class CompletedJob implements org.apache.hadoop.mapreduce.v2.app.job.Job
  }

  //History data is leisurely loaded when task level data is requested
  private synchronized void loadFullHistoryData(boolean loadTasks,
  protected synchronized void loadFullHistoryData(boolean loadTasks,
      Path historyFileAbsolute) throws IOException {
    LOG.info("Loading history file: [" + historyFileAbsolute + "]");
    if (this.jobInfo != null) {
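Widening loadFullHistoryData from private to protected is what lets test code stub out the expensive history-file parsing; the MockHistoryJobs helper added later in this commit overrides it with an empty body:

  @Override
  protected void loadFullHistoryData(boolean loadTasks,
      Path historyFileAbsolute) throws IOException {
    //Empty
  }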
@@ -31,6 +31,7 @@ import javax.ws.rs.core.UriInfo;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.v2.api.records.AMInfo;
import org.apache.hadoop.mapreduce.v2.api.records.JobReport;
import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
import org.apache.hadoop.mapreduce.v2.app.AppContext;
@@ -184,35 +185,32 @@ public class HsWebServices {
        break;
      }

      // getAllJobs only gives you a partial we want a full
      Job fullJob = appCtx.getJob(job.getID());
      if (fullJob == null) {
        continue;
      }

      JobInfo jobInfo = new JobInfo(fullJob);
      // can't really validate queue is a valid one since queues could change
      if (queueQuery != null && !queueQuery.isEmpty()) {
        if (!jobInfo.getQueueName().equals(queueQuery)) {
        if (!job.getQueueName().equals(queueQuery)) {
          continue;
        }
      }

      if (userQuery != null && !userQuery.isEmpty()) {
        if (!jobInfo.getUserName().equals(userQuery)) {
        if (!job.getUserName().equals(userQuery)) {
          continue;
        }
      }

      JobReport report = job.getReport();

      if (checkStart
          && (jobInfo.getStartTime() < sBegin || jobInfo.getStartTime() > sEnd)) {
          && (report.getStartTime() < sBegin || report.getStartTime() > sEnd)) {
        continue;
      }
      if (checkEnd
          && (jobInfo.getFinishTime() < fBegin || jobInfo.getFinishTime() > fEnd)) {
          && (report.getFinishTime() < fBegin || report.getFinishTime() > fEnd)) {
        continue;
      }

      JobInfo jobInfo = new JobInfo(job);

      allJobs.add(jobInfo);
      num++;
    }
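Since the hunk above interleaves the old and new lines, here is the reworked filter loop reassembled from the new side only. The enclosing iteration over appCtx.getAllJobs().values() and the result-count check that issues the "break;" are assumptions taken from the surrounding getJobs() method, which the hunk does not show in full:

// Sketch of the new filtering logic: filters run against the partial job itself.
for (Job job : appCtx.getAllJobs().values()) {   // enclosing loop assumed, not shown in the hunk
  if (queueQuery != null && !queueQuery.isEmpty()) {
    if (!job.getQueueName().equals(queueQuery)) {
      continue;
    }
  }
  if (userQuery != null && !userQuery.isEmpty()) {
    if (!job.getUserName().equals(userQuery)) {
      continue;
    }
  }
  JobReport report = job.getReport();
  if (checkStart
      && (report.getStartTime() < sBegin || report.getStartTime() > sEnd)) {
    continue;
  }
  if (checkEnd
      && (report.getFinishTime() < fBegin || report.getFinishTime() > fEnd)) {
    continue;
  }
  // no appCtx.getJob(job.getID()) lookup any more: the partial job is enough,
  // which is where the performance win of MAPREDUCE-3944 comes from
  JobInfo jobInfo = new JobInfo(job);
  allJobs.add(jobInfo);
  num++;
}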
@@ -55,18 +55,18 @@ public class JobInfo {
  protected int mapsCompleted;
  protected int reducesTotal;
  protected int reducesCompleted;
  protected boolean uberized;
  protected Boolean uberized;
  protected String diagnostics;
  protected long avgMapTime = 0;
  protected long avgReduceTime = 0;
  protected long avgShuffleTime = 0;
  protected long avgMergeTime = 0;
  protected int failedReduceAttempts = 0;
  protected int killedReduceAttempts = 0;
  protected int successfulReduceAttempts = 0;
  protected int failedMapAttempts = 0;
  protected int killedMapAttempts = 0;
  protected int successfulMapAttempts = 0;
  protected Long avgMapTime;
  protected Long avgReduceTime;
  protected Long avgShuffleTime;
  protected Long avgMergeTime;
  protected Integer failedReduceAttempts;
  protected Integer killedReduceAttempts;
  protected Integer successfulReduceAttempts;
  protected Integer failedMapAttempts;
  protected Integer killedMapAttempts;
  protected Integer successfulMapAttempts;
  protected ArrayList<ConfEntryInfo> acls;

  @XmlTransient
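The move from primitives to boxed types (Boolean, Long, Integer) appears to be what lets these fields stay unset for a partial job: with the JAXB-based marshalling these web services use, a null field is typically omitted from the JSON/XML output, whereas a primitive would always be emitted as 0 or false. A minimal illustration with a hypothetical class (not part of the patch):

import javax.xml.bind.annotation.XmlRootElement;

@XmlRootElement
public class PartialVsFull {   // hypothetical class, for illustration only
  public Long avgMapTime;      // boxed: stays null for a partial job, so the marshaller leaves it out
  public long startTime;       // primitive: always emitted, even if it was never set (0)
}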
@@ -80,7 +80,7 @@ public class JobInfo {
  public JobInfo(Job job) {
    this.id = MRApps.toString(job.getID());
    JobReport report = job.getReport();
    countTasksAndAttempts(job);

    this.mapsTotal = job.getTotalMaps();
    this.mapsCompleted = job.getCompletedMaps();
    this.reducesTotal = job.getTotalReduces();
@@ -91,19 +91,33 @@ public class JobInfo {
    this.queue = job.getQueueName();
    this.user = job.getUserName();
    this.state = job.getState().toString();
    this.uberized = job.isUber();
    this.diagnostics = "";
    List<String> diagnostics = job.getDiagnostics();
    if (diagnostics != null && !diagnostics.isEmpty()) {
      StringBuffer b = new StringBuffer();
      for (String diag : diagnostics) {
        b.append(diag);
      }
      this.diagnostics = b.toString();
    }

    this.acls = new ArrayList<ConfEntryInfo>();

    if (job instanceof CompletedJob) {
      avgMapTime = 0l;
      avgReduceTime = 0l;
      avgShuffleTime = 0l;
      avgMergeTime = 0l;
      failedReduceAttempts = 0;
      killedReduceAttempts = 0;
      successfulReduceAttempts = 0;
      failedMapAttempts = 0;
      killedMapAttempts = 0;
      successfulMapAttempts = 0;
      countTasksAndAttempts(job);
      this.uberized = job.isUber();
      this.diagnostics = "";
      List<String> diagnostics = job.getDiagnostics();
      if (diagnostics != null && !diagnostics.isEmpty()) {
        StringBuffer b = new StringBuffer();
        for (String diag : diagnostics) {
          b.append(diag);
        }
        this.diagnostics = b.toString();
      }

    Map<JobACL, AccessControlList> allacls = job.getJobACLs();
    if (allacls != null) {
      for (Map.Entry<JobACL, AccessControlList> entry : allacls.entrySet()) {
@@ -122,43 +136,43 @@ public class JobInfo {
    return numReduces;
  }

  public long getAvgMapTime() {
  public Long getAvgMapTime() {
    return avgMapTime;
  }

  public long getAvgReduceTime() {
  public Long getAvgReduceTime() {
    return avgReduceTime;
  }

  public long getAvgShuffleTime() {
  public Long getAvgShuffleTime() {
    return avgShuffleTime;
  }

  public long getAvgMergeTime() {
  public Long getAvgMergeTime() {
    return avgMergeTime;
  }

  public long getFailedReduceAttempts() {
  public Integer getFailedReduceAttempts() {
    return failedReduceAttempts;
  }

  public long getKilledReduceAttempts() {
  public Integer getKilledReduceAttempts() {
    return killedReduceAttempts;
  }

  public long getSuccessfulReduceAttempts() {
  public Integer getSuccessfulReduceAttempts() {
    return successfulReduceAttempts;
  }

  public long getFailedMapAttempts() {
  public Integer getFailedMapAttempts() {
    return failedMapAttempts;
  }

  public long getKilledMapAttempts() {
  public Integer getKilledMapAttempts() {
    return killedMapAttempts;
  }

  public long getSuccessfulMapAttempts() {
  public Integer getSuccessfulMapAttempts() {
    return successfulMapAttempts;
  }
@@ -210,7 +224,7 @@ public class JobInfo {
    return this.finishTime;
  }

  public boolean isUber() {
  public Boolean isUber() {
    return this.uberized;
  }
@@ -0,0 +1,192 @@
package org.apache.hadoop.mapreduce.v2.hs;

import java.io.IOException;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.JobACL;
import org.apache.hadoop.mapreduce.v2.api.records.AMInfo;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.api.records.JobReport;
import org.apache.hadoop.mapreduce.v2.api.records.JobState;
import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptCompletionEvent;
import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
import org.apache.hadoop.mapreduce.v2.app.MockJobs;
import org.apache.hadoop.mapreduce.v2.app.job.Job;
import org.apache.hadoop.mapreduce.v2.app.job.Task;
import org.apache.hadoop.mapreduce.v2.jobhistory.JobIndexInfo;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authorize.AccessControlList;
import org.apache.hadoop.yarn.api.records.ApplicationId;

import com.google.common.collect.Maps;

public class MockHistoryJobs extends MockJobs {

  public static class JobsPair {
    public Map<JobId, Job> partial;
    public Map<JobId, Job> full;
  }

  public static JobsPair newHistoryJobs(int numJobs, int numTasksPerJob,
      int numAttemptsPerTask) throws IOException {
    Map<JobId, Job> mocked = newJobs(numJobs, numTasksPerJob, numAttemptsPerTask);
    return split(mocked);
  }

  public static JobsPair newHistoryJobs(ApplicationId appID, int numJobsPerApp,
      int numTasksPerJob, int numAttemptsPerTask) throws IOException {
    Map<JobId, Job> mocked = newJobs(appID, numJobsPerApp, numTasksPerJob,
        numAttemptsPerTask);
    return split(mocked);
  }

  private static JobsPair split(Map<JobId, Job> mocked) throws IOException {
    JobsPair ret = new JobsPair();
    ret.full = Maps.newHashMap();
    ret.partial = Maps.newHashMap();
    for(Map.Entry<JobId, Job> entry: mocked.entrySet()) {
      JobId id = entry.getKey();
      Job j = entry.getValue();
      ret.full.put(id, new MockCompletedJob(j));
      JobReport report = j.getReport();
      JobIndexInfo info = new JobIndexInfo(report.getStartTime(),
          report.getFinishTime(), j.getUserName(), j.getName(), id,
          j.getCompletedMaps(), j.getCompletedReduces(), String.valueOf(j.getState()));
      info.setQueueName(j.getQueueName());
      ret.partial.put(id, new PartialJob(info, id));
    }
    return ret;
  }

  private static class MockCompletedJob extends CompletedJob {
    private Job job;

    public MockCompletedJob(Job job) throws IOException {
      super(new Configuration(), job.getID(), null, true, job.getUserName(),
          null, null);
      this.job = job;
    }

    @Override
    public int getCompletedMaps() {
      return job.getCompletedMaps();
    }

    @Override
    public int getCompletedReduces() {
      return job.getCompletedReduces();
    }

    @Override
    public org.apache.hadoop.mapreduce.Counters getAllCounters() {
      return job.getAllCounters();
    }

    @Override
    public JobId getID() {
      return job.getID();
    }

    @Override
    public JobReport getReport() {
      return job.getReport();
    }

    @Override
    public float getProgress() {
      return job.getProgress();
    }

    @Override
    public JobState getState() {
      return job.getState();
    }

    @Override
    public Task getTask(TaskId taskId) {
      return job.getTask(taskId);
    }

    @Override
    public TaskAttemptCompletionEvent[] getTaskAttemptCompletionEvents(
        int fromEventId, int maxEvents) {
      return job.getTaskAttemptCompletionEvents(fromEventId, maxEvents);
    }

    @Override
    public Map<TaskId, Task> getTasks() {
      return job.getTasks();
    }

    @Override
    protected void loadFullHistoryData(boolean loadTasks,
        Path historyFileAbsolute) throws IOException {
      //Empty
    }

    @Override
    public List<String> getDiagnostics() {
      return job.getDiagnostics();
    }

    @Override
    public String getName() {
      return job.getName();
    }

    @Override
    public String getQueueName() {
      return job.getQueueName();
    }

    @Override
    public int getTotalMaps() {
      return job.getTotalMaps();
    }

    @Override
    public int getTotalReduces() {
      return job.getTotalReduces();
    }

    @Override
    public boolean isUber() {
      return job.isUber();
    }

    @Override
    public Map<TaskId, Task> getTasks(TaskType taskType) {
      return job.getTasks();
    }

    @Override
    public
    boolean checkAccess(UserGroupInformation callerUGI, JobACL jobOperation) {
      return job.checkAccess(callerUGI, jobOperation);
    }

    @Override
    public Map<JobACL, AccessControlList> getJobACLs() {
      return job.getJobACLs();
    }

    @Override
    public String getUserName() {
      return job.getUserName();
    }

    @Override
    public Path getConfFile() {
      return job.getConfFile();
    }

    @Override
    public List<AMInfo> getAMInfos() {
      return job.getAMInfos();
    }
  }
}
@@ -27,6 +27,7 @@ import static org.junit.Assert.fail;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import java.io.IOException;
import java.io.StringReader;
import java.util.Map;
@@ -40,8 +41,11 @@ import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.app.AppContext;
import org.apache.hadoop.mapreduce.v2.app.MockJobs;
import org.apache.hadoop.mapreduce.v2.app.job.Job;
import org.apache.hadoop.mapreduce.v2.hs.MockHistoryJobs;
import org.apache.hadoop.mapreduce.v2.hs.MockHistoryJobs.JobsPair;
import org.apache.hadoop.mapreduce.v2.util.MRApps;
import org.apache.hadoop.yarn.Clock;
import org.apache.hadoop.yarn.YarnException;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.event.EventHandler;
@@ -89,13 +93,21 @@ public class TestHsWebServicesJobs extends JerseyTest {
    final ApplicationAttemptId appAttemptID;
    final ApplicationId appID;
    final String user = MockJobs.newUserName();
    final Map<JobId, Job> jobs;
    final Map<JobId, Job> partialJobs;
    final Map<JobId, Job> fullJobs;
    final long startTime = System.currentTimeMillis();

    TestAppContext(int appid, int numJobs, int numTasks, int numAttempts) {
      appID = MockJobs.newAppID(appid);
      appAttemptID = MockJobs.newAppAttemptID(appID, 0);
      jobs = MockJobs.newJobs(appID, numJobs, numTasks, numAttempts);
      JobsPair jobs;
      try {
        jobs = MockHistoryJobs.newHistoryJobs(appID, numJobs, numTasks, numAttempts);
      } catch (IOException e) {
        throw new YarnException(e);
      }
      partialJobs = jobs.partial;
      fullJobs = jobs.full;
    }

    TestAppContext() {
@@ -119,12 +131,16 @@ public class TestHsWebServicesJobs extends JerseyTest {

    @Override
    public Job getJob(JobId jobID) {
      return jobs.get(jobID);
      return fullJobs.get(jobID);
    }

    public Job getPartialJob(JobId jobID) {
      return partialJobs.get(jobID);
    }

    @Override
    public Map<JobId, Job> getAllJobs() {
      return jobs; // OK
      return partialJobs; // OK
    }

    @SuppressWarnings("rawtypes")
@@ -204,8 +220,8 @@ public class TestHsWebServicesJobs extends JerseyTest {
      JSONArray arr = jobs.getJSONArray("job");
      assertEquals("incorrect number of elements", 1, arr.length());
      JSONObject info = arr.getJSONObject(0);
      Job job = appContext.getJob(MRApps.toJobID(info.getString("id")));
      VerifyJobsUtils.verifyHsJob(info, job);
      Job job = appContext.getPartialJob(MRApps.toJobID(info.getString("id")));
      VerifyJobsUtils.verifyHsJobPartial(info, job);

  }
@@ -222,8 +238,8 @@ public class TestHsWebServicesJobs extends JerseyTest {
      JSONArray arr = jobs.getJSONArray("job");
      assertEquals("incorrect number of elements", 1, arr.length());
      JSONObject info = arr.getJSONObject(0);
      Job job = appContext.getJob(MRApps.toJobID(info.getString("id")));
      VerifyJobsUtils.verifyHsJob(info, job);
      Job job = appContext.getPartialJob(MRApps.toJobID(info.getString("id")));
      VerifyJobsUtils.verifyHsJobPartial(info, job);

  }
@@ -239,8 +255,8 @@ public class TestHsWebServicesJobs extends JerseyTest {
      JSONArray arr = jobs.getJSONArray("job");
      assertEquals("incorrect number of elements", 1, arr.length());
      JSONObject info = arr.getJSONObject(0);
      Job job = appContext.getJob(MRApps.toJobID(info.getString("id")));
      VerifyJobsUtils.verifyHsJob(info, job);
      Job job = appContext.getPartialJob(MRApps.toJobID(info.getString("id")));
      VerifyJobsUtils.verifyHsJobPartial(info, job);

  }
@@ -261,10 +277,35 @@ public class TestHsWebServicesJobs extends JerseyTest {
    assertEquals("incorrect number of elements", 1, jobs.getLength());
    NodeList job = dom.getElementsByTagName("job");
    assertEquals("incorrect number of elements", 1, job.getLength());
    verifyHsJobXML(job, appContext);

    verifyHsJobPartialXML(job, appContext);
  }

  public void verifyHsJobPartialXML(NodeList nodes, TestAppContext appContext) {

    assertEquals("incorrect number of elements", 1, nodes.getLength());

    for (int i = 0; i < nodes.getLength(); i++) {
      Element element = (Element) nodes.item(i);

      Job job = appContext.getPartialJob(MRApps.toJobID(WebServicesTestUtils
          .getXmlString(element, "id")));
      assertNotNull("Job not found - output incorrect", job);

      VerifyJobsUtils.verifyHsJobGeneric(job,
          WebServicesTestUtils.getXmlString(element, "id"),
          WebServicesTestUtils.getXmlString(element, "user"),
          WebServicesTestUtils.getXmlString(element, "name"),
          WebServicesTestUtils.getXmlString(element, "state"),
          WebServicesTestUtils.getXmlString(element, "queue"),
          WebServicesTestUtils.getXmlLong(element, "startTime"),
          WebServicesTestUtils.getXmlLong(element, "finishTime"),
          WebServicesTestUtils.getXmlInt(element, "mapsTotal"),
          WebServicesTestUtils.getXmlInt(element, "mapsCompleted"),
          WebServicesTestUtils.getXmlInt(element, "reducesTotal"),
          WebServicesTestUtils.getXmlInt(element, "reducesCompleted"));
    }
  }

  public void verifyHsJobXML(NodeList nodes, TestAppContext appContext) {

    assertEquals("incorrect number of elements", 1, nodes.getLength());
@@ -320,7 +361,7 @@ public class TestHsWebServicesJobs extends JerseyTest {
      JSONObject json = response.getEntity(JSONObject.class);
      assertEquals("incorrect number of elements", 1, json.length());
      JSONObject info = json.getJSONObject("job");
      VerifyJobsUtils.verifyHsJob(info, jobsMap.get(id));
      VerifyJobsUtils.verifyHsJob(info, appContext.getJob(id));
    }

  }
@@ -356,7 +397,7 @@ public class TestHsWebServicesJobs extends JerseyTest {
      JSONObject json = response.getEntity(JSONObject.class);
      assertEquals("incorrect number of elements", 1, json.length());
      JSONObject info = json.getJSONObject("job");
      VerifyJobsUtils.verifyHsJob(info, jobsMap.get(id));
      VerifyJobsUtils.verifyHsJob(info, appContext.getJob(id));
    }

  }
@@ -694,7 +735,7 @@ public class TestHsWebServicesJobs extends JerseyTest {
      JSONObject json = response.getEntity(JSONObject.class);
      assertEquals("incorrect number of elements", 1, json.length());
      JSONObject info = json.getJSONObject("jobAttempts");
      verifyHsJobAttempts(info, jobsMap.get(id));
      verifyHsJobAttempts(info, appContext.getJob(id));
    }
  }
@@ -712,7 +753,7 @@ public class TestHsWebServicesJobs extends JerseyTest {
      JSONObject json = response.getEntity(JSONObject.class);
      assertEquals("incorrect number of elements", 1, json.length());
      JSONObject info = json.getJSONObject("jobAttempts");
      verifyHsJobAttempts(info, jobsMap.get(id));
      verifyHsJobAttempts(info, appContext.getJob(id));
    }
  }
@@ -730,7 +771,7 @@ public class TestHsWebServicesJobs extends JerseyTest {
      JSONObject json = response.getEntity(JSONObject.class);
      assertEquals("incorrect number of elements", 1, json.length());
      JSONObject info = json.getJSONObject("jobAttempts");
      verifyHsJobAttempts(info, jobsMap.get(id));
      verifyHsJobAttempts(info, appContext.getJob(id));
    }
  }
@@ -754,7 +795,7 @@ public class TestHsWebServicesJobs extends JerseyTest {
      NodeList attempts = dom.getElementsByTagName("jobAttempts");
      assertEquals("incorrect number of elements", 1, attempts.getLength());
      NodeList info = dom.getElementsByTagName("jobAttempt");
      verifyHsJobAttemptsXML(info, jobsMap.get(id));
      verifyHsJobAttemptsXML(info, appContext.getJob(id));
    }
  }
@@ -23,6 +23,7 @@ import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Map;
@@ -34,8 +35,11 @@ import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.app.AppContext;
import org.apache.hadoop.mapreduce.v2.app.MockJobs;
import org.apache.hadoop.mapreduce.v2.app.job.Job;
import org.apache.hadoop.mapreduce.v2.hs.MockHistoryJobs;
import org.apache.hadoop.mapreduce.v2.hs.MockHistoryJobs.JobsPair;
import org.apache.hadoop.mapreduce.v2.util.MRApps;
import org.apache.hadoop.yarn.Clock;
import org.apache.hadoop.yarn.YarnException;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.event.EventHandler;
@@ -73,11 +77,19 @@ public class TestHsWebServicesJobsQuery extends JerseyTest {

  static class TestAppContext implements AppContext {
    final String user = MockJobs.newUserName();
    final Map<JobId, Job> jobs;
    final Map<JobId, Job> fullJobs;
    final Map<JobId, Job> partialJobs;
    final long startTime = System.currentTimeMillis();

    TestAppContext(int numJobs, int numTasks, int numAttempts) {
      jobs = MockJobs.newJobs(numJobs, numTasks, numAttempts);
      JobsPair jobs;
      try {
        jobs = MockHistoryJobs.newHistoryJobs(numJobs, numTasks, numAttempts);
      } catch (IOException e) {
        throw new YarnException(e);
      }
      partialJobs = jobs.partial;
      fullJobs = jobs.full;
    }

    TestAppContext() {
@@ -101,12 +113,16 @@ public class TestHsWebServicesJobsQuery extends JerseyTest {

    @Override
    public Job getJob(JobId jobID) {
      return jobs.get(jobID);
      return fullJobs.get(jobID);
    }

    public Job getPartialJob(JobId jobID) {
      return partialJobs.get(jobID);
    }

    @Override
    public Map<JobId, Job> getAllJobs() {
      return jobs; // OK
      return partialJobs; // OK
    }

    @SuppressWarnings("rawtypes")
@@ -199,8 +215,8 @@ public class TestHsWebServicesJobsQuery extends JerseyTest {
      assertEquals("incorrect number of elements", 3, arr.length());
      // just verify one of them.
      JSONObject info = arr.getJSONObject(0);
      Job job = appContext.getJob(MRApps.toJobID(info.getString("id")));
      VerifyJobsUtils.verifyHsJob(info, job);
      Job job = appContext.getPartialJob(MRApps.toJobID(info.getString("id")));
      VerifyJobsUtils.verifyHsJobPartial(info, job);
    }

    @Test
@@ -32,11 +32,20 @@ import org.codehaus.jettison.json.JSONObject;

public class VerifyJobsUtils {

  public static void verifyHsJob(JSONObject info, Job job) throws JSONException {
  public static void verifyHsJobPartial(JSONObject info, Job job) throws JSONException {
    assertEquals("incorrect number of elements", 11, info.length());

    // this is 23 instead of 24 because acls not being checked since
    // we are using mock job instead of CompletedJob
    assertEquals("incorrect number of elements", 23, info.length());
    // everyone access fields
    verifyHsJobGeneric(job, info.getString("id"), info.getString("user"),
        info.getString("name"), info.getString("state"),
        info.getString("queue"), info.getLong("startTime"),
        info.getLong("finishTime"), info.getInt("mapsTotal"),
        info.getInt("mapsCompleted"), info.getInt("reducesTotal"),
        info.getInt("reducesCompleted"));
  }

  public static void verifyHsJob(JSONObject info, Job job) throws JSONException {
    assertEquals("incorrect number of elements", 24, info.length());

    // everyone access fields
    verifyHsJobGeneric(job, info.getString("id"), info.getString("user"),
@@ -129,7 +129,7 @@ History Server REST API's.

** Jobs API

  The jobs resource provides a list of the MapReduce jobs that have finished.
  The jobs resource provides a list of the MapReduce jobs that have finished. It does not currently return a full list of parameters

*** URI
@@ -160,7 +160,9 @@ History Server REST API's.
*** Elements of the <jobs> object

  When you make a request for the list of jobs, the information will be returned as an array of job objects.
  See also {{Job API}} for syntax of the job object.
  See also {{Job API}} for syntax of the job object. Except this is a subset of a full job. Only startTime,
  finishTime, id, name, queue, user, state, mapsTotal, mapsCompleted, reducesTotal, and reducesCompleted are
  returned.

*---------------+--------------+-------------------------------+
|| Item || Data Type || Description |
@@ -194,73 +196,29 @@ History Server REST API's.
   "jobs" : {
      "job" : [
         {
            "avgReduceTime" : 833,
            "failedReduceAttempts" : 0,
            "state" : "SUCCEEDED",
            "successfulReduceAttempts" : 1,
            "acls" : [
               {
                  "value" : " ",
                  "name" : "mapreduce.job.acl-modify-job"
               },
               {
                  "value" : " ",
                  "name" : "mapreduce.job.acl-view-job"
               }
            ],
            "user" : "user1",
            "reducesTotal" : 1,
            "mapsCompleted" : 1,
            "startTime" : 1326381344489,
            "id" : "job_1326381300833_1_1",
            "avgMapTime" : 2671,
            "successfulMapAttempts" : 1,
            "name" : "word count",
            "avgShuffleTime" : 2540,
            "reducesCompleted" : 1,
            "diagnostics" : "",
            "failedMapAttempts" : 0,
            "avgMergeTime" : 2570,
            "killedReduceAttempts" : 0,
            "mapsTotal" : 1,
            "queue" : "default",
            "uberized" : false,
            "killedMapAttempts" : 0,
            "finishTime" : 1326381356010
         },
         {
            "avgReduceTime" : 124961,
            "failedReduceAttempts" : 0,
            "state" : "SUCCEEDED",
            "successfulReduceAttempts" : 1,
            "acls" : [
               {
                  "value" : " ",
                  "name" : "mapreduce.job.acl-modify-job"
               },
               {
                  "value" : " ",
                  "name" : "mapreduce.job.acl-view-job"
               }
            ],
            "user" : "user1",
            "reducesTotal" : 1,
            "mapsCompleted" : 1,
            "startTime" : 1326381446529,
            "id" : "job_1326381300833_2_2",
            "avgMapTime" : 2638,
            "successfulMapAttempts" : 1,
            "name" : "Sleep job",
            "avgShuffleTime" : 2540,
            "reducesCompleted" : 1,
            "diagnostics" : "",
            "failedMapAttempts" : 0,
            "avgMergeTime" : 2589,
            "killedReduceAttempts" : 0,
            "mapsTotal" : 1,
            "queue" : "default",
            "uberized" : false,
            "killedMapAttempts" : 0,
            "finishTime" : 1326381582106
         }
      ]
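For comparison, after this change an entry in the jobs list carries only the eleven summary fields named above. A sketch of what the first job from the removed example would now look like, reconstructed from that field list and the old values rather than captured server output:

{
   "startTime" : 1326381344489,
   "finishTime" : 1326381356010,
   "id" : "job_1326381300833_1_1",
   "name" : "word count",
   "queue" : "default",
   "user" : "user1",
   "state" : "SUCCEEDED",
   "mapsTotal" : 1,
   "mapsCompleted" : 1,
   "reducesTotal" : 1,
   "reducesCompleted" : 1
}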
@@ -303,26 +261,6 @@ History Server REST API's.
    <mapsCompleted>1</mapsCompleted>
    <reducesTotal>1</reducesTotal>
    <reducesCompleted>1</reducesCompleted>
    <uberized>false</uberized>
    <diagnostics/>
    <avgMapTime>2671</avgMapTime>
    <avgReduceTime>833</avgReduceTime>
    <avgShuffleTime>2540</avgShuffleTime>
    <avgMergeTime>2570</avgMergeTime>
    <failedReduceAttempts>0</failedReduceAttempts>
    <killedReduceAttempts>0</killedReduceAttempts>
    <successfulReduceAttempts>1</successfulReduceAttempts>
    <failedMapAttempts>0</failedMapAttempts>
    <killedMapAttempts>0</killedMapAttempts>
    <successfulMapAttempts>1</successfulMapAttempts>
    <acls>
      <name>mapreduce.job.acl-modify-job</name>
      <value> </value>
    </acls>
    <acls>
      <name>mapreduce.job.acl-view-job</name>
      <value> </value>
    </acls>
  </job>
  <job>
    <startTime>1326381446529</startTime>
@@ -336,26 +274,6 @@ History Server REST API's.
    <mapsCompleted>1</mapsCompleted>
    <reducesTotal>1</reducesTotal>
    <reducesCompleted>1</reducesCompleted>
    <uberized>false</uberized>
    <diagnostics/>
    <avgMapTime>2638</avgMapTime>
    <avgReduceTime>124961</avgReduceTime>
    <avgShuffleTime>2540</avgShuffleTime>
    <avgMergeTime>2589</avgMergeTime>
    <failedReduceAttempts>0</failedReduceAttempts>
    <killedReduceAttempts>0</killedReduceAttempts>
    <successfulReduceAttempts>1</successfulReduceAttempts>
    <failedMapAttempts>0</failedMapAttempts>
    <killedMapAttempts>0</killedMapAttempts>
    <successfulMapAttempts>1</successfulMapAttempts>
    <acls>
      <name>mapreduce.job.acl-modify-job</name>
      <value> </value>
    </acls>
    <acls>
      <name>mapreduce.job.acl-view-job</name>
      <value> </value>
    </acls>
  </job>
</jobs>
+---+