svn merge -c 1489271 FIXES: MAPREDUCE-5283. Over 10 different tests have near identical implementations of AppContext. Contributed by Sandy Ryza
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1489272 13f79535-47bb-0310-9956-ffa450edef68
Parent: fa6ec826eb
Commit: 3b6035ba83
@@ -142,6 +142,9 @@ Release 2.1.0-beta - UNRELEASED
     MAPREDUCE-5300. Fix backward incompatibility for
     o.a.h.mapreduce.filecache.DistributedCache. (Zhijie Shen via acmurthy)
 
+    MAPREDUCE-5283. Over 10 different tests have near identical
+    implementations of AppContext (Sandy Ryza via jlowe)
+
   OPTIMIZATIONS
 
     MAPREDUCE-4974. Optimising the LineRecordReader initialize() method
@@ -0,0 +1,118 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.mapreduce.v2.app;
+
+import java.util.Map;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapreduce.v2.api.records.JobId;
+import org.apache.hadoop.mapreduce.v2.app.job.Job;
+import org.apache.hadoop.yarn.Clock;
+import org.apache.hadoop.yarn.ClusterInfo;
+import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.event.EventHandler;
+
+import com.google.common.collect.Maps;
+
+public class MockAppContext implements AppContext {
+  final ApplicationAttemptId appAttemptID;
+  final ApplicationId appID;
+  final String user = MockJobs.newUserName();
+  final Map<JobId, Job> jobs;
+  final long startTime = System.currentTimeMillis();
+
+  public MockAppContext(int appid) {
+    appID = MockJobs.newAppID(appid);
+    appAttemptID = ApplicationAttemptId.newInstance(appID, 0);
+    jobs = null;
+  }
+
+  public MockAppContext(int appid, int numTasks, int numAttempts, Path confPath) {
+    appID = MockJobs.newAppID(appid);
+    appAttemptID = ApplicationAttemptId.newInstance(appID, 0);
+    Map<JobId, Job> map = Maps.newHashMap();
+    Job job = MockJobs.newJob(appID, 0, numTasks, numAttempts, confPath);
+    map.put(job.getID(), job);
+    jobs = map;
+  }
+
+  public MockAppContext(int appid, int numJobs, int numTasks, int numAttempts) {
+    this(appid, numJobs, numTasks, numAttempts, false);
+  }
+
+  public MockAppContext(int appid, int numJobs, int numTasks, int numAttempts,
+      boolean hasFailedTasks) {
+    appID = MockJobs.newAppID(appid);
+    appAttemptID = ApplicationAttemptId.newInstance(appID, 0);
+    jobs = MockJobs.newJobs(appID, numJobs, numTasks, numAttempts, hasFailedTasks);
+  }
+
+  @Override
+  public ApplicationAttemptId getApplicationAttemptId() {
+    return appAttemptID;
+  }
+
+  @Override
+  public ApplicationId getApplicationID() {
+    return appID;
+  }
+
+  @Override
+  public CharSequence getUser() {
+    return user;
+  }
+
+  @Override
+  public Job getJob(JobId jobID) {
+    return jobs.get(jobID);
+  }
+
+  @Override
+  public Map<JobId, Job> getAllJobs() {
+    return jobs; // OK
+  }
+
+  @SuppressWarnings("rawtypes")
+  @Override
+  public EventHandler getEventHandler() {
+    return null;
+  }
+
+  @Override
+  public Clock getClock() {
+    return null;
+  }
+
+  @Override
+  public String getApplicationName() {
+    return "TestApp";
+  }
+
+  @Override
+  public long getStartTime() {
+    return startTime;
+  }
+
+  @Override
+  public ClusterInfo getClusterInfo() {
+    return null;
+  }
+
+}
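For orientation (not part of the commit itself): once MockAppContext is on the test classpath, a test no longer needs to carry its own AppContext implementation; it simply constructs the mock. A minimal, hypothetical sketch of that usage follows — the wrapper class name is invented for illustration, and it assumes the MR app test classes (MockJobs and friends) are available:

import org.apache.hadoop.mapreduce.v2.app.AppContext;
import org.apache.hadoop.mapreduce.v2.app.MockAppContext;
import org.apache.hadoop.mapreduce.v2.app.job.Job;

public class MockAppContextUsageSketch {
  public static void main(String[] args) {
    // appid=0, one job, one task, one attempt -- the same shape the removed
    // per-test TestAppContext inner classes below used to build by hand.
    AppContext ctx = new MockAppContext(0, 1, 1, 1);
    Job job = ctx.getAllJobs().values().iterator().next();
    System.out.println(ctx.getApplicationID() + " -> " + job.getID());
  }
}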
@@ -28,16 +28,12 @@ import java.util.Map.Entry;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
 import org.apache.hadoop.mapreduce.v2.app.AppContext;
+import org.apache.hadoop.mapreduce.v2.app.MockAppContext;
 import org.apache.hadoop.mapreduce.v2.app.MockJobs;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
 import org.apache.hadoop.mapreduce.v2.app.job.Task;
 import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
-import org.apache.hadoop.yarn.Clock;
-import org.apache.hadoop.yarn.ClusterInfo;
-import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.event.EventHandler;
 import org.apache.hadoop.yarn.webapp.WebApps;
 import org.apache.hadoop.yarn.webapp.test.WebAppTests;
 import org.junit.Test;

@@ -46,103 +42,34 @@ import com.google.inject.Injector;
 
 public class TestAMWebApp {
 
-  static class TestAppContext implements AppContext {
-    final ApplicationAttemptId appAttemptID;
-    final ApplicationId appID;
-    final String user = MockJobs.newUserName();
-    final Map<JobId, Job> jobs;
-    final long startTime = System.currentTimeMillis();
-
-    TestAppContext(int appid, int numJobs, int numTasks, int numAttempts) {
-      appID = MockJobs.newAppID(appid);
-      appAttemptID = ApplicationAttemptId.newInstance(appID, 0);
-      jobs = MockJobs.newJobs(appID, numJobs, numTasks, numAttempts);
-    }
-
-    TestAppContext() {
-      this(0, 1, 1, 1);
-    }
-
-    @Override
-    public ApplicationAttemptId getApplicationAttemptId() {
-      return appAttemptID;
-    }
-
-    @Override
-    public ApplicationId getApplicationID() {
-      return appID;
-    }
-
-    @Override
-    public CharSequence getUser() {
-      return user;
-    }
-
-    @Override
-    public Job getJob(JobId jobID) {
-      return jobs.get(jobID);
-    }
-
-    @Override
-    public Map<JobId, Job> getAllJobs() {
-      return jobs; // OK
-    }
-
-    @SuppressWarnings("rawtypes")
-    @Override
-    public EventHandler getEventHandler() {
-      return null;
-    }
-
-    @Override
-    public Clock getClock() {
-      return null;
-    }
-
-    @Override
-    public String getApplicationName() {
-      return "TestApp";
-    }
-
-    @Override
-    public long getStartTime() {
-      return startTime;
-    }
-
-    @Override
-    public ClusterInfo getClusterInfo() {
-      return null;
-    }
-  }
-
   @Test public void testAppControllerIndex() {
-    TestAppContext ctx = new TestAppContext();
+    AppContext ctx = new MockAppContext(0, 1, 1, 1);
     Injector injector = WebAppTests.createMockInjector(AppContext.class, ctx);
     AppController controller = injector.getInstance(AppController.class);
     controller.index();
-    assertEquals(ctx.appID.toString(), controller.get(APP_ID,""));
+    assertEquals(ctx.getApplicationID().toString(), controller.get(APP_ID,""));
   }
 
   @Test public void testAppView() {
-    WebAppTests.testPage(AppView.class, AppContext.class, new TestAppContext());
+    WebAppTests.testPage(AppView.class, AppContext.class, new MockAppContext(0, 1, 1, 1));
   }
 
 
 
   @Test public void testJobView() {
-    AppContext appContext = new TestAppContext();
+    AppContext appContext = new MockAppContext(0, 1, 1, 1);
     Map<String, String> params = getJobParams(appContext);
     WebAppTests.testPage(JobPage.class, AppContext.class, appContext, params);
   }
 
   @Test public void testTasksView() {
-    AppContext appContext = new TestAppContext();
+    AppContext appContext = new MockAppContext(0, 1, 1, 1);
     Map<String, String> params = getTaskParams(appContext);
     WebAppTests.testPage(TasksPage.class, AppContext.class, appContext, params);
   }
 
   @Test public void testTaskView() {
-    AppContext appContext = new TestAppContext();
+    AppContext appContext = new MockAppContext(0, 1, 1, 1);
     Map<String, String> params = getTaskParams(appContext);
     App app = new App(appContext);
     app.setJob(appContext.getAllJobs().values().iterator().next());

@@ -170,18 +97,18 @@ public class TestAMWebApp {
 
   @Test public void testConfView() {
     WebAppTests.testPage(JobConfPage.class, AppContext.class,
-        new TestAppContext());
+        new MockAppContext(0, 1, 1, 1));
   }
 
   @Test public void testCountersView() {
-    AppContext appContext = new TestAppContext();
+    AppContext appContext = new MockAppContext(0, 1, 1, 1);
     Map<String, String> params = getJobParams(appContext);
     WebAppTests.testPage(CountersPage.class, AppContext.class,
         appContext, params);
   }
 
   @Test public void testSingleCounterView() {
-    AppContext appContext = new TestAppContext();
+    AppContext appContext = new MockAppContext(0, 1, 1, 1);
     Job job = appContext.getAllJobs().values().iterator().next();
     // add a failed task to the job without any counters
     Task failedTask = MockJobs.newTask(job.getID(), 2, 1, true);

@@ -196,14 +123,14 @@ public class TestAMWebApp {
   }
 
   @Test public void testTaskCountersView() {
-    AppContext appContext = new TestAppContext();
+    AppContext appContext = new MockAppContext(0, 1, 1, 1);
     Map<String, String> params = getTaskParams(appContext);
     WebAppTests.testPage(CountersPage.class, AppContext.class,
         appContext, params);
   }
 
   @Test public void testSingleTaskCounterView() {
-    AppContext appContext = new TestAppContext(0, 1, 1, 2);
+    AppContext appContext = new MockAppContext(0, 1, 1, 2);
     Map<String, String> params = getTaskParams(appContext);
     params.put(AMParams.COUNTER_GROUP,
         "org.apache.hadoop.mapreduce.FileSystemCounter");

@@ -222,7 +149,7 @@ public class TestAMWebApp {
   }
 
   public static void main(String[] args) {
-    WebApps.$for("yarn", AppContext.class, new TestAppContext(0, 8, 88, 4)).
+    WebApps.$for("yarn", AppContext.class, new MockAppContext(0, 8, 88, 4)).
         at(58888).inDevMode().start(new AMWebApp()).joinThread();
   }
 }
@@ -23,22 +23,14 @@ import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
 import java.io.StringReader;
-import java.util.Map;
 
 import javax.ws.rs.core.MediaType;
 import javax.xml.parsers.DocumentBuilder;
 import javax.xml.parsers.DocumentBuilderFactory;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.mapreduce.v2.api.records.JobId;
 import org.apache.hadoop.mapreduce.v2.app.AppContext;
-import org.apache.hadoop.mapreduce.v2.app.MockJobs;
+import org.apache.hadoop.mapreduce.v2.app.MockAppContext;
-import org.apache.hadoop.mapreduce.v2.app.job.Job;
-import org.apache.hadoop.yarn.Clock;
-import org.apache.hadoop.yarn.ClusterInfo;
-import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.event.EventHandler;
 import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
 import org.apache.hadoop.yarn.webapp.WebServicesTestUtils;
 import org.codehaus.jettison.json.JSONException;

@@ -72,82 +64,13 @@ import com.sun.jersey.test.framework.WebAppDescriptor;
 public class TestAMWebServices extends JerseyTest {
 
   private static Configuration conf = new Configuration();
-  private static TestAppContext appContext;
+  private static AppContext appContext;
 
-  static class TestAppContext implements AppContext {
-    final ApplicationAttemptId appAttemptID;
-    final ApplicationId appID;
-    final String user = MockJobs.newUserName();
-    final Map<JobId, Job> jobs;
-    final long startTime = System.currentTimeMillis();
-
-    TestAppContext(int appid, int numJobs, int numTasks, int numAttempts) {
-      appID = MockJobs.newAppID(appid);
-      appAttemptID = ApplicationAttemptId.newInstance(appID, 0);
-      jobs = MockJobs.newJobs(appID, numJobs, numTasks, numAttempts);
-    }
-
-    TestAppContext() {
-      this(0, 1, 1, 1);
-    }
-
-    @Override
-    public ApplicationAttemptId getApplicationAttemptId() {
-      return appAttemptID;
-    }
-
-    @Override
-    public ApplicationId getApplicationID() {
-      return appID;
-    }
-
-    @Override
-    public CharSequence getUser() {
-      return user;
-    }
-
-    @Override
-    public Job getJob(JobId jobID) {
-      return jobs.get(jobID);
-    }
-
-    @Override
-    public Map<JobId, Job> getAllJobs() {
-      return jobs; // OK
-    }
-
-    @SuppressWarnings("rawtypes")
-    @Override
-    public EventHandler getEventHandler() {
-      return null;
-    }
-
-    @Override
-    public Clock getClock() {
-      return null;
-    }
-
-    @Override
-    public String getApplicationName() {
-      return "TestApp";
-    }
-
-    @Override
-    public long getStartTime() {
-      return startTime;
-    }
-
-    @Override
-    public ClusterInfo getClusterInfo() {
-      return null;
-    }
-  }
-
   private Injector injector = Guice.createInjector(new ServletModule() {
     @Override
     protected void configureServlets() {
 
-      appContext = new TestAppContext();
+      appContext = new MockAppContext(0, 1, 1, 1);
       bind(JAXBContextResolver.class);
       bind(AMWebServices.class);
       bind(GenericExceptionHandler.class);

@@ -318,7 +241,7 @@ public class TestAMWebServices extends JerseyTest {
     }
   }
 
-  public void verifyAMInfo(JSONObject info, TestAppContext ctx)
+  public void verifyAMInfo(JSONObject info, AppContext ctx)
       throws JSONException {
     assertEquals("incorrect number of elements", 5, info.length());
 

@@ -327,7 +250,7 @@ public class TestAMWebServices extends JerseyTest {
         info.getLong("elapsedTime"));
   }
 
-  public void verifyAMInfoXML(String xml, TestAppContext ctx)
+  public void verifyAMInfoXML(String xml, AppContext ctx)
       throws JSONException, Exception {
     DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
     DocumentBuilder db = dbf.newDocumentBuilder();

@@ -348,7 +271,7 @@ public class TestAMWebServices extends JerseyTest {
     }
   }
 
-  public void verifyAMInfoGeneric(TestAppContext ctx, String id, String user,
+  public void verifyAMInfoGeneric(AppContext ctx, String id, String user,
       String name, long startedOn, long elapsedTime) {
 
     WebServicesTestUtils.checkStringMatch("id", ctx.getApplicationID()
@@ -36,16 +36,11 @@ import org.apache.hadoop.mapreduce.v2.api.records.JobId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
 import org.apache.hadoop.mapreduce.v2.app.AppContext;
-import org.apache.hadoop.mapreduce.v2.app.MockJobs;
+import org.apache.hadoop.mapreduce.v2.app.MockAppContext;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
 import org.apache.hadoop.mapreduce.v2.app.job.Task;
 import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
-import org.apache.hadoop.yarn.Clock;
-import org.apache.hadoop.yarn.ClusterInfo;
-import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.event.EventHandler;
 import org.apache.hadoop.yarn.util.ConverterUtils;
 import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
 import org.apache.hadoop.yarn.webapp.WebServicesTestUtils;

@@ -82,82 +77,13 @@ import com.sun.jersey.test.framework.WebAppDescriptor;
 public class TestAMWebServicesAttempts extends JerseyTest {
 
   private static Configuration conf = new Configuration();
-  private static TestAppContext appContext;
+  private static AppContext appContext;
 
-  static class TestAppContext implements AppContext {
-    final ApplicationAttemptId appAttemptID;
-    final ApplicationId appID;
-    final String user = MockJobs.newUserName();
-    final Map<JobId, Job> jobs;
-    final long startTime = System.currentTimeMillis();
-
-    TestAppContext(int appid, int numJobs, int numTasks, int numAttempts) {
-      appID = MockJobs.newAppID(appid);
-      appAttemptID = ApplicationAttemptId.newInstance(appID, 0);
-      jobs = MockJobs.newJobs(appID, numJobs, numTasks, numAttempts);
-    }
-
-    TestAppContext() {
-      this(0, 1, 2, 1);
-    }
-
-    @Override
-    public ApplicationAttemptId getApplicationAttemptId() {
-      return appAttemptID;
-    }
-
-    @Override
-    public ApplicationId getApplicationID() {
-      return appID;
-    }
-
-    @Override
-    public CharSequence getUser() {
-      return user;
-    }
-
-    @Override
-    public Job getJob(JobId jobID) {
-      return jobs.get(jobID);
-    }
-
-    @Override
-    public Map<JobId, Job> getAllJobs() {
-      return jobs; // OK
-    }
-
-    @SuppressWarnings("rawtypes")
-    @Override
-    public EventHandler getEventHandler() {
-      return null;
-    }
-
-    @Override
-    public Clock getClock() {
-      return null;
-    }
-
-    @Override
-    public String getApplicationName() {
-      return "TestApp";
-    }
-
-    @Override
-    public long getStartTime() {
-      return startTime;
-    }
-
-    @Override
-    public ClusterInfo getClusterInfo() {
-      return null;
-    }
-  }
-
   private Injector injector = Guice.createInjector(new ServletModule() {
     @Override
     protected void configureServlets() {
 
-      appContext = new TestAppContext();
+      appContext = new MockAppContext(0, 1, 2, 1);
       bind(JAXBContextResolver.class);
      bind(AMWebServices.class);
      bind(GenericExceptionHandler.class);
@@ -40,6 +40,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
 import org.apache.hadoop.mapreduce.v2.app.AppContext;
+import org.apache.hadoop.mapreduce.v2.app.MockAppContext;
 import org.apache.hadoop.mapreduce.v2.app.MockJobs;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;

@@ -81,79 +82,11 @@ import com.sun.jersey.test.framework.WebAppDescriptor;
 public class TestAMWebServicesJobConf extends JerseyTest {
 
   private static Configuration conf = new Configuration();
-  private static TestAppContext appContext;
+  private static AppContext appContext;
 
   private static File testConfDir = new File("target",
       TestAMWebServicesJobConf.class.getSimpleName() + "confDir");
 
-  static class TestAppContext implements AppContext {
-    final ApplicationAttemptId appAttemptID;
-    final ApplicationId appID;
-    final String user = MockJobs.newUserName();
-    final Map<JobId, Job> jobs;
-    final long startTime = System.currentTimeMillis();
-
-    TestAppContext(int appid, int numTasks, int numAttempts, Path confPath) {
-      appID = MockJobs.newAppID(appid);
-      appAttemptID = ApplicationAttemptId.newInstance(appID, 0);
-      Map<JobId, Job> map = Maps.newHashMap();
-      Job job = MockJobs.newJob(appID, 0, numTasks, numAttempts, confPath);
-      map.put(job.getID(), job);
-      jobs = map;
-    }
-
-    @Override
-    public ApplicationAttemptId getApplicationAttemptId() {
-      return appAttemptID;
-    }
-
-    @Override
-    public ApplicationId getApplicationID() {
-      return appID;
-    }
-
-    @Override
-    public CharSequence getUser() {
-      return user;
-    }
-
-    @Override
-    public Job getJob(JobId jobID) {
-      return jobs.get(jobID);
-    }
-
-    @Override
-    public Map<JobId, Job> getAllJobs() {
-      return jobs; // OK
-    }
-
-    @SuppressWarnings("rawtypes")
-    @Override
-    public EventHandler getEventHandler() {
-      return null;
-    }
-
-    @Override
-    public Clock getClock() {
-      return null;
-    }
-
-    @Override
-    public String getApplicationName() {
-      return "TestApp";
-    }
-
-    @Override
-    public long getStartTime() {
-      return startTime;
-    }
-
-    @Override
-    public ClusterInfo getClusterInfo() {
-      return null;
-    }
-  }
-
   private Injector injector = Guice.createInjector(new ServletModule() {
     @Override
     protected void configureServlets() {

@@ -181,7 +114,7 @@ public class TestAMWebServicesJobConf extends JerseyTest {
         fail("error creating config file: " + e.getMessage());
       }
 
-      appContext = new TestAppContext(0, 2, 1, confPath);
+      appContext = new MockAppContext(0, 2, 1, confPath);
 
       bind(JAXBContextResolver.class);
       bind(AMWebServices.class);
@@ -38,16 +38,11 @@ import org.apache.hadoop.mapreduce.v2.api.records.AMInfo;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
 import org.apache.hadoop.mapreduce.v2.api.records.JobReport;
 import org.apache.hadoop.mapreduce.v2.app.AppContext;
-import org.apache.hadoop.mapreduce.v2.app.MockJobs;
+import org.apache.hadoop.mapreduce.v2.app.MockAppContext;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
 import org.apache.hadoop.security.authorize.AccessControlList;
-import org.apache.hadoop.yarn.Clock;
-import org.apache.hadoop.yarn.ClusterInfo;
-import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.NodeId;
-import org.apache.hadoop.yarn.event.EventHandler;
 import org.apache.hadoop.yarn.util.Times;
 import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
 import org.apache.hadoop.yarn.webapp.WebServicesTestUtils;

@@ -85,82 +80,13 @@ import com.sun.jersey.test.framework.WebAppDescriptor;
 public class TestAMWebServicesJobs extends JerseyTest {
 
   private static Configuration conf = new Configuration();
-  private static TestAppContext appContext;
+  private static AppContext appContext;
 
-  static class TestAppContext implements AppContext {
-    final ApplicationAttemptId appAttemptID;
-    final ApplicationId appID;
-    final String user = MockJobs.newUserName();
-    final Map<JobId, Job> jobs;
-    final long startTime = System.currentTimeMillis();
-
-    TestAppContext(int appid, int numJobs, int numTasks, int numAttempts) {
-      appID = MockJobs.newAppID(appid);
-      appAttemptID = ApplicationAttemptId.newInstance(appID, 0);
-      jobs = MockJobs.newJobs(appID, numJobs, numTasks, numAttempts);
-    }
-
-    TestAppContext() {
-      this(0, 1, 2, 1);
-    }
-
-    @Override
-    public ApplicationAttemptId getApplicationAttemptId() {
-      return appAttemptID;
-    }
-
-    @Override
-    public ApplicationId getApplicationID() {
-      return appID;
-    }
-
-    @Override
-    public CharSequence getUser() {
-      return user;
-    }
-
-    @Override
-    public Job getJob(JobId jobID) {
-      return jobs.get(jobID);
-    }
-
-    @Override
-    public Map<JobId, Job> getAllJobs() {
-      return jobs; // OK
-    }
-
-    @SuppressWarnings("rawtypes")
-    @Override
-    public EventHandler getEventHandler() {
-      return null;
-    }
-
-    @Override
-    public Clock getClock() {
-      return null;
-    }
-
-    @Override
-    public String getApplicationName() {
-      return "TestApp";
-    }
-
-    @Override
-    public long getStartTime() {
-      return startTime;
-    }
-
-    @Override
-    public ClusterInfo getClusterInfo() {
-      return null;
-    }
-  }
-
   private Injector injector = Guice.createInjector(new ServletModule() {
     @Override
     protected void configureServlets() {
 
-      appContext = new TestAppContext();
+      appContext = new MockAppContext(0, 1, 2, 1);
       bind(JAXBContextResolver.class);
       bind(AMWebServices.class);
       bind(GenericExceptionHandler.class);

@@ -545,7 +471,7 @@ public class TestAMWebServicesJobs extends JerseyTest {
 
   }
 
-  public void verifyAMJobXML(NodeList nodes, TestAppContext appContext) {
+  public void verifyAMJobXML(NodeList nodes, AppContext appContext) {
 
     assertEquals("incorrect number of elements", 1, nodes.getLength());
 
@@ -35,15 +35,10 @@ import org.apache.hadoop.mapreduce.v2.api.records.JobId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskReport;
 import org.apache.hadoop.mapreduce.v2.app.AppContext;
-import org.apache.hadoop.mapreduce.v2.app.MockJobs;
+import org.apache.hadoop.mapreduce.v2.app.MockAppContext;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
 import org.apache.hadoop.mapreduce.v2.app.job.Task;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
-import org.apache.hadoop.yarn.Clock;
-import org.apache.hadoop.yarn.ClusterInfo;
-import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.event.EventHandler;
 import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
 import org.apache.hadoop.yarn.webapp.WebServicesTestUtils;
 import org.codehaus.jettison.json.JSONArray;

@@ -79,82 +74,13 @@ import com.sun.jersey.test.framework.WebAppDescriptor;
 public class TestAMWebServicesTasks extends JerseyTest {
 
   private static Configuration conf = new Configuration();
-  private static TestAppContext appContext;
+  private static AppContext appContext;
 
-  static class TestAppContext implements AppContext {
-    final ApplicationAttemptId appAttemptID;
-    final ApplicationId appID;
-    final String user = MockJobs.newUserName();
-    final Map<JobId, Job> jobs;
-    final long startTime = System.currentTimeMillis();
-
-    TestAppContext(int appid, int numJobs, int numTasks, int numAttempts) {
-      appID = MockJobs.newAppID(appid);
-      appAttemptID = ApplicationAttemptId.newInstance(appID, 0);
-      jobs = MockJobs.newJobs(appID, numJobs, numTasks, numAttempts);
-    }
-
-    TestAppContext() {
-      this(0, 1, 2, 1);
-    }
-
-    @Override
-    public ApplicationAttemptId getApplicationAttemptId() {
-      return appAttemptID;
-    }
-
-    @Override
-    public ApplicationId getApplicationID() {
-      return appID;
-    }
-
-    @Override
-    public CharSequence getUser() {
-      return user;
-    }
-
-    @Override
-    public Job getJob(JobId jobID) {
-      return jobs.get(jobID);
-    }
-
-    @Override
-    public Map<JobId, Job> getAllJobs() {
-      return jobs; // OK
-    }
-
-    @SuppressWarnings("rawtypes")
-    @Override
-    public EventHandler getEventHandler() {
-      return null;
-    }
-
-    @Override
-    public Clock getClock() {
-      return null;
-    }
-
-    @Override
-    public String getApplicationName() {
-      return "TestApp";
-    }
-
-    @Override
-    public long getStartTime() {
-      return startTime;
-    }
-
-    @Override
-    public ClusterInfo getClusterInfo() {
-      return null;
-    }
-  }
-
   private Injector injector = Guice.createInjector(new ServletModule() {
     @Override
     protected void configureServlets() {
 
-      appContext = new TestAppContext();
+      appContext = new MockAppContext(0, 1, 2, 1);
       bind(JAXBContextResolver.class);
       bind(AMWebServices.class);
       bind(GenericExceptionHandler.class);
@@ -0,0 +1,112 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.mapreduce.v2.hs;
+
+import java.io.IOException;
+import java.util.Map;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapreduce.v2.api.records.JobId;
+import org.apache.hadoop.mapreduce.v2.api.records.JobState;
+import org.apache.hadoop.mapreduce.v2.app.MockAppContext;
+import org.apache.hadoop.mapreduce.v2.app.job.Job;
+import org.apache.hadoop.mapreduce.v2.hs.MockHistoryJobs.JobsPair;
+import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobsInfo;
+import org.apache.hadoop.yarn.YarnException;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+
+public class MockHistoryContext extends MockAppContext implements HistoryContext {
+
+  private final Map<JobId, Job> partialJobs;
+  private final Map<JobId, Job> fullJobs;
+
+  public MockHistoryContext(int numJobs, int numTasks, int numAttempts) {
+    super(0);
+    JobsPair jobs;
+    try {
+      jobs = MockHistoryJobs.newHistoryJobs(numJobs, numTasks, numAttempts);
+    } catch (IOException e) {
+      throw new YarnException(e);
+    }
+    partialJobs = jobs.partial;
+    fullJobs = jobs.full;
+  }
+
+  public MockHistoryContext(int appid, int numJobs, int numTasks,
+      int numAttempts) {
+    super(appid);
+    JobsPair jobs;
+    try {
+      jobs = MockHistoryJobs.newHistoryJobs(getApplicationID(), numJobs, numTasks,
+          numAttempts);
+    } catch (IOException e) {
+      throw new YarnException(e);
+    }
+    partialJobs = jobs.partial;
+    fullJobs = jobs.full;
+  }
+
+  public MockHistoryContext(int appid, int numTasks, int numAttempts, Path confPath) {
+    super(appid, numTasks, numAttempts, confPath);
+    fullJobs = super.getAllJobs();
+    partialJobs = null;
+  }
+
+  public MockHistoryContext(int appid, int numJobs, int numTasks, int numAttempts,
+      boolean hasFailedTasks) {
+    super(appid);
+    JobsPair jobs;
+    try {
+      jobs = MockHistoryJobs.newHistoryJobs(getApplicationID(), numJobs, numTasks,
+          numAttempts, hasFailedTasks);
+    } catch (IOException e) {
+      throw new YarnException(e);
+    }
+    partialJobs = jobs.partial;
+    fullJobs = jobs.full;
+  }
+
+  @Override
+  public Job getJob(JobId jobID) {
+    return fullJobs.get(jobID);
+  }
+
+  public Job getPartialJob(JobId jobID) {
+    return partialJobs.get(jobID);
+  }
+
+  @Override
+  public Map<JobId, Job> getAllJobs() {
+    return fullJobs;
+  }
+
+  @Override
+  public Map<JobId, Job> getAllJobs(ApplicationId appID) {
+    return null;
+  }
+
+  @Override
+  public JobsInfo getPartialJobs(Long offset, Long count, String user,
+      String queue, Long sBegin, Long sEnd, Long fBegin, Long fEnd,
+      JobState jobState) {
+    return CachedHistoryStorage.getPartialJobs(this.partialJobs.values(),
+        offset, count, user, queue, sBegin, sEnd, fBegin, fEnd, jobState);
+  }
+
+}
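Similarly (again an illustrative sketch, not code from the commit): history-server tests can build a MockHistoryContext and read back both the full and the partial view of each job. The wrapper class name below is hypothetical:

import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.hs.MockHistoryContext;

public class MockHistoryContextUsageSketch {
  public static void main(String[] args) {
    // appid=0, one job, two tasks, one attempt; the full and partial job maps
    // are populated from MockHistoryJobs, as in the constructors above.
    MockHistoryContext ctx = new MockHistoryContext(0, 1, 2, 1);
    for (JobId id : ctx.getAllJobs().keySet()) {
      System.out.println(id + " hasPartial=" + (ctx.getPartialJob(id) != null));
    }
  }
}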
@ -41,16 +41,11 @@ import org.apache.hadoop.conf.Configuration;
|
||||||
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
|
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
|
||||||
import org.apache.hadoop.mapreduce.v2.app.AppContext;
|
import org.apache.hadoop.mapreduce.v2.app.AppContext;
|
||||||
import org.apache.hadoop.mapreduce.v2.app.MRApp;
|
import org.apache.hadoop.mapreduce.v2.app.MRApp;
|
||||||
|
import org.apache.hadoop.mapreduce.v2.app.MockAppContext;
|
||||||
import org.apache.hadoop.mapreduce.v2.app.MockJobs;
|
import org.apache.hadoop.mapreduce.v2.app.MockJobs;
|
||||||
import org.apache.hadoop.mapreduce.v2.app.job.Job;
|
|
||||||
import org.apache.hadoop.mapreduce.v2.app.webapp.TestAMWebApp;
|
import org.apache.hadoop.mapreduce.v2.app.webapp.TestAMWebApp;
|
||||||
import org.apache.hadoop.yarn.Clock;
|
|
||||||
import org.apache.hadoop.yarn.ClusterInfo;
|
|
||||||
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
|
|
||||||
import org.apache.hadoop.yarn.api.records.ApplicationId;
|
|
||||||
import org.apache.hadoop.yarn.api.records.NodeId;
|
import org.apache.hadoop.yarn.api.records.NodeId;
|
||||||
import org.apache.hadoop.yarn.conf.YarnConfiguration;
|
import org.apache.hadoop.yarn.conf.YarnConfiguration;
|
||||||
import org.apache.hadoop.yarn.event.EventHandler;
|
|
||||||
import org.apache.hadoop.yarn.webapp.log.AggregatedLogsPage;
|
import org.apache.hadoop.yarn.webapp.log.AggregatedLogsPage;
|
||||||
import org.apache.hadoop.yarn.webapp.test.WebAppTests;
|
import org.apache.hadoop.yarn.webapp.test.WebAppTests;
|
||||||
import org.junit.Test;
|
import org.junit.Test;
|
||||||
|
@ -61,92 +56,17 @@ import com.google.inject.Injector;
|
||||||
public class TestHSWebApp {
|
public class TestHSWebApp {
|
||||||
private static final Log LOG = LogFactory.getLog(TestHSWebApp.class);
|
private static final Log LOG = LogFactory.getLog(TestHSWebApp.class);
|
||||||
|
|
||||||
static class TestAppContext implements AppContext {
|
|
||||||
final ApplicationAttemptId appAttemptID;
|
|
||||||
final ApplicationId appID;
|
|
||||||
final String user = MockJobs.newUserName();
|
|
||||||
final Map<JobId, Job> jobs;
|
|
||||||
final long startTime = System.currentTimeMillis();
|
|
||||||
|
|
||||||
TestAppContext(int appid, int numJobs, int numTasks, int numAttempts,
|
|
||||||
boolean hasFailedTasks) {
|
|
||||||
appID = MockJobs.newAppID(appid);
|
|
||||||
appAttemptID = ApplicationAttemptId.newInstance(appID, 0);
|
|
||||||
jobs = MockJobs.newJobs(appID, numJobs, numTasks, numAttempts,
|
|
||||||
hasFailedTasks);
|
|
||||||
}
|
|
||||||
|
|
||||||
TestAppContext(int appid, int numJobs, int numTasks, int numAttempts) {
|
|
||||||
this(appid, numJobs, numTasks, numAttempts, false);
|
|
||||||
}
|
|
||||||
|
|
||||||
TestAppContext() {
|
|
||||||
this(0, 1, 1, 1);
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public ApplicationAttemptId getApplicationAttemptId() {
|
|
||||||
return appAttemptID;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public ApplicationId getApplicationID() {
|
|
||||||
return appID;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public CharSequence getUser() {
|
|
||||||
return user;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public Job getJob(JobId jobID) {
|
|
||||||
return jobs.get(jobID);
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public Map<JobId, Job> getAllJobs() {
|
|
||||||
return jobs; // OK
|
|
||||||
}
|
|
||||||
|
|
||||||
@SuppressWarnings("rawtypes")
|
|
||||||
@Override
|
|
||||||
public EventHandler getEventHandler() {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public Clock getClock() {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public String getApplicationName() {
|
|
||||||
return "TestApp";
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public long getStartTime() {
|
|
||||||
return startTime;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public ClusterInfo getClusterInfo() {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
@Test public void testAppControllerIndex() {
|
@Test public void testAppControllerIndex() {
|
||||||
TestAppContext ctx = new TestAppContext();
|
MockAppContext ctx = new MockAppContext(0, 1, 1, 1);
|
||||||
Injector injector = WebAppTests.createMockInjector(AppContext.class, ctx);
|
Injector injector = WebAppTests.createMockInjector(AppContext.class, ctx);
|
||||||
HsController controller = injector.getInstance(HsController.class);
|
HsController controller = injector.getInstance(HsController.class);
|
||||||
controller.index();
|
controller.index();
|
||||||
assertEquals(ctx.appID.toString(), controller.get(APP_ID,""));
|
assertEquals(ctx.getApplicationID().toString(), controller.get(APP_ID,""));
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test public void testJobView() {
|
@Test public void testJobView() {
|
||||||
LOG.info("HsJobPage");
|
LOG.info("HsJobPage");
|
||||||
AppContext appContext = new TestAppContext();
|
AppContext appContext = new MockAppContext(0, 1, 1, 1);
|
||||||
Map<String, String> params = TestAMWebApp.getJobParams(appContext);
|
Map<String, String> params = TestAMWebApp.getJobParams(appContext);
|
||||||
WebAppTests.testPage(HsJobPage.class, AppContext.class, appContext, params);
|
WebAppTests.testPage(HsJobPage.class, AppContext.class, appContext, params);
|
||||||
}
|
}
|
||||||
|
@ -154,7 +74,7 @@ public class TestHSWebApp {
|
||||||
@Test
|
@Test
|
||||||
public void testTasksView() {
|
public void testTasksView() {
|
||||||
LOG.info("HsTasksPage");
|
LOG.info("HsTasksPage");
|
||||||
AppContext appContext = new TestAppContext();
|
AppContext appContext = new MockAppContext(0, 1, 1, 1);
|
||||||
Map<String, String> params = TestAMWebApp.getTaskParams(appContext);
|
Map<String, String> params = TestAMWebApp.getTaskParams(appContext);
|
||||||
WebAppTests.testPage(HsTasksPage.class, AppContext.class, appContext,
|
WebAppTests.testPage(HsTasksPage.class, AppContext.class, appContext,
|
||||||
params);
|
params);
|
||||||
|
@ -163,7 +83,7 @@ public class TestHSWebApp {
|
||||||
@Test
|
@Test
|
||||||
public void testTaskView() {
|
public void testTaskView() {
|
||||||
LOG.info("HsTaskPage");
|
LOG.info("HsTaskPage");
|
||||||
AppContext appContext = new TestAppContext();
|
AppContext appContext = new MockAppContext(0, 1, 1, 1);
|
||||||
Map<String, String> params = TestAMWebApp.getTaskParams(appContext);
|
Map<String, String> params = TestAMWebApp.getTaskParams(appContext);
|
||||||
WebAppTests
|
WebAppTests
|
||||||
.testPage(HsTaskPage.class, AppContext.class, appContext, params);
|
.testPage(HsTaskPage.class, AppContext.class, appContext, params);
|
||||||
|
@ -171,7 +91,7 @@ public class TestHSWebApp {
|
||||||
|
|
||||||
@Test public void testAttemptsWithJobView() {
|
@Test public void testAttemptsWithJobView() {
|
||||||
LOG.info("HsAttemptsPage with data");
|
LOG.info("HsAttemptsPage with data");
|
||||||
TestAppContext ctx = new TestAppContext();
|
MockAppContext ctx = new MockAppContext(0, 1, 1, 1);
|
||||||
JobId id = ctx.getAllJobs().keySet().iterator().next();
|
JobId id = ctx.getAllJobs().keySet().iterator().next();
|
||||||
Map<String, String> params = new HashMap<String,String>();
|
Map<String, String> params = new HashMap<String,String>();
|
||||||
params.put(JOB_ID, id.toString());
|
params.put(JOB_ID, id.toString());
|
||||||
|
@ -183,7 +103,7 @@ public class TestHSWebApp {
|
||||||
|
|
||||||
@Test public void testAttemptsView() {
|
@Test public void testAttemptsView() {
|
||||||
LOG.info("HsAttemptsPage");
|
LOG.info("HsAttemptsPage");
|
||||||
AppContext appContext = new TestAppContext();
|
AppContext appContext = new MockAppContext(0, 1, 1, 1);
|
||||||
Map<String, String> params = TestAMWebApp.getTaskParams(appContext);
|
Map<String, String> params = TestAMWebApp.getTaskParams(appContext);
|
||||||
WebAppTests.testPage(HsAttemptsPage.class, AppContext.class,
|
WebAppTests.testPage(HsAttemptsPage.class, AppContext.class,
|
||||||
appContext, params);
|
appContext, params);
|
||||||
|
@ -192,18 +112,18 @@ public class TestHSWebApp {
|
||||||
@Test public void testConfView() {
|
@Test public void testConfView() {
|
||||||
LOG.info("HsConfPage");
|
LOG.info("HsConfPage");
|
||||||
WebAppTests.testPage(HsConfPage.class, AppContext.class,
|
WebAppTests.testPage(HsConfPage.class, AppContext.class,
|
||||||
new TestAppContext());
|
new MockAppContext(0, 1, 1, 1));
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test public void testAboutView() {
|
@Test public void testAboutView() {
|
||||||
LOG.info("HsAboutPage");
|
LOG.info("HsAboutPage");
|
||||||
WebAppTests.testPage(HsAboutPage.class, AppContext.class,
|
WebAppTests.testPage(HsAboutPage.class, AppContext.class,
|
||||||
new TestAppContext());
|
new MockAppContext(0, 1, 1, 1));
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test public void testJobCounterView() {
|
@Test public void testJobCounterView() {
|
||||||
LOG.info("JobCounterView");
|
LOG.info("JobCounterView");
|
||||||
AppContext appContext = new TestAppContext();
|
AppContext appContext = new MockAppContext(0, 1, 1, 1);
|
||||||
Map<String, String> params = TestAMWebApp.getJobParams(appContext);
|
Map<String, String> params = TestAMWebApp.getJobParams(appContext);
|
||||||
WebAppTests.testPage(HsCountersPage.class, AppContext.class,
|
WebAppTests.testPage(HsCountersPage.class, AppContext.class,
|
||||||
appContext, params);
|
appContext, params);
|
||||||
|
@ -211,7 +131,7 @@ public class TestHSWebApp {
|
||||||
|
|
||||||
@Test public void testJobCounterViewForKilledJob() {
|
@Test public void testJobCounterViewForKilledJob() {
|
||||||
LOG.info("JobCounterViewForKilledJob");
|
LOG.info("JobCounterViewForKilledJob");
|
||||||
AppContext appContext = new TestAppContext(0, 1, 1, 1, true);
|
AppContext appContext = new MockAppContext(0, 1, 1, 1, true);
|
||||||
Map<String, String> params = TestAMWebApp.getJobParams(appContext);
|
Map<String, String> params = TestAMWebApp.getJobParams(appContext);
|
||||||
WebAppTests.testPage(HsCountersPage.class, AppContext.class,
|
WebAppTests.testPage(HsCountersPage.class, AppContext.class,
|
||||||
appContext, params);
|
appContext, params);
|
||||||
|
@ -220,7 +140,7 @@ public class TestHSWebApp {
|
||||||
@Test public void testSingleCounterView() {
|
@Test public void testSingleCounterView() {
|
||||||
LOG.info("HsSingleCounterPage");
|
LOG.info("HsSingleCounterPage");
|
||||||
WebAppTests.testPage(HsSingleCounterPage.class, AppContext.class,
|
WebAppTests.testPage(HsSingleCounterPage.class, AppContext.class,
|
||||||
new TestAppContext());
|
new MockAppContext(0, 1, 1, 1));
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
|
@ -228,7 +148,7 @@ public class TestHSWebApp {
|
||||||
LOG.info("HsLogsPage");
|
LOG.info("HsLogsPage");
|
||||||
Injector injector =
|
Injector injector =
|
||||||
WebAppTests.testPage(AggregatedLogsPage.class, AppContext.class,
|
WebAppTests.testPage(AggregatedLogsPage.class, AppContext.class,
|
||||||
new TestAppContext());
|
new MockAppContext(0, 1, 1, 1));
|
||||||
PrintWriter spyPw = WebAppTests.getPrintWriter(injector);
|
PrintWriter spyPw = WebAppTests.getPrintWriter(injector);
|
||||||
verify(spyPw).write("Cannot get container logs without a ContainerId");
|
verify(spyPw).write("Cannot get container logs without a ContainerId");
|
||||||
verify(spyPw).write("Cannot get container logs without a NodeId");
|
verify(spyPw).write("Cannot get container logs without a NodeId");
|
||||||
|
@ -238,7 +158,7 @@ public class TestHSWebApp {
|
||||||
@Test
|
@Test
|
||||||
public void testLogsView2() throws IOException {
|
public void testLogsView2() throws IOException {
|
||||||
LOG.info("HsLogsPage with data");
|
LOG.info("HsLogsPage with data");
|
||||||
TestAppContext ctx = new TestAppContext();
|
MockAppContext ctx = new MockAppContext(0, 1, 1, 1);
|
||||||
Map<String, String> params = new HashMap<String, String>();
|
Map<String, String> params = new HashMap<String, String>();
|
||||||
|
|
||||||
params.put(CONTAINER_ID, MRApp.newContainerId(1, 1, 333, 1)
|
params.put(CONTAINER_ID, MRApp.newContainerId(1, 1, 333, 1)
|
||||||
|
@ -260,7 +180,7 @@ public class TestHSWebApp {
|
||||||
@Test
|
@Test
|
||||||
public void testLogsViewSingle() throws IOException {
|
public void testLogsViewSingle() throws IOException {
|
||||||
LOG.info("HsLogsPage with params for single log and data limits");
|
LOG.info("HsLogsPage with params for single log and data limits");
|
||||||
TestAppContext ctx = new TestAppContext();
|
MockAppContext ctx = new MockAppContext(0, 1, 1, 1);
|
||||||
Map<String, String> params = new HashMap<String, String>();
|
Map<String, String> params = new HashMap<String, String>();
|
||||||
|
|
||||||
final Configuration conf = new YarnConfiguration();
|
final Configuration conf = new YarnConfiguration();
|
||||||
|
@ -295,7 +215,7 @@ public class TestHSWebApp {
|
||||||
@Test
|
@Test
|
||||||
public void testLogsViewBadStartEnd() throws IOException {
|
public void testLogsViewBadStartEnd() throws IOException {
|
||||||
LOG.info("HsLogsPage with bad start/end params");
|
LOG.info("HsLogsPage with bad start/end params");
|
||||||
TestAppContext ctx = new TestAppContext();
|
MockAppContext ctx = new MockAppContext(0, 1, 1, 1);
|
||||||
Map<String, String> params = new HashMap<String, String>();
|
Map<String, String> params = new HashMap<String, String>();
|
||||||
|
|
||||||
params.put("start", "foo");
|
params.put("start", "foo");
|
||||||
|
|
|
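For orientation, the pattern the TestHSWebApp changes above converge on is roughly the sketch below. It is an illustration only, not an excerpt from the patch; the MockAppContext constructor arguments (appId, numJobs, numTasks, numAttempts) and the import locations are assumptions inferred from the usages visible in this diff.

// Hypothetical illustration: a history-server view test using the shared
// MockAppContext instead of declaring its own AppContext inner class.
import org.apache.hadoop.mapreduce.v2.app.AppContext;
import org.apache.hadoop.mapreduce.v2.app.MockAppContext;
import org.apache.hadoop.mapreduce.v2.hs.webapp.HsAboutPage;
import org.apache.hadoop.yarn.webapp.test.WebAppTests;
import org.junit.Test;

public class HsViewTestSketch {
  @Test
  public void testAboutView() {
    // One shared mock context (app 0, 1 job, 1 task, 1 attempt) stands in for
    // the near-identical AppContext implementations removed by this patch.
    AppContext ctx = new MockAppContext(0, 1, 1, 1);
    WebAppTests.testPage(HsAboutPage.class, AppContext.class, ctx);
  }
}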
--- .../TestHsWebServices.java
+++ .../TestHsWebServices.java
@@ -22,28 +22,18 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.fail;
 
 import java.io.StringReader;
-import java.util.Map;
 
 import javax.ws.rs.core.MediaType;
 import javax.xml.parsers.DocumentBuilder;
 import javax.xml.parsers.DocumentBuilderFactory;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.mapreduce.v2.api.records.JobId;
-import org.apache.hadoop.mapreduce.v2.api.records.JobState;
 import org.apache.hadoop.mapreduce.v2.app.AppContext;
-import org.apache.hadoop.mapreduce.v2.app.MockJobs;
-import org.apache.hadoop.mapreduce.v2.app.job.Job;
 import org.apache.hadoop.mapreduce.v2.hs.HistoryContext;
 import org.apache.hadoop.mapreduce.v2.hs.JobHistory;
 import org.apache.hadoop.mapreduce.v2.hs.JobHistoryServer;
-import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobsInfo;
+import org.apache.hadoop.mapreduce.v2.hs.MockHistoryContext;
 import org.apache.hadoop.util.VersionInfo;
-import org.apache.hadoop.yarn.Clock;
-import org.apache.hadoop.yarn.ClusterInfo;
-import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.event.EventHandler;
 import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
 import org.apache.hadoop.yarn.webapp.WebApp;
 import org.apache.hadoop.yarn.webapp.WebServicesTestUtils;
@@ -77,97 +67,14 @@ import com.sun.jersey.test.framework.WebAppDescriptor;
 public class TestHsWebServices extends JerseyTest {
 
   private static Configuration conf = new Configuration();
-  private static TestAppContext appContext;
+  private static HistoryContext appContext;
   private static HsWebApp webApp;
 
-  static class TestAppContext implements HistoryContext {
-    final ApplicationAttemptId appAttemptID;
-    final ApplicationId appID;
-    final String user = MockJobs.newUserName();
-    final Map<JobId, Job> jobs;
-    final long startTime = System.currentTimeMillis();
-
-    TestAppContext(int appid, int numJobs, int numTasks, int numAttempts) {
-      appID = MockJobs.newAppID(appid);
-      appAttemptID = ApplicationAttemptId.newInstance(appID, 0);
-      jobs = MockJobs.newJobs(appID, numJobs, numTasks, numAttempts);
-    }
-
-    TestAppContext() {
-      this(0, 1, 1, 1);
-    }
-
-    @Override
-    public ApplicationAttemptId getApplicationAttemptId() {
-      return appAttemptID;
-    }
-
-    @Override
-    public ApplicationId getApplicationID() {
-      return appID;
-    }
-
-    @Override
-    public CharSequence getUser() {
-      return user;
-    }
-
-    @Override
-    public Job getJob(JobId jobID) {
-      return jobs.get(jobID);
-    }
-
-    @Override
-    public Map<JobId, Job> getAllJobs() {
-      return jobs; // OK
-    }
-
-    @SuppressWarnings("rawtypes")
-    @Override
-    public EventHandler getEventHandler() {
-      return null;
-    }
-
-    @Override
-    public Clock getClock() {
-      return null;
-    }
-
-    @Override
-    public String getApplicationName() {
-      return "TestApp";
-    }
-
-    @Override
-    public long getStartTime() {
-      return startTime;
-    }
-
-    @Override
-    public ClusterInfo getClusterInfo() {
-      return null;
-    }
-
-    @Override
-    public Map<JobId, Job> getAllJobs(ApplicationId appID) {
-      // TODO Auto-generated method stub
-      return null;
-    }
-
-    @Override
-    public JobsInfo getPartialJobs(Long offset, Long count, String user,
-        String queue, Long sBegin, Long sEnd, Long fBegin, Long fEnd,
-        JobState jobState) {
-      // TODO Auto-generated method stub
-      return null;
-    }
-  }
-
   private Injector injector = Guice.createInjector(new ServletModule() {
     @Override
     protected void configureServlets() {
 
-      appContext = new TestAppContext();
+      appContext = new MockHistoryContext(0, 1, 1, 1);
       JobHistory jobHistoryService = new JobHistory();
       HistoryContext historyContext = (HistoryContext) jobHistoryService;
       webApp = new HsWebApp(historyContext);
@@ -356,7 +263,7 @@ public class TestHsWebServices extends JerseyTest {
         JobHistoryServer.historyServerTimeStamp, startedon);
   }
 
-  public void verifyHSInfo(JSONObject info, TestAppContext ctx)
+  public void verifyHSInfo(JSONObject info, AppContext ctx)
       throws JSONException {
     assertEquals("incorrect number of elements", 4, info.length());
 
@@ -365,7 +272,7 @@ public class TestHsWebServices extends JerseyTest {
         info.getLong("startedOn"));
   }
 
-  public void verifyHSInfoXML(String xml, TestAppContext ctx)
+  public void verifyHSInfoXML(String xml, AppContext ctx)
       throws JSONException, Exception {
     DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
     DocumentBuilder db = dbf.newDocumentBuilder();
 
--- .../TestHsWebServicesAttempts.java
+++ .../TestHsWebServicesAttempts.java
@@ -35,22 +35,15 @@ import javax.xml.parsers.DocumentBuilderFactory;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
-import org.apache.hadoop.mapreduce.v2.api.records.JobState;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
 import org.apache.hadoop.mapreduce.v2.app.AppContext;
-import org.apache.hadoop.mapreduce.v2.app.MockJobs;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
 import org.apache.hadoop.mapreduce.v2.app.job.Task;
 import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
 import org.apache.hadoop.mapreduce.v2.hs.HistoryContext;
-import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobsInfo;
+import org.apache.hadoop.mapreduce.v2.hs.MockHistoryContext;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
-import org.apache.hadoop.yarn.Clock;
-import org.apache.hadoop.yarn.ClusterInfo;
-import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.event.EventHandler;
 import org.apache.hadoop.yarn.util.ConverterUtils;
 import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
 import org.apache.hadoop.yarn.webapp.WebApp;
@@ -89,97 +82,14 @@ import com.sun.jersey.test.framework.WebAppDescriptor;
 public class TestHsWebServicesAttempts extends JerseyTest {
 
   private static Configuration conf = new Configuration();
-  private static TestAppContext appContext;
+  private static HistoryContext appContext;
   private static HsWebApp webApp;
 
-  static class TestAppContext implements HistoryContext {
-    final ApplicationAttemptId appAttemptID;
-    final ApplicationId appID;
-    final String user = MockJobs.newUserName();
-    final Map<JobId, Job> jobs;
-    final long startTime = System.currentTimeMillis();
-
-    TestAppContext(int appid, int numJobs, int numTasks, int numAttempts) {
-      appID = MockJobs.newAppID(appid);
-      appAttemptID = ApplicationAttemptId.newInstance(appID, 0);
-      jobs = MockJobs.newJobs(appID, numJobs, numTasks, numAttempts);
-    }
-
-    TestAppContext() {
-      this(0, 1, 2, 1);
-    }
-
-    @Override
-    public ApplicationAttemptId getApplicationAttemptId() {
-      return appAttemptID;
-    }
-
-    @Override
-    public ApplicationId getApplicationID() {
-      return appID;
-    }
-
-    @Override
-    public CharSequence getUser() {
-      return user;
-    }
-
-    @Override
-    public Job getJob(JobId jobID) {
-      return jobs.get(jobID);
-    }
-
-    @Override
-    public Map<JobId, Job> getAllJobs() {
-      return jobs; // OK
-    }
-
-    @SuppressWarnings("rawtypes")
-    @Override
-    public EventHandler getEventHandler() {
-      return null;
-    }
-
-    @Override
-    public Clock getClock() {
-      return null;
-    }
-
-    @Override
-    public String getApplicationName() {
-      return "TestApp";
-    }
-
-    @Override
-    public long getStartTime() {
-      return startTime;
-    }
-
-    @Override
-    public ClusterInfo getClusterInfo() {
-      return null;
-    }
-
-    @Override
-    public Map<JobId, Job> getAllJobs(ApplicationId appID) {
-      // TODO Auto-generated method stub
-      return null;
-    }
-
-    @Override
-    public JobsInfo getPartialJobs(Long offset, Long count, String user,
-        String queue, Long sBegin, Long sEnd, Long fBegin, Long fEnd,
-        JobState jobState) {
-      // TODO Auto-generated method stub
-      return null;
-    }
-  }
-
   private Injector injector = Guice.createInjector(new ServletModule() {
     @Override
     protected void configureServlets() {
 
-      appContext = new TestAppContext();
+      appContext = new MockHistoryContext(0, 1, 2, 1);
       webApp = mock(HsWebApp.class);
       when(webApp.name()).thenReturn("hsmockwebapp");
 
--- .../TestHsWebServicesJobConf.java
+++ .../TestHsWebServicesJobConf.java
@@ -41,18 +41,11 @@ import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
-import org.apache.hadoop.mapreduce.v2.api.records.JobState;
 import org.apache.hadoop.mapreduce.v2.app.AppContext;
-import org.apache.hadoop.mapreduce.v2.app.MockJobs;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
 import org.apache.hadoop.mapreduce.v2.hs.HistoryContext;
-import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobsInfo;
+import org.apache.hadoop.mapreduce.v2.hs.MockHistoryContext;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
-import org.apache.hadoop.yarn.Clock;
-import org.apache.hadoop.yarn.ClusterInfo;
-import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.event.EventHandler;
 import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
 import org.apache.hadoop.yarn.webapp.WebApp;
 import org.apache.hadoop.yarn.webapp.WebServicesTestUtils;
@@ -67,7 +60,6 @@ import org.w3c.dom.Element;
 import org.w3c.dom.NodeList;
 import org.xml.sax.InputSource;
 
-import com.google.common.collect.Maps;
 import com.google.inject.Guice;
 import com.google.inject.Injector;
 import com.google.inject.servlet.GuiceServletContextListener;
@@ -87,94 +79,12 @@ import com.sun.jersey.test.framework.WebAppDescriptor;
 public class TestHsWebServicesJobConf extends JerseyTest {
 
   private static Configuration conf = new Configuration();
-  private static TestAppContext appContext;
+  private static HistoryContext appContext;
   private static HsWebApp webApp;
 
   private static File testConfDir = new File("target",
       TestHsWebServicesJobConf.class.getSimpleName() + "confDir");
 
-  static class TestAppContext implements HistoryContext {
-    final ApplicationAttemptId appAttemptID;
-    final ApplicationId appID;
-    final String user = MockJobs.newUserName();
-    final Map<JobId, Job> jobs;
-    final long startTime = System.currentTimeMillis();
-
-    TestAppContext(int appid, int numTasks, int numAttempts, Path confPath) {
-      appID = MockJobs.newAppID(appid);
-      appAttemptID = ApplicationAttemptId.newInstance(appID, 0);
-      Map<JobId, Job> map = Maps.newHashMap();
-      Job job = MockJobs.newJob(appID, 0, numTasks, numAttempts, confPath);
-      map.put(job.getID(), job);
-      jobs = map;
-    }
-
-    @Override
-    public ApplicationAttemptId getApplicationAttemptId() {
-      return appAttemptID;
-    }
-
-    @Override
-    public ApplicationId getApplicationID() {
-      return appID;
-    }
-
-    @Override
-    public CharSequence getUser() {
-      return user;
-    }
-
-    @Override
-    public Job getJob(JobId jobID) {
-      return jobs.get(jobID);
-    }
-
-    @Override
-    public Map<JobId, Job> getAllJobs() {
-      return jobs; // OK
-    }
-
-    @SuppressWarnings("rawtypes")
-    @Override
-    public EventHandler getEventHandler() {
-      return null;
-    }
-
-    @Override
-    public Clock getClock() {
-      return null;
-    }
-
-    @Override
-    public String getApplicationName() {
-      return "TestApp";
-    }
-
-    @Override
-    public long getStartTime() {
-      return startTime;
-    }
-
-    @Override
-    public ClusterInfo getClusterInfo() {
-      return null;
-    }
-
-    @Override
-    public Map<JobId, Job> getAllJobs(ApplicationId appID) {
-      // TODO Auto-generated method stub
-      return null;
-    }
-
-    @Override
-    public JobsInfo getPartialJobs(Long offset, Long count, String user,
-        String queue, Long sBegin, Long sEnd, Long fBegin, Long fEnd,
-        JobState jobState) {
-      // TODO Auto-generated method stub
-      return null;
-    }
-  }
-
   private Injector injector = Guice.createInjector(new ServletModule() {
     @Override
     protected void configureServlets() {
@@ -202,7 +112,7 @@ public class TestHsWebServicesJobConf extends JerseyTest {
         fail("error creating config file: " + e.getMessage());
       }
 
-      appContext = new TestAppContext(0, 2, 1, confPath);
+      appContext = new MockHistoryContext(0, 2, 1, confPath);
 
       webApp = mock(HsWebApp.class);
       when(webApp.name()).thenReturn("hsmockwebapp");
 
--- .../TestHsWebServicesJobs.java
+++ .../TestHsWebServicesJobs.java
@@ -27,7 +27,6 @@ import static org.junit.Assert.fail;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
 
-import java.io.IOException;
 import java.io.StringReader;
 import java.util.Map;
 
@@ -38,23 +37,12 @@ import javax.xml.parsers.DocumentBuilderFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.v2.api.records.AMInfo;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
-import org.apache.hadoop.mapreduce.v2.api.records.JobState;
 import org.apache.hadoop.mapreduce.v2.app.AppContext;
-import org.apache.hadoop.mapreduce.v2.app.MockJobs;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
-import org.apache.hadoop.mapreduce.v2.hs.CachedHistoryStorage;
 import org.apache.hadoop.mapreduce.v2.hs.HistoryContext;
-import org.apache.hadoop.mapreduce.v2.hs.MockHistoryJobs;
+import org.apache.hadoop.mapreduce.v2.hs.MockHistoryContext;
-import org.apache.hadoop.mapreduce.v2.hs.MockHistoryJobs.JobsPair;
-import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobsInfo;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
-import org.apache.hadoop.yarn.Clock;
-import org.apache.hadoop.yarn.ClusterInfo;
-import org.apache.hadoop.yarn.YarnException;
-import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.NodeId;
-import org.apache.hadoop.yarn.event.EventHandler;
 import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
 import org.apache.hadoop.yarn.webapp.WebApp;
 import org.apache.hadoop.yarn.webapp.WebServicesTestUtils;
@@ -91,116 +79,14 @@ import com.sun.jersey.test.framework.WebAppDescriptor;
 public class TestHsWebServicesJobs extends JerseyTest {
 
   private static Configuration conf = new Configuration();
-  private static TestAppContext appContext;
+  private static MockHistoryContext appContext;
   private static HsWebApp webApp;
 
-  static class TestAppContext implements HistoryContext {
-    final ApplicationAttemptId appAttemptID;
-    final ApplicationId appID;
-    final String user = MockJobs.newUserName();
-    final Map<JobId, Job> partialJobs;
-    final Map<JobId, Job> fullJobs;
-    final long startTime = System.currentTimeMillis();
-
-    TestAppContext(int appid, int numJobs, int numTasks, int numAttempts,
-        boolean hasFailedTasks) {
-      appID = MockJobs.newAppID(appid);
-      appAttemptID = ApplicationAttemptId.newInstance(appID, 0);
-      JobsPair jobs;
-      try {
-        jobs = MockHistoryJobs.newHistoryJobs(appID, numJobs, numTasks,
-            numAttempts, hasFailedTasks);
-      } catch (IOException e) {
-        throw new YarnException(e);
-      }
-      partialJobs = jobs.partial;
-      fullJobs = jobs.full;
-    }
-
-    TestAppContext(int appid, int numJobs, int numTasks, int numAttempts) {
-      this(appid, numJobs, numTasks, numAttempts, false);
-    }
-
-    TestAppContext() {
-      this(0, 1, 2, 1);
-    }
-
-    @Override
-    public ApplicationAttemptId getApplicationAttemptId() {
-      return appAttemptID;
-    }
-
-    @Override
-    public ApplicationId getApplicationID() {
-      return appID;
-    }
-
-    @Override
-    public CharSequence getUser() {
-      return user;
-    }
-
-    @Override
-    public Job getJob(JobId jobID) {
-      return fullJobs.get(jobID);
-    }
-
-    public Job getPartialJob(JobId jobID) {
-      return partialJobs.get(jobID);
-    }
-
-    @Override
-    public Map<JobId, Job> getAllJobs() {
-      return partialJobs; // OK
-    }
-
-    @SuppressWarnings("rawtypes")
-    @Override
-    public EventHandler getEventHandler() {
-      return null;
-    }
-
-    @Override
-    public Clock getClock() {
-      return null;
-    }
-
-    @Override
-    public String getApplicationName() {
-      return "TestApp";
-    }
-
-    @Override
-    public long getStartTime() {
-      return startTime;
-    }
-
-    @Override
-    public ClusterInfo getClusterInfo() {
-      return null;
-    }
-
-    @Override
-    public Map<JobId, Job> getAllJobs(ApplicationId appID) {
-      // TODO Auto-generated method stub
-      return null;
-    }
-
-    @Override
-    public JobsInfo getPartialJobs(Long offset, Long count, String user,
-        String queue, Long sBegin, Long sEnd, Long fBegin, Long fEnd,
-        JobState jobState) {
-      return CachedHistoryStorage.getPartialJobs(this.partialJobs.values(),
-          offset, count, user, queue, sBegin, sEnd, fBegin, fEnd, jobState);
-    }
-  }
-
   private Injector injector = Guice.createInjector(new ServletModule() {
     @Override
     protected void configureServlets() {
 
-      appContext = new TestAppContext();
+      appContext = new MockHistoryContext(0, 1, 2, 1, false);
       webApp = mock(HsWebApp.class);
       when(webApp.name()).thenReturn("hsmockwebapp");
 
@@ -312,7 +198,7 @@ public class TestHsWebServicesJobs extends JerseyTest {
       verifyHsJobPartialXML(job, appContext);
     }
 
-  public void verifyHsJobPartialXML(NodeList nodes, TestAppContext appContext) {
+  public void verifyHsJobPartialXML(NodeList nodes, MockHistoryContext appContext) {
 
     assertEquals("incorrect number of elements", 1, nodes.getLength());
 
@@ -338,7 +224,7 @@ public class TestHsWebServicesJobs extends JerseyTest {
       }
     }
 
-  public void verifyHsJobXML(NodeList nodes, TestAppContext appContext) {
+  public void verifyHsJobXML(NodeList nodes, AppContext appContext) {
 
     assertEquals("incorrect number of elements", 1, nodes.getLength());
 
@@ -640,7 +526,7 @@ public class TestHsWebServicesJobs extends JerseyTest {
   @Test
   public void testJobCountersForKilledJob() throws Exception {
     WebResource r = resource();
-    appContext = new TestAppContext(0, 1, 1, 1, true);
+    appContext = new MockHistoryContext(0, 1, 1, 1, true);
     injector = Guice.createInjector(new ServletModule() {
       @Override
       protected void configureServlets() {
 
--- .../TestHsWebServicesJobsQuery.java
+++ .../TestHsWebServicesJobsQuery.java
@@ -23,7 +23,6 @@ import static org.junit.Assert.assertTrue;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
 
-import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
@@ -35,20 +34,10 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
 import org.apache.hadoop.mapreduce.v2.api.records.JobState;
 import org.apache.hadoop.mapreduce.v2.app.AppContext;
-import org.apache.hadoop.mapreduce.v2.app.MockJobs;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
-import org.apache.hadoop.mapreduce.v2.hs.CachedHistoryStorage;
 import org.apache.hadoop.mapreduce.v2.hs.HistoryContext;
-import org.apache.hadoop.mapreduce.v2.hs.MockHistoryJobs;
+import org.apache.hadoop.mapreduce.v2.hs.MockHistoryContext;
-import org.apache.hadoop.mapreduce.v2.hs.MockHistoryJobs.JobsPair;
-import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobsInfo;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
-import org.apache.hadoop.yarn.Clock;
-import org.apache.hadoop.yarn.ClusterInfo;
-import org.apache.hadoop.yarn.YarnException;
-import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.event.EventHandler;
 import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
 import org.apache.hadoop.yarn.webapp.WebApp;
 import org.apache.hadoop.yarn.webapp.WebServicesTestUtils;
@@ -78,105 +67,14 @@ import com.sun.jersey.test.framework.WebAppDescriptor;
 public class TestHsWebServicesJobsQuery extends JerseyTest {
 
   private static Configuration conf = new Configuration();
-  private static TestAppContext appContext;
+  private static MockHistoryContext appContext;
   private static HsWebApp webApp;
 
-  static class TestAppContext implements HistoryContext {
-    final String user = MockJobs.newUserName();
-    final Map<JobId, Job> fullJobs;
-    final Map<JobId, Job> partialJobs;
-    final long startTime = System.currentTimeMillis();
-
-    TestAppContext(int numJobs, int numTasks, int numAttempts) {
-      JobsPair jobs;
-      try {
-        jobs = MockHistoryJobs.newHistoryJobs(numJobs, numTasks, numAttempts);
-      } catch (IOException e) {
-        throw new YarnException(e);
-      }
-      partialJobs = jobs.partial;
-      fullJobs = jobs.full;
-    }
-
-    TestAppContext() {
-      this(3, 2, 1);
-    }
-
-    @Override
-    public ApplicationAttemptId getApplicationAttemptId() {
-      return null;
-    }
-
-    @Override
-    public ApplicationId getApplicationID() {
-      return null;
-    }
-
-    @Override
-    public CharSequence getUser() {
-      return user;
-    }
-
-    @Override
-    public Job getJob(JobId jobID) {
-      return fullJobs.get(jobID);
-    }
-
-    public Job getPartialJob(JobId jobID) {
-      return partialJobs.get(jobID);
-    }
-
-    @Override
-    public Map<JobId, Job> getAllJobs() {
-      return partialJobs; // OK
-    }
-
-    @SuppressWarnings("rawtypes")
-    @Override
-    public EventHandler getEventHandler() {
-      return null;
-    }
-
-    @Override
-    public Clock getClock() {
-      return null;
-    }
-
-    @Override
-    public String getApplicationName() {
-      return "TestApp";
-    }
-
-    @Override
-    public long getStartTime() {
-      return startTime;
-    }
-
-    @Override
-    public ClusterInfo getClusterInfo() {
-      return null;
-    }
-
-    @Override
-    public Map<JobId, Job> getAllJobs(ApplicationId appID) {
-      // TODO Auto-generated method stub
-      return null;
-    }
-
-    @Override
-    public JobsInfo getPartialJobs(Long offset, Long count, String user,
-        String queue, Long sBegin, Long sEnd, Long fBegin, Long fEnd,
-        JobState jobState) {
-      return CachedHistoryStorage.getPartialJobs(this.partialJobs.values(),
-          offset, count, user, queue, sBegin, sEnd, fBegin, fEnd, jobState);
-    }
-  }
-
   private Injector injector = Guice.createInjector(new ServletModule() {
     @Override
     protected void configureServlets() {
 
-      appContext = new TestAppContext();
+      appContext = new MockHistoryContext(3, 2, 1);
       webApp = mock(HsWebApp.class);
       when(webApp.name()).thenReturn("hsmockwebapp");
 
--- .../TestHsWebServicesTasks.java
+++ .../TestHsWebServicesTasks.java
@@ -34,21 +34,14 @@ import javax.xml.parsers.DocumentBuilderFactory;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
-import org.apache.hadoop.mapreduce.v2.api.records.JobState;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskReport;
 import org.apache.hadoop.mapreduce.v2.app.AppContext;
-import org.apache.hadoop.mapreduce.v2.app.MockJobs;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
 import org.apache.hadoop.mapreduce.v2.app.job.Task;
 import org.apache.hadoop.mapreduce.v2.hs.HistoryContext;
-import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobsInfo;
+import org.apache.hadoop.mapreduce.v2.hs.MockHistoryContext;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
-import org.apache.hadoop.yarn.Clock;
-import org.apache.hadoop.yarn.ClusterInfo;
-import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.event.EventHandler;
 import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
 import org.apache.hadoop.yarn.webapp.WebApp;
 import org.apache.hadoop.yarn.webapp.WebServicesTestUtils;
@@ -85,97 +78,14 @@ import com.sun.jersey.test.framework.WebAppDescriptor;
 public class TestHsWebServicesTasks extends JerseyTest {
 
   private static Configuration conf = new Configuration();
-  private static TestAppContext appContext;
+  private static MockHistoryContext appContext;
   private static HsWebApp webApp;
 
-  static class TestAppContext implements HistoryContext {
-    final ApplicationAttemptId appAttemptID;
-    final ApplicationId appID;
-    final String user = MockJobs.newUserName();
-    final Map<JobId, Job> jobs;
-    final long startTime = System.currentTimeMillis();
-
-    TestAppContext(int appid, int numJobs, int numTasks, int numAttempts) {
-      appID = MockJobs.newAppID(appid);
-      appAttemptID = ApplicationAttemptId.newInstance(appID, 0);
-      jobs = MockJobs.newJobs(appID, numJobs, numTasks, numAttempts);
-    }
-
-    TestAppContext() {
-      this(0, 1, 2, 1);
-    }
-
-    @Override
-    public ApplicationAttemptId getApplicationAttemptId() {
-      return appAttemptID;
-    }
-
-    @Override
-    public ApplicationId getApplicationID() {
-      return appID;
-    }
-
-    @Override
-    public CharSequence getUser() {
-      return user;
-    }
-
-    @Override
-    public Job getJob(JobId jobID) {
-      return jobs.get(jobID);
-    }
-
-    @Override
-    public Map<JobId, Job> getAllJobs() {
-      return jobs; // OK
-    }
-
-    @SuppressWarnings("rawtypes")
-    @Override
-    public EventHandler getEventHandler() {
-      return null;
-    }
-
-    @Override
-    public Clock getClock() {
-      return null;
-    }
-
-    @Override
-    public String getApplicationName() {
-      return "TestApp";
-    }
-
-    @Override
-    public long getStartTime() {
-      return startTime;
-    }
-
-    @Override
-    public ClusterInfo getClusterInfo() {
-      return null;
-    }
-
-    @Override
-    public Map<JobId, Job> getAllJobs(ApplicationId appID) {
-      // TODO Auto-generated method stub
-      return null;
-    }
-
-    @Override
-    public JobsInfo getPartialJobs(Long offset, Long count, String user,
-        String queue, Long sBegin, Long sEnd, Long fBegin, Long fEnd,
-        JobState jobState) {
-      // TODO Auto-generated method stub
-      return null;
-    }
-  }
-
   private Injector injector = Guice.createInjector(new ServletModule() {
     @Override
     protected void configureServlets() {
 
-      appContext = new TestAppContext();
+      appContext = new MockHistoryContext(0, 1, 2, 1);
       webApp = mock(HsWebApp.class);
       when(webApp.name()).thenReturn("hsmockwebapp");
 
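Taken together, each of the web-service tests above now builds its context in configureServlets() from the shared MockHistoryContext instead of a private HistoryContext implementation. A minimal sketch of that setup follows; the Mockito and Guice scaffolding is assumed from the context lines in this diff rather than copied from the patch, and the class and field names here are hypothetical.

// Hypothetical sketch of the shared Jersey/Guice test setup after the change.
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import org.apache.hadoop.mapreduce.v2.hs.HistoryContext;
import org.apache.hadoop.mapreduce.v2.hs.MockHistoryContext;
import org.apache.hadoop.mapreduce.v2.hs.webapp.HsWebApp;

import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.servlet.ServletModule;

public class HsWebServicesTestSketch {
  private static HistoryContext appContext;
  private static HsWebApp webApp;

  private Injector injector = Guice.createInjector(new ServletModule() {
    @Override
    protected void configureServlets() {
      // App 0, 1 job, 2 tasks, 1 attempt: the arguments used by most of the
      // converted test classes shown in this diff.
      appContext = new MockHistoryContext(0, 1, 2, 1);
      webApp = mock(HsWebApp.class);
      when(webApp.name()).thenReturn("hsmockwebapp");
      // ... bind the REST resources and the HistoryContext here, as each
      // individual test class does in the lines shown above ...
    }
  });
}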