MAPREDUCE-2677. Fixed 404 for some links from HistoryServer. Contributed by Robert Evans.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1166901 13f79535-47bb-0310-9956-ffa450edef68
Arun Murthy 2011-09-08 20:59:34 +00:00
parent dd34c4e24f
commit b3c9c3c182
22 changed files with 861 additions and 60 deletions

View File

@@ -1236,6 +1236,9 @@ Release 0.23.0 - Unreleased
MAPREDUCE-2844. Fixed display of nodes in UI. (Ravi Teja Ch N V via
acmurthy)
MAPREDUCE-2677. Fixed 404 for some links from HistoryServer. (Robert Evans
via acmurthy)
Release 0.22.0 - Unreleased
INCOMPATIBLE CHANGES

View File

@@ -28,11 +28,27 @@
@RequestScoped
public class App {
final AppContext context;
Job job;
Task task;
private Job job;
private Task task;
@Inject
App(AppContext ctx) {
context = ctx;
}
void setJob(Job job) {
this.job = job;
}
public Job getJob() {
return job;
}
void setTask(Task task) {
this.task = task;
}
public Task getTask() {
return task;
}
}

View File

@@ -31,9 +31,13 @@
import org.apache.hadoop.yarn.util.Apps;
import org.apache.hadoop.yarn.util.Times;
import org.apache.hadoop.yarn.webapp.Controller;
import org.apache.hadoop.yarn.webapp.View;
import com.google.inject.Inject;
/**
* This class renders the various pages that the web app supports.
*/
public class AppController extends Controller implements AMParams {
final App app;
@@ -50,10 +54,16 @@ protected AppController(App app, Configuration conf, RequestContext ctx) {
this(app, conf, ctx, "am");
}
/**
* Render the default (index.html) page for the Application Controller
*/
@Override public void index() {
setTitle(join("MapReduce Application ", $(APP_ID)));
}
/**
* Render the /info page with an overview of the current application.
*/
public void info() {
info("Application Master Overview").
_("Application ID:", $(APP_ID)).
@@ -65,22 +75,52 @@ public void info() {
render(InfoPage.class);
}
/**
* @return The class that will render the /job page
*/
protected Class<? extends View> jobPage() {
return JobPage.class;
}
/**
* Render the /job page
*/
public void job() {
requireJob();
render(JobPage.class);
render(jobPage());
}
/**
* @return the class that will render the /jobcounters page
*/
protected Class<? extends View> countersPage() {
return CountersPage.class;
}
/**
* Render the /jobcounters page
*/
public void jobCounters() {
requireJob();
if (app.job != null) {
if (app.getJob() != null) {
setTitle(join("Counters for ", $(JOB_ID)));
}
render(CountersPage.class);
render(countersPage());
}
/**
* @return the class that will render the /tasks page
*/
protected Class<? extends View> tasksPage() {
return TasksPage.class;
}
/**
* Render the /tasks page
*/
public void tasks() {
requireJob();
if (app.job != null) {
if (app.getJob() != null) {
try {
String tt = $(TASK_TYPE);
tt = tt.isEmpty() ? "All" : StringUtils.capitalize(MRApps.taskType(tt).
@@ -90,20 +130,40 @@ public void tasks() {
badRequest(e.getMessage());
}
}
render(TasksPage.class);
render(tasksPage());
}
/**
* @return the class that will render the /task page
*/
protected Class<? extends View> taskPage() {
return TaskPage.class;
}
/**
* Render the /task page
*/
public void task() {
requireTask();
if (app.task != null) {
if (app.getTask() != null) {
setTitle(join("Attempts for ", $(TASK_ID)));
}
render(TaskPage.class);
render(taskPage());
}
/**
* @return the class that will render the /attempts page
*/
protected Class<? extends View> attemptsPage() {
return AttemptsPage.class;
}
/**
* Render the /attempts page
*/
public void attempts() {
requireJob();
if (app.job != null) {
if (app.getJob() != null) {
try {
String taskType = $(TASK_TYPE);
if (taskType.isEmpty()) {
@@ -119,27 +179,38 @@ public void attempts() {
badRequest(e.getMessage());
}
}
render(AttemptsPage.class);
render(attemptsPage());
}
/**
* Render a BAD_REQUEST error.
* @param s the error message to include.
*/
void badRequest(String s) {
setStatus(response().SC_BAD_REQUEST);
setTitle(join("Bad request: ", s));
}
/**
* Render a NOT_FOUND error.
* @param s the error message to include.
*/
void notFound(String s) {
setStatus(response().SC_NOT_FOUND);
setTitle(join("Not found: ", s));
}
/**
* Ensure that a JOB_ID was passed into the page.
*/
void requireJob() {
try {
if ($(JOB_ID).isEmpty()) {
throw new RuntimeException("missing job ID");
}
JobId jobID = MRApps.toJobID($(JOB_ID));
app.job = app.context.getJob(jobID);
if (app.job == null) {
app.setJob(app.context.getJob(jobID));
if (app.getJob() == null) {
notFound($(JOB_ID));
}
} catch (Exception e) {
@@ -147,18 +218,21 @@ void requireJob() {
}
}
/**
* Ensure that a TASK_ID was passed into the page.
*/
void requireTask() {
try {
if ($(TASK_ID).isEmpty()) {
throw new RuntimeException("missing task ID");
}
TaskId taskID = MRApps.toTaskID($(TASK_ID));
app.job = app.context.getJob(taskID.getJobId());
if (app.job == null) {
app.setJob(app.context.getJob(taskID.getJobId()));
if (app.getJob() == null) {
notFound(MRApps.toString(taskID.getJobId()));
} else {
app.task = app.job.getTask(taskID);
if (app.task == null) {
app.setTask(app.getJob().getTask(taskID));
if (app.getTask() == null) {
notFound($(TASK_ID));
}
}
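
The new protected `*Page()` methods turn AppController into a template: each public action keeps its flow (require the job, set the title, render) but delegates the choice of view class to an overridable hook, so a subclass only swaps the view. A minimal sketch of that pattern, using illustrative names that are not part of the patch:

```java
// Minimal sketch of the hook pattern introduced above; BaseController,
// DefaultJobPage and HistoryJobPage are illustrative names, not patch code.
abstract class BaseController {
  // Subclasses override this hook to swap the view without touching job().
  protected Class<?> jobPage() { return DefaultJobPage.class; }

  public void job() {
    render(jobPage());                    // shared flow, pluggable view
  }

  void render(Class<?> page) {
    System.out.println("rendering " + page.getSimpleName());
  }
}

class HistoryController extends BaseController {
  @Override protected Class<?> jobPage() { return HistoryJobPage.class; }
}

class DefaultJobPage {}
class HistoryJobPage {}

public class HookPatternDemo {
  public static void main(String[] args) {
    new HistoryController().job();        // prints "rendering HistoryJobPage"
  }
}
```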

View File

@@ -57,7 +57,7 @@ protected Collection<TaskAttempt> getTaskAttempts() {
String attemptStateStr = $(ATTEMPT_STATE);
TaskAttemptStateUI neededState = MRApps
.taskAttemptState(attemptStateStr);
for (Task task : super.app.job.getTasks(taskType).values()) {
for (Task task : super.app.getJob().getTasks(taskType).values()) {
Map<TaskAttemptId, TaskAttempt> attempts = task.getAttempts();
for (TaskAttempt attempt : attempts.values()) {
if (neededState.correspondsTo(attempt.getState())) {

View File

@@ -45,8 +45,8 @@ public class NavBlock extends HtmlBlock {
ul().
li().a(url("app/info"), "About")._().
li().a(url("app"), "Jobs")._()._();
if (app.job != null) {
String jobid = MRApps.toString(app.job.getID());
if (app.getJob() != null) {
String jobid = MRApps.toString(app.getJob().getID());
nav.
h3("Job").
ul().

View File

@@ -98,11 +98,11 @@ protected void render(Block html) {
}
protected boolean isValidRequest() {
return app.task != null;
return app.getTask() != null;
}
protected Collection<TaskAttempt> getTaskAttempts() {
return app.task.getAttempts().values();
return app.getTask().getAttempts().values();
}
}

View File

@@ -42,7 +42,7 @@ public class TasksBlock extends HtmlBlock {
}
@Override protected void render(Block html) {
if (app.job == null) {
if (app.getJob() == null) {
html.
h2($(TITLE));
return;
@@ -63,7 +63,7 @@ public class TasksBlock extends HtmlBlock {
th("Finish Time").
th("Elapsed Time")._()._().
tbody();
for (Task task : app.job.getTasks().values()) {
for (Task task : app.getJob().getTasks().values()) {
if (type != null && task.getType() != type) {
continue;
}

View File

@@ -43,6 +43,12 @@
<type>test-jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-yarn-common</artifactId>
<type>test-jar</type>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<plugins>

View File

@@ -279,7 +279,8 @@ public int getTotalReduces() {
@Override
public boolean isUber() {
throw new YarnException("Not yet implemented!");
LOG.warn("isUber is not yet implemented");
return false;
}
@Override

View File

@@ -62,7 +62,7 @@
import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
import org.apache.hadoop.mapreduce.v2.app.job.Job;
import org.apache.hadoop.mapreduce.v2.app.job.Task;
import org.apache.hadoop.mapreduce.v2.hs.webapp.HSWebApp;
import org.apache.hadoop.mapreduce.v2.hs.webapp.HsWebApp;
import org.apache.hadoop.mapreduce.v2.jobhistory.JHConfig;
import org.apache.hadoop.mapreduce.v2.security.client.ClientHSSecurityInfo;
import org.apache.hadoop.net.NetUtils;
@@ -132,7 +132,7 @@ public void start() {
}
private void initializeWebApp(Configuration conf) {
webApp = new HSWebApp(history);
webApp = new HsWebApp(history);
String bindAddress = conf.get(JHConfig.HS_WEBAPP_BIND_ADDRESS,
JHConfig.DEFAULT_HS_WEBAPP_BIND_ADDRESS);
WebApps.$for("yarn", this).at(bindAddress).start(webApp);

View File

@@ -0,0 +1,52 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapreduce.v2.hs.webapp;
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.ACCORDION;
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.initID;
import org.apache.hadoop.util.VersionInfo;
import org.apache.hadoop.yarn.webapp.SubView;
import org.apache.hadoop.yarn.webapp.view.InfoBlock;
/**
* A page that shows info about the history server
*/
public class HsAboutPage extends HsView {
/*
* (non-Javadoc)
* @see org.apache.hadoop.mapreduce.v2.hs.webapp.HsView#preHead(org.apache.hadoop.yarn.webapp.hamlet.Hamlet.HTML)
*/
@Override protected void preHead(Page.HTML<_> html) {
commonPreHead(html);
//override the nav config from commonPreHead
set(initID(ACCORDION, "nav"), "{autoHeight:false, active:0}");
}
/**
* The content of this page is the server information block
* @return InfoBlock.class
*/
@Override protected Class<? extends SubView> content() {
info("History Server").
_("BuildVersion", VersionInfo.getBuildVersion());
return InfoBlock.class;
}
}
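
content() registers key/value pairs via info(...) and lets InfoBlock render them, so additional rows about the server can be chained on the same way. A hypothetical variant (the extra "Hadoop Version" row is not part of the patch):

```java
// Hypothetical variant of content(): InfoBlock renders every pair registered
// here, so more rows can simply be chained on. VersionInfo.getVersion() is the
// standard Hadoop version string; the extra row is not in the patch.
@Override protected Class<? extends SubView> content() {
  info("History Server").
    _("BuildVersion", VersionInfo.getBuildVersion()).
    _("Hadoop Version", VersionInfo.getVersion());
  return InfoBlock.class;
}
```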

View File

@@ -21,41 +21,123 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.v2.app.webapp.App;
import org.apache.hadoop.mapreduce.v2.app.webapp.AppController;
import org.apache.hadoop.yarn.webapp.View;
import com.google.inject.Inject;
/**
* This class renders the various pages that the History Server WebApp supports
*/
public class HsController extends AppController {
@Inject HsController(App app, Configuration conf, RequestContext ctx) {
super(app, conf, ctx, "History");
}
/*
* (non-Javadoc)
* @see org.apache.hadoop.mapreduce.v2.app.webapp.AppController#index()
*/
@Override
public void index() {
// TODO Auto-generated method stub
setTitle("JobHistory");
}
/*
* (non-Javadoc)
* @see org.apache.hadoop.mapreduce.v2.app.webapp.AppController#jobPage()
*/
@Override
protected Class<? extends View> jobPage() {
return HsJobPage.class;
}
/*
* (non-Javadoc)
* @see org.apache.hadoop.mapreduce.v2.app.webapp.AppController#countersPage()
*/
@Override
protected Class<? extends View> countersPage() {
return HsCountersPage.class;
}
/*
* (non-Javadoc)
* @see org.apache.hadoop.mapreduce.v2.app.webapp.AppController#tasksPage()
*/
@Override
protected Class<? extends View> tasksPage() {
return HsTasksPage.class;
}
/*
* (non-Javadoc)
* @see org.apache.hadoop.mapreduce.v2.app.webapp.AppController#taskPage()
*/
@Override
protected Class<? extends View> taskPage() {
return HsTaskPage.class;
}
// Need all of these methods here also as Guice doesn't look into parent
// classes.
/*
* (non-Javadoc)
* @see org.apache.hadoop.mapreduce.v2.app.webapp.AppController#job()
*/
@Override
public void job() {
super.job();
}
/*
* (non-Javadoc)
* @see org.apache.hadoop.mapreduce.v2.app.webapp.AppController#jobCounters()
*/
@Override
public void jobCounters() {
super.jobCounters();
}
/*
* (non-Javadoc)
* @see org.apache.hadoop.mapreduce.v2.app.webapp.AppController#tasks()
*/
@Override
public void tasks() {
super.tasks();
}
/*
* (non-Javadoc)
* @see org.apache.hadoop.mapreduce.v2.app.webapp.AppController#task()
*/
@Override
public void task() {
super.task();
}
/*
* (non-Javadoc)
* @see org.apache.hadoop.mapreduce.v2.app.webapp.AppController#attempts()
*/
@Override
public void attempts() {
super.attempts();
}
/**
* @return the page about the current server.
*/
protected Class<? extends View> aboutPage() {
return HsAboutPage.class;
}
/**
* Render a page about the current server.
*/
public void about() {
render(aboutPage());
}
}
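
The comment above about Guice not looking into parent classes refers to action dispatch: one plausible reading is that the framework resolves an action only among methods declared directly on the bound controller class, so a public method inherited from AppController is never found and each action has to be redeclared as a trivial super call. The JDK reflection behavior that makes such a distinction matter can be shown in a standalone illustration (not webapp code):

```java
import java.lang.reflect.Method;

// Standalone illustration (not webapp code) of why inherited methods can be
// invisible to a framework that scans only the methods declared on a class.
public class DeclaredVsInherited {
  static class Base { public void job() { System.out.println("Base.job"); } }
  static class Sub extends Base { /* inherits job(), declares nothing */ }

  public static void main(String[] args) throws Exception {
    // getMethod(...) resolves public methods, including inherited ones.
    Method viaGetMethod = Sub.class.getMethod("job");
    System.out.println("getMethod found: " + viaGetMethod);

    // getDeclaredMethod(...) sees only methods declared directly on Sub,
    // so the inherited job() is not found here.
    try {
      Sub.class.getDeclaredMethod("job");
    } catch (NoSuchMethodException e) {
      System.out.println("getDeclaredMethod did not find job() on Sub");
    }
  }
}
```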

View File

@@ -0,0 +1,63 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapreduce.v2.hs.webapp;
import org.apache.hadoop.mapreduce.v2.app.webapp.CountersBlock;
import org.apache.hadoop.yarn.webapp.SubView;
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.*;
/**
* Render the counters page
*/
public class HsCountersPage extends HsView {
/*
* (non-Javadoc)
* @see org.apache.hadoop.mapreduce.v2.hs.webapp.HsView#preHead(org.apache.hadoop.yarn.webapp.hamlet.Hamlet.HTML)
*/
@Override protected void preHead(Page.HTML<_> html) {
commonPreHead(html);
set(initID(ACCORDION, "nav"), "{autoHeight:false, active:1}");
set(DATATABLES_SELECTOR, "#counters .dt-counters");
set(initSelector(DATATABLES),
"{bJQueryUI:true, sDom:'t', iDisplayLength:-1}");
}
/*
* (non-Javadoc)
* @see org.apache.hadoop.yarn.webapp.view.TwoColumnLayout#postHead(org.apache.hadoop.yarn.webapp.hamlet.Hamlet.HTML)
*/
@Override protected void postHead(Page.HTML<_> html) {
html.
style("#counters, .dt-counters { table-layout: fixed }",
"#counters th { overflow: hidden; vertical-align: center }",
"#counters .dataTables_wrapper { min-height: 1em }",
"#counters .group { width: 10em }",
"#counters .name { width: 30em }");
}
/**
* The content of this page is the CountersBlock.
* @return CountersBlock.class
*/
@Override protected Class<? extends SubView> content() {
return CountersBlock.class;
}
}

View File

@@ -0,0 +1,54 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapreduce.v2.hs.webapp;
import static org.apache.hadoop.mapreduce.v2.app.webapp.AMParams.JOB_ID;
import static org.apache.hadoop.yarn.util.StringHelper.join;
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.ACCORDION;
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.initID;
import org.apache.hadoop.mapreduce.v2.app.webapp.JobBlock;
import org.apache.hadoop.yarn.webapp.SubView;
/**
* Render a page that describes a specific job.
*/
public class HsJobPage extends HsView {
/*
* (non-Javadoc)
* @see org.apache.hadoop.mapreduce.v2.hs.webapp.HsView#preHead(org.apache.hadoop.yarn.webapp.hamlet.Hamlet.HTML)
*/
@Override protected void preHead(Page.HTML<_> html) {
String jobID = $(JOB_ID);
set(TITLE, jobID.isEmpty() ? "Bad request: missing job ID"
: join("MapReduce Job ", $(JOB_ID)));
commonPreHead(html);
//Override the nav config from the commonPreHead
set(initID(ACCORDION, "nav"), "{autoHeight:false, active:1}");
}
/**
* The content of this page is the JobBlock
* @return JobBlock.class
*/
@Override protected Class<? extends SubView> content() {
return JobBlock.class;
}
}

View File

@@ -0,0 +1,66 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapreduce.v2.hs.webapp;
import org.apache.hadoop.mapreduce.v2.app.webapp.App;
import org.apache.hadoop.mapreduce.v2.util.MRApps;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.DIV;
import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
import com.google.inject.Inject;
/**
* The navigation block for the history server
*/
public class HsNavBlock extends HtmlBlock {
final App app;
@Inject HsNavBlock(App app) { this.app = app; }
/*
* (non-Javadoc)
* @see org.apache.hadoop.yarn.webapp.view.HtmlBlock#render(org.apache.hadoop.yarn.webapp.view.HtmlBlock.Block)
*/
@Override protected void render(Block html) {
DIV<Hamlet> nav = html.
div("#nav").
h3("Application").
ul().
li().a("about", "About")._().
li().a(url("app"), "Jobs")._()._();
if (app.getJob() != null) {
String jobid = MRApps.toString(app.getJob().getID());
nav.
h3("Job").
ul().
li().a(url("job", jobid), "Overview")._().
li().a(url("jobcounters", jobid), "Counters")._().
li().a(url("tasks", jobid, "m"), "Map tasks")._().
li().a(url("tasks", jobid, "r"), "Reduce tasks")._()._();
}
nav.
h3("Tools").
ul().
li().a("/conf", "Configuration")._().
li().a("/stacks", "Server stacks")._().
li().a("/metrics", "Server metrics")._()._()._().
div("#themeswitcher")._();
}
}

View File

@@ -0,0 +1,159 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapreduce.v2.hs.webapp;
import static org.apache.hadoop.yarn.util.StringHelper.percent;
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.ACCORDION;
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.DATATABLES;
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.DATATABLES_ID;
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.initID;
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.tableInit;
import java.util.Collection;
import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
import org.apache.hadoop.mapreduce.v2.app.webapp.App;
import org.apache.hadoop.mapreduce.v2.util.MRApps;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.util.ConverterUtils;
import org.apache.hadoop.yarn.util.Times;
import org.apache.hadoop.yarn.webapp.SubView;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TABLE;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TBODY;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TD;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TR;
import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
import com.google.common.base.Joiner;
import com.google.inject.Inject;
/**
* A page that shows the status of a given task
*/
public class HsTaskPage extends HsView {
/**
* A Block of HTML that will render a given task attempt.
*/
static class AttemptsBlock extends HtmlBlock {
final App app;
@Inject
AttemptsBlock(App ctx) {
app = ctx;
}
@Override
protected void render(Block html) {
if (!isValidRequest()) {
html.
h2($(TITLE));
return;
}
TBODY<TABLE<Hamlet>> tbody = html.
table("#attempts").
thead().
tr().
th(".id", "Attempt").
th(".progress", "Progress").
th(".state", "State").
th(".node", "node").
th(".tsh", "Started").
th(".tsh", "Finished").
th(".tsh", "Elapsed").
th(".note", "Note")._()._().
tbody();
for (TaskAttempt ta : getTaskAttempts()) {
String taid = MRApps.toString(ta.getID());
String progress = percent(ta.getProgress());
ContainerId containerId = ta.getAssignedContainerID();
String nodeHttpAddr = ta.getNodeHttpAddress();
long startTime = ta.getLaunchTime();
long finishTime = ta.getFinishTime();
long elapsed = Times.elapsed(startTime, finishTime);
TD<TR<TBODY<TABLE<Hamlet>>>> nodeTd = tbody.
tr().
td(".id", taid).
td(".progress", progress).
td(".state", ta.getState().toString()).
td().
a(".nodelink", url("http://", nodeHttpAddr), nodeHttpAddr);
if (containerId != null) {
String containerIdStr = ConverterUtils.toString(containerId);
nodeTd._(" ").
a(".logslink", url("http://", nodeHttpAddr, "yarn", "containerlogs",
containerIdStr), "logs");
}
nodeTd._().
td(".ts", Times.format(startTime)).
td(".ts", Times.format(finishTime)).
td(".dt", StringUtils.formatTime(elapsed)).
td(".note", Joiner.on('\n').join(ta.getDiagnostics()))._();
}
tbody._()._();
}
/**
* @return true if this is a valid request, else false.
*/
protected boolean isValidRequest() {
return app.getTask() != null;
}
/**
* @return all of the attempts to render.
*/
protected Collection<TaskAttempt> getTaskAttempts() {
return app.getTask().getAttempts().values();
}
}
/*
* (non-Javadoc)
* @see org.apache.hadoop.mapreduce.v2.hs.webapp.HsView#preHead(org.apache.hadoop.yarn.webapp.hamlet.Hamlet.HTML)
*/
@Override protected void preHead(Page.HTML<_> html) {
commonPreHead(html);
//override the nav config from commonPreHead
set(initID(ACCORDION, "nav"), "{autoHeight:false, active:1}");
//Set up the JavaScript and CSS for the attempts table
set(DATATABLES_ID, "attempts");
set(initID(DATATABLES, "attempts"), attemptsTableInit());
setTableStyles(html, "attempts");
}
/**
* The content of this page is the attempts block
* @return AttemptsBlock.class
*/
@Override protected Class<? extends SubView> content() {
return AttemptsBlock.class;
}
/**
* @return The end of the JS map that is the jquery datatable config for the
* attempts table.
*/
private String attemptsTableInit() {
return tableInit().append("}").toString();
}
}

View File

@@ -0,0 +1,65 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapreduce.v2.hs.webapp;
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.ACCORDION;
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.DATATABLES;
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.DATATABLES_ID;
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.initID;
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.tableInit;
import org.apache.hadoop.mapreduce.v2.app.webapp.TasksBlock;
import org.apache.hadoop.yarn.webapp.SubView;
/**
* A page showing the tasks for a given application.
*/
public class HsTasksPage extends HsView {
/*
* (non-Javadoc)
* @see org.apache.hadoop.mapreduce.v2.hs.webapp.HsView#preHead(org.apache.hadoop.yarn.webapp.hamlet.Hamlet.HTML)
*/
@Override protected void preHead(Page.HTML<_> html) {
commonPreHead(html);
set(DATATABLES_ID, "tasks");
set(initID(ACCORDION, "nav"), "{autoHeight:false, active:1}");
set(initID(DATATABLES, "tasks"), tasksTableInit());
setTableStyles(html, "tasks");
}
/**
* The content of this page is the TasksBlock
* @return TasksBlock.class
*/
@Override protected Class<? extends SubView> content() {
return TasksBlock.class;
}
/**
* @return the end of the JS map that is the jquery datatable configuration
* for the tasks table.
*/
private String tasksTableInit() {
return tableInit().
append(",aoColumns:[{sType:'title-numeric'},{sType:'title-numeric',").
append("bSearchable:false},null,{sType:'title-numeric'},").
append("{sType:'title-numeric'},{sType:'title-numeric'}]}").toString();
}
}

View File

@@ -24,7 +24,14 @@
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.*;
/**
* A view that should be used as the base class for all history server pages.
*/
public class HsView extends TwoColumnLayout {
/*
* (non-Javadoc)
* @see org.apache.hadoop.yarn.webapp.view.TwoColumnLayout#preHead(org.apache.hadoop.yarn.webapp.hamlet.Hamlet.HTML)
*/
@Override protected void preHead(Page.HTML<_> html) {
commonPreHead(html);
set(DATATABLES_ID, "jobs");
@@ -32,10 +39,13 @@ public class HsView extends TwoColumnLayout {
setTableStyles(html, "jobs");
}
/**
* The prehead that should be common to all subclasses.
* @param html used to render.
*/
protected void commonPreHead(Page.HTML<_> html) {
//html.meta_http("refresh", "10");
set(ACCORDION_ID, "nav");
set(initID(ACCORDION, "nav"), "{autoHeight:false, active:1}");
set(initID(ACCORDION, "nav"), "{autoHeight:false, active:0}");
set(THEMESWITCHER_ID, "themeswitcher");
}
@@ -43,17 +53,27 @@ protected void commonPreHead(Page.HTML<_> html) {
* (non-Javadoc)
* @see org.apache.hadoop.yarn.webapp.view.TwoColumnLayout#nav()
*/
@Override
protected Class<? extends SubView> nav() {
return org.apache.hadoop.mapreduce.v2.app.webapp.NavBlock.class;
return HsNavBlock.class;
}
/*
* (non-Javadoc)
* @see org.apache.hadoop.yarn.webapp.view.TwoColumnLayout#content()
*/
@Override
protected Class<? extends SubView> content() {
return JobsBlock.class;
}
//TODO We need a way to move all of the javascript/CSS that is for a subview
// into that subview.
/**
* @return The end of a javascript map that is the jquery datatable
* configuration for the jobs table. The jobs table is assumed to be
* rendered by the class returned from {@link #content()}
*/
private String jobsTableInit() {
return tableInit().
append(",aoColumns:[{sType:'title-numeric'},").

View File

@@ -25,11 +25,11 @@
import org.apache.hadoop.mapreduce.v2.hs.HistoryContext;
import org.apache.hadoop.yarn.webapp.WebApp;
public class HSWebApp extends WebApp implements AMParams {
public class HsWebApp extends WebApp implements AMParams {
private HistoryContext history;
public HSWebApp(HistoryContext history) {
public HsWebApp(HistoryContext history) {
this.history = history;
}
@@ -44,6 +44,7 @@ public void setup() {
route(pajoin("/attempts", JOB_ID, TASK_TYPE, ATTEMPT_STATE),
HsController.class, "attempts");
route(pajoin("/task", TASK_ID), HsController.class, "task");
route("/about", HsController.class, "about");
}
}
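
The added route("/about", HsController.class, "about") line is what gives the history server's About link a handler: a request path is associated with a controller class plus the name of the action method to invoke on it. A toy dispatcher, assuming nothing about the real YARN webapp Router beyond that mapping idea:

```java
import java.util.HashMap;
import java.util.Map;

// Toy sketch of path -> (controller, action) dispatch, only to illustrate the
// idea behind route("/about", HsController.class, "about"); it is not the
// YARN webapp Router.
public class TinyRouter {
  static final class Route {
    final Class<?> controller;
    final String action;
    Route(Class<?> controller, String action) {
      this.controller = controller;
      this.action = action;
    }
  }

  private final Map<String, Route> routes = new HashMap<>();

  void route(String path, Class<?> controller, String action) {
    routes.put(path, new Route(controller, action));
  }

  void dispatch(String path) throws Exception {
    Route r = routes.get(path);
    if (r == null) {                          // unregistered path: the 404 case
      System.out.println("404 " + path);
      return;
    }
    Object c = r.controller.getDeclaredConstructor().newInstance();
    r.controller.getMethod(r.action).invoke(c);
  }

  public static class AboutController {
    public void about() { System.out.println("rendering about page"); }
  }

  public static void main(String[] args) throws Exception {
    TinyRouter router = new TinyRouter();
    router.route("/about", AboutController.class, "about");
    router.dispatch("/about");    // invokes AboutController.about()
    router.dispatch("/missing");  // no route registered -> 404
  }
}
```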

View File

@@ -1,27 +1,126 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapreduce.v2.hs.webapp;
import org.apache.hadoop.yarn.webapp.WebApps;
import static org.apache.hadoop.mapreduce.v2.app.webapp.AMParams.APP_ID;
import static org.junit.Assert.assertEquals;
import java.util.Map;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.app.AppContext;
import org.apache.hadoop.mapreduce.v2.app.MockJobs;
import org.apache.hadoop.mapreduce.v2.app.job.Job;
import org.apache.hadoop.yarn.Clock;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.event.EventHandler;
import org.apache.hadoop.yarn.util.Apps;
import org.apache.hadoop.yarn.webapp.test.WebAppTests;
import org.junit.Test;
import com.google.inject.Injector;
public class TestHSWebApp {
public static void main(String[] args) {
WebApps.$for("yarn").at(19888).start().joinThread();
static class TestAppContext implements AppContext {
final ApplicationAttemptId appAttemptID;
final ApplicationId appID;
final String user = MockJobs.newUserName();
final Map<JobId, Job> jobs;
final long startTime = System.currentTimeMillis();
TestAppContext(int appid, int numJobs, int numTasks, int numAttempts) {
appID = MockJobs.newAppID(appid);
appAttemptID = MockJobs.newAppAttemptID(appID, 0);
jobs = MockJobs.newJobs(appID, numJobs, numTasks, numAttempts);
}
TestAppContext() {
this(0, 1, 1, 1);
}
@Override
public ApplicationAttemptId getApplicationAttemptId() {
return appAttemptID;
}
@Override
public ApplicationId getApplicationID() {
return appID;
}
@Override
public CharSequence getUser() {
return user;
}
@Override
public Job getJob(JobId jobID) {
return jobs.get(jobID);
}
@Override
public Map<JobId, Job> getAllJobs() {
return jobs; // OK
}
@Override
public EventHandler getEventHandler() {
return null;
}
@Override
public Clock getClock() {
return null;
}
@Override
public String getApplicationName() {
return "TestApp";
}
@Override
public long getStartTime() {
return startTime;
}
}
@Test public void testAppControllerIndex() {
TestAppContext ctx = new TestAppContext();
Injector injector = WebAppTests.createMockInjector(AppContext.class, ctx);
HsController controller = injector.getInstance(HsController.class);
controller.index();
assertEquals(Apps.toString(ctx.appID), controller.get(APP_ID,""));
}
@Test public void testJobView() {
WebAppTests.testPage(HsJobPage.class, AppContext.class, new TestAppContext());
}
@Test public void testTasksView() {
WebAppTests.testPage(HsTasksPage.class, AppContext.class,
new TestAppContext());
}
@Test public void testTaskView() {
WebAppTests.testPage(HsTaskPage.class, AppContext.class,
new TestAppContext());
}
}
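
The tests exercise each new page through WebAppTests.testPage with a mock AppContext; assuming the new about page needs nothing beyond the injected context, a hypothetical additional test inside TestHSWebApp would follow the same shape:

```java
// Hypothetical additional test for the new about page, following the same
// WebAppTests.testPage pattern as above; not part of the patch.
@Test public void testAboutView() {
  WebAppTests.testPage(HsAboutPage.class, AppContext.class,
                       new TestAppContext());
}
```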

View File

@@ -26,6 +26,11 @@
import org.apache.hadoop.yarn.webapp.WebAppException;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
/**
* The parent class of all HTML pages. Override
* {@link #render(org.apache.hadoop.yarn.webapp.hamlet.Hamlet.HTML)}
* to actually render the page.
*/
public abstract class HtmlPage extends TextView {
public static class _ implements Hamlet._ {
@@ -79,6 +84,10 @@ public void render() {
}
}
/**
* Render the HTML page.
* @param html the page to render data to.
*/
protected abstract void render(Page.HTML<_> html);
}

View File

@@ -18,21 +18,25 @@
package org.apache.hadoop.yarn.webapp.view;
import com.google.common.collect.Lists;
import com.google.inject.Inject;
import java.util.List;
import static org.apache.hadoop.yarn.util.StringHelper.join;
import static org.apache.hadoop.yarn.util.StringHelper.*;
import static org.apache.hadoop.yarn.webapp.Params.*;
import java.util.List;
import org.apache.hadoop.yarn.webapp.SubView;
import com.google.common.collect.Lists;
/**
* A simpler two column layout implementation. Works with resizable themes.
* A simpler two column layout implementation with a header, a navigation bar
* on the left, content on the right, and a footer. Works with resizable themes.
* @see TwoColumnCssLayout
*/
public class TwoColumnLayout extends HtmlPage {
/*
* (non-Javadoc)
* @see org.apache.hadoop.yarn.webapp.view.HtmlPage#render(org.apache.hadoop.yarn.webapp.hamlet.Hamlet.HTML)
*/
@Override protected void render(Page.HTML<_> html) {
preHead(html);
html.
@@ -65,28 +69,55 @@ public class TwoColumnLayout extends HtmlPage {
_(content())._()._()._()._()._();
}
/**
* Do what needs to be done before the header is rendered. This usually
* involves setting page variables for JavaScript and CSS rendering.
* @param html the html to use to render.
*/
protected void preHead(Page.HTML<_> html) {
}
/**
* Do what needs to be done after the header is rendered.
* @param html the html to use to render.
*/
protected void postHead(Page.HTML<_> html) {
}
/**
* @return the class that will render the header of the page.
*/
protected Class<? extends SubView> header() {
return HeaderBlock.class;
}
/**
* @return the class that will render the content of the page.
*/
protected Class<? extends SubView> content() {
return LipsumBlock.class;
}
/**
* @return the class that will render the navigation bar.
*/
protected Class<? extends SubView> nav() {
return NavBlock.class;
}
/**
* @return the class that will render the footer.
*/
protected Class<? extends SubView> footer() {
return FooterBlock.class;
}
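
Taken together these hooks mean a concrete page only has to pick its blocks, which is what HsView earlier in this patch does. A minimal sketch, assuming MyNavBlock and MyContentBlock are existing HtmlBlock subclasses (both names are illustrative, not part of the patch):

```java
// Illustrative page built on TwoColumnLayout; MyNavBlock and MyContentBlock
// are assumed HtmlBlock subclasses and are not part of the patch.
public class MyPage extends TwoColumnLayout {
  @Override protected Class<? extends SubView> nav() {
    return MyNavBlock.class;       // left-hand navigation column
  }
  @Override protected Class<? extends SubView> content() {
    return MyContentBlock.class;   // right-hand content column
  }
}
```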
/**
* Sets up a table to be a consistent style.
* @param html the HTML to use to render.
* @param tableId the ID of the table to set styles on.
* @param innerStyles any other styles to add to the table.
*/
protected void setTableStyles(Page.HTML<_> html, String tableId,
String... innerStyles) {
List<String> styles = Lists.newArrayList();