YARN-1413. Implemented serving of aggregated-logs in the ApplicationHistory server. Contributed by Mayank Bansal.

svn merge --ignore-ancestry -c 1556752 ../YARN-321


git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1562206 13f79535-47bb-0310-9956-ffa450edef68
Vinod Kumar Vavilapalli 2014-01-28 20:03:04 +00:00
parent 1b20412d06
commit a6dfb7b045
10 changed files with 98 additions and 14 deletions

View File

@@ -516,6 +516,9 @@ Branch YARN-321: Generic ApplicationHistoryService
    YARN-1534. Fixed failure of test TestAHSWebApp. (Shinichi Yamashita via vinodkv)

    YARN-1413. Implemented serving of aggregated-logs in the ApplicationHistory
    server. (Mayank Bansal via vinodkv)

Release 2.2.0 - 2013-10-13

  INCOMPATIBLE CHANGES

View File

@@ -178,4 +178,8 @@ public final class StringHelper {
  public static String percent(double value) {
    return String.format("%.2f", value * 100);
  }

  public static String getPartUrl(String url, String part) {
    return url.substring(url.indexOf(part));
  }
}
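The new getPartUrl helper simply drops everything before the first occurrence of the given marker. A minimal sketch of how the web blocks further down use it, with an invented URL (all values other than getPartUrl itself are made up):

  // Invented example URL; its shape matches what WebAppUtils.getLogUrl (next file) produces.
  String full = "http://nm-host:8042/logs/nm-host:45454/container_x/container_x/hadoop";
  String path = StringHelper.getPartUrl(full, "log");
  // path is now "logs/nm-host:45454/container_x/container_x/hadoop"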

View File

@@ -17,6 +17,8 @@
 */
package org.apache.hadoop.yarn.webapp.util;

import static org.apache.hadoop.yarn.util.StringHelper.join;

import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.UnknownHostException;
@@ -27,7 +29,9 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.http.HttpConfig;
import org.apache.hadoop.http.HttpConfig.Policy;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.util.ConverterUtils;

import com.google.common.base.Joiner;
@@ -170,4 +174,11 @@ public class WebAppUtils {
      return schemePrefix + url;
    }
  }

  public static String getLogUrl(String nodeHttpAddress, String allocatedNode,
      ContainerId containerId, String user) {
    return join(HttpConfig.getSchemePrefix(), nodeHttpAddress, "/logs", "/",
        allocatedNode, "/", ConverterUtils.toString(containerId), "/",
        ConverterUtils.toString(containerId), "/", user);
  }
}
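getLogUrl concatenates its arguments with StringHelper.join, and the container id is deliberately passed twice: once for the container-id segment and once for the entity-string segment that the /logs route added to AHSWebApp below expects. A rough sketch with invented values:

  // All values here are invented, purely for illustration.
  ContainerId cid = ConverterUtils.toContainerId("container_1389_0001_01_000001");
  String url = WebAppUtils.getLogUrl("nm-host:8042", "nm-host:45454", cid, "hadoop");
  // With the plain-HTTP policy this comes out roughly as:
  //   http://nm-host:8042/logs/nm-host:45454/container_1389_0001_01_000001/container_1389_0001_01_000001/hadoop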

View File

@@ -47,4 +47,10 @@ public class AHSController extends Controller {
    render(ContainerPage.class);
  }

  /**
   * Render the logs page.
   */
  public void logs() {
    render(AHSLogsPage.class);
  }
}

View File

@@ -0,0 +1,55 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.applicationhistoryservice.webapp;

import static org.apache.hadoop.yarn.webapp.YarnWebParams.CONTAINER_ID;
import static org.apache.hadoop.yarn.webapp.YarnWebParams.ENTITY_STRING;

import org.apache.hadoop.yarn.webapp.SubView;
import org.apache.hadoop.yarn.webapp.log.AggregatedLogsBlock;

public class AHSLogsPage extends AHSView {

  /*
   * (non-Javadoc)
   *
   * @see org.apache.hadoop.yarn.server.applicationhistoryservice.webapp.AHSView#
   * preHead(org.apache.hadoop.yarn.webapp.hamlet.Hamlet.HTML)
   */
  @Override
  protected void preHead(Page.HTML<_> html) {
    String logEntity = $(ENTITY_STRING);
    if (logEntity == null || logEntity.isEmpty()) {
      logEntity = $(CONTAINER_ID);
    }
    if (logEntity == null || logEntity.isEmpty()) {
      logEntity = "UNKNOWN";
    }
    commonPreHead(html);
  }

  /**
   * The content of this page is the AggregatedLogsBlock.
   *
   * @return AggregatedLogsBlock.class
   */
  @Override
  protected Class<? extends SubView> content() {
    return AggregatedLogsBlock.class;
  }
}

View File

@@ -45,5 +45,7 @@ public class AHSWebApp extends WebApp implements YarnWebParams {
    route(pajoin("/appattempt", APPLICATION_ATTEMPT_ID), AHSController.class,
        "appattempt");
    route(pajoin("/container", CONTAINER_ID), AHSController.class, "container");
    route(pajoin("/logs", NM_NODENAME, CONTAINER_ID, ENTITY_STRING, APP_OWNER,
        CONTAINER_LOG_TYPE), AHSController.class, "logs");
  }
}
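For orientation, the new route binds each path segment, in order, to a YarnWebParams key that AHSLogsPage and AggregatedLogsBlock read back via $(...); roughly:

  /logs/{NM_NODENAME}/{CONTAINER_ID}/{ENTITY_STRING}/{APP_OWNER}/{CONTAINER_LOG_TYPE}

Note that getLogUrl above fills only the first four segments (node id, container id, entity string, owner) and omits the trailing log-type segment.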

View File

@@ -18,6 +18,7 @@
package org.apache.hadoop.yarn.server.webapp;

import static org.apache.hadoop.yarn.util.StringHelper.join;
import static org.apache.hadoop.yarn.util.StringHelper.getPartUrl;
import static org.apache.hadoop.yarn.webapp.YarnWebParams.APPLICATION_ATTEMPT_ID;

import java.io.IOException;
@@ -126,6 +127,8 @@ public class AppAttemptBlock extends HtmlBlock {
    StringBuilder containersTableData = new StringBuilder("[\n");
    for (ContainerReport containerReport : containers) {
      String logURL = containerReport.getLogUrl();
      logURL = getPartUrl(logURL, "log");
      ContainerInfo container = new ContainerInfo(containerReport);
      // ContainerId numerical value parsed by parseHadoopID in yarn.dt.plugins.js
      containersTableData
@@ -141,9 +144,9 @@
        .append("</a>\",\"")
        .append(container.getContainerExitStatus())
        .append("\",\"<a href='")
-       .append(container.getLogUrl() == null ? "#" : url(container.getLogUrl()))
        .append(logURL == null ? "#" : url(logURL))
        .append("'>")
-       .append(container.getLogUrl() == null ? "N/A" : "Logs")
        .append(logURL == null ? "N/A" : "Logs")
        .append("</a>\"],\n");
    }
    if (containersTableData.charAt(containersTableData.length() - 2) == ',') {

View File

@@ -19,6 +19,7 @@
package org.apache.hadoop.yarn.server.webapp;

import static org.apache.hadoop.yarn.util.StringHelper.join;
import static org.apache.hadoop.yarn.util.StringHelper.getPartUrl;
import static org.apache.hadoop.yarn.webapp.YarnWebParams.APPLICATION_ID;

import java.io.IOException;
@@ -147,7 +148,8 @@ public class AppBlock extends HtmlBlock {
      if (containerReport != null) {
        ContainerInfo container = new ContainerInfo(containerReport);
        startTime = container.getStartedTime();
-       logsLink = container.getLogUrl();
        logsLink = containerReport.getLogUrl();
        logsLink = getPartUrl(logsLink, "log");
      }
      String nodeLink = null;
      if (appAttempt.getHost() != null && appAttempt.getRpcPort() >= 0

View File

@@ -18,6 +18,7 @@
package org.apache.hadoop.yarn.server.webapp;

import static org.apache.hadoop.yarn.util.StringHelper.join;
import static org.apache.hadoop.yarn.util.StringHelper.getPartUrl;
import static org.apache.hadoop.yarn.webapp.YarnWebParams.CONTAINER_ID;

import java.io.IOException;
@@ -76,8 +77,10 @@ public class ContainerBlock extends HtmlBlock {
      puts("Container not found: " + containerid);
      return;
    }

    ContainerInfo container = new ContainerInfo(containerReport);
    String logURL = containerReport.getLogUrl();
    logURL = getPartUrl(logURL, "log");
    setTitle(join("Container ", containerid));

    info("Container Overview").
@@ -91,8 +94,7 @@
          container.getFinishedTime()))).
      _("Resource:", container.getAllocatedMB() + " Memory, " +
          container.getAllocatedVCores() + " VCores").
-     _("Logs:", container.getLogUrl() == null ? "#" : root_url(container.getLogUrl()),
-         container.getLogUrl() == null ? "N/A" : container.getLogUrl()).
      _("Logs:", logURL == null ? "#" : url(logURL), "Logs").
      _("Diagnostics:", container.getDiagnosticsInfo());

    html._(InfoBlock.class);

View File

@@ -18,8 +18,6 @@
package org.apache.hadoop.yarn.server.resourcemanager.rmcontainer;

-import static org.apache.hadoop.yarn.util.StringHelper.join;
import java.util.EnumSet;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock.ReadLock;
@@ -27,7 +25,6 @@ import java.util.concurrent.locks.ReentrantReadWriteLock.WriteLock;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.http.HttpConfig;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.Container;
import org.apache.hadoop.yarn.api.records.ContainerId;
@@ -46,7 +43,7 @@ import org.apache.hadoop.yarn.state.InvalidStateTransitonException;
import org.apache.hadoop.yarn.state.SingleArcTransition;
import org.apache.hadoop.yarn.state.StateMachine;
import org.apache.hadoop.yarn.state.StateMachineFactory;
-import org.apache.hadoop.yarn.util.ConverterUtils;
import org.apache.hadoop.yarn.webapp.util.WebAppUtils;

@SuppressWarnings({"unchecked", "rawtypes"})
public class RMContainerImpl implements RMContainer {
@@ -366,10 +363,9 @@
    public void transition(RMContainerImpl container, RMContainerEvent event) {
      // The logs of running containers should be found on NM webUI
      // The logs should be accessible after the container is launched
-     container.logURL = join(HttpConfig.getSchemePrefix(),
-         container.container.getNodeHttpAddress(), "/node", "/containerlogs/",
-         ConverterUtils.toString(container.containerId), "/",
-         container.user);
      container.logURL = WebAppUtils.getLogUrl(container.container
          .getNodeHttpAddress(), container.getAllocatedNode().toString(),
          container.containerId, container.user);

      // Unregister from containerAllocationExpirer.
      container.containerAllocationExpirer.unregister(container
          .getContainerId());
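
Putting the pieces together, a condensed sketch of the flow this patch sets up (the helper name below is hypothetical; everything else uses identifiers from the files above):

  // The RM records the full NM-addressed URL via WebAppUtils.getLogUrl(...), as above.
  // A history web block then strips the scheme and NM address so the link resolves
  // against the ApplicationHistoryServer web app:
  static String ahsLogsLink(ContainerReport report) {
    String logUrl = report.getLogUrl();            // http://<nm-http-address>/logs/<nm-id>/<cid>/<cid>/<user>
    return StringHelper.getPartUrl(logUrl, "log"); // "logs/<nm-id>/<cid>/<cid>/<user>"
  }
  // Rendered with url(...), that path hits the new "/logs" route, so AHSController#logs
  // renders AHSLogsPage and AggregatedLogsBlock serves the container's aggregated logs.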