MAPREDUCE-3297. Moved log related components into yarn-common so that HistoryServer and clients can use them without depending on the yarn-server-nodemanager module. Contributed by Siddharth Seth.

svn merge -c r1196986 --ignore-ancestry ../../trunk/


git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-0.23@1196988 13f79535-47bb-0310-9956-ffa450edef68
Vinod Kumar Vavilapalli 2011-11-03 08:07:16 +00:00
parent 46d9ffc886
commit b793e277aa
33 changed files with 421 additions and 364 deletions
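The practical effect of this move is that components outside the NodeManager (the MapReduce CLI and the HistoryServer webapp in the diffs below) can read aggregated logs with only a hadoop-yarn-common dependency. A minimal sketch of that pattern, mirroring what LogDumper and AggregatedLogsBlock do in the diffs that follow; the class name and the application, node, and owner values are illustrative placeholders, not part of this change:

import java.io.DataInputStream;
import java.io.PrintWriter;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat;
import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogKey;
import org.apache.hadoop.yarn.logaggregation.LogAggregationUtils;
import org.apache.hadoop.yarn.util.ConverterUtils;

// Hypothetical client class, shown only to illustrate the relocated packages.
public class AggregatedLogReaderSketch {
  public static void main(String[] args) throws Exception {
    YarnConfiguration conf = new YarnConfiguration();

    // Illustrative identifiers; a real caller gets these from the app it is inspecting.
    ApplicationId appId =
        ConverterUtils.toApplicationId("application_1320307200000_0001");
    NodeId nodeId = ConverterUtils.toNodeId("host.example.com:45454");
    String appOwner = "alice";

    // Root of the aggregated-log tree on the shared filesystem.
    Path remoteRootLogDir =
        new Path(conf.get(YarnConfiguration.NM_REMOTE_APP_LOG_DIR,
            YarnConfiguration.DEFAULT_NM_REMOTE_APP_LOG_DIR));

    // Both helpers now live in org.apache.hadoop.yarn.logaggregation (yarn-common),
    // so no yarn-server-nodemanager dependency is required.
    Path logFile = LogAggregationUtils.getRemoteNodeLogFileForApp(
        remoteRootLogDir, appId, appOwner, nodeId,
        LogAggregationUtils.getRemoteNodeLogDirSuffix(conf));

    AggregatedLogFormat.LogReader reader =
        new AggregatedLogFormat.LogReader(conf, logFile);
    PrintWriter out = new PrintWriter(System.out);
    LogKey key = new LogKey();
    DataInputStream valueStream = reader.next(key);
    while (valueStream != null) {
      out.println("Container: " + key);
      // Dump this container's logs, as the webapp block below does.
      AggregatedLogFormat.LogReader.readAcontainerLogs(valueStream, out);
      valueStream = reader.next(key);
    }
    out.flush();
  }
}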

View File

@@ -1,5 +1,21 @@
Hadoop MapReduce Change Log
Release 0.23.1 - Unreleased
INCOMPATIBLE CHANGES
NEW FEATURES
IMPROVEMENTS
MAPREDUCE-3297. Moved log related components into yarn-common so that
HistoryServer and clients can use them without depending on the
yarn-server-nodemanager module. (Siddharth Seth via vinodkv)
OPTIMIZATIONS
BUG FIXES
Release 0.23.0 - 2011-11-01
INCOMPATIBLE CHANGES

View File

@@ -34,10 +34,6 @@
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-yarn-common</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-yarn-server-nodemanager</artifactId>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>

View File

@@ -46,7 +46,7 @@ import org.apache.hadoop.mapreduce.v2.LogParams;
import org.apache.hadoop.security.AccessControlException;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.logaggregation.LogDumper;
import org.apache.hadoop.yarn.logaggregation.LogDumper;
/**
* Interprets the map reduce cli options

View File

@@ -23,8 +23,8 @@ import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.v2.app.webapp.App;
import org.apache.hadoop.mapreduce.v2.app.webapp.AppController;
import org.apache.hadoop.yarn.server.nodemanager.webapp.AggregatedLogsPage;
import org.apache.hadoop.yarn.webapp.View;
import org.apache.hadoop.yarn.webapp.log.AggregatedLogsPage;
import com.google.inject.Inject;

View File

@@ -17,11 +17,11 @@
*/
package org.apache.hadoop.mapreduce.v2.hs.webapp;
import static org.apache.hadoop.yarn.server.nodemanager.webapp.NMWebParams.CONTAINER_ID;
import static org.apache.hadoop.yarn.server.nodemanager.webapp.NMWebParams.ENTITY_STRING;
import static org.apache.hadoop.yarn.webapp.YarnWebParams.CONTAINER_ID;
import static org.apache.hadoop.yarn.webapp.YarnWebParams.ENTITY_STRING;
import org.apache.hadoop.yarn.server.nodemanager.webapp.AggregatedLogsBlock;
import org.apache.hadoop.yarn.webapp.SubView;
import org.apache.hadoop.yarn.webapp.log.AggregatedLogsBlock;
public class HsLogsPage extends HsView {

View File

@@ -18,11 +18,11 @@
package org.apache.hadoop.mapreduce.v2.hs.webapp;
import static org.apache.hadoop.yarn.server.nodemanager.webapp.NMWebParams.CONTAINER_ID;
import static org.apache.hadoop.yarn.server.nodemanager.webapp.NMWebParams.NM_NODENAME;
import static org.apache.hadoop.yarn.server.nodemanager.webapp.NMWebParams.ENTITY_STRING;
import static org.apache.hadoop.yarn.server.nodemanager.webapp.NMWebParams.APP_OWNER;
import static org.apache.hadoop.yarn.util.StringHelper.pajoin;
import static org.apache.hadoop.yarn.webapp.YarnWebParams.APP_OWNER;
import static org.apache.hadoop.yarn.webapp.YarnWebParams.CONTAINER_ID;
import static org.apache.hadoop.yarn.webapp.YarnWebParams.ENTITY_STRING;
import static org.apache.hadoop.yarn.webapp.YarnWebParams.NM_NODENAME;
import org.apache.hadoop.mapreduce.v2.app.AppContext;
import org.apache.hadoop.mapreduce.v2.app.webapp.AMParams;

View File

@@ -22,10 +22,10 @@ import static org.apache.hadoop.mapreduce.v2.app.webapp.AMParams.APP_ID;
import static org.apache.hadoop.mapreduce.v2.app.webapp.AMParams.ATTEMPT_STATE;
import static org.apache.hadoop.mapreduce.v2.app.webapp.AMParams.JOB_ID;
import static org.apache.hadoop.mapreduce.v2.app.webapp.AMParams.TASK_TYPE;
import static org.apache.hadoop.yarn.server.nodemanager.webapp.NMWebParams.CONTAINER_ID;
import static org.apache.hadoop.yarn.server.nodemanager.webapp.NMWebParams.NM_NODENAME;
import static org.apache.hadoop.yarn.server.nodemanager.webapp.NMWebParams.ENTITY_STRING;
import static org.apache.hadoop.yarn.server.nodemanager.webapp.NMWebParams.APP_OWNER;
import static org.apache.hadoop.yarn.webapp.YarnWebParams.APP_OWNER;
import static org.apache.hadoop.yarn.webapp.YarnWebParams.CONTAINER_ID;
import static org.apache.hadoop.yarn.webapp.YarnWebParams.ENTITY_STRING;
import static org.apache.hadoop.yarn.webapp.YarnWebParams.NM_NODENAME;
import static org.junit.Assert.assertEquals;
import java.io.IOException;
@@ -44,8 +44,8 @@ import org.apache.hadoop.yarn.Clock;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.event.EventHandler;
import org.apache.hadoop.yarn.server.nodemanager.webapp.AggregatedLogsPage;
import org.apache.hadoop.yarn.util.BuilderUtils;
import org.apache.hadoop.yarn.webapp.log.AggregatedLogsPage;
import org.apache.hadoop.yarn.webapp.test.WebAppTests;
import org.junit.Test;

View File

@@ -208,7 +208,7 @@
<!-- Ignore EI_EXPOSE_REP2 in Log services -->
<Match>
<Class name="org.apache.hadoop.yarn.server.nodemanager.containermanager.logaggregation.AggregatedLogFormat$LogValue" />
<Class name="org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat$LogValue" />
<Bug pattern="EI_EXPOSE_REP2" />
</Match>
<Match>

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.nodemanager.containermanager.logaggregation;
package org.apache.hadoop.yarn.logaggregation;
import java.io.DataInput;
import java.io.DataInputStream;

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.nodemanager.containermanager.logaggregation;
package org.apache.hadoop.yarn.logaggregation;
public enum ContainerLogsRetentionPolicy {
APPLICATION_MASTER_ONLY, AM_AND_FAILED_CONTAINERS_ONLY, ALL_CONTAINERS

View File

@@ -0,0 +1,107 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.logaggregation;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
public class LogAggregationUtils {
/**
* Constructs the full filename for an application's log file per node.
* @param remoteRootLogDir
* @param appId
* @param user
* @param nodeId
* @param suffix
* @return the remote log file.
*/
public static Path getRemoteNodeLogFileForApp(Path remoteRootLogDir,
ApplicationId appId, String user, NodeId nodeId, String suffix) {
return new Path(getRemoteAppLogDir(remoteRootLogDir, appId, user, suffix),
getNodeString(nodeId));
}
/**
* Gets the remote app log dir.
* @param remoteRootLogDir
* @param appId
* @param user
* @param suffix
* @return the remote application specific log dir.
*/
public static Path getRemoteAppLogDir(Path remoteRootLogDir,
ApplicationId appId, String user, String suffix) {
return new Path(getRemoteLogSuffixedDir(remoteRootLogDir, user, suffix),
appId.toString());
}
/**
* Gets the remote suffixed log dir for the user.
* @param remoteRootLogDir
* @param user
* @param suffix
* @return the remote suffixed log dir.
*/
public static Path getRemoteLogSuffixedDir(Path remoteRootLogDir,
String user, String suffix) {
if (suffix == null || suffix.isEmpty()) {
return getRemoteLogUserDir(remoteRootLogDir, user);
}
// TODO Maybe support suffix to be more than a single file.
return new Path(getRemoteLogUserDir(remoteRootLogDir, user), suffix);
}
// TODO Add a utility method to list available log files. Ignore the
// temporary ones.
/**
* Gets the remote log user dir.
* @param remoteRootLogDir
* @param user
* @return the remote per user log dir.
*/
public static Path getRemoteLogUserDir(Path remoteRootLogDir, String user) {
return new Path(remoteRootLogDir, user);
}
/**
* Returns the suffix component of the log dir.
* @param conf
* @return the suffix which will be appended to the user log dir.
*/
public static String getRemoteNodeLogDirSuffix(Configuration conf) {
return conf.get(YarnConfiguration.NM_REMOTE_APP_LOG_DIR_SUFFIX,
YarnConfiguration.DEFAULT_NM_REMOTE_APP_LOG_DIR_SUFFIX);
}
/**
* Converts a nodeId to a form used in the app log file name.
* @param nodeId
* @return the node string to be used to construct the file name.
*/
private static String getNodeString(NodeId nodeId) {
return nodeId.toString().replace(":", "_");
}
}
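
For reference, a hedged illustration of the directory layout these helpers produce; the root directory, user, suffix, and identifiers below are made-up values, not part of this change:

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.logaggregation.LogAggregationUtils;
import org.apache.hadoop.yarn.util.ConverterUtils;

// Hypothetical illustration class; all values are illustrative.
public class LogPathLayoutSketch {
  public static void main(String[] args) {
    Path root = new Path("/tmp/logs");   // NM_REMOTE_APP_LOG_DIR (illustrative)
    String user = "alice";
    String suffix = "logs";              // NM_REMOTE_APP_LOG_DIR_SUFFIX (illustrative)
    ApplicationId appId =
        ConverterUtils.toApplicationId("application_1320307200000_0001");
    NodeId nodeId = ConverterUtils.toNodeId("host.example.com:45454");

    // /tmp/logs/alice
    System.out.println(LogAggregationUtils.getRemoteLogUserDir(root, user));
    // /tmp/logs/alice/logs
    System.out.println(LogAggregationUtils.getRemoteLogSuffixedDir(root, user, suffix));
    // /tmp/logs/alice/logs/application_1320307200000_0001
    System.out.println(LogAggregationUtils.getRemoteAppLogDir(root, appId, user, suffix));
    // /tmp/logs/alice/logs/application_1320307200000_0001/host.example.com_45454
    System.out.println(LogAggregationUtils.getRemoteNodeLogFileForApp(
        root, appId, user, nodeId, suffix));
  }
}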

View File

@@ -16,7 +16,7 @@
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.nodemanager.containermanager.logaggregation;
package org.apache.hadoop.yarn.logaggregation;
import java.io.DataInputStream;
import java.io.DataOutputStream;
@@ -41,8 +41,8 @@ import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.logaggregation.AggregatedLogFormat.LogKey;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.logaggregation.AggregatedLogFormat.LogReader;
import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogKey;
import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogReader;
import org.apache.hadoop.yarn.util.ConverterUtils;
public class LogDumper extends Configured implements Tool {
@@ -117,7 +117,7 @@ public class LogDumper extends Configured implements Tool {
YarnConfiguration.DEFAULT_NM_REMOTE_APP_LOG_DIR));
AggregatedLogFormat.LogReader reader =
new AggregatedLogFormat.LogReader(getConf(),
LogAggregationService.getRemoteNodeLogFileForApp(
LogAggregationUtils.getRemoteNodeLogFileForApp(
remoteRootLogDir,
appId,
appOwner,
@@ -135,10 +135,10 @@ public class LogDumper extends Configured implements Tool {
Path remoteRootLogDir =
new Path(getConf().get(YarnConfiguration.NM_REMOTE_APP_LOG_DIR,
YarnConfiguration.DEFAULT_NM_REMOTE_APP_LOG_DIR));
String suffix = LogAggregationService.getRemoteNodeLogDirSuffix(getConf());
String suffix = LogAggregationUtils.getRemoteNodeLogDirSuffix(getConf());
AggregatedLogFormat.LogReader reader =
new AggregatedLogFormat.LogReader(getConf(),
LogAggregationService.getRemoteNodeLogFileForApp(remoteRootLogDir,
LogAggregationUtils.getRemoteNodeLogFileForApp(remoteRootLogDir,
ConverterUtils.toApplicationId(appId), jobOwner,
ConverterUtils.toNodeId(nodeId), suffix));
DataOutputStream out = new DataOutputStream(System.out);
@@ -185,7 +185,7 @@
YarnConfiguration.DEFAULT_NM_REMOTE_APP_LOG_DIR_SUFFIX);
//TODO Change this to get a list of files from the LAS.
Path remoteAppLogDir =
LogAggregationService.getRemoteAppLogDir(remoteRootLogDir, appId, user,
LogAggregationUtils.getRemoteAppLogDir(remoteRootLogDir, appId, user,
logDirSuffix);
RemoteIterator<FileStatus> nodeFiles =
FileContext.getFileContext().listStatus(remoteAppLogDir);

View File

@@ -16,13 +16,13 @@
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.nodemanager.webapp;
package org.apache.hadoop.yarn.webapp;
public interface NMWebParams {
public interface YarnWebParams {
String NM_NODENAME = "nm.id";
String APPLICATION_ID = "nm.appId";
String CONTAINER_ID = "nm.containerId";
String CONTAINER_LOG_TYPE= "nm.containerLogType";
String ENTITY_STRING = "nm.entityString";
String APP_OWNER = "nm.appOwner";
String APPLICATION_ID = "app.id";
String CONTAINER_ID = "container.id";
String CONTAINER_LOG_TYPE= "log.type";
String ENTITY_STRING = "entity.string";
String APP_OWNER = "app.owner";
}

View File

@@ -0,0 +1,183 @@
package org.apache.hadoop.yarn.webapp.log;
import static org.apache.hadoop.yarn.webapp.YarnWebParams.APP_OWNER;
import static org.apache.hadoop.yarn.webapp.YarnWebParams.CONTAINER_ID;
import static org.apache.hadoop.yarn.webapp.YarnWebParams.ENTITY_STRING;
import static org.apache.hadoop.yarn.webapp.YarnWebParams.NM_NODENAME;
import java.io.DataInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.yarn.api.records.ApplicationAccessType;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat;
import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogKey;
import org.apache.hadoop.yarn.logaggregation.LogAggregationUtils;
import org.apache.hadoop.yarn.server.security.ApplicationACLsManager;
import org.apache.hadoop.yarn.util.ConverterUtils;
import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
import com.google.inject.Inject;
public class AggregatedLogsBlock extends HtmlBlock {
private final Configuration conf;
@Inject
AggregatedLogsBlock(Configuration conf) {
this.conf = conf;
}
@Override
protected void render(Block html) {
ContainerId containerId = verifyAndGetContainerId(html);
NodeId nodeId = verifyAndGetNodeId(html);
String appOwner = verifyAndGetAppOwner(html);
if (containerId == null || nodeId == null || appOwner == null
|| appOwner.isEmpty()) {
return;
}
ApplicationId applicationId =
containerId.getApplicationAttemptId().getApplicationId();
String logEntity = $(ENTITY_STRING);
if (logEntity == null || logEntity.isEmpty()) {
logEntity = containerId.toString();
}
if (!conf.getBoolean(YarnConfiguration.NM_LOG_AGGREGATION_ENABLED,
YarnConfiguration.DEFAULT_NM_LOG_AGGREGATION_ENABLED)) {
html.h1()
._("Aggregation is not enabled. Try the nodemanager at " + nodeId)
._();
return;
}
Path remoteRootLogDir =
new Path(conf.get(YarnConfiguration.NM_REMOTE_APP_LOG_DIR,
YarnConfiguration.DEFAULT_NM_REMOTE_APP_LOG_DIR));
AggregatedLogFormat.LogReader reader = null;
try {
reader =
new AggregatedLogFormat.LogReader(conf,
LogAggregationUtils.getRemoteNodeLogFileForApp(
remoteRootLogDir, applicationId, appOwner, nodeId,
LogAggregationUtils.getRemoteNodeLogDirSuffix(conf)));
} catch (FileNotFoundException e) {
// ACLs not available till the log file is opened.
html.h1()
._("Logs not available for "
+ logEntity
+ ". Aggregation may not be complete, "
+ "Check back later or try the nodemanager at "
+ nodeId)._();
return;
} catch (IOException e) {
html.h1()._("Error getting logs for " + logEntity)._();
LOG.error("Error getting logs for " + logEntity, e);
return;
}
String owner = null;
Map<ApplicationAccessType, String> appAcls = null;
try {
owner = reader.getApplicationOwner();
appAcls = reader.getApplicationAcls();
} catch (IOException e) {
html.h1()._("Error getting logs for " + logEntity)._();
LOG.error("Error getting logs for " + logEntity, e);
return;
}
ApplicationACLsManager aclsManager = new ApplicationACLsManager(conf);
aclsManager.addApplication(applicationId, appAcls);
String remoteUser = request().getRemoteUser();
UserGroupInformation callerUGI = null;
if (remoteUser != null) {
callerUGI = UserGroupInformation.createRemoteUser(remoteUser);
}
if (callerUGI != null
&& !aclsManager.checkAccess(callerUGI, ApplicationAccessType.VIEW_APP,
owner, applicationId)) {
html.h1()
._("User [" + remoteUser
+ "] is not authorized to view the logs for " + logEntity)._();
return;
}
DataInputStream valueStream;
LogKey key = new LogKey();
try {
valueStream = reader.next(key);
while (valueStream != null
&& !key.toString().equals(containerId.toString())) {
valueStream = reader.next(key);
}
if (valueStream == null) {
html.h1()._(
"Logs not available for " + logEntity
+ ". Could be caused by the rentention policy")._();
return;
}
writer().write("<pre>");
AggregatedLogFormat.LogReader.readAcontainerLogs(valueStream, writer());
writer().write("</pre>");
return;
} catch (IOException e) {
html.h1()._("Error getting logs for " + logEntity)._();
LOG.error("Error getting logs for " + logEntity, e);
return;
}
}
private ContainerId verifyAndGetContainerId(Block html) {
String containerIdStr = $(CONTAINER_ID);
if (containerIdStr == null || containerIdStr.isEmpty()) {
html.h1()._("Cannot get container logs without a ContainerId")._();
return null;
}
ContainerId containerId = null;
try {
containerId = ConverterUtils.toContainerId(containerIdStr);
} catch (IllegalArgumentException e) {
html.h1()
._("Cannot get container logs for invalid containerId: "
+ containerIdStr)._();
return null;
}
return containerId;
}
private NodeId verifyAndGetNodeId(Block html) {
String nodeIdStr = $(NM_NODENAME);
if (nodeIdStr == null || nodeIdStr.isEmpty()) {
html.h1()._("Cannot get container logs without a NodeId")._();
return null;
}
NodeId nodeId = null;
try {
nodeId = ConverterUtils.toNodeId(nodeIdStr);
} catch (IllegalArgumentException e) {
html.h1()._("Cannot get container logs. Invalid nodeId: " + nodeIdStr)
._();
return null;
}
return nodeId;
}
private String verifyAndGetAppOwner(Block html) {
String appOwner = $(APP_OWNER);
if (appOwner == null || appOwner.isEmpty()) {
html.h1()._("Cannot get container logs without an app owner")._();
}
return appOwner;
}
}

View File

@@ -16,11 +16,11 @@
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.nodemanager.webapp;
package org.apache.hadoop.yarn.webapp.log;
import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
public class AggregatedLogsNavBlock extends HtmlBlock implements NMWebParams {
public class AggregatedLogsNavBlock extends HtmlBlock {
@Override
protected void render(Block html) {

View File

@@ -0,0 +1,45 @@
package org.apache.hadoop.yarn.webapp.log;
import static org.apache.hadoop.yarn.webapp.YarnWebParams.CONTAINER_ID;
import static org.apache.hadoop.yarn.webapp.YarnWebParams.ENTITY_STRING;
import static org.apache.hadoop.yarn.util.StringHelper.join;
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.ACCORDION;
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.ACCORDION_ID;
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.THEMESWITCHER_ID;
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.initID;
import org.apache.hadoop.yarn.webapp.SubView;
import org.apache.hadoop.yarn.webapp.view.TwoColumnLayout;
public class AggregatedLogsPage extends TwoColumnLayout {
/* (non-Javadoc)
* @see org.apache.hadoop.yarn.server.nodemanager.webapp.NMView#preHead(org.apache.hadoop.yarn.webapp.hamlet.Hamlet.HTML)
*/
@Override
protected void preHead(Page.HTML<_> html) {
String logEntity = $(ENTITY_STRING);
if (logEntity == null || logEntity.isEmpty()) {
logEntity = $(CONTAINER_ID);
}
if (logEntity == null || logEntity.isEmpty()) {
logEntity = "UNKNOWN";
}
set(TITLE, join("Logs for ", logEntity));
set(ACCORDION_ID, "nav");
set(initID(ACCORDION, "nav"), "{autoHeight:false, active:0}");
set(THEMESWITCHER_ID, "themeswitcher");
}
@Override
protected Class<? extends SubView> content() {
return AggregatedLogsBlock.class;
}
@Override
protected Class<? extends SubView> nav() {
return AggregatedLogsNavBlock.class;
}
}

View File

@@ -32,6 +32,7 @@ import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationAccessType;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.event.Dispatcher;
import org.apache.hadoop.yarn.logaggregation.ContainerLogsRetentionPolicy;
import org.apache.hadoop.yarn.server.nodemanager.Context;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.AuxServicesEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.AuxServicesEventType;
@@ -41,7 +42,6 @@ import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Cont
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.ResourceLocalizationService;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.ApplicationLocalizationEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.LocalizationEventType;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.logaggregation.ContainerLogsRetentionPolicy;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.loghandler.event.LogHandlerAppFinishedEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.loghandler.event.LogHandlerAppStartedEvent;
import org.apache.hadoop.yarn.server.security.ApplicationACLsManager;

View File

@@ -35,12 +35,13 @@ import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationAccessType;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.event.Dispatcher;
import org.apache.hadoop.yarn.logaggregation.ContainerLogsRetentionPolicy;
import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogKey;
import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogValue;
import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogWriter;
import org.apache.hadoop.yarn.server.nodemanager.DeletionService;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.ApplicationEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.ApplicationEventType;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.logaggregation.AggregatedLogFormat.LogKey;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.logaggregation.AggregatedLogFormat.LogValue;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.logaggregation.AggregatedLogFormat.LogWriter;
import org.apache.hadoop.yarn.util.ConverterUtils;
public class AppLogAggregatorImpl implements AppLogAggregator {

View File

@@ -43,6 +43,8 @@ import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.event.Dispatcher;
import org.apache.hadoop.yarn.logaggregation.ContainerLogsRetentionPolicy;
import org.apache.hadoop.yarn.logaggregation.LogAggregationUtils;
import org.apache.hadoop.yarn.server.nodemanager.Context;
import org.apache.hadoop.yarn.server.nodemanager.DeletionService;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.loghandler.LogHandler;
@@ -138,83 +140,7 @@ public class LogAggregationService extends AbstractService implements
super.stop();
}
/**
* Constructs the full filename for an application's log file per node.
* @param remoteRootLogDir
* @param appId
* @param user
* @param nodeId
* @param suffix
* @return the remote log file.
*/
public static Path getRemoteNodeLogFileForApp(Path remoteRootLogDir,
ApplicationId appId, String user, NodeId nodeId, String suffix) {
return new Path(getRemoteAppLogDir(remoteRootLogDir, appId, user, suffix),
getNodeString(nodeId));
}
/**
* Gets the remote app log dir.
* @param remoteRootLogDir
* @param appId
* @param user
* @param suffix
* @return the remote application specific log dir.
*/
public static Path getRemoteAppLogDir(Path remoteRootLogDir,
ApplicationId appId, String user, String suffix) {
return new Path(getRemoteLogSuffixedDir(remoteRootLogDir, user, suffix),
appId.toString());
}
/**
* Gets the remote suffixed log dir for the user.
* @param remoteRootLogDir
* @param user
* @param suffix
* @return the remote suffixed log dir.
*/
private static Path getRemoteLogSuffixedDir(Path remoteRootLogDir,
String user, String suffix) {
if (suffix == null || suffix.isEmpty()) {
return getRemoteLogUserDir(remoteRootLogDir, user);
}
// TODO Maybe support suffix to be more than a single file.
return new Path(getRemoteLogUserDir(remoteRootLogDir, user), suffix);
}
// TODO Add a utility method to list available log files. Ignore the
// temporary ones.
/**
* Gets the remote log user dir.
* @param remoteRootLogDir
* @param user
* @return the remote per user log dir.
*/
private static Path getRemoteLogUserDir(Path remoteRootLogDir, String user) {
return new Path(remoteRootLogDir, user);
}
/**
* Returns the suffix component of the log dir.
* @param conf
* @return the suffix which will be appended to the user log dir.
*/
public static String getRemoteNodeLogDirSuffix(Configuration conf) {
return conf.get(YarnConfiguration.NM_REMOTE_APP_LOG_DIR_SUFFIX,
YarnConfiguration.DEFAULT_NM_REMOTE_APP_LOG_DIR_SUFFIX);
}
/**
* Converts a nodeId to a form used in the app log file name.
* @param nodeId
* @return the node string to be used to construct the file name.
*/
private static String getNodeString(NodeId nodeId) {
return nodeId.toString().replace(":", "_");
}
@@ -268,7 +194,7 @@ public class LogAggregationService extends AbstractService implements
}
Path getRemoteNodeLogFileForApp(ApplicationId appId, String user) {
return LogAggregationService.getRemoteNodeLogFileForApp(
return LogAggregationUtils.getRemoteNodeLogFileForApp(
this.remoteRootLogDir, appId, user, this.nodeId,
this.remoteRootLogDirSuffix);
}
@@ -299,7 +225,7 @@ public class LogAggregationService extends AbstractService implements
}
try {
userDir =
getRemoteLogUserDir(
LogAggregationUtils.getRemoteLogUserDir(
LogAggregationService.this.remoteRootLogDir, user);
userDir =
userDir.makeQualified(remoteFS.getUri(),
@@ -312,7 +238,7 @@ public class LogAggregationService extends AbstractService implements
}
try {
suffixDir =
getRemoteLogSuffixedDir(
LogAggregationUtils.getRemoteLogSuffixedDir(
LogAggregationService.this.remoteRootLogDir, user,
LogAggregationService.this.remoteRootLogDirSuffix);
suffixDir =
@@ -326,8 +252,8 @@ public class LogAggregationService extends AbstractService implements
}
try {
appDir =
getRemoteAppLogDir(LogAggregationService.this.remoteRootLogDir,
appId, user,
LogAggregationUtils.getRemoteAppLogDir(
LogAggregationService.this.remoteRootLogDir, appId, user,
LogAggregationService.this.remoteRootLogDirSuffix);
appDir =
appDir.makeQualified(remoteFS.getUri(),

View File

@@ -23,7 +23,7 @@ import java.util.Map;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationAccessType;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.logaggregation.ContainerLogsRetentionPolicy;
import org.apache.hadoop.yarn.logaggregation.ContainerLogsRetentionPolicy;
public class LogHandlerAppStartedEvent extends LogHandlerEvent {

View File

@@ -15,186 +15,3 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.nodemanager.webapp;
import static org.apache.hadoop.yarn.server.nodemanager.webapp.NMWebParams.CONTAINER_ID;
import static org.apache.hadoop.yarn.server.nodemanager.webapp.NMWebParams.NM_NODENAME;
import static org.apache.hadoop.yarn.server.nodemanager.webapp.NMWebParams.ENTITY_STRING;
import static org.apache.hadoop.yarn.server.nodemanager.webapp.NMWebParams.APP_OWNER;
import java.io.DataInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.yarn.api.records.ApplicationAccessType;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.logaggregation.AggregatedLogFormat;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.logaggregation.LogAggregationService;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.logaggregation.AggregatedLogFormat.LogKey;
import org.apache.hadoop.yarn.server.security.ApplicationACLsManager;
import org.apache.hadoop.yarn.util.ConverterUtils;
import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
import com.google.inject.Inject;
public class AggregatedLogsBlock extends HtmlBlock {
private final Configuration conf;
@Inject
AggregatedLogsBlock(Configuration conf) {
this.conf = conf;
}
@Override
protected void render(Block html) {
ContainerId containerId = verifyAndGetContainerId(html);
NodeId nodeId = verifyAndGetNodeId(html);
String appOwner = verifyAndGetAppOwner(html);
if (containerId == null || nodeId == null || appOwner == null
|| appOwner.isEmpty()) {
return;
}
ApplicationId applicationId =
containerId.getApplicationAttemptId().getApplicationId();
String logEntity = $(ENTITY_STRING);
if (logEntity == null || logEntity.isEmpty()) {
logEntity = containerId.toString();
}
if (!conf.getBoolean(YarnConfiguration.NM_LOG_AGGREGATION_ENABLED,
YarnConfiguration.DEFAULT_NM_LOG_AGGREGATION_ENABLED)) {
html.h1()
._("Aggregation is not enabled. Try the nodemanager at " + nodeId)
._();
return;
}
Path remoteRootLogDir =
new Path(conf.get(YarnConfiguration.NM_REMOTE_APP_LOG_DIR,
YarnConfiguration.DEFAULT_NM_REMOTE_APP_LOG_DIR));
AggregatedLogFormat.LogReader reader = null;
try {
reader =
new AggregatedLogFormat.LogReader(conf,
LogAggregationService.getRemoteNodeLogFileForApp(
remoteRootLogDir, applicationId, appOwner, nodeId,
LogAggregationService.getRemoteNodeLogDirSuffix(conf)));
} catch (FileNotFoundException e) {
// ACLs not available till the log file is opened.
html.h1()
._("Logs not available for "
+ logEntity
+ ". Aggregation may not be complete, "
+ "Check back later or try the nodemanager at "
+ nodeId)._();
return;
} catch (IOException e) {
html.h1()._("Error getting logs for " + logEntity)._();
LOG.error("Error getting logs for " + logEntity, e);
return;
}
String owner = null;
Map<ApplicationAccessType, String> appAcls = null;
try {
owner = reader.getApplicationOwner();
appAcls = reader.getApplicationAcls();
} catch (IOException e) {
html.h1()._("Error getting logs for " + logEntity)._();
LOG.error("Error getting logs for " + logEntity, e);
return;
}
ApplicationACLsManager aclsManager = new ApplicationACLsManager(conf);
aclsManager.addApplication(applicationId, appAcls);
String remoteUser = request().getRemoteUser();
UserGroupInformation callerUGI = null;
if (remoteUser != null) {
callerUGI = UserGroupInformation.createRemoteUser(remoteUser);
}
if (callerUGI != null
&& !aclsManager.checkAccess(callerUGI, ApplicationAccessType.VIEW_APP,
owner, applicationId)) {
html.h1()
._("User [" + remoteUser
+ "] is not authorized to view the logs for " + logEntity)._();
return;
}
DataInputStream valueStream;
LogKey key = new LogKey();
try {
valueStream = reader.next(key);
while (valueStream != null
&& !key.toString().equals(containerId.toString())) {
valueStream = reader.next(key);
}
if (valueStream == null) {
html.h1()._(
"Logs not available for " + logEntity
+ ". Could be caused by the rentention policy")._();
return;
}
writer().write("<pre>");
AggregatedLogFormat.LogReader.readAcontainerLogs(valueStream, writer());
writer().write("</pre>");
return;
} catch (IOException e) {
html.h1()._("Error getting logs for " + logEntity)._();
LOG.error("Error getting logs for " + logEntity, e);
return;
}
}
private ContainerId verifyAndGetContainerId(Block html) {
String containerIdStr = $(CONTAINER_ID);
if (containerIdStr == null || containerIdStr.isEmpty()) {
html.h1()._("Cannot get container logs without a ContainerId")._();
return null;
}
ContainerId containerId = null;
try {
containerId = ConverterUtils.toContainerId(containerIdStr);
} catch (IllegalArgumentException e) {
html.h1()
._("Cannot get container logs for invalid containerId: "
+ containerIdStr)._();
return null;
}
return containerId;
}
private NodeId verifyAndGetNodeId(Block html) {
String nodeIdStr = $(NM_NODENAME);
if (nodeIdStr == null || nodeIdStr.isEmpty()) {
html.h1()._("Cannot get container logs without a NodeId")._();
return null;
}
NodeId nodeId = null;
try {
nodeId = ConverterUtils.toNodeId(nodeIdStr);
} catch (IllegalArgumentException e) {
html.h1()._("Cannot get container logs. Invalid nodeId: " + nodeIdStr)
._();
return null;
}
return nodeId;
}
private String verifyAndGetAppOwner(Block html) {
String appOwner = $(APP_OWNER);
if (appOwner == null || appOwner.isEmpty()) {
html.h1()._("Cannot get container logs without an app owner")._();
}
return appOwner;
}
}

View File

@@ -15,47 +15,3 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.nodemanager.webapp;
import static org.apache.hadoop.yarn.server.nodemanager.webapp.NMWebParams.CONTAINER_ID;
import static org.apache.hadoop.yarn.server.nodemanager.webapp.NMWebParams.ENTITY_STRING;
import static org.apache.hadoop.yarn.util.StringHelper.join;
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.ACCORDION;
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.ACCORDION_ID;
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.THEMESWITCHER_ID;
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.initID;
import org.apache.hadoop.yarn.webapp.SubView;
public class AggregatedLogsPage extends NMView {
/* (non-Javadoc)
* @see org.apache.hadoop.yarn.server.nodemanager.webapp.NMView#preHead(org.apache.hadoop.yarn.webapp.hamlet.Hamlet.HTML)
*/
@Override
protected void preHead(Page.HTML<_> html) {
String logEntity = $(ENTITY_STRING);
if (logEntity == null || logEntity.isEmpty()) {
logEntity = $(CONTAINER_ID);
}
if (logEntity == null || logEntity.isEmpty()) {
logEntity = "UNKNOWN";
}
set(TITLE, join("Logs for ", logEntity));
set(ACCORDION_ID, "nav");
set(initID(ACCORDION, "nav"), "{autoHeight:false, active:0}");
set(THEMESWITCHER_ID, "themeswitcher");
}
@Override
protected Class<? extends SubView> content() {
return AggregatedLogsBlock.class;
}
@Override
protected Class<? extends SubView> nav() {
return AggregatedLogsNavBlock.class;
}
}

View File

@@ -29,6 +29,7 @@ import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.server.nodemanager.Context;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.Application;
import org.apache.hadoop.yarn.util.ConverterUtils;
import org.apache.hadoop.yarn.webapp.YarnWebParams;
import org.apache.hadoop.yarn.webapp.SubView;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.BODY;
@@ -60,7 +61,7 @@ public class AllApplicationsPage extends NMView {
}
public static class AllApplicationsBlock extends HtmlBlock implements
NMWebParams {
YarnWebParams {
private final Context nmContext;

View File

@@ -29,6 +29,7 @@ import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.server.nodemanager.Context;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container;
import org.apache.hadoop.yarn.util.ConverterUtils;
import org.apache.hadoop.yarn.webapp.YarnWebParams;
import org.apache.hadoop.yarn.webapp.SubView;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.BODY;
@@ -60,7 +61,7 @@ public class AllContainersPage extends NMView {
}
public static class AllContainersBlock extends HtmlBlock implements
NMWebParams {
YarnWebParams {
private final Context nmContext;

View File

@@ -34,6 +34,7 @@ import org.apache.hadoop.yarn.server.nodemanager.Context;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.Application;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container;
import org.apache.hadoop.yarn.util.ConverterUtils;
import org.apache.hadoop.yarn.webapp.YarnWebParams;
import org.apache.hadoop.yarn.webapp.SubView;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TABLE;
@@ -42,7 +43,7 @@ import org.apache.hadoop.yarn.webapp.view.InfoBlock;
import com.google.inject.Inject;
public class ApplicationPage extends NMView implements NMWebParams {
public class ApplicationPage extends NMView implements YarnWebParams {
@Override protected void preHead(Page.HTML<_> html) {
commonPreHead(html);
@@ -61,7 +62,7 @@ public class ApplicationPage extends NMView implements NMWebParams {
}
public static class ApplicationBlock extends HtmlBlock implements
NMWebParams {
YarnWebParams {
private final Context nmContext;
private final Configuration conf;

View File

@@ -18,8 +18,8 @@
package org.apache.hadoop.yarn.server.nodemanager.webapp;
import static org.apache.hadoop.yarn.server.nodemanager.webapp.NMWebParams.CONTAINER_ID;
import static org.apache.hadoop.yarn.util.StringHelper.join;
import static org.apache.hadoop.yarn.webapp.YarnWebParams.CONTAINER_ID;
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.ACCORDION;
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.ACCORDION_ID;
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.THEMESWITCHER_ID;
@@ -49,6 +49,7 @@ import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Cont
import org.apache.hadoop.yarn.server.nodemanager.containermanager.launcher.ContainerLaunch;
import org.apache.hadoop.yarn.server.security.ApplicationACLsManager;
import org.apache.hadoop.yarn.util.ConverterUtils;
import org.apache.hadoop.yarn.webapp.YarnWebParams;
import org.apache.hadoop.yarn.webapp.SubView;
import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
@@ -84,7 +85,7 @@ public class ContainerLogsPage extends NMView {
}
public static class ContainersLogsBlock extends HtmlBlock implements
NMWebParams {
YarnWebParams {
private final Configuration conf;
private final LocalDirAllocator logsSelector;
private final Context nmContext;

View File

@@ -28,6 +28,7 @@ import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.nodemanager.Context;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container;
import org.apache.hadoop.yarn.util.ConverterUtils;
import org.apache.hadoop.yarn.webapp.YarnWebParams;
import org.apache.hadoop.yarn.webapp.SubView;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.DIV;
@@ -36,7 +37,7 @@ import org.apache.hadoop.yarn.webapp.view.InfoBlock;
import com.google.inject.Inject;
public class ContainerPage extends NMView implements NMWebParams {
public class ContainerPage extends NMView implements YarnWebParams {
@Override
protected void preHead(Page.HTML<_> html) {
@@ -50,7 +51,7 @@ public class ContainerPage extends NMView implements NMWebParams {
return ContainerBlock.class;
}
public static class ContainerBlock extends HtmlBlock implements NMWebParams {
public static class ContainerBlock extends HtmlBlock implements YarnWebParams {
private final Context nmContext;

View File

@@ -28,10 +28,11 @@ import org.apache.hadoop.yarn.server.nodemanager.Context;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.Application;
import org.apache.hadoop.yarn.util.ConverterUtils;
import org.apache.hadoop.yarn.webapp.Controller;
import org.apache.hadoop.yarn.webapp.YarnWebParams;
import com.google.inject.Inject;
public class NMController extends Controller implements NMWebParams {
public class NMController extends Controller implements YarnWebParams {
private Context nmContext;
private Configuration nmConf;

View File

@@ -18,9 +18,10 @@
package org.apache.hadoop.yarn.server.nodemanager.webapp;
import org.apache.hadoop.yarn.webapp.YarnWebParams;
import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
public class NavBlock extends HtmlBlock implements NMWebParams {
public class NavBlock extends HtmlBlock implements YarnWebParams {
@Override
protected void render(Block html) {

View File

@@ -29,6 +29,7 @@ import org.apache.hadoop.yarn.server.nodemanager.Context;
import org.apache.hadoop.yarn.server.nodemanager.ResourceView;
import org.apache.hadoop.yarn.server.security.ApplicationACLsManager;
import org.apache.hadoop.yarn.service.AbstractService;
import org.apache.hadoop.yarn.webapp.YarnWebParams;
import org.apache.hadoop.yarn.webapp.WebApp;
import org.apache.hadoop.yarn.webapp.WebApps;
@@ -76,7 +77,7 @@ public class WebServer extends AbstractService {
super.stop();
}
public static class NMWebApp extends WebApp implements NMWebParams {
public static class NMWebApp extends WebApp implements YarnWebParams {
private final ResourceView resourceView;
private final ApplicationACLsManager aclsManager;

View File

@@ -63,13 +63,15 @@ import org.apache.hadoop.yarn.event.DrainDispatcher;
import org.apache.hadoop.yarn.event.EventHandler;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat;
import org.apache.hadoop.yarn.logaggregation.ContainerLogsRetentionPolicy;
import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogKey;
import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogReader;
import org.apache.hadoop.yarn.server.nodemanager.CMgrCompletedAppsEvent;
import org.apache.hadoop.yarn.server.nodemanager.DeletionService;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.BaseContainerManagerTest;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.ApplicationEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.ApplicationEventType;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.logaggregation.AggregatedLogFormat.LogKey;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.logaggregation.AggregatedLogFormat.LogReader;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.loghandler.event.LogHandlerAppFinishedEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.loghandler.event.LogHandlerAppStartedEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.loghandler.event.LogHandlerContainerFinishedEvent;

View File

@@ -35,10 +35,10 @@ import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.event.Dispatcher;
import org.apache.hadoop.yarn.event.DrainDispatcher;
import org.apache.hadoop.yarn.event.EventHandler;
import org.apache.hadoop.yarn.logaggregation.ContainerLogsRetentionPolicy;
import org.apache.hadoop.yarn.server.nodemanager.DeletionService;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.ApplicationEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.ApplicationEventType;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.logaggregation.ContainerLogsRetentionPolicy;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.loghandler.event.LogHandlerAppFinishedEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.loghandler.event.LogHandlerAppStartedEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.loghandler.event.LogHandlerContainerFinishedEvent;