Cleanup changes during rebase with trunk (Vrushali C)

This commit is contained in:
Vrushali 2016-06-21 16:38:20 -07:00 committed by Sangjin Lee
parent 1a227744ac
commit 6d943038f6
8 changed files with 46 additions and 31 deletions

View File

@@ -1133,11 +1133,6 @@
<scope>test</scope>
<optional>true</optional>
<exclusions>
<!-- exclude jdk.tools (1.7) as we're not managing it -->
<exclusion>
<groupId>jdk.tools</groupId>
<artifactId>jdk.tools</artifactId>
</exclusion>
<exclusion>
<groupId>org.jruby</groupId>
<artifactId>jruby-complete</artifactId>

View File

@@ -48,11 +48,10 @@ function hadoop_usage
hadoop_add_subcommand "rmadmin" "admin tools"
hadoop_add_subcommand "scmadmin" "SharedCacheManager admin tools"
hadoop_add_subcommand "sharedcachemanager" "run the SharedCacheManager daemon"
hadoop_add_subcommand "timelineserver" "run the timeline server"
hadoop_add_subcommand "timelinereader" "run the timeline reader server"
hadoop_add_subcommand "timelineserver" "run the timeline server"
hadoop_add_subcommand "top" "view cluster information"
hadoop_add_subcommand "version" "print the version"
hadoop_generate_usage "${MYNAME}" true
hadoop_generate_usage "${HADOOP_SHELL_EXECNAME}" true
}
@@ -173,6 +172,10 @@ function yarncmd_case
hadoop_debug "Append YARN_SHAREDCACHEMANAGER_OPTS onto HADOOP_OPTS"
HADOOP_OPTS="${HADOOP_OPTS} ${YARN_SHAREDCACHEMANAGER_OPTS}"
;;
timelinereader)
HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
HADOOP_CLASSNAME='org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderServer'
;;
timelineserver)
HADOOP_SUBCMD_SUPPORTDAEMONIZATION="true"
HADOOP_CLASSNAME='org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryServer'
@@ -184,10 +187,6 @@ function yarncmd_case
HADOOP_HEAPSIZE_MAX="${YARN_TIMELINESERVER_HEAPSIZE}"
fi
;;
timelinereader)
supportdaemonization="true"
CLASS='org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderServer'
;;
version)
HADOOP_CLASSNAME=org.apache.hadoop.util.VersionInfo
hadoop_debug "Append YARN_CLIENT_OPTS onto HADOOP_OPTS"

View File

@@ -81,7 +81,6 @@ import org.apache.hadoop.yarn.server.api.records.NodeAction;
import org.apache.hadoop.yarn.server.api.records.NodeHealthStatus;
import org.apache.hadoop.yarn.server.api.records.NodeStatus;
import org.apache.hadoop.yarn.server.nodemanager.NodeManager.NMContext;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.ContainerManagerImpl;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.Application;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.ApplicationState;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container;

View File

@@ -154,7 +154,6 @@ import org.apache.hadoop.yarn.util.timeline.TimelineUtils;
import com.google.common.annotations.VisibleForTesting;
import com.google.protobuf.ByteString;
import org.apache.hadoop.yarn.util.resource.Resources;
public class ContainerManagerImpl extends CompositeService implements
ContainerManager {
@@ -351,9 +350,8 @@ public class ContainerManagerImpl extends CompositeService implements
LOG.info("Recovering application " + appId);
//TODO: Recover flow and flow run ID
ApplicationImpl app = new ApplicationImpl(
dispatcher, p.getUser(), null, appId, creds, context,
p.getAppLogAggregationInitedTime());
ApplicationImpl app = new ApplicationImpl(dispatcher, p.getUser(), appId,
creds, context, p.getAppLogAggregationInitedTime());
context.getApplications().put(appId, app);
app.handle(new ApplicationInitEvent(appId, acls, logAggregationContext));
}

View File

@@ -40,7 +40,6 @@ import org.apache.hadoop.yarn.api.records.LogAggregationContext;
import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationIdPBImpl;
import org.apache.hadoop.yarn.api.records.impl.pb.LogAggregationContextPBImpl;
import org.apache.hadoop.yarn.api.records.impl.pb.ProtoUtils;
import org.apache.hadoop.yarn.client.api.TimelineClient;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.event.Dispatcher;
import org.apache.hadoop.yarn.proto.YarnProtos;
@@ -118,10 +117,19 @@ public class ApplicationImpl implements Application {
Context context, long recoveredLogInitedTime) {
this.dispatcher = dispatcher;
this.user = user;
this.flowContext = flowContext;
this.appId = appId;
this.credentials = credentials;
this.aclsManager = context.getApplicationACLsManager();
Configuration conf = context.getConf();
if (YarnConfiguration.timelineServiceV2Enabled(conf)) {
if (flowContext == null) {
throw new IllegalArgumentException("flow context cannot be null");
}
this.flowContext = flowContext;
if (YarnConfiguration.systemMetricsPublisherEnabled(conf)) {
context.getNMTimelinePublisher().createTimelineClient(appId);
}
}
this.context = context;
this.appStateStore = context.getNMStateStore();
ReentrantReadWriteLock lock = new ReentrantReadWriteLock();
@@ -136,16 +144,6 @@ public class ApplicationImpl implements Application {
Credentials credentials, Context context) {
this(dispatcher, user, flowContext, appId, credentials,
context, -1);
Configuration conf = context.getConf();
if (YarnConfiguration.timelineServiceV2Enabled(conf)) {
if (flowContext == null) {
throw new IllegalArgumentException("flow context cannot be null");
}
this.flowContext = flowContext;
if (YarnConfiguration.systemMetricsPublisherEnabled(conf)) {
context.getNMTimelinePublisher().createTimelineClient(appId);
}
}
}
/**

View File

@@ -181,6 +181,8 @@ public class TimelineServiceV1Publisher extends AbstractSystemMetricsPublisher {
SystemMetricsEventType.PUBLISH_ENTITY, entity, app.getApplicationId()));
}
@SuppressWarnings("unchecked")
@Override
public void appACLsUpdated(RMApp app, String appViewACLs, long updatedTime) {
TimelineEntity entity = createApplicationEntity(app.getApplicationId());
TimelineEvent tEvent = new TimelineEvent();
@@ -248,6 +250,10 @@ public class TimelineServiceV1Publisher extends AbstractSystemMetricsPublisher {
app.getFinalApplicationStatus().toString());
eventInfo.put(AppAttemptMetricsConstants.STATE_EVENT_INFO, RMServerUtils
.createApplicationAttemptState(appAttemtpState).toString());
if (appAttempt.getMasterContainer() != null) {
eventInfo.put(AppAttemptMetricsConstants.MASTER_CONTAINER_EVENT_INFO,
appAttempt.getMasterContainer().getId().toString());
}
tEvent.setEventInfo(eventInfo);
entity.addEvent(tEvent);
@@ -301,6 +307,12 @@ public class TimelineServiceV1Publisher extends AbstractSystemMetricsPublisher {
container.getContainerExitStatus());
eventInfo.put(ContainerMetricsConstants.STATE_EVENT_INFO,
container.getContainerState().toString());
Map<String, Object> entityInfo = new HashMap<String, Object>();
entityInfo.put(ContainerMetricsConstants.ALLOCATED_HOST_ENTITY_INFO,
container.getAllocatedNode().getHost());
entityInfo.put(ContainerMetricsConstants.ALLOCATED_PORT_ENTITY_INFO,
container.getAllocatedNode().getPort());
entity.setOtherInfo(entityInfo);
tEvent.setEventInfo(eventInfo);
entity.addEvent(tEvent);

View File

@@ -196,10 +196,14 @@ public class TimelineServiceV2Publisher extends AbstractSystemMetricsPublisher {
@Override
public void appACLsUpdated(RMApp app, String appViewACLs, long updatedTime) {
ApplicationEntity entity = createApplicationEntity(app.getApplicationId());
TimelineEvent tEvent = new TimelineEvent();
Map<String, Object> entityInfo = new HashMap<String, Object>();
entityInfo.put(ApplicationMetricsConstants.APP_VIEW_ACLS_ENTITY_INFO,
appViewACLs);
(appViewACLs == null) ? "" : appViewACLs);
entity.setInfo(entityInfo);
tEvent.setId(ApplicationMetricsConstants.ACLS_UPDATED_EVENT_TYPE);
tEvent.setTimestamp(updatedTime);
entity.addEvent(tEvent);
getDispatcher().getEventHandler().handle(new TimelineV2PublishEvent(
SystemMetricsEventType.PUBLISH_ENTITY, entity, app.getApplicationId()));
@@ -285,6 +289,10 @@ public class TimelineServiceV2Publisher extends AbstractSystemMetricsPublisher {
app.getFinalApplicationStatus().toString());
eventInfo.put(AppAttemptMetricsConstants.STATE_EVENT_INFO, RMServerUtils
.createApplicationAttemptState(appAttemtpState).toString());
if (appAttempt.getMasterContainer() != null) {
eventInfo.put(AppAttemptMetricsConstants.MASTER_CONTAINER_EVENT_INFO,
appAttempt.getMasterContainer().getId().toString());
}
tEvent.setInfo(eventInfo);
entity.addEvent(tEvent);
@@ -353,6 +361,12 @@ public class TimelineServiceV2Publisher extends AbstractSystemMetricsPublisher {
container.getContainerExitStatus());
eventInfo.put(ContainerMetricsConstants.STATE_EVENT_INFO,
container.getContainerState().toString());
Map<String, Object> entityInfo = new HashMap<String, Object>();
entityInfo.put(ContainerMetricsConstants.ALLOCATED_HOST_ENTITY_INFO,
container.getAllocatedNode().getHost());
entityInfo.put(ContainerMetricsConstants.ALLOCATED_PORT_ENTITY_INFO,
container.getAllocatedNode().getPort());
entity.setInfo(entityInfo);
tEvent.setInfo(eventInfo);
entity.addEvent(tEvent);

View File

@@ -22,12 +22,12 @@
<parent>
<artifactId>hadoop-yarn-server</artifactId>
<groupId>org.apache.hadoop</groupId>
<version>3.0.0-SNAPSHOT</version>
<version>3.0.0-alpha1-SNAPSHOT</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-yarn-server-timelineservice-hbase-tests</artifactId>
<version>3.0.0-SNAPSHOT</version>
<version>3.0.0-alpha1-SNAPSHOT</version>
<name>Apache Hadoop YARN Timeline Service HBase tests</name>
<properties>