YARN-321. Merging YARN-321 branch to trunk.
svn merge ../branches/YARN-321

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1561452 13f79535-47bb-0310-9956-ffa450edef68

This commit is contained in: commit cbee889711 (parent edb6dc5f30)
@@ -122,6 +122,13 @@
        <include>*-sources.jar</include>
      </includes>
    </fileSet>
    <fileSet>
      <directory>hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/target</directory>
      <outputDirectory>/share/hadoop/${hadoop.component}/sources</outputDirectory>
      <includes>
        <include>*-sources.jar</include>
      </includes>
    </fileSet>
    <fileSet>
      <directory>hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/target</directory>
      <outputDirectory>/share/hadoop/${hadoop.component}/sources</outputDirectory>
@@ -44,9 +44,13 @@ import org.apache.hadoop.mapreduce.v2.util.MRApps;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.yarn.api.ApplicationClientProtocol;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptReport;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ContainerReport;
import org.apache.hadoop.yarn.api.records.NodeReport;
import org.apache.hadoop.yarn.api.records.QueueUserACLInfo;
import org.apache.hadoop.yarn.api.records.NodeState;

@@ -371,4 +375,29 @@ public class ResourceMgrDelegate extends YarnClient {
      IOException {
    return client.getQueueAclsInfo();
  }

  @Override
  public ApplicationAttemptReport getApplicationAttemptReport(
      ApplicationAttemptId appAttemptId) throws YarnException, IOException {
    return client.getApplicationAttemptReport(appAttemptId);
  }

  @Override
  public List<ApplicationAttemptReport> getApplicationAttempts(
      ApplicationId appId) throws YarnException, IOException {
    return client.getApplicationAttempts(appId);
  }

  @Override
  public ContainerReport getContainerReport(ContainerId containerId)
      throws YarnException, IOException {
    return client.getContainerReport(containerId);
  }

  @Override
  public List<ContainerReport> getContainers(
      ApplicationAttemptId applicationAttemptId) throws YarnException,
      IOException {
    return client.getContainers(applicationAttemptId);
  }
}
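A minimal usage sketch (not part of this commit) of the attempt- and container-level report methods that YarnClient now exposes and that ResourceMgrDelegate delegates to above; the class name, method name and surrounding boilerplate are illustrative assumptions only:

import java.io.IOException;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptReport;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerReport;
import org.apache.hadoop.yarn.client.api.YarnClient;
import org.apache.hadoop.yarn.exceptions.YarnException;

public class AttemptAndContainerReports {

  // Prints every attempt of the given application and the containers of each
  // attempt, using the new YarnClient methods shown in the diff above.
  public static void dump(Configuration conf, ApplicationId appId)
      throws YarnException, IOException {
    YarnClient yarnClient = YarnClient.createYarnClient();
    yarnClient.init(conf);
    yarnClient.start();
    try {
      List<ApplicationAttemptReport> attempts =
          yarnClient.getApplicationAttempts(appId);
      for (ApplicationAttemptReport attempt : attempts) {
        System.out.println(attempt.getApplicationAttemptId() + " on "
            + attempt.getHost() + ":" + attempt.getRpcPort());
        List<ContainerReport> containers =
            yarnClient.getContainers(attempt.getApplicationAttemptId());
        for (ContainerReport container : containers) {
          System.out.println("  " + container.getContainerId());
        }
      }
    } finally {
      yarnClient.stop();
    }
  }
}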
@@ -9,8 +9,67 @@ Trunk - Unreleased
    YARN-1496. Protocol additions to allow moving apps between queues (Sandy
    Ryza)

    YARN-930. Bootstrapping ApplicationHistoryService module. (vinodkv)

    YARN-947. Implementing the data objects to be used by the History reader
    and writer interfaces. (Zhijie Shen via vinodkv)

    YARN-934. Defined a Writer Interface for HistoryStorage. (Zhijie Shen via
    vinodkv)

    YARN-925. Defined a Reader Interface for HistoryStorage. (Mayank Bansal via
    vinodkv)

    YARN-978. Created ApplicationAttemptReport. (Mayank Bansal via vinodkv)

    YARN-956. Added a testable in-memory HistoryStorage. (Mayank Bansal via
    vinodkv)

    YARN-975. Added a file-system implementation for HistoryStorage. (Zhijie Shen
    via vinodkv)

    YARN-1123. Added a new ContainerReport and its Protobuf implementation. (Mayank
    Bansal via vinodkv)

    YARN-979. Added more APIs for getting information about ApplicationAttempts
    and Containers from ApplicationHistoryProtocol. (Mayank Bansal and Zhijie Shen
    via vinodkv)

    YARN-953. Changed ResourceManager to start writing history data. (Zhijie Shen
    via vinodkv)

    YARN-1266. Implemented PB service and client wrappers for
    ApplicationHistoryProtocol. (Mayank Bansal via vinodkv)

    YARN-955. Implemented ApplicationHistoryProtocol handler. (Mayank Bansal via
    vinodkv)

    YARN-1242. Changed yarn scripts to be able to start ApplicationHistoryServer
    as an individual process. (Mayank Bansal via vinodkv)

    YARN-954. Implemented web UI for the ApplicationHistoryServer and wired it into
    the HistoryStorage. (Zhijie Shen via vinodkv)

    YARN-967. Added the client and CLI interfaces for obtaining ApplicationHistory
    data. (Mayank Bansal via vinodkv)

    YARN-1023. Added Webservices REST APIs support for Application History. (Zhijie
    Shen via vinodkv)

    YARN-1413. Implemented serving of aggregated-logs in the ApplicationHistory
    server. (Mayank Bansal via vinodkv)

  IMPROVEMENTS

    YARN-1007. Enhance History Reader interface for Containers. (Mayank Bansal via
    devaraj)

    YARN-974. Added more information to RMContainer to be collected and recorded in
    Application-History. (Zhijie Shen via vinodkv)

    YARN-987. Added ApplicationHistoryManager responsible for exposing reports to
    all clients. (Mayank Bansal via vinodkv)

  OPTIMIZATIONS

  BUG FIXES

@@ -18,6 +77,35 @@ Trunk - Unreleased
    YARN-524. TestYarnVersionInfo failing if generated properties doesn't
    include an SVN URL. (stevel)

    YARN-935. Correcting pom.xml to build applicationhistoryserver module
    successfully. (Zhijie Shen via vinodkv)

    YARN-962. Fixed bug in application-history proto file and renamed it to be just
    a client proto file. (Zhijie Shen via vinodkv)

    YARN-984. Renamed the incorrectly named applicationhistoryservice.records.pb.impl
    package to be applicationhistoryservice.records.impl.pb. (Devaraj K via vinodkv)

    YARN-1534. Fixed failure of test TestAHSWebApp. (Shinichi Yamashita via vinodkv)

    YARN-1555. Fixed test failures in applicationhistoryservice.* (Vinod Kumar
    Vavilapalli via mayank)

    YARN-1594. Updated pom.xml of applicationhistoryservice sub-project according to
    YARN-888. (Vinod Kumar Vavilapalli via zjshen)

    YARN-1596. Fixed Javadoc warnings on branch YARN-321. (Vinod Kumar Vavilapalli
    via zjshen)

    YARN-1597. Fixed Findbugs warnings on branch YARN-321. (Vinod Kumar Vavilapalli
    via zjshen)

    YARN-1595. Made enabling history service configurable and fixed test failures on
    branch YARN-321. (Vinod Kumar Vavilapalli via zjshen)

    YARN-1605. Fixed formatting issues in the new module on branch YARN-321. (Vinod
    Kumar Vavilapalli via zjshen)

Release 2.4.0 - UNRELEASED

  INCOMPATIBLE CHANGES
@@ -58,10 +58,13 @@ function print_usage(){
  echo "where COMMAND is one of:"
  echo "  resourcemanager      run the ResourceManager"
  echo "  nodemanager          run a nodemanager on each slave"
  echo "  historyserver        run the application history server"
  echo "  rmadmin              admin tools"
  echo "  version              print the version"
  echo "  jar <jar>            run a jar file"
  echo "  application          prints application(s) report/kill application"
  echo "  applicationattempt   prints applicationattempt(s) report"
  echo "  container            prints container(s) report"
  echo "  node                 prints node report(s)"
  echo "  logs                 dump container logs"
  echo "  classpath            prints the class path needed to get the"

@@ -145,6 +148,10 @@ if [ -d "$HADOOP_YARN_HOME/yarn-server/yarn-server-common/target/classes" ]; the
fi
if [ -d "$HADOOP_YARN_HOME/yarn-server/yarn-server-resourcemanager/target/classes" ]; then
  CLASSPATH=${CLASSPATH}:$HADOOP_YARN_HOME/yarn-server/yarn-server-resourcemanager/target/classes
  CLASSPATH=${CLASSPATH}:$HADOOP_YARN_HOME/yarn-server/yarn-server-applicationhistoryservice/target/classes
fi
if [ -d "$HADOOP_YARN_HOME/yarn-server/yarn-server-applicationhistoryservice/target/classes" ]; then
  CLASSPATH=${CLASSPATH}:$HADOOP_YARN_HOME/yarn-server/yarn-server-applicationhistoryservice/target/classes
fi
if [ -d "$HADOOP_YARN_HOME/build/test/classes" ]; then
  CLASSPATH=${CLASSPATH}:$HADOOP_YARN_HOME/target/test/classes

@@ -177,9 +184,12 @@ if [ "$COMMAND" = "classpath" ] ; then
elif [ "$COMMAND" = "rmadmin" ] ; then
  CLASS='org.apache.hadoop.yarn.client.cli.RMAdminCLI'
  YARN_OPTS="$YARN_OPTS $YARN_CLIENT_OPTS"
elif [ "$COMMAND" = "application" ] ; then
elif [ "$COMMAND" = "application" ] ||
     [ "$COMMAND" = "applicationattempt" ] ||
     [ "$COMMAND" = "container" ]; then
  CLASS=org.apache.hadoop.yarn.client.cli.ApplicationCLI
  YARN_OPTS="$YARN_OPTS $YARN_CLIENT_OPTS"
  set -- $COMMAND $@
elif [ "$COMMAND" = "node" ] ; then
  CLASS=org.apache.hadoop.yarn.client.cli.NodeCLI
  YARN_OPTS="$YARN_OPTS $YARN_CLIENT_OPTS"

@@ -190,6 +200,13 @@ elif [ "$COMMAND" = "resourcemanager" ] ; then
  if [ "$YARN_RESOURCEMANAGER_HEAPSIZE" != "" ]; then
    JAVA_HEAP_MAX="-Xmx""$YARN_RESOURCEMANAGER_HEAPSIZE""m"
  fi
elif [ "$COMMAND" = "historyserver" ] ; then
  CLASSPATH=${CLASSPATH}:$YARN_CONF_DIR/ahs-config/log4j.properties
  CLASS='org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryServer'
  YARN_OPTS="$YARN_OPTS $YARN_HISTORYSERVER_OPTS"
  if [ "$YARN_HISTORYSERVER_HEAPSIZE" != "" ]; then
    JAVA_HEAP_MAX="-Xmx""$YARN_HISTORYSERVER_HEAPSIZE""m"
  fi
elif [ "$COMMAND" = "nodemanager" ] ; then
  CLASSPATH=${CLASSPATH}:$YARN_CONF_DIR/nm-config/log4j.properties
  CLASS='org.apache.hadoop.yarn.server.nodemanager.NodeManager'
@@ -120,8 +120,11 @@ if "%1" == "--config" (

if exist %HADOOP_YARN_HOME%\yarn-server\yarn-server-resourcemanager\target\classes (
  set CLASSPATH=%CLASSPATH%;%HADOOP_YARN_HOME%\yarn-server\yarn-server-resourcemanager\target\classes
  set CLASSPATH=%CLASSPATH%;%HADOOP_YARN_HOME%\yarn-server\yarn-server-applicationhistoryservice\target\classes
)
if exist %HADOOP_YARN_HOME%\yarn-server\yarn-server-applicationhistoryservice\target\classes (
  set CLASSPATH=%CLASSPATH%;%HADOOP_YARN_HOME%\yarn-server\yarn-server-applicationhistoryservice\target\classes
)

if exist %HADOOP_YARN_HOME%\build\test\classes (
  set CLASSPATH=%CLASSPATH%;%HADOOP_YARN_HOME%\build\test\classes
)

@@ -138,7 +141,8 @@ if "%1" == "--config" (
  goto :eof
)

set yarncommands=resourcemanager nodemanager proxyserver rmadmin version jar application node logs daemonlog
set yarncommands=resourcemanager nodemanager proxyserver rmadmin version jar ^
   application applicationattempt container node logs daemonlog historyserver
for %%i in ( %yarncommands% ) do (
  if %yarn-command% == %%i set yarncommand=true
)

@@ -170,6 +174,19 @@ goto :eof
:application
  set CLASS=org.apache.hadoop.yarn.client.cli.ApplicationCLI
  set YARN_OPTS=%YARN_OPTS% %YARN_CLIENT_OPTS%
  set yarn-command-arguments=%yarn-command% %yarn-command-arguments%
  goto :eof

:applicationattempt
  set CLASS=org.apache.hadoop.yarn.client.cli.ApplicationCLI
  set YARN_OPTS=%YARN_OPTS% %YARN_CLIENT_OPTS%
  set yarn-command-arguments=%yarn-command% %yarn-command-arguments%
  goto :eof

:container
  set CLASS=org.apache.hadoop.yarn.client.cli.ApplicationCLI
  set YARN_OPTS=%YARN_OPTS% %YARN_CLIENT_OPTS%
  set yarn-command-arguments=%yarn-command% %yarn-command-arguments%
  goto :eof

:node

@@ -186,6 +203,15 @@ goto :eof
  )
  goto :eof

:historyserver
  set CLASSPATH=%CLASSPATH%;%YARN_CONF_DIR%\ahs-config\log4j.properties
  set CLASS=org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryServer
  set YARN_OPTS=%YARN_OPTS% %YARN_HISTORYSERVER_OPTS%
  if defined YARN_HISTORYSERVER_HEAPSIZE (
    set JAVA_HEAP_MAX=-Xmx%YARN_HISTORYSERVER_HEAPSIZE%m
  )
  goto :eof

:nodemanager
  set CLASSPATH=%CLASSPATH%;%YARN_CONF_DIR%\nm-config\log4j.properties
  set CLASS=org.apache.hadoop.yarn.server.nodemanager.NodeManager

@@ -251,10 +277,13 @@ goto :eof
@echo where COMMAND is one of:
@echo   resourcemanager      run the ResourceManager
@echo   nodemanager          run a nodemanager on each slave
@echo   historyserver        run the application history server
@echo   rmadmin              admin tools
@echo   version              print the version
@echo   jar ^<jar^>            run a jar file
@echo   application          prints application(s) report/kill application
@echo   applicationattempt   prints applicationattempt(s) report
@echo   container            prints container(s) report
@echo   node                 prints node report(s)
@echo   logs                 dump container logs
@echo   classpath            prints the class path needed to get the
@@ -54,6 +54,15 @@ fi
# or JAVA_HEAP_MAX with YARN_HEAPMAX as the preferred option of the two.
#export YARN_RESOURCEMANAGER_HEAPSIZE=1000

# Specify the max Heapsize for the HistoryServer using a numerical value
# in the scale of MB. For example, to specify a JVM option of -Xmx1000m, set
# the value to 1000.
# This value will be overridden by an Xmx setting specified in either YARN_OPTS
# and/or YARN_HISTORYSERVER_OPTS.
# If not specified, the default value will be picked from either YARN_HEAPMAX
# or JAVA_HEAP_MAX with YARN_HEAPMAX as the preferred option of the two.
#export YARN_HISTORYSERVER_HEAPSIZE=1000

# Specify the JVM options to be used when starting the ResourceManager.
# These options will be appended to the options specified as YARN_OPTS
# and therefore may override any similar flags set in YARN_OPTS
@@ -112,6 +112,8 @@
            <include>containermanagement_protocol.proto</include>
            <include>server/yarn_server_resourcemanager_service_protos.proto</include>
            <include>server/resourcemanager_administration_protocol.proto</include>
            <include>application_history_client.proto</include>
            <include>server/application_history_server.proto</include>
          </includes>
        </source>
        <output>${project.build.directory}/generated-sources/java</output>
@@ -0,0 +1,334 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.yarn.api;

import java.io.IOException;

import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.yarn.api.protocolrecords.CancelDelegationTokenRequest;
import org.apache.hadoop.yarn.api.protocolrecords.CancelDelegationTokenResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptReportRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptReportResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptsRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptsResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetContainerReportRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetContainerReportResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetContainersRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetContainersResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetDelegationTokenRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetDelegationTokenResponse;
import org.apache.hadoop.yarn.api.protocolrecords.RenewDelegationTokenRequest;
import org.apache.hadoop.yarn.api.protocolrecords.RenewDelegationTokenResponse;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptReport;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ContainerReport;
import org.apache.hadoop.yarn.api.records.Token;
import org.apache.hadoop.yarn.exceptions.YarnException;

/**
 * <p>
 * The protocol between clients and the <code>ApplicationHistoryServer</code> to
 * get the information of completed applications etc.
 * </p>
 */
@Public
@Unstable
public interface ApplicationHistoryProtocol {

  /**
   * <p>
   * The interface used by clients to get a report of an Application from the
   * <code>ResourceManager</code>.
   * </p>
   *
   * <p>
   * The client, via {@link GetApplicationReportRequest} provides the
   * {@link ApplicationId} of the application.
   * </p>
   *
   * <p>
   * In secure mode, the <code>ApplicationHistoryServer</code> verifies access to
   * the application, queue etc. before accepting the request.
   * </p>
   *
   * <p>
   * The <code>ApplicationHistoryServer</code> responds with a
   * {@link GetApplicationReportResponse} which includes the
   * {@link ApplicationReport} for the application.
   * </p>
   *
   * <p>
   * If the user does not have <code>VIEW_APP</code> access then the following
   * fields in the report will be set to stubbed values:
   * <ul>
   * <li>host - set to "N/A"</li>
   * <li>RPC port - set to -1</li>
   * <li>client token - set to "N/A"</li>
   * <li>diagnostics - set to "N/A"</li>
   * <li>tracking URL - set to "N/A"</li>
   * <li>original tracking URL - set to "N/A"</li>
   * <li>resource usage report - all values are -1</li>
   * </ul>
   * </p>
   *
   * @param request
   *          request for an application report
   * @return application report
   * @throws YarnException
   * @throws IOException
   */
  @Public
  @Unstable
  public GetApplicationReportResponse getApplicationReport(
      GetApplicationReportRequest request) throws YarnException, IOException;

  /**
   * <p>
   * The interface used by clients to get a report of all Applications in the
   * cluster from the <code>ApplicationHistoryServer</code>.
   * </p>
   *
   * <p>
   * The <code>ApplicationHistoryServer</code> responds with a
   * {@link GetApplicationsResponse} which includes a list of
   * {@link ApplicationReport} for all the applications.
   * </p>
   *
   * <p>
   * If the user does not have <code>VIEW_APP</code> access for an application
   * then the corresponding report will be filtered as described in
   * {@link #getApplicationReport(GetApplicationReportRequest)}.
   * </p>
   *
   * @param request
   *          request for reports on all the applications
   * @return report on applications matching the given application types defined
   *         in the request
   * @throws YarnException
   * @throws IOException
   */
  @Public
  @Unstable
  public GetApplicationsResponse
      getApplications(GetApplicationsRequest request) throws YarnException,
          IOException;

  /**
   * <p>
   * The interface used by clients to get a report of an Application Attempt
   * from the <code>ApplicationHistoryServer</code>.
   * </p>
   *
   * <p>
   * The client, via {@link GetApplicationAttemptReportRequest} provides the
   * {@link ApplicationAttemptId} of the application attempt.
   * </p>
   *
   * <p>
   * In secure mode, the <code>ApplicationHistoryServer</code> verifies access to
   * the method before accepting the request.
   * </p>
   *
   * <p>
   * The <code>ApplicationHistoryServer</code> responds with a
   * {@link GetApplicationAttemptReportResponse} which includes the
   * {@link ApplicationAttemptReport} for the application attempt.
   * </p>
   *
   * <p>
   * If the user does not have <code>VIEW_APP</code> access then the following
   * fields in the report will be set to stubbed values:
   * <ul>
   * <li>host</li>
   * <li>RPC port</li>
   * <li>client token</li>
   * <li>diagnostics - set to "N/A"</li>
   * <li>tracking URL</li>
   * </ul>
   * </p>
   *
   * @param request
   *          request for an application attempt report
   * @return application attempt report
   * @throws YarnException
   * @throws IOException
   */
  @Public
  @Unstable
  public GetApplicationAttemptReportResponse getApplicationAttemptReport(
      GetApplicationAttemptReportRequest request) throws YarnException,
      IOException;

  /**
   * <p>
   * The interface used by clients to get a report of all Application attempts
   * in the cluster from the <code>ApplicationHistoryServer</code>.
   * </p>
   *
   * <p>
   * The <code>ApplicationHistoryServer</code> responds with a
   * {@link GetApplicationAttemptsResponse} which includes the
   * {@link ApplicationAttemptReport} for all the application attempts of a
   * specified application.
   * </p>
   *
   * <p>
   * If the user does not have <code>VIEW_APP</code> access for an application
   * then the corresponding report will be filtered as described in
   * {@link #getApplicationAttemptReport(GetApplicationAttemptReportRequest)}.
   * </p>
   *
   * @param request
   *          request for reports on all application attempts of an application
   * @return reports on all application attempts of an application
   * @throws YarnException
   * @throws IOException
   */
  @Public
  @Unstable
  public GetApplicationAttemptsResponse getApplicationAttempts(
      GetApplicationAttemptsRequest request) throws YarnException, IOException;

  /**
   * <p>
   * The interface used by clients to get a report of a Container from the
   * <code>ApplicationHistoryServer</code>.
   * </p>
   *
   * <p>
   * The client, via {@link GetContainerReportRequest} provides the
   * {@link ContainerId} of the container.
   * </p>
   *
   * <p>
   * In secure mode, the <code>ApplicationHistoryServer</code> verifies access to
   * the method before accepting the request.
   * </p>
   *
   * <p>
   * The <code>ApplicationHistoryServer</code> responds with a
   * {@link GetContainerReportResponse} which includes the
   * {@link ContainerReport} for the container.
   * </p>
   *
   * @param request
   *          request for a container report
   * @return container report
   * @throws YarnException
   * @throws IOException
   */
  @Public
  @Unstable
  public GetContainerReportResponse getContainerReport(
      GetContainerReportRequest request) throws YarnException, IOException;

  /**
   * <p>
   * The interface used by clients to get a report of Containers for an
   * application attempt from the <code>ApplicationHistoryServer</code>.
   * </p>
   *
   * <p>
   * The client, via {@link GetContainersRequest} provides the
   * {@link ApplicationAttemptId} of the application attempt.
   * </p>
   *
   * <p>
   * In secure mode, the <code>ApplicationHistoryServer</code> verifies access to
   * the method before accepting the request.
   * </p>
   *
   * <p>
   * The <code>ApplicationHistoryServer</code> responds with a
   * {@link GetContainersResponse} which includes a list of
   * {@link ContainerReport} for all the containers of a specific application
   * attempt.
   * </p>
   *
   * @param request
   *          request for a list of container reports of an application attempt.
   * @return reports on all containers of an application attempt
   * @throws YarnException
   * @throws IOException
   */
  @Public
  @Unstable
  public GetContainersResponse getContainers(GetContainersRequest request)
      throws YarnException, IOException;

  /**
   * <p>
   * The interface used by clients to get a delegation token, enabling the
   * containers to be able to talk to the service using those tokens.
   * </p>
   *
   * <p>
   * The <code>ApplicationHistoryServer</code> responds with the delegation
   * token {@link Token} that can be used by the client to speak to this
   * service.
   * </p>
   *
   * @param request
   *          request to get a delegation token for the client.
   * @return delegation token that can be used to talk to this service
   * @throws YarnException
   * @throws IOException
   */
  @Public
  @Unstable
  public GetDelegationTokenResponse getDelegationToken(
      GetDelegationTokenRequest request) throws YarnException, IOException;

  /**
   * Renew an existing delegation token.
   *
   * @param request
   *          the delegation token to be renewed.
   * @return the new expiry time for the delegation token.
   * @throws YarnException
   * @throws IOException
   */
  @Private
  @Unstable
  public RenewDelegationTokenResponse renewDelegationToken(
      RenewDelegationTokenRequest request) throws YarnException, IOException;

  /**
   * Cancel an existing delegation token.
   *
   * @param request
   *          the delegation token to be cancelled.
   * @return an empty response.
   * @throws YarnException
   * @throws IOException
   */
  @Private
  @Unstable
  public CancelDelegationTokenResponse cancelDelegationToken(
      CancelDelegationTokenRequest request) throws YarnException, IOException;
}
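A minimal sketch (not part of this commit) of how a caller might exercise the request/response records of the protocol above, assuming an ApplicationHistoryProtocol proxy has already been obtained elsewhere; the helper class and method names are illustrative assumptions:

import java.io.IOException;
import java.util.List;

import org.apache.hadoop.yarn.api.ApplicationHistoryProtocol;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptsRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetContainersRequest;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptReport;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerReport;
import org.apache.hadoop.yarn.exceptions.YarnException;

public class HistoryProtocolSketch {

  // Lists every attempt of an application and the containers of each attempt,
  // building requests with the newInstance factories defined by the new records.
  public static void dumpHistory(ApplicationHistoryProtocol history,
      ApplicationId appId) throws YarnException, IOException {
    List<ApplicationAttemptReport> attempts =
        history.getApplicationAttempts(
            GetApplicationAttemptsRequest.newInstance(appId))
            .getApplicationAttemptList();
    for (ApplicationAttemptReport attempt : attempts) {
      System.out.println(attempt.getApplicationAttemptId() + " -> "
          + attempt.getYarnApplicationAttemptState());
      List<ContainerReport> containers =
          history.getContainers(
              GetContainersRequest.newInstance(
                  attempt.getApplicationAttemptId())).getContainerList();
      for (ContainerReport container : containers) {
        System.out.println("  " + container.getContainerId());
      }
    }
  }
}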
@@ -0,0 +1,75 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.yarn.api.protocolrecords;

import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.yarn.api.ApplicationHistoryProtocol;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptReport;
import org.apache.hadoop.yarn.util.Records;

/**
 * <p>
 * The request sent by a client to the <code>ResourceManager</code> to get an
 * {@link ApplicationAttemptReport} for an application attempt.
 * </p>
 *
 * <p>
 * The request should include the {@link ApplicationAttemptId} of the
 * application attempt.
 * </p>
 *
 * @see ApplicationAttemptReport
 * @see ApplicationHistoryProtocol#getApplicationAttemptReport(GetApplicationAttemptReportRequest)
 */
@Public
@Unstable
public abstract class GetApplicationAttemptReportRequest {

  @Public
  @Unstable
  public static GetApplicationAttemptReportRequest newInstance(
      ApplicationAttemptId applicationAttemptId) {
    GetApplicationAttemptReportRequest request =
        Records.newRecord(GetApplicationAttemptReportRequest.class);
    request.setApplicationAttemptId(applicationAttemptId);
    return request;
  }

  /**
   * Get the <code>ApplicationAttemptId</code> of an application attempt.
   *
   * @return <code>ApplicationAttemptId</code> of an application attempt
   */
  @Public
  @Unstable
  public abstract ApplicationAttemptId getApplicationAttemptId();

  /**
   * Set the <code>ApplicationAttemptId</code> of an application attempt
   *
   * @param applicationAttemptId
   *          <code>ApplicationAttemptId</code> of an application attempt
   */
  @Public
  @Unstable
  public abstract void setApplicationAttemptId(
      ApplicationAttemptId applicationAttemptId);
}
@@ -0,0 +1,74 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.yarn.api.protocolrecords;

import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.yarn.api.ApplicationHistoryProtocol;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptReport;
import org.apache.hadoop.yarn.util.Records;

/**
 * <p>
 * The response sent by the <code>ResourceManager</code> to a client requesting
 * an application attempt report.
 * </p>
 *
 * <p>
 * The response includes an {@link ApplicationAttemptReport} which has the
 * details about the particular application attempt.
 * </p>
 *
 * @see ApplicationAttemptReport
 * @see ApplicationHistoryProtocol#getApplicationAttemptReport(GetApplicationAttemptReportRequest)
 */
@Public
@Unstable
public abstract class GetApplicationAttemptReportResponse {

  @Public
  @Unstable
  public static GetApplicationAttemptReportResponse newInstance(
      ApplicationAttemptReport ApplicationAttemptReport) {
    GetApplicationAttemptReportResponse response =
        Records.newRecord(GetApplicationAttemptReportResponse.class);
    response.setApplicationAttemptReport(ApplicationAttemptReport);
    return response;
  }

  /**
   * Get the <code>ApplicationAttemptReport</code> for the application attempt.
   *
   * @return <code>ApplicationAttemptReport</code> for the application attempt
   */
  @Public
  @Unstable
  public abstract ApplicationAttemptReport getApplicationAttemptReport();

  /**
   * Set the <code>ApplicationAttemptReport</code> for the application attempt.
   *
   * @param applicationAttemptReport
   *          <code>ApplicationAttemptReport</code> for the application attempt
   */
  @Public
  @Unstable
  public abstract void setApplicationAttemptReport(
      ApplicationAttemptReport applicationAttemptReport);
}
@@ -0,0 +1,67 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.yarn.api.protocolrecords;

import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.yarn.api.ApplicationHistoryProtocol;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.util.Records;

/**
 * <p>
 * The request from clients to get a list of application attempt reports of an
 * application from the <code>ResourceManager</code>.
 * </p>
 *
 * @see ApplicationHistoryProtocol#getApplicationAttempts(GetApplicationAttemptsRequest)
 */
@Public
@Unstable
public abstract class GetApplicationAttemptsRequest {

  @Public
  @Unstable
  public static GetApplicationAttemptsRequest newInstance(
      ApplicationId applicationId) {
    GetApplicationAttemptsRequest request =
        Records.newRecord(GetApplicationAttemptsRequest.class);
    request.setApplicationId(applicationId);
    return request;
  }

  /**
   * Get the <code>ApplicationId</code> of an application
   *
   * @return <code>ApplicationId</code> of an application
   */
  @Public
  @Unstable
  public abstract ApplicationId getApplicationId();

  /**
   * Set the <code>ApplicationId</code> of an application
   *
   * @param applicationId
   *          <code>ApplicationId</code> of an application
   */
  @Public
  @Unstable
  public abstract void setApplicationId(ApplicationId applicationId);
}
@@ -0,0 +1,76 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.yarn.api.protocolrecords;

import java.util.List;

import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.yarn.api.ApplicationHistoryProtocol;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptReport;
import org.apache.hadoop.yarn.util.Records;

/**
 * <p>
 * The response sent by the <code>ResourceManager</code> to a client requesting
 * a list of {@link ApplicationAttemptReport} for application attempts.
 * </p>
 *
 * <p>
 * The <code>ApplicationAttemptReport</code> for each application includes the
 * details of an application attempt.
 * </p>
 *
 * @see ApplicationAttemptReport
 * @see ApplicationHistoryProtocol#getApplicationAttempts(GetApplicationAttemptsRequest)
 */
@Public
@Unstable
public abstract class GetApplicationAttemptsResponse {

  @Public
  @Unstable
  public static GetApplicationAttemptsResponse newInstance(
      List<ApplicationAttemptReport> applicationAttempts) {
    GetApplicationAttemptsResponse response =
        Records.newRecord(GetApplicationAttemptsResponse.class);
    response.setApplicationAttemptList(applicationAttempts);
    return response;
  }

  /**
   * Get a list of <code>ApplicationAttemptReport</code> of an application.
   *
   * @return a list of <code>ApplicationAttemptReport</code> of an application
   */
  @Public
  @Unstable
  public abstract List<ApplicationAttemptReport> getApplicationAttemptList();

  /**
   * Set a list of <code>ApplicationAttemptReport</code> of an application.
   *
   * @param applicationAttempts
   *          a list of <code>ApplicationAttemptReport</code> of an application
   */
  @Public
  @Unstable
  public abstract void setApplicationAttemptList(
      List<ApplicationAttemptReport> applicationAttempts);
}
@@ -0,0 +1,64 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.yarn.api.protocolrecords;

import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ContainerReport;
import org.apache.hadoop.yarn.util.Records;

/**
 * <p>
 * The request sent by a client to the <code>ResourceManager</code> to get a
 * {@link ContainerReport} for a container.
 * </p>
 */
@Public
@Unstable
public abstract class GetContainerReportRequest {

  @Public
  @Unstable
  public static GetContainerReportRequest newInstance(ContainerId containerId) {
    GetContainerReportRequest request =
        Records.newRecord(GetContainerReportRequest.class);
    request.setContainerId(containerId);
    return request;
  }

  /**
   * Get the <code>ContainerId</code> of the Container.
   *
   * @return <code>ContainerId</code> of the Container
   */
  @Public
  @Unstable
  public abstract ContainerId getContainerId();

  /**
   * Set the <code>ContainerId</code> of the container
   *
   * @param containerId
   *          <code>ContainerId</code> of the container
   */
  @Public
  @Unstable
  public abstract void setContainerId(ContainerId containerId);
}
@@ -0,0 +1,63 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.yarn.api.protocolrecords;

import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.yarn.api.records.ContainerReport;
import org.apache.hadoop.yarn.util.Records;

/**
 * <p>
 * The response sent by the <code>ResourceManager</code> to a client requesting
 * a container report.
 * </p>
 *
 * <p>
 * The response includes a {@link ContainerReport} which has details of a
 * container.
 * </p>
 */
@Public
@Unstable
public abstract class GetContainerReportResponse {
  @Public
  @Unstable
  public static GetContainerReportResponse newInstance(
      ContainerReport containerReport) {
    GetContainerReportResponse response =
        Records.newRecord(GetContainerReportResponse.class);
    response.setContainerReport(containerReport);
    return response;
  }

  /**
   * Get the <code>ContainerReport</code> for the container.
   *
   * @return <code>ContainerReport</code> for the container
   */
  @Public
  @Unstable
  public abstract ContainerReport getContainerReport();

  @Public
  @Unstable
  public abstract void setContainerReport(ContainerReport containerReport);
}
@@ -0,0 +1,67 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.yarn.api.protocolrecords;

import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.yarn.api.ApplicationHistoryProtocol;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.util.Records;

/**
 * <p>
 * The request from clients to get a list of container reports, which belong to
 * an application attempt from the <code>ResourceManager</code>.
 * </p>
 *
 * @see ApplicationHistoryProtocol#getContainers(GetContainersRequest)
 */
@Public
@Unstable
public abstract class GetContainersRequest {
  @Public
  @Unstable
  public static GetContainersRequest newInstance(
      ApplicationAttemptId applicationAttemptId) {
    GetContainersRequest request =
        Records.newRecord(GetContainersRequest.class);
    request.setApplicationAttemptId(applicationAttemptId);
    return request;
  }

  /**
   * Get the <code>ApplicationAttemptId</code> of an application attempt.
   *
   * @return <code>ApplicationAttemptId</code> of an application attempt
   */
  @Public
  @Unstable
  public abstract ApplicationAttemptId getApplicationAttemptId();

  /**
   * Set the <code>ApplicationAttemptId</code> of an application attempt
   *
   * @param applicationAttemptId
   *          <code>ApplicationAttemptId</code> of an application attempt
   */
  @Public
  @Unstable
  public abstract void setApplicationAttemptId(
      ApplicationAttemptId applicationAttemptId);
}
@@ -0,0 +1,81 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.yarn.api.protocolrecords;

import java.util.List;

import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.yarn.api.ApplicationHistoryProtocol;
import org.apache.hadoop.yarn.api.records.ContainerReport;
import org.apache.hadoop.yarn.util.Records;

/**
 * <p>
 * The response sent by the <code>ResourceManager</code> to a client requesting
 * a list of {@link ContainerReport} for containers.
 * </p>
 *
 * <p>
 * The <code>ContainerReport</code> for each container includes the container
 * details.
 * </p>
 *
 * @see ContainerReport
 * @see ApplicationHistoryProtocol#getContainers(GetContainersRequest)
 */
@Public
@Unstable
public abstract class GetContainersResponse {

  @Public
  @Unstable
  public static GetContainersResponse newInstance(
      List<ContainerReport> containers) {
    GetContainersResponse response =
        Records.newRecord(GetContainersResponse.class);
    response.setContainerList(containers);
    return response;
  }

  /**
   * Get a list of <code>ContainerReport</code> for all the containers of an
   * application attempt.
   *
   * @return a list of <code>ContainerReport</code> for all the containers of an
   *         application attempt
   */
  @Public
  @Unstable
  public abstract List<ContainerReport> getContainerList();

  /**
   * Set a list of <code>ContainerReport</code> for all the containers of an
   * application attempt.
   *
   * @param containers
   *          a list of <code>ContainerReport</code> for all the containers of
   *          an application attempt
   */
  @Public
  @Unstable
  public abstract void setContainerList(List<ContainerReport> containers);
}
@@ -0,0 +1,165 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.yarn.api.records;

import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.yarn.util.Records;

/**
 * <p>
 * <code>ApplicationAttemptReport</code> is a report of an application attempt.
 * </p>
 *
 * <p>
 * It includes details such as:
 * <ul>
 * <li>{@link ApplicationAttemptId} of the application.</li>
 * <li>Host on which the <code>ApplicationMaster</code> of this attempt is
 * running.</li>
 * <li>RPC port of the <code>ApplicationMaster</code> of this attempt.</li>
 * <li>Tracking URL.</li>
 * <li>Diagnostic information in case of errors.</li>
 * <li>{@link YarnApplicationAttemptState} of the application attempt.</li>
 * <li>{@link ContainerId} of the master Container.</li>
 * </ul>
 * </p>
 */
@Public
@Unstable
public abstract class ApplicationAttemptReport {

  @Private
  @Unstable
  public static ApplicationAttemptReport newInstance(
      ApplicationAttemptId applicationAttemptId, String host, int rpcPort,
      String url, String diagnostics, YarnApplicationAttemptState state,
      ContainerId amContainerId) {
    ApplicationAttemptReport report =
        Records.newRecord(ApplicationAttemptReport.class);
    report.setApplicationAttemptId(applicationAttemptId);
    report.setHost(host);
    report.setRpcPort(rpcPort);
    report.setTrackingUrl(url);
    report.setDiagnostics(diagnostics);
    report.setYarnApplicationAttemptState(state);
    report.setAMContainerId(amContainerId);
    return report;
  }

  /**
   * Get the <em>YarnApplicationAttemptState</em> of the application attempt.
   *
   * @return <em>YarnApplicationAttemptState</em> of the application attempt
   */
  @Public
  @Unstable
  public abstract YarnApplicationAttemptState getYarnApplicationAttemptState();

  @Private
  @Unstable
  public abstract void setYarnApplicationAttemptState(
      YarnApplicationAttemptState yarnApplicationAttemptState);

  /**
   * Get the <em>RPC port</em> of this attempt <code>ApplicationMaster</code>.
   *
   * @return <em>RPC port</em> of this attempt <code>ApplicationMaster</code>
   */
  @Public
  @Unstable
  public abstract int getRpcPort();

  @Private
  @Unstable
  public abstract void setRpcPort(int rpcPort);

  /**
   * Get the <em>host</em> on which this attempt of
   * <code>ApplicationMaster</code> is running.
   *
   * @return <em>host</em> on which this attempt of
   *         <code>ApplicationMaster</code> is running
   */
  @Public
  @Unstable
  public abstract String getHost();

  @Private
  @Unstable
  public abstract void setHost(String host);

  /**
   * Get the <em>diagnostic information</em> of the application attempt in case
   * of errors.
   *
   * @return <em>diagnostic information</em> of the application attempt in case
   *         of errors
   */
  @Public
  @Unstable
  public abstract String getDiagnostics();

  @Private
  @Unstable
  public abstract void setDiagnostics(String diagnostics);

  /**
   * Get the <em>tracking url</em> for the application attempt.
   *
   * @return <em>tracking url</em> for the application attempt
   */
  @Public
  @Unstable
  public abstract String getTrackingUrl();

  @Private
  @Unstable
  public abstract void setTrackingUrl(String url);

  /**
   * Get the <code>ApplicationAttemptId</code> of this attempt of the
   * application
   *
   * @return <code>ApplicationAttemptId</code> of the attempt
   */
  @Public
  @Unstable
  public abstract ApplicationAttemptId getApplicationAttemptId();

  @Private
  @Unstable
  public abstract void setApplicationAttemptId(
      ApplicationAttemptId applicationAttemptId);

  /**
   * Get the <code>ContainerId</code> of AMContainer for this attempt
   *
   * @return <code>ContainerId</code> of the attempt
   */
  @Public
  @Unstable
  public abstract ContainerId getAMContainerId();

  @Private
  @Unstable
  public abstract void setAMContainerId(ContainerId amContainerId);
}
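A small illustrative sketch (not part of this commit) of building an ApplicationAttemptReport through the @Private newInstance factory, e.g. for a test; all ids, host names and URLs below are made-up assumptions:

import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptReport;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.YarnApplicationAttemptState;

public class AttemptReportRecordSketch {

  // Builds a sample report; the factory mirrors the fields documented above
  // and is normally invoked on the server/storage side, not by clients.
  public static ApplicationAttemptReport sampleReport() {
    ApplicationId appId = ApplicationId.newInstance(1390000000000L, 1);
    ApplicationAttemptId attemptId = ApplicationAttemptId.newInstance(appId, 1);
    ContainerId amContainer = ContainerId.newInstance(attemptId, 1);
    return ApplicationAttemptReport.newInstance(attemptId, "host.example.com",
        8030, "http://host.example.com:8088/proxy/application_1390000000000_0001",
        "N/A", YarnApplicationAttemptState.FINISHED, amContainer);
  }
}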
@ -0,0 +1,202 @@
|
|||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.yarn.api.records;
|
||||
|
||||
import org.apache.hadoop.classification.InterfaceAudience.Private;
|
||||
import org.apache.hadoop.classification.InterfaceAudience.Public;
|
||||
import org.apache.hadoop.classification.InterfaceStability.Unstable;
|
||||
import org.apache.hadoop.yarn.util.Records;
|
||||
|
||||
/**
|
||||
* <p>
|
||||
* <code>ContainerReport</code> is a report of a container.
|
||||
* </p>
|
||||
*
|
||||
* <p>
|
||||
* It includes details such as:
|
||||
* <ul>
|
||||
* <li>{@link ContainerId} of the container.</li>
|
||||
* <li>Allocated Resources to the container.</li>
|
||||
* <li>Assigned Node id.</li>
|
||||
* <li>Assigned Priority.</li>
|
||||
* <li>Start Time.</li>
|
||||
* <li>Finish Time.</li>
|
||||
* <li>Container Exit Status.</li>
|
||||
* <li>{@link ContainerState} of the container.</li>
|
||||
* <li>Diagnostic information in case of errors.</li>
|
||||
* <li>Log URL.</li>
|
||||
* </ul>
|
||||
* </p>
|
||||
*
|
||||
*/
|
||||
|
||||
@Public
|
||||
@Unstable
|
||||
public abstract class ContainerReport {
|
||||
@Private
|
||||
@Unstable
|
||||
public static ContainerReport newInstance(ContainerId containerId,
|
||||
Resource allocatedResource, NodeId assignedNode, Priority priority,
|
||||
long startTime, long finishTime, String diagnosticInfo, String logUrl,
|
||||
int containerExitStatus, ContainerState containerState) {
|
||||
ContainerReport report = Records.newRecord(ContainerReport.class);
|
||||
report.setContainerId(containerId);
|
||||
report.setAllocatedResource(allocatedResource);
|
||||
report.setAssignedNode(assignedNode);
|
||||
report.setPriority(priority);
|
||||
report.setStartTime(startTime);
|
||||
report.setFinishTime(finishTime);
|
||||
report.setDiagnosticsInfo(diagnosticInfo);
|
||||
report.setLogUrl(logUrl);
|
||||
report.setContainerExitStatus(containerExitStatus);
|
||||
report.setContainerState(containerState);
|
||||
return report;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the <code>ContainerId</code> of the container.
|
||||
*
|
||||
* @return <code>ContainerId</code> of the container.
|
||||
*/
|
||||
@Public
|
||||
@Unstable
|
||||
public abstract ContainerId getContainerId();
|
||||
|
||||
@Public
|
||||
@Unstable
|
||||
public abstract void setContainerId(ContainerId containerId);
|
||||
|
||||
/**
|
||||
* Get the allocated <code>Resource</code> of the container.
|
||||
*
|
||||
* @return allocated <code>Resource</code> of the container.
|
||||
*/
|
||||
@Public
|
||||
@Unstable
|
||||
public abstract Resource getAllocatedResource();
|
||||
|
||||
@Public
|
||||
@Unstable
|
||||
public abstract void setAllocatedResource(Resource resource);
|
||||
|
||||
/**
|
||||
* Get the allocated <code>NodeId</code> where container is running.
|
||||
*
|
||||
* @return allocated <code>NodeId</code> where container is running.
|
||||
*/
|
||||
@Public
|
||||
@Unstable
|
||||
public abstract NodeId getAssignedNode();
|
||||
|
||||
@Public
|
||||
@Unstable
|
||||
public abstract void setAssignedNode(NodeId nodeId);
|
||||
|
||||
/**
|
||||
* Get the allocated <code>Priority</code> of the container.
|
||||
*
|
||||
* @return allocated <code>Priority</code> of the container.
|
||||
*/
|
||||
@Public
|
||||
@Unstable
|
||||
public abstract Priority getPriority();
|
||||
|
||||
@Public
|
||||
@Unstable
|
||||
public abstract void setPriority(Priority priority);
|
||||
|
||||
/**
|
||||
* Get the Start time of the container.
|
||||
*
|
||||
* @return Start time of the container
|
||||
*/
|
||||
@Public
|
||||
@Unstable
|
||||
public abstract long getStartTime();
|
||||
|
||||
@Public
|
||||
@Unstable
|
||||
public abstract void setStartTime(long startTime);
|
||||
|
||||
/**
|
||||
* Get the Finish time of the container.
|
||||
*
|
||||
* @return Finish time of the container
|
||||
*/
|
||||
@Public
|
||||
@Unstable
|
||||
public abstract long getFinishTime();
|
||||
|
||||
@Public
|
||||
@Unstable
|
||||
public abstract void setFinishTime(long finishTime);
|
||||
|
||||
/**
|
||||
* Get the DiagnosticsInfo of the container.
|
||||
*
|
||||
* @return DiagnosticsInfo of the container
|
||||
*/
|
||||
@Public
|
||||
@Unstable
|
||||
public abstract String getDiagnosticsInfo();
|
||||
|
||||
@Public
|
||||
@Unstable
|
||||
public abstract void setDiagnosticsInfo(String diagnosticsInfo);
|
||||
|
||||
/**
|
||||
* Get the LogURL of the container.
|
||||
*
|
||||
* @return LogURL of the container
|
||||
*/
|
||||
@Public
|
||||
@Unstable
|
||||
public abstract String getLogUrl();
|
||||
|
||||
@Public
|
||||
@Unstable
|
||||
public abstract void setLogUrl(String logUrl);
|
||||
|
||||
/**
|
||||
* Get the final <code>ContainerState</code> of the container.
|
||||
*
|
||||
* @return final <code>ContainerState</code> of the container.
|
||||
*/
|
||||
@Public
|
||||
@Unstable
|
||||
public abstract ContainerState getContainerState();
|
||||
|
||||
@Public
|
||||
@Unstable
|
||||
public abstract void setContainerState(ContainerState containerState);
|
||||
|
||||
/**
|
||||
* Get the final <code>exit status</code> of the container.
|
||||
*
|
||||
* @return final <code>exit status</code> of the container.
|
||||
*/
|
||||
@Public
|
||||
@Unstable
|
||||
public abstract int getContainerExitStatus();
|
||||
|
||||
@Public
|
||||
@Unstable
|
||||
public abstract void setContainerExitStatus(int containerExitStatus);
|
||||
|
||||
}
|
|
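Editor's note: a hedged sketch of assembling a ContainerReport through the newInstance factory above. The record factories Resource.newInstance, NodeId.newInstance and Priority.newInstance are assumed from the existing YARN records API, and 'containerId', the host names, times and values are invented placeholders.

    // Sketch only (not part of this patch): building a ContainerReport.
    ContainerReport report = ContainerReport.newInstance(
        containerId,
        Resource.newInstance(1024, 1),                    // 1024 MB, 1 vcore
        NodeId.newInstance("worker-1.example.com", 45454),
        Priority.newInstance(0),
        1390000000000L,                                   // start time (ms)
        1390000600000L,                                   // finish time (ms)
        "N/A",                                            // diagnostics
        "http://worker-1.example.com:8042/logs",          // log URL
        0,                                                // exit status
        ContainerState.COMPLETE);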
@@ -0,0 +1,66 @@
|
|||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.yarn.api.records;
|
||||
|
||||
import org.apache.hadoop.classification.InterfaceAudience.Public;
|
||||
import org.apache.hadoop.classification.InterfaceStability.Stable;
|
||||
|
||||
/**
|
||||
* Enumeration of various states of a <code>RMAppAttempt</code>.
|
||||
*/
|
||||
@Public
|
||||
@Stable
|
||||
public enum YarnApplicationAttemptState {
|
||||
/** AppAttempt was just created. */
|
||||
NEW,
|
||||
|
||||
/** AppAttempt has been submitted. */
|
||||
SUBMITTED,
|
||||
|
||||
/** AppAttempt was scheduled */
|
||||
SCHEDULED,
|
||||
|
||||
/** Acquired AM Container from Scheduler and Saving AppAttempt Data */
|
||||
ALLOCATED_SAVING,
|
||||
|
||||
/** AppAttempt Data was saved */
|
||||
ALLOCATED,
|
||||
|
||||
/** AppAttempt was launched */
|
||||
LAUNCHED,
|
||||
|
||||
/** AppAttempt failed. */
|
||||
FAILED,
|
||||
|
||||
/** AppAttempt is currently running. */
|
||||
RUNNING,
|
||||
|
||||
/** AppAttempt is waiting for its state to be saved. */
|
||||
FINAL_SAVING,
|
||||
|
||||
/** AppAttempt is finishing. */
|
||||
FINISHING,
|
||||
|
||||
/** AppAttempt finished successfully. */
|
||||
FINISHED,
|
||||
|
||||
/** AppAttempt was terminated by a user or admin. */
|
||||
KILLED
|
||||
|
||||
}
|
|
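Editor's note: an illustrative helper, not part of the patch, showing how a caller might use the enum, for example to decide whether an attempt has reached a terminal state.

    // Illustrative only: is the attempt in a terminal state?
    static boolean isAttemptFinished(YarnApplicationAttemptState state) {
      switch (state) {
        case FINISHED:
        case FAILED:
        case KILLED:
          return true;
        default:
          return false;
      }
    }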
@@ -263,6 +263,17 @@ public class YarnConfiguration extends Configuration {
|
|||
RM_PREFIX + "nodemanagers.heartbeat-interval-ms";
|
||||
public static final long DEFAULT_RM_NM_HEARTBEAT_INTERVAL_MS = 1000;
|
||||
|
||||
/** Number of worker threads that write the history data. */
|
||||
public static final String RM_HISTORY_WRITER_MULTI_THREADED_DISPATCHER_POOL_SIZE =
|
||||
RM_PREFIX + "history-writer.multi-threaded-dispatcher.pool-size";
|
||||
public static final int DEFAULT_RM_HISTORY_WRITER_MULTI_THREADED_DISPATCHER_POOL_SIZE =
|
||||
10;
|
||||
|
||||
/** The implementation class of ApplicationHistoryStore, which is to be used
|
||||
* by RMApplicationHistoryWriter. */
|
||||
public static final String RM_HISTORY_WRITER_CLASS = RM_PREFIX
|
||||
+ "history-writer.class";
|
||||
|
||||
//Delegation token related keys
|
||||
public static final String DELEGATION_KEY_UPDATE_INTERVAL_KEY =
|
||||
RM_PREFIX + "delegation.key.update-interval";
|
||||
|
@@ -931,6 +942,63 @@ public class YarnConfiguration extends Configuration {
|
|||
public static final String YARN_APP_CONTAINER_LOG_BACKUPS =
|
||||
YARN_PREFIX + "app.container.log.backups";
|
||||
|
||||
////////////////////////////////
|
||||
// AHS Configs
|
||||
////////////////////////////////
|
||||
|
||||
public static final String AHS_PREFIX = YARN_PREFIX + "ahs.";
|
||||
|
||||
/** The setting that controls whether the history service is enabled or not. */
|
||||
public static final String YARN_HISTORY_SERVICE_ENABLED = AHS_PREFIX
|
||||
+ ".enabled";
|
||||
public static final boolean DEFAULT_YARN_HISTORY_SERVICE_ENABLED = false;
|
||||
|
||||
/** URI for FileSystemApplicationHistoryStore */
|
||||
public static final String FS_HISTORY_STORE_URI = AHS_PREFIX + "fs-history-store.uri";
|
||||
|
||||
/** T-file compression types used to compress history data.*/
|
||||
public static final String FS_HISTORY_STORE_COMPRESSION_TYPE = AHS_PREFIX + "fs-history-store.compression-type";
|
||||
public static final String DEFAULT_FS_HISTORY_STORE_COMPRESSION_TYPE = "none";
|
||||
|
||||
/** AHS store class */
|
||||
public static final String AHS_STORE = AHS_PREFIX + "store.class";
|
||||
|
||||
/** host:port address for Application History Server API. */
|
||||
public static final String AHS_ADDRESS = AHS_PREFIX + "address";
|
||||
public static final int DEFAULT_AHS_PORT = 10200;
|
||||
public static final String DEFAULT_AHS_ADDRESS = "0.0.0.0:"
|
||||
+ DEFAULT_AHS_PORT;
|
||||
|
||||
/** The number of threads to handle client API requests. */
|
||||
public static final String AHS_CLIENT_THREAD_COUNT = AHS_PREFIX
|
||||
+ "client.thread-count";
|
||||
public static final int DEFAULT_AHS_CLIENT_THREAD_COUNT = 10;
|
||||
|
||||
|
||||
/** The address of the AHS web application.*/
|
||||
public static final String AHS_WEBAPP_ADDRESS = AHS_PREFIX
|
||||
+ "webapp.address";
|
||||
|
||||
public static final int DEFAULT_AHS_WEBAPP_PORT = 8188;
|
||||
public static final String DEFAULT_AHS_WEBAPP_ADDRESS = "0.0.0.0:"
|
||||
+ DEFAULT_AHS_WEBAPP_PORT;
|
||||
|
||||
/** The https address of the AHS web application.*/
|
||||
public static final String AHS_WEBAPP_HTTPS_ADDRESS = AHS_PREFIX
|
||||
+ "webapp.https.address";
|
||||
|
||||
public static final int DEFAULT_AHS_WEBAPP_HTTPS_PORT = 8190;
|
||||
public static final String DEFAULT_AHS_WEBAPP_HTTPS_ADDRESS = "0.0.0.0:"
|
||||
+ DEFAULT_AHS_WEBAPP_HTTPS_PORT;
|
||||
|
||||
/**The kerberos principal to be used for spnego filter for AHS.*/
|
||||
public static final String AHS_WEBAPP_SPNEGO_USER_NAME_KEY =
|
||||
AHS_PREFIX + "webapp.spnego-principal";
|
||||
|
||||
/**The kerberos keytab to be used for spnego filter for AHS.*/
|
||||
public static final String AHS_WEBAPP_SPNEGO_KEYTAB_FILE_KEY =
|
||||
AHS_PREFIX + "webapp.spnego-keytab-file";
|
||||
|
||||
////////////////////////////////
|
||||
// Other Configs
|
||||
////////////////////////////////
|
||||
|
|
|
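Editor's note: a hedged example of wiring up the new AHS keys from code using the constants added above; the host names and store URI are placeholders, not defaults shipped by this patch. The same properties can of course be set in yarn-site.xml.

    // Example configuration for the Application History Service keys above.
    YarnConfiguration conf = new YarnConfiguration();
    conf.setBoolean(YarnConfiguration.YARN_HISTORY_SERVICE_ENABLED, true);
    conf.set(YarnConfiguration.AHS_ADDRESS, "historyserver.example.com:10200");
    conf.set(YarnConfiguration.AHS_WEBAPP_ADDRESS, "historyserver.example.com:8188");
    conf.set(YarnConfiguration.FS_HISTORY_STORE_URI, "hdfs:///yarn/ahs");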
@@ -0,0 +1,48 @@
|
|||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.yarn.exceptions;
|
||||
|
||||
import org.apache.hadoop.classification.InterfaceAudience.Public;
|
||||
import org.apache.hadoop.classification.InterfaceStability.Unstable;
|
||||
import org.apache.hadoop.yarn.api.ApplicationHistoryProtocol;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptReportRequest;
|
||||
|
||||
/**
|
||||
* This exception is thrown on
|
||||
* {@link ApplicationHistoryProtocol#getApplicationAttemptReport(GetApplicationAttemptReportRequest)}
|
||||
* API when the Application Attempt doesn't exist in Application History Server
|
||||
*/
|
||||
@Public
|
||||
@Unstable
|
||||
public class ApplicationAttemptNotFoundException extends YarnException {
|
||||
|
||||
private static final long serialVersionUID = 8694508L;
|
||||
|
||||
public ApplicationAttemptNotFoundException(Throwable cause) {
|
||||
super(cause);
|
||||
}
|
||||
|
||||
public ApplicationAttemptNotFoundException(String message) {
|
||||
super(message);
|
||||
}
|
||||
|
||||
public ApplicationAttemptNotFoundException(String message, Throwable cause) {
|
||||
super(message, cause);
|
||||
}
|
||||
}
|
|
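Editor's note: a short sketch of the intended caller-side handling of the new exception; 'ahsClient' and 'attemptId' are placeholder names for a started history client and an attempt id.

    // Sketch: handling the not-found case when querying the history server.
    try {
      ApplicationAttemptReport report =
          ahsClient.getApplicationAttemptReport(attemptId);
      System.out.println("Attempt state: " + report.getYarnApplicationAttemptState());
    } catch (ApplicationAttemptNotFoundException e) {
      System.err.println("Attempt " + attemptId + " is not known to the history server");
    }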
@@ -18,14 +18,19 @@
|
|||
|
||||
package org.apache.hadoop.yarn.exceptions;
|
||||
|
||||
import org.apache.hadoop.classification.InterfaceAudience.Public;
|
||||
import org.apache.hadoop.classification.InterfaceStability.Unstable;
|
||||
import org.apache.hadoop.yarn.api.ApplicationClientProtocol;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportRequest;
|
||||
|
||||
/**
|
||||
* This exception is thrown on
|
||||
* {@link ApplicationClientProtocol#getApplicationReport(GetApplicationReportRequest)} API
|
||||
* when the Application doesn't exist in RM
|
||||
* {@link ApplicationClientProtocol#getApplicationReport
|
||||
* (GetApplicationReportRequest)} API
|
||||
* when the Application doesn't exist in RM and AHS
|
||||
*/
|
||||
@Public
|
||||
@Unstable
|
||||
public class ApplicationNotFoundException extends YarnException{
|
||||
|
||||
private static final long serialVersionUID = 8694408L;
|
||||
|
|
|
@@ -0,0 +1,48 @@
|
|||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.yarn.exceptions;
|
||||
|
||||
import org.apache.hadoop.classification.InterfaceAudience.Public;
|
||||
import org.apache.hadoop.classification.InterfaceStability.Unstable;
|
||||
import org.apache.hadoop.yarn.api.ApplicationHistoryProtocol;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.GetContainerReportRequest;
|
||||
|
||||
/**
|
||||
* This exception is thrown on
|
||||
* {@link ApplicationHistoryProtocol#getContainerReport(GetContainerReportRequest)}
|
||||
* API when the container doesn't exist in AHS
|
||||
*/
|
||||
@Public
|
||||
@Unstable
|
||||
public class ContainerNotFoundException extends YarnException {
|
||||
|
||||
private static final long serialVersionUID = 8694608L;
|
||||
|
||||
public ContainerNotFoundException(Throwable cause) {
|
||||
super(cause);
|
||||
}
|
||||
|
||||
public ContainerNotFoundException(String message) {
|
||||
super(message);
|
||||
}
|
||||
|
||||
public ContainerNotFoundException(String message, Throwable cause) {
|
||||
super(message, cause);
|
||||
}
|
||||
}
|
|
@@ -0,0 +1,39 @@
|
|||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
option java_package = "org.apache.hadoop.yarn.proto";
|
||||
option java_outer_classname = "ApplicationHistoryProtocol";
|
||||
option java_generic_services = true;
|
||||
option java_generate_equals_and_hash = true;
|
||||
package hadoop.yarn;
|
||||
|
||||
import "Security.proto";
|
||||
import "yarn_service_protos.proto";
|
||||
|
||||
service ApplicationHistoryProtocolService {
|
||||
rpc getApplicationReport (GetApplicationReportRequestProto) returns (GetApplicationReportResponseProto);
|
||||
rpc getApplications (GetApplicationsRequestProto) returns (GetApplicationsResponseProto);
|
||||
rpc getApplicationAttemptReport (GetApplicationAttemptReportRequestProto) returns (GetApplicationAttemptReportResponseProto);
|
||||
rpc getApplicationAttempts (GetApplicationAttemptsRequestProto) returns (GetApplicationAttemptsResponseProto);
|
||||
rpc getContainerReport (GetContainerReportRequestProto) returns (GetContainerReportResponseProto);
|
||||
rpc getContainers (GetContainersRequestProto) returns (GetContainersResponseProto);
|
||||
rpc getDelegationToken(hadoop.common.GetDelegationTokenRequestProto) returns (hadoop.common.GetDelegationTokenResponseProto);
|
||||
rpc renewDelegationToken(hadoop.common.RenewDelegationTokenRequestProto) returns (hadoop.common.RenewDelegationTokenResponseProto);
|
||||
rpc cancelDelegationToken(hadoop.common.CancelDelegationTokenRequestProto) returns (hadoop.common.CancelDelegationTokenResponseProto);
|
||||
}
|
||||
|
|
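Editor's note: the generated service is consumed through the ApplicationHistoryProtocol Java interface; a hedged sketch of a raw protocol call follows. The proxy creation mirrors AHSClientImpl further down in this patch, 'conf' and 'appId' are placeholders, and the IOException from createAHSProxy still has to be handled by the caller.

    // Sketch of a direct protocol call; AHSClientImpl below wraps this pattern.
    ApplicationHistoryProtocol history = AHSProxy.createAHSProxy(conf,
        ApplicationHistoryProtocol.class,
        conf.getSocketAddr(YarnConfiguration.AHS_ADDRESS,
            YarnConfiguration.DEFAULT_AHS_ADDRESS,
            YarnConfiguration.DEFAULT_AHS_PORT));
    GetApplicationReportResponse response = history.getApplicationReport(
        GetApplicationReportRequest.newInstance(appId));
    ApplicationReport report = response.getApplicationReport();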
@@ -0,0 +1,113 @@
|
|||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
option java_package = "org.apache.hadoop.yarn.proto";
|
||||
option java_outer_classname = "ApplicationHistoryServerProtos";
|
||||
option java_generic_services = true;
|
||||
option java_generate_equals_and_hash = true;
|
||||
package hadoop.yarn;
|
||||
|
||||
import "yarn_protos.proto";
|
||||
|
||||
message ApplicationHistoryDataProto {
|
||||
optional ApplicationIdProto application_id = 1;
|
||||
optional string application_name = 2;
|
||||
optional string application_type = 3;
|
||||
optional string user = 4;
|
||||
optional string queue = 5;
|
||||
optional int64 submit_time = 6;
|
||||
optional int64 start_time = 7;
|
||||
optional int64 finish_time = 8;
|
||||
optional string diagnostics_info = 9;
|
||||
optional FinalApplicationStatusProto final_application_status = 10;
|
||||
optional YarnApplicationStateProto yarn_application_state = 11;
|
||||
}
|
||||
|
||||
message ApplicationStartDataProto {
|
||||
optional ApplicationIdProto application_id = 1;
|
||||
optional string application_name = 2;
|
||||
optional string application_type = 3;
|
||||
optional string user = 4;
|
||||
optional string queue = 5;
|
||||
optional int64 submit_time = 6;
|
||||
optional int64 start_time = 7;
|
||||
}
|
||||
|
||||
message ApplicationFinishDataProto {
|
||||
optional ApplicationIdProto application_id = 1;
|
||||
optional int64 finish_time = 2;
|
||||
optional string diagnostics_info = 3;
|
||||
optional FinalApplicationStatusProto final_application_status = 4;
|
||||
optional YarnApplicationStateProto yarn_application_state = 5;
|
||||
}
|
||||
|
||||
message ApplicationAttemptHistoryDataProto {
|
||||
optional ApplicationAttemptIdProto application_attempt_id = 1;
|
||||
optional string host = 2;
|
||||
optional int32 rpc_port = 3;
|
||||
optional string tracking_url = 4;
|
||||
optional string diagnostics_info = 5;
|
||||
optional FinalApplicationStatusProto final_application_status = 6;
|
||||
optional ContainerIdProto master_container_id = 7;
|
||||
optional YarnApplicationAttemptStateProto yarn_application_attempt_state = 8;
|
||||
}
|
||||
|
||||
message ApplicationAttemptStartDataProto {
|
||||
optional ApplicationAttemptIdProto application_attempt_id = 1;
|
||||
optional string host = 2;
|
||||
optional int32 rpc_port = 3;
|
||||
optional ContainerIdProto master_container_id = 4;
|
||||
}
|
||||
|
||||
message ApplicationAttemptFinishDataProto {
|
||||
optional ApplicationAttemptIdProto application_attempt_id = 1;
|
||||
optional string tracking_url = 2;
|
||||
optional string diagnostics_info = 3;
|
||||
optional FinalApplicationStatusProto final_application_status = 4;
|
||||
optional YarnApplicationAttemptStateProto yarn_application_attempt_state = 5;
|
||||
}
|
||||
|
||||
message ContainerHistoryDataProto {
|
||||
optional ContainerIdProto container_id = 1;
|
||||
optional ResourceProto allocated_resource = 2;
|
||||
optional NodeIdProto assigned_node_id = 3;
|
||||
optional PriorityProto priority = 4;
|
||||
optional int64 start_time = 5;
|
||||
optional int64 finish_time = 6;
|
||||
optional string diagnostics_info = 7;
|
||||
optional string log_url = 8;
|
||||
optional int32 container_exit_status = 9;
|
||||
optional ContainerStateProto container_state = 10;
|
||||
}
|
||||
|
||||
message ContainerStartDataProto {
|
||||
optional ContainerIdProto container_id = 1;
|
||||
optional ResourceProto allocated_resource = 2;
|
||||
optional NodeIdProto assigned_node_id = 3;
|
||||
optional PriorityProto priority = 4;
|
||||
optional int64 start_time = 5;
|
||||
}
|
||||
|
||||
message ContainerFinishDataProto {
|
||||
optional ContainerIdProto container_id = 1;
|
||||
optional int64 finish_time = 2;
|
||||
optional string diagnostics_info = 3;
|
||||
optional string log_url = 4;
|
||||
optional int32 container_exit_status = 5;
|
||||
optional ContainerStateProto container_state = 6;
|
||||
}
|
|
@@ -87,6 +87,19 @@ message ContainerProto {
|
|||
optional hadoop.common.TokenProto container_token = 6;
|
||||
}
|
||||
|
||||
message ContainerReportProto {
|
||||
optional ContainerIdProto container_id = 1;
|
||||
optional ResourceProto resource = 2;
|
||||
optional NodeIdProto node_id = 3;
|
||||
optional PriorityProto priority = 4;
|
||||
optional int64 start_time = 5;
|
||||
optional int64 finish_time = 6;
|
||||
optional string diagnostics_info = 7 [default = "N/A"];
|
||||
optional string log_url = 8;
|
||||
optional int32 container_exit_status = 9;
|
||||
optional ContainerStateProto container_state = 10;
|
||||
}
|
||||
|
||||
enum YarnApplicationStateProto {
|
||||
NEW = 1;
|
||||
NEW_SAVING = 2;
|
||||
|
@@ -98,6 +111,21 @@ enum YarnApplicationStateProto {
|
|||
KILLED = 8;
|
||||
}
|
||||
|
||||
enum YarnApplicationAttemptStateProto {
|
||||
APP_ATTEMPT_NEW = 1;
|
||||
APP_ATTEMPT_SUBMITTED = 2;
|
||||
APP_ATTEMPT_SCHEDULED = 3;
|
||||
APP_ATTEMPT_ALLOCATED_SAVING = 4;
|
||||
APP_ATTEMPT_ALLOCATED = 5;
|
||||
APP_ATTEMPT_LAUNCHED = 6;
|
||||
APP_ATTEMPT_FAILED = 7;
|
||||
APP_ATTEMPT_RUNNING = 8;
|
||||
APP_ATTEMPT_FINAL_SAVING = 9;
|
||||
APP_ATTEMPT_FINISHING = 10;
|
||||
APP_ATTEMPT_FINISHED = 11;
|
||||
APP_ATTEMPT_KILLED = 12;
|
||||
}
|
||||
|
||||
enum FinalApplicationStatusProto {
|
||||
APP_UNDEFINED = 0;
|
||||
APP_SUCCEEDED = 1;
|
||||
|
@@ -164,6 +192,16 @@ message ApplicationReportProto {
|
|||
optional hadoop.common.TokenProto am_rm_token = 19;
|
||||
}
|
||||
|
||||
message ApplicationAttemptReportProto {
|
||||
optional ApplicationAttemptIdProto application_attempt_id = 1;
|
||||
optional string host = 2;
|
||||
optional int32 rpc_port = 3;
|
||||
optional string tracking_url = 4;
|
||||
optional string diagnostics = 5 [default = "N/A"];
|
||||
optional YarnApplicationAttemptStateProto yarn_application_attempt_state = 6;
|
||||
optional ContainerIdProto am_container_id = 7;
|
||||
}
|
||||
|
||||
enum NodeStateProto {
|
||||
NS_NEW = 1;
|
||||
NS_RUNNING = 2;
|
||||
|
|
|
@@ -240,3 +240,39 @@ message GetContainerStatusesResponseProto {
|
|||
repeated ContainerStatusProto status = 1;
|
||||
repeated ContainerExceptionMapProto failed_requests = 2;
|
||||
}
|
||||
|
||||
//////////////////////////////////////////////////////
|
||||
/////// Application_History_Protocol /////////////////
|
||||
//////////////////////////////////////////////////////
|
||||
|
||||
message GetApplicationAttemptReportRequestProto {
|
||||
optional ApplicationAttemptIdProto application_attempt_id = 1;
|
||||
}
|
||||
|
||||
message GetApplicationAttemptReportResponseProto {
|
||||
optional ApplicationAttemptReportProto application_attempt_report = 1;
|
||||
}
|
||||
|
||||
message GetApplicationAttemptsRequestProto {
|
||||
optional ApplicationIdProto application_id = 1;
|
||||
}
|
||||
|
||||
message GetApplicationAttemptsResponseProto {
|
||||
repeated ApplicationAttemptReportProto application_attempts = 1;
|
||||
}
|
||||
|
||||
message GetContainerReportRequestProto {
|
||||
optional ContainerIdProto container_id = 1;
|
||||
}
|
||||
|
||||
message GetContainerReportResponseProto {
|
||||
optional ContainerReportProto container_report = 1;
|
||||
}
|
||||
|
||||
message GetContainersRequestProto {
|
||||
optional ApplicationAttemptIdProto application_attempt_id = 1;
|
||||
}
|
||||
|
||||
message GetContainersResponseProto {
|
||||
repeated ContainerReportProto containers = 1;
|
||||
}
|
||||
|
|
|
@@ -0,0 +1,180 @@
|
|||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.yarn.client.api;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
|
||||
import org.apache.hadoop.classification.InterfaceAudience;
|
||||
import org.apache.hadoop.classification.InterfaceAudience.Private;
|
||||
import org.apache.hadoop.classification.InterfaceAudience.Public;
|
||||
import org.apache.hadoop.classification.InterfaceStability;
|
||||
import org.apache.hadoop.service.AbstractService;
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationAttemptReport;
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationId;
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationReport;
|
||||
import org.apache.hadoop.yarn.api.records.ContainerId;
|
||||
import org.apache.hadoop.yarn.api.records.ContainerReport;
|
||||
import org.apache.hadoop.yarn.client.api.impl.AHSClientImpl;
|
||||
import org.apache.hadoop.yarn.exceptions.ApplicationAttemptNotFoundException;
|
||||
import org.apache.hadoop.yarn.exceptions.ContainerNotFoundException;
|
||||
import org.apache.hadoop.yarn.exceptions.YarnException;
|
||||
|
||||
@InterfaceAudience.Public
|
||||
@InterfaceStability.Stable
|
||||
public abstract class AHSClient extends AbstractService {
|
||||
|
||||
/**
|
||||
* Create a new instance of AHSClient.
|
||||
*/
|
||||
@Public
|
||||
public static AHSClient createAHSClient() {
|
||||
AHSClient client = new AHSClientImpl();
|
||||
return client;
|
||||
}
|
||||
|
||||
@Private
|
||||
public AHSClient(String name) {
|
||||
super(name);
|
||||
}
|
||||
|
||||
/**
|
||||
* <p>
|
||||
* Get a report of the given Application.
|
||||
* </p>
|
||||
*
|
||||
* <p>
|
||||
* In secure mode, <code>YARN</code> verifies access to the application, queue
|
||||
* etc. before accepting the request.
|
||||
* </p>
|
||||
*
|
||||
* <p>
|
||||
* If the user does not have <code>VIEW_APP</code> access then the following
|
||||
* fields in the report will be set to stubbed values:
|
||||
* <ul>
|
||||
* <li>host - set to "N/A"</li>
|
||||
* <li>RPC port - set to -1</li>
|
||||
* <li>client token - set to "N/A"</li>
|
||||
* <li>diagnostics - set to "N/A"</li>
|
||||
* <li>tracking URL - set to "N/A"</li>
|
||||
* <li>original tracking URL - set to "N/A"</li>
|
||||
* <li>resource usage report - all values are -1</li>
|
||||
* </ul>
|
||||
* </p>
|
||||
*
|
||||
* @param appId
|
||||
* {@link ApplicationId} of the application that needs a report
|
||||
* @return application report
|
||||
* @throws YarnException
|
||||
* @throws IOException
|
||||
*/
|
||||
public abstract ApplicationReport getApplicationReport(ApplicationId appId)
|
||||
throws YarnException, IOException;
|
||||
|
||||
/**
|
||||
* <p>
|
||||
* Get a report (ApplicationReport) of all Applications in the cluster.
|
||||
* </p>
|
||||
*
|
||||
* <p>
|
||||
* If the user does not have <code>VIEW_APP</code> access for an application
|
||||
* then the corresponding report will be filtered as described in
|
||||
* {@link #getApplicationReport(ApplicationId)}.
|
||||
* </p>
|
||||
*
|
||||
* @return a list of reports for all applications
|
||||
* @throws YarnException
|
||||
* @throws IOException
|
||||
*/
|
||||
public abstract List<ApplicationReport> getApplications()
|
||||
throws YarnException, IOException;
|
||||
|
||||
/**
|
||||
* <p>
|
||||
* Get a report of the given ApplicationAttempt.
|
||||
* </p>
|
||||
*
|
||||
* <p>
|
||||
* In secure mode, <code>YARN</code> verifies access to the application, queue
|
||||
* etc. before accepting the request.
|
||||
* </p>
|
||||
*
|
||||
* @param applicationAttemptId
|
||||
* {@link ApplicationAttemptId} of the application attempt that needs
|
||||
* a report
|
||||
* @return application attempt report
|
||||
* @throws YarnException
|
||||
* @throws ApplicationAttemptNotFoundException if application attempt
|
||||
* not found
|
||||
* @throws IOException
|
||||
*/
|
||||
public abstract ApplicationAttemptReport getApplicationAttemptReport(
|
||||
ApplicationAttemptId applicationAttemptId) throws YarnException,
|
||||
IOException;
|
||||
|
||||
/**
|
||||
* <p>
|
||||
* Get reports of all ApplicationAttempts of the given Application in the cluster.
|
||||
* </p>
|
||||
*
|
||||
* @param applicationId
|
||||
* @return a list of reports for all application attempts for specified
|
||||
* application
|
||||
* @throws YarnException
|
||||
* @throws IOException
|
||||
*/
|
||||
public abstract List<ApplicationAttemptReport> getApplicationAttempts(
|
||||
ApplicationId applicationId) throws YarnException, IOException;
|
||||
|
||||
/**
|
||||
* <p>
|
||||
* Get a report of the given Container.
|
||||
* </p>
|
||||
*
|
||||
* <p>
|
||||
* In secure mode, <code>YARN</code> verifies access to the application, queue
|
||||
* etc. before accepting the request.
|
||||
* </p>
|
||||
*
|
||||
* @param containerId
|
||||
* {@link ContainerId} of the container that needs a report
|
||||
* @return container report
|
||||
* @throws YarnException
|
||||
* @throws ContainerNotFoundException if container not found
|
||||
* @throws IOException
|
||||
*/
|
||||
public abstract ContainerReport getContainerReport(ContainerId containerId)
|
||||
throws YarnException, IOException;
|
||||
|
||||
/**
|
||||
* <p>
|
||||
* Get reports of all Containers of the given ApplicationAttempt in the cluster.
|
||||
* </p>
|
||||
*
|
||||
* @param applicationAttemptId
|
||||
* @return a list of reports of all containers for specified application
|
||||
* attempt
|
||||
* @throws YarnException
|
||||
* @throws IOException
|
||||
*/
|
||||
public abstract List<ContainerReport> getContainers(
|
||||
ApplicationAttemptId applicationAttemptId) throws YarnException,
|
||||
IOException;
|
||||
}
|
|
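Editor's note: a sketch of the typical lifecycle of the new client; 'conf' carries the AHS address configuration shown earlier and 'appId' is a placeholder for an application id known to the history server.

    // Sketch: creating and using an AHSClient against a running history server.
    AHSClient ahsClient = AHSClient.createAHSClient();
    ahsClient.init(conf);
    ahsClient.start();
    try {
      for (ApplicationAttemptReport attempt : ahsClient.getApplicationAttempts(appId)) {
        System.out.println(attempt.getApplicationAttemptId() + " -> "
            + attempt.getYarnApplicationAttemptState());
      }
    } finally {
      ahsClient.stop();
    }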
@@ -29,9 +29,13 @@ import org.apache.hadoop.classification.InterfaceAudience.Public;
|
|||
import org.apache.hadoop.classification.InterfaceStability;
|
||||
import org.apache.hadoop.io.Text;
|
||||
import org.apache.hadoop.service.AbstractService;
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationAttemptReport;
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationId;
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationReport;
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
|
||||
import org.apache.hadoop.yarn.api.records.ContainerId;
|
||||
import org.apache.hadoop.yarn.api.records.ContainerReport;
|
||||
import org.apache.hadoop.yarn.api.records.NodeReport;
|
||||
import org.apache.hadoop.yarn.api.records.NodeState;
|
||||
import org.apache.hadoop.yarn.api.records.QueueInfo;
|
||||
|
@@ -40,6 +44,7 @@ import org.apache.hadoop.yarn.api.records.Token;
|
|||
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
|
||||
import org.apache.hadoop.yarn.api.records.YarnClusterMetrics;
|
||||
import org.apache.hadoop.yarn.client.api.impl.YarnClientImpl;
|
||||
import org.apache.hadoop.yarn.exceptions.ContainerNotFoundException;
|
||||
import org.apache.hadoop.yarn.exceptions.YarnException;
|
||||
import org.apache.hadoop.yarn.security.AMRMTokenIdentifier;
|
||||
|
||||
|
@@ -360,4 +365,75 @@ public abstract class YarnClient extends AbstractService {
|
|||
*/
|
||||
public abstract List<QueueUserACLInfo> getQueueAclsInfo() throws YarnException,
|
||||
IOException;
|
||||
|
||||
/**
|
||||
* <p>
|
||||
* Get a report of the given ApplicationAttempt.
|
||||
* </p>
|
||||
*
|
||||
* <p>
|
||||
* In secure mode, <code>YARN</code> verifies access to the application, queue
|
||||
* etc. before accepting the request.
|
||||
* </p>
|
||||
*
|
||||
* @param applicationAttemptId
|
||||
* {@link ApplicationAttemptId} of the application attempt that needs
|
||||
* a report
|
||||
* @return application attempt report
|
||||
* @throws YarnException
|
||||
* @throws ApplicationAttemptNotFoundException if application attempt
|
||||
* not found
|
||||
* @throws IOException
|
||||
*/
|
||||
public abstract ApplicationAttemptReport getApplicationAttemptReport(
|
||||
ApplicationAttemptId applicationAttemptId) throws YarnException, IOException;
|
||||
|
||||
/**
|
||||
* <p>
|
||||
* Get reports of all ApplicationAttempts of the given Application in the cluster.
|
||||
* </p>
|
||||
*
|
||||
* @param applicationId
|
||||
* @return a list of reports for all application attempts for specified
|
||||
* application.
|
||||
* @throws YarnException
|
||||
* @throws IOException
|
||||
*/
|
||||
public abstract List<ApplicationAttemptReport> getApplicationAttempts(
|
||||
ApplicationId applicationId) throws YarnException, IOException;
|
||||
|
||||
/**
|
||||
* <p>
|
||||
* Get a report of the given Container.
|
||||
* </p>
|
||||
*
|
||||
* <p>
|
||||
* In secure mode, <code>YARN</code> verifies access to the application, queue
|
||||
* etc. before accepting the request.
|
||||
* </p>
|
||||
*
|
||||
* @param containerId
|
||||
* {@link ContainerId} of the container that needs a report
|
||||
* @return container report
|
||||
* @throws YarnException
|
||||
* @throws ContainerNotFoundException if container not found.
|
||||
* @throws IOException
|
||||
*/
|
||||
public abstract ContainerReport getContainerReport(ContainerId containerId)
|
||||
throws YarnException, IOException;
|
||||
|
||||
/**
|
||||
* <p>
|
||||
* Get reports of all Containers of the given ApplicationAttempt in the cluster.
|
||||
* </p>
|
||||
*
|
||||
* @param applicationAttemptId
|
||||
* @return a list of reports of all containers for specified application
|
||||
* attempts
|
||||
* @throws YarnException
|
||||
* @throws IOException
|
||||
*/
|
||||
public abstract List<ContainerReport> getContainers(
|
||||
ApplicationAttemptId applicationAttemptId) throws YarnException,
|
||||
IOException;
|
||||
}
|
||||
|
|
|
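Editor's note: with the history service enabled, the same calls become available through YarnClient itself; an illustrative sketch, assuming the standard YarnClient.createYarnClient() factory and a placeholder 'appId'.

    // Sketch: walking attempts and containers of an application via YarnClient.
    YarnClient yarnClient = YarnClient.createYarnClient();
    yarnClient.init(conf);
    yarnClient.start();
    for (ApplicationAttemptReport attempt : yarnClient.getApplicationAttempts(appId)) {
      for (ContainerReport container : yarnClient.getContainers(
          attempt.getApplicationAttemptId())) {
        System.out.println(container.getContainerId() + " exited with "
            + container.getContainerExitStatus());
      }
    }
    yarnClient.stop();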
@@ -0,0 +1,155 @@
|
|||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.yarn.client.api.impl;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.net.InetSocketAddress;
|
||||
import java.util.List;
|
||||
|
||||
import org.apache.hadoop.classification.InterfaceAudience.Private;
|
||||
import org.apache.hadoop.classification.InterfaceStability.Unstable;
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.ipc.RPC;
|
||||
import org.apache.hadoop.yarn.api.ApplicationHistoryProtocol;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptReportRequest;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptReportResponse;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptsRequest;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptsResponse;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportRequest;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportResponse;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsResponse;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.GetContainerReportRequest;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.GetContainerReportResponse;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.GetContainersRequest;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.GetContainersResponse;
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationAttemptReport;
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationId;
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationReport;
|
||||
import org.apache.hadoop.yarn.api.records.ContainerId;
|
||||
import org.apache.hadoop.yarn.api.records.ContainerReport;
|
||||
import org.apache.hadoop.yarn.client.AHSProxy;
|
||||
import org.apache.hadoop.yarn.client.api.AHSClient;
|
||||
import org.apache.hadoop.yarn.conf.YarnConfiguration;
|
||||
import org.apache.hadoop.yarn.exceptions.YarnException;
|
||||
import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
|
||||
|
||||
@Private
|
||||
@Unstable
|
||||
public class AHSClientImpl extends AHSClient {
|
||||
|
||||
protected ApplicationHistoryProtocol ahsClient;
|
||||
protected InetSocketAddress ahsAddress;
|
||||
|
||||
public AHSClientImpl() {
|
||||
super(AHSClientImpl.class.getName());
|
||||
}
|
||||
|
||||
private static InetSocketAddress getAHSAddress(Configuration conf) {
|
||||
return conf.getSocketAddr(YarnConfiguration.AHS_ADDRESS,
|
||||
YarnConfiguration.DEFAULT_AHS_ADDRESS,
|
||||
YarnConfiguration.DEFAULT_AHS_PORT);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void serviceInit(Configuration conf) throws Exception {
|
||||
this.ahsAddress = getAHSAddress(conf);
|
||||
super.serviceInit(conf);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void serviceStart() throws Exception {
|
||||
try {
|
||||
ahsClient = AHSProxy.createAHSProxy(getConfig(),
|
||||
ApplicationHistoryProtocol.class, this.ahsAddress);
|
||||
} catch (IOException e) {
|
||||
throw new YarnRuntimeException(e);
|
||||
}
|
||||
super.serviceStart();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void serviceStop() throws Exception {
|
||||
if (this.ahsClient != null) {
|
||||
RPC.stopProxy(this.ahsClient);
|
||||
}
|
||||
super.serviceStop();
|
||||
}
|
||||
|
||||
@Override
|
||||
public ApplicationReport getApplicationReport(ApplicationId appId)
|
||||
throws YarnException, IOException {
|
||||
GetApplicationReportRequest request = GetApplicationReportRequest
|
||||
.newInstance(appId);
|
||||
GetApplicationReportResponse response = ahsClient
|
||||
.getApplicationReport(request);
|
||||
return response.getApplicationReport();
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<ApplicationReport> getApplications() throws YarnException,
|
||||
IOException {
|
||||
GetApplicationsRequest request = GetApplicationsRequest.newInstance(null,
|
||||
null);
|
||||
GetApplicationsResponse response = ahsClient.getApplications(request);
|
||||
return response.getApplicationList();
|
||||
}
|
||||
|
||||
@Override
|
||||
public ApplicationAttemptReport getApplicationAttemptReport(
|
||||
ApplicationAttemptId applicationAttemptId) throws YarnException,
|
||||
IOException {
|
||||
GetApplicationAttemptReportRequest request = GetApplicationAttemptReportRequest
|
||||
.newInstance(applicationAttemptId);
|
||||
GetApplicationAttemptReportResponse response = ahsClient
|
||||
.getApplicationAttemptReport(request);
|
||||
return response.getApplicationAttemptReport();
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<ApplicationAttemptReport> getApplicationAttempts(
|
||||
ApplicationId appId) throws YarnException, IOException {
|
||||
GetApplicationAttemptsRequest request = GetApplicationAttemptsRequest
|
||||
.newInstance(appId);
|
||||
GetApplicationAttemptsResponse response = ahsClient
|
||||
.getApplicationAttempts(request);
|
||||
return response.getApplicationAttemptList();
|
||||
}
|
||||
|
||||
@Override
|
||||
public ContainerReport getContainerReport(ContainerId containerId)
|
||||
throws YarnException, IOException {
|
||||
GetContainerReportRequest request = GetContainerReportRequest
|
||||
.newInstance(containerId);
|
||||
GetContainerReportResponse response = ahsClient.getContainerReport(request);
|
||||
return response.getContainerReport();
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<ContainerReport> getContainers(
|
||||
ApplicationAttemptId applicationAttemptId) throws YarnException,
|
||||
IOException {
|
||||
GetContainersRequest request = GetContainersRequest
|
||||
.newInstance(applicationAttemptId);
|
||||
GetContainersResponse response = ahsClient.getContainers(request);
|
||||
return response.getContainerList();
|
||||
}
|
||||
|
||||
}
|
|
@@ -49,9 +49,13 @@ import org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoRequest;
|
|||
import org.apache.hadoop.yarn.api.protocolrecords.KillApplicationRequest;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.KillApplicationResponse;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationRequest;
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationAttemptReport;
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationId;
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationReport;
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
|
||||
import org.apache.hadoop.yarn.api.records.ContainerId;
|
||||
import org.apache.hadoop.yarn.api.records.ContainerReport;
|
||||
import org.apache.hadoop.yarn.api.records.NodeReport;
|
||||
import org.apache.hadoop.yarn.api.records.NodeState;
|
||||
import org.apache.hadoop.yarn.api.records.QueueInfo;
|
||||
|
@@ -60,9 +64,11 @@ import org.apache.hadoop.yarn.api.records.Token;
|
|||
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
|
||||
import org.apache.hadoop.yarn.api.records.YarnClusterMetrics;
|
||||
import org.apache.hadoop.yarn.client.ClientRMProxy;
|
||||
import org.apache.hadoop.yarn.client.api.AHSClient;
|
||||
import org.apache.hadoop.yarn.client.api.YarnClient;
|
||||
import org.apache.hadoop.yarn.client.api.YarnClientApplication;
|
||||
import org.apache.hadoop.yarn.conf.YarnConfiguration;
|
||||
import org.apache.hadoop.yarn.exceptions.ApplicationNotFoundException;
|
||||
import org.apache.hadoop.yarn.exceptions.YarnException;
|
||||
import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
|
||||
import org.apache.hadoop.yarn.security.AMRMTokenIdentifier;
|
||||
|
@@ -80,6 +86,8 @@ public class YarnClientImpl extends YarnClient {
|
|||
protected ApplicationClientProtocol rmClient;
|
||||
protected long submitPollIntervalMillis;
|
||||
private long asyncApiPollIntervalMillis;
|
||||
protected AHSClient historyClient;
|
||||
private boolean historyServiceEnabled;
|
||||
|
||||
private static final String ROOT = "root";
|
||||
|
||||
|
@@ -100,6 +108,14 @@ public class YarnClientImpl extends YarnClient {
|
|||
YarnConfiguration.YARN_CLIENT_APP_SUBMISSION_POLL_INTERVAL_MS,
|
||||
YarnConfiguration.DEFAULT_YARN_CLIENT_APPLICATION_CLIENT_PROTOCOL_POLL_INTERVAL_MS);
|
||||
}
|
||||
|
||||
if (conf.getBoolean(YarnConfiguration.YARN_HISTORY_SERVICE_ENABLED,
|
||||
YarnConfiguration.DEFAULT_YARN_HISTORY_SERVICE_ENABLED)) {
|
||||
historyServiceEnabled = true;
|
||||
historyClient = AHSClientImpl.createAHSClient();
|
||||
historyClient.init(getConfig());
|
||||
}
|
||||
|
||||
super.serviceInit(conf);
|
||||
}
|
||||
|
||||
|
@@ -108,6 +124,9 @@ public class YarnClientImpl extends YarnClient {
|
|||
try {
|
||||
rmClient = ClientRMProxy.createRMProxy(getConfig(),
|
||||
ApplicationClientProtocol.class);
|
||||
if (historyServiceEnabled) {
|
||||
historyClient.start();
|
||||
}
|
||||
} catch (IOException e) {
|
||||
throw new YarnRuntimeException(e);
|
||||
}
|
||||
|
@@ -119,6 +138,9 @@ public class YarnClientImpl extends YarnClient {
|
|||
if (this.rmClient != null) {
|
||||
RPC.stopProxy(this.rmClient);
|
||||
}
|
||||
if (historyServiceEnabled) {
|
||||
historyClient.stop();
|
||||
}
|
||||
super.serviceStop();
|
||||
}
|
||||
|
||||
|
@@ -207,11 +229,27 @@ public class YarnClientImpl extends YarnClient {
|
|||
@Override
|
||||
public ApplicationReport getApplicationReport(ApplicationId appId)
|
||||
throws YarnException, IOException {
|
||||
GetApplicationReportRequest request =
|
||||
Records.newRecord(GetApplicationReportRequest.class);
|
||||
GetApplicationReportResponse response = null;
|
||||
try {
|
||||
GetApplicationReportRequest request = Records
|
||||
.newRecord(GetApplicationReportRequest.class);
|
||||
request.setApplicationId(appId);
|
||||
GetApplicationReportResponse response =
|
||||
rmClient.getApplicationReport(request);
|
||||
response = rmClient.getApplicationReport(request);
|
||||
} catch (YarnException e) {
|
||||
|
||||
if (!historyServiceEnabled) {
|
||||
// Just throw it as usual if historyService is not enabled.
|
||||
throw e;
|
||||
}
|
||||
|
||||
// Even if history-service is enabled, treat all exceptions still the same
|
||||
// except the following
|
||||
if (!(e.getClass() == ApplicationNotFoundException.class)) {
|
||||
throw e;
|
||||
}
|
||||
|
||||
return historyClient.getApplicationReport(appId);
|
||||
}
|
||||
return response.getApplicationReport();
|
||||
}
|
||||
|
||||
|
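Editor's note: the effect of the change above, seen from a caller; a sketch where 'oldAppId' is a placeholder for an application the RM has already purged. Only ApplicationNotFoundException triggers the fallback; any other YarnException from the RM is rethrown unchanged.

    // With the history service enabled, a report for an application the RM no
    // longer remembers is served from the history store instead of surfacing
    // ApplicationNotFoundException to the caller.
    ApplicationReport report = yarnClient.getApplicationReport(oldAppId);
    System.out.println(report.getYarnApplicationState());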
@@ -373,4 +411,41 @@ public class YarnClientImpl extends YarnClient {
|
|||
public void setRMClient(ApplicationClientProtocol rmClient) {
|
||||
this.rmClient = rmClient;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ApplicationAttemptReport getApplicationAttemptReport(
|
||||
ApplicationAttemptId appAttemptId) throws YarnException, IOException {
|
||||
if (historyServiceEnabled) {
|
||||
return historyClient.getApplicationAttemptReport(appAttemptId);
|
||||
}
|
||||
throw new YarnException("History service is not enabled.");
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<ApplicationAttemptReport> getApplicationAttempts(
|
||||
ApplicationId appId) throws YarnException, IOException {
|
||||
if (historyServiceEnabled) {
|
||||
return historyClient.getApplicationAttempts(appId);
|
||||
}
|
||||
throw new YarnException("History service is not enabled.");
|
||||
}
|
||||
|
||||
@Override
|
||||
public ContainerReport getContainerReport(ContainerId containerId)
|
||||
throws YarnException, IOException {
|
||||
if (historyServiceEnabled) {
|
||||
return historyClient.getContainerReport(containerId);
|
||||
}
|
||||
throw new YarnException("History service is not enabled.");
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<ContainerReport> getContainers(
|
||||
ApplicationAttemptId applicationAttemptId) throws YarnException,
|
||||
IOException {
|
||||
if (historyServiceEnabled) {
|
||||
return historyClient.getContainers(applicationAttemptId);
|
||||
}
|
||||
throw new YarnException("History service is not enabled.");
|
||||
}
|
||||
}
|
||||
|
|
|
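Editor's note: conversely, when the history service is not enabled the delegating methods above fail fast rather than contacting any server; a sketch of the caller-visible behaviour with a placeholder 'containerId'.

    // Sketch: with yarn history service disabled (the default), the new
    // YarnClient methods throw a plain YarnException.
    try {
      yarnClient.getContainerReport(containerId);
    } catch (YarnException e) {
      System.err.println(e.getMessage()); // "History service is not enabled."
    }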
@@ -35,8 +35,10 @@ import org.apache.commons.cli.Options;
|
|||
import org.apache.hadoop.classification.InterfaceAudience.Private;
|
||||
import org.apache.hadoop.classification.InterfaceStability.Unstable;
|
||||
import org.apache.hadoop.util.ToolRunner;
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationAttemptReport;
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationId;
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationReport;
|
||||
import org.apache.hadoop.yarn.api.records.ContainerReport;
|
||||
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
|
||||
import org.apache.hadoop.yarn.exceptions.YarnException;
|
||||
import org.apache.hadoop.yarn.util.ConverterUtils;
|
||||
|
@@ -47,12 +49,21 @@ import com.google.common.annotations.VisibleForTesting;
|
|||
@Unstable
|
||||
public class ApplicationCLI extends YarnCLI {
|
||||
private static final String APPLICATIONS_PATTERN =
|
||||
"%30s\t%20s\t%20s\t%10s\t%10s\t%18s\t%18s\t%15s\t%35s" +
|
||||
System.getProperty("line.separator");
|
||||
"%30s\t%20s\t%20s\t%10s\t%10s\t%18s\t%18s\t%15s\t%35s"
|
||||
+ System.getProperty("line.separator");
|
||||
private static final String APPLICATION_ATTEMPTS_PATTERN =
|
||||
"%30s\t%20s\t%35s\t%35s"
|
||||
+ System.getProperty("line.separator");
|
||||
private static final String CONTAINER_PATTERN =
|
||||
"%30s\t%20s\t%20s\t%20s\t%20s\t%35s"
|
||||
+ System.getProperty("line.separator");
|
||||
|
||||
private static final String APP_TYPE_CMD = "appTypes";
|
||||
private static final String APP_STATE_CMD = "appStates";
|
||||
private static final String ALLSTATES_OPTION = "ALL";
|
||||
public static final String APPLICATION = "application";
|
||||
public static final String APPLICATION_ATTEMPT = "applicationattempt";
|
||||
public static final String CONTAINER = "container";
|
||||
|
||||
private boolean allAppStates;
|
||||
|
||||
|
@@ -69,23 +80,33 @@ public class ApplicationCLI extends YarnCLI {
|
|||
public int run(String[] args) throws Exception {
|
||||
|
||||
Options opts = new Options();
|
||||
opts.addOption(STATUS_CMD, true, "Prints the status of the application.");
|
||||
opts.addOption(LIST_CMD, false, "List applications from the RM. " +
|
||||
"Supports optional use of -appTypes to filter applications " +
|
||||
"based on application type, " +
|
||||
"and -appStates to filter applications based on application state");
|
||||
opts.addOption(STATUS_CMD, true,
|
||||
"Prints the status of the application.");
|
||||
if (args.length > 0
|
||||
&& args[0].compareToIgnoreCase(APPLICATION_ATTEMPT) == 0) {
|
||||
opts.addOption(LIST_CMD, true,
|
||||
"List application attempts for aplication from AHS. ");
|
||||
} else if (args.length > 0 && args[0].compareToIgnoreCase("container") == 0) {
|
||||
opts.addOption(LIST_CMD, true,
|
||||
"List containers for application attempts from AHS. ");
|
||||
} else {
|
||||
opts.addOption(LIST_CMD, false, "List applications from the RM. "
|
||||
+ "Supports optional use of -appTypes to filter applications "
|
||||
+ "based on application type, "
|
||||
+ "and -appStates to filter applications based on application state");
|
||||
}
|
||||
opts.addOption(KILL_CMD, true, "Kills the application.");
|
||||
opts.addOption(HELP_CMD, false, "Displays help for all commands.");
|
||||
Option appTypeOpt = new Option(APP_TYPE_CMD, true, "Works with -list to " +
|
||||
"filter applications based on " +
|
||||
"input comma-separated list of application types.");
|
||||
Option appTypeOpt = new Option(APP_TYPE_CMD, true, "Works with -list to "
|
||||
+ "filter applications based on "
|
||||
+ "input comma-separated list of application types.");
|
||||
appTypeOpt.setValueSeparator(',');
|
||||
appTypeOpt.setArgs(Option.UNLIMITED_VALUES);
|
||||
appTypeOpt.setArgName("Types");
|
||||
opts.addOption(appTypeOpt);
|
||||
Option appStateOpt = new Option(APP_STATE_CMD, true, "Works with -list " +
|
||||
"to filter applications based on input comma-separated list of " +
|
||||
"application states. " + getAllValidApplicationStates());
|
||||
Option appStateOpt = new Option(APP_STATE_CMD, true, "Works with -list "
|
||||
+ "to filter applications based on input comma-separated list of "
|
||||
+ "application states. " + getAllValidApplicationStates());
|
||||
appStateOpt.setValueSeparator(',');
|
||||
appStateOpt.setArgs(Option.UNLIMITED_VALUES);
|
||||
appStateOpt.setArgName("States");
|
||||
|
@@ -104,12 +125,38 @@ public class ApplicationCLI extends YarnCLI {
|
|||
}
|
||||
|
||||
if (cliParser.hasOption(STATUS_CMD)) {
|
||||
if (args.length != 2) {
|
||||
if ((args[0].compareToIgnoreCase(APPLICATION) == 0)
|
||||
|| (args[0].compareToIgnoreCase(APPLICATION_ATTEMPT) == 0)
|
||||
|| (args[0].compareToIgnoreCase(CONTAINER) == 0)) {
|
||||
if (args.length != 3) {
|
||||
printUsage(opts);
|
||||
return exitCode;
|
||||
}
|
||||
} else if (args.length != 2) {
|
||||
printUsage(opts);
|
||||
return exitCode;
|
||||
}
|
||||
if (args[0].compareToIgnoreCase(APPLICATION_ATTEMPT) == 0) {
|
||||
printApplicationAttemptReport(cliParser.getOptionValue(STATUS_CMD));
|
||||
} else if (args[0].compareToIgnoreCase(CONTAINER) == 0) {
|
||||
printContainerReport(cliParser.getOptionValue(STATUS_CMD));
|
||||
} else {
|
||||
printApplicationReport(cliParser.getOptionValue(STATUS_CMD));
|
||||
}
|
||||
} else if (cliParser.hasOption(LIST_CMD)) {
|
||||
if (args[0].compareToIgnoreCase(APPLICATION_ATTEMPT) == 0) {
|
||||
if (args.length != 3) {
|
||||
printUsage(opts);
|
||||
return exitCode;
|
||||
}
|
||||
listApplicationAttempts(cliParser.getOptionValue(LIST_CMD));
|
||||
} else if (args[0].compareToIgnoreCase(CONTAINER) == 0) {
|
||||
if (args.length != 3) {
|
||||
printUsage(opts);
|
||||
return exitCode;
|
||||
}
|
||||
listContainers(cliParser.getOptionValue(LIST_CMD));
|
||||
} else {
|
||||
allAppStates = false;
|
||||
Set<String> appTypes = new HashSet<String>();
|
||||
if (cliParser.hasOption(APP_TYPE_CMD)) {
|
||||
|
@@ -123,8 +170,8 @@ public class ApplicationCLI extends YarnCLI {
|
|||
}
|
||||
}
|
||||
|
||||
EnumSet<YarnApplicationState> appStates =
|
||||
EnumSet.noneOf(YarnApplicationState.class);
|
||||
EnumSet<YarnApplicationState> appStates = EnumSet
|
||||
.noneOf(YarnApplicationState.class);
|
||||
if (cliParser.hasOption(APP_STATE_CMD)) {
|
||||
String[] states = cliParser.getOptionValues(APP_STATE_CMD);
|
||||
if (states != null) {
|
||||
|
@@ -135,8 +182,8 @@ public class ApplicationCLI extends YarnCLI {
|
|||
break;
|
||||
}
|
||||
try {
|
||||
appStates.add(YarnApplicationState.valueOf(state.toUpperCase()
|
||||
.trim()));
|
||||
appStates.add(YarnApplicationState.valueOf(state
|
||||
.toUpperCase().trim()));
|
||||
} catch (IllegalArgumentException ex) {
|
||||
sysout.println("The application state " + state
|
||||
+ " is invalid.");
|
||||
|
@@ -148,6 +195,7 @@ public class ApplicationCLI extends YarnCLI {
|
|||
}
|
||||
}
|
||||
listApplications(appTypes, appStates);
|
||||
}
|
||||
} else if (cliParser.hasOption(KILL_CMD)) {
|
||||
if (args.length != 2) {
|
||||
printUsage(opts);
|
||||
|
@@ -175,8 +223,85 @@ public class ApplicationCLI extends YarnCLI {
|
|||
}
|
||||
|
||||
/**
|
||||
* Lists the applications matching the given application Types
|
||||
* And application States present in the Resource Manager
|
||||
* Prints the application attempt report for an application attempt id.
|
||||
*
|
||||
* @param applicationAttemptId
|
||||
* @throws YarnException
|
||||
*/
|
||||
private void printApplicationAttemptReport(String applicationAttemptId)
|
||||
throws YarnException, IOException {
|
||||
ApplicationAttemptReport appAttemptReport = client
|
||||
.getApplicationAttemptReport(ConverterUtils
|
||||
.toApplicationAttemptId(applicationAttemptId));
|
||||
// Use PrintWriter.println, which uses correct platform line ending.
|
||||
ByteArrayOutputStream baos = new ByteArrayOutputStream();
|
||||
PrintWriter appAttemptReportStr = new PrintWriter(baos);
|
||||
if (appAttemptReport != null) {
|
||||
appAttemptReportStr.println("Application Attempt Report : ");
|
||||
appAttemptReportStr.print("\tApplicationAttempt-Id : ");
|
||||
appAttemptReportStr.println(appAttemptReport.getApplicationAttemptId());
|
||||
appAttemptReportStr.print("\tState : ");
|
||||
appAttemptReportStr.println(appAttemptReport
|
||||
.getYarnApplicationAttemptState());
|
||||
appAttemptReportStr.print("\tAMContainer : ");
|
||||
appAttemptReportStr.println(appAttemptReport.getAMContainerId()
|
||||
.toString());
|
||||
appAttemptReportStr.print("\tTracking-URL : ");
|
||||
appAttemptReportStr.println(appAttemptReport.getTrackingUrl());
|
||||
appAttemptReportStr.print("\tRPC Port : ");
|
||||
appAttemptReportStr.println(appAttemptReport.getRpcPort());
|
||||
appAttemptReportStr.print("\tAM Host : ");
|
||||
appAttemptReportStr.println(appAttemptReport.getHost());
|
||||
appAttemptReportStr.print("\tDiagnostics : ");
|
||||
appAttemptReportStr.print(appAttemptReport.getDiagnostics());
|
||||
} else {
|
||||
appAttemptReportStr.print("Application Attempt with id '"
|
||||
+ applicationAttemptId + "' doesn't exist in History Server.");
|
||||
}
|
||||
appAttemptReportStr.close();
|
||||
sysout.println(baos.toString("UTF-8"));
|
||||
}
|
||||
|
||||
/**
|
||||
* Prints the container report for a container id.
|
||||
*
|
||||
* @param containerId
|
||||
* @throws YarnException
|
||||
*/
|
||||
private void printContainerReport(String containerId) throws YarnException,
|
||||
IOException {
|
||||
ContainerReport containerReport = client.getContainerReport((ConverterUtils
|
||||
.toContainerId(containerId)));
|
||||
// Use PrintWriter.println, which uses correct platform line ending.
|
||||
ByteArrayOutputStream baos = new ByteArrayOutputStream();
|
||||
PrintWriter containerReportStr = new PrintWriter(baos);
|
||||
if (containerReport != null) {
|
||||
containerReportStr.println("Container Report : ");
|
||||
containerReportStr.print("\tContainer-Id : ");
|
||||
containerReportStr.println(containerReport.getContainerId());
|
||||
containerReportStr.print("\tStart-Time : ");
|
||||
containerReportStr.println(containerReport.getStartTime());
|
||||
containerReportStr.print("\tFinish-Time : ");
|
||||
containerReportStr.println(containerReport.getFinishTime());
|
||||
containerReportStr.print("\tState : ");
|
||||
containerReportStr.println(containerReport.getContainerState());
|
||||
containerReportStr.print("\tLOG-URL : ");
|
||||
containerReportStr.println(containerReport.getLogUrl());
|
||||
containerReportStr.print("\tHost : ");
|
||||
containerReportStr.println(containerReport.getAssignedNode());
|
||||
containerReportStr.print("\tDiagnostics : ");
|
||||
containerReportStr.print(containerReport.getDiagnosticsInfo());
|
||||
} else {
|
||||
containerReportStr.print("Container with id '" + containerId
|
||||
+ "' doesn't exist in Hostory Server.");
|
||||
}
|
||||
containerReportStr.close();
|
||||
sysout.println(baos.toString("UTF-8"));
|
||||
}
|
||||
|
||||
/**
|
||||
* Lists the applications matching the given application Types And application
|
||||
* States present in the Resource Manager
|
||||
*
|
||||
* @param appTypes
|
||||
* @param appStates
|
||||
|
@@ -199,23 +324,24 @@ public class ApplicationCLI extends YarnCLI {
|
|||
}
|
||||
}
|
||||
|
||||
List<ApplicationReport> appsReport =
|
||||
client.getApplications(appTypes, appStates);
|
||||
List<ApplicationReport> appsReport = client.getApplications(appTypes,
|
||||
appStates);
|
||||
|
||||
writer
|
||||
.println("Total number of applications (application-types: " + appTypes
|
||||
+ " and states: " + appStates + ")" + ":" + appsReport.size());
|
||||
writer.printf(APPLICATIONS_PATTERN, "Application-Id",
|
||||
"Application-Name","Application-Type", "User", "Queue",
|
||||
"State", "Final-State","Progress", "Tracking-URL");
|
||||
writer.println("Total number of applications (application-types: "
|
||||
+ appTypes + " and states: " + appStates + ")" + ":"
|
||||
+ appsReport.size());
|
||||
writer.printf(APPLICATIONS_PATTERN, "Application-Id", "Application-Name",
|
||||
"Application-Type", "User", "Queue", "State", "Final-State",
|
||||
"Progress", "Tracking-URL");
|
||||
for (ApplicationReport appReport : appsReport) {
|
||||
DecimalFormat formatter = new DecimalFormat("###.##%");
|
||||
String progress = formatter.format(appReport.getProgress());
|
||||
writer.printf(APPLICATIONS_PATTERN, appReport.getApplicationId(),
|
||||
appReport.getName(),appReport.getApplicationType(), appReport.getUser(),
|
||||
appReport.getQueue(),appReport.getYarnApplicationState(),
|
||||
appReport.getFinalApplicationStatus(),progress,
|
||||
appReport.getOriginalTrackingUrl());
|
||||
appReport.getName(), appReport.getApplicationType(), appReport
|
||||
.getUser(), appReport.getQueue(), appReport
|
||||
.getYarnApplicationState(),
|
||||
appReport.getFinalApplicationStatus(), progress, appReport
|
||||
.getOriginalTrackingUrl());
|
||||
}
|
||||
writer.flush();
|
||||
}
|
||||
|
@@ -227,8 +353,8 @@ public class ApplicationCLI extends YarnCLI {
|
|||
* @throws YarnException
|
||||
* @throws IOException
|
||||
*/
|
||||
private void killApplication(String applicationId)
|
||||
throws YarnException, IOException {
|
||||
private void killApplication(String applicationId) throws YarnException,
|
||||
IOException {
|
||||
ApplicationId appId = ConverterUtils.toApplicationId(applicationId);
|
||||
ApplicationReport appReport = client.getApplicationReport(appId);
|
||||
if (appReport.getYarnApplicationState() == YarnApplicationState.FINISHED
|
||||
|
@@ -296,14 +422,63 @@ public class ApplicationCLI extends YarnCLI {
|
|||
|
||||
private String getAllValidApplicationStates() {
|
||||
StringBuilder sb = new StringBuilder();
|
||||
sb.append("The valid application state can be"
|
||||
+ " one of the following: ");
|
||||
sb.append("The valid application state can be" + " one of the following: ");
|
||||
sb.append(ALLSTATES_OPTION + ",");
|
||||
for (YarnApplicationState appState : YarnApplicationState
|
||||
.values()) {
|
||||
for (YarnApplicationState appState : YarnApplicationState.values()) {
|
||||
sb.append(appState + ",");
|
||||
}
|
||||
String output = sb.toString();
|
||||
return output.substring(0, output.length() - 1);
|
||||
}
|
||||
|
||||
/**
|
||||
* Lists the application attempts matching the given application id
|
||||
*
|
||||
* @param appId
|
||||
* @throws YarnException
|
||||
* @throws IOException
|
||||
*/
|
||||
private void listApplicationAttempts(String appId) throws YarnException,
|
||||
IOException {
|
||||
PrintWriter writer = new PrintWriter(sysout);
|
||||
|
||||
List<ApplicationAttemptReport> appAttemptsReport = client
|
||||
.getApplicationAttempts(ConverterUtils.toApplicationId(appId));
|
||||
writer.println("Total number of application attempts " + ":"
|
||||
+ appAttemptsReport.size());
|
||||
writer.printf(APPLICATION_ATTEMPTS_PATTERN, "ApplicationAttempt-Id",
|
||||
"State", "AM-Container-Id", "Tracking-URL");
|
||||
for (ApplicationAttemptReport appAttemptReport : appAttemptsReport) {
|
||||
writer.printf(APPLICATION_ATTEMPTS_PATTERN, appAttemptReport
|
||||
.getApplicationAttemptId(), appAttemptReport
|
||||
.getYarnApplicationAttemptState(), appAttemptReport
|
||||
.getAMContainerId().toString(), appAttemptReport.getTrackingUrl());
|
||||
}
|
||||
writer.flush();
|
||||
}
|
||||
|
||||
/**
|
||||
* Lists the containers matching the given application attempt
|
||||
*
|
||||
* @param appAttemptId
|
||||
* @throws YarnException
|
||||
* @throws IOException
|
||||
*/
|
||||
private void listContainers(String appAttemptId) throws YarnException,
|
||||
IOException {
|
||||
PrintWriter writer = new PrintWriter(sysout);
|
||||
|
||||
List<ContainerReport> appsReport = client
|
||||
.getContainers(ConverterUtils.toApplicationAttemptId(appAttemptId));
|
||||
writer.println("Total number of containers " + ":" + appsReport.size());
|
||||
writer.printf(CONTAINER_PATTERN, "Container-Id", "Start Time",
|
||||
"Finish Time", "State", "Host", "LOG-URL");
|
||||
for (ContainerReport containerReport : appsReport) {
|
||||
writer.printf(CONTAINER_PATTERN, containerReport.getContainerId(),
|
||||
containerReport.getStartTime(), containerReport.getFinishTime(),
|
||||
containerReport.getContainerState(), containerReport
|
||||
.getAssignedNode(), containerReport.getLogUrl());
|
||||
}
|
||||
writer.flush();
|
||||
}
|
||||
}
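For context, a minimal hedged sketch of the report APIs the new CLI sub-commands above rely on (getApplicationAttempts/getContainers on YarnClient). The class name, application id, and cluster configuration are illustrative assumptions and are not part of this patch.

import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptReport;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerReport;
import org.apache.hadoop.yarn.client.api.YarnClient;

// Illustrative sketch only: walks every attempt and container of one application
// and prints the same fields the new "applicationattempt" and "container"
// sub-commands report.
public class ReportApiSketch {
  public static void main(String[] args) throws Exception {
    YarnClient client = YarnClient.createYarnClient();
    client.init(new Configuration()); // assumes yarn-site.xml points at a live RM/AHS
    client.start();
    try {
      // Hypothetical application id; substitute one obtained from the cluster.
      ApplicationId appId = ApplicationId.newInstance(1234L, 5);
      List<ApplicationAttemptReport> attempts = client.getApplicationAttempts(appId);
      for (ApplicationAttemptReport attempt : attempts) {
        System.out.println(attempt.getApplicationAttemptId() + " : "
            + attempt.getYarnApplicationAttemptState());
        for (ContainerReport container
            : client.getContainers(attempt.getApplicationAttemptId())) {
          System.out.println("  " + container.getContainerId() + " -> "
              + container.getContainerState());
        }
      }
    } finally {
      client.stop();
    }
  }
}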
|
||||
@@ -0,0 +1,415 @@
|
|||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.yarn.client.api.impl;
|
||||
|
||||
import static org.mockito.Matchers.any;
|
||||
import static org.mockito.Mockito.mock;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
|
||||
import junit.framework.Assert;
|
||||
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.yarn.api.ApplicationHistoryProtocol;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptReportRequest;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptReportResponse;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptsRequest;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptsResponse;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportRequest;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportResponse;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsResponse;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.GetContainerReportRequest;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.GetContainerReportResponse;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.GetContainersRequest;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.GetContainersResponse;
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationAttemptReport;
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationId;
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationReport;
|
||||
import org.apache.hadoop.yarn.api.records.ContainerId;
|
||||
import org.apache.hadoop.yarn.api.records.ContainerReport;
|
||||
import org.apache.hadoop.yarn.api.records.ContainerState;
|
||||
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
|
||||
import org.apache.hadoop.yarn.api.records.NodeId;
|
||||
import org.apache.hadoop.yarn.api.records.Priority;
|
||||
import org.apache.hadoop.yarn.api.records.YarnApplicationAttemptState;
|
||||
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
|
||||
import org.apache.hadoop.yarn.client.api.AHSClient;
|
||||
import org.apache.hadoop.yarn.exceptions.YarnException;
|
||||
import org.junit.Test;
|
||||
|
||||
public class TestAHSClient {
|
||||
|
||||
@Test
|
||||
public void testClientStop() {
|
||||
Configuration conf = new Configuration();
|
||||
AHSClient client = AHSClient.createAHSClient();
|
||||
client.init(conf);
|
||||
client.start();
|
||||
client.stop();
|
||||
}
|
||||
|
||||
@Test(timeout = 10000)
|
||||
public void testGetApplications() throws YarnException, IOException {
|
||||
Configuration conf = new Configuration();
|
||||
final AHSClient client = new MockAHSClient();
|
||||
client.init(conf);
|
||||
client.start();
|
||||
|
||||
List<ApplicationReport> expectedReports =
|
||||
((MockAHSClient) client).getReports();
|
||||
|
||||
List<ApplicationReport> reports = client.getApplications();
|
||||
Assert.assertEquals(reports, expectedReports);
|
||||
|
||||
reports = client.getApplications();
|
||||
Assert.assertEquals(reports.size(), 4);
|
||||
client.stop();
|
||||
}
|
||||
|
||||
@Test(timeout = 10000)
|
||||
public void testGetApplicationReport() throws YarnException, IOException {
|
||||
Configuration conf = new Configuration();
|
||||
final AHSClient client = new MockAHSClient();
|
||||
client.init(conf);
|
||||
client.start();
|
||||
|
||||
List<ApplicationReport> expectedReports =
|
||||
((MockAHSClient) client).getReports();
|
||||
ApplicationId applicationId = ApplicationId.newInstance(1234, 5);
|
||||
ApplicationReport report = client.getApplicationReport(applicationId);
|
||||
Assert.assertEquals(report, expectedReports.get(0));
|
||||
Assert.assertEquals(report.getApplicationId().toString(), expectedReports
|
||||
.get(0).getApplicationId().toString());
|
||||
client.stop();
|
||||
}
|
||||
|
||||
@Test(timeout = 10000)
|
||||
public void testGetApplicationAttempts() throws YarnException, IOException {
|
||||
Configuration conf = new Configuration();
|
||||
final AHSClient client = new MockAHSClient();
|
||||
client.init(conf);
|
||||
client.start();
|
||||
|
||||
ApplicationId applicationId = ApplicationId.newInstance(1234, 5);
|
||||
List<ApplicationAttemptReport> reports =
|
||||
client.getApplicationAttempts(applicationId);
|
||||
Assert.assertNotNull(reports);
|
||||
Assert.assertEquals(reports.get(0).getApplicationAttemptId(),
|
||||
ApplicationAttemptId.newInstance(applicationId, 1));
|
||||
Assert.assertEquals(reports.get(1).getApplicationAttemptId(),
|
||||
ApplicationAttemptId.newInstance(applicationId, 2));
|
||||
client.stop();
|
||||
}
|
||||
|
||||
@Test(timeout = 10000)
|
||||
public void testGetApplicationAttempt() throws YarnException, IOException {
|
||||
Configuration conf = new Configuration();
|
||||
final AHSClient client = new MockAHSClient();
|
||||
client.init(conf);
|
||||
client.start();
|
||||
|
||||
List<ApplicationReport> expectedReports =
|
||||
((MockAHSClient) client).getReports();
|
||||
|
||||
ApplicationId applicationId = ApplicationId.newInstance(1234, 5);
|
||||
ApplicationAttemptId appAttemptId =
|
||||
ApplicationAttemptId.newInstance(applicationId, 1);
|
||||
ApplicationAttemptReport report =
|
||||
client.getApplicationAttemptReport(appAttemptId);
|
||||
Assert.assertNotNull(report);
|
||||
Assert.assertEquals(report.getApplicationAttemptId().toString(),
|
||||
expectedReports.get(0).getCurrentApplicationAttemptId().toString());
|
||||
client.stop();
|
||||
}
|
||||
|
||||
@Test(timeout = 10000)
|
||||
public void testGetContainers() throws YarnException, IOException {
|
||||
Configuration conf = new Configuration();
|
||||
final AHSClient client = new MockAHSClient();
|
||||
client.init(conf);
|
||||
client.start();
|
||||
|
||||
ApplicationId applicationId = ApplicationId.newInstance(1234, 5);
|
||||
ApplicationAttemptId appAttemptId =
|
||||
ApplicationAttemptId.newInstance(applicationId, 1);
|
||||
List<ContainerReport> reports = client.getContainers(appAttemptId);
|
||||
Assert.assertNotNull(reports);
|
||||
Assert.assertEquals(reports.get(0).getContainerId(),
|
||||
(ContainerId.newInstance(appAttemptId, 1)));
|
||||
Assert.assertEquals(reports.get(1).getContainerId(),
|
||||
(ContainerId.newInstance(appAttemptId, 2)));
|
||||
client.stop();
|
||||
}
|
||||
|
||||
@Test(timeout = 10000)
|
||||
public void testGetContainerReport() throws YarnException, IOException {
|
||||
Configuration conf = new Configuration();
|
||||
final AHSClient client = new MockAHSClient();
|
||||
client.init(conf);
|
||||
client.start();
|
||||
|
||||
List<ApplicationReport> expectedReports =
|
||||
((MockAHSClient) client).getReports();
|
||||
|
||||
ApplicationId applicationId = ApplicationId.newInstance(1234, 5);
|
||||
ApplicationAttemptId appAttemptId =
|
||||
ApplicationAttemptId.newInstance(applicationId, 1);
|
||||
ContainerId containerId = ContainerId.newInstance(appAttemptId, 1);
|
||||
ContainerReport report = client.getContainerReport(containerId);
|
||||
Assert.assertNotNull(report);
|
||||
Assert.assertEquals(report.getContainerId().toString(), (ContainerId
|
||||
.newInstance(expectedReports.get(0).getCurrentApplicationAttemptId(), 1))
|
||||
.toString());
|
||||
client.stop();
|
||||
}
|
||||
|
||||
private static class MockAHSClient extends AHSClientImpl {
|
||||
// private ApplicationReport mockReport;
|
||||
private List<ApplicationReport> reports =
|
||||
new ArrayList<ApplicationReport>();
|
||||
private HashMap<ApplicationId, List<ApplicationAttemptReport>> attempts =
|
||||
new HashMap<ApplicationId, List<ApplicationAttemptReport>>();
|
||||
private HashMap<ApplicationAttemptId, List<ContainerReport>> containers =
|
||||
new HashMap<ApplicationAttemptId, List<ContainerReport>>();
|
||||
GetApplicationsResponse mockAppResponse =
|
||||
mock(GetApplicationsResponse.class);
|
||||
GetApplicationReportResponse mockResponse =
|
||||
mock(GetApplicationReportResponse.class);
|
||||
GetApplicationAttemptsResponse mockAppAttemptsResponse =
|
||||
mock(GetApplicationAttemptsResponse.class);
|
||||
GetApplicationAttemptReportResponse mockAttemptResponse =
|
||||
mock(GetApplicationAttemptReportResponse.class);
|
||||
GetContainersResponse mockContainersResponse =
|
||||
mock(GetContainersResponse.class);
|
||||
GetContainerReportResponse mockContainerResponse =
|
||||
mock(GetContainerReportResponse.class);
|
||||
|
||||
public MockAHSClient() {
|
||||
super();
|
||||
createAppReports();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void start() {
|
||||
ahsClient = mock(ApplicationHistoryProtocol.class);
|
||||
|
||||
try {
|
||||
when(
|
||||
ahsClient
|
||||
.getApplicationReport(any(GetApplicationReportRequest.class)))
|
||||
.thenReturn(mockResponse);
|
||||
when(ahsClient.getApplications(any(GetApplicationsRequest.class)))
|
||||
.thenReturn(mockAppResponse);
|
||||
when(
|
||||
ahsClient
|
||||
.getApplicationAttemptReport(any(GetApplicationAttemptReportRequest.class)))
|
||||
.thenReturn(mockAttemptResponse);
|
||||
when(
|
||||
ahsClient
|
||||
.getApplicationAttempts(any(GetApplicationAttemptsRequest.class)))
|
||||
.thenReturn(mockAppAttemptsResponse);
|
||||
when(ahsClient.getContainers(any(GetContainersRequest.class)))
|
||||
.thenReturn(mockContainersResponse);
|
||||
|
||||
when(ahsClient.getContainerReport(any(GetContainerReportRequest.class)))
|
||||
.thenReturn(mockContainerResponse);
|
||||
|
||||
} catch (YarnException e) {
|
||||
Assert.fail("Exception is not expected.");
|
||||
} catch (IOException e) {
|
||||
Assert.fail("Exception is not expected.");
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<ApplicationReport> getApplications() throws YarnException,
|
||||
IOException {
|
||||
when(mockAppResponse.getApplicationList()).thenReturn(reports);
|
||||
return super.getApplications();
|
||||
}
|
||||
|
||||
@Override
|
||||
public ApplicationReport getApplicationReport(ApplicationId appId)
|
||||
throws YarnException, IOException {
|
||||
when(mockResponse.getApplicationReport()).thenReturn(getReport(appId));
|
||||
return super.getApplicationReport(appId);
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<ApplicationAttemptReport> getApplicationAttempts(
|
||||
ApplicationId appId) throws YarnException, IOException {
|
||||
when(mockAppAttemptsResponse.getApplicationAttemptList()).thenReturn(
|
||||
getAttempts(appId));
|
||||
return super.getApplicationAttempts(appId);
|
||||
}
|
||||
|
||||
@Override
|
||||
public ApplicationAttemptReport getApplicationAttemptReport(
|
||||
ApplicationAttemptId appAttemptId) throws YarnException, IOException {
|
||||
when(mockAttemptResponse.getApplicationAttemptReport()).thenReturn(
|
||||
getAttempt(appAttemptId));
|
||||
return super.getApplicationAttemptReport(appAttemptId);
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<ContainerReport>
|
||||
getContainers(ApplicationAttemptId appAttemptId) throws YarnException,
|
||||
IOException {
|
||||
when(mockContainersResponse.getContainerList()).thenReturn(
|
||||
getContainersReport(appAttemptId));
|
||||
return super.getContainers(appAttemptId);
|
||||
}
|
||||
|
||||
@Override
|
||||
public ContainerReport getContainerReport(ContainerId containerId)
|
||||
throws YarnException, IOException {
|
||||
when(mockContainerResponse.getContainerReport()).thenReturn(
|
||||
getContainer(containerId));
|
||||
return super.getContainerReport(containerId);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void stop() {
|
||||
}
|
||||
|
||||
public ApplicationReport getReport(ApplicationId appId) {
|
||||
for (int i = 0; i < reports.size(); ++i) {
|
||||
if (appId.toString().equalsIgnoreCase(
|
||||
reports.get(i).getApplicationId().toString())) {
|
||||
return reports.get(i);
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
public List<ApplicationAttemptReport> getAttempts(ApplicationId appId) {
|
||||
return attempts.get(appId);
|
||||
}
|
||||
|
||||
public ApplicationAttemptReport
|
||||
getAttempt(ApplicationAttemptId appAttemptId) {
|
||||
return attempts.get(appAttemptId.getApplicationId()).get(0);
|
||||
}
|
||||
|
||||
public List<ContainerReport> getContainersReport(
|
||||
ApplicationAttemptId appAttemptId) {
|
||||
return containers.get(appAttemptId);
|
||||
}
|
||||
|
||||
public ContainerReport getContainer(ContainerId containerId) {
|
||||
return containers.get(containerId.getApplicationAttemptId()).get(0);
|
||||
}
|
||||
|
||||
public List<ApplicationReport> getReports() {
|
||||
return this.reports;
|
||||
}
|
||||
|
||||
private void createAppReports() {
|
||||
ApplicationId applicationId = ApplicationId.newInstance(1234, 5);
|
||||
ApplicationReport newApplicationReport =
|
||||
ApplicationReport.newInstance(applicationId,
|
||||
ApplicationAttemptId.newInstance(applicationId, 1), "user",
|
||||
"queue", "appname", "host", 124, null,
|
||||
YarnApplicationState.RUNNING, "diagnostics", "url", 0, 0,
|
||||
FinalApplicationStatus.SUCCEEDED, null, "N/A", 0.53789f, "YARN",
|
||||
null);
|
||||
List<ApplicationReport> applicationReports =
|
||||
new ArrayList<ApplicationReport>();
|
||||
applicationReports.add(newApplicationReport);
|
||||
List<ApplicationAttemptReport> appAttempts =
|
||||
new ArrayList<ApplicationAttemptReport>();
|
||||
ApplicationAttemptReport attempt =
|
||||
ApplicationAttemptReport.newInstance(
|
||||
ApplicationAttemptId.newInstance(applicationId, 1),
|
||||
"host",
|
||||
124,
|
||||
"url",
|
||||
"diagnostics",
|
||||
YarnApplicationAttemptState.FINISHED,
|
||||
ContainerId.newInstance(
|
||||
newApplicationReport.getCurrentApplicationAttemptId(), 1));
|
||||
appAttempts.add(attempt);
|
||||
ApplicationAttemptReport attempt1 =
|
||||
ApplicationAttemptReport.newInstance(
|
||||
ApplicationAttemptId.newInstance(applicationId, 2),
|
||||
"host",
|
||||
124,
|
||||
"url",
|
||||
"diagnostics",
|
||||
YarnApplicationAttemptState.FINISHED,
|
||||
ContainerId.newInstance(
|
||||
newApplicationReport.getCurrentApplicationAttemptId(), 2));
|
||||
appAttempts.add(attempt1);
|
||||
attempts.put(applicationId, appAttempts);
|
||||
|
||||
List<ContainerReport> containerReports = new ArrayList<ContainerReport>();
|
||||
ContainerReport container =
|
||||
ContainerReport.newInstance(
|
||||
ContainerId.newInstance(attempt.getApplicationAttemptId(), 1),
|
||||
null, NodeId.newInstance("host", 1234), Priority.UNDEFINED, 1234,
|
||||
5678, "diagnosticInfo", "logURL", 0, ContainerState.COMPLETE);
|
||||
containerReports.add(container);
|
||||
|
||||
ContainerReport container1 =
|
||||
ContainerReport.newInstance(
|
||||
ContainerId.newInstance(attempt.getApplicationAttemptId(), 2),
|
||||
null, NodeId.newInstance("host", 1234), Priority.UNDEFINED, 1234,
|
||||
5678, "diagnosticInfo", "logURL", 0, ContainerState.COMPLETE);
|
||||
containerReports.add(container1);
|
||||
containers.put(attempt.getApplicationAttemptId(), containerReports);
|
||||
|
||||
ApplicationId applicationId2 = ApplicationId.newInstance(1234, 6);
|
||||
ApplicationReport newApplicationReport2 =
|
||||
ApplicationReport.newInstance(applicationId2,
|
||||
ApplicationAttemptId.newInstance(applicationId2, 2), "user2",
|
||||
"queue2", "appname2", "host2", 125, null,
|
||||
YarnApplicationState.FINISHED, "diagnostics2", "url2", 2, 2,
|
||||
FinalApplicationStatus.SUCCEEDED, null, "N/A", 0.63789f,
|
||||
"NON-YARN", null);
|
||||
applicationReports.add(newApplicationReport2);
|
||||
|
||||
ApplicationId applicationId3 = ApplicationId.newInstance(1234, 7);
|
||||
ApplicationReport newApplicationReport3 =
|
||||
ApplicationReport.newInstance(applicationId3,
|
||||
ApplicationAttemptId.newInstance(applicationId3, 3), "user3",
|
||||
"queue3", "appname3", "host3", 126, null,
|
||||
YarnApplicationState.RUNNING, "diagnostics3", "url3", 3, 3,
|
||||
FinalApplicationStatus.SUCCEEDED, null, "N/A", 0.73789f,
|
||||
"MAPREDUCE", null);
|
||||
applicationReports.add(newApplicationReport3);
|
||||
|
||||
ApplicationId applicationId4 = ApplicationId.newInstance(1234, 8);
|
||||
ApplicationReport newApplicationReport4 =
|
||||
ApplicationReport.newInstance(applicationId4,
|
||||
ApplicationAttemptId.newInstance(applicationId4, 4), "user4",
|
||||
"queue4", "appname4", "host4", 127, null,
|
||||
YarnApplicationState.FAILED, "diagnostics4", "url4", 4, 4,
|
||||
FinalApplicationStatus.SUCCEEDED, null, "N/A", 0.83789f,
|
||||
"NON-MAPREDUCE", null);
|
||||
applicationReports.add(newApplicationReport4);
|
||||
reports = applicationReports;
|
||||
}
|
||||
}
|
||||
}
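Beyond the mocked protocol used in the test above, a hedged sketch of how AHSClient itself might be driven against a live Application History Server; the configuration source and printed fields are assumptions for illustration only.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.client.api.AHSClient;

// Illustrative sketch only: lists the applications known to the history server.
public class AHSClientSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration(); // assumes the AHS address is set in yarn-site.xml
    AHSClient ahsClient = AHSClient.createAHSClient();
    ahsClient.init(conf);
    ahsClient.start();
    try {
      for (ApplicationReport report : ahsClient.getApplications()) {
        System.out.println(report.getApplicationId() + " "
            + report.getYarnApplicationState());
      }
    } finally {
      ahsClient.stop();
    }
  }
}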
@@ -58,12 +58,10 @@ import org.apache.hadoop.yarn.client.api.YarnClient;
|
|||
import org.apache.hadoop.yarn.client.api.YarnClientApplication;
|
||||
import org.apache.hadoop.yarn.conf.YarnConfiguration;
|
||||
import org.apache.hadoop.yarn.exceptions.YarnException;
|
||||
import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
|
||||
import org.apache.hadoop.yarn.server.MiniYARNCluster;
|
||||
import org.apache.hadoop.yarn.server.resourcemanager.MockRM;
|
||||
import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
|
||||
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp;
|
||||
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppState;
|
||||
import org.apache.hadoop.yarn.util.Records;
|
||||
import org.apache.log4j.Level;
|
||||
import org.apache.log4j.LogManager;
|
||||
|
|
|
@@ -43,19 +43,26 @@ import junit.framework.Assert;
|
|||
|
||||
import org.apache.commons.lang.time.DateFormatUtils;
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationAttemptReport;
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationId;
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationReport;
|
||||
import org.apache.hadoop.yarn.api.records.ContainerId;
|
||||
import org.apache.hadoop.yarn.api.records.ContainerReport;
|
||||
import org.apache.hadoop.yarn.api.records.ContainerState;
|
||||
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
|
||||
import org.apache.hadoop.yarn.api.records.NodeId;
|
||||
import org.apache.hadoop.yarn.api.records.NodeReport;
|
||||
import org.apache.hadoop.yarn.api.records.NodeState;
|
||||
import org.apache.hadoop.yarn.api.records.Priority;
|
||||
import org.apache.hadoop.yarn.api.records.Resource;
|
||||
import org.apache.hadoop.yarn.api.records.YarnApplicationAttemptState;
|
||||
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
|
||||
import org.apache.hadoop.yarn.client.api.YarnClient;
|
||||
import org.apache.hadoop.yarn.exceptions.ApplicationNotFoundException;
|
||||
import org.apache.hadoop.yarn.util.Records;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import org.mortbay.log.Log;
|
||||
|
||||
import org.apache.commons.cli.Options;
|
||||
|
||||
|
@@ -113,20 +120,181 @@ public class TestYarnCLI {
|
|||
verify(sysOut, times(1)).println(isA(String.class));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGetApplicationAttemptReport() throws Exception {
|
||||
ApplicationCLI cli = createAndGetAppCLI();
|
||||
ApplicationId applicationId = ApplicationId.newInstance(1234, 5);
|
||||
ApplicationAttemptId attemptId = ApplicationAttemptId.newInstance(
|
||||
applicationId, 1);
|
||||
ApplicationAttemptReport attemptReport = ApplicationAttemptReport
|
||||
.newInstance(attemptId, "host", 124, "url", "diagnostics",
|
||||
YarnApplicationAttemptState.FINISHED, ContainerId.newInstance(
|
||||
attemptId, 1));
|
||||
when(
|
||||
client
|
||||
.getApplicationAttemptReport(any(ApplicationAttemptId.class)))
|
||||
.thenReturn(attemptReport);
|
||||
int result = cli.run(new String[] { "applicationattempt", "-status",
|
||||
attemptId.toString() });
|
||||
assertEquals(0, result);
|
||||
verify(client).getApplicationAttemptReport(attemptId);
|
||||
ByteArrayOutputStream baos = new ByteArrayOutputStream();
|
||||
PrintWriter pw = new PrintWriter(baos);
|
||||
pw.println("Application Attempt Report : ");
|
||||
pw.println("\tApplicationAttempt-Id : appattempt_1234_0005_000001");
|
||||
pw.println("\tState : FINISHED");
|
||||
pw.println("\tAMContainer : container_1234_0005_01_000001");
|
||||
pw.println("\tTracking-URL : url");
|
||||
pw.println("\tRPC Port : 124");
|
||||
pw.println("\tAM Host : host");
|
||||
pw.println("\tDiagnostics : diagnostics");
|
||||
pw.close();
|
||||
String appReportStr = baos.toString("UTF-8");
|
||||
Assert.assertEquals(appReportStr, sysOutStream.toString());
|
||||
verify(sysOut, times(1)).println(isA(String.class));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGetApplicationAttempts() throws Exception {
|
||||
ApplicationCLI cli = createAndGetAppCLI();
|
||||
ApplicationId applicationId = ApplicationId.newInstance(1234, 5);
|
||||
ApplicationAttemptId attemptId = ApplicationAttemptId.newInstance(
|
||||
applicationId, 1);
|
||||
ApplicationAttemptId attemptId1 = ApplicationAttemptId.newInstance(
|
||||
applicationId, 2);
|
||||
ApplicationAttemptReport attemptReport = ApplicationAttemptReport
|
||||
.newInstance(attemptId, "host", 124, "url", "diagnostics",
|
||||
YarnApplicationAttemptState.FINISHED, ContainerId.newInstance(
|
||||
attemptId, 1));
|
||||
ApplicationAttemptReport attemptReport1 = ApplicationAttemptReport
|
||||
.newInstance(attemptId1, "host", 124, "url", "diagnostics",
|
||||
YarnApplicationAttemptState.FINISHED, ContainerId.newInstance(
|
||||
attemptId1, 1));
|
||||
List<ApplicationAttemptReport> reports = new ArrayList<ApplicationAttemptReport>();
|
||||
reports.add(attemptReport);
|
||||
reports.add(attemptReport1);
|
||||
when(client.getApplicationAttempts(any(ApplicationId.class)))
|
||||
.thenReturn(reports);
|
||||
int result = cli.run(new String[] { "applicationattempt", "-list",
|
||||
applicationId.toString() });
|
||||
assertEquals(0, result);
|
||||
verify(client).getApplicationAttempts(applicationId);
|
||||
ByteArrayOutputStream baos = new ByteArrayOutputStream();
|
||||
PrintWriter pw = new PrintWriter(baos);
|
||||
pw.println("Total number of application attempts :2");
|
||||
pw.print(" ApplicationAttempt-Id");
|
||||
pw.print("\t State");
|
||||
pw.print("\t AM-Container-Id");
|
||||
pw.println("\t Tracking-URL");
|
||||
pw.print(" appattempt_1234_0005_000001");
|
||||
pw.print("\t FINISHED");
|
||||
pw.print("\t container_1234_0005_01_000001");
|
||||
pw.println("\t url");
|
||||
pw.print(" appattempt_1234_0005_000002");
|
||||
pw.print("\t FINISHED");
|
||||
pw.print("\t container_1234_0005_02_000001");
|
||||
pw.println("\t url");
|
||||
pw.close();
|
||||
String appReportStr = baos.toString("UTF-8");
|
||||
Assert.assertEquals(appReportStr, sysOutStream.toString());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGetContainerReport() throws Exception {
|
||||
ApplicationCLI cli = createAndGetAppCLI();
|
||||
ApplicationId applicationId = ApplicationId.newInstance(1234, 5);
|
||||
ApplicationAttemptId attemptId = ApplicationAttemptId.newInstance(
|
||||
applicationId, 1);
|
||||
ContainerId containerId = ContainerId.newInstance(attemptId, 1);
|
||||
ContainerReport container = ContainerReport.newInstance(containerId, null,
|
||||
NodeId.newInstance("host", 1234), Priority.UNDEFINED, 1234, 5678,
|
||||
"diagnosticInfo", "logURL", 0, ContainerState.COMPLETE);
|
||||
when(client.getContainerReport(any(ContainerId.class))).thenReturn(
|
||||
container);
|
||||
int result = cli.run(new String[] { "container", "-status",
|
||||
containerId.toString() });
|
||||
assertEquals(0, result);
|
||||
verify(client).getContainerReport(containerId);
|
||||
ByteArrayOutputStream baos = new ByteArrayOutputStream();
|
||||
PrintWriter pw = new PrintWriter(baos);
|
||||
pw.println("Container Report : ");
|
||||
pw.println("\tContainer-Id : container_1234_0005_01_000001");
|
||||
pw.println("\tStart-Time : 1234");
|
||||
pw.println("\tFinish-Time : 5678");
|
||||
pw.println("\tState : COMPLETE");
|
||||
pw.println("\tLOG-URL : logURL");
|
||||
pw.println("\tHost : host:1234");
|
||||
pw.println("\tDiagnostics : diagnosticInfo");
|
||||
pw.close();
|
||||
String appReportStr = baos.toString("UTF-8");
|
||||
Assert.assertEquals(appReportStr, sysOutStream.toString());
|
||||
verify(sysOut, times(1)).println(isA(String.class));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGetContainers() throws Exception {
|
||||
ApplicationCLI cli = createAndGetAppCLI();
|
||||
ApplicationId applicationId = ApplicationId.newInstance(1234, 5);
|
||||
ApplicationAttemptId attemptId = ApplicationAttemptId.newInstance(
|
||||
applicationId, 1);
|
||||
ContainerId containerId = ContainerId.newInstance(attemptId, 1);
|
||||
ContainerId containerId1 = ContainerId.newInstance(attemptId, 2);
|
||||
ContainerReport container = ContainerReport.newInstance(containerId, null,
|
||||
NodeId.newInstance("host", 1234), Priority.UNDEFINED, 1234, 5678,
|
||||
"diagnosticInfo", "logURL", 0, ContainerState.COMPLETE);
|
||||
ContainerReport container1 = ContainerReport.newInstance(containerId1, null,
|
||||
NodeId.newInstance("host", 1234), Priority.UNDEFINED, 1234, 5678,
|
||||
"diagnosticInfo", "logURL", 0, ContainerState.COMPLETE);
|
||||
List<ContainerReport> reports = new ArrayList<ContainerReport>();
|
||||
reports.add(container);
|
||||
reports.add(container1);
|
||||
when(client.getContainers(any(ApplicationAttemptId.class))).thenReturn(
|
||||
reports);
|
||||
int result = cli.run(new String[] { "container", "-list",
|
||||
attemptId.toString() });
|
||||
assertEquals(0, result);
|
||||
verify(client).getContainers(attemptId);
|
||||
Log.info(sysOutStream.toString());
|
||||
ByteArrayOutputStream baos = new ByteArrayOutputStream();
|
||||
PrintWriter pw = new PrintWriter(baos);
|
||||
pw.println("Total number of containers :2");
|
||||
pw.print(" Container-Id");
|
||||
pw.print("\t Start Time");
|
||||
pw.print("\t Finish Time");
|
||||
pw.print("\t State");
|
||||
pw.print("\t Host");
|
||||
pw.println("\t LOG-URL");
|
||||
pw.print(" container_1234_0005_01_000001");
|
||||
pw.print("\t 1234");
|
||||
pw.print("\t 5678");
|
||||
pw.print("\t COMPLETE");
|
||||
pw.print("\t host:1234");
|
||||
pw.println("\t logURL");
|
||||
pw.print(" container_1234_0005_01_000002");
|
||||
pw.print("\t 1234");
|
||||
pw.print("\t 5678");
|
||||
pw.print("\t COMPLETE");
|
||||
pw.print("\t host:1234");
|
||||
pw.println("\t logURL");
|
||||
pw.close();
|
||||
String appReportStr = baos.toString("UTF-8");
|
||||
Assert.assertEquals(appReportStr, sysOutStream.toString());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGetApplicationReportException() throws Exception {
|
||||
ApplicationCLI cli = createAndGetAppCLI();
|
||||
ApplicationId applicationId = ApplicationId.newInstance(1234, 5);
|
||||
when(client.getApplicationReport(any(ApplicationId.class))).thenThrow(
|
||||
new ApplicationNotFoundException("Application with id '"
|
||||
+ applicationId + "' doesn't exist in RM."));
|
||||
new ApplicationNotFoundException("History file for application"
|
||||
+ applicationId + " is not found"));
|
||||
try {
|
||||
cli.run(new String[] { "-status", applicationId.toString() });
|
||||
Assert.fail();
|
||||
} catch (Exception ex) {
|
||||
Assert.assertTrue(ex instanceof ApplicationNotFoundException);
|
||||
Assert.assertEquals("Application with id '" + applicationId
|
||||
+ "' doesn't exist in RM.", ex.getMessage());
|
||||
Assert.assertEquals("History file for application"
|
||||
+ applicationId + " is not found", ex.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,33 @@
|
|||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.yarn.api;
|
||||
|
||||
import org.apache.hadoop.classification.InterfaceAudience.Private;
|
||||
import org.apache.hadoop.classification.InterfaceStability.Unstable;
|
||||
import org.apache.hadoop.ipc.ProtocolInfo;
|
||||
import org.apache.hadoop.yarn.proto.ApplicationHistoryProtocol.ApplicationHistoryProtocolService;
|
||||
|
||||
@Private
|
||||
@Unstable
|
||||
@ProtocolInfo(
|
||||
protocolName = "org.apache.hadoop.yarn.api.ApplicationHistoryProtocolPB",
|
||||
protocolVersion = 1)
|
||||
public interface ApplicationHistoryProtocolPB extends
|
||||
ApplicationHistoryProtocolService.BlockingInterface {
|
||||
}
|
|
@@ -0,0 +1,230 @@
|
|||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.yarn.api.impl.pb.client;
|
||||
|
||||
import java.io.Closeable;
|
||||
import java.io.IOException;
|
||||
import java.net.InetSocketAddress;
|
||||
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.ipc.ProtobufRpcEngine;
|
||||
import org.apache.hadoop.ipc.RPC;
|
||||
import org.apache.hadoop.security.proto.SecurityProtos.CancelDelegationTokenRequestProto;
|
||||
import org.apache.hadoop.security.proto.SecurityProtos.GetDelegationTokenRequestProto;
|
||||
import org.apache.hadoop.security.proto.SecurityProtos.RenewDelegationTokenRequestProto;
|
||||
import org.apache.hadoop.yarn.api.ApplicationClientProtocolPB;
|
||||
import org.apache.hadoop.yarn.api.ApplicationHistoryProtocol;
|
||||
import org.apache.hadoop.yarn.api.ApplicationHistoryProtocolPB;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.CancelDelegationTokenRequest;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.CancelDelegationTokenResponse;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptReportRequest;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptReportResponse;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptsRequest;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptsResponse;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportRequest;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportResponse;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsResponse;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.GetContainerReportRequest;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.GetContainerReportResponse;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.GetContainersRequest;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.GetContainersResponse;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.GetDelegationTokenRequest;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.GetDelegationTokenResponse;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.RenewDelegationTokenRequest;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.RenewDelegationTokenResponse;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.CancelDelegationTokenRequestPBImpl;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.CancelDelegationTokenResponsePBImpl;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetApplicationAttemptReportRequestPBImpl;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetApplicationAttemptReportResponsePBImpl;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetApplicationAttemptsRequestPBImpl;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetApplicationAttemptsResponsePBImpl;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetApplicationReportRequestPBImpl;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetApplicationReportResponsePBImpl;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetApplicationsRequestPBImpl;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetApplicationsResponsePBImpl;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetContainerReportRequestPBImpl;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetContainerReportResponsePBImpl;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetContainersRequestPBImpl;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetContainersResponsePBImpl;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetDelegationTokenRequestPBImpl;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetDelegationTokenResponsePBImpl;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.RenewDelegationTokenRequestPBImpl;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.RenewDelegationTokenResponsePBImpl;
|
||||
import org.apache.hadoop.yarn.exceptions.YarnException;
|
||||
import org.apache.hadoop.yarn.ipc.RPCUtil;
|
||||
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto;
|
||||
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto;
|
||||
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto;
|
||||
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto;
|
||||
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto;
|
||||
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto;
|
||||
|
||||
import com.google.protobuf.ServiceException;
|
||||
|
||||
public class ApplicationHistoryProtocolPBClientImpl implements
|
||||
ApplicationHistoryProtocol, Closeable {
|
||||
|
||||
private ApplicationHistoryProtocolPB proxy;
|
||||
|
||||
public ApplicationHistoryProtocolPBClientImpl(long clientVersion,
|
||||
InetSocketAddress addr, Configuration conf) throws IOException {
|
||||
RPC.setProtocolEngine(conf, ApplicationHistoryProtocolPB.class,
|
||||
ProtobufRpcEngine.class);
|
||||
proxy =
|
||||
RPC.getProxy(ApplicationHistoryProtocolPB.class, clientVersion, addr,
|
||||
conf);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void close() throws IOException {
|
||||
if (this.proxy != null) {
|
||||
RPC.stopProxy(this.proxy);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public GetApplicationReportResponse getApplicationReport(
|
||||
GetApplicationReportRequest request) throws YarnException, IOException {
|
||||
GetApplicationReportRequestProto requestProto =
|
||||
((GetApplicationReportRequestPBImpl) request).getProto();
|
||||
try {
|
||||
return new GetApplicationReportResponsePBImpl(proxy.getApplicationReport(
|
||||
null, requestProto));
|
||||
} catch (ServiceException e) {
|
||||
RPCUtil.unwrapAndThrowException(e);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public GetApplicationsResponse
|
||||
getApplications(GetApplicationsRequest request) throws YarnException,
|
||||
IOException {
|
||||
GetApplicationsRequestProto requestProto =
|
||||
((GetApplicationsRequestPBImpl) request).getProto();
|
||||
try {
|
||||
return new GetApplicationsResponsePBImpl(proxy.getApplications(null,
|
||||
requestProto));
|
||||
} catch (ServiceException e) {
|
||||
RPCUtil.unwrapAndThrowException(e);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public GetApplicationAttemptReportResponse getApplicationAttemptReport(
|
||||
GetApplicationAttemptReportRequest request) throws YarnException,
|
||||
IOException {
|
||||
GetApplicationAttemptReportRequestProto requestProto =
|
||||
((GetApplicationAttemptReportRequestPBImpl) request).getProto();
|
||||
try {
|
||||
return new GetApplicationAttemptReportResponsePBImpl(
|
||||
proxy.getApplicationAttemptReport(null, requestProto));
|
||||
} catch (ServiceException e) {
|
||||
RPCUtil.unwrapAndThrowException(e);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public GetApplicationAttemptsResponse getApplicationAttempts(
|
||||
GetApplicationAttemptsRequest request) throws YarnException, IOException {
|
||||
GetApplicationAttemptsRequestProto requestProto =
|
||||
((GetApplicationAttemptsRequestPBImpl) request).getProto();
|
||||
try {
|
||||
return new GetApplicationAttemptsResponsePBImpl(
|
||||
proxy.getApplicationAttempts(null, requestProto));
|
||||
} catch (ServiceException e) {
|
||||
RPCUtil.unwrapAndThrowException(e);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public GetContainerReportResponse getContainerReport(
|
||||
GetContainerReportRequest request) throws YarnException, IOException {
|
||||
GetContainerReportRequestProto requestProto =
|
||||
((GetContainerReportRequestPBImpl) request).getProto();
|
||||
try {
|
||||
return new GetContainerReportResponsePBImpl(proxy.getContainerReport(
|
||||
null, requestProto));
|
||||
} catch (ServiceException e) {
|
||||
RPCUtil.unwrapAndThrowException(e);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public GetContainersResponse getContainers(GetContainersRequest request)
|
||||
throws YarnException, IOException {
|
||||
GetContainersRequestProto requestProto =
|
||||
((GetContainersRequestPBImpl) request).getProto();
|
||||
try {
|
||||
return new GetContainersResponsePBImpl(proxy.getContainers(null,
|
||||
requestProto));
|
||||
} catch (ServiceException e) {
|
||||
RPCUtil.unwrapAndThrowException(e);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public GetDelegationTokenResponse getDelegationToken(
|
||||
GetDelegationTokenRequest request) throws YarnException, IOException {
|
||||
GetDelegationTokenRequestProto requestProto =
|
||||
((GetDelegationTokenRequestPBImpl) request).getProto();
|
||||
try {
|
||||
return new GetDelegationTokenResponsePBImpl(proxy.getDelegationToken(
|
||||
null, requestProto));
|
||||
} catch (ServiceException e) {
|
||||
RPCUtil.unwrapAndThrowException(e);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public RenewDelegationTokenResponse renewDelegationToken(
|
||||
RenewDelegationTokenRequest request) throws YarnException, IOException {
|
||||
RenewDelegationTokenRequestProto requestProto =
|
||||
((RenewDelegationTokenRequestPBImpl) request).getProto();
|
||||
try {
|
||||
return new RenewDelegationTokenResponsePBImpl(proxy.renewDelegationToken(
|
||||
null, requestProto));
|
||||
} catch (ServiceException e) {
|
||||
RPCUtil.unwrapAndThrowException(e);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public CancelDelegationTokenResponse cancelDelegationToken(
|
||||
CancelDelegationTokenRequest request) throws YarnException, IOException {
|
||||
CancelDelegationTokenRequestProto requestProto =
|
||||
((CancelDelegationTokenRequestPBImpl) request).getProto();
|
||||
try {
|
||||
return new CancelDelegationTokenResponsePBImpl(
|
||||
proxy.cancelDelegationToken(null, requestProto));
|
||||
|
||||
} catch (ServiceException e) {
|
||||
RPCUtil.unwrapAndThrowException(e);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
}
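A hedged sketch of constructing this PB client directly; in practice it would normally be obtained through the YARN RPC factories, and the history-server RPC address below is an assumption for illustration only.

import java.net.InetSocketAddress;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.api.impl.pb.client.ApplicationHistoryProtocolPBClientImpl;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsResponse;

// Illustrative sketch only: one request/response round trip over the new protocol.
public class HistoryPBClientSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Hypothetical history-server RPC address; substitute the real one.
    InetSocketAddress ahsAddress = new InetSocketAddress("localhost", 10200);
    ApplicationHistoryProtocolPBClientImpl client =
        new ApplicationHistoryProtocolPBClientImpl(1, ahsAddress, conf);
    try {
      GetApplicationsResponse response =
          client.getApplications(GetApplicationsRequest.newInstance());
      System.out.println("Applications in history: "
          + response.getApplicationList().size());
    } finally {
      client.close();
    }
  }
}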
@@ -0,0 +1,230 @@
|
|||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.yarn.api.impl.pb.service;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import org.apache.hadoop.classification.InterfaceAudience.Private;
|
||||
import org.apache.hadoop.security.proto.SecurityProtos.CancelDelegationTokenRequestProto;
|
import org.apache.hadoop.security.proto.SecurityProtos.CancelDelegationTokenResponseProto;
import org.apache.hadoop.security.proto.SecurityProtos.GetDelegationTokenRequestProto;
import org.apache.hadoop.security.proto.SecurityProtos.GetDelegationTokenResponseProto;
import org.apache.hadoop.security.proto.SecurityProtos.RenewDelegationTokenRequestProto;
import org.apache.hadoop.security.proto.SecurityProtos.RenewDelegationTokenResponseProto;
import org.apache.hadoop.yarn.api.ApplicationHistoryProtocol;
import org.apache.hadoop.yarn.api.ApplicationHistoryProtocolPB;
import org.apache.hadoop.yarn.api.protocolrecords.CancelDelegationTokenResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptReportResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptsResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetContainerReportResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetContainersResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetDelegationTokenResponse;
import org.apache.hadoop.yarn.api.protocolrecords.RenewDelegationTokenResponse;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.CancelDelegationTokenRequestPBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.CancelDelegationTokenResponsePBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetApplicationAttemptReportRequestPBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetApplicationAttemptReportResponsePBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetApplicationAttemptsRequestPBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetApplicationAttemptsResponsePBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetApplicationReportRequestPBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetApplicationReportResponsePBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetApplicationsRequestPBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetApplicationsResponsePBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetContainerReportRequestPBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetContainerReportResponsePBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetContainersRequestPBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetContainersResponsePBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetDelegationTokenRequestPBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetDelegationTokenResponsePBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.RenewDelegationTokenRequestPBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.RenewDelegationTokenResponsePBImpl;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportRequestProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationReportResponseProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsRequestProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationsResponseProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto;

import com.google.protobuf.RpcController;
import com.google.protobuf.ServiceException;

@Private
public class ApplicationHistoryProtocolPBServiceImpl implements
    ApplicationHistoryProtocolPB {
  private ApplicationHistoryProtocol real;

  public ApplicationHistoryProtocolPBServiceImpl(ApplicationHistoryProtocol impl) {
    this.real = impl;
  }

  @Override
  public GetApplicationReportResponseProto getApplicationReport(
      RpcController arg0, GetApplicationReportRequestProto proto)
      throws ServiceException {
    GetApplicationReportRequestPBImpl request =
        new GetApplicationReportRequestPBImpl(proto);
    try {
      GetApplicationReportResponse response =
          real.getApplicationReport(request);
      return ((GetApplicationReportResponsePBImpl) response).getProto();
    } catch (YarnException e) {
      throw new ServiceException(e);
    } catch (IOException e) {
      throw new ServiceException(e);
    }
  }

  @Override
  public GetApplicationsResponseProto getApplications(RpcController controller,
      GetApplicationsRequestProto proto) throws ServiceException {
    GetApplicationsRequestPBImpl request =
        new GetApplicationsRequestPBImpl(proto);
    try {
      GetApplicationsResponse response = real.getApplications(request);
      return ((GetApplicationsResponsePBImpl) response).getProto();
    } catch (YarnException e) {
      throw new ServiceException(e);
    } catch (IOException e) {
      throw new ServiceException(e);
    }
  }

  @Override
  public GetApplicationAttemptReportResponseProto getApplicationAttemptReport(
      RpcController controller, GetApplicationAttemptReportRequestProto proto)
      throws ServiceException {
    GetApplicationAttemptReportRequestPBImpl request =
        new GetApplicationAttemptReportRequestPBImpl(proto);
    try {
      GetApplicationAttemptReportResponse response =
          real.getApplicationAttemptReport(request);
      return ((GetApplicationAttemptReportResponsePBImpl) response).getProto();
    } catch (YarnException e) {
      throw new ServiceException(e);
    } catch (IOException e) {
      throw new ServiceException(e);
    }
  }

  @Override
  public GetApplicationAttemptsResponseProto getApplicationAttempts(
      RpcController controller, GetApplicationAttemptsRequestProto proto)
      throws ServiceException {
    GetApplicationAttemptsRequestPBImpl request =
        new GetApplicationAttemptsRequestPBImpl(proto);
    try {
      GetApplicationAttemptsResponse response =
          real.getApplicationAttempts(request);
      return ((GetApplicationAttemptsResponsePBImpl) response).getProto();
    } catch (YarnException e) {
      throw new ServiceException(e);
    } catch (IOException e) {
      throw new ServiceException(e);
    }
  }

  @Override
  public GetContainerReportResponseProto getContainerReport(
      RpcController controller, GetContainerReportRequestProto proto)
      throws ServiceException {
    GetContainerReportRequestPBImpl request =
        new GetContainerReportRequestPBImpl(proto);
    try {
      GetContainerReportResponse response = real.getContainerReport(request);
      return ((GetContainerReportResponsePBImpl) response).getProto();
    } catch (YarnException e) {
      throw new ServiceException(e);
    } catch (IOException e) {
      throw new ServiceException(e);
    }
  }

  @Override
  public GetContainersResponseProto getContainers(RpcController controller,
      GetContainersRequestProto proto) throws ServiceException {
    GetContainersRequestPBImpl request = new GetContainersRequestPBImpl(proto);
    try {
      GetContainersResponse response = real.getContainers(request);
      return ((GetContainersResponsePBImpl) response).getProto();
    } catch (YarnException e) {
      throw new ServiceException(e);
    } catch (IOException e) {
      throw new ServiceException(e);
    }
  }

  @Override
  public GetDelegationTokenResponseProto getDelegationToken(
      RpcController controller, GetDelegationTokenRequestProto proto)
      throws ServiceException {
    GetDelegationTokenRequestPBImpl request =
        new GetDelegationTokenRequestPBImpl(proto);
    try {
      GetDelegationTokenResponse response = real.getDelegationToken(request);
      return ((GetDelegationTokenResponsePBImpl) response).getProto();
    } catch (YarnException e) {
      throw new ServiceException(e);
    } catch (IOException e) {
      throw new ServiceException(e);
    }
  }

  @Override
  public RenewDelegationTokenResponseProto renewDelegationToken(
      RpcController controller, RenewDelegationTokenRequestProto proto)
      throws ServiceException {
    RenewDelegationTokenRequestPBImpl request =
        new RenewDelegationTokenRequestPBImpl(proto);
    try {
      RenewDelegationTokenResponse response =
          real.renewDelegationToken(request);
      return ((RenewDelegationTokenResponsePBImpl) response).getProto();
    } catch (YarnException e) {
      throw new ServiceException(e);
    } catch (IOException e) {
      throw new ServiceException(e);
    }
  }

  @Override
  public CancelDelegationTokenResponseProto cancelDelegationToken(
      RpcController controller, CancelDelegationTokenRequestProto proto)
      throws ServiceException {
    CancelDelegationTokenRequestPBImpl request =
        new CancelDelegationTokenRequestPBImpl(proto);
    try {
      CancelDelegationTokenResponse response =
          real.cancelDelegationToken(request);
      return ((CancelDelegationTokenResponsePBImpl) response).getProto();
    } catch (YarnException e) {
      throw new ServiceException(e);
    } catch (IOException e) {
      throw new ServiceException(e);
    }
  }
}
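(Editor's note: the service implementation above only unwraps each protobuf request into its record object, delegates to the real ApplicationHistoryProtocol, and re-wraps the result, turning YarnException/IOException into ServiceException. A minimal client-side sketch of the same round trip follows; it is not part of this patch, and the historyProtocol handle plus the newInstance factory methods are assumed for illustration.)

// Illustrative sketch only, not part of this commit.
import org.apache.hadoop.yarn.api.ApplicationHistoryProtocol;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptReportRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptReportResponse;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptReport;
import org.apache.hadoop.yarn.api.records.ApplicationId;

public class HistoryClientSketch {
  public static ApplicationAttemptReport fetchAttemptReport(
      ApplicationHistoryProtocol historyProtocol, ApplicationId appId)
      throws Exception {
    // Build the request record; on the server side the PB service impl above
    // converts the wire proto back into exactly this record type.
    ApplicationAttemptId attemptId = ApplicationAttemptId.newInstance(appId, 1);
    GetApplicationAttemptReportRequest request =
        GetApplicationAttemptReportRequest.newInstance(attemptId); // factory assumed
    GetApplicationAttemptReportResponse response =
        historyProtocol.getApplicationAttemptReport(request);
    return response.getApplicationAttemptReport();
  }
}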
@ -0,0 +1,140 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.yarn.api.protocolrecords.impl.pb;

import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptReportRequest;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationAttemptIdPBImpl;
import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProtoOrBuilder;

import com.google.protobuf.TextFormat;

@Private
@Unstable
public class GetApplicationAttemptReportRequestPBImpl extends
    GetApplicationAttemptReportRequest {

  GetApplicationAttemptReportRequestProto proto =
      GetApplicationAttemptReportRequestProto.getDefaultInstance();
  GetApplicationAttemptReportRequestProto.Builder builder = null;
  boolean viaProto = false;

  private ApplicationAttemptId applicationAttemptId = null;

  public GetApplicationAttemptReportRequestPBImpl() {
    builder = GetApplicationAttemptReportRequestProto.newBuilder();
  }

  public GetApplicationAttemptReportRequestPBImpl(
      GetApplicationAttemptReportRequestProto proto) {
    this.proto = proto;
    viaProto = true;
  }

  public GetApplicationAttemptReportRequestProto getProto() {
    mergeLocalToProto();
    proto = viaProto ? proto : builder.build();
    viaProto = true;
    return proto;
  }

  @Override
  public int hashCode() {
    return getProto().hashCode();
  }

  @Override
  public boolean equals(Object other) {
    if (other == null) {
      return false;
    }
    if (other.getClass().isAssignableFrom(this.getClass())) {
      return this.getProto().equals(this.getClass().cast(other).getProto());
    }
    return false;
  }

  @Override
  public String toString() {
    return TextFormat.shortDebugString(getProto());
  }

  private void mergeLocalToBuilder() {
    if (applicationAttemptId != null) {
      builder
        .setApplicationAttemptId(convertToProtoFormat(this.applicationAttemptId));
    }
  }

  private void mergeLocalToProto() {
    if (viaProto) {
      maybeInitBuilder();
    }
    mergeLocalToBuilder();
    proto = builder.build();
    viaProto = true;
  }

  private void maybeInitBuilder() {
    if (viaProto || builder == null) {
      builder = GetApplicationAttemptReportRequestProto.newBuilder(proto);
    }
    viaProto = false;
  }

  @Override
  public ApplicationAttemptId getApplicationAttemptId() {
    if (this.applicationAttemptId != null) {
      return this.applicationAttemptId;
    }
    GetApplicationAttemptReportRequestProtoOrBuilder p =
        viaProto ? proto : builder;
    if (!p.hasApplicationAttemptId()) {
      return null;
    }
    this.applicationAttemptId =
        convertFromProtoFormat(p.getApplicationAttemptId());
    return this.applicationAttemptId;
  }

  @Override
  public void
      setApplicationAttemptId(ApplicationAttemptId applicationAttemptId) {
    maybeInitBuilder();
    if (applicationAttemptId == null) {
      builder.clearApplicationAttemptId();
    }
    this.applicationAttemptId = applicationAttemptId;
  }

  private ApplicationAttemptIdPBImpl convertFromProtoFormat(
      ApplicationAttemptIdProto p) {
    return new ApplicationAttemptIdPBImpl(p);
  }

  private ApplicationAttemptIdProto
      convertToProtoFormat(ApplicationAttemptId t) {
    return ((ApplicationAttemptIdPBImpl) t).getProto();
  }

}
@ -0,0 +1,140 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.yarn.api.protocolrecords.impl.pb;

import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptReportResponse;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptReport;
import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationAttemptReportPBImpl;
import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProtoOrBuilder;

import com.google.protobuf.TextFormat;

@Private
@Unstable
public class GetApplicationAttemptReportResponsePBImpl extends
    GetApplicationAttemptReportResponse {

  GetApplicationAttemptReportResponseProto proto =
      GetApplicationAttemptReportResponseProto.getDefaultInstance();
  GetApplicationAttemptReportResponseProto.Builder builder = null;
  boolean viaProto = false;

  private ApplicationAttemptReport applicationAttemptReport = null;

  public GetApplicationAttemptReportResponsePBImpl() {
    builder = GetApplicationAttemptReportResponseProto.newBuilder();
  }

  public GetApplicationAttemptReportResponsePBImpl(
      GetApplicationAttemptReportResponseProto proto) {
    this.proto = proto;
    viaProto = true;
  }

  public GetApplicationAttemptReportResponseProto getProto() {
    mergeLocalToProto();
    proto = viaProto ? proto : builder.build();
    viaProto = true;
    return proto;
  }

  @Override
  public int hashCode() {
    return getProto().hashCode();
  }

  @Override
  public boolean equals(Object other) {
    if (other == null) {
      return false;
    }
    if (other.getClass().isAssignableFrom(this.getClass())) {
      return this.getProto().equals(this.getClass().cast(other).getProto());
    }
    return false;
  }

  @Override
  public String toString() {
    return TextFormat.shortDebugString(getProto());
  }

  private void mergeLocalToBuilder() {
    if (this.applicationAttemptReport != null) {
      builder
        .setApplicationAttemptReport(convertToProtoFormat(this.applicationAttemptReport));
    }
  }

  private void mergeLocalToProto() {
    if (viaProto) {
      maybeInitBuilder();
    }
    mergeLocalToBuilder();
    proto = builder.build();
    viaProto = true;
  }

  private void maybeInitBuilder() {
    if (viaProto || builder == null) {
      builder = GetApplicationAttemptReportResponseProto.newBuilder(proto);
    }
    viaProto = false;
  }

  @Override
  public ApplicationAttemptReport getApplicationAttemptReport() {
    if (this.applicationAttemptReport != null) {
      return this.applicationAttemptReport;
    }
    GetApplicationAttemptReportResponseProtoOrBuilder p =
        viaProto ? proto : builder;
    if (!p.hasApplicationAttemptReport()) {
      return null;
    }
    this.applicationAttemptReport =
        convertFromProtoFormat(p.getApplicationAttemptReport());
    return this.applicationAttemptReport;
  }

  @Override
  public void setApplicationAttemptReport(
      ApplicationAttemptReport ApplicationAttemptReport) {
    maybeInitBuilder();
    if (ApplicationAttemptReport == null) {
      builder.clearApplicationAttemptReport();
    }
    this.applicationAttemptReport = ApplicationAttemptReport;
  }

  private ApplicationAttemptReportPBImpl convertFromProtoFormat(
      ApplicationAttemptReportProto p) {
    return new ApplicationAttemptReportPBImpl(p);
  }

  private ApplicationAttemptReportProto convertToProtoFormat(
      ApplicationAttemptReport t) {
    return ((ApplicationAttemptReportPBImpl) t).getProto();
  }

}
@ -0,0 +1,134 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.yarn.api.protocolrecords.impl.pb;

import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptsRequest;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationIdPBImpl;
import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProtoOrBuilder;

import com.google.protobuf.TextFormat;

@Private
@Unstable
public class GetApplicationAttemptsRequestPBImpl extends
    GetApplicationAttemptsRequest {

  GetApplicationAttemptsRequestProto proto = GetApplicationAttemptsRequestProto
    .getDefaultInstance();
  GetApplicationAttemptsRequestProto.Builder builder = null;
  boolean viaProto = false;

  ApplicationId applicationId = null;

  public GetApplicationAttemptsRequestPBImpl() {
    builder = GetApplicationAttemptsRequestProto.newBuilder();
  }

  public GetApplicationAttemptsRequestPBImpl(
      GetApplicationAttemptsRequestProto proto) {
    this.proto = proto;
    viaProto = true;
  }

  public GetApplicationAttemptsRequestProto getProto() {
    mergeLocalToProto();
    proto = viaProto ? proto : builder.build();
    viaProto = true;
    return proto;
  }

  @Override
  public int hashCode() {
    return getProto().hashCode();
  }

  @Override
  public boolean equals(Object other) {
    if (other == null) {
      return false;
    }
    if (other.getClass().isAssignableFrom(this.getClass())) {
      return this.getProto().equals(this.getClass().cast(other).getProto());
    }
    return false;
  }

  @Override
  public String toString() {
    return TextFormat.shortDebugString(getProto());
  }

  private void mergeLocalToBuilder() {
    if (applicationId != null) {
      builder.setApplicationId(convertToProtoFormat(this.applicationId));
    }
  }

  private void mergeLocalToProto() {
    if (viaProto) {
      maybeInitBuilder();
    }
    mergeLocalToBuilder();
    proto = builder.build();
    viaProto = true;
  }

  private void maybeInitBuilder() {
    if (viaProto || builder == null) {
      builder = GetApplicationAttemptsRequestProto.newBuilder(proto);
    }
    viaProto = false;
  }

  @Override
  public ApplicationId getApplicationId() {
    if (this.applicationId != null) {
      return this.applicationId;
    }
    GetApplicationAttemptsRequestProtoOrBuilder p = viaProto ? proto : builder;
    if (!p.hasApplicationId()) {
      return null;
    }
    this.applicationId = convertFromProtoFormat(p.getApplicationId());
    return this.applicationId;
  }

  @Override
  public void setApplicationId(ApplicationId applicationId) {
    maybeInitBuilder();
    if (applicationId == null) {
      builder.clearApplicationId();
    }
    this.applicationId = applicationId;
  }

  private ApplicationIdPBImpl convertFromProtoFormat(ApplicationIdProto p) {
    return new ApplicationIdPBImpl(p);
  }

  private ApplicationIdProto convertToProtoFormat(ApplicationId t) {
    return ((ApplicationIdPBImpl) t).getProto();
  }

}
@ -0,0 +1,186 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.yarn.api.protocolrecords.impl.pb;

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptsResponse;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptReport;
import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationAttemptReportPBImpl;
import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProtoOrBuilder;

import com.google.protobuf.TextFormat;

@Private
@Unstable
public class GetApplicationAttemptsResponsePBImpl extends
    GetApplicationAttemptsResponse {

  GetApplicationAttemptsResponseProto proto =
      GetApplicationAttemptsResponseProto.getDefaultInstance();
  GetApplicationAttemptsResponseProto.Builder builder = null;
  boolean viaProto = false;

  List<ApplicationAttemptReport> applicationAttemptList;

  public GetApplicationAttemptsResponsePBImpl() {
    builder = GetApplicationAttemptsResponseProto.newBuilder();
  }

  public GetApplicationAttemptsResponsePBImpl(
      GetApplicationAttemptsResponseProto proto) {
    this.proto = proto;
    viaProto = true;
  }

  @Override
  public List<ApplicationAttemptReport> getApplicationAttemptList() {
    initLocalApplicationAttemptsList();
    return this.applicationAttemptList;
  }

  @Override
  public void setApplicationAttemptList(
      List<ApplicationAttemptReport> applicationAttempts) {
    maybeInitBuilder();
    if (applicationAttempts == null) {
      builder.clearApplicationAttempts();
    }
    this.applicationAttemptList = applicationAttempts;
  }

  public GetApplicationAttemptsResponseProto getProto() {
    mergeLocalToProto();
    proto = viaProto ? proto : builder.build();
    viaProto = true;
    return proto;
  }

  @Override
  public int hashCode() {
    return getProto().hashCode();
  }

  @Override
  public boolean equals(Object other) {
    if (other == null) {
      return false;
    }
    if (other.getClass().isAssignableFrom(this.getClass())) {
      return this.getProto().equals(this.getClass().cast(other).getProto());
    }
    return false;
  }

  @Override
  public String toString() {
    return TextFormat.shortDebugString(getProto());
  }

  private void mergeLocalToBuilder() {
    if (this.applicationAttemptList != null) {
      addLocalApplicationAttemptsToProto();
    }
  }

  private void mergeLocalToProto() {
    if (viaProto) {
      maybeInitBuilder();
    }
    mergeLocalToBuilder();
    proto = builder.build();
    viaProto = true;
  }

  private void maybeInitBuilder() {
    if (viaProto || builder == null) {
      builder = GetApplicationAttemptsResponseProto.newBuilder(proto);
    }
    viaProto = false;
  }

  // Once this is called. containerList will never be null - until a getProto
  // is called.
  private void initLocalApplicationAttemptsList() {
    if (this.applicationAttemptList != null) {
      return;
    }
    GetApplicationAttemptsResponseProtoOrBuilder p = viaProto ? proto : builder;
    List<ApplicationAttemptReportProto> list = p.getApplicationAttemptsList();
    applicationAttemptList = new ArrayList<ApplicationAttemptReport>();

    for (ApplicationAttemptReportProto a : list) {
      applicationAttemptList.add(convertFromProtoFormat(a));
    }
  }

  private void addLocalApplicationAttemptsToProto() {
    maybeInitBuilder();
    builder.clearApplicationAttempts();
    if (applicationAttemptList == null) {
      return;
    }
    Iterable<ApplicationAttemptReportProto> iterable =
        new Iterable<ApplicationAttemptReportProto>() {
          @Override
          public Iterator<ApplicationAttemptReportProto> iterator() {
            return new Iterator<ApplicationAttemptReportProto>() {

              Iterator<ApplicationAttemptReport> iter = applicationAttemptList
                .iterator();

              @Override
              public boolean hasNext() {
                return iter.hasNext();
              }

              @Override
              public ApplicationAttemptReportProto next() {
                return convertToProtoFormat(iter.next());
              }

              @Override
              public void remove() {
                throw new UnsupportedOperationException();

              }
            };

          }
        };
    builder.addAllApplicationAttempts(iterable);
  }

  private ApplicationAttemptReportPBImpl convertFromProtoFormat(
      ApplicationAttemptReportProto p) {
    return new ApplicationAttemptReportPBImpl(p);
  }

  private ApplicationAttemptReportProto convertToProtoFormat(
      ApplicationAttemptReport t) {
    return ((ApplicationAttemptReportPBImpl) t).getProto();
  }

}
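(Editor's note: the response PBImpls above keep their record lists lazily; the list is only folded into the proto when getProto() runs mergeLocalToProto(). A minimal sketch of that round trip, using only classes added by this patch plus java.util, is shown below; it is illustrative and not part of the commit.)

// Illustrative sketch only, not part of this commit.
import java.util.Collections;

import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.GetApplicationAttemptsResponsePBImpl;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptReport;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsResponseProto;

public class AttemptsResponseRoundTripSketch {
  public static GetApplicationAttemptsResponseProto roundTrip() {
    GetApplicationAttemptsResponsePBImpl response =
        new GetApplicationAttemptsResponsePBImpl();
    // An empty list is enough to exercise the merge path; real callers would
    // pass ApplicationAttemptReport instances here.
    response.setApplicationAttemptList(
        Collections.<ApplicationAttemptReport> emptyList());
    // getProto() copies the local list into the proto builder and freezes it.
    return response.getProto();
  }
}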
@ -0,0 +1,129 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.yarn.api.protocolrecords.impl.pb;

import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.yarn.api.protocolrecords.GetContainerReportRequest;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.impl.pb.ContainerIdPBImpl;
import org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportRequestProtoOrBuilder;

import com.google.protobuf.TextFormat;

@Private
@Unstable
public class GetContainerReportRequestPBImpl extends GetContainerReportRequest {
  GetContainerReportRequestProto proto = GetContainerReportRequestProto
    .getDefaultInstance();
  GetContainerReportRequestProto.Builder builder = null;
  boolean viaProto = false;

  private ContainerId containerId = null;

  public GetContainerReportRequestPBImpl() {
    builder = GetContainerReportRequestProto.newBuilder();
  }

  public GetContainerReportRequestPBImpl(GetContainerReportRequestProto proto) {
    this.proto = proto;
    viaProto = true;
  }

  public GetContainerReportRequestProto getProto() {
    mergeLocalToProto();
    proto = viaProto ? proto : builder.build();
    viaProto = true;
    return proto;
  }

  @Override
  public int hashCode() {
    return getProto().hashCode();
  }

  @Override
  public boolean equals(Object other) {
    if (other == null)
      return false;
    if (other.getClass().isAssignableFrom(this.getClass())) {
      return this.getProto().equals(this.getClass().cast(other).getProto());
    }
    return false;
  }

  @Override
  public String toString() {
    return TextFormat.shortDebugString(getProto());
  }

  private void mergeLocalToBuilder() {
    if (containerId != null) {
      builder.setContainerId(convertToProtoFormat(this.containerId));
    }
  }

  private void mergeLocalToProto() {
    if (viaProto)
      maybeInitBuilder();
    mergeLocalToBuilder();
    proto = builder.build();
    viaProto = true;
  }

  private void maybeInitBuilder() {
    if (viaProto || builder == null) {
      builder = GetContainerReportRequestProto.newBuilder(proto);
    }
    viaProto = false;
  }

  @Override
  public ContainerId getContainerId() {
    if (this.containerId != null) {
      return this.containerId;
    }
    GetContainerReportRequestProtoOrBuilder p = viaProto ? proto : builder;
    if (!p.hasContainerId()) {
      return null;
    }
    this.containerId = convertFromProtoFormat(p.getContainerId());
    return this.containerId;
  }

  @Override
  public void setContainerId(ContainerId containerId) {
    maybeInitBuilder();
    if (containerId == null) {
      builder.clearContainerId();
    }
    this.containerId = containerId;
  }

  private ContainerIdPBImpl convertFromProtoFormat(ContainerIdProto p) {
    return new ContainerIdPBImpl(p);
  }

  private ContainerIdProto convertToProtoFormat(ContainerId t) {
    return ((ContainerIdPBImpl) t).getProto();
  }

}
@ -0,0 +1,127 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.yarn.api.protocolrecords.impl.pb;

import org.apache.hadoop.yarn.api.protocolrecords.GetContainerReportResponse;
import org.apache.hadoop.yarn.api.records.ContainerReport;
import org.apache.hadoop.yarn.api.records.impl.pb.ContainerReportPBImpl;
import org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainerReportResponseProtoOrBuilder;

import com.google.protobuf.TextFormat;

public class GetContainerReportResponsePBImpl extends
    GetContainerReportResponse {

  GetContainerReportResponseProto proto = GetContainerReportResponseProto
    .getDefaultInstance();
  GetContainerReportResponseProto.Builder builder = null;
  boolean viaProto = false;

  private ContainerReport containerReport = null;

  public GetContainerReportResponsePBImpl() {
    builder = GetContainerReportResponseProto.newBuilder();
  }

  public GetContainerReportResponsePBImpl(GetContainerReportResponseProto proto) {
    this.proto = proto;
    viaProto = true;
  }

  public GetContainerReportResponseProto getProto() {
    mergeLocalToProto();
    proto = viaProto ? proto : builder.build();
    viaProto = true;
    return proto;
  }

  @Override
  public int hashCode() {
    return getProto().hashCode();
  }

  @Override
  public boolean equals(Object other) {
    if (other == null)
      return false;
    if (other.getClass().isAssignableFrom(this.getClass())) {
      return this.getProto().equals(this.getClass().cast(other).getProto());
    }
    return false;
  }

  @Override
  public String toString() {
    return TextFormat.shortDebugString(getProto());
  }

  private void mergeLocalToBuilder() {
    if (this.containerReport != null) {
      builder.setContainerReport(convertToProtoFormat(this.containerReport));
    }
  }

  private void mergeLocalToProto() {
    if (viaProto)
      maybeInitBuilder();
    mergeLocalToBuilder();
    proto = builder.build();
    viaProto = true;
  }

  private void maybeInitBuilder() {
    if (viaProto || builder == null) {
      builder = GetContainerReportResponseProto.newBuilder(proto);
    }
    viaProto = false;
  }

  @Override
  public ContainerReport getContainerReport() {
    if (this.containerReport != null) {
      return this.containerReport;
    }
    GetContainerReportResponseProtoOrBuilder p = viaProto ? proto : builder;
    if (!p.hasContainerReport()) {
      return null;
    }
    this.containerReport = convertFromProtoFormat(p.getContainerReport());
    return this.containerReport;
  }

  @Override
  public void setContainerReport(ContainerReport containerReport) {
    maybeInitBuilder();
    if (containerReport == null) {
      builder.clearContainerReport();
    }
    this.containerReport = containerReport;
  }

  private ContainerReportPBImpl convertFromProtoFormat(ContainerReportProto p) {
    return new ContainerReportPBImpl(p);
  }

  private ContainerReportProto convertToProtoFormat(ContainerReport t) {
    return ((ContainerReportPBImpl) t).getProto();
  }

}
@ -0,0 +1,131 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.yarn.api.protocolrecords.impl.pb;

import org.apache.hadoop.yarn.api.protocolrecords.GetContainersRequest;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationAttemptIdPBImpl;
import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersRequestProtoOrBuilder;

import com.google.protobuf.TextFormat;

public class GetContainersRequestPBImpl extends GetContainersRequest {
  GetContainersRequestProto proto = GetContainersRequestProto
    .getDefaultInstance();
  GetContainersRequestProto.Builder builder = null;
  boolean viaProto = false;

  private ApplicationAttemptId applicationAttemptId = null;

  public GetContainersRequestPBImpl() {
    builder = GetContainersRequestProto.newBuilder();
  }

  public GetContainersRequestPBImpl(GetContainersRequestProto proto) {
    this.proto = proto;
    viaProto = true;
  }

  public GetContainersRequestProto getProto() {
    mergeLocalToProto();
    proto = viaProto ? proto : builder.build();
    viaProto = true;
    return proto;
  }

  @Override
  public int hashCode() {
    return getProto().hashCode();
  }

  @Override
  public boolean equals(Object other) {
    if (other == null) {
      return false;
    }
    if (other.getClass().isAssignableFrom(this.getClass())) {
      return this.getProto().equals(this.getClass().cast(other).getProto());
    }
    return false;
  }

  @Override
  public String toString() {
    return TextFormat.shortDebugString(getProto());
  }

  private void mergeLocalToBuilder() {
    if (applicationAttemptId != null) {
      builder
        .setApplicationAttemptId(convertToProtoFormat(this.applicationAttemptId));
    }
  }

  private void mergeLocalToProto() {
    if (viaProto) {
      maybeInitBuilder();
    }
    mergeLocalToBuilder();
    proto = builder.build();
    viaProto = true;
  }

  private void maybeInitBuilder() {
    if (viaProto || builder == null) {
      builder = GetContainersRequestProto.newBuilder(proto);
    }
    viaProto = false;
  }

  @Override
  public ApplicationAttemptId getApplicationAttemptId() {
    if (this.applicationAttemptId != null) {
      return this.applicationAttemptId;
    }
    GetContainersRequestProtoOrBuilder p = viaProto ? proto : builder;
    if (!p.hasApplicationAttemptId()) {
      return null;
    }
    this.applicationAttemptId =
        convertFromProtoFormat(p.getApplicationAttemptId());
    return this.applicationAttemptId;
  }

  @Override
  public void
      setApplicationAttemptId(ApplicationAttemptId applicationAttemptId) {
    maybeInitBuilder();
    if (applicationAttemptId == null) {
      builder.clearApplicationAttemptId();
    }
    this.applicationAttemptId = applicationAttemptId;
  }

  private ApplicationAttemptIdPBImpl convertFromProtoFormat(
      ApplicationAttemptIdProto p) {
    return new ApplicationAttemptIdPBImpl(p);
  }

  private ApplicationAttemptIdProto
      convertToProtoFormat(ApplicationAttemptId t) {
    return ((ApplicationAttemptIdPBImpl) t).getProto();
  }
}
@ -0,0 +1,180 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.yarn.api.protocolrecords.impl.pb;

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.yarn.api.protocolrecords.GetContainersResponse;
import org.apache.hadoop.yarn.api.records.ContainerReport;
import org.apache.hadoop.yarn.api.records.impl.pb.ContainerReportPBImpl;
import org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetContainersResponseProtoOrBuilder;

import com.google.protobuf.TextFormat;

@Private
@Unstable
public class GetContainersResponsePBImpl extends GetContainersResponse {

  GetContainersResponseProto proto = GetContainersResponseProto
    .getDefaultInstance();
  GetContainersResponseProto.Builder builder = null;
  boolean viaProto = false;

  List<ContainerReport> containerList;

  public GetContainersResponsePBImpl() {
    builder = GetContainersResponseProto.newBuilder();
  }

  public GetContainersResponsePBImpl(GetContainersResponseProto proto) {
    this.proto = proto;
    viaProto = true;
  }

  @Override
  public List<ContainerReport> getContainerList() {
    initLocalContainerList();
    return this.containerList;
  }

  @Override
  public void setContainerList(List<ContainerReport> containers) {
    maybeInitBuilder();
    if (containers == null) {
      builder.clearContainers();
    }
    this.containerList = containers;
  }

  public GetContainersResponseProto getProto() {
    mergeLocalToProto();
    proto = viaProto ? proto : builder.build();
    viaProto = true;
    return proto;
  }

  @Override
  public int hashCode() {
    return getProto().hashCode();
  }

  @Override
  public boolean equals(Object other) {
    if (other == null) {
      return false;
    }
    if (other.getClass().isAssignableFrom(this.getClass())) {
      return this.getProto().equals(this.getClass().cast(other).getProto());
    }
    return false;
  }

  @Override
  public String toString() {
    return TextFormat.shortDebugString(getProto());
  }

  private void mergeLocalToBuilder() {
    if (this.containerList != null) {
      addLocalContainersToProto();
    }
  }

  private void mergeLocalToProto() {
    if (viaProto) {
      maybeInitBuilder();
    }
    mergeLocalToBuilder();
    proto = builder.build();
    viaProto = true;
  }

  private void maybeInitBuilder() {
    if (viaProto || builder == null) {
      builder = GetContainersResponseProto.newBuilder(proto);
    }
    viaProto = false;
  }

  // Once this is called. containerList will never be null - until a getProto
  // is called.
  private void initLocalContainerList() {
    if (this.containerList != null) {
      return;
    }
    GetContainersResponseProtoOrBuilder p = viaProto ? proto : builder;
    List<ContainerReportProto> list = p.getContainersList();
    containerList = new ArrayList<ContainerReport>();

    for (ContainerReportProto c : list) {
      containerList.add(convertFromProtoFormat(c));
    }
  }

  private void addLocalContainersToProto() {
    maybeInitBuilder();
    builder.clearContainers();
    if (containerList == null) {
      return;
    }
    Iterable<ContainerReportProto> iterable =
        new Iterable<ContainerReportProto>() {
          @Override
          public Iterator<ContainerReportProto> iterator() {
            return new Iterator<ContainerReportProto>() {

              Iterator<ContainerReport> iter = containerList.iterator();

              @Override
              public boolean hasNext() {
                return iter.hasNext();
              }

              @Override
              public ContainerReportProto next() {
                return convertToProtoFormat(iter.next());
              }

              @Override
              public void remove() {
                throw new UnsupportedOperationException();

              }
            };

          }
        };
    builder.addAllContainers(iterable);
  }

  private ContainerReportPBImpl convertFromProtoFormat(ContainerReportProto p) {
    return new ContainerReportPBImpl(p);
  }

  private ContainerReportProto convertToProtoFormat(ContainerReport t) {
    return ((ContainerReportPBImpl) t).getProto();
  }

}
@ -0,0 +1,270 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.yarn.api.records.impl.pb;

import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptReport;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.YarnApplicationAttemptState;
import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto;
import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto;
import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProtoOrBuilder;
import org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto;
import org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationAttemptStateProto;

import com.google.protobuf.TextFormat;

public class ApplicationAttemptReportPBImpl extends ApplicationAttemptReport {
  ApplicationAttemptReportProto proto = ApplicationAttemptReportProto
    .getDefaultInstance();
  ApplicationAttemptReportProto.Builder builder = null;
  boolean viaProto = false;

  private ApplicationAttemptId ApplicationAttemptId;
  private ContainerId amContainerId;

  public ApplicationAttemptReportPBImpl() {
    builder = ApplicationAttemptReportProto.newBuilder();
  }

  public ApplicationAttemptReportPBImpl(ApplicationAttemptReportProto proto) {
    this.proto = proto;
    viaProto = true;
  }

  @Override
  public ApplicationAttemptId getApplicationAttemptId() {
    if (this.ApplicationAttemptId != null) {
      return this.ApplicationAttemptId;
    }

    ApplicationAttemptReportProtoOrBuilder p = viaProto ? proto : builder;
    if (!p.hasApplicationAttemptId()) {
      return null;
    }
    this.ApplicationAttemptId =
        convertFromProtoFormat(p.getApplicationAttemptId());
    return this.ApplicationAttemptId;
  }

  @Override
  public String getHost() {
    ApplicationAttemptReportProtoOrBuilder p = viaProto ? proto : builder;
    if (!p.hasHost()) {
      return null;
    }
    return p.getHost();
  }

  @Override
  public int getRpcPort() {
    ApplicationAttemptReportProtoOrBuilder p = viaProto ? proto : builder;
    return p.getRpcPort();
  }

  @Override
  public String getTrackingUrl() {
    ApplicationAttemptReportProtoOrBuilder p = viaProto ? proto : builder;
    if (!p.hasTrackingUrl()) {
      return null;
    }
    return p.getTrackingUrl();
  }

  @Override
  public String getDiagnostics() {
    ApplicationAttemptReportProtoOrBuilder p = viaProto ? proto : builder;
    if (!p.hasDiagnostics()) {
      return null;
    }
    return p.getDiagnostics();
  }

  @Override
  public YarnApplicationAttemptState getYarnApplicationAttemptState() {
    ApplicationAttemptReportProtoOrBuilder p = viaProto ? proto : builder;
    if (!p.hasYarnApplicationAttemptState()) {
      return null;
    }
    return convertFromProtoFormat(p.getYarnApplicationAttemptState());
  }

  @Override
  public void setYarnApplicationAttemptState(YarnApplicationAttemptState state) {
    maybeInitBuilder();
    if (state == null) {
      builder.clearYarnApplicationAttemptState();
      return;
    }
    builder.setYarnApplicationAttemptState(convertToProtoFormat(state));
  }

  private YarnApplicationAttemptStateProto convertToProtoFormat(
      YarnApplicationAttemptState state) {
    return ProtoUtils.convertToProtoFormat(state);
  }

  private YarnApplicationAttemptState convertFromProtoFormat(
      YarnApplicationAttemptStateProto yarnApplicationAttemptState) {
    return ProtoUtils.convertFromProtoFormat(yarnApplicationAttemptState);
  }

  @Override
  public void
      setApplicationAttemptId(ApplicationAttemptId applicationAttemptId) {
    maybeInitBuilder();
    if (applicationAttemptId == null)
      builder.clearApplicationAttemptId();
    this.ApplicationAttemptId = applicationAttemptId;
  }

  @Override
  public void setHost(String host) {
    maybeInitBuilder();
    if (host == null) {
      builder.clearHost();
      return;
    }
    builder.setHost(host);
  }

  @Override
  public void setRpcPort(int rpcPort) {
    maybeInitBuilder();
    builder.setRpcPort(rpcPort);
  }

  @Override
  public void setTrackingUrl(String url) {
    maybeInitBuilder();
    if (url == null) {
      builder.clearTrackingUrl();
      return;
    }
    builder.setTrackingUrl(url);
  }

  @Override
  public void setDiagnostics(String diagnostics) {
    maybeInitBuilder();
    if (diagnostics == null) {
      builder.clearDiagnostics();
      return;
    }
    builder.setDiagnostics(diagnostics);
  }

  public ApplicationAttemptReportProto getProto() {
    mergeLocalToProto();
    proto = viaProto ? proto : builder.build();
    viaProto = true;
    return proto;
  }

  @Override
  public int hashCode() {
    return getProto().hashCode();
  }

  @Override
  public boolean equals(Object other) {
    if (other == null)
      return false;
    if (other.getClass().isAssignableFrom(this.getClass())) {
      return this.getProto().equals(this.getClass().cast(other).getProto());
    }
    return false;
  }

  @Override
  public String toString() {
    return TextFormat.shortDebugString(getProto());
  }

  private void maybeInitBuilder() {
    if (viaProto || builder == null) {
      builder = ApplicationAttemptReportProto.newBuilder(proto);
    }
    viaProto = false;
  }

  private void mergeLocalToProto() {
    if (viaProto)
      maybeInitBuilder();
    mergeLocalToBuilder();
    proto = builder.build();
    viaProto = true;
  }

  private void mergeLocalToBuilder() {
    if (this.ApplicationAttemptId != null
        && !((ApplicationAttemptIdPBImpl) this.ApplicationAttemptId).getProto()
          .equals(builder.getApplicationAttemptId())) {
      builder
        .setApplicationAttemptId(convertToProtoFormat(this.ApplicationAttemptId));
    }

    if (this.amContainerId != null
        && !((ContainerIdPBImpl) this.amContainerId).getProto().equals(
          builder.getAmContainerId())) {
      builder.setAmContainerId(convertToProtoFormat(this.amContainerId));
    }
  }

  private ContainerIdProto convertToProtoFormat(ContainerId amContainerId) {
    return ((ContainerIdPBImpl) amContainerId).getProto();
  }

  private ContainerIdPBImpl convertFromProtoFormat(
      ContainerIdProto amContainerId) {
    return new ContainerIdPBImpl(amContainerId);
  }

  private ApplicationAttemptIdProto
      convertToProtoFormat(ApplicationAttemptId t) {
    return ((ApplicationAttemptIdPBImpl) t).getProto();
  }

  private ApplicationAttemptIdPBImpl convertFromProtoFormat(
      ApplicationAttemptIdProto applicationAttemptId) {
    return new ApplicationAttemptIdPBImpl(applicationAttemptId);
  }

  @Override
  public ContainerId getAMContainerId() {
    if (this.amContainerId != null) {
      return this.amContainerId;
    }

    ApplicationAttemptReportProtoOrBuilder p = viaProto ? proto : builder;
    if (!p.hasAmContainerId()) {
      return null;
    }
    this.amContainerId = convertFromProtoFormat(p.getAmContainerId());
    return this.amContainerId;
  }

  @Override
  public void setAMContainerId(ContainerId amContainerId) {
    maybeInitBuilder();
    if (amContainerId == null)
      builder.clearAmContainerId();
    this.amContainerId = amContainerId;
  }
}
@ -0,0 +1,346 @@
|
|||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.yarn.api.records.impl.pb;
|
||||
|
||||
import org.apache.hadoop.security.proto.SecurityProtos.TokenProto;
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationId;
|
||||
import org.apache.hadoop.yarn.api.records.ContainerId;
|
||||
import org.apache.hadoop.yarn.api.records.ContainerReport;
|
||||
import org.apache.hadoop.yarn.api.records.ContainerState;
|
||||
import org.apache.hadoop.yarn.api.records.ContainerStatus;
|
||||
import org.apache.hadoop.yarn.api.records.NodeId;
|
||||
import org.apache.hadoop.yarn.api.records.Priority;
|
||||
import org.apache.hadoop.yarn.api.records.Resource;
|
||||
|
||||
import org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto;
|
||||
import org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto;
|
||||
import org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProtoOrBuilder;
|
||||
import org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto;
|
||||
import org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto;
|
||||
import org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto;
|
||||
import org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto;
|
||||
|
||||
import com.google.protobuf.TextFormat;
|
||||
|
||||
public class ContainerReportPBImpl extends ContainerReport {
|
||||
|
||||
ContainerReportProto proto = ContainerReportProto.getDefaultInstance();
|
||||
ContainerReportProto.Builder builder = null;
|
||||
boolean viaProto = false;
|
||||
|
||||
private ContainerId containerId = null;
|
||||
private Resource resource = null;
|
||||
private NodeId nodeId = null;
|
||||
private Priority priority = null;
|
||||
|
||||
public ContainerReportPBImpl() {
|
||||
builder = ContainerReportProto.newBuilder();
|
||||
}
|
||||
|
||||
public ContainerReportPBImpl(ContainerReportProto proto) {
|
||||
this.proto = proto;
|
||||
viaProto = true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return TextFormat.shortDebugString(getProto());
|
||||
}
|
||||
|
||||
@Override
|
||||
public Resource getAllocatedResource() {
|
||||
if (this.resource != null) {
|
||||
return this.resource;
|
||||
}
|
||||
ContainerReportProtoOrBuilder p = viaProto ? proto : builder;
|
||||
if (!p.hasResource()) {
|
||||
return null;
|
||||
}
|
||||
this.resource = convertFromProtoFormat(p.getResource());
|
||||
return this.resource;
|
||||
}
|
||||
|
||||
@Override
|
||||
public NodeId getAssignedNode() {
|
||||
if (this.nodeId != null) {
|
||||
return this.nodeId;
|
||||
}
|
||||
ContainerReportProtoOrBuilder p = viaProto ? proto : builder;
|
||||
if (!p.hasNodeId()) {
|
||||
return null;
|
||||
}
|
||||
this.nodeId = convertFromProtoFormat(p.getNodeId());
|
||||
return this.nodeId;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ContainerId getContainerId() {
|
||||
if (this.containerId != null) {
|
||||
return this.containerId;
|
||||
}
|
||||
ContainerReportProtoOrBuilder p = viaProto ? proto : builder;
|
||||
if (!p.hasContainerId()) {
|
||||
return null;
|
||||
}
|
||||
this.containerId = convertFromProtoFormat(p.getContainerId());
|
||||
return this.containerId;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getDiagnosticsInfo() {
|
||||
ContainerReportProtoOrBuilder p = viaProto ? proto : builder;
|
||||
if (!p.hasDiagnosticsInfo()) {
|
||||
return null;
|
||||
}
|
||||
return (p.getDiagnosticsInfo());
|
||||
}
|
||||
|
||||
@Override
|
||||
public ContainerState getContainerState() {
|
||||
ContainerReportProtoOrBuilder p = viaProto ? proto : builder;
|
||||
if (!p.hasContainerState()) {
|
||||
return null;
|
||||
}
|
||||
return convertFromProtoFormat(p.getContainerState());
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getFinishTime() {
|
||||
ContainerReportProtoOrBuilder p = viaProto ? proto : builder;
|
||||
return p.getFinishTime();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getLogUrl() {
|
||||
ContainerReportProtoOrBuilder p = viaProto ? proto : builder;
|
||||
if (!p.hasLogUrl()) {
|
||||
return null;
|
||||
}
|
||||
return (p.getLogUrl());
|
||||
}
|
||||
|
||||
@Override
|
||||
public Priority getPriority() {
|
||||
if (this.priority != null) {
|
||||
return this.priority;
|
||||
}
|
||||
ContainerReportProtoOrBuilder p = viaProto ? proto : builder;
|
||||
if (!p.hasPriority()) {
|
||||
return null;
|
||||
}
|
||||
this.priority = convertFromProtoFormat(p.getPriority());
|
||||
return this.priority;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getStartTime() {
|
||||
ContainerReportProtoOrBuilder p = viaProto ? proto : builder;
|
||||
return p.getStartTime();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setAllocatedResource(Resource resource) {
|
||||
maybeInitBuilder();
|
||||
if (resource == null)
|
||||
builder.clearResource();
|
||||
this.resource = resource;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setAssignedNode(NodeId nodeId) {
|
||||
maybeInitBuilder();
|
||||
if (nodeId == null)
|
||||
builder.clearNodeId();
|
||||
this.nodeId = nodeId;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setContainerId(ContainerId containerId) {
|
||||
maybeInitBuilder();
|
||||
if (containerId == null)
|
||||
builder.clearContainerId();
|
||||
this.containerId = containerId;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setDiagnosticsInfo(String diagnosticsInfo) {
|
||||
maybeInitBuilder();
|
||||
if (diagnosticsInfo == null) {
|
||||
builder.clearDiagnosticsInfo();
|
||||
return;
|
||||
}
|
||||
builder.setDiagnosticsInfo(diagnosticsInfo);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setContainerState(ContainerState containerState) {
|
||||
maybeInitBuilder();
|
||||
if (containerState == null) {
|
||||
builder.clearContainerState();
|
||||
return;
|
||||
}
|
||||
builder.setContainerState(convertToProtoFormat(containerState));
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getContainerExitStatus() {
|
||||
ContainerReportProtoOrBuilder p = viaProto ? proto : builder;
|
||||
return p.getContainerExitStatus();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setContainerExitStatus(int containerExitStatus) {
|
||||
maybeInitBuilder();
|
||||
builder.setContainerExitStatus(containerExitStatus);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setFinishTime(long finishTime) {
|
||||
maybeInitBuilder();
|
||||
builder.setFinishTime(finishTime);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setLogUrl(String logUrl) {
|
||||
maybeInitBuilder();
|
||||
if (logUrl == null) {
|
||||
builder.clearLogUrl();
|
||||
return;
|
||||
}
|
||||
builder.setLogUrl(logUrl);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setPriority(Priority priority) {
|
||||
maybeInitBuilder();
|
||||
if (priority == null) {
|
||||
builder.clearPriority();
|
||||
}
|
||||
this.priority = priority;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setStartTime(long startTime) {
|
||||
maybeInitBuilder();
|
||||
builder.setStartTime(startTime);
|
||||
}
|
||||
|
||||
public ContainerReportProto getProto() {
|
||||
|
||||
mergeLocalToProto();
|
||||
proto = viaProto ? proto : builder.build();
|
||||
viaProto = true;
|
||||
return proto;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return this.getProto().hashCode();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object other) {
|
||||
if (other == null)
|
||||
return false;
|
||||
if (other.getClass().isAssignableFrom(this.getClass())) {
|
||||
return this.getProto().equals(this.getClass().cast(other).getProto());
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
private void mergeLocalToBuilder() {
|
||||
if (this.containerId != null
|
||||
&& !((ContainerIdPBImpl) containerId).getProto().equals(
|
||||
builder.getContainerId())) {
|
||||
builder.setContainerId(convertToProtoFormat(this.containerId));
|
||||
}
|
||||
if (this.nodeId != null
|
||||
&& !((NodeIdPBImpl) nodeId).getProto().equals(builder.getNodeId())) {
|
||||
builder.setNodeId(convertToProtoFormat(this.nodeId));
|
||||
}
|
||||
if (this.resource != null
|
||||
&& !((ResourcePBImpl) this.resource).getProto().equals(
|
||||
builder.getResource())) {
|
||||
builder.setResource(convertToProtoFormat(this.resource));
|
||||
}
|
||||
if (this.priority != null
|
||||
&& !((PriorityPBImpl) this.priority).getProto().equals(
|
||||
builder.getPriority())) {
|
||||
builder.setPriority(convertToProtoFormat(this.priority));
|
||||
}
|
||||
}
|
||||
|
||||
private void mergeLocalToProto() {
|
||||
if (viaProto)
|
||||
maybeInitBuilder();
|
||||
mergeLocalToBuilder();
|
||||
proto = builder.build();
|
||||
viaProto = true;
|
||||
}
|
||||
|
||||
private void maybeInitBuilder() {
|
||||
if (viaProto || builder == null) {
|
||||
builder = ContainerReportProto.newBuilder(proto);
|
||||
}
|
||||
viaProto = false;
|
||||
}
|
||||
|
||||
private ContainerIdPBImpl convertFromProtoFormat(ContainerIdProto p) {
|
||||
return new ContainerIdPBImpl(p);
|
||||
}
|
||||
|
||||
private NodeIdPBImpl convertFromProtoFormat(NodeIdProto p) {
|
||||
return new NodeIdPBImpl(p);
|
||||
}
|
||||
|
||||
private ContainerIdProto convertToProtoFormat(ContainerId t) {
|
||||
return ((ContainerIdPBImpl) t).getProto();
|
||||
}
|
||||
|
||||
private NodeIdProto convertToProtoFormat(NodeId t) {
|
||||
return ((NodeIdPBImpl) t).getProto();
|
||||
}
|
||||
|
||||
private ResourcePBImpl convertFromProtoFormat(ResourceProto p) {
|
||||
return new ResourcePBImpl(p);
|
||||
}
|
||||
|
||||
private ResourceProto convertToProtoFormat(Resource t) {
|
||||
return ((ResourcePBImpl) t).getProto();
|
||||
}
|
||||
|
||||
private PriorityPBImpl convertFromProtoFormat(PriorityProto p) {
|
||||
return new PriorityPBImpl(p);
|
||||
}
|
||||
|
||||
private PriorityProto convertToProtoFormat(Priority p) {
|
||||
return ((PriorityPBImpl) p).getProto();
|
||||
}
|
||||
|
||||
private ContainerStateProto
|
||||
convertToProtoFormat(ContainerState containerState) {
|
||||
return ProtoUtils.convertToProtoFormat(containerState);
|
||||
}
|
||||
|
||||
private ContainerState convertFromProtoFormat(
|
||||
ContainerStateProto containerState) {
|
||||
return ProtoUtils.convertFromProtoFormat(containerState);
|
||||
}
|
||||
}
|
|
@@ -33,6 +33,7 @@ import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.api.records.NodeState;
import org.apache.hadoop.yarn.api.records.QueueACL;
import org.apache.hadoop.yarn.api.records.QueueState;
import org.apache.hadoop.yarn.api.records.YarnApplicationAttemptState;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.proto.YarnProtos.AMCommandProto;
import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAccessTypeProto;

@@ -45,6 +46,7 @@ import org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto;
import org.apache.hadoop.yarn.proto.YarnProtos.NodeStateProto;
import org.apache.hadoop.yarn.proto.YarnProtos.QueueACLProto;
import org.apache.hadoop.yarn.proto.YarnProtos.QueueStateProto;
import org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationAttemptStateProto;
import org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto;

import com.google.protobuf.ByteString;

@@ -96,6 +98,21 @@ public class ProtoUtils {
    return YarnApplicationState.valueOf(e.name());
  }

  /*
   * YarnApplicationAttemptState
   */
  private static String YARN_APPLICATION_ATTEMPT_STATE_PREFIX = "APP_ATTEMPT_";
  public static YarnApplicationAttemptStateProto convertToProtoFormat(
      YarnApplicationAttemptState e) {
    return YarnApplicationAttemptStateProto
      .valueOf(YARN_APPLICATION_ATTEMPT_STATE_PREFIX + e.name());
  }
  public static YarnApplicationAttemptState convertFromProtoFormat(
      YarnApplicationAttemptStateProto e) {
    return YarnApplicationAttemptState.valueOf(e.name().replace(
      YARN_APPLICATION_ATTEMPT_STATE_PREFIX, ""));
  }

  /*
   * ApplicationResourceUsageReport
   */
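A minimal round-trip sketch for the attempt-state converters added above (illustrative only; it assumes the standard YarnApplicationAttemptState.FINISHED value):

    YarnApplicationAttemptState state = YarnApplicationAttemptState.FINISHED;
    // The proto enum name is the record enum name with the APP_ATTEMPT_ prefix added...
    YarnApplicationAttemptStateProto proto = ProtoUtils.convertToProtoFormat(state);
    // proto.name() is "APP_ATTEMPT_FINISHED"
    // ...and the prefix is stripped again on the way back.
    YarnApplicationAttemptState back = ProtoUtils.convertFromProtoFormat(proto);
    // back == YarnApplicationAttemptState.FINISHED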
@@ -0,0 +1,57 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.yarn.client;

import java.io.IOException;
import java.net.InetSocketAddress;
import java.security.PrivilegedAction;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.yarn.ipc.YarnRPC;

@InterfaceAudience.Public
@InterfaceStability.Evolving
@SuppressWarnings("unchecked")
public class AHSProxy<T> {

  private static final Log LOG = LogFactory.getLog(AHSProxy.class);

  public static <T> T createAHSProxy(final Configuration conf,
      final Class<T> protocol, InetSocketAddress ahsAddress) throws IOException {
    LOG.info("Connecting to Application History server at " + ahsAddress);
    return (T) getProxy(conf, protocol, ahsAddress);
  }

  protected static <T> T getProxy(final Configuration conf,
      final Class<T> protocol, final InetSocketAddress rmAddress)
      throws IOException {
    return UserGroupInformation.getCurrentUser().doAs(
      new PrivilegedAction<T>() {
        @Override
        public T run() {
          return (T) YarnRPC.create(conf).getProxy(protocol, rmAddress, conf);
        }
      });
  }
}
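A minimal usage sketch for the new AHSProxy helper, assuming a caller that wants an ApplicationHistoryProtocol client and reads the AHS RPC address from the yarn.ahs.address keys introduced elsewhere in this change (the wiring below is illustrative, not part of the patch):

    // Hypothetical client-side wiring against the new history server.
    Configuration conf = new YarnConfiguration();
    InetSocketAddress ahsAddress =
        conf.getSocketAddr(YarnConfiguration.AHS_ADDRESS,
            YarnConfiguration.DEFAULT_AHS_ADDRESS,
            YarnConfiguration.DEFAULT_AHS_PORT);
    ApplicationHistoryProtocol history =
        AHSProxy.createAHSProxy(conf, ApplicationHistoryProtocol.class, ahsAddress);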
@@ -178,4 +178,8 @@ public final class StringHelper {
  public static String percent(double value) {
    return String.format("%.2f", value * 100);
  }

  public static String getPartUrl(String url, String part) {
    return url.substring(url.indexOf(part));
  }
}
@@ -24,6 +24,7 @@ import org.apache.hadoop.classification.InterfaceAudience;
public interface YarnWebParams {
  String NM_NODENAME = "nm.id";
  String APPLICATION_ID = "app.id";
  String APPLICATION_ATTEMPT_ID = "appattempt.id";
  String CONTAINER_ID = "container.id";
  String CONTAINER_LOG_TYPE = "log.type";
  String ENTITY_STRING = "entity.string";
@@ -17,6 +17,8 @@
 */
package org.apache.hadoop.yarn.webapp.util;

import static org.apache.hadoop.yarn.util.StringHelper.join;

import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.UnknownHostException;

@@ -27,7 +29,9 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.http.HttpConfig;
import org.apache.hadoop.http.HttpConfig.Policy;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.util.ConverterUtils;

import com.google.common.base.Joiner;

@@ -145,6 +149,16 @@ public class WebAppUtils {
    }
  }

  public static String getAHSWebAppURLWithoutScheme(Configuration conf) {
    if (HttpConfig.isSecure()) {
      return conf.get(YarnConfiguration.AHS_WEBAPP_HTTPS_ADDRESS,
        YarnConfiguration.DEFAULT_AHS_WEBAPP_HTTPS_ADDRESS);
    } else {
      return conf.get(YarnConfiguration.AHS_WEBAPP_ADDRESS,
        YarnConfiguration.DEFAULT_AHS_WEBAPP_ADDRESS);
    }
  }

  /**
   * If the url has a scheme, it is returned as is; otherwise the url is
   * returned with a scheme prefixed.

@@ -160,4 +174,11 @@ public class WebAppUtils {
    return schemePrefix + url;
  }
}

  public static String getLogUrl(String nodeHttpAddress, String allocatedNode,
      ContainerId containerId, String user) {
    return join(HttpConfig.getSchemePrefix(), nodeHttpAddress, "/logs", "/",
      allocatedNode, "/", ConverterUtils.toString(containerId), "/",
      ConverterUtils.toString(containerId), "/", user);
  }
}
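For orientation only, a sketch of the URL shape the new getLogUrl helper produces, with made-up host and user values and the default http policy (StringHelper.join concatenates its arguments directly, so the container id appears twice, matching the code above):

    ApplicationId appId = ApplicationId.newInstance(1390000000000L, 1);
    ApplicationAttemptId attemptId = ApplicationAttemptId.newInstance(appId, 1);
    ContainerId containerId = ContainerId.newInstance(attemptId, 1);
    String url = WebAppUtils.getLogUrl("nm-host:8042", "nm-host:45454",
        containerId, "alice");
    // roughly: http://nm-host:8042/logs/nm-host:45454/
    //   container_1390000000000_0001_01_000001/container_1390000000000_0001_01_000001/alice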
@@ -564,6 +564,30 @@
    <value>org.apache.hadoop.yarn.server.resourcemanager.monitor.capacity.ProportionalCapacityPreemptionPolicy</value>
  </property>

  <property>
    <description>Indicate to ResourceManager as well as clients whether
    history-service is enabled or not. If enabled, ResourceManager starts
    recording historical data that ApplicationHistory service can consume.
    Similarly, clients can redirect to the history service when applications
    finish if this is enabled.</description>
    <name>yarn.ahs.enabled</name>
    <value>false</value>
  </property>

  <property>
    <description>Number of worker threads that write the history data.</description>
    <name>yarn.resourcemanager.history-writer.multi-threaded-dispatcher.pool-size</name>
    <value>10</value>
  </property>

  <property>
    <description>The implementation class of ApplicationHistoryStore, which is
    to be used by RMApplicationHistoryWriter.
    </description>
    <name>yarn.resourcemanager.history-writer.class</name>
    <value>org.apache.hadoop.yarn.server.applicationhistoryservice.NullApplicationHistoryStore</value>
  </property>

  <!-- Node Manager Configs -->
  <property>
    <description>The hostname of the NM.</description>

@@ -1041,6 +1065,61 @@
    <value></value>
  </property>

  <!-- Application History Service's Configuration -->

  <property>
    <description>The hostname of the AHS.</description>
    <name>yarn.ahs.hostname</name>
    <value>0.0.0.0</value>
  </property>

  <property>
    <description>The http address of the AHS web application.</description>
    <name>yarn.ahs.webapp.address</name>
    <value>${yarn.ahs.hostname}:8188</value>
  </property>

  <property>
    <description>The https address of the AHS web application.</description>
    <name>yarn.ahs.webapp.https.address</name>
    <value>${yarn.ahs.hostname}:8190</value>
  </property>

  <property>
    <description>URI pointing to the location of the FileSystem path where
    the history will be persisted. This must be supplied when using
    org.apache.hadoop.yarn.server.applicationhistoryservice.FileSystemApplicationHistoryStore
    as the value for yarn.resourcemanager.history-writer.store.class</description>
    <name>yarn.ahs.fs-history-store.uri</name>
    <value>${hadoop.log.dir}/yarn/system/ahstore</value>
  </property>

  <property>
    <description>This is the default address for the Application History server
    to start the RPC server.</description>
    <name>yarn.ahs.address</name>
    <value>0.0.0.0:10200</value>
  </property>

  <property>
    <description>Client thread count to serve the client requests.</description>
    <name>yarn.ahs.client.thread-count</name>
    <value>10</value>
  </property>

  <property>
    <description>T-file compression types used to compress history data.</description>
    <name>yarn.ahs.fs-history-store.compression-type</name>
    <value>none</value>
  </property>

  <property>
    <description>Store class name for history store, defaulting to file
    system store.</description>
    <name>yarn.ahs.store.class</name>
    <value>org.apache.hadoop.yarn.server.applicationhistoryservice.FileSystemApplicationHistoryStore</value>
  </property>

  <!-- Other configuration -->
  <property>
    <description>The interval that the yarn client library uses to poll the
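A minimal client-side sketch against the keys introduced above; the string literals mirror the property names in this hunk, and the enable check itself is illustrative rather than something this patch ships:

    // Hypothetical consumer of the new AHS settings.
    Configuration conf = new YarnConfiguration();
    boolean ahsEnabled = conf.getBoolean("yarn.ahs.enabled", false);
    String ahsRpcAddress = conf.get("yarn.ahs.address", "0.0.0.0:10200");
    String ahsWebAddress = conf.get("yarn.ahs.webapp.address", "0.0.0.0:8188");
    if (ahsEnabled) {
      // e.g. route finished-application lookups to the history service
    }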
@ -0,0 +1,172 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!--
|
||||
Licensed to the Apache Software Foundation (ASF) under one or more
|
||||
contributor license agreements. See the NOTICE file distributed with
|
||||
this work for additional information regarding copyright ownership.
|
||||
The ASF licenses this file to You under the Apache License, Version 2.0
|
||||
(the "License"); you may not use this file except in compliance with
|
||||
the License. You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
-->
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
|
||||
http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
<parent>
|
||||
<artifactId>hadoop-yarn-server</artifactId>
|
||||
<groupId>org.apache.hadoop</groupId>
|
||||
<version>3.0.0-SNAPSHOT</version>
|
||||
</parent>
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
<groupId>org.apache.hadoop</groupId>
|
||||
<artifactId>hadoop-yarn-server-applicationhistoryservice</artifactId>
|
||||
<version>3.0.0-SNAPSHOT</version>
|
||||
<name>hadoop-yarn-server-applicationhistoryservice</name>
|
||||
|
||||
<properties>
|
||||
<!-- Needed for generating FindBugs warnings using parent pom -->
|
||||
<yarn.basedir>${project.parent.parent.basedir}</yarn.basedir>
|
||||
</properties>
|
||||
|
||||
<dependencies>
|
||||
<dependency>
|
||||
<groupId>javax.servlet</groupId>
|
||||
<artifactId>servlet-api</artifactId>
|
||||
</dependency>
|
||||
<!-- 'mvn dependency:analyze' fails to detect use of this dependency -->
|
||||
<dependency>
|
||||
<groupId>org.apache.hadoop</groupId>
|
||||
<artifactId>hadoop-common</artifactId>
|
||||
<scope>provided</scope>
|
||||
<exclusions>
|
||||
<exclusion>
|
||||
<groupId>commons-el</groupId>
|
||||
<artifactId>commons-el</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>tomcat</groupId>
|
||||
<artifactId>jasper-runtime</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>tomcat</groupId>
|
||||
<artifactId>jasper-compiler</artifactId>
|
||||
</exclusion>
|
||||
<exclusion>
|
||||
<groupId>org.mortbay.jetty</groupId>
|
||||
<artifactId>jsp-2.1-jetty</artifactId>
|
||||
</exclusion>
|
||||
</exclusions>
|
||||
</dependency>
|
||||
|
||||
<!-- 'mvn dependency:analyze' fails to detect use of this dependency -->
|
||||
<dependency>
|
||||
<groupId>org.apache.hadoop</groupId>
|
||||
<artifactId>hadoop-annotations</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.mockito</groupId>
|
||||
<artifactId>mockito-all</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<!-- 'mvn dependency:analyze' fails to detect use of this dependency -->
|
||||
<dependency>
|
||||
<groupId>org.apache.hadoop</groupId>
|
||||
<artifactId>hadoop-common</artifactId>
|
||||
<type>test-jar</type>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.google.inject.extensions</groupId>
|
||||
<artifactId>guice-servlet</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.google.protobuf</groupId>
|
||||
<artifactId>protobuf-java</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>junit</groupId>
|
||||
<artifactId>junit</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<!-- 'mvn dependency:analyze' fails to detect use of this dependency -->
|
||||
<dependency>
|
||||
<groupId>com.google.inject</groupId>
|
||||
<artifactId>guice</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.sun.jersey.jersey-test-framework</groupId>
|
||||
<artifactId>jersey-test-framework-core</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.sun.jersey</groupId>
|
||||
<artifactId>jersey-json</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.sun.jersey.contribs</groupId>
|
||||
<artifactId>jersey-guice</artifactId>
|
||||
</dependency>
|
||||
<!-- 'mvn dependency:analyze' fails to detect use of this dependency -->
|
||||
<dependency>
|
||||
<groupId>org.apache.hadoop</groupId>
|
||||
<artifactId>hadoop-yarn-common</artifactId>
|
||||
<type>test-jar</type>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<!-- 'mvn dependency:analyze' fails to detect use of this dependency -->
|
||||
<dependency>
|
||||
<groupId>org.apache.hadoop</groupId>
|
||||
<artifactId>hadoop-yarn-common</artifactId>
|
||||
</dependency>
|
||||
<!-- 'mvn dependency:analyze' fails to detect use of this dependency -->
|
||||
<dependency>
|
||||
<groupId>org.apache.hadoop</groupId>
|
||||
<artifactId>hadoop-yarn-api</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>javax.xml.bind</groupId>
|
||||
<artifactId>jaxb-api</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.codehaus.jettison</groupId>
|
||||
<artifactId>jettison</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.sun.jersey</groupId>
|
||||
<artifactId>jersey-core</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.sun.jersey</groupId>
|
||||
<artifactId>jersey-client</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.google.guava</groupId>
|
||||
<artifactId>guava</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>commons-logging</groupId>
|
||||
<artifactId>commons-logging</artifactId>
|
||||
</dependency>
|
||||
|
||||
<!-- 'mvn dependency:analyze' fails to detect use of this dependency -->
|
||||
<dependency>
|
||||
<groupId>org.apache.hadoop</groupId>
|
||||
<artifactId>hadoop-yarn-server-common</artifactId>
|
||||
</dependency>
|
||||
|
||||
<!-- 'mvn dependency:analyze' fails to detect use of this dependency -->
|
||||
<dependency>
|
||||
<groupId>com.sun.jersey.jersey-test-framework</groupId>
|
||||
<artifactId>jersey-test-framework-grizzly2</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
|
||||
</dependencies>
|
||||
</project>
|
|
@ -0,0 +1,211 @@
|
|||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.yarn.server.applicationhistoryservice;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.net.InetSocketAddress;
|
||||
import java.util.ArrayList;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.hadoop.classification.InterfaceAudience.Private;
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.ipc.Server;
|
||||
import org.apache.hadoop.service.AbstractService;
|
||||
import org.apache.hadoop.yarn.api.ApplicationHistoryProtocol;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.CancelDelegationTokenRequest;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.CancelDelegationTokenResponse;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptReportRequest;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptReportResponse;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptsRequest;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptsResponse;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportRequest;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportResponse;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsResponse;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.GetContainerReportRequest;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.GetContainerReportResponse;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.GetContainersRequest;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.GetContainersResponse;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.GetDelegationTokenRequest;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.GetDelegationTokenResponse;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.RenewDelegationTokenRequest;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.RenewDelegationTokenResponse;
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationAttemptReport;
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationId;
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationReport;
|
||||
import org.apache.hadoop.yarn.api.records.ContainerReport;
|
||||
import org.apache.hadoop.yarn.conf.YarnConfiguration;
|
||||
import org.apache.hadoop.yarn.exceptions.ApplicationAttemptNotFoundException;
|
||||
import org.apache.hadoop.yarn.exceptions.ApplicationNotFoundException;
|
||||
import org.apache.hadoop.yarn.exceptions.ContainerNotFoundException;
|
||||
import org.apache.hadoop.yarn.exceptions.YarnException;
|
||||
import org.apache.hadoop.yarn.ipc.YarnRPC;
|
||||
|
||||
public class ApplicationHistoryClientService extends AbstractService {
|
||||
private static final Log LOG = LogFactory
|
||||
.getLog(ApplicationHistoryClientService.class);
|
||||
private ApplicationHistoryManager history;
|
||||
private ApplicationHistoryProtocol protocolHandler;
|
||||
private Server server;
|
||||
private InetSocketAddress bindAddress;
|
||||
|
||||
public ApplicationHistoryClientService(ApplicationHistoryManager history) {
|
||||
super("ApplicationHistoryClientService");
|
||||
this.history = history;
|
||||
this.protocolHandler = new ApplicationHSClientProtocolHandler();
|
||||
}
|
||||
|
||||
protected void serviceStart() throws Exception {
|
||||
Configuration conf = getConfig();
|
||||
YarnRPC rpc = YarnRPC.create(conf);
|
||||
InetSocketAddress address =
|
||||
conf.getSocketAddr(YarnConfiguration.AHS_ADDRESS,
|
||||
YarnConfiguration.DEFAULT_AHS_ADDRESS,
|
||||
YarnConfiguration.DEFAULT_AHS_PORT);
|
||||
|
||||
server =
|
||||
rpc.getServer(ApplicationHistoryProtocol.class, protocolHandler,
|
||||
address, conf, null, conf.getInt(
|
||||
YarnConfiguration.AHS_CLIENT_THREAD_COUNT,
|
||||
YarnConfiguration.DEFAULT_AHS_CLIENT_THREAD_COUNT));
|
||||
|
||||
server.start();
|
||||
this.bindAddress =
|
||||
conf.updateConnectAddr(YarnConfiguration.AHS_ADDRESS,
|
||||
server.getListenerAddress());
|
||||
LOG.info("Instantiated ApplicationHistoryClientService at "
|
||||
+ this.bindAddress);
|
||||
|
||||
super.serviceStart();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void serviceStop() throws Exception {
|
||||
if (server != null) {
|
||||
server.stop();
|
||||
}
|
||||
super.serviceStop();
|
||||
}
|
||||
|
||||
@Private
|
||||
public ApplicationHistoryProtocol getClientHandler() {
|
||||
return this.protocolHandler;
|
||||
}
|
||||
|
||||
@Private
|
||||
public InetSocketAddress getBindAddress() {
|
||||
return this.bindAddress;
|
||||
}
|
||||
|
||||
private class ApplicationHSClientProtocolHandler implements
|
||||
ApplicationHistoryProtocol {
|
||||
|
||||
@Override
|
||||
public CancelDelegationTokenResponse cancelDelegationToken(
|
||||
CancelDelegationTokenRequest request) throws YarnException, IOException {
|
||||
// TODO Auto-generated method stub
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public GetApplicationAttemptReportResponse getApplicationAttemptReport(
|
||||
GetApplicationAttemptReportRequest request) throws YarnException,
|
||||
IOException {
|
||||
try {
|
||||
GetApplicationAttemptReportResponse response =
|
||||
GetApplicationAttemptReportResponse.newInstance(history
|
||||
.getApplicationAttempt(request.getApplicationAttemptId()));
|
||||
return response;
|
||||
} catch (IOException e) {
|
||||
throw new ApplicationAttemptNotFoundException(e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public GetApplicationAttemptsResponse getApplicationAttempts(
|
||||
GetApplicationAttemptsRequest request) throws YarnException,
|
||||
IOException {
|
||||
GetApplicationAttemptsResponse response =
|
||||
GetApplicationAttemptsResponse
|
||||
.newInstance(new ArrayList<ApplicationAttemptReport>(history
|
||||
.getApplicationAttempts(request.getApplicationId()).values()));
|
||||
return response;
|
||||
}
|
||||
|
||||
@Override
|
||||
public GetApplicationReportResponse getApplicationReport(
|
||||
GetApplicationReportRequest request) throws YarnException, IOException {
|
||||
try {
|
||||
ApplicationId applicationId = request.getApplicationId();
|
||||
GetApplicationReportResponse response =
|
||||
GetApplicationReportResponse.newInstance(history
|
||||
.getApplication(applicationId));
|
||||
return response;
|
||||
} catch (IOException e) {
|
||||
throw new ApplicationNotFoundException(e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public GetApplicationsResponse getApplications(
|
||||
GetApplicationsRequest request) throws YarnException, IOException {
|
||||
GetApplicationsResponse response =
|
||||
GetApplicationsResponse.newInstance(new ArrayList<ApplicationReport>(
|
||||
history.getAllApplications().values()));
|
||||
return response;
|
||||
}
|
||||
|
||||
@Override
|
||||
public GetContainerReportResponse getContainerReport(
|
||||
GetContainerReportRequest request) throws YarnException, IOException {
|
||||
try {
|
||||
GetContainerReportResponse response =
|
||||
GetContainerReportResponse.newInstance(history.getContainer(request
|
||||
.getContainerId()));
|
||||
return response;
|
||||
} catch (IOException e) {
|
||||
throw new ContainerNotFoundException(e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public GetContainersResponse getContainers(GetContainersRequest request)
|
||||
throws YarnException, IOException {
|
||||
GetContainersResponse response =
|
||||
GetContainersResponse.newInstance(new ArrayList<ContainerReport>(
|
||||
history.getContainers(request.getApplicationAttemptId()).values()));
|
||||
return response;
|
||||
}
|
||||
|
||||
@Override
|
||||
public GetDelegationTokenResponse getDelegationToken(
|
||||
GetDelegationTokenRequest request) throws YarnException, IOException {
|
||||
// TODO Auto-generated method stub
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public RenewDelegationTokenResponse renewDelegationToken(
|
||||
RenewDelegationTokenRequest request) throws YarnException, IOException {
|
||||
// TODO Auto-generated method stub
|
||||
return null;
|
||||
}
|
||||
}
|
||||
}
|
|
@@ -0,0 +1,28 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.yarn.server.applicationhistoryservice;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.yarn.server.api.ApplicationContext;

@InterfaceAudience.Public
@InterfaceStability.Unstable
public interface ApplicationHistoryManager extends ApplicationContext {
}
@ -0,0 +1,222 @@
|
|||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.yarn.server.applicationhistoryservice;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Map.Entry;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.hadoop.classification.InterfaceAudience.Private;
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.service.AbstractService;
|
||||
import org.apache.hadoop.util.ReflectionUtils;
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationAttemptReport;
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationId;
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationReport;
|
||||
import org.apache.hadoop.yarn.api.records.ContainerId;
|
||||
import org.apache.hadoop.yarn.api.records.ContainerReport;
|
||||
import org.apache.hadoop.yarn.conf.YarnConfiguration;
|
||||
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptHistoryData;
|
||||
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationHistoryData;
|
||||
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerHistoryData;
|
||||
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
|
||||
public class ApplicationHistoryManagerImpl extends AbstractService implements
|
||||
ApplicationHistoryManager {
|
||||
private static final Log LOG = LogFactory
|
||||
.getLog(ApplicationHistoryManagerImpl.class);
|
||||
private static final String UNAVAILABLE = "N/A";
|
||||
|
||||
private ApplicationHistoryStore historyStore;
|
||||
|
||||
public ApplicationHistoryManagerImpl() {
|
||||
super(ApplicationHistoryManagerImpl.class.getName());
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void serviceInit(Configuration conf) throws Exception {
|
||||
LOG.info("ApplicationHistory Init");
|
||||
historyStore = createApplicationHistoryStore(conf);
|
||||
historyStore.init(conf);
|
||||
super.serviceInit(conf);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void serviceStart() throws Exception {
|
||||
LOG.info("Starting ApplicationHistory");
|
||||
historyStore.start();
|
||||
super.serviceStart();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void serviceStop() throws Exception {
|
||||
LOG.info("Stopping ApplicationHistory");
|
||||
historyStore.stop();
|
||||
super.serviceStop();
|
||||
}
|
||||
|
||||
protected ApplicationHistoryStore createApplicationHistoryStore(
|
||||
Configuration conf) {
|
||||
return ReflectionUtils.newInstance(conf.getClass(
|
||||
YarnConfiguration.AHS_STORE, FileSystemApplicationHistoryStore.class,
|
||||
ApplicationHistoryStore.class), conf);
|
||||
}
|
||||
|
||||
@Override
|
||||
public ContainerReport getAMContainer(ApplicationAttemptId appAttemptId)
|
||||
throws IOException {
|
||||
return convertToContainerReport(historyStore.getAMContainer(appAttemptId));
|
||||
}
|
||||
|
||||
@Override
|
||||
public Map<ApplicationId, ApplicationReport> getAllApplications()
|
||||
throws IOException {
|
||||
Map<ApplicationId, ApplicationHistoryData> histData =
|
||||
historyStore.getAllApplications();
|
||||
HashMap<ApplicationId, ApplicationReport> applicationsReport =
|
||||
new HashMap<ApplicationId, ApplicationReport>();
|
||||
for (Entry<ApplicationId, ApplicationHistoryData> entry : histData
|
||||
.entrySet()) {
|
||||
applicationsReport.put(entry.getKey(),
|
||||
convertToApplicationReport(entry.getValue()));
|
||||
}
|
||||
return applicationsReport;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ApplicationReport getApplication(ApplicationId appId)
|
||||
throws IOException {
|
||||
return convertToApplicationReport(historyStore.getApplication(appId));
|
||||
}
|
||||
|
||||
private ApplicationReport convertToApplicationReport(
|
||||
ApplicationHistoryData appHistory) throws IOException {
|
||||
ApplicationAttemptId currentApplicationAttemptId = null;
|
||||
String trackingUrl = UNAVAILABLE;
|
||||
String host = UNAVAILABLE;
|
||||
int rpcPort = -1;
|
||||
|
||||
ApplicationAttemptHistoryData lastAttempt =
|
||||
getLastAttempt(appHistory.getApplicationId());
|
||||
if (lastAttempt != null) {
|
||||
currentApplicationAttemptId = lastAttempt.getApplicationAttemptId();
|
||||
trackingUrl = lastAttempt.getTrackingURL();
|
||||
host = lastAttempt.getHost();
|
||||
rpcPort = lastAttempt.getRPCPort();
|
||||
}
|
||||
return ApplicationReport.newInstance(appHistory.getApplicationId(),
|
||||
currentApplicationAttemptId, appHistory.getUser(), appHistory.getQueue(),
|
||||
appHistory.getApplicationName(), host, rpcPort, null,
|
||||
appHistory.getYarnApplicationState(), appHistory.getDiagnosticsInfo(),
|
||||
trackingUrl, appHistory.getStartTime(), appHistory.getFinishTime(),
|
||||
appHistory.getFinalApplicationStatus(), null, "", 100,
|
||||
appHistory.getApplicationType(), null);
|
||||
}
|
||||
|
||||
private ApplicationAttemptHistoryData getLastAttempt(ApplicationId appId)
|
||||
throws IOException {
|
||||
Map<ApplicationAttemptId, ApplicationAttemptHistoryData> attempts =
|
||||
historyStore.getApplicationAttempts(appId);
|
||||
ApplicationAttemptId prevMaxAttemptId = null;
|
||||
for (ApplicationAttemptId attemptId : attempts.keySet()) {
|
||||
if (prevMaxAttemptId == null) {
|
||||
prevMaxAttemptId = attemptId;
|
||||
} else {
|
||||
if (prevMaxAttemptId.getAttemptId() < attemptId.getAttemptId()) {
|
||||
prevMaxAttemptId = attemptId;
|
||||
}
|
||||
}
|
||||
}
|
||||
return attempts.get(prevMaxAttemptId);
|
||||
}
|
||||
|
||||
private ApplicationAttemptReport convertToApplicationAttemptReport(
|
||||
ApplicationAttemptHistoryData appAttemptHistory) {
|
||||
return ApplicationAttemptReport.newInstance(
|
||||
appAttemptHistory.getApplicationAttemptId(), appAttemptHistory.getHost(),
|
||||
appAttemptHistory.getRPCPort(), appAttemptHistory.getTrackingURL(),
|
||||
appAttemptHistory.getDiagnosticsInfo(),
|
||||
appAttemptHistory.getYarnApplicationAttemptState(),
|
||||
appAttemptHistory.getMasterContainerId());
|
||||
}
|
||||
|
||||
@Override
|
||||
public ApplicationAttemptReport getApplicationAttempt(
|
||||
ApplicationAttemptId appAttemptId) throws IOException {
|
||||
return convertToApplicationAttemptReport(historyStore
|
||||
.getApplicationAttempt(appAttemptId));
|
||||
}
|
||||
|
||||
@Override
|
||||
public Map<ApplicationAttemptId, ApplicationAttemptReport>
|
||||
getApplicationAttempts(ApplicationId appId) throws IOException {
|
||||
Map<ApplicationAttemptId, ApplicationAttemptHistoryData> histData =
|
||||
historyStore.getApplicationAttempts(appId);
|
||||
HashMap<ApplicationAttemptId, ApplicationAttemptReport> applicationAttemptsReport =
|
||||
new HashMap<ApplicationAttemptId, ApplicationAttemptReport>();
|
||||
for (Entry<ApplicationAttemptId, ApplicationAttemptHistoryData> entry : histData
|
||||
.entrySet()) {
|
||||
applicationAttemptsReport.put(entry.getKey(),
|
||||
convertToApplicationAttemptReport(entry.getValue()));
|
||||
}
|
||||
return applicationAttemptsReport;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ContainerReport getContainer(ContainerId containerId)
|
||||
throws IOException {
|
||||
return convertToContainerReport(historyStore.getContainer(containerId));
|
||||
}
|
||||
|
||||
private ContainerReport convertToContainerReport(
|
||||
ContainerHistoryData containerHistory) {
|
||||
return ContainerReport.newInstance(containerHistory.getContainerId(),
|
||||
containerHistory.getAllocatedResource(),
|
||||
containerHistory.getAssignedNode(), containerHistory.getPriority(),
|
||||
containerHistory.getStartTime(), containerHistory.getFinishTime(),
|
||||
containerHistory.getDiagnosticsInfo(), containerHistory.getLogURL(),
|
||||
containerHistory.getContainerExitStatus(),
|
||||
containerHistory.getContainerState());
|
||||
}
|
||||
|
||||
@Override
|
||||
public Map<ContainerId, ContainerReport> getContainers(
|
||||
ApplicationAttemptId appAttemptId) throws IOException {
|
||||
Map<ContainerId, ContainerHistoryData> histData =
|
||||
historyStore.getContainers(appAttemptId);
|
||||
HashMap<ContainerId, ContainerReport> containersReport =
|
||||
new HashMap<ContainerId, ContainerReport>();
|
||||
for (Entry<ContainerId, ContainerHistoryData> entry : histData.entrySet()) {
|
||||
containersReport.put(entry.getKey(),
|
||||
convertToContainerReport(entry.getValue()));
|
||||
}
|
||||
return containersReport;
|
||||
}
|
||||
|
||||
@Private
|
||||
@VisibleForTesting
|
||||
public ApplicationHistoryStore getHistoryStore() {
|
||||
return this.historyStore;
|
||||
}
|
||||
}
|
|
@@ -0,0 +1,117 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.yarn.server.applicationhistoryservice;

import java.io.IOException;
import java.util.Map;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptHistoryData;
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationHistoryData;
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerHistoryData;

@InterfaceAudience.Public
@InterfaceStability.Unstable
public interface ApplicationHistoryReader {

  /**
   * This method returns the {@link ApplicationHistoryData} for the specified
   * {@link ApplicationId}.
   *
   * @param appId
   *
   * @return {@link ApplicationHistoryData} for the ApplicationId.
   * @throws IOException
   */
  ApplicationHistoryData getApplication(ApplicationId appId) throws IOException;

  /**
   * This method returns all of the {@link ApplicationHistoryData}s.
   *
   * @return map of {@link ApplicationId} to {@link ApplicationHistoryData}s.
   * @throws IOException
   */
  Map<ApplicationId, ApplicationHistoryData> getAllApplications()
      throws IOException;

  /**
   * An application can have multiple application attempts
   * {@link ApplicationAttemptHistoryData}. This method returns all of the
   * {@link ApplicationAttemptHistoryData}s for the application.
   *
   * @param appId
   *
   * @return all {@link ApplicationAttemptHistoryData}s for the application.
   * @throws IOException
   */
  Map<ApplicationAttemptId, ApplicationAttemptHistoryData>
      getApplicationAttempts(ApplicationId appId) throws IOException;

  /**
   * This method returns the {@link ApplicationAttemptHistoryData} for the
   * specified {@link ApplicationAttemptId}.
   *
   * @param appAttemptId
   *          {@link ApplicationAttemptId}
   * @return {@link ApplicationAttemptHistoryData} for the ApplicationAttemptId
   * @throws IOException
   */
  ApplicationAttemptHistoryData getApplicationAttempt(
      ApplicationAttemptId appAttemptId) throws IOException;

  /**
   * This method returns the {@link ContainerHistoryData} for the specified
   * {@link ContainerId}.
   *
   * @param containerId
   *          {@link ContainerId}
   * @return {@link ContainerHistoryData} for the ContainerId
   * @throws IOException
   */
  ContainerHistoryData getContainer(ContainerId containerId) throws IOException;

  /**
   * This method returns the {@link ContainerHistoryData} of the AM container
   * for the specified {@link ApplicationAttemptId}.
   *
   * @param appAttemptId
   *          {@link ApplicationAttemptId}
   * @return {@link ContainerHistoryData} for the ApplicationAttemptId
   * @throws IOException
   */
  ContainerHistoryData getAMContainer(ApplicationAttemptId appAttemptId)
      throws IOException;

  /**
   * This method returns a map of {@link ContainerId} to
   * {@link ContainerHistoryData} for the specified {@link ApplicationAttemptId}.
   *
   * @param appAttemptId
   *          {@link ApplicationAttemptId}
   * @return map of {@link ContainerId} to {@link ContainerHistoryData} for
   *         the ApplicationAttemptId
   * @throws IOException
   */
  Map<ContainerId, ContainerHistoryData> getContainers(
      ApplicationAttemptId appAttemptId) throws IOException;
}
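A hedged sketch of how an ApplicationHistoryReader might be walked by a caller, assuming a reader instance is already available (for example a configured FileSystemApplicationHistoryStore) and that the surrounding method propagates IOException; the loop below is illustrative only:

    // Enumerate every application the store knows about, then drill into
    // its attempts and their containers.
    Map<ApplicationId, ApplicationHistoryData> apps = reader.getAllApplications();
    for (ApplicationId appId : apps.keySet()) {
      for (ApplicationAttemptId attemptId :
          reader.getApplicationAttempts(appId).keySet()) {
        ContainerHistoryData amContainer = reader.getAMContainer(attemptId);
        Map<ContainerId, ContainerHistoryData> containers =
            reader.getContainers(attemptId);
        // inspect amContainer and containers as needed
      }
    }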
@ -0,0 +1,159 @@
|
|||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.yarn.server.applicationhistoryservice;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.hadoop.classification.InterfaceAudience.Private;
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
|
||||
import org.apache.hadoop.metrics2.source.JvmMetrics;
|
||||
import org.apache.hadoop.service.CompositeService;
|
||||
import org.apache.hadoop.service.Service;
|
||||
import org.apache.hadoop.util.ExitUtil;
|
||||
import org.apache.hadoop.util.ShutdownHookManager;
|
||||
import org.apache.hadoop.util.StringUtils;
|
||||
import org.apache.hadoop.yarn.YarnUncaughtExceptionHandler;
|
||||
import org.apache.hadoop.yarn.conf.YarnConfiguration;
|
||||
import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
|
||||
import org.apache.hadoop.yarn.server.applicationhistoryservice.webapp.AHSWebApp;
|
||||
import org.apache.hadoop.yarn.webapp.WebApp;
|
||||
import org.apache.hadoop.yarn.webapp.WebApps;
|
||||
import org.apache.hadoop.yarn.webapp.util.WebAppUtils;
|
||||
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
|
||||
/**
|
||||
* History server that keeps track of all types of history in the cluster.
|
||||
* Application specific history to start with.
|
||||
*/
|
||||
public class ApplicationHistoryServer extends CompositeService {
|
||||
|
||||
public static final int SHUTDOWN_HOOK_PRIORITY = 30;
|
||||
private static final Log LOG = LogFactory
|
||||
.getLog(ApplicationHistoryServer.class);
|
||||
|
||||
ApplicationHistoryClientService ahsClientService;
|
||||
ApplicationHistoryManager historyManager;
|
||||
private WebApp webApp;
|
||||
|
||||
public ApplicationHistoryServer() {
|
||||
super(ApplicationHistoryServer.class.getName());
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void serviceInit(Configuration conf) throws Exception {
|
||||
historyManager = createApplicationHistory();
|
||||
ahsClientService = createApplicationHistoryClientService(historyManager);
|
||||
addService(ahsClientService);
|
||||
addService((Service) historyManager);
|
||||
super.serviceInit(conf);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void serviceStart() throws Exception {
|
||||
DefaultMetricsSystem.initialize("ApplicationHistoryServer");
|
||||
JvmMetrics.initSingleton("ApplicationHistoryServer", null);
|
||||
|
||||
startWebApp();
|
||||
super.serviceStart();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void serviceStop() throws Exception {
|
||||
if (webApp != null) {
|
||||
webApp.stop();
|
||||
}
|
||||
|
||||
DefaultMetricsSystem.shutdown();
|
||||
super.serviceStop();
|
||||
}
|
||||
|
||||
@Private
|
||||
@VisibleForTesting
|
||||
public ApplicationHistoryClientService getClientService() {
|
||||
return this.ahsClientService;
|
||||
}
|
||||
|
||||
protected ApplicationHistoryClientService
|
||||
createApplicationHistoryClientService(
|
||||
ApplicationHistoryManager historyManager) {
|
||||
return new ApplicationHistoryClientService(historyManager);
|
||||
}
|
||||
|
||||
protected ApplicationHistoryManager createApplicationHistory() {
|
||||
return new ApplicationHistoryManagerImpl();
|
||||
}
|
||||
|
||||
protected ApplicationHistoryManager getApplicationHistory() {
|
||||
return this.historyManager;
|
||||
}
|
||||
|
||||
static ApplicationHistoryServer launchAppHistoryServer(String[] args) {
|
||||
Thread
|
||||
.setDefaultUncaughtExceptionHandler(new YarnUncaughtExceptionHandler());
|
||||
StringUtils.startupShutdownMessage(ApplicationHistoryServer.class, args,
|
||||
LOG);
|
||||
ApplicationHistoryServer appHistoryServer = null;
|
||||
try {
|
||||
appHistoryServer = new ApplicationHistoryServer();
|
||||
ShutdownHookManager.get().addShutdownHook(
|
||||
new CompositeServiceShutdownHook(appHistoryServer),
|
||||
SHUTDOWN_HOOK_PRIORITY);
|
||||
YarnConfiguration conf = new YarnConfiguration();
|
||||
appHistoryServer.init(conf);
|
||||
appHistoryServer.start();
|
||||
} catch (Throwable t) {
|
||||
LOG.fatal("Error starting ApplicationHistoryServer", t);
|
||||
ExitUtil.terminate(-1, "Error starting ApplicationHistoryServer");
|
||||
}
|
||||
return appHistoryServer;
|
||||
}
|
||||
|
||||
public static void main(String[] args) {
|
||||
launchAppHistoryServer(args);
|
||||
}
|
||||
|
||||
protected ApplicationHistoryManager createApplicationHistoryManager(
|
||||
Configuration conf) {
|
||||
return new ApplicationHistoryManagerImpl();
|
||||
}
|
||||
|
||||
protected void startWebApp() {
|
||||
String bindAddress = WebAppUtils.getAHSWebAppURLWithoutScheme(getConfig());
|
||||
LOG.info("Instantiating AHSWebApp at " + bindAddress);
|
||||
try {
|
||||
webApp =
|
||||
WebApps
|
||||
.$for("applicationhistory", ApplicationHistoryClientService.class,
|
||||
ahsClientService, "ws")
|
||||
.with(getConfig())
|
||||
.withHttpSpnegoPrincipalKey(
|
||||
YarnConfiguration.AHS_WEBAPP_SPNEGO_USER_NAME_KEY)
|
||||
.withHttpSpnegoKeytabKey(
|
||||
YarnConfiguration.AHS_WEBAPP_SPNEGO_KEYTAB_FILE_KEY)
|
||||
.at(bindAddress).start(new AHSWebApp(historyManager));
|
||||
} catch (Exception e) {
|
||||
String msg = "AHSWebApp failed to start.";
|
||||
LOG.error(msg, e);
|
||||
throw new YarnRuntimeException(msg, e);
|
||||
}
|
||||
}
|
||||
|
||||
}
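
A minimal sketch of launching this server in-process, following the same life cycle that launchAppHistoryServer drives. It assumes a default YarnConfiguration and a free web-app port; the class name below is illustrative only.

package org.apache.hadoop.yarn.server.applicationhistoryservice;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.service.Service.STATE;
import org.apache.hadoop.yarn.conf.YarnConfiguration;

public class AHSLaunchSketch {
  public static void main(String[] args) {
    // Assumption: the default configuration is enough to bring the server up.
    Configuration conf = new YarnConfiguration();
    ApplicationHistoryServer server = new ApplicationHistoryServer();
    try {
      server.init(conf);   // wires the client service, history manager and web app
      server.start();      // starts the metrics system and the AHS web app
      assert server.getServiceState() == STATE.STARTED;
    } finally {
      server.stop();       // stops the web app and shuts the metrics system down
    }
  }
}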
|
|
@@ -0,0 +1,37 @@
|
|||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.yarn.server.applicationhistoryservice;
|
||||
|
||||
import org.apache.hadoop.classification.InterfaceAudience;
|
||||
import org.apache.hadoop.classification.InterfaceStability;
|
||||
import org.apache.hadoop.service.Service;
|
||||
|
||||
/**
* This interface is the abstraction of the storage of application history
* data. It is a {@link Service}, so that an implementation can use the
* service life cycle to initialize and clean up the storage. Users access
* the storage via the {@link ApplicationHistoryReader} and
* {@link ApplicationHistoryWriter} interfaces.
*/
|
||||
@InterfaceAudience.Public
|
||||
@InterfaceStability.Unstable
|
||||
public interface ApplicationHistoryStore extends Service,
|
||||
ApplicationHistoryReader, ApplicationHistoryWriter {
|
||||
}
|
|
@@ -0,0 +1,112 @@
|
|||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.yarn.server.applicationhistoryservice;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import org.apache.hadoop.classification.InterfaceAudience.Private;
|
||||
import org.apache.hadoop.classification.InterfaceStability.Unstable;
|
||||
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptFinishData;
|
||||
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptStartData;
|
||||
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationFinishData;
|
||||
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationStartData;
|
||||
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerFinishData;
|
||||
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerStartData;
|
||||
|
||||
/**
* This interface defines the write side of the application history, exposing
* methods for writing {@link ApplicationStartData}, {@link ApplicationFinishData},
* {@link ApplicationAttemptStartData}, {@link ApplicationAttemptFinishData},
* {@link ContainerStartData} and {@link ContainerFinishData}.
*/
|
||||
@Private
|
||||
@Unstable
|
||||
public interface ApplicationHistoryWriter {
|
||||
|
||||
/**
|
||||
* This method writes the information of <code>RMApp</code> that is available
|
||||
* when it starts.
|
||||
*
|
||||
* @param appStart
|
||||
* the record of the information of <code>RMApp</code> that is
|
||||
* available when it starts
|
||||
* @throws IOException
|
||||
*/
|
||||
void applicationStarted(ApplicationStartData appStart) throws IOException;
|
||||
|
||||
/**
|
||||
* This method writes the information of <code>RMApp</code> that is available
|
||||
* when it finishes.
|
||||
*
|
||||
* @param appFinish
|
||||
* the record of the information of <code>RMApp</code> that is
|
||||
* available when it finishes
|
||||
* @throws IOException
|
||||
*/
|
||||
void applicationFinished(ApplicationFinishData appFinish) throws IOException;
|
||||
|
||||
/**
|
||||
* This method writes the information of <code>RMAppAttempt</code> that is
|
||||
* available when it starts.
|
||||
*
|
||||
* @param appAttemptStart
|
||||
* the record of the information of <code>RMAppAttempt</code> that is
|
||||
* available when it starts
|
||||
* @throws IOException
|
||||
*/
|
||||
void applicationAttemptStarted(ApplicationAttemptStartData appAttemptStart)
|
||||
throws IOException;
|
||||
|
||||
/**
|
||||
* This method writes the information of <code>RMAppAttempt</code> that is
|
||||
* available when it finishes.
|
||||
*
|
||||
* @param appAttemptFinish
|
||||
* the record of the information of <code>RMAppAttempt</code> that is
|
||||
* available when it finishes
|
||||
* @throws IOException
|
||||
*/
|
||||
void
|
||||
applicationAttemptFinished(ApplicationAttemptFinishData appAttemptFinish)
|
||||
throws IOException;
|
||||
|
||||
/**
|
||||
* This method writes the information of <code>RMContainer</code> that is
|
||||
* available when it starts.
|
||||
*
|
||||
* @param containerStart
|
||||
* the record of the information of <code>RMContainer</code> that is
|
||||
* available when it starts
|
||||
* @throws IOException
|
||||
*/
|
||||
void containerStarted(ContainerStartData containerStart) throws IOException;
|
||||
|
||||
/**
|
||||
* This method writes the information of <code>RMContainer</code> that is
|
||||
* available when it finishes.
|
||||
*
|
||||
* @param containerFinish
|
||||
* the record of the information of <code>RMContainer</code> that is
|
||||
* available when it finishes
|
||||
* @throws IOException
|
||||
*/
|
||||
void containerFinished(ContainerFinishData containerFinish)
|
||||
throws IOException;
|
||||
|
||||
}
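
As a sketch of the contract above, a trivial writer can implement the six callbacks and only log what it receives; the class name is made up, and only getters that appear elsewhere in this change are used.

package org.apache.hadoop.yarn.server.applicationhistoryservice;

import java.io.IOException;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptFinishData;
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptStartData;
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationFinishData;
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationStartData;
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerFinishData;
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerStartData;

public class LoggingApplicationHistoryWriter implements ApplicationHistoryWriter {

  private static final Log LOG =
      LogFactory.getLog(LoggingApplicationHistoryWriter.class);

  @Override
  public void applicationStarted(ApplicationStartData appStart)
      throws IOException {
    LOG.info("application started: " + appStart.getApplicationId());
  }

  @Override
  public void applicationFinished(ApplicationFinishData appFinish)
      throws IOException {
    LOG.info("application finished: " + appFinish.getApplicationId());
  }

  @Override
  public void applicationAttemptStarted(ApplicationAttemptStartData appAttemptStart)
      throws IOException {
    LOG.info("attempt started: " + appAttemptStart.getApplicationAttemptId());
  }

  @Override
  public void applicationAttemptFinished(ApplicationAttemptFinishData appAttemptFinish)
      throws IOException {
    LOG.info("attempt finished: " + appAttemptFinish.getApplicationAttemptId());
  }

  @Override
  public void containerStarted(ContainerStartData containerStart)
      throws IOException {
    LOG.info("container started: " + containerStart.getContainerId());
  }

  @Override
  public void containerFinished(ContainerFinishData containerFinish)
      throws IOException {
    LOG.info("container finished: " + containerFinish.getContainerId());
  }
}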
|
|
@@ -0,0 +1,841 @@
|
|||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.yarn.server.applicationhistoryservice;
|
||||
|
||||
import java.io.DataInput;
|
||||
import java.io.DataInputStream;
|
||||
import java.io.DataOutput;
|
||||
import java.io.DataOutputStream;
|
||||
import java.io.IOException;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.concurrent.ConcurrentHashMap;
|
||||
import java.util.concurrent.ConcurrentMap;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.hadoop.classification.InterfaceAudience.Public;
|
||||
import org.apache.hadoop.classification.InterfaceStability.Unstable;
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.fs.FSDataInputStream;
|
||||
import org.apache.hadoop.fs.FSDataOutputStream;
|
||||
import org.apache.hadoop.fs.FileStatus;
|
||||
import org.apache.hadoop.fs.FileSystem;
|
||||
import org.apache.hadoop.fs.Path;
|
||||
import org.apache.hadoop.fs.permission.FsPermission;
|
||||
import org.apache.hadoop.io.IOUtils;
|
||||
import org.apache.hadoop.io.Writable;
|
||||
import org.apache.hadoop.io.file.tfile.TFile;
|
||||
import org.apache.hadoop.service.AbstractService;
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationId;
|
||||
import org.apache.hadoop.yarn.api.records.ContainerId;
|
||||
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
|
||||
import org.apache.hadoop.yarn.conf.YarnConfiguration;
|
||||
import org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptFinishDataProto;
|
||||
import org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptStartDataProto;
|
||||
import org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationFinishDataProto;
|
||||
import org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationStartDataProto;
|
||||
import org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerFinishDataProto;
|
||||
import org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerStartDataProto;
|
||||
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptFinishData;
|
||||
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptHistoryData;
|
||||
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptStartData;
|
||||
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationFinishData;
|
||||
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationHistoryData;
|
||||
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationStartData;
|
||||
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerFinishData;
|
||||
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerHistoryData;
|
||||
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerStartData;
|
||||
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.impl.pb.ApplicationAttemptFinishDataPBImpl;
|
||||
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.impl.pb.ApplicationAttemptStartDataPBImpl;
|
||||
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.impl.pb.ApplicationFinishDataPBImpl;
|
||||
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.impl.pb.ApplicationStartDataPBImpl;
|
||||
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.impl.pb.ContainerFinishDataPBImpl;
|
||||
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.impl.pb.ContainerStartDataPBImpl;
|
||||
import org.apache.hadoop.yarn.util.ConverterUtils;
|
||||
|
||||
import com.google.protobuf.InvalidProtocolBufferException;
|
||||
|
||||
/**
* File system implementation of {@link ApplicationHistoryStore}. In this
* implementation, each application has exactly one file in the file system,
* which contains all the history data of that application, its attempts and
* its containers. {@link #applicationStarted(ApplicationStartData)} is supposed
* to be invoked first when writing any history data of an application, and it
* opens the file, while {@link #applicationFinished(ApplicationFinishData)} is
* supposed to be the last write operation and closes the file.
*/
|
||||
@Public
|
||||
@Unstable
|
||||
public class FileSystemApplicationHistoryStore extends AbstractService
|
||||
implements ApplicationHistoryStore {
|
||||
|
||||
private static final Log LOG = LogFactory
|
||||
.getLog(FileSystemApplicationHistoryStore.class);
|
||||
|
||||
private static final String ROOT_DIR_NAME = "ApplicationHistoryDataRoot";
|
||||
private static final int MIN_BLOCK_SIZE = 256 * 1024;
|
||||
private static final String START_DATA_SUFFIX = "_start";
|
||||
private static final String FINISH_DATA_SUFFIX = "_finish";
|
||||
private static final FsPermission ROOT_DIR_UMASK = FsPermission
|
||||
.createImmutable((short) 0740);
|
||||
private static final FsPermission HISTORY_FILE_UMASK = FsPermission
|
||||
.createImmutable((short) 0640);
|
||||
|
||||
private FileSystem fs;
|
||||
private Path rootDirPath;
|
||||
|
||||
private ConcurrentMap<ApplicationId, HistoryFileWriter> outstandingWriters =
|
||||
new ConcurrentHashMap<ApplicationId, HistoryFileWriter>();
|
||||
|
||||
public FileSystemApplicationHistoryStore() {
|
||||
super(FileSystemApplicationHistoryStore.class.getName());
|
||||
}
|
||||
|
||||
@Override
|
||||
public void serviceInit(Configuration conf) throws Exception {
|
||||
Path fsWorkingPath =
|
||||
new Path(conf.get(YarnConfiguration.FS_HISTORY_STORE_URI));
|
||||
rootDirPath = new Path(fsWorkingPath, ROOT_DIR_NAME);
|
||||
try {
|
||||
fs = fsWorkingPath.getFileSystem(conf);
|
||||
fs.mkdirs(rootDirPath);
|
||||
fs.setPermission(rootDirPath, ROOT_DIR_UMASK);
|
||||
} catch (IOException e) {
|
||||
LOG.error("Error when initializing FileSystemHistoryStorage", e);
|
||||
throw e;
|
||||
}
|
||||
super.serviceInit(conf);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void serviceStop() throws Exception {
|
||||
try {
|
||||
for (Entry<ApplicationId, HistoryFileWriter> entry : outstandingWriters
|
||||
.entrySet()) {
|
||||
entry.getValue().close();
|
||||
}
|
||||
outstandingWriters.clear();
|
||||
} finally {
|
||||
IOUtils.cleanup(LOG, fs);
|
||||
}
|
||||
super.serviceStop();
|
||||
}
|
||||
|
||||
@Override
|
||||
public ApplicationHistoryData getApplication(ApplicationId appId)
|
||||
throws IOException {
|
||||
HistoryFileReader hfReader = getHistoryFileReader(appId);
|
||||
try {
|
||||
boolean readStartData = false;
|
||||
boolean readFinishData = false;
|
||||
ApplicationHistoryData historyData =
|
||||
ApplicationHistoryData.newInstance(appId, null, null, null, null,
|
||||
Long.MIN_VALUE, Long.MIN_VALUE, Long.MAX_VALUE, null,
|
||||
FinalApplicationStatus.UNDEFINED, null);
|
||||
while ((!readStartData || !readFinishData) && hfReader.hasNext()) {
|
||||
HistoryFileReader.Entry entry = hfReader.next();
|
||||
if (entry.key.id.equals(appId.toString())) {
|
||||
if (entry.key.suffix.equals(START_DATA_SUFFIX)) {
|
||||
ApplicationStartData startData =
|
||||
parseApplicationStartData(entry.value);
|
||||
mergeApplicationHistoryData(historyData, startData);
|
||||
readStartData = true;
|
||||
} else if (entry.key.suffix.equals(FINISH_DATA_SUFFIX)) {
|
||||
ApplicationFinishData finishData =
|
||||
parseApplicationFinishData(entry.value);
|
||||
mergeApplicationHistoryData(historyData, finishData);
|
||||
readFinishData = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (!readStartData && !readFinishData) {
|
||||
return null;
|
||||
}
|
||||
if (!readStartData) {
|
||||
LOG.warn("Start information is missing for application " + appId);
|
||||
}
|
||||
if (!readFinishData) {
|
||||
LOG.warn("Finish information is missing for application " + appId);
|
||||
}
|
||||
LOG.info("Completed reading history information of application " + appId);
|
||||
return historyData;
|
||||
} catch (IOException e) {
|
||||
LOG.error("Error when reading history file of application " + appId);
|
||||
throw e;
|
||||
} finally {
|
||||
hfReader.close();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public Map<ApplicationId, ApplicationHistoryData> getAllApplications()
|
||||
throws IOException {
|
||||
Map<ApplicationId, ApplicationHistoryData> historyDataMap =
|
||||
new HashMap<ApplicationId, ApplicationHistoryData>();
|
||||
FileStatus[] files = fs.listStatus(rootDirPath);
|
||||
for (FileStatus file : files) {
|
||||
ApplicationId appId =
|
||||
ConverterUtils.toApplicationId(file.getPath().getName());
|
||||
try {
|
||||
ApplicationHistoryData historyData = getApplication(appId);
|
||||
if (historyData != null) {
|
||||
historyDataMap.put(appId, historyData);
|
||||
}
|
||||
} catch (IOException e) {
|
||||
// Swallow the exception so that it does not prevent reading the next
// ApplicationHistoryData
|
||||
LOG.error("History information of application " + appId
|
||||
+ " is not included into the result due to the exception", e);
|
||||
}
|
||||
}
|
||||
return historyDataMap;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Map<ApplicationAttemptId, ApplicationAttemptHistoryData>
|
||||
getApplicationAttempts(ApplicationId appId) throws IOException {
|
||||
Map<ApplicationAttemptId, ApplicationAttemptHistoryData> historyDataMap =
|
||||
new HashMap<ApplicationAttemptId, ApplicationAttemptHistoryData>();
|
||||
Map<ApplicationAttemptId, StartFinishDataPair<ApplicationAttemptStartData, ApplicationAttemptFinishData>> startFinshDataMap =
|
||||
new HashMap<ApplicationAttemptId, StartFinishDataPair<ApplicationAttemptStartData, ApplicationAttemptFinishData>>();
|
||||
HistoryFileReader hfReader = getHistoryFileReader(appId);
|
||||
try {
|
||||
while (hfReader.hasNext()) {
|
||||
HistoryFileReader.Entry entry = hfReader.next();
|
||||
if (entry.key.id.startsWith(ConverterUtils.APPLICATION_ATTEMPT_PREFIX)) {
|
||||
if (entry.key.suffix.equals(START_DATA_SUFFIX)) {
|
||||
retrieveStartFinishData(appId, entry, startFinshDataMap, true);
|
||||
} else if (entry.key.suffix.equals(FINISH_DATA_SUFFIX)) {
|
||||
retrieveStartFinishData(appId, entry, startFinshDataMap, false);
|
||||
}
|
||||
}
|
||||
}
|
||||
LOG.info("Completed reading history information of all application"
|
||||
+ " attempts of application " + appId);
|
||||
} catch (IOException e) {
|
||||
LOG.info("Error when reading history information of some application"
|
||||
+ " attempts of application " + appId);
|
||||
} finally {
|
||||
hfReader.close();
|
||||
}
|
||||
for (Map.Entry<ApplicationAttemptId, StartFinishDataPair<ApplicationAttemptStartData, ApplicationAttemptFinishData>> entry : startFinshDataMap
|
||||
.entrySet()) {
|
||||
ApplicationAttemptHistoryData historyData =
|
||||
ApplicationAttemptHistoryData.newInstance(entry.getKey(), null, -1,
|
||||
null, null, null, FinalApplicationStatus.UNDEFINED, null);
|
||||
mergeApplicationAttemptHistoryData(historyData,
|
||||
entry.getValue().startData);
|
||||
mergeApplicationAttemptHistoryData(historyData,
|
||||
entry.getValue().finishData);
|
||||
historyDataMap.put(entry.getKey(), historyData);
|
||||
}
|
||||
return historyDataMap;
|
||||
}
|
||||
|
||||
private
|
||||
void
|
||||
retrieveStartFinishData(
|
||||
ApplicationId appId,
|
||||
HistoryFileReader.Entry entry,
|
||||
Map<ApplicationAttemptId, StartFinishDataPair<ApplicationAttemptStartData, ApplicationAttemptFinishData>> startFinshDataMap,
|
||||
boolean start) throws IOException {
|
||||
ApplicationAttemptId appAttemptId =
|
||||
ConverterUtils.toApplicationAttemptId(entry.key.id);
|
||||
if (appAttemptId.getApplicationId().equals(appId)) {
|
||||
StartFinishDataPair<ApplicationAttemptStartData, ApplicationAttemptFinishData> pair =
|
||||
startFinshDataMap.get(appAttemptId);
|
||||
if (pair == null) {
|
||||
pair =
|
||||
new StartFinishDataPair<ApplicationAttemptStartData, ApplicationAttemptFinishData>();
|
||||
startFinshDataMap.put(appAttemptId, pair);
|
||||
}
|
||||
if (start) {
|
||||
pair.startData = parseApplicationAttemptStartData(entry.value);
|
||||
} else {
|
||||
pair.finishData = parseApplicationAttemptFinishData(entry.value);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public ApplicationAttemptHistoryData getApplicationAttempt(
|
||||
ApplicationAttemptId appAttemptId) throws IOException {
|
||||
HistoryFileReader hfReader =
|
||||
getHistoryFileReader(appAttemptId.getApplicationId());
|
||||
try {
|
||||
boolean readStartData = false;
|
||||
boolean readFinishData = false;
|
||||
ApplicationAttemptHistoryData historyData =
|
||||
ApplicationAttemptHistoryData.newInstance(appAttemptId, null, -1,
|
||||
null, null, null, FinalApplicationStatus.UNDEFINED, null);
|
||||
while ((!readStartData || !readFinishData) && hfReader.hasNext()) {
|
||||
HistoryFileReader.Entry entry = hfReader.next();
|
||||
if (entry.key.id.equals(appAttemptId.toString())) {
|
||||
if (entry.key.suffix.equals(START_DATA_SUFFIX)) {
|
||||
ApplicationAttemptStartData startData =
|
||||
parseApplicationAttemptStartData(entry.value);
|
||||
mergeApplicationAttemptHistoryData(historyData, startData);
|
||||
readStartData = true;
|
||||
} else if (entry.key.suffix.equals(FINISH_DATA_SUFFIX)) {
|
||||
ApplicationAttemptFinishData finishData =
|
||||
parseApplicationAttemptFinishData(entry.value);
|
||||
mergeApplicationAttemptHistoryData(historyData, finishData);
|
||||
readFinishData = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (!readStartData && !readFinishData) {
|
||||
return null;
|
||||
}
|
||||
if (!readStartData) {
|
||||
LOG.warn("Start information is missing for application attempt "
|
||||
+ appAttemptId);
|
||||
}
|
||||
if (!readFinishData) {
|
||||
LOG.warn("Finish information is missing for application attempt "
|
||||
+ appAttemptId);
|
||||
}
|
||||
LOG.info("Completed reading history information of application attempt "
|
||||
+ appAttemptId);
|
||||
return historyData;
|
||||
} catch (IOException e) {
|
||||
LOG.error("Error when reading history file of application attempt"
|
||||
+ appAttemptId);
|
||||
throw e;
|
||||
} finally {
|
||||
hfReader.close();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public ContainerHistoryData getContainer(ContainerId containerId)
|
||||
throws IOException {
|
||||
HistoryFileReader hfReader =
|
||||
getHistoryFileReader(containerId.getApplicationAttemptId()
|
||||
.getApplicationId());
|
||||
try {
|
||||
boolean readStartData = false;
|
||||
boolean readFinishData = false;
|
||||
ContainerHistoryData historyData =
|
||||
ContainerHistoryData
|
||||
.newInstance(containerId, null, null, null, Long.MIN_VALUE,
|
||||
Long.MAX_VALUE, null, null, Integer.MAX_VALUE, null);
|
||||
while ((!readStartData || !readFinishData) && hfReader.hasNext()) {
|
||||
HistoryFileReader.Entry entry = hfReader.next();
|
||||
if (entry.key.id.equals(containerId.toString())) {
|
||||
if (entry.key.suffix.equals(START_DATA_SUFFIX)) {
|
||||
ContainerStartData startData = parseContainerStartData(entry.value);
|
||||
mergeContainerHistoryData(historyData, startData);
|
||||
readStartData = true;
|
||||
} else if (entry.key.suffix.equals(FINISH_DATA_SUFFIX)) {
|
||||
ContainerFinishData finishData =
|
||||
parseContainerFinishData(entry.value);
|
||||
mergeContainerHistoryData(historyData, finishData);
|
||||
readFinishData = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (!readStartData && !readFinishData) {
|
||||
return null;
|
||||
}
|
||||
if (!readStartData) {
|
||||
LOG.warn("Start information is missing for container " + containerId);
|
||||
}
|
||||
if (!readFinishData) {
|
||||
LOG.warn("Finish information is missing for container " + containerId);
|
||||
}
|
||||
LOG.info("Completed reading history information of container "
|
||||
+ containerId);
|
||||
return historyData;
|
||||
} catch (IOException e) {
|
||||
LOG.error("Error when reading history file of container " + containerId);
|
||||
throw e;
|
||||
} finally {
|
||||
hfReader.close();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public ContainerHistoryData getAMContainer(ApplicationAttemptId appAttemptId)
|
||||
throws IOException {
|
||||
ApplicationAttemptHistoryData attemptHistoryData =
|
||||
getApplicationAttempt(appAttemptId);
|
||||
if (attemptHistoryData == null
|
||||
|| attemptHistoryData.getMasterContainerId() == null) {
|
||||
return null;
|
||||
}
|
||||
return getContainer(attemptHistoryData.getMasterContainerId());
|
||||
}
|
||||
|
||||
@Override
|
||||
public Map<ContainerId, ContainerHistoryData> getContainers(
|
||||
ApplicationAttemptId appAttemptId) throws IOException {
|
||||
Map<ContainerId, ContainerHistoryData> historyDataMap =
|
||||
new HashMap<ContainerId, ContainerHistoryData>();
|
||||
Map<ContainerId, StartFinishDataPair<ContainerStartData, ContainerFinishData>> startFinshDataMap =
|
||||
new HashMap<ContainerId, StartFinishDataPair<ContainerStartData, ContainerFinishData>>();
|
||||
HistoryFileReader hfReader =
|
||||
getHistoryFileReader(appAttemptId.getApplicationId());
|
||||
try {
|
||||
while (hfReader.hasNext()) {
|
||||
HistoryFileReader.Entry entry = hfReader.next();
|
||||
if (entry.key.id.startsWith(ConverterUtils.CONTAINER_PREFIX)) {
|
||||
if (entry.key.suffix.equals(START_DATA_SUFFIX)) {
|
||||
retrieveStartFinishData(appAttemptId, entry, startFinshDataMap,
|
||||
true);
|
||||
} else if (entry.key.suffix.equals(FINISH_DATA_SUFFIX)) {
|
||||
retrieveStartFinishData(appAttemptId, entry, startFinshDataMap,
|
||||
false);
|
||||
}
|
||||
}
|
||||
}
|
||||
LOG.info("Completed reading history information of all conatiners"
|
||||
+ " of application attempt " + appAttemptId);
|
||||
} catch (IOException e) {
|
||||
LOG.info("Error when reading history information of some containers"
|
||||
+ " of application attempt " + appAttemptId);
|
||||
} finally {
|
||||
hfReader.close();
|
||||
}
|
||||
for (Map.Entry<ContainerId, StartFinishDataPair<ContainerStartData, ContainerFinishData>> entry : startFinshDataMap
|
||||
.entrySet()) {
|
||||
ContainerHistoryData historyData =
|
||||
ContainerHistoryData
|
||||
.newInstance(entry.getKey(), null, null, null, Long.MIN_VALUE,
|
||||
Long.MAX_VALUE, null, null, Integer.MAX_VALUE, null);
|
||||
mergeContainerHistoryData(historyData, entry.getValue().startData);
|
||||
mergeContainerHistoryData(historyData, entry.getValue().finishData);
|
||||
historyDataMap.put(entry.getKey(), historyData);
|
||||
}
|
||||
return historyDataMap;
|
||||
}
|
||||
|
||||
private
|
||||
void
|
||||
retrieveStartFinishData(
|
||||
ApplicationAttemptId appAttemptId,
|
||||
HistoryFileReader.Entry entry,
|
||||
Map<ContainerId, StartFinishDataPair<ContainerStartData, ContainerFinishData>> startFinshDataMap,
|
||||
boolean start) throws IOException {
|
||||
ContainerId containerId = ConverterUtils.toContainerId(entry.key.id);
|
||||
if (containerId.getApplicationAttemptId().equals(appAttemptId)) {
|
||||
StartFinishDataPair<ContainerStartData, ContainerFinishData> pair =
|
||||
startFinshDataMap.get(containerId);
|
||||
if (pair == null) {
|
||||
pair =
|
||||
new StartFinishDataPair<ContainerStartData, ContainerFinishData>();
|
||||
startFinshDataMap.put(containerId, pair);
|
||||
}
|
||||
if (start) {
|
||||
pair.startData = parseContainerStartData(entry.value);
|
||||
} else {
|
||||
pair.finishData = parseContainerFinishData(entry.value);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void applicationStarted(ApplicationStartData appStart)
|
||||
throws IOException {
|
||||
HistoryFileWriter hfWriter =
|
||||
outstandingWriters.get(appStart.getApplicationId());
|
||||
if (hfWriter == null) {
|
||||
Path applicationHistoryFile =
|
||||
new Path(rootDirPath, appStart.getApplicationId().toString());
|
||||
try {
|
||||
hfWriter = new HistoryFileWriter(applicationHistoryFile);
|
||||
LOG.info("Opened history file of application "
|
||||
+ appStart.getApplicationId());
|
||||
} catch (IOException e) {
|
||||
LOG.error("Error when openning history file of application "
|
||||
+ appStart.getApplicationId());
|
||||
throw e;
|
||||
}
|
||||
outstandingWriters.put(appStart.getApplicationId(), hfWriter);
|
||||
} else {
|
||||
throw new IOException("History file of application "
|
||||
+ appStart.getApplicationId() + " is already opened");
|
||||
}
|
||||
assert appStart instanceof ApplicationStartDataPBImpl;
|
||||
try {
|
||||
hfWriter.writeHistoryData(new HistoryDataKey(appStart.getApplicationId()
|
||||
.toString(), START_DATA_SUFFIX),
|
||||
((ApplicationStartDataPBImpl) appStart).getProto().toByteArray());
|
||||
LOG.info("Start information of application "
|
||||
+ appStart.getApplicationId() + " is written");
|
||||
} catch (IOException e) {
|
||||
LOG.error("Error when writing start information of application "
|
||||
+ appStart.getApplicationId());
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void applicationFinished(ApplicationFinishData appFinish)
|
||||
throws IOException {
|
||||
HistoryFileWriter hfWriter =
|
||||
getHistoryFileWriter(appFinish.getApplicationId());
|
||||
assert appFinish instanceof ApplicationFinishDataPBImpl;
|
||||
try {
|
||||
hfWriter.writeHistoryData(new HistoryDataKey(appFinish.getApplicationId()
|
||||
.toString(), FINISH_DATA_SUFFIX),
|
||||
((ApplicationFinishDataPBImpl) appFinish).getProto().toByteArray());
|
||||
LOG.info("Finish information of application "
|
||||
+ appFinish.getApplicationId() + " is written");
|
||||
} catch (IOException e) {
|
||||
LOG.error("Error when writing finish information of application "
|
||||
+ appFinish.getApplicationId());
|
||||
throw e;
|
||||
} finally {
|
||||
hfWriter.close();
|
||||
outstandingWriters.remove(appFinish.getApplicationId());
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void applicationAttemptStarted(
|
||||
ApplicationAttemptStartData appAttemptStart) throws IOException {
|
||||
HistoryFileWriter hfWriter =
|
||||
getHistoryFileWriter(appAttemptStart.getApplicationAttemptId()
|
||||
.getApplicationId());
|
||||
assert appAttemptStart instanceof ApplicationAttemptStartDataPBImpl;
|
||||
try {
|
||||
hfWriter.writeHistoryData(new HistoryDataKey(appAttemptStart
|
||||
.getApplicationAttemptId().toString(), START_DATA_SUFFIX),
|
||||
((ApplicationAttemptStartDataPBImpl) appAttemptStart).getProto()
|
||||
.toByteArray());
|
||||
LOG.info("Start information of application attempt "
|
||||
+ appAttemptStart.getApplicationAttemptId() + " is written");
|
||||
} catch (IOException e) {
|
||||
LOG.error("Error when writing start information of application attempt "
|
||||
+ appAttemptStart.getApplicationAttemptId());
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void applicationAttemptFinished(
|
||||
ApplicationAttemptFinishData appAttemptFinish) throws IOException {
|
||||
HistoryFileWriter hfWriter =
|
||||
getHistoryFileWriter(appAttemptFinish.getApplicationAttemptId()
|
||||
.getApplicationId());
|
||||
assert appAttemptFinish instanceof ApplicationAttemptFinishDataPBImpl;
|
||||
try {
|
||||
hfWriter.writeHistoryData(new HistoryDataKey(appAttemptFinish
|
||||
.getApplicationAttemptId().toString(), FINISH_DATA_SUFFIX),
|
||||
((ApplicationAttemptFinishDataPBImpl) appAttemptFinish).getProto()
|
||||
.toByteArray());
|
||||
LOG.info("Finish information of application attempt "
|
||||
+ appAttemptFinish.getApplicationAttemptId() + " is written");
|
||||
} catch (IOException e) {
|
||||
LOG.error("Error when writing finish information of application attempt "
|
||||
+ appAttemptFinish.getApplicationAttemptId());
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void containerStarted(ContainerStartData containerStart)
|
||||
throws IOException {
|
||||
HistoryFileWriter hfWriter =
|
||||
getHistoryFileWriter(containerStart.getContainerId()
|
||||
.getApplicationAttemptId().getApplicationId());
|
||||
assert containerStart instanceof ContainerStartDataPBImpl;
|
||||
try {
|
||||
hfWriter.writeHistoryData(new HistoryDataKey(containerStart
|
||||
.getContainerId().toString(), START_DATA_SUFFIX),
|
||||
((ContainerStartDataPBImpl) containerStart).getProto().toByteArray());
|
||||
LOG.info("Start information of container "
|
||||
+ containerStart.getContainerId() + " is written");
|
||||
} catch (IOException e) {
|
||||
LOG.error("Error when writing start information of container "
|
||||
+ containerStart.getContainerId());
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void containerFinished(ContainerFinishData containerFinish)
|
||||
throws IOException {
|
||||
HistoryFileWriter hfWriter =
|
||||
getHistoryFileWriter(containerFinish.getContainerId()
|
||||
.getApplicationAttemptId().getApplicationId());
|
||||
assert containerFinish instanceof ContainerFinishDataPBImpl;
|
||||
try {
|
||||
hfWriter.writeHistoryData(new HistoryDataKey(containerFinish
|
||||
.getContainerId().toString(), FINISH_DATA_SUFFIX),
|
||||
((ContainerFinishDataPBImpl) containerFinish).getProto().toByteArray());
|
||||
LOG.info("Finish information of container "
|
||||
+ containerFinish.getContainerId() + " is written");
|
||||
} catch (IOException e) {
LOG.error("Error when writing finish information of container "
+ containerFinish.getContainerId());
throw e;
}
|
||||
}
|
||||
|
||||
private static ApplicationStartData parseApplicationStartData(byte[] value)
|
||||
throws InvalidProtocolBufferException {
|
||||
return new ApplicationStartDataPBImpl(
|
||||
ApplicationStartDataProto.parseFrom(value));
|
||||
}
|
||||
|
||||
private static ApplicationFinishData parseApplicationFinishData(byte[] value)
|
||||
throws InvalidProtocolBufferException {
|
||||
return new ApplicationFinishDataPBImpl(
|
||||
ApplicationFinishDataProto.parseFrom(value));
|
||||
}
|
||||
|
||||
private static ApplicationAttemptStartData parseApplicationAttemptStartData(
|
||||
byte[] value) throws InvalidProtocolBufferException {
|
||||
return new ApplicationAttemptStartDataPBImpl(
|
||||
ApplicationAttemptStartDataProto.parseFrom(value));
|
||||
}
|
||||
|
||||
private static ApplicationAttemptFinishData
|
||||
parseApplicationAttemptFinishData(byte[] value)
|
||||
throws InvalidProtocolBufferException {
|
||||
return new ApplicationAttemptFinishDataPBImpl(
|
||||
ApplicationAttemptFinishDataProto.parseFrom(value));
|
||||
}
|
||||
|
||||
private static ContainerStartData parseContainerStartData(byte[] value)
|
||||
throws InvalidProtocolBufferException {
|
||||
return new ContainerStartDataPBImpl(
|
||||
ContainerStartDataProto.parseFrom(value));
|
||||
}
|
||||
|
||||
private static ContainerFinishData parseContainerFinishData(byte[] value)
|
||||
throws InvalidProtocolBufferException {
|
||||
return new ContainerFinishDataPBImpl(
|
||||
ContainerFinishDataProto.parseFrom(value));
|
||||
}
|
||||
|
||||
private static void mergeApplicationHistoryData(
|
||||
ApplicationHistoryData historyData, ApplicationStartData startData) {
|
||||
historyData.setApplicationName(startData.getApplicationName());
|
||||
historyData.setApplicationType(startData.getApplicationType());
|
||||
historyData.setQueue(startData.getQueue());
|
||||
historyData.setUser(startData.getUser());
|
||||
historyData.setSubmitTime(startData.getSubmitTime());
|
||||
historyData.setStartTime(startData.getStartTime());
|
||||
}
|
||||
|
||||
private static void mergeApplicationHistoryData(
|
||||
ApplicationHistoryData historyData, ApplicationFinishData finishData) {
|
||||
historyData.setFinishTime(finishData.getFinishTime());
|
||||
historyData.setDiagnosticsInfo(finishData.getDiagnosticsInfo());
|
||||
historyData.setFinalApplicationStatus(finishData
|
||||
.getFinalApplicationStatus());
|
||||
historyData.setYarnApplicationState(finishData.getYarnApplicationState());
|
||||
}
|
||||
|
||||
private static void mergeApplicationAttemptHistoryData(
|
||||
ApplicationAttemptHistoryData historyData,
|
||||
ApplicationAttemptStartData startData) {
|
||||
historyData.setHost(startData.getHost());
|
||||
historyData.setRPCPort(startData.getRPCPort());
|
||||
historyData.setMasterContainerId(startData.getMasterContainerId());
|
||||
}
|
||||
|
||||
private static void mergeApplicationAttemptHistoryData(
|
||||
ApplicationAttemptHistoryData historyData,
|
||||
ApplicationAttemptFinishData finishData) {
|
||||
historyData.setDiagnosticsInfo(finishData.getDiagnosticsInfo());
|
||||
historyData.setTrackingURL(finishData.getTrackingURL());
|
||||
historyData.setFinalApplicationStatus(finishData
|
||||
.getFinalApplicationStatus());
|
||||
historyData.setYarnApplicationAttemptState(finishData
|
||||
.getYarnApplicationAttemptState());
|
||||
}
|
||||
|
||||
private static void mergeContainerHistoryData(
|
||||
ContainerHistoryData historyData, ContainerStartData startData) {
|
||||
historyData.setAllocatedResource(startData.getAllocatedResource());
|
||||
historyData.setAssignedNode(startData.getAssignedNode());
|
||||
historyData.setPriority(startData.getPriority());
|
||||
historyData.setStartTime(startData.getStartTime());
|
||||
}
|
||||
|
||||
private static void mergeContainerHistoryData(
|
||||
ContainerHistoryData historyData, ContainerFinishData finishData) {
|
||||
historyData.setFinishTime(finishData.getFinishTime());
|
||||
historyData.setDiagnosticsInfo(finishData.getDiagnosticsInfo());
|
||||
historyData.setLogURL(finishData.getLogURL());
|
||||
historyData.setContainerExitStatus(finishData.getContainerExitStatus());
|
||||
historyData.setContainerState(finishData.getContainerState());
|
||||
}
|
||||
|
||||
private HistoryFileWriter getHistoryFileWriter(ApplicationId appId)
|
||||
throws IOException {
|
||||
HistoryFileWriter hfWriter = outstandingWriters.get(appId);
|
||||
if (hfWriter == null) {
|
||||
throw new IOException("History file of application " + appId
|
||||
+ " is not opened");
|
||||
}
|
||||
return hfWriter;
|
||||
}
|
||||
|
||||
private HistoryFileReader getHistoryFileReader(ApplicationId appId)
|
||||
throws IOException {
|
||||
Path applicationHistoryFile = new Path(rootDirPath, appId.toString());
|
||||
if (!fs.exists(applicationHistoryFile)) {
|
||||
throw new IOException("History file for application " + appId
|
||||
+ " is not found");
|
||||
}
|
||||
// The history file is still under writing
|
||||
if (outstandingWriters.containsKey(appId)) {
|
||||
throw new IOException("History file for application " + appId
|
||||
+ " is under writing");
|
||||
}
|
||||
return new HistoryFileReader(applicationHistoryFile);
|
||||
}
|
||||
|
||||
private class HistoryFileReader {
|
||||
|
||||
private class Entry {
|
||||
|
||||
private HistoryDataKey key;
|
||||
private byte[] value;
|
||||
|
||||
public Entry(HistoryDataKey key, byte[] value) {
|
||||
this.key = key;
|
||||
this.value = value;
|
||||
}
|
||||
}
|
||||
|
||||
private TFile.Reader reader;
|
||||
private TFile.Reader.Scanner scanner;
|
||||
|
||||
public HistoryFileReader(Path historyFile) throws IOException {
|
||||
FSDataInputStream fsdis = fs.open(historyFile);
|
||||
reader =
|
||||
new TFile.Reader(fsdis, fs.getFileStatus(historyFile).getLen(),
|
||||
getConfig());
|
||||
reset();
|
||||
}
|
||||
|
||||
public boolean hasNext() {
|
||||
return !scanner.atEnd();
|
||||
}
|
||||
|
||||
public Entry next() throws IOException {
|
||||
TFile.Reader.Scanner.Entry entry = scanner.entry();
|
||||
DataInputStream dis = entry.getKeyStream();
|
||||
HistoryDataKey key = new HistoryDataKey();
|
||||
key.readFields(dis);
|
||||
dis = entry.getValueStream();
|
||||
byte[] value = new byte[entry.getValueLength()];
|
||||
dis.readFully(value); // read(byte[]) may return fewer bytes than the value length
|
||||
scanner.advance();
|
||||
return new Entry(key, value);
|
||||
}
|
||||
|
||||
public void reset() throws IOException {
|
||||
IOUtils.cleanup(LOG, scanner);
|
||||
scanner = reader.createScanner();
|
||||
}
|
||||
|
||||
public void close() {
|
||||
IOUtils.cleanup(LOG, scanner, reader);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private class HistoryFileWriter {
|
||||
|
||||
private FSDataOutputStream fsdos;
|
||||
private TFile.Writer writer;
|
||||
|
||||
public HistoryFileWriter(Path historyFile) throws IOException {
|
||||
if (fs.exists(historyFile)) {
|
||||
fsdos = fs.append(historyFile);
|
||||
} else {
|
||||
fsdos = fs.create(historyFile);
|
||||
}
|
||||
fs.setPermission(historyFile, HISTORY_FILE_UMASK);
|
||||
writer =
|
||||
new TFile.Writer(fsdos, MIN_BLOCK_SIZE, getConfig().get(
|
||||
YarnConfiguration.FS_HISTORY_STORE_COMPRESSION_TYPE,
|
||||
YarnConfiguration.DEFAULT_FS_HISTORY_STORE_COMPRESSION_TYPE), null,
|
||||
getConfig());
|
||||
}
|
||||
|
||||
public synchronized void close() {
|
||||
IOUtils.cleanup(LOG, writer, fsdos);
|
||||
}
|
||||
|
||||
public synchronized void writeHistoryData(HistoryDataKey key, byte[] value)
|
||||
throws IOException {
|
||||
DataOutputStream dos = null;
|
||||
try {
|
||||
dos = writer.prepareAppendKey(-1);
|
||||
key.write(dos);
|
||||
} finally {
|
||||
IOUtils.cleanup(LOG, dos);
|
||||
}
|
||||
try {
|
||||
dos = writer.prepareAppendValue(value.length);
|
||||
dos.write(value);
|
||||
} finally {
|
||||
IOUtils.cleanup(LOG, dos);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private static class HistoryDataKey implements Writable {
|
||||
|
||||
private String id;
|
||||
|
||||
private String suffix;
|
||||
|
||||
public HistoryDataKey() {
|
||||
this(null, null);
|
||||
}
|
||||
|
||||
public HistoryDataKey(String id, String suffix) {
|
||||
this.id = id;
|
||||
this.suffix = suffix;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void write(DataOutput out) throws IOException {
|
||||
out.writeUTF(id);
|
||||
out.writeUTF(suffix);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void readFields(DataInput in) throws IOException {
|
||||
id = in.readUTF();
|
||||
suffix = in.readUTF();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private static class StartFinishDataPair<S, F> {
|
||||
|
||||
private S startData;
|
||||
private F finishData;
|
||||
|
||||
}
|
||||
|
||||
}
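
A read-side sketch of exercising this store outside the server. It assumes FS_HISTORY_STORE_URI is already configured and that some writer has populated the directory; the class name and the ApplicationId are made up, and getApplication throws IOException when the per-application history file does not exist.

package org.apache.hadoop.yarn.server.applicationhistoryservice;

import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptHistoryData;
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationHistoryData;

public class FileSystemHistoryStoreReadSketch {
  public static void main(String[] args) throws Exception {
    // Assumption: FS_HISTORY_STORE_URI is set, e.g. via yarn-site.xml.
    Configuration conf = new YarnConfiguration();
    FileSystemApplicationHistoryStore store = new FileSystemApplicationHistoryStore();
    store.init(conf);   // creates ApplicationHistoryDataRoot under the configured URI
    store.start();
    try {
      // Made-up id for illustration; in practice it comes from the RM or a report.
      ApplicationId appId = ApplicationId.newInstance(System.currentTimeMillis(), 1);
      ApplicationHistoryData app = store.getApplication(appId);
      Map<ApplicationAttemptId, ApplicationAttemptHistoryData> attempts =
          store.getApplicationAttempts(appId);
      System.out.println(app + " with " + attempts.size() + " attempts");
    } finally {
      store.stop();     // closes outstanding writers and the FileSystem handle
    }
  }
}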
|
|
@@ -0,0 +1,275 @@
|
|||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.yarn.server.applicationhistoryservice;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.concurrent.ConcurrentHashMap;
|
||||
import java.util.concurrent.ConcurrentMap;
|
||||
|
||||
import org.apache.hadoop.classification.InterfaceAudience.Private;
|
||||
import org.apache.hadoop.classification.InterfaceStability.Unstable;
|
||||
import org.apache.hadoop.service.AbstractService;
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationId;
|
||||
import org.apache.hadoop.yarn.api.records.ContainerId;
|
||||
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptFinishData;
|
||||
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptHistoryData;
|
||||
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptStartData;
|
||||
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationFinishData;
|
||||
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationHistoryData;
|
||||
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationStartData;
|
||||
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerFinishData;
|
||||
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerHistoryData;
|
||||
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerStartData;
|
||||
|
||||
/**
* In-memory implementation of {@link ApplicationHistoryStore}. This
* implementation is for test purposes only. If it is instantiated improperly
* (for example, more than once), reads and writes may end up going to
* different in-memory stores.
*/
|
||||
@Private
|
||||
@Unstable
|
||||
public class MemoryApplicationHistoryStore extends AbstractService implements
|
||||
ApplicationHistoryStore {
|
||||
|
||||
private final ConcurrentMap<ApplicationId, ApplicationHistoryData> applicationData =
|
||||
new ConcurrentHashMap<ApplicationId, ApplicationHistoryData>();
|
||||
private final ConcurrentMap<ApplicationId, ConcurrentMap<ApplicationAttemptId, ApplicationAttemptHistoryData>> applicationAttemptData =
|
||||
new ConcurrentHashMap<ApplicationId, ConcurrentMap<ApplicationAttemptId, ApplicationAttemptHistoryData>>();
|
||||
private final ConcurrentMap<ApplicationAttemptId, ConcurrentMap<ContainerId, ContainerHistoryData>> containerData =
|
||||
new ConcurrentHashMap<ApplicationAttemptId, ConcurrentMap<ContainerId, ContainerHistoryData>>();
|
||||
|
||||
public MemoryApplicationHistoryStore() {
|
||||
super(MemoryApplicationHistoryStore.class.getName());
|
||||
}
|
||||
|
||||
@Override
|
||||
public Map<ApplicationId, ApplicationHistoryData> getAllApplications() {
|
||||
return new HashMap<ApplicationId, ApplicationHistoryData>(applicationData);
|
||||
}
|
||||
|
||||
@Override
|
||||
public ApplicationHistoryData getApplication(ApplicationId appId) {
|
||||
return applicationData.get(appId);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Map<ApplicationAttemptId, ApplicationAttemptHistoryData>
|
||||
getApplicationAttempts(ApplicationId appId) {
|
||||
ConcurrentMap<ApplicationAttemptId, ApplicationAttemptHistoryData> subMap =
|
||||
applicationAttemptData.get(appId);
|
||||
if (subMap == null) {
|
||||
return Collections
|
||||
.<ApplicationAttemptId, ApplicationAttemptHistoryData> emptyMap();
|
||||
} else {
|
||||
return new HashMap<ApplicationAttemptId, ApplicationAttemptHistoryData>(
|
||||
subMap);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public ApplicationAttemptHistoryData getApplicationAttempt(
|
||||
ApplicationAttemptId appAttemptId) {
|
||||
ConcurrentMap<ApplicationAttemptId, ApplicationAttemptHistoryData> subMap =
|
||||
applicationAttemptData.get(appAttemptId.getApplicationId());
|
||||
if (subMap == null) {
|
||||
return null;
|
||||
} else {
|
||||
return subMap.get(appAttemptId);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public ContainerHistoryData getAMContainer(ApplicationAttemptId appAttemptId) {
|
||||
ApplicationAttemptHistoryData appAttempt =
|
||||
getApplicationAttempt(appAttemptId);
|
||||
if (appAttempt == null || appAttempt.getMasterContainerId() == null) {
|
||||
return null;
|
||||
} else {
|
||||
return getContainer(appAttempt.getMasterContainerId());
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public ContainerHistoryData getContainer(ContainerId containerId) {
|
||||
Map<ContainerId, ContainerHistoryData> subMap =
|
||||
containerData.get(containerId.getApplicationAttemptId());
|
||||
if (subMap == null) {
|
||||
return null;
|
||||
} else {
|
||||
return subMap.get(containerId);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public Map<ContainerId, ContainerHistoryData> getContainers(
|
||||
ApplicationAttemptId appAttemptId) throws IOException {
|
||||
ConcurrentMap<ContainerId, ContainerHistoryData> subMap =
|
||||
containerData.get(appAttemptId);
|
||||
if (subMap == null) {
|
||||
return Collections.<ContainerId, ContainerHistoryData> emptyMap();
|
||||
} else {
|
||||
return new HashMap<ContainerId, ContainerHistoryData>(subMap);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void applicationStarted(ApplicationStartData appStart)
|
||||
throws IOException {
|
||||
ApplicationHistoryData oldData =
|
||||
applicationData.putIfAbsent(appStart.getApplicationId(),
|
||||
ApplicationHistoryData.newInstance(appStart.getApplicationId(),
|
||||
appStart.getApplicationName(), appStart.getApplicationType(),
|
||||
appStart.getQueue(), appStart.getUser(), appStart.getSubmitTime(),
|
||||
appStart.getStartTime(), Long.MAX_VALUE, null, null, null));
|
||||
if (oldData != null) {
|
||||
throw new IOException("The start information of application "
|
||||
+ appStart.getApplicationId() + " is already stored.");
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void applicationFinished(ApplicationFinishData appFinish)
|
||||
throws IOException {
|
||||
ApplicationHistoryData data =
|
||||
applicationData.get(appFinish.getApplicationId());
|
||||
if (data == null) {
|
||||
throw new IOException("The finish information of application "
|
||||
+ appFinish.getApplicationId() + " is stored before the start"
|
||||
+ " information.");
|
||||
}
|
||||
// Make the assumption that YarnApplicationState should not be null if
|
||||
// the finish information is already recorded
|
||||
if (data.getYarnApplicationState() != null) {
|
||||
throw new IOException("The finish information of application "
|
||||
+ appFinish.getApplicationId() + " is already stored.");
|
||||
}
|
||||
data.setFinishTime(appFinish.getFinishTime());
|
||||
data.setDiagnosticsInfo(appFinish.getDiagnosticsInfo());
|
||||
data.setFinalApplicationStatus(appFinish.getFinalApplicationStatus());
|
||||
data.setYarnApplicationState(appFinish.getYarnApplicationState());
|
||||
}
|
||||
|
||||
@Override
|
||||
public void applicationAttemptStarted(
|
||||
ApplicationAttemptStartData appAttemptStart) throws IOException {
|
||||
ConcurrentMap<ApplicationAttemptId, ApplicationAttemptHistoryData> subMap =
|
||||
getSubMap(appAttemptStart.getApplicationAttemptId().getApplicationId());
|
||||
ApplicationAttemptHistoryData oldData =
|
||||
subMap.putIfAbsent(appAttemptStart.getApplicationAttemptId(),
|
||||
ApplicationAttemptHistoryData.newInstance(
|
||||
appAttemptStart.getApplicationAttemptId(),
|
||||
appAttemptStart.getHost(), appAttemptStart.getRPCPort(),
|
||||
appAttemptStart.getMasterContainerId(), null, null, null, null));
|
||||
if (oldData != null) {
|
||||
throw new IOException("The start information of application attempt "
|
||||
          + appAttemptStart.getApplicationAttemptId() + " is already stored.");
    }
  }

  @Override
  public void applicationAttemptFinished(
      ApplicationAttemptFinishData appAttemptFinish) throws IOException {
    ConcurrentMap<ApplicationAttemptId, ApplicationAttemptHistoryData> subMap =
        getSubMap(appAttemptFinish.getApplicationAttemptId().getApplicationId());
    ApplicationAttemptHistoryData data =
        subMap.get(appAttemptFinish.getApplicationAttemptId());
    if (data == null) {
      throw new IOException("The finish information of application attempt "
          + appAttemptFinish.getApplicationAttemptId() + " is stored before"
          + " the start information.");
    }
    // Make the assumption that YarnApplicationAttemptState should not be null
    // if the finish information is already recorded
    if (data.getYarnApplicationAttemptState() != null) {
      throw new IOException("The finish information of application attempt "
          + appAttemptFinish.getApplicationAttemptId() + " is already stored.");
    }
    data.setTrackingURL(appAttemptFinish.getTrackingURL());
    data.setDiagnosticsInfo(appAttemptFinish.getDiagnosticsInfo());
    data
      .setFinalApplicationStatus(appAttemptFinish.getFinalApplicationStatus());
    data.setYarnApplicationAttemptState(appAttemptFinish
      .getYarnApplicationAttemptState());
  }

  private ConcurrentMap<ApplicationAttemptId, ApplicationAttemptHistoryData>
      getSubMap(ApplicationId appId) {
    applicationAttemptData
      .putIfAbsent(
        appId,
        new ConcurrentHashMap<ApplicationAttemptId, ApplicationAttemptHistoryData>());
    return applicationAttemptData.get(appId);
  }

  @Override
  public void containerStarted(ContainerStartData containerStart)
      throws IOException {
    ConcurrentMap<ContainerId, ContainerHistoryData> subMap =
        getSubMap(containerStart.getContainerId().getApplicationAttemptId());
    ContainerHistoryData oldData =
        subMap.putIfAbsent(containerStart.getContainerId(),
          ContainerHistoryData.newInstance(containerStart.getContainerId(),
            containerStart.getAllocatedResource(),
            containerStart.getAssignedNode(), containerStart.getPriority(),
            containerStart.getStartTime(), Long.MAX_VALUE, null, null,
            Integer.MAX_VALUE, null));
    if (oldData != null) {
      throw new IOException("The start information of container "
          + containerStart.getContainerId() + " is already stored.");
    }
  }

  @Override
  public void containerFinished(ContainerFinishData containerFinish)
      throws IOException {
    ConcurrentMap<ContainerId, ContainerHistoryData> subMap =
        getSubMap(containerFinish.getContainerId().getApplicationAttemptId());
    ContainerHistoryData data = subMap.get(containerFinish.getContainerId());
    if (data == null) {
      throw new IOException("The finish information of container "
          + containerFinish.getContainerId() + " is stored before"
          + " the start information.");
    }
    // Make the assumption that ContainerState should not be null if
    // the finish information is already recorded
    if (data.getContainerState() != null) {
      throw new IOException("The finish information of container "
          + containerFinish.getContainerId() + " is already stored.");
    }
    data.setFinishTime(containerFinish.getFinishTime());
    data.setDiagnosticsInfo(containerFinish.getDiagnosticsInfo());
    data.setLogURL(containerFinish.getLogURL());
    data.setContainerExitStatus(containerFinish.getContainerExitStatus());
    data.setContainerState(containerFinish.getContainerState());
  }

  private ConcurrentMap<ContainerId, ContainerHistoryData> getSubMap(
      ApplicationAttemptId appAttemptId) {
    containerData.putIfAbsent(appAttemptId,
      new ConcurrentHashMap<ContainerId, ContainerHistoryData>());
    return containerData.get(appAttemptId);
  }

}
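The write path above enforces a strict ordering: a finish record is rejected with an IOException unless the matching start record is already present, and duplicate start or finish records are rejected as well. Below is a minimal sketch of a caller honoring that contract; the store variable stands for whichever ApplicationHistoryStore implementation is configured, and the host, port, and URL values are placeholders, not taken from this patch.

import java.io.IOException;

import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.api.records.YarnApplicationAttemptState;
import org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryStore;
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptFinishData;
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptStartData;

public class AttemptHistoryWriteSketch {
  // Write the start record first; writing it a second time throws IOException.
  static void recordAttempt(ApplicationHistoryStore store,
      ApplicationAttemptId attemptId, ContainerId amContainerId)
      throws IOException {
    store.applicationAttemptStarted(ApplicationAttemptStartData.newInstance(
        attemptId, "am-host.example.com", 4242, amContainerId));
    // The finish record is only accepted once the start record exists.
    store.applicationAttemptFinished(ApplicationAttemptFinishData.newInstance(
        attemptId, "", "http://tracking.example.com/",
        FinalApplicationStatus.SUCCEEDED,
        YarnApplicationAttemptState.FINISHED));
  }
}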
@@ -0,0 +1,127 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.yarn.server.applicationhistoryservice;

import java.io.IOException;
import java.util.Collections;
import java.util.Map;

import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.service.AbstractService;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptFinishData;
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptHistoryData;
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptStartData;
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationFinishData;
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationHistoryData;
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationStartData;
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerFinishData;
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerHistoryData;
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerStartData;

/**
 * Dummy implementation of {@link ApplicationHistoryStore}. If this
 * implementation is used, no history data will be persisted.
 */
@Unstable
@Private
public class NullApplicationHistoryStore extends AbstractService implements
    ApplicationHistoryStore {

  public NullApplicationHistoryStore() {
    super(NullApplicationHistoryStore.class.getName());
  }

  @Override
  public void applicationStarted(ApplicationStartData appStart)
      throws IOException {
  }

  @Override
  public void applicationFinished(ApplicationFinishData appFinish)
      throws IOException {
  }

  @Override
  public void applicationAttemptStarted(
      ApplicationAttemptStartData appAttemptStart) throws IOException {
  }

  @Override
  public void applicationAttemptFinished(
      ApplicationAttemptFinishData appAttemptFinish) throws IOException {
  }

  @Override
  public void containerStarted(ContainerStartData containerStart)
      throws IOException {
  }

  @Override
  public void containerFinished(ContainerFinishData containerFinish)
      throws IOException {
  }

  @Override
  public ApplicationHistoryData getApplication(ApplicationId appId)
      throws IOException {
    return null;
  }

  @Override
  public Map<ApplicationId, ApplicationHistoryData> getAllApplications()
      throws IOException {
    return Collections.emptyMap();
  }

  @Override
  public Map<ApplicationAttemptId, ApplicationAttemptHistoryData>
      getApplicationAttempts(ApplicationId appId) throws IOException {
    return Collections.emptyMap();
  }

  @Override
  public ApplicationAttemptHistoryData getApplicationAttempt(
      ApplicationAttemptId appAttemptId) throws IOException {
    return null;
  }

  @Override
  public ContainerHistoryData getContainer(ContainerId containerId)
      throws IOException {
    return null;
  }

  @Override
  public ContainerHistoryData getAMContainer(ApplicationAttemptId appAttemptId)
      throws IOException {
    return null;
  }

  @Override
  public Map<ContainerId, ContainerHistoryData> getContainers(
      ApplicationAttemptId appAttemptId) throws IOException {
    return Collections.emptyMap();
  }

}
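NullApplicationHistoryStore is the no-op default: every write is silently dropped and every read returns null or an empty map. A small hedged sketch of driving it through the normal AbstractService lifecycle:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.server.applicationhistoryservice.NullApplicationHistoryStore;

public class NullStoreSketch {
  public static void main(String[] args) throws Exception {
    NullApplicationHistoryStore store = new NullApplicationHistoryStore();
    store.init(new Configuration()); // standard AbstractService lifecycle
    store.start();
    try {
      // Nothing is ever persisted, so reads come back empty.
      System.out.println(store.getAllApplications().isEmpty()); // true
    } finally {
      store.stop();
    }
  }
}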
@@ -0,0 +1,95 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.yarn.server.applicationhistoryservice.records;

import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.api.records.YarnApplicationAttemptState;
import org.apache.hadoop.yarn.util.Records;

/**
 * The class contains the fields that can be determined when
 * <code>RMAppAttempt</code> finishes, and that need to be stored persistently.
 */
@Public
@Unstable
public abstract class ApplicationAttemptFinishData {

  @Public
  @Unstable
  public static ApplicationAttemptFinishData newInstance(
      ApplicationAttemptId appAttemptId, String diagnosticsInfo,
      String trackingURL, FinalApplicationStatus finalApplicationStatus,
      YarnApplicationAttemptState yarnApplicationAttemptState) {
    ApplicationAttemptFinishData appAttemptFD =
        Records.newRecord(ApplicationAttemptFinishData.class);
    appAttemptFD.setApplicationAttemptId(appAttemptId);
    appAttemptFD.setDiagnosticsInfo(diagnosticsInfo);
    appAttemptFD.setTrackingURL(trackingURL);
    appAttemptFD.setFinalApplicationStatus(finalApplicationStatus);
    appAttemptFD.setYarnApplicationAttemptState(yarnApplicationAttemptState);
    return appAttemptFD;
  }

  @Public
  @Unstable
  public abstract ApplicationAttemptId getApplicationAttemptId();

  @Public
  @Unstable
  public abstract void setApplicationAttemptId(
      ApplicationAttemptId applicationAttemptId);

  @Public
  @Unstable
  public abstract String getTrackingURL();

  @Public
  @Unstable
  public abstract void setTrackingURL(String trackingURL);

  @Public
  @Unstable
  public abstract String getDiagnosticsInfo();

  @Public
  @Unstable
  public abstract void setDiagnosticsInfo(String diagnosticsInfo);

  @Public
  @Unstable
  public abstract FinalApplicationStatus getFinalApplicationStatus();

  @Public
  @Unstable
  public abstract void setFinalApplicationStatus(
      FinalApplicationStatus finalApplicationStatus);

  @Public
  @Unstable
  public abstract YarnApplicationAttemptState getYarnApplicationAttemptState();

  @Public
  @Unstable
  public abstract void setYarnApplicationAttemptState(
      YarnApplicationAttemptState yarnApplicationAttemptState);

}
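ApplicationAttemptFinishData follows the usual YARN record pattern: an abstract class whose static newInstance factory goes through Records.newRecord, so the protobuf-backed implementation is picked up at runtime. A construction sketch, with illustrative identifier values that are not taken from this patch:

import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.api.records.YarnApplicationAttemptState;
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptFinishData;

public class FinishDataSketch {
  static ApplicationAttemptFinishData sample() {
    // Cluster timestamp and ids below are made up for the example.
    ApplicationAttemptId attemptId = ApplicationAttemptId.newInstance(
        ApplicationId.newInstance(1389600000000L, 1), 1);
    return ApplicationAttemptFinishData.newInstance(attemptId,
        "finished cleanly", "http://tracking.example.com/",
        FinalApplicationStatus.SUCCEEDED,
        YarnApplicationAttemptState.FINISHED);
  }
}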
@@ -0,0 +1,171 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.yarn.server.applicationhistoryservice.records;

import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.api.records.YarnApplicationAttemptState;

/**
 * The class contains all the fields that are stored persistently for
 * <code>RMAppAttempt</code>.
 */
@Public
@Unstable
public class ApplicationAttemptHistoryData {

  private ApplicationAttemptId applicationAttemptId;

  private String host;

  private int rpcPort;

  private String trackingURL;

  private String diagnosticsInfo;

  private FinalApplicationStatus finalApplicationStatus;

  private ContainerId masterContainerId;

  private YarnApplicationAttemptState yarnApplicationAttemptState;

  @Public
  @Unstable
  public static ApplicationAttemptHistoryData newInstance(
      ApplicationAttemptId appAttemptId, String host, int rpcPort,
      ContainerId masterContainerId, String diagnosticsInfo,
      String trackingURL, FinalApplicationStatus finalApplicationStatus,
      YarnApplicationAttemptState yarnApplicationAttemptState) {
    ApplicationAttemptHistoryData appAttemptHD =
        new ApplicationAttemptHistoryData();
    appAttemptHD.setApplicationAttemptId(appAttemptId);
    appAttemptHD.setHost(host);
    appAttemptHD.setRPCPort(rpcPort);
    appAttemptHD.setMasterContainerId(masterContainerId);
    appAttemptHD.setDiagnosticsInfo(diagnosticsInfo);
    appAttemptHD.setTrackingURL(trackingURL);
    appAttemptHD.setFinalApplicationStatus(finalApplicationStatus);
    appAttemptHD.setYarnApplicationAttemptState(yarnApplicationAttemptState);
    return appAttemptHD;
  }

  @Public
  @Unstable
  public ApplicationAttemptId getApplicationAttemptId() {
    return applicationAttemptId;
  }

  @Public
  @Unstable
  public void
      setApplicationAttemptId(ApplicationAttemptId applicationAttemptId) {
    this.applicationAttemptId = applicationAttemptId;
  }

  @Public
  @Unstable
  public String getHost() {
    return host;
  }

  @Public
  @Unstable
  public void setHost(String host) {
    this.host = host;
  }

  @Public
  @Unstable
  public int getRPCPort() {
    return rpcPort;
  }

  @Public
  @Unstable
  public void setRPCPort(int rpcPort) {
    this.rpcPort = rpcPort;
  }

  @Public
  @Unstable
  public String getTrackingURL() {
    return trackingURL;
  }

  @Public
  @Unstable
  public void setTrackingURL(String trackingURL) {
    this.trackingURL = trackingURL;
  }

  @Public
  @Unstable
  public String getDiagnosticsInfo() {
    return diagnosticsInfo;
  }

  @Public
  @Unstable
  public void setDiagnosticsInfo(String diagnosticsInfo) {
    this.diagnosticsInfo = diagnosticsInfo;
  }

  @Public
  @Unstable
  public FinalApplicationStatus getFinalApplicationStatus() {
    return finalApplicationStatus;
  }

  @Public
  @Unstable
  public void setFinalApplicationStatus(
      FinalApplicationStatus finalApplicationStatus) {
    this.finalApplicationStatus = finalApplicationStatus;
  }

  @Public
  @Unstable
  public ContainerId getMasterContainerId() {
    return masterContainerId;
  }

  @Public
  @Unstable
  public void setMasterContainerId(ContainerId masterContainerId) {
    this.masterContainerId = masterContainerId;
  }

  @Public
  @Unstable
  public YarnApplicationAttemptState getYarnApplicationAttemptState() {
    return yarnApplicationAttemptState;
  }

  @Public
  @Unstable
  public void setYarnApplicationAttemptState(
      YarnApplicationAttemptState yarnApplicationAttemptState) {
    this.yarnApplicationAttemptState = yarnApplicationAttemptState;
  }

}
@@ -0,0 +1,82 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.yarn.server.applicationhistoryservice.records;

import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.util.Records;

/**
 * The class contains the fields that can be determined when
 * <code>RMAppAttempt</code> starts, and that need to be stored persistently.
 */
@Public
@Unstable
public abstract class ApplicationAttemptStartData {

  @Public
  @Unstable
  public static ApplicationAttemptStartData newInstance(
      ApplicationAttemptId appAttemptId, String host, int rpcPort,
      ContainerId masterContainerId) {
    ApplicationAttemptStartData appAttemptSD =
        Records.newRecord(ApplicationAttemptStartData.class);
    appAttemptSD.setApplicationAttemptId(appAttemptId);
    appAttemptSD.setHost(host);
    appAttemptSD.setRPCPort(rpcPort);
    appAttemptSD.setMasterContainerId(masterContainerId);
    return appAttemptSD;
  }

  @Public
  @Unstable
  public abstract ApplicationAttemptId getApplicationAttemptId();

  @Public
  @Unstable
  public abstract void setApplicationAttemptId(
      ApplicationAttemptId applicationAttemptId);

  @Public
  @Unstable
  public abstract String getHost();

  @Public
  @Unstable
  public abstract void setHost(String host);

  @Public
  @Unstable
  public abstract int getRPCPort();

  @Public
  @Unstable
  public abstract void setRPCPort(int rpcPort);

  @Public
  @Unstable
  public abstract ContainerId getMasterContainerId();

  @Public
  @Unstable
  public abstract void setMasterContainerId(ContainerId masterContainerId);

}
@@ -0,0 +1,94 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.yarn.server.applicationhistoryservice.records;

import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.util.Records;

/**
 * The class contains the fields that can be determined when <code>RMApp</code>
 * finishes, and that need to be stored persistently.
 */
@Public
@Unstable
public abstract class ApplicationFinishData {

  @Public
  @Unstable
  public static ApplicationFinishData newInstance(ApplicationId applicationId,
      long finishTime, String diagnosticsInfo,
      FinalApplicationStatus finalApplicationStatus,
      YarnApplicationState yarnApplicationState) {
    ApplicationFinishData appFD =
        Records.newRecord(ApplicationFinishData.class);
    appFD.setApplicationId(applicationId);
    appFD.setFinishTime(finishTime);
    appFD.setDiagnosticsInfo(diagnosticsInfo);
    appFD.setFinalApplicationStatus(finalApplicationStatus);
    appFD.setYarnApplicationState(yarnApplicationState);
    return appFD;
  }

  @Public
  @Unstable
  public abstract ApplicationId getApplicationId();

  @Public
  @Unstable
  public abstract void setApplicationId(ApplicationId applicationId);

  @Public
  @Unstable
  public abstract long getFinishTime();

  @Public
  @Unstable
  public abstract void setFinishTime(long finishTime);

  @Public
  @Unstable
  public abstract String getDiagnosticsInfo();

  @Public
  @Unstable
  public abstract void setDiagnosticsInfo(String diagnosticsInfo);

  @Public
  @Unstable
  public abstract FinalApplicationStatus getFinalApplicationStatus();

  @Public
  @Unstable
  public abstract void setFinalApplicationStatus(
      FinalApplicationStatus finalApplicationStatus);

  @Public
  @Unstable
  public abstract YarnApplicationState getYarnApplicationState();

  @Public
  @Unstable
  public abstract void setYarnApplicationState(
      YarnApplicationState yarnApplicationState);

}
@@ -0,0 +1,213 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.yarn.server.applicationhistoryservice.records;

import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;

/**
 * The class contains all the fields that are stored persistently for
 * <code>RMApp</code>.
 */
@Public
@Unstable
public class ApplicationHistoryData {

  private ApplicationId applicationId;

  private String applicationName;

  private String applicationType;

  private String user;

  private String queue;

  private long submitTime;

  private long startTime;

  private long finishTime;

  private String diagnosticsInfo;

  private FinalApplicationStatus finalApplicationStatus;

  private YarnApplicationState yarnApplicationState;

  @Public
  @Unstable
  public static ApplicationHistoryData newInstance(ApplicationId applicationId,
      String applicationName, String applicationType, String queue,
      String user, long submitTime, long startTime, long finishTime,
      String diagnosticsInfo, FinalApplicationStatus finalApplicationStatus,
      YarnApplicationState yarnApplicationState) {
    ApplicationHistoryData appHD = new ApplicationHistoryData();
    appHD.setApplicationId(applicationId);
    appHD.setApplicationName(applicationName);
    appHD.setApplicationType(applicationType);
    appHD.setQueue(queue);
    appHD.setUser(user);
    appHD.setSubmitTime(submitTime);
    appHD.setStartTime(startTime);
    appHD.setFinishTime(finishTime);
    appHD.setDiagnosticsInfo(diagnosticsInfo);
    appHD.setFinalApplicationStatus(finalApplicationStatus);
    appHD.setYarnApplicationState(yarnApplicationState);
    return appHD;
  }

  @Public
  @Unstable
  public ApplicationId getApplicationId() {
    return applicationId;
  }

  @Public
  @Unstable
  public void setApplicationId(ApplicationId applicationId) {
    this.applicationId = applicationId;
  }

  @Public
  @Unstable
  public String getApplicationName() {
    return applicationName;
  }

  @Public
  @Unstable
  public void setApplicationName(String applicationName) {
    this.applicationName = applicationName;
  }

  @Public
  @Unstable
  public String getApplicationType() {
    return applicationType;
  }

  @Public
  @Unstable
  public void setApplicationType(String applicationType) {
    this.applicationType = applicationType;
  }

  @Public
  @Unstable
  public String getUser() {
    return user;
  }

  @Public
  @Unstable
  public void setUser(String user) {
    this.user = user;
  }

  @Public
  @Unstable
  public String getQueue() {
    return queue;
  }

  @Public
  @Unstable
  public void setQueue(String queue) {
    this.queue = queue;
  }

  @Public
  @Unstable
  public long getSubmitTime() {
    return submitTime;
  }

  @Public
  @Unstable
  public void setSubmitTime(long submitTime) {
    this.submitTime = submitTime;
  }

  @Public
  @Unstable
  public long getStartTime() {
    return startTime;
  }

  @Public
  @Unstable
  public void setStartTime(long startTime) {
    this.startTime = startTime;
  }

  @Public
  @Unstable
  public long getFinishTime() {
    return finishTime;
  }

  @Public
  @Unstable
  public void setFinishTime(long finishTime) {
    this.finishTime = finishTime;
  }

  @Public
  @Unstable
  public String getDiagnosticsInfo() {
    return diagnosticsInfo;
  }

  @Public
  @Unstable
  public void setDiagnosticsInfo(String diagnosticsInfo) {
    this.diagnosticsInfo = diagnosticsInfo;
  }

  @Public
  @Unstable
  public FinalApplicationStatus getFinalApplicationStatus() {
    return finalApplicationStatus;
  }

  @Public
  @Unstable
  public void setFinalApplicationStatus(
      FinalApplicationStatus finalApplicationStatus) {
    this.finalApplicationStatus = finalApplicationStatus;
  }

  @Public
  @Unstable
  public YarnApplicationState getYarnApplicationState() {
    return this.yarnApplicationState;
  }

  @Public
  @Unstable
  public void
      setYarnApplicationState(YarnApplicationState yarnApplicationState) {
    this.yarnApplicationState = yarnApplicationState;
  }

}
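Unlike the start and finish records, ApplicationHistoryData is a plain mutable holder created with new rather than through Records.newRecord; it reads as the merged view used on the read path rather than a wire-level record (an inference from this patch, not stated explicitly). A hedged sketch of assembling one, with all values illustrative:

import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationHistoryData;

public class AppHistorySketch {
  static ApplicationHistoryData sample() {
    long now = System.currentTimeMillis();
    // Name, type, queue, and user below are placeholders for the example.
    return ApplicationHistoryData.newInstance(
        ApplicationId.newInstance(now, 1),
        "word count", "MAPREDUCE", "default", "alice",
        now - 60000L, now - 50000L, now,
        "", FinalApplicationStatus.SUCCEEDED,
        YarnApplicationState.FINISHED);
  }
}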
@@ -0,0 +1,106 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.yarn.server.applicationhistoryservice.records;

import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.util.Records;

/**
 * The class contains the fields that can be determined when <code>RMApp</code>
 * starts, and that need to be stored persistently.
 */
@Public
@Unstable
public abstract class ApplicationStartData {

  @Public
  @Unstable
  public static ApplicationStartData newInstance(ApplicationId applicationId,
      String applicationName, String applicationType, String queue,
      String user, long submitTime, long startTime) {
    ApplicationStartData appSD = Records.newRecord(ApplicationStartData.class);
    appSD.setApplicationId(applicationId);
    appSD.setApplicationName(applicationName);
    appSD.setApplicationType(applicationType);
    appSD.setQueue(queue);
    appSD.setUser(user);
    appSD.setSubmitTime(submitTime);
    appSD.setStartTime(startTime);
    return appSD;
  }

  @Public
  @Unstable
  public abstract ApplicationId getApplicationId();

  @Public
  @Unstable
  public abstract void setApplicationId(ApplicationId applicationId);

  @Public
  @Unstable
  public abstract String getApplicationName();

  @Public
  @Unstable
  public abstract void setApplicationName(String applicationName);

  @Public
  @Unstable
  public abstract String getApplicationType();

  @Public
  @Unstable
  public abstract void setApplicationType(String applicationType);

  @Public
  @Unstable
  public abstract String getUser();

  @Public
  @Unstable
  public abstract void setUser(String user);

  @Public
  @Unstable
  public abstract String getQueue();

  @Public
  @Unstable
  public abstract void setQueue(String queue);

  @Public
  @Unstable
  public abstract long getSubmitTime();

  @Public
  @Unstable
  public abstract void setSubmitTime(long submitTime);

  @Public
  @Unstable
  public abstract long getStartTime();

  @Public
  @Unstable
  public abstract void setStartTime(long startTime);

}
@@ -0,0 +1,99 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.yarn.server.applicationhistoryservice.records;

import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ContainerState;
import org.apache.hadoop.yarn.util.Records;

/**
 * The class contains the fields that can be determined when
 * <code>RMContainer</code> finishes, and that need to be stored persistently.
 */
@Public
@Unstable
public abstract class ContainerFinishData {

  @Public
  @Unstable
  public static ContainerFinishData newInstance(ContainerId containerId,
      long finishTime, String diagnosticsInfo, String logURL,
      int containerExitCode, ContainerState containerState) {
    ContainerFinishData containerFD =
        Records.newRecord(ContainerFinishData.class);
    containerFD.setContainerId(containerId);
    containerFD.setFinishTime(finishTime);
    containerFD.setDiagnosticsInfo(diagnosticsInfo);
    containerFD.setLogURL(logURL);
    containerFD.setContainerExitStatus(containerExitCode);
    containerFD.setContainerState(containerState);
    return containerFD;
  }

  @Public
  @Unstable
  public abstract ContainerId getContainerId();

  @Public
  @Unstable
  public abstract void setContainerId(ContainerId containerId);

  @Public
  @Unstable
  public abstract long getFinishTime();

  @Public
  @Unstable
  public abstract void setFinishTime(long finishTime);

  @Public
  @Unstable
  public abstract String getDiagnosticsInfo();

  @Public
  @Unstable
  public abstract void setDiagnosticsInfo(String diagnosticsInfo);

  @Public
  @Unstable
  public abstract String getLogURL();

  @Public
  @Unstable
  public abstract void setLogURL(String logURL);

  @Public
  @Unstable
  public abstract int getContainerExitStatus();

  @Public
  @Unstable
  public abstract void setContainerExitStatus(int containerExitStatus);

  @Public
  @Unstable
  public abstract ContainerState getContainerState();

  @Public
  @Unstable
  public abstract void setContainerState(ContainerState containerState);

}
@@ -0,0 +1,197 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.yarn.server.applicationhistoryservice.records;

import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ContainerState;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.api.records.Priority;
import org.apache.hadoop.yarn.api.records.Resource;

/**
 * The class contains all the fields that are stored persistently for
 * <code>RMContainer</code>.
 */
@Public
@Unstable
public class ContainerHistoryData {

  private ContainerId containerId;

  private Resource allocatedResource;

  private NodeId assignedNode;

  private Priority priority;

  private long startTime;

  private long finishTime;

  private String diagnosticsInfo;

  private String logURL;

  private int containerExitStatus;

  private ContainerState containerState;

  @Public
  @Unstable
  public static ContainerHistoryData newInstance(ContainerId containerId,
      Resource allocatedResource, NodeId assignedNode, Priority priority,
      long startTime, long finishTime, String diagnosticsInfo, String logURL,
      int containerExitCode, ContainerState containerState) {
    ContainerHistoryData containerHD = new ContainerHistoryData();
    containerHD.setContainerId(containerId);
    containerHD.setAllocatedResource(allocatedResource);
    containerHD.setAssignedNode(assignedNode);
    containerHD.setPriority(priority);
    containerHD.setStartTime(startTime);
    containerHD.setFinishTime(finishTime);
    containerHD.setDiagnosticsInfo(diagnosticsInfo);
    containerHD.setLogURL(logURL);
    containerHD.setContainerExitStatus(containerExitCode);
    containerHD.setContainerState(containerState);
    return containerHD;
  }

  @Public
  @Unstable
  public ContainerId getContainerId() {
    return containerId;
  }

  @Public
  @Unstable
  public void setContainerId(ContainerId containerId) {
    this.containerId = containerId;
  }

  @Public
  @Unstable
  public Resource getAllocatedResource() {
    return allocatedResource;
  }

  @Public
  @Unstable
  public void setAllocatedResource(Resource resource) {
    this.allocatedResource = resource;
  }

  @Public
  @Unstable
  public NodeId getAssignedNode() {
    return assignedNode;
  }

  @Public
  @Unstable
  public void setAssignedNode(NodeId nodeId) {
    this.assignedNode = nodeId;
  }

  @Public
  @Unstable
  public Priority getPriority() {
    return priority;
  }

  @Public
  @Unstable
  public void setPriority(Priority priority) {
    this.priority = priority;
  }

  @Public
  @Unstable
  public long getStartTime() {
    return startTime;
  }

  @Public
  @Unstable
  public void setStartTime(long startTime) {
    this.startTime = startTime;
  }

  @Public
  @Unstable
  public long getFinishTime() {
    return finishTime;
  }

  @Public
  @Unstable
  public void setFinishTime(long finishTime) {
    this.finishTime = finishTime;
  }

  @Public
  @Unstable
  public String getDiagnosticsInfo() {
    return diagnosticsInfo;
  }

  @Public
  @Unstable
  public void setDiagnosticsInfo(String diagnosticsInfo) {
    this.diagnosticsInfo = diagnosticsInfo;
  }

  @Public
  @Unstable
  public String getLogURL() {
    return logURL;
  }

  @Public
  @Unstable
  public void setLogURL(String logURL) {
    this.logURL = logURL;
  }

  @Public
  @Unstable
  public int getContainerExitStatus() {
    return containerExitStatus;
  }

  @Public
  @Unstable
  public void setContainerExitStatus(int containerExitStatus) {
    this.containerExitStatus = containerExitStatus;
  }

  @Public
  @Unstable
  public ContainerState getContainerState() {
    return containerState;
  }

  @Public
  @Unstable
  public void setContainerState(ContainerState containerState) {
    this.containerState = containerState;
  }

}
@@ -0,0 +1,92 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.yarn.server.applicationhistoryservice.records;

import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.api.records.Priority;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.util.Records;

/**
 * The class contains the fields that can be determined when
 * <code>RMContainer</code> starts, and that need to be stored persistently.
 */
@Public
@Unstable
public abstract class ContainerStartData {

  @Public
  @Unstable
  public static ContainerStartData newInstance(ContainerId containerId,
      Resource allocatedResource, NodeId assignedNode, Priority priority,
      long startTime) {
    ContainerStartData containerSD =
        Records.newRecord(ContainerStartData.class);
    containerSD.setContainerId(containerId);
    containerSD.setAllocatedResource(allocatedResource);
    containerSD.setAssignedNode(assignedNode);
    containerSD.setPriority(priority);
    containerSD.setStartTime(startTime);
    return containerSD;
  }

  @Public
  @Unstable
  public abstract ContainerId getContainerId();

  @Public
  @Unstable
  public abstract void setContainerId(ContainerId containerId);

  @Public
  @Unstable
  public abstract Resource getAllocatedResource();

  @Public
  @Unstable
  public abstract void setAllocatedResource(Resource resource);

  @Public
  @Unstable
  public abstract NodeId getAssignedNode();

  @Public
  @Unstable
  public abstract void setAssignedNode(NodeId nodeId);

  @Public
  @Unstable
  public abstract Priority getPriority();

  @Public
  @Unstable
  public abstract void setPriority(Priority priority);

  @Public
  @Unstable
  public abstract long getStartTime();

  @Public
  @Unstable
  public abstract void setStartTime(long startTime);

}
@@ -0,0 +1,239 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.yarn.server.applicationhistoryservice.records.impl.pb;

import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.api.records.YarnApplicationAttemptState;
import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationAttemptIdPBImpl;
import org.apache.hadoop.yarn.api.records.impl.pb.ProtoUtils;
import org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptFinishDataProto;
import org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptFinishDataProtoOrBuilder;
import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto;
import org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto;
import org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationAttemptStateProto;
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptFinishData;

import com.google.protobuf.TextFormat;

public class ApplicationAttemptFinishDataPBImpl extends
    ApplicationAttemptFinishData {

  ApplicationAttemptFinishDataProto proto = ApplicationAttemptFinishDataProto
    .getDefaultInstance();
  ApplicationAttemptFinishDataProto.Builder builder = null;
  boolean viaProto = false;

  public ApplicationAttemptFinishDataPBImpl() {
    builder = ApplicationAttemptFinishDataProto.newBuilder();
  }

  public ApplicationAttemptFinishDataPBImpl(
      ApplicationAttemptFinishDataProto proto) {
    this.proto = proto;
    viaProto = true;
  }

  private ApplicationAttemptId applicationAttemptId;

  @Override
  public ApplicationAttemptId getApplicationAttemptId() {
    if (this.applicationAttemptId != null) {
      return this.applicationAttemptId;
    }
    ApplicationAttemptFinishDataProtoOrBuilder p = viaProto ? proto : builder;
    if (!p.hasApplicationAttemptId()) {
      return null;
    }
    this.applicationAttemptId =
        convertFromProtoFormat(p.getApplicationAttemptId());
    return this.applicationAttemptId;
  }

  @Override
  public void
      setApplicationAttemptId(ApplicationAttemptId applicationAttemptId) {
    maybeInitBuilder();
    if (applicationAttemptId == null) {
      builder.clearApplicationAttemptId();
    }
    this.applicationAttemptId = applicationAttemptId;
  }

  @Override
  public String getTrackingURL() {
    ApplicationAttemptFinishDataProtoOrBuilder p = viaProto ? proto : builder;
    if (!p.hasTrackingUrl()) {
      return null;
    }
    return p.getTrackingUrl();
  }

  @Override
  public void setTrackingURL(String trackingURL) {
    maybeInitBuilder();
    if (trackingURL == null) {
      builder.clearTrackingUrl();
      return;
    }
    builder.setTrackingUrl(trackingURL);
  }

  @Override
  public String getDiagnosticsInfo() {
    ApplicationAttemptFinishDataProtoOrBuilder p = viaProto ? proto : builder;
    if (!p.hasDiagnosticsInfo()) {
      return null;
    }
    return p.getDiagnosticsInfo();
  }

  @Override
  public void setDiagnosticsInfo(String diagnosticsInfo) {
    maybeInitBuilder();
    if (diagnosticsInfo == null) {
      builder.clearDiagnosticsInfo();
      return;
    }
    builder.setDiagnosticsInfo(diagnosticsInfo);
  }

  @Override
  public FinalApplicationStatus getFinalApplicationStatus() {
    ApplicationAttemptFinishDataProtoOrBuilder p = viaProto ? proto : builder;
    if (!p.hasFinalApplicationStatus()) {
      return null;
    }
    return convertFromProtoFormat(p.getFinalApplicationStatus());
  }

  @Override
  public void setFinalApplicationStatus(
      FinalApplicationStatus finalApplicationStatus) {
    maybeInitBuilder();
    if (finalApplicationStatus == null) {
      builder.clearFinalApplicationStatus();
      return;
    }
    builder
      .setFinalApplicationStatus(convertToProtoFormat(finalApplicationStatus));
  }

  @Override
  public YarnApplicationAttemptState getYarnApplicationAttemptState() {
    ApplicationAttemptFinishDataProtoOrBuilder p = viaProto ? proto : builder;
    if (!p.hasYarnApplicationAttemptState()) {
      return null;
    }
    return convertFromProtoFormat(p.getYarnApplicationAttemptState());
  }

  @Override
  public void setYarnApplicationAttemptState(YarnApplicationAttemptState state) {
    maybeInitBuilder();
    if (state == null) {
      builder.clearYarnApplicationAttemptState();
      return;
    }
    builder.setYarnApplicationAttemptState(convertToProtoFormat(state));
  }

  public ApplicationAttemptFinishDataProto getProto() {
    mergeLocalToProto();
    proto = viaProto ? proto : builder.build();
    viaProto = true;
    return proto;
  }

  @Override
  public int hashCode() {
    return getProto().hashCode();
  }

  @Override
  public boolean equals(Object other) {
    if (other == null)
      return false;
    if (other.getClass().isAssignableFrom(this.getClass())) {
      return this.getProto().equals(this.getClass().cast(other).getProto());
    }
    return false;
  }

  @Override
  public String toString() {
    return TextFormat.shortDebugString(getProto());
  }

  private void mergeLocalToBuilder() {
    if (this.applicationAttemptId != null
        && !((ApplicationAttemptIdPBImpl) this.applicationAttemptId).getProto()
          .equals(builder.getApplicationAttemptId())) {
      builder
        .setApplicationAttemptId(convertToProtoFormat(this.applicationAttemptId));
    }
  }

  private void mergeLocalToProto() {
    if (viaProto) {
      maybeInitBuilder();
    }
    mergeLocalToBuilder();
    proto = builder.build();
    viaProto = true;
  }

  private void maybeInitBuilder() {
    if (viaProto || builder == null) {
      builder = ApplicationAttemptFinishDataProto.newBuilder(proto);
    }
    viaProto = false;
  }

  private ApplicationAttemptIdPBImpl convertFromProtoFormat(
      ApplicationAttemptIdProto applicationAttemptId) {
    return new ApplicationAttemptIdPBImpl(applicationAttemptId);
  }

  private ApplicationAttemptIdProto convertToProtoFormat(
      ApplicationAttemptId applicationAttemptId) {
    return ((ApplicationAttemptIdPBImpl) applicationAttemptId).getProto();
  }

  private FinalApplicationStatus convertFromProtoFormat(
      FinalApplicationStatusProto finalApplicationStatus) {
    return ProtoUtils.convertFromProtoFormat(finalApplicationStatus);
  }

  private FinalApplicationStatusProto convertToProtoFormat(
      FinalApplicationStatus finalApplicationStatus) {
    return ProtoUtils.convertToProtoFormat(finalApplicationStatus);
  }

  private YarnApplicationAttemptStateProto convertToProtoFormat(
      YarnApplicationAttemptState state) {
    return ProtoUtils.convertToProtoFormat(state);
  }

  private YarnApplicationAttemptState convertFromProtoFormat(
      YarnApplicationAttemptStateProto yarnApplicationAttemptState) {
    return ProtoUtils.convertFromProtoFormat(yarnApplicationAttemptState);
  }

}
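The PBImpl classes cache locally-set record objects and merge them into the protobuf builder only when getProto() is called; building a new PBImpl from that proto then yields an equal record, because equals() compares the underlying protos. A small hedged round-trip sketch using only the API visible in this patch:

import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.api.records.YarnApplicationAttemptState;
import org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptFinishDataProto;
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.impl.pb.ApplicationAttemptFinishDataPBImpl;

public class PBRoundTripSketch {
  static boolean roundTrip() {
    ApplicationAttemptFinishDataPBImpl original =
        new ApplicationAttemptFinishDataPBImpl();
    original.setDiagnosticsInfo("done");
    original.setFinalApplicationStatus(FinalApplicationStatus.SUCCEEDED);
    original.setYarnApplicationAttemptState(
        YarnApplicationAttemptState.FINISHED);

    // getProto() merges cached fields into the builder and freezes the proto.
    ApplicationAttemptFinishDataProto proto = original.getProto();
    ApplicationAttemptFinishDataPBImpl copy =
        new ApplicationAttemptFinishDataPBImpl(proto);

    // equals() compares the underlying protos, so the copy matches.
    return original.equals(copy);
  }
}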
@ -0,0 +1,208 @@
|
|||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.yarn.server.applicationhistoryservice.records.impl.pb;
|
||||
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
|
||||
import org.apache.hadoop.yarn.api.records.ContainerId;
|
||||
import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationAttemptIdPBImpl;
|
||||
import org.apache.hadoop.yarn.api.records.impl.pb.ContainerIdPBImpl;
|
||||
import org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptStartDataProto;
|
||||
import org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptStartDataProtoOrBuilder;
|
||||
import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto;
|
||||
import org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto;
|
||||
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptStartData;
|
||||
|
||||
import com.google.protobuf.TextFormat;
|
||||
|
||||
public class ApplicationAttemptStartDataPBImpl extends
|
||||
ApplicationAttemptStartData {
|
||||
|
||||
ApplicationAttemptStartDataProto proto = ApplicationAttemptStartDataProto
|
||||
.getDefaultInstance();
|
||||
ApplicationAttemptStartDataProto.Builder builder = null;
|
||||
boolean viaProto = false;
|
||||
|
||||
public ApplicationAttemptStartDataPBImpl() {
|
||||
builder = ApplicationAttemptStartDataProto.newBuilder();
|
||||
}
|
||||
|
||||
public ApplicationAttemptStartDataPBImpl(
|
||||
ApplicationAttemptStartDataProto proto) {
|
||||
this.proto = proto;
|
||||
viaProto = true;
|
||||
}
|
||||
|
||||
private ApplicationAttemptId applicationAttemptId;
|
||||
private ContainerId masterContainerId;
|
||||
|
||||
@Override
|
||||
public ApplicationAttemptId getApplicationAttemptId() {
|
||||
if (this.applicationAttemptId != null) {
|
||||
return this.applicationAttemptId;
|
||||
}
|
||||
ApplicationAttemptStartDataProtoOrBuilder p = viaProto ? proto : builder;
|
||||
if (!p.hasApplicationAttemptId()) {
|
||||
return null;
|
||||
}
|
||||
this.applicationAttemptId =
|
||||
convertFromProtoFormat(p.getApplicationAttemptId());
|
||||
return this.applicationAttemptId;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void
|
||||
setApplicationAttemptId(ApplicationAttemptId applicationAttemptId) {
|
||||
maybeInitBuilder();
|
||||
if (applicationAttemptId == null) {
|
||||
builder.clearApplicationAttemptId();
|
||||
}
|
||||
this.applicationAttemptId = applicationAttemptId;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getHost() {
|
||||
ApplicationAttemptStartDataProtoOrBuilder p = viaProto ? proto : builder;
|
||||
if (!p.hasHost()) {
|
||||
return null;
|
||||
}
|
||||
return p.getHost();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setHost(String host) {
|
||||
maybeInitBuilder();
|
||||
if (host == null) {
|
||||
builder.clearHost();
|
||||
return;
|
||||
}
|
||||
builder.setHost(host);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getRPCPort() {
|
||||
ApplicationAttemptStartDataProtoOrBuilder p = viaProto ? proto : builder;
|
||||
return p.getRpcPort();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setRPCPort(int rpcPort) {
|
||||
maybeInitBuilder();
|
||||
builder.setRpcPort(rpcPort);
|
||||
}
|
||||
|
||||
@Override
|
||||
public ContainerId getMasterContainerId() {
|
||||
if (this.masterContainerId != null) {
|
||||
return this.masterContainerId;
|
||||
}
|
||||
ApplicationAttemptStartDataProtoOrBuilder p = viaProto ? proto : builder;
|
||||
if (!p.hasApplicationAttemptId()) {
|
||||
return null;
|
||||
}
|
||||
this.masterContainerId = convertFromProtoFormat(p.getMasterContainerId());
|
||||
return this.masterContainerId;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setMasterContainerId(ContainerId masterContainerId) {
|
||||
maybeInitBuilder();
|
||||
if (masterContainerId == null) {
|
||||
builder.clearMasterContainerId();
|
||||
}
|
||||
this.masterContainerId = masterContainerId;
|
||||
}
|
||||
|
||||
public ApplicationAttemptStartDataProto getProto() {
|
||||
mergeLocalToProto();
|
||||
proto = viaProto ? proto : builder.build();
|
||||
viaProto = true;
|
||||
return proto;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return getProto().hashCode();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object other) {
|
||||
if (other == null)
|
||||
return false;
|
||||
if (other.getClass().isAssignableFrom(this.getClass())) {
|
||||
return this.getProto().equals(this.getClass().cast(other).getProto());
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return TextFormat.shortDebugString(getProto());
|
||||
}
|
||||
|
||||
private void mergeLocalToBuilder() {
|
||||
if (this.applicationAttemptId != null
|
||||
&& !((ApplicationAttemptIdPBImpl) this.applicationAttemptId).getProto()
|
||||
.equals(builder.getApplicationAttemptId())) {
|
||||
builder
|
||||
.setApplicationAttemptId(convertToProtoFormat(this.applicationAttemptId));
|
||||
}
|
||||
if (this.masterContainerId != null
|
||||
&& !((ContainerIdPBImpl) this.masterContainerId).getProto().equals(
|
||||
builder.getMasterContainerId())) {
|
||||
builder
|
||||
.setMasterContainerId(convertToProtoFormat(this.masterContainerId));
|
||||
}
|
||||
}
|
||||
|
||||
private void mergeLocalToProto() {
|
||||
if (viaProto) {
|
||||
maybeInitBuilder();
|
||||
}
|
||||
mergeLocalToBuilder();
|
||||
proto = builder.build();
|
||||
viaProto = true;
|
||||
}
|
||||
|
||||
private void maybeInitBuilder() {
|
||||
if (viaProto || builder == null) {
|
||||
builder = ApplicationAttemptStartDataProto.newBuilder(proto);
|
||||
}
|
||||
viaProto = false;
|
||||
}
|
||||
|
||||
private ApplicationAttemptIdPBImpl convertFromProtoFormat(
|
||||
ApplicationAttemptIdProto applicationAttemptId) {
|
||||
return new ApplicationAttemptIdPBImpl(applicationAttemptId);
|
||||
}
|
||||
|
||||
private ApplicationAttemptIdProto convertToProtoFormat(
|
||||
ApplicationAttemptId applicationAttemptId) {
|
||||
return ((ApplicationAttemptIdPBImpl) applicationAttemptId).getProto();
|
||||
}
|
||||
|
||||
private ContainerIdPBImpl
|
||||
convertFromProtoFormat(ContainerIdProto containerId) {
|
||||
return new ContainerIdPBImpl(containerId);
|
||||
}
|
||||
|
||||
private ContainerIdProto convertToProtoFormat(ContainerId masterContainerId) {
|
||||
return ((ContainerIdPBImpl) masterContainerId).getProto();
|
||||
}
|
||||
|
||||
}
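All of the *PBImpl records added in this patch share the same lazy proto/builder pattern: setters write either into the protobuf builder or into a cached record object, and getProto() folds any cached objects back into the message via mergeLocalToProto() before freezing it. The sketch below is illustrative only and not part of this patch; it assumes the generated ApplicationAttemptStartDataProto and the public ApplicationId/ApplicationAttemptId factories are on the classpath.

import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationAttemptStartDataProto;
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptStartData;
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.impl.pb.ApplicationAttemptStartDataPBImpl;

public class AttemptStartDataRoundTrip {
  public static ApplicationAttemptStartData roundTrip() throws Exception {
    // Writer side: populate the record through the record API.
    ApplicationAttemptId attemptId = ApplicationAttemptId.newInstance(
        ApplicationId.newInstance(System.currentTimeMillis(), 1), 1);
    ApplicationAttemptStartDataPBImpl start = new ApplicationAttemptStartDataPBImpl();
    start.setApplicationAttemptId(attemptId);
    start.setHost("example-host"); // hypothetical values for illustration
    start.setRPCPort(0);

    // getProto() merges the cached ApplicationAttemptId into the builder
    // and freezes the message for storage.
    byte[] bytes = start.getProto().toByteArray();

    // Reader side: parse the stored bytes and wrap them again.
    return new ApplicationAttemptStartDataPBImpl(
        ApplicationAttemptStartDataProto.parseFrom(bytes));
  }
}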
|
|
@ -0,0 +1,226 @@
|
|||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.yarn.server.applicationhistoryservice.records.impl.pb;
|
||||
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationId;
|
||||
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
|
||||
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
|
||||
import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationIdPBImpl;
|
||||
import org.apache.hadoop.yarn.api.records.impl.pb.ProtoUtils;
|
||||
import org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationFinishDataProto;
|
||||
import org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationFinishDataProtoOrBuilder;
|
||||
import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto;
|
||||
import org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto;
|
||||
import org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto;
|
||||
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationFinishData;
|
||||
|
||||
import com.google.protobuf.TextFormat;
|
||||
|
||||
public class ApplicationFinishDataPBImpl extends ApplicationFinishData {
|
||||
|
||||
ApplicationFinishDataProto proto = ApplicationFinishDataProto
|
||||
.getDefaultInstance();
|
||||
ApplicationFinishDataProto.Builder builder = null;
|
||||
boolean viaProto = false;
|
||||
|
||||
private ApplicationId applicationId;
|
||||
|
||||
public ApplicationFinishDataPBImpl() {
|
||||
builder = ApplicationFinishDataProto.newBuilder();
|
||||
}
|
||||
|
||||
public ApplicationFinishDataPBImpl(ApplicationFinishDataProto proto) {
|
||||
this.proto = proto;
|
||||
viaProto = true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ApplicationId getApplicationId() {
|
||||
if (this.applicationId != null) {
|
||||
return this.applicationId;
|
||||
}
|
||||
ApplicationFinishDataProtoOrBuilder p = viaProto ? proto : builder;
|
||||
if (!p.hasApplicationId()) {
|
||||
return null;
|
||||
}
|
||||
this.applicationId = convertFromProtoFormat(p.getApplicationId());
|
||||
return this.applicationId;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setApplicationId(ApplicationId applicationId) {
|
||||
maybeInitBuilder();
|
||||
if (applicationId == null) {
|
||||
builder.clearApplicationId();
|
||||
}
|
||||
this.applicationId = applicationId;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getFinishTime() {
|
||||
ApplicationFinishDataProtoOrBuilder p = viaProto ? proto : builder;
|
||||
return p.getFinishTime();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setFinishTime(long finishTime) {
|
||||
maybeInitBuilder();
|
||||
builder.setFinishTime(finishTime);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getDiagnosticsInfo() {
|
||||
ApplicationFinishDataProtoOrBuilder p = viaProto ? proto : builder;
|
||||
if (!p.hasDiagnosticsInfo()) {
|
||||
return null;
|
||||
}
|
||||
return p.getDiagnosticsInfo();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setDiagnosticsInfo(String diagnosticsInfo) {
|
||||
maybeInitBuilder();
|
||||
if (diagnosticsInfo == null) {
|
||||
builder.clearDiagnosticsInfo();
|
||||
return;
|
||||
}
|
||||
builder.setDiagnosticsInfo(diagnosticsInfo);
|
||||
}
|
||||
|
||||
@Override
|
||||
public FinalApplicationStatus getFinalApplicationStatus() {
|
||||
ApplicationFinishDataProtoOrBuilder p = viaProto ? proto : builder;
|
||||
if (!p.hasFinalApplicationStatus()) {
|
||||
return null;
|
||||
}
|
||||
return convertFromProtoFormat(p.getFinalApplicationStatus());
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setFinalApplicationStatus(
|
||||
FinalApplicationStatus finalApplicationStatus) {
|
||||
maybeInitBuilder();
|
||||
if (finalApplicationStatus == null) {
|
||||
builder.clearFinalApplicationStatus();
|
||||
return;
|
||||
}
|
||||
builder
|
||||
.setFinalApplicationStatus(convertToProtoFormat(finalApplicationStatus));
|
||||
}
|
||||
|
||||
@Override
|
||||
public YarnApplicationState getYarnApplicationState() {
|
||||
ApplicationFinishDataProtoOrBuilder p = viaProto ? proto : builder;
|
||||
if (!p.hasYarnApplicationState()) {
|
||||
return null;
|
||||
}
|
||||
return convertFromProtoFormat(p.getYarnApplicationState());
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setYarnApplicationState(YarnApplicationState state) {
|
||||
maybeInitBuilder();
|
||||
if (state == null) {
|
||||
builder.clearYarnApplicationState();
|
||||
return;
|
||||
}
|
||||
builder.setYarnApplicationState(convertToProtoFormat(state));
|
||||
}
|
||||
|
||||
public ApplicationFinishDataProto getProto() {
|
||||
mergeLocalToProto();
|
||||
proto = viaProto ? proto : builder.build();
|
||||
viaProto = true;
|
||||
return proto;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return getProto().hashCode();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object other) {
|
||||
if (other == null)
|
||||
return false;
|
||||
if (other.getClass().isAssignableFrom(this.getClass())) {
|
||||
return this.getProto().equals(this.getClass().cast(other).getProto());
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return TextFormat.shortDebugString(getProto());
|
||||
}
|
||||
|
||||
private void mergeLocalToBuilder() {
|
||||
if (this.applicationId != null
|
||||
&& !((ApplicationIdPBImpl) this.applicationId).getProto().equals(
|
||||
builder.getApplicationId())) {
|
||||
builder.setApplicationId(convertToProtoFormat(this.applicationId));
|
||||
}
|
||||
}
|
||||
|
||||
private void mergeLocalToProto() {
|
||||
if (viaProto) {
|
||||
maybeInitBuilder();
|
||||
}
|
||||
mergeLocalToBuilder();
|
||||
proto = builder.build();
|
||||
viaProto = true;
|
||||
}
|
||||
|
||||
private void maybeInitBuilder() {
|
||||
if (viaProto || builder == null) {
|
||||
builder = ApplicationFinishDataProto.newBuilder(proto);
|
||||
}
|
||||
viaProto = false;
|
||||
}
|
||||
|
||||
private ApplicationIdProto convertToProtoFormat(ApplicationId applicationId) {
|
||||
return ((ApplicationIdPBImpl) applicationId).getProto();
|
||||
}
|
||||
|
||||
private ApplicationIdPBImpl convertFromProtoFormat(
|
||||
ApplicationIdProto applicationId) {
|
||||
return new ApplicationIdPBImpl(applicationId);
|
||||
}
|
||||
|
||||
private FinalApplicationStatus convertFromProtoFormat(
|
||||
FinalApplicationStatusProto finalApplicationStatus) {
|
||||
return ProtoUtils.convertFromProtoFormat(finalApplicationStatus);
|
||||
}
|
||||
|
||||
private FinalApplicationStatusProto convertToProtoFormat(
|
||||
FinalApplicationStatus finalApplicationStatus) {
|
||||
return ProtoUtils.convertToProtoFormat(finalApplicationStatus);
|
||||
}
|
||||
|
||||
private YarnApplicationStateProto convertToProtoFormat(
|
||||
YarnApplicationState state) {
|
||||
return ProtoUtils.convertToProtoFormat(state);
|
||||
}
|
||||
|
||||
private YarnApplicationState convertFromProtoFormat(
|
||||
YarnApplicationStateProto yarnApplicationState) {
|
||||
return ProtoUtils.convertFromProtoFormat(yarnApplicationState);
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,229 @@
|
|||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.yarn.server.applicationhistoryservice.records.impl.pb;
|
||||
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationId;
|
||||
import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationIdPBImpl;
|
||||
import org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationStartDataProto;
|
||||
import org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ApplicationStartDataProtoOrBuilder;
|
||||
import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto;
|
||||
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationStartData;
|
||||
|
||||
import com.google.protobuf.TextFormat;
|
||||
|
||||
public class ApplicationStartDataPBImpl extends ApplicationStartData {
|
||||
|
||||
ApplicationStartDataProto proto = ApplicationStartDataProto
|
||||
.getDefaultInstance();
|
||||
ApplicationStartDataProto.Builder builder = null;
|
||||
boolean viaProto = false;
|
||||
|
||||
private ApplicationId applicationId;
|
||||
|
||||
public ApplicationStartDataPBImpl() {
|
||||
builder = ApplicationStartDataProto.newBuilder();
|
||||
}
|
||||
|
||||
public ApplicationStartDataPBImpl(ApplicationStartDataProto proto) {
|
||||
this.proto = proto;
|
||||
viaProto = true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ApplicationId getApplicationId() {
|
||||
if (this.applicationId != null) {
|
||||
return this.applicationId;
|
||||
}
|
||||
ApplicationStartDataProtoOrBuilder p = viaProto ? proto : builder;
|
||||
if (!p.hasApplicationId()) {
|
||||
return null;
|
||||
}
|
||||
this.applicationId = convertFromProtoFormat(p.getApplicationId());
|
||||
return this.applicationId;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setApplicationId(ApplicationId applicationId) {
|
||||
maybeInitBuilder();
|
||||
if (applicationId == null) {
|
||||
builder.clearApplicationId();
|
||||
}
|
||||
this.applicationId = applicationId;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getApplicationName() {
|
||||
ApplicationStartDataProtoOrBuilder p = viaProto ? proto : builder;
|
||||
if (!p.hasApplicationName()) {
|
||||
return null;
|
||||
}
|
||||
return p.getApplicationName();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setApplicationName(String applicationName) {
|
||||
maybeInitBuilder();
|
||||
if (applicationName == null) {
|
||||
builder.clearApplicationName();
|
||||
return;
|
||||
}
|
||||
builder.setApplicationName(applicationName);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getApplicationType() {
|
||||
ApplicationStartDataProtoOrBuilder p = viaProto ? proto : builder;
|
||||
if (!p.hasApplicationType()) {
|
||||
return null;
|
||||
}
|
||||
return p.getApplicationType();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setApplicationType(String applicationType) {
|
||||
maybeInitBuilder();
|
||||
if (applicationType == null) {
|
||||
builder.clearApplicationType();
|
||||
return;
|
||||
}
|
||||
builder.setApplicationType(applicationType);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getUser() {
|
||||
ApplicationStartDataProtoOrBuilder p = viaProto ? proto : builder;
|
||||
if (!p.hasUser()) {
|
||||
return null;
|
||||
}
|
||||
return p.getUser();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setUser(String user) {
|
||||
maybeInitBuilder();
|
||||
if (user == null) {
|
||||
builder.clearUser();
|
||||
return;
|
||||
}
|
||||
builder.setUser(user);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getQueue() {
|
||||
ApplicationStartDataProtoOrBuilder p = viaProto ? proto : builder;
|
||||
if (!p.hasQueue()) {
|
||||
return null;
|
||||
}
|
||||
return p.getQueue();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setQueue(String queue) {
|
||||
maybeInitBuilder();
|
||||
if (queue == null) {
|
||||
builder.clearQueue();
|
||||
return;
|
||||
}
|
||||
builder.setQueue(queue);
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getSubmitTime() {
|
||||
ApplicationStartDataProtoOrBuilder p = viaProto ? proto : builder;
|
||||
return p.getSubmitTime();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setSubmitTime(long submitTime) {
|
||||
maybeInitBuilder();
|
||||
builder.setSubmitTime(submitTime);
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getStartTime() {
|
||||
ApplicationStartDataProtoOrBuilder p = viaProto ? proto : builder;
|
||||
return p.getStartTime();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setStartTime(long startTime) {
|
||||
maybeInitBuilder();
|
||||
builder.setStartTime(startTime);
|
||||
}
|
||||
|
||||
public ApplicationStartDataProto getProto() {
|
||||
mergeLocalToProto();
|
||||
proto = viaProto ? proto : builder.build();
|
||||
viaProto = true;
|
||||
return proto;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return getProto().hashCode();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object other) {
|
||||
if (other == null)
|
||||
return false;
|
||||
if (other.getClass().isAssignableFrom(this.getClass())) {
|
||||
return this.getProto().equals(this.getClass().cast(other).getProto());
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return TextFormat.shortDebugString(getProto());
|
||||
}
|
||||
|
||||
private void mergeLocalToBuilder() {
|
||||
if (this.applicationId != null
|
||||
&& !((ApplicationIdPBImpl) this.applicationId).getProto().equals(
|
||||
builder.getApplicationId())) {
|
||||
builder.setApplicationId(convertToProtoFormat(this.applicationId));
|
||||
}
|
||||
}
|
||||
|
||||
private void mergeLocalToProto() {
|
||||
if (viaProto) {
|
||||
maybeInitBuilder();
|
||||
}
|
||||
mergeLocalToBuilder();
|
||||
proto = builder.build();
|
||||
viaProto = true;
|
||||
}
|
||||
|
||||
private void maybeInitBuilder() {
|
||||
if (viaProto || builder == null) {
|
||||
builder = ApplicationStartDataProto.newBuilder(proto);
|
||||
}
|
||||
viaProto = false;
|
||||
}
|
||||
|
||||
private ApplicationIdProto convertToProtoFormat(ApplicationId applicationId) {
|
||||
return ((ApplicationIdPBImpl) applicationId).getProto();
|
||||
}
|
||||
|
||||
private ApplicationIdPBImpl convertFromProtoFormat(
|
||||
ApplicationIdProto applicationId) {
|
||||
return new ApplicationIdPBImpl(applicationId);
|
||||
}
|
||||
}
|
|
@ -0,0 +1,223 @@
|
|||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.yarn.server.applicationhistoryservice.records.impl.pb;
|
||||
|
||||
import org.apache.hadoop.yarn.api.records.ContainerId;
|
||||
import org.apache.hadoop.yarn.api.records.ContainerState;
|
||||
import org.apache.hadoop.yarn.api.records.impl.pb.ContainerIdPBImpl;
|
||||
import org.apache.hadoop.yarn.api.records.impl.pb.ProtoUtils;
|
||||
import org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerFinishDataProto;
|
||||
import org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerFinishDataProtoOrBuilder;
|
||||
import org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto;
|
||||
import org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto;
|
||||
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerFinishData;
|
||||
|
||||
import com.google.protobuf.TextFormat;
|
||||
|
||||
public class ContainerFinishDataPBImpl extends ContainerFinishData {
|
||||
|
||||
ContainerFinishDataProto proto = ContainerFinishDataProto
|
||||
.getDefaultInstance();
|
||||
ContainerFinishDataProto.Builder builder = null;
|
||||
boolean viaProto = false;
|
||||
|
||||
private ContainerId containerId;
|
||||
|
||||
public ContainerFinishDataPBImpl() {
|
||||
builder = ContainerFinishDataProto.newBuilder();
|
||||
}
|
||||
|
||||
public ContainerFinishDataPBImpl(ContainerFinishDataProto proto) {
|
||||
this.proto = proto;
|
||||
viaProto = true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ContainerId getContainerId() {
|
||||
if (this.containerId != null) {
|
||||
return this.containerId;
|
||||
}
|
||||
ContainerFinishDataProtoOrBuilder p = viaProto ? proto : builder;
|
||||
if (!p.hasContainerId()) {
|
||||
return null;
|
||||
}
|
||||
this.containerId = convertFromProtoFormat(p.getContainerId());
|
||||
return this.containerId;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setContainerId(ContainerId containerId) {
|
||||
maybeInitBuilder();
|
||||
if (containerId == null) {
|
||||
builder.clearContainerId();
|
||||
}
|
||||
this.containerId = containerId;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getFinishTime() {
|
||||
ContainerFinishDataProtoOrBuilder p = viaProto ? proto : builder;
|
||||
return p.getFinishTime();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setFinishTime(long finishTime) {
|
||||
maybeInitBuilder();
|
||||
builder.setFinishTime(finishTime);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getDiagnosticsInfo() {
|
||||
ContainerFinishDataProtoOrBuilder p = viaProto ? proto : builder;
|
||||
if (!p.hasDiagnosticsInfo()) {
|
||||
return null;
|
||||
}
|
||||
return p.getDiagnosticsInfo();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setDiagnosticsInfo(String diagnosticsInfo) {
|
||||
maybeInitBuilder();
|
||||
if (diagnosticsInfo == null) {
|
||||
builder.clearDiagnosticsInfo();
|
||||
return;
|
||||
}
|
||||
builder.setDiagnosticsInfo(diagnosticsInfo);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getLogURL() {
|
||||
ContainerFinishDataProtoOrBuilder p = viaProto ? proto : builder;
|
||||
if (!p.hasLogUrl()) {
|
||||
return null;
|
||||
}
|
||||
return p.getLogUrl();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setLogURL(String logURL) {
|
||||
maybeInitBuilder();
|
||||
if (logURL == null) {
|
||||
builder.clearLogUrl();
|
||||
return;
|
||||
}
|
||||
builder.setLogUrl(logURL);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getContainerExitStatus() {
|
||||
ContainerFinishDataProtoOrBuilder p = viaProto ? proto : builder;
|
||||
return p.getContainerExitStatus();
|
||||
}
|
||||
|
||||
@Override
|
||||
public ContainerState getContainerState() {
|
||||
ContainerFinishDataProtoOrBuilder p = viaProto ? proto : builder;
|
||||
if (!p.hasContainerState()) {
|
||||
return null;
|
||||
}
|
||||
return convertFromProtoFormat(p.getContainerState());
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setContainerState(ContainerState state) {
|
||||
maybeInitBuilder();
|
||||
if (state == null) {
|
||||
builder.clearContainerState();
|
||||
return;
|
||||
}
|
||||
builder.setContainerState(convertToProtoFormat(state));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setContainerExitStatus(int containerExitStatus) {
|
||||
maybeInitBuilder();
|
||||
builder.setContainerExitStatus(containerExitStatus);
|
||||
}
|
||||
|
||||
public ContainerFinishDataProto getProto() {
|
||||
mergeLocalToProto();
|
||||
proto = viaProto ? proto : builder.build();
|
||||
viaProto = true;
|
||||
return proto;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return getProto().hashCode();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object other) {
|
||||
if (other == null)
|
||||
return false;
|
||||
if (other.getClass().isAssignableFrom(this.getClass())) {
|
||||
return this.getProto().equals(this.getClass().cast(other).getProto());
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return TextFormat.shortDebugString(getProto());
|
||||
}
|
||||
|
||||
private void mergeLocalToBuilder() {
|
||||
if (this.containerId != null
|
||||
&& !((ContainerIdPBImpl) this.containerId).getProto().equals(
|
||||
builder.getContainerId())) {
|
||||
builder.setContainerId(convertToProtoFormat(this.containerId));
|
||||
}
|
||||
}
|
||||
|
||||
private void mergeLocalToProto() {
|
||||
if (viaProto) {
|
||||
maybeInitBuilder();
|
||||
}
|
||||
mergeLocalToBuilder();
|
||||
proto = builder.build();
|
||||
viaProto = true;
|
||||
}
|
||||
|
||||
private void maybeInitBuilder() {
|
||||
if (viaProto || builder == null) {
|
||||
builder = ContainerFinishDataProto.newBuilder(proto);
|
||||
}
|
||||
viaProto = false;
|
||||
}
|
||||
|
||||
private ContainerIdProto convertToProtoFormat(ContainerId containerId) {
|
||||
return ((ContainerIdPBImpl) containerId).getProto();
|
||||
}
|
||||
|
||||
private ContainerIdPBImpl
|
||||
convertFromProtoFormat(ContainerIdProto containerId) {
|
||||
return new ContainerIdPBImpl(containerId);
|
||||
}
|
||||
|
||||
private ContainerStateProto convertToProtoFormat(ContainerState state) {
|
||||
return ProtoUtils.convertToProtoFormat(state);
|
||||
}
|
||||
|
||||
private ContainerState convertFromProtoFormat(
|
||||
ContainerStateProto containerState) {
|
||||
return ProtoUtils.convertFromProtoFormat(containerState);
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,258 @@
|
|||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.yarn.server.applicationhistoryservice.records.impl.pb;
|
||||
|
||||
import org.apache.hadoop.yarn.api.records.ContainerId;
|
||||
import org.apache.hadoop.yarn.api.records.NodeId;
|
||||
import org.apache.hadoop.yarn.api.records.Priority;
|
||||
import org.apache.hadoop.yarn.api.records.Resource;
|
||||
import org.apache.hadoop.yarn.api.records.impl.pb.ContainerIdPBImpl;
|
||||
import org.apache.hadoop.yarn.api.records.impl.pb.NodeIdPBImpl;
|
||||
import org.apache.hadoop.yarn.api.records.impl.pb.PriorityPBImpl;
|
||||
import org.apache.hadoop.yarn.api.records.impl.pb.ResourcePBImpl;
|
||||
import org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerStartDataProto;
|
||||
import org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerStartDataProtoOrBuilder;
|
||||
import org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto;
|
||||
import org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto;
|
||||
import org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto;
|
||||
import org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto;
|
||||
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerStartData;
|
||||
|
||||
import com.google.protobuf.TextFormat;
|
||||
|
||||
public class ContainerStartDataPBImpl extends ContainerStartData {
|
||||
|
||||
ContainerStartDataProto proto = ContainerStartDataProto.getDefaultInstance();
|
||||
ContainerStartDataProto.Builder builder = null;
|
||||
boolean viaProto = false;
|
||||
|
||||
private ContainerId containerId;
|
||||
private Resource resource;
|
||||
private NodeId nodeId;
|
||||
private Priority priority;
|
||||
|
||||
public ContainerStartDataPBImpl() {
|
||||
builder = ContainerStartDataProto.newBuilder();
|
||||
}
|
||||
|
||||
public ContainerStartDataPBImpl(ContainerStartDataProto proto) {
|
||||
this.proto = proto;
|
||||
viaProto = true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ContainerId getContainerId() {
|
||||
if (this.containerId != null) {
|
||||
return this.containerId;
|
||||
}
|
||||
ContainerStartDataProtoOrBuilder p = viaProto ? proto : builder;
|
||||
if (!p.hasContainerId()) {
|
||||
return null;
|
||||
}
|
||||
this.containerId = convertFromProtoFormat(p.getContainerId());
|
||||
return this.containerId;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setContainerId(ContainerId containerId) {
|
||||
maybeInitBuilder();
|
||||
if (containerId == null) {
|
||||
builder.clearContainerId();
|
||||
}
|
||||
this.containerId = containerId;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Resource getAllocatedResource() {
|
||||
if (this.resource != null) {
|
||||
return this.resource;
|
||||
}
|
||||
ContainerStartDataProtoOrBuilder p = viaProto ? proto : builder;
|
||||
if (!p.hasAllocatedResource()) {
|
||||
return null;
|
||||
}
|
||||
this.resource = convertFromProtoFormat(p.getAllocatedResource());
|
||||
return this.resource;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setAllocatedResource(Resource resource) {
|
||||
maybeInitBuilder();
|
||||
if (resource == null) {
|
||||
builder.clearAllocatedResource();
|
||||
}
|
||||
this.resource = resource;
|
||||
}
|
||||
|
||||
@Override
|
||||
public NodeId getAssignedNode() {
|
||||
if (this.nodeId != null) {
|
||||
return this.nodeId;
|
||||
}
|
||||
ContainerStartDataProtoOrBuilder p = viaProto ? proto : builder;
|
||||
if (!p.hasAssignedNodeId()) {
|
||||
return null;
|
||||
}
|
||||
this.nodeId = convertFromProtoFormat(p.getAssignedNodeId());
|
||||
return this.nodeId;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setAssignedNode(NodeId nodeId) {
|
||||
maybeInitBuilder();
|
||||
if (nodeId == null) {
|
||||
builder.clearAssignedNodeId();
|
||||
}
|
||||
this.nodeId = nodeId;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Priority getPriority() {
|
||||
if (this.priority != null) {
|
||||
return this.priority;
|
||||
}
|
||||
ContainerStartDataProtoOrBuilder p = viaProto ? proto : builder;
|
||||
if (!p.hasPriority()) {
|
||||
return null;
|
||||
}
|
||||
this.priority = convertFromProtoFormat(p.getPriority());
|
||||
return this.priority;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setPriority(Priority priority) {
|
||||
maybeInitBuilder();
|
||||
if (priority == null) {
|
||||
builder.clearPriority();
|
||||
}
|
||||
this.priority = priority;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getStartTime() {
|
||||
ContainerStartDataProtoOrBuilder p = viaProto ? proto : builder;
|
||||
return p.getStartTime();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setStartTime(long startTime) {
|
||||
maybeInitBuilder();
|
||||
builder.setStartTime(startTime);
|
||||
}
|
||||
|
||||
public ContainerStartDataProto getProto() {
|
||||
mergeLocalToProto();
|
||||
proto = viaProto ? proto : builder.build();
|
||||
viaProto = true;
|
||||
return proto;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return getProto().hashCode();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object other) {
|
||||
if (other == null)
|
||||
return false;
|
||||
if (other.getClass().isAssignableFrom(this.getClass())) {
|
||||
return this.getProto().equals(this.getClass().cast(other).getProto());
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return TextFormat.shortDebugString(getProto());
|
||||
}
|
||||
|
||||
private void mergeLocalToBuilder() {
|
||||
if (this.containerId != null
|
||||
&& !((ContainerIdPBImpl) this.containerId).getProto().equals(
|
||||
builder.getContainerId())) {
|
||||
builder.setContainerId(convertToProtoFormat(this.containerId));
|
||||
}
|
||||
if (this.resource != null
|
||||
&& !((ResourcePBImpl) this.resource).getProto().equals(
|
||||
builder.getAllocatedResource())) {
|
||||
builder.setAllocatedResource(convertToProtoFormat(this.resource));
|
||||
}
|
||||
if (this.nodeId != null
|
||||
&& !((NodeIdPBImpl) this.nodeId).getProto().equals(
|
||||
builder.getAssignedNodeId())) {
|
||||
builder.setAssignedNodeId(convertToProtoFormat(this.nodeId));
|
||||
}
|
||||
if (this.priority != null
|
||||
&& !((PriorityPBImpl) this.priority).getProto().equals(
|
||||
builder.getPriority())) {
|
||||
builder.setPriority(convertToProtoFormat(this.priority));
|
||||
}
|
||||
}
|
||||
|
||||
private void mergeLocalToProto() {
|
||||
if (viaProto) {
|
||||
maybeInitBuilder();
|
||||
}
|
||||
mergeLocalToBuilder();
|
||||
proto = builder.build();
|
||||
viaProto = true;
|
||||
}
|
||||
|
||||
private void maybeInitBuilder() {
|
||||
if (viaProto || builder == null) {
|
||||
builder = ContainerStartDataProto.newBuilder(proto);
|
||||
}
|
||||
viaProto = false;
|
||||
}
|
||||
|
||||
private ContainerIdProto convertToProtoFormat(ContainerId containerId) {
|
||||
return ((ContainerIdPBImpl) containerId).getProto();
|
||||
}
|
||||
|
||||
private ContainerIdPBImpl
|
||||
convertFromProtoFormat(ContainerIdProto containerId) {
|
||||
return new ContainerIdPBImpl(containerId);
|
||||
}
|
||||
|
||||
private ResourceProto convertToProtoFormat(Resource resource) {
|
||||
return ((ResourcePBImpl) resource).getProto();
|
||||
}
|
||||
|
||||
private ResourcePBImpl convertFromProtoFormat(ResourceProto resource) {
|
||||
return new ResourcePBImpl(resource);
|
||||
}
|
||||
|
||||
private NodeIdProto convertToProtoFormat(NodeId nodeId) {
|
||||
return ((NodeIdPBImpl) nodeId).getProto();
|
||||
}
|
||||
|
||||
private NodeIdPBImpl convertFromProtoFormat(NodeIdProto nodeId) {
|
||||
return new NodeIdPBImpl(nodeId);
|
||||
}
|
||||
|
||||
private PriorityProto convertToProtoFormat(Priority priority) {
|
||||
return ((PriorityPBImpl) priority).getProto();
|
||||
}
|
||||
|
||||
private PriorityPBImpl convertFromProtoFormat(PriorityProto priority) {
|
||||
return new PriorityPBImpl(priority);
|
||||
}
|
||||
|
||||
}
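Because equals() and hashCode() in these records delegate to the merged proto, a record built locally and one parsed back from its serialized bytes compare equal whenever their fields match. A small illustrative check (not part of this patch):

import org.apache.hadoop.yarn.proto.ApplicationHistoryServerProtos.ContainerStartDataProto;
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.impl.pb.ContainerStartDataPBImpl;

public class ContainerStartDataEqualityCheck {
  public static boolean sameAfterRoundTrip() throws Exception {
    ContainerStartDataPBImpl a = new ContainerStartDataPBImpl();
    a.setStartTime(1L);
    // Re-parse the frozen proto into a second, proto-backed instance.
    ContainerStartDataPBImpl b = new ContainerStartDataPBImpl(
        ContainerStartDataProto.parseFrom(a.getProto().toByteArray()));
    return a.equals(b) && a.hashCode() == b.hashCode();
  }
}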
@@ -0,0 +1,55 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.yarn.server.applicationhistoryservice.webapp;

import org.apache.hadoop.yarn.webapp.Controller;

import com.google.inject.Inject;

public class AHSController extends Controller {

  @Inject
  AHSController(RequestContext ctx) {
    super(ctx);
  }

  @Override
  public void index() {
    setTitle("Application History");
  }

  public void app() {
    render(AppPage.class);
  }

  public void appattempt() {
    render(AppAttemptPage.class);
  }

  public void container() {
    render(ContainerPage.class);
  }

  /**
   * Render the logs page.
   */
  public void logs() {
    render(AHSLogsPage.class);
  }
}
@@ -0,0 +1,55 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.yarn.server.applicationhistoryservice.webapp;

import static org.apache.hadoop.yarn.webapp.YarnWebParams.CONTAINER_ID;
import static org.apache.hadoop.yarn.webapp.YarnWebParams.ENTITY_STRING;

import org.apache.hadoop.yarn.webapp.SubView;
import org.apache.hadoop.yarn.webapp.log.AggregatedLogsBlock;

public class AHSLogsPage extends AHSView {
  /*
   * (non-Javadoc)
   *
   * @see
   * org.apache.hadoop.yarn.server.applicationhistoryservice.webapp.AHSView#
   * preHead(org.apache.hadoop.yarn.webapp.hamlet.Hamlet.HTML)
   */
  @Override
  protected void preHead(Page.HTML<_> html) {
    String logEntity = $(ENTITY_STRING);
    if (logEntity == null || logEntity.isEmpty()) {
      logEntity = $(CONTAINER_ID);
    }
    if (logEntity == null || logEntity.isEmpty()) {
      logEntity = "UNKNOWN";
    }
    commonPreHead(html);
  }

  /**
   * The content of this page is the AggregatedLogsBlock
   *
   * @return AggregatedLogsBlock.class
   */
  @Override
  protected Class<? extends SubView> content() {
    return AggregatedLogsBlock.class;
  }
}
@@ -0,0 +1,90 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.yarn.server.applicationhistoryservice.webapp;

import static org.apache.hadoop.yarn.util.StringHelper.sjoin;
import static org.apache.hadoop.yarn.webapp.YarnWebParams.APP_STATE;
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.ACCORDION;
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.ACCORDION_ID;
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.DATATABLES;
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.DATATABLES_ID;
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.initID;
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.tableInit;

import org.apache.hadoop.yarn.server.webapp.AppsBlock;
import org.apache.hadoop.yarn.webapp.SubView;
import org.apache.hadoop.yarn.webapp.view.TwoColumnLayout;

// Do NOT rename/refactor this to AHSView as it will wreak havoc
// on Mac OS HFS
public class AHSView extends TwoColumnLayout {
  static final int MAX_DISPLAY_ROWS = 100; // direct table rendering
  static final int MAX_FAST_ROWS = 1000; // inline js array

  @Override
  protected void preHead(Page.HTML<_> html) {
    commonPreHead(html);
    set(DATATABLES_ID, "apps");
    set(initID(DATATABLES, "apps"), appsTableInit());
    setTableStyles(html, "apps", ".queue {width:6em}", ".ui {width:8em}");

    // Set the correct title.
    String reqState = $(APP_STATE);
    reqState = (reqState == null || reqState.isEmpty() ? "All" : reqState);
    setTitle(sjoin(reqState, "Applications"));
  }

  protected void commonPreHead(Page.HTML<_> html) {
    set(ACCORDION_ID, "nav");
    set(initID(ACCORDION, "nav"), "{autoHeight:false, active:0}");
  }

  @Override
  protected Class<? extends SubView> nav() {
    return NavBlock.class;
  }

  @Override
  protected Class<? extends SubView> content() {
    return AppsBlock.class;
  }

  private String appsTableInit() {
    // id, user, name, queue, starttime, finishtime, state, status, progress, ui
    return tableInit().append(", 'aaData': appsTableData")
      .append(", bDeferRender: true").append(", bProcessing: true")

      .append("\n, aoColumnDefs: ").append(getAppsTableColumnDefs())

      // Sort by id upon page load
      .append(", aaSorting: [[0, 'desc']]}").toString();
  }

  protected String getAppsTableColumnDefs() {
    StringBuilder sb = new StringBuilder();
    return sb.append("[\n").append("{'sType':'numeric', 'aTargets': [0]")
      .append(", 'mRender': parseHadoopID }")

      .append("\n, {'sType':'numeric', 'aTargets': [5, 6]")
      .append(", 'mRender': renderHadoopDate }")

      .append("\n, {'sType':'numeric', bSearchable:false, 'aTargets': [9]")
      .append(", 'mRender': parseHadoopProgress }]").toString();
  }
}
@@ -0,0 +1,52 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.yarn.server.applicationhistoryservice.webapp;

import static org.apache.hadoop.yarn.util.StringHelper.pajoin;

import org.apache.hadoop.yarn.server.api.ApplicationContext;
import org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryManager;
import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
import org.apache.hadoop.yarn.webapp.WebApp;
import org.apache.hadoop.yarn.webapp.YarnWebParams;

public class AHSWebApp extends WebApp implements YarnWebParams {

  private final ApplicationHistoryManager applicationHistoryManager;

  public AHSWebApp(ApplicationHistoryManager applicationHistoryManager) {
    this.applicationHistoryManager = applicationHistoryManager;
  }

  @Override
  public void setup() {
    bind(JAXBContextResolver.class);
    bind(AHSWebServices.class);
    bind(GenericExceptionHandler.class);
    bind(ApplicationContext.class).toInstance(applicationHistoryManager);
    route("/", AHSController.class);
    route(pajoin("/apps", APP_STATE), AHSController.class);
    route(pajoin("/app", APPLICATION_ID), AHSController.class, "app");
    route(pajoin("/appattempt", APPLICATION_ATTEMPT_ID), AHSController.class,
      "appattempt");
    route(pajoin("/container", CONTAINER_ID), AHSController.class, "container");
    route(
      pajoin("/logs", NM_NODENAME, CONTAINER_ID, ENTITY_STRING, APP_OWNER,
        CONTAINER_LOG_TYPE), AHSController.class, "logs");
  }
}
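The history server is expected to wire this web app up through the common WebApps builder. The sketch below is an assumed wiring for illustration only; the web-app prefix and bind address are placeholders, not values defined by this patch.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryManager;
import org.apache.hadoop.yarn.server.applicationhistoryservice.webapp.AHSWebApp;
import org.apache.hadoop.yarn.webapp.WebApp;
import org.apache.hadoop.yarn.webapp.WebApps;

public class AHSWebAppLauncher {
  public static WebApp launch(ApplicationHistoryManager historyManager,
      Configuration conf) {
    // Binds AHSWebApp (and hence AHSWebServices) at the given address.
    return WebApps.$for("applicationhistory")
        .with(conf)
        .at("0.0.0.0:8188") // assumed bind address for illustration
        .start(new AHSWebApp(historyManager));
  }
}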
@ -0,0 +1,162 @@
|
|||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.yarn.server.applicationhistoryservice.webapp;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.Set;
|
||||
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
import javax.servlet.http.HttpServletResponse;
|
||||
import javax.ws.rs.GET;
|
||||
import javax.ws.rs.Path;
|
||||
import javax.ws.rs.PathParam;
|
||||
import javax.ws.rs.Produces;
|
||||
import javax.ws.rs.QueryParam;
|
||||
import javax.ws.rs.core.Context;
|
||||
import javax.ws.rs.core.MediaType;
|
||||
|
||||
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
|
||||
import org.apache.hadoop.yarn.server.api.ApplicationContext;
|
||||
import org.apache.hadoop.yarn.server.webapp.WebServices;
|
||||
import org.apache.hadoop.yarn.server.webapp.dao.AppAttemptInfo;
|
||||
import org.apache.hadoop.yarn.server.webapp.dao.AppAttemptsInfo;
|
||||
import org.apache.hadoop.yarn.server.webapp.dao.AppInfo;
|
||||
import org.apache.hadoop.yarn.server.webapp.dao.AppsInfo;
|
||||
import org.apache.hadoop.yarn.server.webapp.dao.ContainerInfo;
|
||||
import org.apache.hadoop.yarn.server.webapp.dao.ContainersInfo;
|
||||
import org.apache.hadoop.yarn.webapp.BadRequestException;
|
||||
|
||||
import com.google.inject.Inject;
|
||||
import com.google.inject.Singleton;
|
||||
|
||||
@Singleton
|
||||
@Path("/ws/v1/applicationhistory")
|
||||
public class AHSWebServices extends WebServices {
|
||||
|
||||
@Inject
|
||||
public AHSWebServices(ApplicationContext appContext) {
|
||||
super(appContext);
|
||||
}
|
||||
|
||||
@GET
|
||||
@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
|
||||
public AppsInfo get(@Context HttpServletRequest req,
|
||||
@Context HttpServletResponse res) {
|
||||
return getApps(req, res, null, Collections.<String> emptySet(), null, null,
|
||||
null, null, null, null, null, null, Collections.<String> emptySet());
|
||||
}
|
||||
|
||||
@GET
|
||||
@Path("/apps")
|
||||
@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
|
||||
@Override
|
||||
public AppsInfo getApps(@Context HttpServletRequest req,
|
||||
@Context HttpServletResponse res, @QueryParam("state") String stateQuery,
|
||||
@QueryParam("states") Set<String> statesQuery,
|
||||
@QueryParam("finalStatus") String finalStatusQuery,
|
||||
@QueryParam("user") String userQuery,
|
||||
@QueryParam("queue") String queueQuery,
|
||||
@QueryParam("limit") String count,
|
||||
@QueryParam("startedTimeBegin") String startedBegin,
|
||||
@QueryParam("startedTimeEnd") String startedEnd,
|
||||
@QueryParam("finishedTimeBegin") String finishBegin,
|
||||
@QueryParam("finishedTimeEnd") String finishEnd,
|
||||
@QueryParam("applicationTypes") Set<String> applicationTypes) {
|
||||
init(res);
|
||||
validateStates(stateQuery, statesQuery);
|
||||
return super.getApps(req, res, stateQuery, statesQuery, finalStatusQuery,
|
||||
userQuery, queueQuery, count, startedBegin, startedEnd, finishBegin,
|
||||
finishEnd, applicationTypes);
|
||||
}
|
||||
|
||||
@GET
|
||||
@Path("/apps/{appid}")
|
||||
@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
|
||||
@Override
|
||||
public AppInfo getApp(@Context HttpServletRequest req,
|
||||
@Context HttpServletResponse res, @PathParam("appid") String appId) {
|
||||
init(res);
|
||||
return super.getApp(req, res, appId);
|
||||
}
|
||||
|
||||
@GET
|
||||
@Path("/apps/{appid}/appattempts")
|
||||
@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
|
||||
@Override
|
||||
public AppAttemptsInfo getAppAttempts(@Context HttpServletRequest req,
|
||||
@Context HttpServletResponse res, @PathParam("appid") String appId) {
|
||||
init(res);
|
||||
return super.getAppAttempts(req, res, appId);
|
||||
}
|
||||
|
||||
@GET
|
||||
@Path("/apps/{appid}/appattempts/{appattemptid}")
|
||||
@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
|
||||
@Override
|
||||
public AppAttemptInfo getAppAttempt(@Context HttpServletRequest req,
|
||||
@Context HttpServletResponse res, @PathParam("appid") String appId,
|
||||
@PathParam("appattemptid") String appAttemptId) {
|
||||
init(res);
|
||||
return super.getAppAttempt(req, res, appId, appAttemptId);
|
||||
}
|
||||
|
||||
@GET
|
||||
@Path("/apps/{appid}/appattempts/{appattemptid}/containers")
|
||||
@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
|
||||
@Override
|
||||
public ContainersInfo getContainers(@Context HttpServletRequest req,
|
||||
@Context HttpServletResponse res, @PathParam("appid") String appId,
|
||||
@PathParam("appattemptid") String appAttemptId) {
|
||||
init(res);
|
||||
return super.getContainers(req, res, appId, appAttemptId);
|
||||
}
|
||||
|
||||
@GET
|
||||
@Path("/apps/{appid}/appattempts/{appattemptid}/containers/{containerid}")
|
||||
@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
|
||||
@Override
|
||||
public ContainerInfo getContainer(@Context HttpServletRequest req,
|
||||
@Context HttpServletResponse res, @PathParam("appid") String appId,
|
||||
@PathParam("appattemptid") String appAttemptId,
|
||||
@PathParam("containerid") String containerId) {
|
||||
init(res);
|
||||
return super.getContainer(req, res, appId, appAttemptId, containerId);
|
||||
}
|
||||
|
||||
private static void
|
||||
validateStates(String stateQuery, Set<String> statesQuery) {
|
||||
// stateQuery is deprecated.
|
||||
if (stateQuery != null && !stateQuery.isEmpty()) {
|
||||
statesQuery.add(stateQuery);
|
||||
}
|
||||
Set<String> appStates = parseQueries(statesQuery, true);
|
||||
for (String appState : appStates) {
|
||||
switch (YarnApplicationState.valueOf(appState.toUpperCase())) {
|
||||
case FINISHED:
|
||||
case FAILED:
|
||||
case KILLED:
|
||||
continue;
|
||||
default:
|
||||
throw new BadRequestException("Invalid application-state " + appState
|
||||
+ " specified. It should be a final state");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
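Together with the class-level @Path("/ws/v1/applicationhistory") annotation, these methods expose the history data at /ws/v1/applicationhistory/apps, .../apps/{appid}, .../apps/{appid}/appattempts, and so on, with validateStates() restricting the states filter to final states (FINISHED, FAILED, KILLED). A minimal client-side sketch, for illustration only; host and port are assumptions, not defined by this patch.

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;

public class AHSRestClientExample {
  public static String fetchFinishedApps() throws Exception {
    // Query the apps listing declared above, asking for JSON.
    URL url = new URL(
        "http://ahs-host:8188/ws/v1/applicationhistory/apps?states=FINISHED");
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestProperty("Accept", "application/json");
    StringBuilder body = new StringBuilder();
    try (BufferedReader in = new BufferedReader(
        new InputStreamReader(conn.getInputStream(), "UTF-8"))) {
      String line;
      while ((line = in.readLine()) != null) {
        body.append(line);
      }
    }
    conn.disconnect();
    return body.toString();
  }
}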
|
|
@ -0,0 +1,69 @@
|
|||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.hadoop.yarn.server.applicationhistoryservice.webapp;
|
||||
|
||||
import static org.apache.hadoop.yarn.util.StringHelper.join;
|
||||
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.DATATABLES;
|
||||
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.DATATABLES_ID;
|
||||
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.initID;
|
||||
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.tableInit;
|
||||
|
||||
import org.apache.hadoop.yarn.server.webapp.AppAttemptBlock;
|
||||
import org.apache.hadoop.yarn.webapp.SubView;
|
||||
import org.apache.hadoop.yarn.webapp.YarnWebParams;
|
||||
|
||||
public class AppAttemptPage extends AHSView {
|
||||
|
||||
@Override
|
||||
protected void preHead(Page.HTML<_> html) {
|
||||
commonPreHead(html);
|
||||
|
||||
String appAttemptId = $(YarnWebParams.APPLICATION_ATTEMPT_ID);
|
||||
set(
|
||||
TITLE,
|
||||
appAttemptId.isEmpty() ? "Bad request: missing application attempt ID"
|
||||
: join("Application Attempt ",
|
||||
$(YarnWebParams.APPLICATION_ATTEMPT_ID)));
|
||||
|
||||
set(DATATABLES_ID, "containers");
|
||||
set(initID(DATATABLES, "containers"), containersTableInit());
|
||||
setTableStyles(html, "containers", ".queue {width:6em}", ".ui {width:8em}");
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Class<? extends SubView> content() {
|
||||
return AppAttemptBlock.class;
|
||||
}
|
||||
|
||||
private String containersTableInit() {
|
||||
return tableInit().append(", 'aaData': containersTableData")
|
||||
.append(", bDeferRender: true").append(", bProcessing: true")
|
||||
|
||||
.append("\n, aoColumnDefs: ").append(getContainersTableColumnDefs())
|
||||
|
||||
// Sort by id upon page load
|
||||
.append(", aaSorting: [[0, 'desc']]}").toString();
|
||||
}
|
||||
|
||||
protected String getContainersTableColumnDefs() {
|
||||
StringBuilder sb = new StringBuilder();
|
||||
return sb.append("[\n").append("{'sType':'numeric', 'aTargets': [0]")
|
||||
.append(", 'mRender': parseHadoopID }]").toString();
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,71 @@
|
|||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.yarn.server.applicationhistoryservice.webapp;
|
||||
|
||||
import static org.apache.hadoop.yarn.util.StringHelper.join;
|
||||
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.DATATABLES;
|
||||
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.DATATABLES_ID;
|
||||
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.initID;
|
||||
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.tableInit;
|
||||
|
||||
import org.apache.hadoop.yarn.server.webapp.AppBlock;
|
||||
import org.apache.hadoop.yarn.webapp.SubView;
|
||||
import org.apache.hadoop.yarn.webapp.YarnWebParams;
|
||||
|
||||
public class AppPage extends AHSView {
|
||||
|
||||
@Override
|
||||
protected void preHead(Page.HTML<_> html) {
|
||||
commonPreHead(html);
|
||||
|
||||
String appId = $(YarnWebParams.APPLICATION_ID);
|
||||
set(
|
||||
TITLE,
|
||||
appId.isEmpty() ? "Bad request: missing application ID" : join(
|
||||
"Application ", $(YarnWebParams.APPLICATION_ID)));
|
||||
|
||||
set(DATATABLES_ID, "attempts");
|
||||
set(initID(DATATABLES, "attempts"), attemptsTableInit());
|
||||
setTableStyles(html, "attempts", ".queue {width:6em}", ".ui {width:8em}");
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Class<? extends SubView> content() {
|
||||
return AppBlock.class;
|
||||
}
|
||||
|
||||
private String attemptsTableInit() {
|
||||
return tableInit().append(", 'aaData': attemptsTableData")
|
||||
.append(", bDeferRender: true").append(", bProcessing: true")
|
||||
|
||||
.append("\n, aoColumnDefs: ").append(getAttemptsTableColumnDefs())
|
||||
|
||||
// Sort by id upon page load
|
||||
.append(", aaSorting: [[0, 'desc']]}").toString();
|
||||
}
|
||||
|
||||
protected String getAttemptsTableColumnDefs() {
|
||||
StringBuilder sb = new StringBuilder();
|
||||
return sb.append("[\n").append("{'sType':'numeric', 'aTargets': [0]")
|
||||
.append(", 'mRender': parseHadoopID }")
|
||||
|
||||
.append("\n, {'sType':'numeric', 'aTargets': [1]")
|
||||
.append(", 'mRender': renderHadoopDate }]").toString();
|
||||
}
|
||||
}
|
|
@ -0,0 +1,41 @@
|
|||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.hadoop.yarn.server.applicationhistoryservice.webapp;
|
||||
|
||||
import static org.apache.hadoop.yarn.util.StringHelper.join;
|
||||
|
||||
import org.apache.hadoop.yarn.server.webapp.ContainerBlock;
|
||||
import org.apache.hadoop.yarn.webapp.SubView;
|
||||
import org.apache.hadoop.yarn.webapp.YarnWebParams;
|
||||
|
||||
public class ContainerPage extends AHSView {
|
||||
|
||||
@Override
|
||||
protected void preHead(Page.HTML<_> html) {
|
||||
commonPreHead(html);
|
||||
|
||||
String containerId = $(YarnWebParams.CONTAINER_ID);
|
||||
set(TITLE, containerId.isEmpty() ? "Bad request: missing container ID"
|
||||
: join("Container ", $(YarnWebParams.CONTAINER_ID)));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Class<? extends SubView> content() {
|
||||
return ContainerBlock.class;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,64 @@
|
|||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.yarn.server.applicationhistoryservice.webapp;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.HashSet;
|
||||
import java.util.Set;
|
||||
|
||||
import javax.ws.rs.ext.ContextResolver;
|
||||
import javax.ws.rs.ext.Provider;
|
||||
import javax.xml.bind.JAXBContext;
|
||||
|
||||
import org.apache.hadoop.yarn.server.webapp.dao.AppAttemptInfo;
|
||||
import org.apache.hadoop.yarn.server.webapp.dao.AppAttemptsInfo;
|
||||
import org.apache.hadoop.yarn.server.webapp.dao.AppInfo;
|
||||
import org.apache.hadoop.yarn.server.webapp.dao.AppsInfo;
|
||||
import org.apache.hadoop.yarn.server.webapp.dao.ContainerInfo;
|
||||
import org.apache.hadoop.yarn.server.webapp.dao.ContainersInfo;
|
||||
|
||||
import com.google.inject.Singleton;
|
||||
import com.sun.jersey.api.json.JSONConfiguration;
|
||||
import com.sun.jersey.api.json.JSONJAXBContext;
|
||||
|
||||
@Singleton
|
||||
@Provider
|
||||
@SuppressWarnings("rawtypes")
|
||||
public class JAXBContextResolver implements ContextResolver<JAXBContext> {
|
||||
|
||||
private JAXBContext context;
|
||||
private final Set<Class> types;
|
||||
|
||||
// you have to specify all the dao classes here
|
||||
private final Class[] cTypes = { AppInfo.class, AppsInfo.class,
|
||||
AppAttemptInfo.class, AppAttemptsInfo.class, ContainerInfo.class,
|
||||
ContainersInfo.class };
|
||||
|
||||
public JAXBContextResolver() throws Exception {
|
||||
this.types = new HashSet<Class>(Arrays.asList(cTypes));
|
||||
this.context =
|
||||
new JSONJAXBContext(JSONConfiguration.natural().rootUnwrapping(false)
|
||||
.build(), cTypes);
|
||||
}
|
||||
|
||||
@Override
|
||||
public JAXBContext getContext(Class<?> objectType) {
|
||||
return (types.contains(objectType)) ? context : null;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,51 @@
|
|||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.yarn.server.applicationhistoryservice.webapp;
|
||||
|
||||
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
|
||||
import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
|
||||
|
||||
public class NavBlock extends HtmlBlock {
|
||||
|
||||
@Override
|
||||
public void render(Block html) {
|
||||
html.
|
||||
div("#nav").
|
||||
h3("Application History").
|
||||
ul().
|
||||
li().a(url("apps"), "Applications").
|
||||
ul().
|
||||
li().a(url("apps",
|
||||
YarnApplicationState.FINISHED.toString()),
|
||||
YarnApplicationState.FINISHED.toString()).
|
||||
_().
|
||||
li().a(url("apps",
|
||||
YarnApplicationState.FAILED.toString()),
|
||||
YarnApplicationState.FAILED.toString()).
|
||||
_().
|
||||
li().a(url("apps",
|
||||
YarnApplicationState.KILLED.toString()),
|
||||
YarnApplicationState.KILLED.toString()).
|
||||
_().
|
||||
_().
|
||||
_().
|
||||
_().
|
||||
_();
|
||||
}
|
||||
}
|
|
@ -0,0 +1,85 @@
|
|||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.yarn.server.applicationhistoryservice;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationId;
|
||||
import org.apache.hadoop.yarn.api.records.ContainerId;
|
||||
import org.apache.hadoop.yarn.api.records.ContainerState;
|
||||
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
|
||||
import org.apache.hadoop.yarn.api.records.NodeId;
|
||||
import org.apache.hadoop.yarn.api.records.Priority;
|
||||
import org.apache.hadoop.yarn.api.records.Resource;
|
||||
import org.apache.hadoop.yarn.api.records.YarnApplicationAttemptState;
|
||||
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
|
||||
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptFinishData;
|
||||
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptStartData;
|
||||
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationFinishData;
|
||||
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationStartData;
|
||||
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerFinishData;
|
||||
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerStartData;
|
||||
|
||||
public class ApplicationHistoryStoreTestUtils {
|
||||
|
||||
protected ApplicationHistoryStore store;
|
||||
|
||||
protected void writeApplicationStartData(ApplicationId appId)
|
||||
throws IOException {
|
||||
store.applicationStarted(ApplicationStartData.newInstance(appId,
|
||||
appId.toString(), "test type", "test queue", "test user", 0, 0));
|
||||
}
|
||||
|
||||
protected void writeApplicationFinishData(ApplicationId appId)
|
||||
throws IOException {
|
||||
store.applicationFinished(ApplicationFinishData.newInstance(appId, 0,
|
||||
appId.toString(), FinalApplicationStatus.UNDEFINED,
|
||||
YarnApplicationState.FINISHED));
|
||||
}
|
||||
|
||||
protected void writeApplicationAttemptStartData(
|
||||
ApplicationAttemptId appAttemptId) throws IOException {
|
||||
store.applicationAttemptStarted(ApplicationAttemptStartData.newInstance(
|
||||
appAttemptId, appAttemptId.toString(), 0,
|
||||
ContainerId.newInstance(appAttemptId, 1)));
|
||||
}
|
||||
|
||||
protected void writeApplicationAttemptFinishData(
|
||||
ApplicationAttemptId appAttemptId) throws IOException {
|
||||
store.applicationAttemptFinished(ApplicationAttemptFinishData.newInstance(
|
||||
appAttemptId, appAttemptId.toString(), "test tracking url",
|
||||
FinalApplicationStatus.UNDEFINED, YarnApplicationAttemptState.FINISHED));
|
||||
}
|
||||
|
||||
protected void writeContainerStartData(ContainerId containerId)
|
||||
throws IOException {
|
||||
store.containerStarted(ContainerStartData.newInstance(containerId,
|
||||
Resource.newInstance(0, 0), NodeId.newInstance("localhost", 0),
|
||||
Priority.newInstance(containerId.getId()), 0));
|
||||
}
|
||||
|
||||
protected void writeContainerFinishData(ContainerId containerId)
|
||||
throws IOException {
|
||||
store.containerFinished(ContainerFinishData.newInstance(containerId, 0,
|
||||
containerId.toString(), "http://localhost:0/log", 0,
|
||||
ContainerState.COMPLETE));
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,195 @@
|
|||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.yarn.server.applicationhistoryservice;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
|
||||
import junit.framework.Assert;
|
||||
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptReportRequest;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptReportResponse;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptsRequest;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptsResponse;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportRequest;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportResponse;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsResponse;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.GetContainerReportRequest;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.GetContainerReportResponse;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.GetContainersRequest;
|
||||
import org.apache.hadoop.yarn.api.protocolrecords.GetContainersResponse;
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationAttemptReport;
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationId;
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationReport;
|
||||
import org.apache.hadoop.yarn.api.records.ContainerId;
|
||||
import org.apache.hadoop.yarn.api.records.ContainerReport;
|
||||
import org.apache.hadoop.yarn.conf.YarnConfiguration;
|
||||
import org.apache.hadoop.yarn.exceptions.YarnException;
|
||||
import org.junit.After;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
|
||||
public class TestApplicationHistoryClientService extends
|
||||
ApplicationHistoryStoreTestUtils {
|
||||
|
||||
ApplicationHistoryServer historyServer = null;
|
||||
|
||||
@Before
|
||||
public void setup() {
|
||||
historyServer = new ApplicationHistoryServer();
|
||||
Configuration config = new YarnConfiguration();
|
||||
config.setClass(YarnConfiguration.AHS_STORE,
|
||||
MemoryApplicationHistoryStore.class, ApplicationHistoryStore.class);
|
||||
historyServer.init(config);
|
||||
historyServer.start();
|
||||
store =
|
||||
((ApplicationHistoryManagerImpl) historyServer.getApplicationHistory())
|
||||
.getHistoryStore();
|
||||
}
|
||||
|
||||
@After
|
||||
public void tearDown() throws Exception {
|
||||
historyServer.stop();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testApplicationReport() throws IOException, YarnException {
|
||||
ApplicationId appId = null;
|
||||
appId = ApplicationId.newInstance(0, 1);
|
||||
writeApplicationStartData(appId);
|
||||
writeApplicationFinishData(appId);
|
||||
GetApplicationReportRequest request =
|
||||
GetApplicationReportRequest.newInstance(appId);
|
||||
GetApplicationReportResponse response =
|
||||
historyServer.getClientService().getClientHandler()
|
||||
.getApplicationReport(request);
|
||||
ApplicationReport appReport = response.getApplicationReport();
|
||||
Assert.assertNotNull(appReport);
|
||||
Assert.assertEquals("application_0_0001", appReport.getApplicationId()
|
||||
.toString());
|
||||
Assert.assertEquals("test type", appReport.getApplicationType().toString());
|
||||
Assert.assertEquals("test queue", appReport.getQueue().toString());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testApplications() throws IOException, YarnException {
|
||||
ApplicationId appId = null;
|
||||
appId = ApplicationId.newInstance(0, 1);
|
||||
writeApplicationStartData(appId);
|
||||
writeApplicationFinishData(appId);
|
||||
ApplicationId appId1 = ApplicationId.newInstance(0, 2);
|
||||
writeApplicationStartData(appId1);
|
||||
writeApplicationFinishData(appId1);
|
||||
GetApplicationsRequest request = GetApplicationsRequest.newInstance();
|
||||
GetApplicationsResponse response =
|
||||
historyServer.getClientService().getClientHandler()
|
||||
.getApplications(request);
|
||||
List<ApplicationReport> appReport = response.getApplicationList();
|
||||
Assert.assertNotNull(appReport);
|
||||
Assert.assertEquals(appId, appReport.get(0).getApplicationId());
|
||||
Assert.assertEquals(appId1, appReport.get(1).getApplicationId());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testApplicationAttemptReport() throws IOException, YarnException {
|
||||
ApplicationId appId = ApplicationId.newInstance(0, 1);
|
||||
ApplicationAttemptId appAttemptId =
|
||||
ApplicationAttemptId.newInstance(appId, 1);
|
||||
writeApplicationAttemptStartData(appAttemptId);
|
||||
writeApplicationAttemptFinishData(appAttemptId);
|
||||
GetApplicationAttemptReportRequest request =
|
||||
GetApplicationAttemptReportRequest.newInstance(appAttemptId);
|
||||
GetApplicationAttemptReportResponse response =
|
||||
historyServer.getClientService().getClientHandler()
|
||||
.getApplicationAttemptReport(request);
|
||||
ApplicationAttemptReport attemptReport =
|
||||
response.getApplicationAttemptReport();
|
||||
Assert.assertNotNull(attemptReport);
|
||||
Assert.assertEquals("appattempt_0_0001_000001", attemptReport
|
||||
.getApplicationAttemptId().toString());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testApplicationAttempts() throws IOException, YarnException {
|
||||
ApplicationId appId = ApplicationId.newInstance(0, 1);
|
||||
ApplicationAttemptId appAttemptId =
|
||||
ApplicationAttemptId.newInstance(appId, 1);
|
||||
ApplicationAttemptId appAttemptId1 =
|
||||
ApplicationAttemptId.newInstance(appId, 2);
|
||||
writeApplicationAttemptStartData(appAttemptId);
|
||||
writeApplicationAttemptFinishData(appAttemptId);
|
||||
writeApplicationAttemptStartData(appAttemptId1);
|
||||
writeApplicationAttemptFinishData(appAttemptId1);
|
||||
GetApplicationAttemptsRequest request =
|
||||
GetApplicationAttemptsRequest.newInstance(appId);
|
||||
GetApplicationAttemptsResponse response =
|
||||
historyServer.getClientService().getClientHandler()
|
||||
.getApplicationAttempts(request);
|
||||
List<ApplicationAttemptReport> attemptReports =
|
||||
response.getApplicationAttemptList();
|
||||
Assert.assertNotNull(attemptReports);
|
||||
Assert.assertEquals(appAttemptId, attemptReports.get(0)
|
||||
.getApplicationAttemptId());
|
||||
Assert.assertEquals(appAttemptId1, attemptReports.get(1)
|
||||
.getApplicationAttemptId());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testContainerReport() throws IOException, YarnException {
|
||||
ApplicationId appId = ApplicationId.newInstance(0, 1);
|
||||
ApplicationAttemptId appAttemptId =
|
||||
ApplicationAttemptId.newInstance(appId, 1);
|
||||
ContainerId containerId = ContainerId.newInstance(appAttemptId, 1);
|
||||
writeContainerStartData(containerId);
|
||||
writeContainerFinishData(containerId);
|
||||
GetContainerReportRequest request =
|
||||
GetContainerReportRequest.newInstance(containerId);
|
||||
GetContainerReportResponse response =
|
||||
historyServer.getClientService().getClientHandler()
|
||||
.getContainerReport(request);
|
||||
ContainerReport container = response.getContainerReport();
|
||||
Assert.assertNotNull(container);
|
||||
Assert.assertEquals(containerId, container.getContainerId());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testContainers() throws IOException, YarnException {
|
||||
ApplicationId appId = ApplicationId.newInstance(0, 1);
|
||||
ApplicationAttemptId appAttemptId =
|
||||
ApplicationAttemptId.newInstance(appId, 1);
|
||||
ContainerId containerId = ContainerId.newInstance(appAttemptId, 1);
|
||||
ContainerId containerId1 = ContainerId.newInstance(appAttemptId, 2);
|
||||
writeContainerStartData(containerId);
|
||||
writeContainerFinishData(containerId);
|
||||
writeContainerStartData(containerId1);
|
||||
writeContainerFinishData(containerId1);
|
||||
GetContainersRequest request =
|
||||
GetContainersRequest.newInstance(appAttemptId);
|
||||
GetContainersResponse response =
|
||||
historyServer.getClientService().getClientHandler()
|
||||
.getContainers(request);
|
||||
List<ContainerReport> containers = response.getContainerList();
|
||||
Assert.assertNotNull(containers);
|
||||
Assert.assertEquals(containerId, containers.get(1).getContainerId());
|
||||
Assert.assertEquals(containerId1, containers.get(0).getContainerId());
|
||||
}
|
||||
}
|
|
@ -0,0 +1,74 @@
|
|||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.yarn.server.applicationhistoryservice;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationId;
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationReport;
|
||||
import org.apache.hadoop.yarn.conf.YarnConfiguration;
|
||||
import org.apache.hadoop.yarn.exceptions.YarnException;
|
||||
import org.junit.After;
|
||||
import org.junit.Assert;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
|
||||
public class TestApplicationHistoryManagerImpl extends
|
||||
ApplicationHistoryStoreTestUtils {
|
||||
ApplicationHistoryManagerImpl applicationHistoryManagerImpl = null;
|
||||
|
||||
@Before
|
||||
public void setup() throws Exception {
|
||||
Configuration config = new Configuration();
|
||||
config.setClass(YarnConfiguration.AHS_STORE,
|
||||
MemoryApplicationHistoryStore.class, ApplicationHistoryStore.class);
|
||||
applicationHistoryManagerImpl = new ApplicationHistoryManagerImpl();
|
||||
applicationHistoryManagerImpl.init(config);
|
||||
applicationHistoryManagerImpl.start();
|
||||
store = applicationHistoryManagerImpl.getHistoryStore();
|
||||
}
|
||||
|
||||
@After
|
||||
public void tearDown() throws Exception {
|
||||
applicationHistoryManagerImpl.stop();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testApplicationReport() throws IOException, YarnException {
|
||||
ApplicationId appId = null;
|
||||
appId = ApplicationId.newInstance(0, 1);
|
||||
writeApplicationStartData(appId);
|
||||
writeApplicationFinishData(appId);
|
||||
ApplicationAttemptId appAttemptId =
|
||||
ApplicationAttemptId.newInstance(appId, 1);
|
||||
writeApplicationAttemptStartData(appAttemptId);
|
||||
writeApplicationAttemptFinishData(appAttemptId);
|
||||
ApplicationReport appReport =
|
||||
applicationHistoryManagerImpl.getApplication(appId);
|
||||
Assert.assertNotNull(appReport);
|
||||
Assert.assertEquals(appId, appReport.getApplicationId());
|
||||
Assert.assertEquals(appAttemptId,
|
||||
appReport.getCurrentApplicationAttemptId());
|
||||
Assert.assertEquals(appAttemptId.toString(), appReport.getHost());
|
||||
Assert.assertEquals("test type", appReport.getApplicationType().toString());
|
||||
Assert.assertEquals("test queue", appReport.getQueue().toString());
|
||||
}
|
||||
}
|
|
@ -0,0 +1,77 @@
|
|||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.yarn.server.applicationhistoryservice;
|
||||
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertNotNull;
|
||||
import static org.junit.Assert.fail;
|
||||
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.service.Service.STATE;
|
||||
import org.apache.hadoop.util.ExitUtil;
|
||||
import org.apache.hadoop.yarn.conf.YarnConfiguration;
|
||||
import org.junit.After;
|
||||
import org.junit.Test;
|
||||
|
||||
public class TestApplicationHistoryServer {
|
||||
|
||||
ApplicationHistoryServer historyServer = null;
|
||||
|
||||
// simple test init/start/stop ApplicationHistoryServer. Status should change.
|
||||
@Test(timeout = 50000)
|
||||
public void testStartStopServer() throws Exception {
|
||||
historyServer = new ApplicationHistoryServer();
|
||||
Configuration config = new YarnConfiguration();
|
||||
historyServer.init(config);
|
||||
assertEquals(STATE.INITED, historyServer.getServiceState());
|
||||
assertEquals(2, historyServer.getServices().size());
|
||||
ApplicationHistoryClientService historyService =
|
||||
historyServer.getClientService();
|
||||
assertNotNull(historyServer.getClientService());
|
||||
assertEquals(STATE.INITED, historyService.getServiceState());
|
||||
|
||||
historyServer.start();
|
||||
assertEquals(STATE.STARTED, historyServer.getServiceState());
|
||||
assertEquals(STATE.STARTED, historyService.getServiceState());
|
||||
historyServer.stop();
|
||||
assertEquals(STATE.STOPPED, historyServer.getServiceState());
|
||||
}
|
||||
|
||||
// test launch method
|
||||
@Test(timeout = 60000)
|
||||
public void testLaunch() throws Exception {
|
||||
|
||||
ExitUtil.disableSystemExit();
|
||||
try {
|
||||
historyServer =
|
||||
ApplicationHistoryServer.launchAppHistoryServer(new String[0]);
|
||||
} catch (ExitUtil.ExitException e) {
|
||||
assertEquals(0, e.status);
|
||||
ExitUtil.resetFirstExitException();
|
||||
fail();
|
||||
}
|
||||
}
|
||||
|
||||
@After
|
||||
public void stop() {
|
||||
if (historyServer != null) {
|
||||
historyServer.stop();
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,196 @@
|
|||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.yarn.server.applicationhistoryservice;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.net.URI;
|
||||
|
||||
import junit.framework.Assert;
|
||||
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.fs.FileSystem;
|
||||
import org.apache.hadoop.fs.Path;
|
||||
import org.apache.hadoop.fs.RawLocalFileSystem;
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationId;
|
||||
import org.apache.hadoop.yarn.api.records.ContainerId;
|
||||
import org.apache.hadoop.yarn.api.records.Priority;
|
||||
import org.apache.hadoop.yarn.conf.YarnConfiguration;
|
||||
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptHistoryData;
|
||||
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationHistoryData;
|
||||
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerHistoryData;
|
||||
import org.junit.After;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
|
||||
public class TestFileSystemApplicationHistoryStore extends
|
||||
ApplicationHistoryStoreTestUtils {
|
||||
|
||||
private FileSystem fs;
|
||||
private Path fsWorkingPath;
|
||||
|
||||
@Before
|
||||
public void setup() throws Exception {
|
||||
fs = new RawLocalFileSystem();
|
||||
Configuration conf = new Configuration();
|
||||
fs.initialize(new URI("/"), conf);
|
||||
fsWorkingPath = new Path("Test");
|
||||
fs.delete(fsWorkingPath, true);
|
||||
conf.set(YarnConfiguration.FS_HISTORY_STORE_URI, fsWorkingPath.toString());
|
||||
store = new FileSystemApplicationHistoryStore();
|
||||
store.init(conf);
|
||||
store.start();
|
||||
}
|
||||
|
||||
@After
|
||||
public void tearDown() throws Exception {
|
||||
store.stop();
|
||||
fs.delete(fsWorkingPath, true);
|
||||
fs.close();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testReadWriteHistoryData() throws IOException {
|
||||
testWriteHistoryData(5);
|
||||
testReadHistoryData(5);
|
||||
}
|
||||
|
||||
private void testWriteHistoryData(int num) throws IOException {
|
||||
// write application history data
|
||||
for (int i = 1; i <= num; ++i) {
|
||||
ApplicationId appId = ApplicationId.newInstance(0, i);
|
||||
writeApplicationStartData(appId);
|
||||
|
||||
// write application attempt history data
|
||||
for (int j = 1; j <= num; ++j) {
|
||||
ApplicationAttemptId appAttemptId =
|
||||
ApplicationAttemptId.newInstance(appId, j);
|
||||
writeApplicationAttemptStartData(appAttemptId);
|
||||
|
||||
// write container history data
|
||||
for (int k = 1; k <= num; ++k) {
|
||||
ContainerId containerId = ContainerId.newInstance(appAttemptId, k);
|
||||
writeContainerStartData(containerId);
|
||||
writeContainerFinishData(containerId);
|
||||
|
||||
writeApplicationAttemptFinishData(appAttemptId);
|
||||
}
|
||||
}
|
||||
|
||||
writeApplicationFinishData(appId);
|
||||
}
|
||||
}
|
||||
|
||||
private void testReadHistoryData(int num) throws IOException {
|
||||
// read application history data
|
||||
Assert.assertEquals(num, store.getAllApplications().size());
|
||||
for (int i = 1; i <= num; ++i) {
|
||||
ApplicationId appId = ApplicationId.newInstance(0, i);
|
||||
ApplicationHistoryData appData = store.getApplication(appId);
|
||||
Assert.assertNotNull(appData);
|
||||
Assert.assertEquals(appId.toString(), appData.getApplicationName());
|
||||
Assert.assertEquals(appId.toString(), appData.getDiagnosticsInfo());
|
||||
|
||||
// read application attempt history data
|
||||
Assert.assertEquals(num, store.getApplicationAttempts(appId).size());
|
||||
for (int j = 1; j <= num; ++j) {
|
||||
ApplicationAttemptId appAttemptId =
|
||||
ApplicationAttemptId.newInstance(appId, j);
|
||||
ApplicationAttemptHistoryData attemptData =
|
||||
store.getApplicationAttempt(appAttemptId);
|
||||
Assert.assertNotNull(attemptData);
|
||||
Assert.assertEquals(appAttemptId.toString(), attemptData.getHost());
|
||||
Assert.assertEquals(appAttemptId.toString(),
|
||||
attemptData.getDiagnosticsInfo());
|
||||
|
||||
// read container history data
|
||||
Assert.assertEquals(num, store.getContainers(appAttemptId).size());
|
||||
for (int k = 1; k <= num; ++k) {
|
||||
ContainerId containerId = ContainerId.newInstance(appAttemptId, k);
|
||||
ContainerHistoryData containerData = store.getContainer(containerId);
|
||||
Assert.assertNotNull(containerData);
|
||||
Assert.assertEquals(Priority.newInstance(containerId.getId()),
|
||||
containerData.getPriority());
|
||||
Assert.assertEquals(containerId.toString(),
|
||||
containerData.getDiagnosticsInfo());
|
||||
}
|
||||
ContainerHistoryData masterContainer =
|
||||
store.getAMContainer(appAttemptId);
|
||||
Assert.assertNotNull(masterContainer);
|
||||
Assert.assertEquals(ContainerId.newInstance(appAttemptId, 1),
|
||||
masterContainer.getContainerId());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testWriteAfterApplicationFinish() throws IOException {
|
||||
ApplicationId appId = ApplicationId.newInstance(0, 1);
|
||||
writeApplicationStartData(appId);
|
||||
writeApplicationFinishData(appId);
|
||||
// write application attempt history data
|
||||
ApplicationAttemptId appAttemptId =
|
||||
ApplicationAttemptId.newInstance(appId, 1);
|
||||
try {
|
||||
writeApplicationAttemptStartData(appAttemptId);
|
||||
Assert.fail();
|
||||
} catch (IOException e) {
|
||||
Assert.assertTrue(e.getMessage().contains("is not opened"));
|
||||
}
|
||||
try {
|
||||
writeApplicationAttemptFinishData(appAttemptId);
|
||||
Assert.fail();
|
||||
} catch (IOException e) {
|
||||
Assert.assertTrue(e.getMessage().contains("is not opened"));
|
||||
}
|
||||
// write container history data
|
||||
ContainerId containerId = ContainerId.newInstance(appAttemptId, 1);
|
||||
try {
|
||||
writeContainerStartData(containerId);
|
||||
Assert.fail();
|
||||
} catch (IOException e) {
|
||||
Assert.assertTrue(e.getMessage().contains("is not opened"));
|
||||
}
|
||||
try {
|
||||
writeContainerFinishData(containerId);
|
||||
Assert.fail();
|
||||
} catch (IOException e) {
|
||||
Assert.assertTrue(e.getMessage().contains("is not opened"));
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testMassiveWriteContainerHistoryData() throws IOException {
|
||||
long mb = 1024 * 1024;
|
||||
long usedDiskBefore = fs.getContentSummary(fsWorkingPath).getLength() / mb;
|
||||
ApplicationId appId = ApplicationId.newInstance(0, 1);
|
||||
writeApplicationStartData(appId);
|
||||
ApplicationAttemptId appAttemptId =
|
||||
ApplicationAttemptId.newInstance(appId, 1);
|
||||
for (int i = 1; i <= 100000; ++i) {
|
||||
ContainerId containerId = ContainerId.newInstance(appAttemptId, i);
|
||||
writeContainerStartData(containerId);
|
||||
writeContainerFinishData(containerId);
|
||||
}
|
||||
writeApplicationFinishData(appId);
|
||||
long usedDiskAfter = fs.getContentSummary(fsWorkingPath).getLength() / mb;
|
||||
Assert.assertTrue((usedDiskAfter - usedDiskBefore) < 20);
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,204 @@
|
|||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.yarn.server.applicationhistoryservice;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import junit.framework.Assert;
|
||||
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationId;
|
||||
import org.apache.hadoop.yarn.api.records.ContainerId;
|
||||
import org.apache.hadoop.yarn.api.records.Priority;
|
||||
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationAttemptHistoryData;
|
||||
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ApplicationHistoryData;
|
||||
import org.apache.hadoop.yarn.server.applicationhistoryservice.records.ContainerHistoryData;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
|
||||
public class TestMemoryApplicationHistoryStore extends
|
||||
ApplicationHistoryStoreTestUtils {
|
||||
|
||||
@Before
|
||||
public void setup() {
|
||||
store = new MemoryApplicationHistoryStore();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testReadWriteApplicationHistory() throws Exception {
|
||||
// Out of order
|
||||
ApplicationId appId = ApplicationId.newInstance(0, 1);
|
||||
try {
|
||||
writeApplicationFinishData(appId);
|
||||
Assert.fail();
|
||||
} catch (IOException e) {
|
||||
Assert.assertTrue(e.getMessage().contains(
|
||||
"is stored before the start information"));
|
||||
}
|
||||
// Normal
|
||||
int numApps = 5;
|
||||
for (int i = 1; i <= numApps; ++i) {
|
||||
appId = ApplicationId.newInstance(0, i);
|
||||
writeApplicationStartData(appId);
|
||||
writeApplicationFinishData(appId);
|
||||
}
|
||||
Assert.assertEquals(numApps, store.getAllApplications().size());
|
||||
for (int i = 1; i <= numApps; ++i) {
|
||||
appId = ApplicationId.newInstance(0, i);
|
||||
ApplicationHistoryData data = store.getApplication(appId);
|
||||
Assert.assertNotNull(data);
|
||||
Assert.assertEquals(appId.toString(), data.getApplicationName());
|
||||
Assert.assertEquals(appId.toString(), data.getDiagnosticsInfo());
|
||||
}
|
||||
// Write again
|
||||
appId = ApplicationId.newInstance(0, 1);
|
||||
try {
|
||||
writeApplicationStartData(appId);
|
||||
Assert.fail();
|
||||
} catch (IOException e) {
|
||||
Assert.assertTrue(e.getMessage().contains("is already stored"));
|
||||
}
|
||||
try {
|
||||
writeApplicationFinishData(appId);
|
||||
Assert.fail();
|
||||
} catch (IOException e) {
|
||||
Assert.assertTrue(e.getMessage().contains("is already stored"));
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testReadWriteApplicationAttemptHistory() throws Exception {
|
||||
// Out of order
|
||||
ApplicationId appId = ApplicationId.newInstance(0, 1);
|
||||
ApplicationAttemptId appAttemptId =
|
||||
ApplicationAttemptId.newInstance(appId, 1);
|
||||
try {
|
||||
writeApplicationAttemptFinishData(appAttemptId);
|
||||
Assert.fail();
|
||||
} catch (IOException e) {
|
||||
Assert.assertTrue(e.getMessage().contains(
|
||||
"is stored before the start information"));
|
||||
}
|
||||
// Normal
|
||||
int numAppAttempts = 5;
|
||||
writeApplicationStartData(appId);
|
||||
for (int i = 1; i <= numAppAttempts; ++i) {
|
||||
appAttemptId = ApplicationAttemptId.newInstance(appId, i);
|
||||
writeApplicationAttemptStartData(appAttemptId);
|
||||
writeApplicationAttemptFinishData(appAttemptId);
|
||||
}
|
||||
Assert.assertEquals(numAppAttempts, store.getApplicationAttempts(appId)
|
||||
.size());
|
||||
for (int i = 1; i <= numAppAttempts; ++i) {
|
||||
appAttemptId = ApplicationAttemptId.newInstance(appId, i);
|
||||
ApplicationAttemptHistoryData data =
|
||||
store.getApplicationAttempt(appAttemptId);
|
||||
Assert.assertNotNull(data);
|
||||
Assert.assertEquals(appAttemptId.toString(), data.getHost());
|
||||
Assert.assertEquals(appAttemptId.toString(), data.getDiagnosticsInfo());
|
||||
}
|
||||
writeApplicationFinishData(appId);
|
||||
// Write again
|
||||
appAttemptId = ApplicationAttemptId.newInstance(appId, 1);
|
||||
try {
|
||||
writeApplicationAttemptStartData(appAttemptId);
|
||||
Assert.fail();
|
||||
} catch (IOException e) {
|
||||
Assert.assertTrue(e.getMessage().contains("is already stored"));
|
||||
}
|
||||
try {
|
||||
writeApplicationAttemptFinishData(appAttemptId);
|
||||
Assert.fail();
|
||||
} catch (IOException e) {
|
||||
Assert.assertTrue(e.getMessage().contains("is already stored"));
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testReadWriteContainerHistory() throws Exception {
|
||||
// Out of order
|
||||
ApplicationId appId = ApplicationId.newInstance(0, 1);
|
||||
ApplicationAttemptId appAttemptId =
|
||||
ApplicationAttemptId.newInstance(appId, 1);
|
||||
ContainerId containerId = ContainerId.newInstance(appAttemptId, 1);
|
||||
try {
|
||||
writeContainerFinishData(containerId);
|
||||
Assert.fail();
|
||||
} catch (IOException e) {
|
||||
Assert.assertTrue(e.getMessage().contains(
|
||||
"is stored before the start information"));
|
||||
}
|
||||
// Normal
|
||||
writeApplicationAttemptStartData(appAttemptId);
|
||||
int numContainers = 5;
|
||||
for (int i = 1; i <= numContainers; ++i) {
|
||||
containerId = ContainerId.newInstance(appAttemptId, i);
|
||||
writeContainerStartData(containerId);
|
||||
writeContainerFinishData(containerId);
|
||||
}
|
||||
Assert
|
||||
.assertEquals(numContainers, store.getContainers(appAttemptId).size());
|
||||
for (int i = 1; i <= numContainers; ++i) {
|
||||
containerId = ContainerId.newInstance(appAttemptId, i);
|
||||
ContainerHistoryData data = store.getContainer(containerId);
|
||||
Assert.assertNotNull(data);
|
||||
Assert.assertEquals(Priority.newInstance(containerId.getId()),
|
||||
data.getPriority());
|
||||
Assert.assertEquals(containerId.toString(), data.getDiagnosticsInfo());
|
||||
}
|
||||
ContainerHistoryData masterContainer = store.getAMContainer(appAttemptId);
|
||||
Assert.assertNotNull(masterContainer);
|
||||
Assert.assertEquals(ContainerId.newInstance(appAttemptId, 1),
|
||||
masterContainer.getContainerId());
|
||||
writeApplicationAttemptFinishData(appAttemptId);
|
||||
// Write again
|
||||
containerId = ContainerId.newInstance(appAttemptId, 1);
|
||||
try {
|
||||
writeContainerStartData(containerId);
|
||||
Assert.fail();
|
||||
} catch (IOException e) {
|
||||
Assert.assertTrue(e.getMessage().contains("is already stored"));
|
||||
}
|
||||
try {
|
||||
writeContainerFinishData(containerId);
|
||||
Assert.fail();
|
||||
} catch (IOException e) {
|
||||
Assert.assertTrue(e.getMessage().contains("is already stored"));
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testMassiveWriteContainerHistory() throws IOException {
|
||||
long mb = 1024 * 1024;
|
||||
Runtime runtime = Runtime.getRuntime();
|
||||
long usedMemoryBefore = (runtime.totalMemory() - runtime.freeMemory()) / mb;
|
||||
int numContainers = 100000;
|
||||
ApplicationId appId = ApplicationId.newInstance(0, 1);
|
||||
ApplicationAttemptId appAttemptId =
|
||||
ApplicationAttemptId.newInstance(appId, 1);
|
||||
for (int i = 1; i <= numContainers; ++i) {
|
||||
ContainerId containerId = ContainerId.newInstance(appAttemptId, i);
|
||||
writeContainerStartData(containerId);
|
||||
writeContainerFinishData(containerId);
|
||||
}
|
||||
long usedMemoryAfter = (runtime.totalMemory() - runtime.freeMemory()) / mb;
|
||||
Assert.assertTrue((usedMemoryAfter - usedMemoryBefore) < 200);
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,182 @@
|
|||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.yarn.server.applicationhistoryservice.webapp;
|
||||
|
||||
import static org.apache.hadoop.yarn.webapp.Params.TITLE;
|
||||
import static org.mockito.Mockito.mock;
|
||||
import junit.framework.Assert;
|
||||
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationId;
|
||||
import org.apache.hadoop.yarn.api.records.ContainerId;
|
||||
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
|
||||
import org.apache.hadoop.yarn.conf.YarnConfiguration;
|
||||
import org.apache.hadoop.yarn.server.api.ApplicationContext;
|
||||
import org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryManager;
|
||||
import org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryManagerImpl;
|
||||
import org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryStore;
|
||||
import org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryStoreTestUtils;
|
||||
import org.apache.hadoop.yarn.server.applicationhistoryservice.MemoryApplicationHistoryStore;
|
||||
import org.apache.hadoop.yarn.util.StringHelper;
|
||||
import org.apache.hadoop.yarn.webapp.YarnWebParams;
|
||||
import org.apache.hadoop.yarn.webapp.test.WebAppTests;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
|
||||
import com.google.inject.Injector;
|
||||
|
||||
public class TestAHSWebApp extends ApplicationHistoryStoreTestUtils {
|
||||
|
||||
public void setApplicationHistoryStore(ApplicationHistoryStore store) {
|
||||
this.store = store;
|
||||
}
|
||||
|
||||
@Before
|
||||
public void setup() {
|
||||
store = new MemoryApplicationHistoryStore();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testAppControllerIndex() throws Exception {
|
||||
ApplicationHistoryManager ahManager = mock(ApplicationHistoryManager.class);
|
||||
Injector injector =
|
||||
WebAppTests.createMockInjector(ApplicationHistoryManager.class,
|
||||
ahManager);
|
||||
AHSController controller = injector.getInstance(AHSController.class);
|
||||
controller.index();
|
||||
Assert
|
||||
.assertEquals("Application History", controller.get(TITLE, "unknown"));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testView() throws Exception {
|
||||
Injector injector =
|
||||
WebAppTests.createMockInjector(ApplicationContext.class,
|
||||
mockApplicationHistoryManager(5, 1, 1));
|
||||
AHSView ahsViewInstance = injector.getInstance(AHSView.class);
|
||||
|
||||
ahsViewInstance.render();
|
||||
WebAppTests.flushOutput(injector);
|
||||
|
||||
ahsViewInstance.set(YarnWebParams.APP_STATE,
|
||||
YarnApplicationState.FAILED.toString());
|
||||
ahsViewInstance.render();
|
||||
WebAppTests.flushOutput(injector);
|
||||
|
||||
ahsViewInstance.set(YarnWebParams.APP_STATE, StringHelper.cjoin(
|
||||
YarnApplicationState.FAILED.toString(), YarnApplicationState.KILLED));
|
||||
ahsViewInstance.render();
|
||||
WebAppTests.flushOutput(injector);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testAppPage() throws Exception {
|
||||
Injector injector =
|
||||
WebAppTests.createMockInjector(ApplicationContext.class,
|
||||
mockApplicationHistoryManager(1, 5, 1));
|
||||
AppPage appPageInstance = injector.getInstance(AppPage.class);
|
||||
|
||||
appPageInstance.render();
|
||||
WebAppTests.flushOutput(injector);
|
||||
|
||||
appPageInstance.set(YarnWebParams.APPLICATION_ID, ApplicationId
|
||||
.newInstance(0, 1).toString());
|
||||
appPageInstance.render();
|
||||
WebAppTests.flushOutput(injector);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testAppAttemptPage() throws Exception {
|
||||
Injector injector =
|
||||
WebAppTests.createMockInjector(ApplicationContext.class,
|
||||
mockApplicationHistoryManager(1, 1, 5));
|
||||
AppAttemptPage appAttemptPageInstance =
|
||||
injector.getInstance(AppAttemptPage.class);
|
||||
|
||||
appAttemptPageInstance.render();
|
||||
WebAppTests.flushOutput(injector);
|
||||
|
||||
appAttemptPageInstance.set(YarnWebParams.APPLICATION_ATTEMPT_ID,
|
||||
ApplicationAttemptId.newInstance(ApplicationId.newInstance(0, 1), 1)
|
||||
.toString());
|
||||
appAttemptPageInstance.render();
|
||||
WebAppTests.flushOutput(injector);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testContainerPage() throws Exception {
|
||||
Injector injector =
|
||||
WebAppTests.createMockInjector(ApplicationContext.class,
|
||||
mockApplicationHistoryManager(1, 1, 1));
|
||||
ContainerPage containerPageInstance =
|
||||
injector.getInstance(ContainerPage.class);
|
||||
|
||||
containerPageInstance.render();
|
||||
WebAppTests.flushOutput(injector);
|
||||
|
||||
containerPageInstance.set(
|
||||
YarnWebParams.CONTAINER_ID,
|
||||
ContainerId
|
||||
.newInstance(
|
||||
ApplicationAttemptId.newInstance(ApplicationId.newInstance(0, 1), 1),
|
||||
1).toString());
|
||||
containerPageInstance.render();
|
||||
WebAppTests.flushOutput(injector);
|
||||
}
|
||||
|
||||
ApplicationHistoryManager mockApplicationHistoryManager(int numApps,
|
||||
int numAppAttempts, int numContainers) throws Exception {
|
||||
ApplicationHistoryManager ahManager =
|
||||
new MockApplicationHistoryManagerImpl(store);
|
||||
for (int i = 1; i <= numApps; ++i) {
|
||||
ApplicationId appId = ApplicationId.newInstance(0, i);
|
||||
writeApplicationStartData(appId);
|
||||
for (int j = 1; j <= numAppAttempts; ++j) {
|
||||
ApplicationAttemptId appAttemptId =
|
||||
ApplicationAttemptId.newInstance(appId, j);
|
||||
writeApplicationAttemptStartData(appAttemptId);
|
||||
for (int k = 1; k <= numContainers; ++k) {
|
||||
ContainerId containerId = ContainerId.newInstance(appAttemptId, k);
|
||||
writeContainerStartData(containerId);
|
||||
writeContainerFinishData(containerId);
|
||||
}
|
||||
writeApplicationAttemptFinishData(appAttemptId);
|
||||
}
|
||||
writeApplicationFinishData(appId);
|
||||
}
|
||||
return ahManager;
|
||||
}
|
||||
|
||||
class MockApplicationHistoryManagerImpl extends ApplicationHistoryManagerImpl {
|
||||
|
||||
public MockApplicationHistoryManagerImpl(ApplicationHistoryStore store) {
|
||||
super();
|
||||
init(new YarnConfiguration());
|
||||
start();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected ApplicationHistoryStore createApplicationHistoryStore(
|
||||
Configuration conf) {
|
||||
return store;
|
||||
}
|
||||
};
|
||||
|
||||
}
|
|
@ -0,0 +1,295 @@
|
|||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.yarn.server.applicationhistoryservice.webapp;
|
||||
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.fail;
|
||||
|
||||
import javax.ws.rs.core.MediaType;
|
||||
|
||||
import junit.framework.Assert;
|
||||
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
|
||||
import org.apache.hadoop.yarn.api.records.ApplicationId;
|
||||
import org.apache.hadoop.yarn.api.records.ContainerId;
|
||||
import org.apache.hadoop.yarn.api.records.ContainerState;
|
||||
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
|
||||
import org.apache.hadoop.yarn.api.records.NodeId;
|
||||
import org.apache.hadoop.yarn.api.records.Priority;
|
||||
import org.apache.hadoop.yarn.api.records.YarnApplicationAttemptState;
|
||||
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
|
||||
import org.apache.hadoop.yarn.server.api.ApplicationContext;
|
||||
import org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryManager;
|
||||
import org.apache.hadoop.yarn.server.applicationhistoryservice.ApplicationHistoryStore;
|
||||
import org.apache.hadoop.yarn.server.applicationhistoryservice.MemoryApplicationHistoryStore;
|
||||
import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
|
||||
import org.apache.hadoop.yarn.webapp.WebServicesTestUtils;
|
||||
import org.codehaus.jettison.json.JSONArray;
|
||||
import org.codehaus.jettison.json.JSONException;
|
||||
import org.codehaus.jettison.json.JSONObject;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
|
||||
import com.google.inject.Guice;
|
||||
import com.google.inject.Injector;
|
||||
import com.google.inject.servlet.GuiceServletContextListener;
|
||||
import com.google.inject.servlet.ServletModule;
|
||||
import com.sun.jersey.api.client.ClientResponse;
|
||||
import com.sun.jersey.api.client.ClientResponse.Status;
|
||||
import com.sun.jersey.api.client.UniformInterfaceException;
|
||||
import com.sun.jersey.api.client.WebResource;
|
||||
import com.sun.jersey.guice.spi.container.servlet.GuiceContainer;
|
||||
import com.sun.jersey.test.framework.JerseyTest;
|
||||
import com.sun.jersey.test.framework.WebAppDescriptor;
|
||||
|
||||
public class TestAHSWebServices extends JerseyTest {
|
||||
|
||||
private static ApplicationHistoryManager ahManager;
|
||||
|
||||
private Injector injector = Guice.createInjector(new ServletModule() {
|
||||
|
||||
@Override
|
||||
protected void configureServlets() {
|
||||
bind(JAXBContextResolver.class);
|
||||
bind(AHSWebServices.class);
|
||||
bind(GenericExceptionHandler.class);
|
||||
try {
|
||||
ahManager = mockApplicationHistoryManager();
|
||||
} catch (Exception e) {
|
||||
Assert.fail();
|
||||
}
|
||||
bind(ApplicationContext.class).toInstance(ahManager);
|
||||
serve("/*").with(GuiceContainer.class);
|
||||
}
|
||||
});
|
||||
|
||||
public class GuiceServletConfig extends GuiceServletContextListener {
|
||||
|
||||
@Override
|
||||
protected Injector getInjector() {
|
||||
return injector;
|
||||
}
|
||||
}
|
||||
|
||||
private ApplicationHistoryManager mockApplicationHistoryManager()
|
||||
throws Exception {
|
||||
ApplicationHistoryStore store = new MemoryApplicationHistoryStore();
|
||||
TestAHSWebApp testAHSWebApp = new TestAHSWebApp();
|
||||
testAHSWebApp.setApplicationHistoryStore(store);
|
||||
ApplicationHistoryManager ahManager =
|
||||
testAHSWebApp.mockApplicationHistoryManager(5, 5, 5);
|
||||
return ahManager;
|
||||
}
|
||||
|
||||
public TestAHSWebServices() {
|
||||
super(new WebAppDescriptor.Builder(
|
||||
"org.apache.hadoop.yarn.server.applicationhistoryservice.webapp")
|
||||
.contextListenerClass(GuiceServletConfig.class)
|
||||
.filterClass(com.google.inject.servlet.GuiceFilter.class)
|
||||
.contextPath("jersey-guice-filter").servletPath("/").build());
|
||||
}
|
||||
|
||||
@Before
|
||||
@Override
|
||||
public void setUp() throws Exception {
|
||||
super.setUp();
|
||||
}
|
||||
|
||||
  @Test
  public void testInvalidUri() throws JSONException, Exception {
    WebResource r = resource();
    String responseStr = "";
    try {
      responseStr =
          r.path("ws").path("v1").path("applicationhistory").path("bogus")
            .accept(MediaType.APPLICATION_JSON).get(String.class);
      fail("should have thrown exception on invalid uri");
    } catch (UniformInterfaceException ue) {
      ClientResponse response = ue.getResponse();
      assertEquals(Status.NOT_FOUND, response.getClientResponseStatus());

      WebServicesTestUtils.checkStringMatch(
        "error string exists and shouldn't", "", responseStr);
    }
  }

  @Test
  public void testInvalidUri2() throws JSONException, Exception {
    WebResource r = resource();
    String responseStr = "";
    try {
      responseStr = r.accept(MediaType.APPLICATION_JSON).get(String.class);
      fail("should have thrown exception on invalid uri");
    } catch (UniformInterfaceException ue) {
      ClientResponse response = ue.getResponse();
      assertEquals(Status.NOT_FOUND, response.getClientResponseStatus());
      WebServicesTestUtils.checkStringMatch(
        "error string exists and shouldn't", "", responseStr);
    }
  }

  @Test
  public void testInvalidAccept() throws JSONException, Exception {
    WebResource r = resource();
    String responseStr = "";
    try {
      responseStr =
          r.path("ws").path("v1").path("applicationhistory")
            .accept(MediaType.TEXT_PLAIN).get(String.class);
      fail("should have thrown exception on invalid uri");
    } catch (UniformInterfaceException ue) {
      ClientResponse response = ue.getResponse();
      assertEquals(Status.INTERNAL_SERVER_ERROR,
        response.getClientResponseStatus());
      WebServicesTestUtils.checkStringMatch(
        "error string exists and shouldn't", "", responseStr);
    }
  }

  @Test
  public void testAppsQuery() throws Exception {
    WebResource r = resource();
    ClientResponse response =
        r.path("ws").path("v1").path("applicationhistory").path("apps")
          .queryParam("state", YarnApplicationState.FINISHED.toString())
          .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
    JSONObject json = response.getEntity(JSONObject.class);
    assertEquals("incorrect number of elements", 1, json.length());
    JSONObject apps = json.getJSONObject("apps");
    assertEquals("incorrect number of elements", 1, apps.length());
    JSONArray array = apps.getJSONArray("app");
    assertEquals("incorrect number of elements", 5, array.length());
  }

  @Test
  public void testSingleApp() throws Exception {
    ApplicationId appId = ApplicationId.newInstance(0, 1);
    WebResource r = resource();
    ClientResponse response =
        r.path("ws").path("v1").path("applicationhistory").path("apps")
          .path(appId.toString()).accept(MediaType.APPLICATION_JSON)
          .get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
    JSONObject json = response.getEntity(JSONObject.class);
    assertEquals("incorrect number of elements", 1, json.length());
    JSONObject app = json.getJSONObject("app");
    assertEquals(appId.toString(), app.getString("appId"));
    assertEquals(appId.toString(), app.get("name"));
    assertEquals(appId.toString(), app.get("diagnosticsInfo"));
    assertEquals("test queue", app.get("queue"));
    assertEquals("test user", app.get("user"));
    assertEquals("test type", app.get("type"));
    assertEquals(FinalApplicationStatus.UNDEFINED.toString(),
      app.get("finalAppStatus"));
    assertEquals(YarnApplicationState.FINISHED.toString(), app.get("appState"));
  }

  @Test
  public void testMultipleAttempts() throws Exception {
    ApplicationId appId = ApplicationId.newInstance(0, 1);
    WebResource r = resource();
    ClientResponse response =
        r.path("ws").path("v1").path("applicationhistory").path("apps")
          .path(appId.toString()).path("appattempts")
          .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
    JSONObject json = response.getEntity(JSONObject.class);
    assertEquals("incorrect number of elements", 1, json.length());
    JSONObject appAttempts = json.getJSONObject("appAttempts");
    assertEquals("incorrect number of elements", 1, appAttempts.length());
    JSONArray array = appAttempts.getJSONArray("appAttempt");
    assertEquals("incorrect number of elements", 5, array.length());
  }

  @Test
  public void testSingleAttempt() throws Exception {
    ApplicationId appId = ApplicationId.newInstance(0, 1);
    ApplicationAttemptId appAttemptId =
        ApplicationAttemptId.newInstance(appId, 1);
    WebResource r = resource();
    ClientResponse response =
        r.path("ws").path("v1").path("applicationhistory").path("apps")
          .path(appId.toString()).path("appattempts")
          .path(appAttemptId.toString()).accept(MediaType.APPLICATION_JSON)
          .get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
    JSONObject json = response.getEntity(JSONObject.class);
    assertEquals("incorrect number of elements", 1, json.length());
    JSONObject appAttempt = json.getJSONObject("appAttempt");
    assertEquals(appAttemptId.toString(), appAttempt.getString("appAttemptId"));
    assertEquals(appAttemptId.toString(), appAttempt.getString("host"));
    assertEquals(appAttemptId.toString(),
      appAttempt.getString("diagnosticsInfo"));
    assertEquals("test tracking url", appAttempt.getString("trackingUrl"));
    assertEquals(YarnApplicationAttemptState.FINISHED.toString(),
      appAttempt.get("appAttemptState"));
  }

  @Test
  public void testMultipleContainers() throws Exception {
    ApplicationId appId = ApplicationId.newInstance(0, 1);
    ApplicationAttemptId appAttemptId =
        ApplicationAttemptId.newInstance(appId, 1);
    WebResource r = resource();
    ClientResponse response =
        r.path("ws").path("v1").path("applicationhistory").path("apps")
          .path(appId.toString()).path("appattempts")
          .path(appAttemptId.toString()).path("containers")
          .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
    JSONObject json = response.getEntity(JSONObject.class);
    assertEquals("incorrect number of elements", 1, json.length());
    JSONObject containers = json.getJSONObject("containers");
    assertEquals("incorrect number of elements", 1, containers.length());
    JSONArray array = containers.getJSONArray("container");
    assertEquals("incorrect number of elements", 5, array.length());
  }

  @Test
  public void testSingleContainer() throws Exception {
    ApplicationId appId = ApplicationId.newInstance(0, 1);
    ApplicationAttemptId appAttemptId =
        ApplicationAttemptId.newInstance(appId, 1);
    ContainerId containerId = ContainerId.newInstance(appAttemptId, 1);
    WebResource r = resource();
    ClientResponse response =
        r.path("ws").path("v1").path("applicationhistory").path("apps")
          .path(appId.toString()).path("appattempts")
          .path(appAttemptId.toString()).path("containers")
          .path(containerId.toString()).accept(MediaType.APPLICATION_JSON)
          .get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
    JSONObject json = response.getEntity(JSONObject.class);
    assertEquals("incorrect number of elements", 1, json.length());
    JSONObject container = json.getJSONObject("container");
    assertEquals(containerId.toString(), container.getString("containerId"));
    assertEquals(containerId.toString(), container.getString("diagnosticsInfo"));
    assertEquals("0", container.getString("allocatedMB"));
    assertEquals("0", container.getString("allocatedVCores"));
    assertEquals(NodeId.newInstance("localhost", 0).toString(),
      container.getString("assignedNodeId"));
    assertEquals(Priority.newInstance(containerId.getId()).toString(),
      container.getString("priority"));
    assertEquals("http://localhost:0/log", container.getString("logUrl"));
    assertEquals(ContainerState.COMPLETE.toString(),
      container.getString("containerState"));
  }

}
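Outside of the JUnit harness, the same ws/v1/applicationhistory paths exercised by TestAHSWebServices above can be queried with an ordinary Jersey 1.x client. The sketch below is an illustration only and is not part of this patch: the host and port in the URL are placeholders for wherever the ApplicationHistoryServer web app is actually bound, and the class name AHSRestClientSketch is invented for the example.

import javax.ws.rs.core.MediaType;

import com.sun.jersey.api.client.Client;
import com.sun.jersey.api.client.WebResource;

public class AHSRestClientSketch {
  public static void main(String[] args) {
    Client client = Client.create();
    // Placeholder address; point this at the real AHS web app address in your cluster.
    WebResource r = client.resource("http://localhost:8188");
    // List finished applications as JSON, mirroring testAppsQuery() above.
    String apps =
        r.path("ws").path("v1").path("applicationhistory").path("apps")
          .queryParam("state", "FINISHED")
          .accept(MediaType.APPLICATION_JSON).get(String.class);
    System.out.println(apps);
  }
}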
@@ -0,0 +1,129 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.yarn.server.api;

import java.io.IOException;
import java.util.Map;

import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptReport;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ContainerReport;

@Public
@Unstable
public interface ApplicationContext {
  /**
   * This method returns the {@link ApplicationReport} for the specified
   * {@link ApplicationId}.
   *
   * @param appId
   *          {@link ApplicationId} of the application
   * @return {@link ApplicationReport} for the ApplicationId.
   * @throws IOException
   */
  @Public
  @Unstable
  ApplicationReport getApplication(ApplicationId appId) throws IOException;

  /**
   * This method returns the {@link ApplicationReport}s of all applications.
   *
   * @return map of {@link ApplicationId} to {@link ApplicationReport}s.
   * @throws IOException
   */
  @Public
  @Unstable
  Map<ApplicationId, ApplicationReport> getAllApplications() throws IOException;

  /**
   * An application can have multiple application attempts. This method returns
   * all {@link ApplicationAttemptReport}s for the application.
   *
   * @param appId
   *          {@link ApplicationId} of the application
   * @return all {@link ApplicationAttemptReport}s for the Application.
   * @throws IOException
   */
  @Public
  @Unstable
  Map<ApplicationAttemptId, ApplicationAttemptReport> getApplicationAttempts(
      ApplicationId appId) throws IOException;

  /**
   * This method returns the {@link ApplicationAttemptReport} for the specified
   * {@link ApplicationAttemptId}.
   *
   * @param appAttemptId
   *          {@link ApplicationAttemptId}
   * @return {@link ApplicationAttemptReport} for ApplicationAttemptId
   * @throws IOException
   */
  @Public
  @Unstable
  ApplicationAttemptReport getApplicationAttempt(
      ApplicationAttemptId appAttemptId) throws IOException;

  /**
   * This method returns the {@link ContainerReport} for the specified
   * {@link ContainerId}.
   *
   * @param containerId
   *          {@link ContainerId}
   * @return {@link ContainerReport} for ContainerId
   * @throws IOException
   */
  @Public
  @Unstable
  ContainerReport getContainer(ContainerId containerId) throws IOException;

  /**
   * This method returns the {@link ContainerReport} of the ApplicationMaster
   * container for the specified {@link ApplicationAttemptId}.
   *
   * @param appAttemptId
   *          {@link ApplicationAttemptId}
   * @return {@link ContainerReport} of the AM container for ApplicationAttemptId
   * @throws IOException
   */
  @Public
  @Unstable
  ContainerReport getAMContainer(ApplicationAttemptId appAttemptId)
      throws IOException;

  /**
   * This method returns a Map of {@link ContainerId} to {@link ContainerReport}
   * for the specified {@link ApplicationAttemptId}.
   *
   * @param appAttemptId
   *          {@link ApplicationAttemptId}
   * @return Map of {@link ContainerId} to {@link ContainerReport} for
   *         ApplicationAttemptId
   * @throws IOException
   */
  @Public
  @Unstable
  Map<ContainerId, ContainerReport> getContainers(
      ApplicationAttemptId appAttemptId) throws IOException;
}
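As a rough usage illustration for the new interface, the helper below walks one application's recorded history through ApplicationContext. It is a sketch only: the class ApplicationHistoryWalker is invented for this example, and it assumes the report records expose the usual getters (getApplicationAttemptId, getContainerId, getContainerState) introduced on this branch.

import java.io.IOException;
import java.util.Map;

import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptReport;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ContainerReport;
import org.apache.hadoop.yarn.server.api.ApplicationContext;

public class ApplicationHistoryWalker {
  // Prints every attempt and container recorded for the given application.
  public static void walk(ApplicationContext context, ApplicationId appId)
      throws IOException {
    Map<ApplicationAttemptId, ApplicationAttemptReport> attempts =
        context.getApplicationAttempts(appId);
    for (ApplicationAttemptReport attempt : attempts.values()) {
      System.out.println("attempt: " + attempt.getApplicationAttemptId());
      Map<ContainerId, ContainerReport> containers =
          context.getContainers(attempt.getApplicationAttemptId());
      for (ContainerReport container : containers.values()) {
        System.out.println("  container: " + container.getContainerId()
            + " state=" + container.getContainerState());
      }
    }
  }
}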