Merge trunk into HA branch.
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/HDFS-1623@1228339 13f79535-47bb-0310-9956-ffa450edef68
commit d680080da0
@@ -34,26 +34,6 @@
   </properties>
 
   <build>
-    <pluginManagement>
-      <plugins>
-        <plugin>
-          <groupId>org.apache.maven.plugins</groupId>
-          <artifactId>maven-enforcer-plugin</artifactId>
-          <version>1.0</version>
-        </plugin>
-        <plugin>
-          <groupId>org.apache.maven.plugins</groupId>
-          <artifactId>maven-assembly-plugin</artifactId>
-          <version>2.2-beta-3</version>
-        </plugin>
-        <plugin>
-          <groupId>org.apache.rat</groupId>
-          <artifactId>apache-rat-plugin</artifactId>
-          <version>0.7</version>
-        </plugin>
-      </plugins>
-    </pluginManagement>
-
     <plugins>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
@@ -94,6 +94,9 @@
         <include>${project.artifactId}-${project.version}-sources.jar</include>
         <include>${project.artifactId}-${project.version}-test-sources.jar</include>
       </includes>
+      <excludes>
+        <exclude>hadoop-tools-dist-*.jar</exclude>
+      </excludes>
     </fileSet>
     <fileSet>
       <directory>${basedir}/dev-support/jdiff</directory>
@@ -27,14 +27,6 @@
         <include>*</include>
       </includes>
     </fileSet>
-    <!-- Readme, licenses, etc. -->
-    <fileSet>
-      <directory>${basedir}</directory>
-      <outputDirectory>/</outputDirectory>
-      <includes>
-        <include>*.txt</include>
-      </includes>
-    </fileSet>
     <fileSet>
       <directory>${basedir}/src/main/sbin</directory>
       <outputDirectory>/sbin</outputDirectory>
@@ -46,16 +46,9 @@ Trunk (unreleased changes)
     if the override value is same as the final parameter value.
     (Ravi Prakash via suresh)
 
-    HADOOP-7737. normalize hadoop-mapreduce & hadoop-dist dist/tar build with
-    common/hdfs. (tucu)
-
-    HADOOP-7743. Add Maven profile to create a full source tarball. (tucu)
-
     HADOOP-7729. Send back valid HTTP response if user hits IPC port with
     HTTP GET. (todd)
 
-    HADOOP-7758. Make GlobFilter class public. (tucu)
-
     HADOOP-7728. Enable task memory management to be configurable in hadoop
     config setup script. (ramya)
 
@@ -67,9 +60,7 @@ Trunk (unreleased changes)
     HADOOP-7688. Add servlet handler check in HttpServer.start().
     (Uma Maheswara Rao G via szetszwo)
 
-    HADOOP-7590. Mavenize streaming and MR examples. (tucu)
-
     HADOOP-7862. Move the support for multiple protocols to lower layer so
     that Writable, PB and Avro can all use it (Sanjay)
 
     HADOOP-7876. Provided access to encoded key in DelegationKey for
@@ -92,6 +83,10 @@ Trunk (unreleased changes)
     hostname in token instead of IP to allow server IP change.
     (Daryn Sharp via suresh)
 
+    HADOOP-7957. Classes deriving GetGroupsBase should be able to override
+    proxy creation. (jitendra)
+
+    HADOOP-4515. Configuration#getBoolean must not be case sensitive. (Sho Shimauchi via harsh)
 
   BUGS
 
@@ -132,29 +127,14 @@ Trunk (unreleased changes)
     HADOOP-7833. Fix findbugs warnings in protobuf generated code.
     (John Lee via suresh)
 
-    HADOOP-7853. multiple javax security configurations cause conflicts.
-    (daryn via tucu)
-
-    HDFS-2614. hadoop dist tarball is missing hdfs headers. (tucu)
-
-    HADOOP-7874. native libs should be under lib/native/ dir. (tucu)
-
-    HADOOP-7887. KerberosAuthenticatorHandler is not setting
-    KerberosName name rules from configuration. (tucu)
-
     HADOOP-7888. TestFailoverProxy fails intermittently on trunk. (Jason Lowe
     via atm)
 
     HADOOP-7897. ProtobufRpcEngine client side exception mechanism is not
     consistent with WritableRpcEngine. (suresh)
 
-    HADOOP-7902. skipping name rules setting (if already set) should be done
-    on UGI initialization only. (tucu)
-
     HADOOP-7913 Fix bug in ProtoBufRpcEngine (sanjay)
 
-    HADOOP-7810. move hadoop archive to core from tools. (tucu)
-
     HADOOP-7892. IPC logs too verbose after "RpcKind" introduction (todd)
 
     HADOOP-7931. o.a.h.ipc.WritableRpcEngine should have a way to force
@@ -164,8 +144,6 @@ Trunk (unreleased changes)
 
     HADOOP-7761. Improve the performance of raw comparisons. (todd)
 
-    HADOOP_7917. compilation of protobuf files fails in windows/cygwin. (tucu)
-
 Release 0.23.1 - Unreleased
 
   INCOMPATIBLE CHANGES
@@ -213,6 +191,17 @@ Release 0.23.1 - Unreleased
     HADOOP-7933. Add a getDelegationTokens api to FileSystem which checks
     for known tokens in the passed Credentials object. (sseth)
 
+    HADOOP-7737. normalize hadoop-mapreduce & hadoop-dist dist/tar build with
+    common/hdfs. (tucu)
+
+    HADOOP-7743. Add Maven profile to create a full source tarball. (tucu)
+
+    HADOOP-7758. Make GlobFilter class public. (tucu)
+
+    HADOOP-7590. Mavenize streaming and MR examples. (tucu)
+
+    HADOOP-7934. Normalize dependencies versions across all modules. (tucu)
+
   OPTIMIZATIONS
 
   BUG FIXES
@@ -252,6 +241,27 @@ Release 0.23.1 - Unreleased
     HADOOP-7949. Updated maxIdleTime default in the code to match
     core-default.xml (eli)
 
+    HADOOP-7853. multiple javax security configurations cause conflicts.
+    (daryn via tucu)
+
+    HDFS-2614. hadoop dist tarball is missing hdfs headers. (tucu)
+
+    HADOOP-7874. native libs should be under lib/native/ dir. (tucu)
+
+    HADOOP-7887. KerberosAuthenticatorHandler is not setting
+    KerberosName name rules from configuration. (tucu)
+
+    HADOOP-7902. skipping name rules setting (if already set) should be done
+    on UGI initialization only. (tucu)
+
+    HADOOP-7810. move hadoop archive to core from tools. (tucu)
+
+    HADOOP_7917. compilation of protobuf files fails in windows/cygwin. (tucu)
+
+    HADOOP-7907. hadoop-tools JARs are not part of the distro. (tucu)
+
+    HADOOP-7936. There's a Hoop README in the root dir of the tarball. (tucu)
+
 Release 0.23.0 - 2011-11-01
 
   INCOMPATIBLE CHANGES
@@ -973,6 +983,9 @@ Release 0.22.1 - Unreleased
 
   BUG FIXES
 
+    HADOOP-7937. Forward port SequenceFile#syncFs and friends from Hadoop 1.x.
+    (tomwhite)
+
 Release 0.22.0 - 2011-11-29
 
   INCOMPATIBLE CHANGES
@@ -826,6 +826,12 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
    */
   public boolean getBoolean(String name, boolean defaultValue) {
     String valueString = getTrimmed(name);
+    if (null == valueString || "".equals(valueString)) {
+      return defaultValue;
+    }
+
+    valueString = valueString.toLowerCase();
+
     if ("true".equals(valueString))
       return true;
     else if ("false".equals(valueString))
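
The hunk above is the HADOOP-4515 change: a missing or empty value now falls back to the default, and the comparison becomes case-insensitive (trimming via getTrimmed() was already there). A minimal sketch of the resulting behavior, with illustrative property names only:

    Configuration conf = new Configuration(false);
    conf.set("example.flag.upper", "TRUE");      // parsed as false before this change
    conf.set("example.flag.padded", " false ");  // whitespace already handled by getTrimmed()
    conf.set("example.flag.empty", "");
    assert conf.getBoolean("example.flag.upper", false) == true;
    assert conf.getBoolean("example.flag.padded", true) == false;
    assert conf.getBoolean("example.flag.empty", true) == true;   // empty -> default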
@@ -1193,6 +1193,13 @@ public class SequenceFile {
       }
     }
 
+    /** flush all currently written data to the file system */
+    public void syncFs() throws IOException {
+      if (out != null) {
+        out.sync();                               // flush contents to file system
+      }
+    }
+
     /** Returns the configuration of this file. */
     Configuration getConf() { return conf; }
 
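
This is the HADOOP-7937 forward port: SequenceFile.Writer#syncFs lets callers push buffered records to the filesystem without closing the writer. A hedged usage sketch (path and key/value types are illustrative, imports omitted):

    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);
    SequenceFile.Writer writer = SequenceFile.createWriter(fs, conf,
        new Path("/tmp/example.seq"), Text.class, IntWritable.class);
    writer.append(new Text("key"), new IntWritable(1));
    writer.syncFs();   // flush what has been written so far to the file system
    writer.close();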
@@ -94,7 +94,7 @@ public abstract class GetGroupsBase extends Configured implements Tool {
    * @return A {@link GetUserMappingsProtocol} client proxy.
    * @throws IOException
    */
-  private GetUserMappingsProtocol getUgmProtocol() throws IOException {
+  protected GetUserMappingsProtocol getUgmProtocol() throws IOException {
     GetUserMappingsProtocol userGroupMappingProtocol =
         RPC.getProxy(GetUserMappingsProtocol.class,
             GetUserMappingsProtocol.versionID,
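
Widening getUgmProtocol() from private to protected is the HADOOP-7957 change noted in CHANGES.txt: subclasses of GetGroupsBase can now substitute their own proxy creation. A hedged sketch of a subclass doing that with the protobuf translator added later in this commit (the class name GetGroupsForHdfs is hypothetical, and GetGroupsBase is assumed to also declare an abstract getProtocolAddress(Configuration); imports omitted):

    public class GetGroupsForHdfs extends GetGroupsBase {
      protected GetGroupsForHdfs(Configuration conf) {
        super(conf);
      }

      @Override
      protected InetSocketAddress getProtocolAddress(Configuration conf)
          throws IOException {
        return NameNode.getAddress(conf);
      }

      @Override
      protected GetUserMappingsProtocol getUgmProtocol() throws IOException {
        // Replace the default Writable-based proxy with the PB translator.
        return new GetUserMappingsProtocolClientSideTranslatorPB(
            getProtocolAddress(getConf()),
            UserGroupInformation.getCurrentUser(), getConf());
      }
    }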
@@ -451,6 +451,9 @@ public class TestConfiguration extends TestCase {
     appendProperty("test.bool3", " true ");
     appendProperty("test.bool4", " false ");
     appendProperty("test.bool5", "foo");
+    appendProperty("test.bool6", "TRUE");
+    appendProperty("test.bool7", "FALSE");
+    appendProperty("test.bool8", "");
     endConfig();
     Path fileResource = new Path(CONFIG);
     conf.addResource(fileResource);
@@ -459,6 +462,9 @@ public class TestConfiguration extends TestCase {
     assertEquals(true, conf.getBoolean("test.bool3", false));
     assertEquals(false, conf.getBoolean("test.bool4", true));
     assertEquals(true, conf.getBoolean("test.bool5", true));
+    assertEquals(true, conf.getBoolean("test.bool6", false));
+    assertEquals(false, conf.getBoolean("test.bool7", true));
+    assertEquals(false, conf.getBoolean("test.bool8", false));
   }
 
   public void testFloatValues() throws IOException {
@@ -118,6 +118,7 @@
       run cp -r $ROOT/hadoop-hdfs-project/hadoop-hdfs/target/hadoop-hdfs-${project.version}/* .
       run cp -r $ROOT/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/hadoop-hdfs-httpfs-${project.version}/* .
       run cp -r $ROOT/hadoop-mapreduce-project/target/hadoop-mapreduce-${project.version}/* .
+      run cp -r $ROOT/hadoop-tools/hadoop-tools-dist/target/hadoop-tools-dist-${project.version}/* .
       echo
       echo "Hadoop dist layout available at: ${project.build.directory}/hadoop-${project.version}"
       echo
@@ -270,11 +270,11 @@
 
     <plugins>
       <plugin>
         <!-- workaround for filtered/unfiltered resources in same directory -->
         <!-- remove when maven-eclipse-plugin 2.9 is available -->
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-eclipse-plugin</artifactId>
         <version>2.6</version>
       </plugin>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
@@ -27,9 +27,6 @@ Trunk (unreleased changes)
     HDFS-2430. The number of failed or low-resource volumes the NN can tolerate
     should be configurable. (atm)
 
-    HDFS-2178. Contributing Hoop to HDFS, replacement for HDFS proxy with
-    read/write capabilities. (tucu)
-
     HDFS-2642. Protobuf translators for DatanodeProtocol. (jitendra)
 
     HDFS-2647. Used protobuf based RPC for InterDatanodeProtocol,
|
@ -45,6 +42,9 @@ Trunk (unreleased changes)
|
||||||
|
|
||||||
HDFS-2661. Enable protobuf RPC for DatanodeProtocol. (jitendra)
|
HDFS-2661. Enable protobuf RPC for DatanodeProtocol. (jitendra)
|
||||||
|
|
||||||
|
HDFS-2697. Move RefreshAuthPolicy, RefreshUserMappings, GetUserMappings
|
||||||
|
protocol to protocol buffers. (jitendra)
|
||||||
|
|
||||||
IMPROVEMENTS
|
IMPROVEMENTS
|
||||||
|
|
||||||
HADOOP-7524 Change RPC to allow multiple protocols including multuple
|
HADOOP-7524 Change RPC to allow multiple protocols including multuple
|
||||||
|
@@ -76,11 +76,6 @@ Trunk (unreleased changes)
 
     HDFS-2181 Separate HDFS Client wire protocol data types (sanjay)
 
-    HDFS-2294. Download of commons-daemon TAR should not be under target (tucu)
-
-    HDFS-2322. the build fails in Windows because commons-daemon TAR cannot be
-    fetched. (tucu)
-
     HDFS-2489. Move Finalize and Register to separate file out of
     DatanodeCommand.java. (suresh)
 
@@ -109,8 +104,6 @@ Trunk (unreleased changes)
 
     HDFS-2597 ClientNameNodeProtocol in Protocol Buffers (sanjay)
 
-    HDFS-2511. Add dev script to generate HDFS protobufs. (tucu)
-
     HDFS-2651 ClientNameNodeProtocol Translators for Protocol Buffers (sanjay)
 
     HDFS-2650. Replace @inheritDoc with @Override. (Hari Mankude via suresh).
@@ -172,9 +165,6 @@ Trunk (unreleased changes)
     HDFS-2532. TestDfsOverAvroRpc timing out in trunk (Uma Maheswara Rao G
     via todd)
 
-    HDFS-2606. webhdfs client filesystem impl must set the content-type
-    header for create/append. (tucu)
-
     HDFS-1765. Block Replication should respect under-replication
     block priority. (Uma Maheswara Rao G via eli)
 
@@ -186,19 +176,6 @@ Trunk (unreleased changes)
     HDFS-2700. Fix failing TestDataNodeMultipleRegistrations in trunk
     (Uma Maheswara Rao G via todd)
 
-    HDFS-2658. HttpFS introduced 70 javadoc warnings. (tucu)
-
-    HDFS-2646. Hadoop HttpFS introduced 4 findbug warnings. (tucu)
-
-    HDFS-2657. TestHttpFSServer and TestServerWebApp are failing on trunk.
-    (tucu)
-
-    HttpFS server should check that upload requests have correct
-    content-type. (tucu)
-
-    HDFS-2707. HttpFS should read the hadoop-auth secret from a file
-    instead inline from the configuration. (tucu)
-
 Release 0.23.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES
@@ -214,6 +191,9 @@ Release 0.23.1 - UNRELEASED
     HDFS-2545. Change WebHDFS to support multiple namenodes in federation.
     (szetszwo)
 
+    HDFS-2178. Contributing Hoop to HDFS, replacement for HDFS proxy with
+    read/write capabilities. (tucu)
+
   IMPROVEMENTS
     HDFS-2560. Refactor BPOfferService to be a static inner class (todd)
 
@@ -265,6 +245,13 @@ Release 0.23.1 - UNRELEASED
 
     HDFS-2710. Add HDFS tests related to HADOOP-7933. (sid via suresh)
 
+    HDFS-2294. Download of commons-daemon TAR should not be under target (tucu)
+
+    HDFS-2322. the build fails in Windows because commons-daemon TAR cannot be
+    fetched. (tucu)
+
+    HDFS-2511. Add dev script to generate HDFS protobufs. (tucu)
+
   OPTIMIZATIONS
 
     HDFS-2130. Switch default checksum to CRC32C. (todd)
@@ -312,6 +299,22 @@ Release 0.23.1 - UNRELEASED
     HDFS-2706. Use configuration for blockInvalidateLimit if it is set.
     (szetszwo)
 
+    HDFS-2606. webhdfs client filesystem impl must set the content-type
+    header for create/append. (tucu)
+
+    HDFS-2658. HttpFS introduced 70 javadoc warnings. (tucu)
+
+    HDFS-2646. Hadoop HttpFS introduced 4 findbug warnings. (tucu)
+
+    HDFS-2657. TestHttpFSServer and TestServerWebApp are failing on trunk.
+    (tucu)
+
+    HttpFS server should check that upload requests have correct
+    content-type. (tucu)
+
+    HDFS-2707. HttpFS should read the hadoop-auth secret from a file
+    instead inline from the configuration. (tucu)
+
 Release 0.23.0 - 2011-11-01
 
   INCOMPATIBLE CHANGES
|
@ -322,48 +322,6 @@
|
||||||
</tasks>
|
</tasks>
|
||||||
</configuration>
|
</configuration>
|
||||||
</execution>
|
</execution>
|
||||||
</executions>
|
|
||||||
</plugin>
|
|
||||||
<plugin>
|
|
||||||
<groupId>org.apache.maven.plugins</groupId>
|
|
||||||
<artifactId>maven-javadoc-plugin</artifactId>
|
|
||||||
<configuration>
|
|
||||||
<excludePackageNames>org.apache.hadoop.hdfs.protocol.proto</excludePackageNames>
|
|
||||||
</configuration>
|
|
||||||
</plugin>
|
|
||||||
<plugin>
|
|
||||||
<groupId>org.apache.rat</groupId>
|
|
||||||
<artifactId>apache-rat-plugin</artifactId>
|
|
||||||
<configuration>
|
|
||||||
<excludes>
|
|
||||||
<exclude>CHANGES.txt</exclude>
|
|
||||||
<exclude>CHANGES.HDFS-1623.txt</exclude>
|
|
||||||
<exclude>.idea/**</exclude>
|
|
||||||
<exclude>src/main/conf/*</exclude>
|
|
||||||
<exclude>src/main/docs/**</exclude>
|
|
||||||
<exclude>dev-support/findbugsExcludeFile.xml</exclude>
|
|
||||||
<exclude>dev-support/checkstyle*</exclude>
|
|
||||||
<exclude>dev-support/jdiff/**</exclude>
|
|
||||||
<exclude>dev-support/*tests</exclude>
|
|
||||||
<exclude>src/main/native/*</exclude>
|
|
||||||
<exclude>src/main/native/config/*</exclude>
|
|
||||||
<exclude>src/main/native/m4/*</exclude>
|
|
||||||
<exclude>src/test/empty-file</exclude>
|
|
||||||
<exclude>src/test/all-tests</exclude>
|
|
||||||
<exclude>src/test/resources/*.tgz</exclude>
|
|
||||||
<exclude>src/test/resources/data*</exclude>
|
|
||||||
<exclude>src/test/resources/editsStored*</exclude>
|
|
||||||
<exclude>src/test/resources/empty-file</exclude>
|
|
||||||
<exclude>src/main/webapps/datanode/robots.txt</exclude>
|
|
||||||
<exclude>src/main/docs/releasenotes.html</exclude>
|
|
||||||
<exclude>src/contrib/**</exclude>
|
|
||||||
</excludes>
|
|
||||||
</configuration>
|
|
||||||
</plugin>
|
|
||||||
<plugin>
|
|
||||||
<groupId>org.apache.maven.plugins</groupId>
|
|
||||||
<artifactId>maven-antrun-plugin</artifactId>
|
|
||||||
<executions>
|
|
||||||
<execution>
|
<execution>
|
||||||
<id>xprepare-package-hadoop-daemon</id>
|
<id>xprepare-package-hadoop-daemon</id>
|
||||||
<phase>prepare-package</phase>
|
<phase>prepare-package</phase>
|
||||||
|
@ -409,6 +367,42 @@
|
||||||
</execution>
|
</execution>
|
||||||
</executions>
|
</executions>
|
||||||
</plugin>
|
</plugin>
|
||||||
|
<plugin>
|
||||||
|
<groupId>org.apache.maven.plugins</groupId>
|
||||||
|
<artifactId>maven-javadoc-plugin</artifactId>
|
||||||
|
<configuration>
|
||||||
|
<excludePackageNames>org.apache.hadoop.hdfs.protocol.proto</excludePackageNames>
|
||||||
|
</configuration>
|
||||||
|
</plugin>
|
||||||
|
<plugin>
|
||||||
|
<groupId>org.apache.rat</groupId>
|
||||||
|
<artifactId>apache-rat-plugin</artifactId>
|
||||||
|
<configuration>
|
||||||
|
<excludes>
|
||||||
|
<exclude>CHANGES.txt</exclude>
|
||||||
|
<exclude>CHANGES.HDFS-1623.txt</exclude>
|
||||||
|
<exclude>.idea/**</exclude>
|
||||||
|
<exclude>src/main/conf/*</exclude>
|
||||||
|
<exclude>src/main/docs/**</exclude>
|
||||||
|
<exclude>dev-support/findbugsExcludeFile.xml</exclude>
|
||||||
|
<exclude>dev-support/checkstyle*</exclude>
|
||||||
|
<exclude>dev-support/jdiff/**</exclude>
|
||||||
|
<exclude>dev-support/*tests</exclude>
|
||||||
|
<exclude>src/main/native/*</exclude>
|
||||||
|
<exclude>src/main/native/config/*</exclude>
|
||||||
|
<exclude>src/main/native/m4/*</exclude>
|
||||||
|
<exclude>src/test/empty-file</exclude>
|
||||||
|
<exclude>src/test/all-tests</exclude>
|
||||||
|
<exclude>src/test/resources/*.tgz</exclude>
|
||||||
|
<exclude>src/test/resources/data*</exclude>
|
||||||
|
<exclude>src/test/resources/editsStored*</exclude>
|
||||||
|
<exclude>src/test/resources/empty-file</exclude>
|
||||||
|
<exclude>src/main/webapps/datanode/robots.txt</exclude>
|
||||||
|
<exclude>src/main/docs/releasenotes.html</exclude>
|
||||||
|
<exclude>src/contrib/**</exclude>
|
||||||
|
</excludes>
|
||||||
|
</configuration>
|
||||||
|
</plugin>
|
||||||
</plugins>
|
</plugins>
|
||||||
</build>
|
</build>
|
||||||
|
|
||||||
|
|
|
@ -41,27 +41,28 @@
|
||||||
<dependency>
|
<dependency>
|
||||||
<groupId>org.apache.hadoop</groupId>
|
<groupId>org.apache.hadoop</groupId>
|
||||||
<artifactId>hadoop-common</artifactId>
|
<artifactId>hadoop-common</artifactId>
|
||||||
<version>0.24.0-SNAPSHOT</version>
|
|
||||||
<scope>provided</scope>
|
<scope>provided</scope>
|
||||||
</dependency>
|
</dependency>
|
||||||
<dependency>
|
<dependency>
|
||||||
<groupId>org.apache.hadoop</groupId>
|
<groupId>org.apache.hadoop</groupId>
|
||||||
<artifactId>hadoop-hdfs</artifactId>
|
<artifactId>hadoop-hdfs</artifactId>
|
||||||
<version>0.24.0-SNAPSHOT</version>
|
|
||||||
<scope>provided</scope>
|
<scope>provided</scope>
|
||||||
</dependency>
|
</dependency>
|
||||||
<dependency>
|
<dependency>
|
||||||
<groupId>org.apache.hadoop</groupId>
|
<groupId>org.apache.hadoop</groupId>
|
||||||
<artifactId>hadoop-hdfs</artifactId>
|
<artifactId>hadoop-hdfs</artifactId>
|
||||||
<version>0.24.0-SNAPSHOT</version>
|
|
||||||
<type>test-jar</type>
|
<type>test-jar</type>
|
||||||
<scope>test</scope>
|
<scope>test</scope>
|
||||||
</dependency>
|
</dependency>
|
||||||
<dependency>
|
<dependency>
|
||||||
<groupId>org.apache.bookkeeper</groupId>
|
<groupId>org.apache.bookkeeper</groupId>
|
||||||
<artifactId>bookkeeper-server</artifactId>
|
<artifactId>bookkeeper-server</artifactId>
|
||||||
<version>4.0.0</version>
|
|
||||||
<scope>compile</scope>
|
<scope>compile</scope>
|
||||||
</dependency>
|
</dependency>
|
||||||
|
<dependency>
|
||||||
|
<groupId>junit</groupId>
|
||||||
|
<artifactId>junit</artifactId>
|
||||||
|
<scope>test</scope>
|
||||||
|
</dependency>
|
||||||
</dependencies>
|
</dependencies>
|
||||||
</project>
|
</project>
|
||||||
|
|
|
@@ -0,0 +1,89 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdfs.protocolPB;
+
+import java.io.Closeable;
+import java.io.IOException;
+import java.net.InetSocketAddress;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdfs.protocol.proto.GetUserMappingsProtocolProtos.GetGroupsForUserRequestProto;
+import org.apache.hadoop.hdfs.protocol.proto.GetUserMappingsProtocolProtos.GetGroupsForUserResponseProto;
+import org.apache.hadoop.hdfs.protocolR23Compatible.ProtocolSignatureWritable;
+import org.apache.hadoop.hdfs.server.namenode.NameNode;
+import org.apache.hadoop.ipc.ProtobufHelper;
+import org.apache.hadoop.ipc.ProtobufRpcEngine;
+import org.apache.hadoop.ipc.ProtocolSignature;
+import org.apache.hadoop.ipc.RPC;
+import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.tools.GetUserMappingsProtocol;
+
+import com.google.protobuf.RpcController;
+import com.google.protobuf.ServiceException;
+
+public class GetUserMappingsProtocolClientSideTranslatorPB implements
+    GetUserMappingsProtocol, Closeable {
+
+  /** RpcController is not used and hence is set to null */
+  private final static RpcController NULL_CONTROLLER = null;
+  private final GetUserMappingsProtocolPB rpcProxy;
+
+  public GetUserMappingsProtocolClientSideTranslatorPB(
+      InetSocketAddress nameNodeAddr, UserGroupInformation ugi,
+      Configuration conf) throws IOException {
+    RPC.setProtocolEngine(conf, GetUserMappingsProtocolPB.class,
+        ProtobufRpcEngine.class);
+    rpcProxy = RPC.getProxy(GetUserMappingsProtocolPB.class,
+        RPC.getProtocolVersion(GetUserMappingsProtocolPB.class),
+        NameNode.getAddress(conf), ugi, conf,
+        NetUtils.getSocketFactory(conf, GetUserMappingsProtocol.class));
+  }
+
+  @Override
+  public long getProtocolVersion(String protocol, long clientVersion)
+      throws IOException {
+    return rpcProxy.getProtocolVersion(protocol, clientVersion);
+  }
+
+  @Override
+  public ProtocolSignature getProtocolSignature(String protocol,
+      long clientVersion, int clientMethodsHash) throws IOException {
+    return ProtocolSignatureWritable.convert(rpcProxy.getProtocolSignature2(
+        protocol, clientVersion, clientMethodsHash));
+  }
+
+  @Override
+  public void close() throws IOException {
+    RPC.stopProxy(rpcProxy);
+  }
+
+  @Override
+  public String[] getGroupsForUser(String user) throws IOException {
+    GetGroupsForUserRequestProto request = GetGroupsForUserRequestProto
+        .newBuilder().setUser(user).build();
+    GetGroupsForUserResponseProto resp;
+    try {
+      resp = rpcProxy.getGroupsForUser(NULL_CONTROLLER, request);
+    } catch (ServiceException se) {
+      throw ProtobufHelper.getRemoteException(se);
+    }
+    return resp.getGroupsList().toArray(new String[resp.getGroupsCount()]);
+  }
+}
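
The client-side translator above adapts the protobuf-generated stub back to the plain GetUserMappingsProtocol interface, so existing callers stay unchanged. A hedged usage sketch (the user name is illustrative, imports omitted):

    Configuration conf = new HdfsConfiguration();
    GetUserMappingsProtocolClientSideTranslatorPB proxy =
        new GetUserMappingsProtocolClientSideTranslatorPB(
            NameNode.getAddress(conf),
            UserGroupInformation.getCurrentUser(), conf);
    try {
      for (String group : proxy.getGroupsForUser("alice")) {
        System.out.println(group);
      }
    } finally {
      proxy.close();   // releases the underlying RPC proxy
    }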
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdfs.protocolPB;
+
+import java.io.IOException;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.hdfs.protocol.proto.GetUserMappingsProtocolProtos.GetUserMappingsProtocolService;
+import org.apache.hadoop.hdfs.protocolR23Compatible.ProtocolSignatureWritable;
+import org.apache.hadoop.ipc.ProtocolInfo;
+import org.apache.hadoop.ipc.VersionedProtocol;
+
+@ProtocolInfo(
+    protocolName = "org.apache.hadoop.tools.GetUserMappingsProtocol",
+    protocolVersion = 1)
+@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
+@InterfaceStability.Evolving
+public interface GetUserMappingsProtocolPB extends
+    GetUserMappingsProtocolService.BlockingInterface, VersionedProtocol {
+
+  /**
+   * This method is defined to get the protocol signature using
+   * the R23 protocol - hence we have added the suffix of 2 the method name
+   * to avoid conflict.
+   */
+  public ProtocolSignatureWritable getProtocolSignature2(String protocol,
+      long clientVersion, int clientMethodsHash) throws IOException;
+}
@@ -0,0 +1,96 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdfs.protocolPB;
+
+import java.io.IOException;
+
+import org.apache.hadoop.hdfs.protocol.proto.GetUserMappingsProtocolProtos.GetGroupsForUserRequestProto;
+import org.apache.hadoop.hdfs.protocol.proto.GetUserMappingsProtocolProtos.GetGroupsForUserResponseProto;
+import org.apache.hadoop.hdfs.protocolR23Compatible.ProtocolSignatureWritable;
+import org.apache.hadoop.ipc.ProtocolSignature;
+import org.apache.hadoop.ipc.RPC;
+import org.apache.hadoop.tools.GetUserMappingsProtocol;
+
+import com.google.protobuf.RpcController;
+import com.google.protobuf.ServiceException;
+
+public class GetUserMappingsProtocolServerSideTranslatorPB implements
+    GetUserMappingsProtocolPB {
+
+  private final GetUserMappingsProtocol impl;
+
+  public GetUserMappingsProtocolServerSideTranslatorPB(
+      GetUserMappingsProtocol impl) {
+    this.impl = impl;
+  }
+
+  @Override
+  public long getProtocolVersion(String protocol, long clientVersion)
+      throws IOException {
+    return RPC.getProtocolVersion(GetUserMappingsProtocolPB.class);
+  }
+
+  @Override
+  public ProtocolSignature getProtocolSignature(String protocol,
+      long clientVersion, int clientMethodsHash) throws IOException {
+    /**
+     * Don't forward this to the server. The protocol version and signature is
+     * that of {@link GetUserMappingsProtocol}
+     */
+    if (!protocol.equals(RPC
+        .getProtocolName(GetUserMappingsProtocolPB.class))) {
+      throw new IOException("Namenode Serverside implements "
+          + RPC.getProtocolName(GetUserMappingsProtocolPB.class)
+          + ". The following requested protocol is unknown: " + protocol);
+    }
+
+    return ProtocolSignature.getProtocolSignature(clientMethodsHash,
+        RPC.getProtocolVersion(GetUserMappingsProtocolPB.class),
+        GetUserMappingsProtocolPB.class);
+  }
+
+  @Override
+  public ProtocolSignatureWritable getProtocolSignature2(String protocol,
+      long clientVersion, int clientMethodsHash) throws IOException {
+    /**
+     * Don't forward this to the server. The protocol version and signature is
+     * that of {@link GetUserMappingsProtocolPB}
+     */
+    return ProtocolSignatureWritable.convert(this.getProtocolSignature(
+        protocol, clientVersion, clientMethodsHash));
+  }
+
+  @Override
+  public GetGroupsForUserResponseProto getGroupsForUser(
+      RpcController controller, GetGroupsForUserRequestProto request)
+      throws ServiceException {
+    String[] groups;
+    try {
+      groups = impl.getGroupsForUser(request.getUser());
+    } catch (IOException e) {
+      throw new ServiceException(e);
+    }
+    GetGroupsForUserResponseProto.Builder builder = GetGroupsForUserResponseProto
+        .newBuilder();
+    for (String g : groups) {
+      builder.addGroups(g);
+    }
+    return builder.build();
+  }
+}
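
The server-side translator is the mirror image: it implements the protobuf-generated BlockingInterface and unwraps each request onto the existing GetUserMappingsProtocol implementation. A hedged sketch of exposing it through an RPC server follows; newReflectiveBlockingService is the standard factory on protobuf-generated service classes, but userMappingsImpl and the exact registration step inside the NameNode's RPC server are assumptions:

    GetUserMappingsProtocolServerSideTranslatorPB translator =
        new GetUserMappingsProtocolServerSideTranslatorPB(userMappingsImpl);
    BlockingService service =
        GetUserMappingsProtocolService.newReflectiveBlockingService(translator);
    // The BlockingService is then added to the NameNode's RPC server so that
    // incoming GetUserMappingsProtocolPB calls are dispatched to the translator.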
@@ -0,0 +1,86 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdfs.protocolPB;
+
+import java.io.Closeable;
+import java.io.IOException;
+import java.net.InetSocketAddress;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdfs.protocol.proto.RefreshAuthorizationPolicyProtocolProtos.RefreshServiceAclRequestProto;
+import org.apache.hadoop.hdfs.protocolR23Compatible.ProtocolSignatureWritable;
+import org.apache.hadoop.hdfs.server.namenode.NameNode;
+import org.apache.hadoop.ipc.ProtobufHelper;
+import org.apache.hadoop.ipc.ProtobufRpcEngine;
+import org.apache.hadoop.ipc.ProtocolSignature;
+import org.apache.hadoop.ipc.RPC;
+import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authorize.RefreshAuthorizationPolicyProtocol;
+
+import com.google.protobuf.RpcController;
+import com.google.protobuf.ServiceException;
+
+public class RefreshAuthorizationPolicyProtocolClientSideTranslatorPB implements
+    RefreshAuthorizationPolicyProtocol, Closeable {
+
+  /** RpcController is not used and hence is set to null */
+  private final static RpcController NULL_CONTROLLER = null;
+  private final RefreshAuthorizationPolicyProtocolPB rpcProxy;
+
+  public RefreshAuthorizationPolicyProtocolClientSideTranslatorPB(
+      InetSocketAddress nameNodeAddr, UserGroupInformation ugi,
+      Configuration conf) throws IOException {
+    RPC.setProtocolEngine(conf, RefreshAuthorizationPolicyProtocolPB.class,
+        ProtobufRpcEngine.class);
+    rpcProxy = RPC.getProxy(RefreshAuthorizationPolicyProtocolPB.class,
+        RPC.getProtocolVersion(RefreshAuthorizationPolicyProtocolPB.class),
+        NameNode.getAddress(conf), ugi, conf,
+        NetUtils.getSocketFactory(conf, RefreshAuthorizationPolicyProtocol.class));
+  }
+
+  @Override
+  public long getProtocolVersion(String protocol, long clientVersion)
+      throws IOException {
+    return rpcProxy.getProtocolVersion(protocol, clientVersion);
+  }
+
+  @Override
+  public ProtocolSignature getProtocolSignature(String protocol,
+      long clientVersion, int clientMethodsHash) throws IOException {
+    return ProtocolSignatureWritable.convert(rpcProxy.getProtocolSignature2(
+        protocol, clientVersion, clientMethodsHash));
+  }
+
+  @Override
+  public void close() throws IOException {
+    RPC.stopProxy(rpcProxy);
+  }
+
+  @Override
+  public void refreshServiceAcl() throws IOException {
+    RefreshServiceAclRequestProto request = RefreshServiceAclRequestProto
+        .newBuilder().build();
+    try {
+      rpcProxy.refreshServiceAcl(NULL_CONTROLLER, request);
+    } catch (ServiceException se) {
+      throw ProtobufHelper.getRemoteException(se);
+    }
+  }
+}
@@ -0,0 +1,49 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdfs.protocolPB;
+
+import java.io.IOException;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.fs.CommonConfigurationKeys;
+import org.apache.hadoop.hdfs.protocol.proto.RefreshAuthorizationPolicyProtocolProtos.RefreshAuthorizationPolicyProtocolService;
+import org.apache.hadoop.hdfs.protocolR23Compatible.ProtocolSignatureWritable;
+import org.apache.hadoop.ipc.ProtocolInfo;
+import org.apache.hadoop.ipc.VersionedProtocol;
+import org.apache.hadoop.security.KerberosInfo;
+
+@KerberosInfo(
+    serverPrincipal=CommonConfigurationKeys.HADOOP_SECURITY_SERVICE_USER_NAME_KEY)
+@ProtocolInfo(
+    protocolName = "org.apache.hadoop.security.authorize.RefreshAuthorizationPolicyProtocol",
+    protocolVersion = 1)
+@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
+@InterfaceStability.Evolving
+public interface RefreshAuthorizationPolicyProtocolPB extends
+    RefreshAuthorizationPolicyProtocolService.BlockingInterface, VersionedProtocol {
+
+  /**
+   * This method is defined to get the protocol signature using
+   * the R23 protocol - hence we have added the suffix of 2 the method name
+   * to avoid conflict.
+   */
+  public ProtocolSignatureWritable getProtocolSignature2(String protocol,
+      long clientVersion, int clientMethodsHash) throws IOException;
+}
@@ -0,0 +1,90 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdfs.protocolPB;
+
+import java.io.IOException;
+
+import org.apache.hadoop.hdfs.protocol.proto.RefreshAuthorizationPolicyProtocolProtos.RefreshServiceAclRequestProto;
+import org.apache.hadoop.hdfs.protocol.proto.RefreshAuthorizationPolicyProtocolProtos.RefreshServiceAclResponseProto;
+import org.apache.hadoop.hdfs.protocolR23Compatible.ProtocolSignatureWritable;
+import org.apache.hadoop.ipc.ProtocolSignature;
+import org.apache.hadoop.ipc.RPC;
+import org.apache.hadoop.security.authorize.RefreshAuthorizationPolicyProtocol;
+
+import com.google.protobuf.RpcController;
+import com.google.protobuf.ServiceException;
+
+public class RefreshAuthorizationPolicyProtocolServerSideTranslatorPB implements
+    RefreshAuthorizationPolicyProtocolPB {
+
+  private final RefreshAuthorizationPolicyProtocol impl;
+
+  public RefreshAuthorizationPolicyProtocolServerSideTranslatorPB(
+      RefreshAuthorizationPolicyProtocol impl) {
+    this.impl = impl;
+  }
+
+  @Override
+  public long getProtocolVersion(String protocol, long clientVersion)
+      throws IOException {
+    return RPC.getProtocolVersion(RefreshAuthorizationPolicyProtocolPB.class);
+  }
+
+  @Override
+  public ProtocolSignature getProtocolSignature(String protocol,
+      long clientVersion, int clientMethodsHash) throws IOException {
+    /**
+     * Don't forward this to the server. The protocol version and signature is
+     * that of {@link RefreshAuthorizationPolicyProtocol}
+     */
+    if (!protocol.equals(RPC
+        .getProtocolName(RefreshAuthorizationPolicyProtocolPB.class))) {
+      throw new IOException("Namenode Serverside implements "
+          + RPC.getProtocolName(RefreshAuthorizationPolicyProtocolPB.class)
+          + ". The following requested protocol is unknown: " + protocol);
+    }
+
+    return ProtocolSignature.getProtocolSignature(clientMethodsHash,
+        RPC.getProtocolVersion(RefreshAuthorizationPolicyProtocolPB.class),
+        RefreshAuthorizationPolicyProtocolPB.class);
+  }
+
+  @Override
+  public ProtocolSignatureWritable getProtocolSignature2(String protocol,
+      long clientVersion, int clientMethodsHash) throws IOException {
+    /**
+     * Don't forward this to the server. The protocol version and signature is
+     * that of {@link RefreshAuthorizationPolicyProtocolPB}
+     */
+    return ProtocolSignatureWritable.convert(this.getProtocolSignature(
+        protocol, clientVersion, clientMethodsHash));
+  }
+
+  @Override
+  public RefreshServiceAclResponseProto refreshServiceAcl(
+      RpcController controller, RefreshServiceAclRequestProto request)
+      throws ServiceException {
+    try {
+      impl.refreshServiceAcl();
+    } catch (IOException e) {
+      throw new ServiceException(e);
+    }
+    return RefreshServiceAclResponseProto.newBuilder().build();
+  }
+}
@@ -0,0 +1,98 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdfs.protocolPB;
+
+import java.io.Closeable;
+import java.io.IOException;
+import java.net.InetSocketAddress;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdfs.protocol.proto.RefreshUserMappingsProtocolProtos.RefreshSuperUserGroupsConfigurationRequestProto;
+import org.apache.hadoop.hdfs.protocol.proto.RefreshUserMappingsProtocolProtos.RefreshUserToGroupsMappingsRequestProto;
+import org.apache.hadoop.hdfs.protocolR23Compatible.ProtocolSignatureWritable;
+import org.apache.hadoop.hdfs.server.namenode.NameNode;
+import org.apache.hadoop.ipc.ProtobufHelper;
+import org.apache.hadoop.ipc.ProtobufRpcEngine;
+import org.apache.hadoop.ipc.ProtocolSignature;
+import org.apache.hadoop.ipc.RPC;
+import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.security.RefreshUserMappingsProtocol;
+import org.apache.hadoop.security.UserGroupInformation;
+
+import com.google.protobuf.RpcController;
+import com.google.protobuf.ServiceException;
+
+public class RefreshUserMappingsProtocolClientSideTranslatorPB implements
+    RefreshUserMappingsProtocol, Closeable {
+
+  /** RpcController is not used and hence is set to null */
+  private final static RpcController NULL_CONTROLLER = null;
+  private final RefreshUserMappingsProtocolPB rpcProxy;
+
+  public RefreshUserMappingsProtocolClientSideTranslatorPB(
+      InetSocketAddress nameNodeAddr, UserGroupInformation ugi,
+      Configuration conf) throws IOException {
+    RPC.setProtocolEngine(conf, RefreshUserMappingsProtocolPB.class,
+        ProtobufRpcEngine.class);
+    rpcProxy = RPC.getProxy(RefreshUserMappingsProtocolPB.class,
+        RPC.getProtocolVersion(RefreshUserMappingsProtocolPB.class),
+        NameNode.getAddress(conf), ugi, conf,
+        NetUtils.getSocketFactory(conf, RefreshUserMappingsProtocol.class));
+  }
+
+  @Override
+  public long getProtocolVersion(String protocol, long clientVersion)
+      throws IOException {
+    return rpcProxy.getProtocolVersion(protocol, clientVersion);
+  }
+
+  @Override
+  public ProtocolSignature getProtocolSignature(String protocol,
+      long clientVersion, int clientMethodsHash) throws IOException {
+    return ProtocolSignatureWritable.convert(rpcProxy.getProtocolSignature2(
+        protocol, clientVersion, clientMethodsHash));
+  }
+
+  @Override
+  public void close() throws IOException {
+    RPC.stopProxy(rpcProxy);
+  }
+
+  @Override
+  public void refreshUserToGroupsMappings() throws IOException {
+    RefreshUserToGroupsMappingsRequestProto request =
+        RefreshUserToGroupsMappingsRequestProto.newBuilder().build();
+    try {
+      rpcProxy.refreshUserToGroupsMappings(NULL_CONTROLLER, request);
+    } catch (ServiceException se) {
+      throw ProtobufHelper.getRemoteException(se);
+    }
+  }
+
+  @Override
+  public void refreshSuperUserGroupsConfiguration() throws IOException {
+    RefreshSuperUserGroupsConfigurationRequestProto request =
+        RefreshSuperUserGroupsConfigurationRequestProto.newBuilder().build();
+    try {
+      rpcProxy.refreshSuperUserGroupsConfiguration(NULL_CONTROLLER, request);
+    } catch (ServiceException se) {
+      throw ProtobufHelper.getRemoteException(se);
+    }
+  }
+}
@@ -0,0 +1,49 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdfs.protocolPB;
+
+import java.io.IOException;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.fs.CommonConfigurationKeys;
+import org.apache.hadoop.hdfs.protocol.proto.RefreshUserMappingsProtocolProtos.RefreshUserMappingsProtocolService;
+import org.apache.hadoop.hdfs.protocolR23Compatible.ProtocolSignatureWritable;
+import org.apache.hadoop.ipc.ProtocolInfo;
+import org.apache.hadoop.ipc.VersionedProtocol;
+import org.apache.hadoop.security.KerberosInfo;
+
+@KerberosInfo(
+    serverPrincipal=CommonConfigurationKeys.HADOOP_SECURITY_SERVICE_USER_NAME_KEY)
+@ProtocolInfo(
+    protocolName = "org.apache.hadoop.security.RefreshUserMappingsProtocol",
+    protocolVersion = 1)
+@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
+@InterfaceStability.Evolving
+public interface RefreshUserMappingsProtocolPB extends
+    RefreshUserMappingsProtocolService.BlockingInterface, VersionedProtocol {
+
+  /**
+   * This method is defined to get the protocol signature using
+   * the R23 protocol - hence we have added the suffix of 2 the method name
+   * to avoid conflict.
+   */
+  public ProtocolSignatureWritable getProtocolSignature2(String protocol,
+      long clientVersion, int clientMethodsHash) throws IOException;
+}
@@ -0,0 +1,105 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdfs.protocolPB;
+
+import java.io.IOException;
+
+import org.apache.hadoop.hdfs.protocol.proto.RefreshUserMappingsProtocolProtos.RefreshSuperUserGroupsConfigurationRequestProto;
+import org.apache.hadoop.hdfs.protocol.proto.RefreshUserMappingsProtocolProtos.RefreshSuperUserGroupsConfigurationResponseProto;
+import org.apache.hadoop.hdfs.protocol.proto.RefreshUserMappingsProtocolProtos.RefreshUserToGroupsMappingsRequestProto;
+import org.apache.hadoop.hdfs.protocol.proto.RefreshUserMappingsProtocolProtos.RefreshUserToGroupsMappingsResponseProto;
+import org.apache.hadoop.hdfs.protocolR23Compatible.ProtocolSignatureWritable;
+import org.apache.hadoop.ipc.ProtocolSignature;
+import org.apache.hadoop.ipc.RPC;
+import org.apache.hadoop.security.RefreshUserMappingsProtocol;
+
+import com.google.protobuf.RpcController;
+import com.google.protobuf.ServiceException;
+
+public class RefreshUserMappingsProtocolServerSideTranslatorPB implements RefreshUserMappingsProtocolPB {
+
+  private final RefreshUserMappingsProtocol impl;
+
+  public RefreshUserMappingsProtocolServerSideTranslatorPB(RefreshUserMappingsProtocol impl) {
+    this.impl = impl;
+  }
+
+  @Override
+  public RefreshUserToGroupsMappingsResponseProto 
+      refreshUserToGroupsMappings(RpcController controller,
+      RefreshUserToGroupsMappingsRequestProto request)
+      throws ServiceException {
+    try {
+      impl.refreshUserToGroupsMappings();
+    } catch (IOException e) {
+      throw new ServiceException(e);
+    }
+    return RefreshUserToGroupsMappingsResponseProto.newBuilder().build();
+  }
+
+  @Override
+  public RefreshSuperUserGroupsConfigurationResponseProto 
+      refreshSuperUserGroupsConfiguration(RpcController controller,
+      RefreshSuperUserGroupsConfigurationRequestProto request)
+      throws ServiceException {
+    try {
+      impl.refreshSuperUserGroupsConfiguration();
+    } catch (IOException e) {
+      throw new ServiceException(e);
+    }
+    return RefreshSuperUserGroupsConfigurationResponseProto.newBuilder()
+        .build();
+  }
+
+  @Override
+  public long getProtocolVersion(String protocol, long clientVersion)
+      throws IOException {
+    return RPC.getProtocolVersion(RefreshUserMappingsProtocolPB.class);
+  }
+
+  @Override
+  public ProtocolSignature getProtocolSignature(String protocol,
+      long clientVersion, int clientMethodsHash) throws IOException {
+    /**
+     * Don't forward this to the server. The protocol version and signature is
+     * that of {@link RefreshUserMappingsProtocol}
+     */
+    if (!protocol.equals(RPC
+        .getProtocolName(RefreshUserMappingsProtocolPB.class))) {
+      throw new IOException("Namenode Serverside implements "
+          + RPC.getProtocolName(RefreshUserMappingsProtocolPB.class)
+          + ". The following requested protocol is unknown: " + protocol);
+    }
+
+    return ProtocolSignature.getProtocolSignature(clientMethodsHash,
+        RPC.getProtocolVersion(RefreshUserMappingsProtocolPB.class),
+        RefreshUserMappingsProtocolPB.class);
+  }
+
+  @Override
+  public ProtocolSignatureWritable getProtocolSignature2(String protocol,
+      long clientVersion, int clientMethodsHash) throws IOException {
+    /**
+     * Don't forward this to the server. The protocol version and signature is
+     * that of {@link RefreshUserMappingsProtocolPB}
+     */
+    return ProtocolSignatureWritable.convert(this.getProtocolSignature(
+        protocol, clientVersion, clientMethodsHash));
+  }
+}
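The server-side translator is the mirror image of the client-side one: it unwraps the (empty) request proto, invokes the real RefreshUserMappingsProtocol implementation, and re-wraps any IOException as a ServiceException for the protobuf engine. The registration pattern, sketched here for orientation, is exactly what the NameNodeRpcServer hunks below do:

    // Sketch of the wiring shown later in this change.
    RefreshUserMappingsProtocolServerSideTranslatorPB xlator =
        new RefreshUserMappingsProtocolServerSideTranslatorPB(rpcServerImpl);
    BlockingService service =
        RefreshUserMappingsProtocolService.newReflectiveBlockingService(xlator);
    DFSUtil.addPBProtocol(conf, RefreshUserMappingsProtocolPB.class, service,
        rpcServer);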
@@ -65,12 +65,21 @@ import org.apache.hadoop.hdfs.protocol.HdfsConstants.UpgradeAction;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ClientNamenodeProtocol;
 import org.apache.hadoop.hdfs.protocol.proto.NamenodeProtocolProtos.NamenodeProtocolService;
 import org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.DatanodeProtocolService;
+import org.apache.hadoop.hdfs.protocol.proto.GetUserMappingsProtocolProtos.GetUserMappingsProtocolService;
+import org.apache.hadoop.hdfs.protocol.proto.RefreshAuthorizationPolicyProtocolProtos.RefreshAuthorizationPolicyProtocolService;
+import org.apache.hadoop.hdfs.protocol.proto.RefreshUserMappingsProtocolProtos.RefreshUserMappingsProtocolService;
 import org.apache.hadoop.hdfs.protocolPB.DatanodeProtocolPB;
 import org.apache.hadoop.hdfs.protocolPB.DatanodeProtocolServerSideTranslatorPB;
+import org.apache.hadoop.hdfs.protocolPB.GetUserMappingsProtocolPB;
+import org.apache.hadoop.hdfs.protocolPB.GetUserMappingsProtocolServerSideTranslatorPB;
 import org.apache.hadoop.hdfs.protocolPB.NamenodeProtocolPB;
 import org.apache.hadoop.hdfs.protocolPB.NamenodeProtocolServerSideTranslatorPB;
 import org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolPB;
 import org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB;
+import org.apache.hadoop.hdfs.protocolPB.RefreshAuthorizationPolicyProtocolPB;
+import org.apache.hadoop.hdfs.protocolPB.RefreshAuthorizationPolicyProtocolServerSideTranslatorPB;
+import org.apache.hadoop.hdfs.protocolPB.RefreshUserMappingsProtocolPB;
+import org.apache.hadoop.hdfs.protocolPB.RefreshUserMappingsProtocolServerSideTranslatorPB;
 import org.apache.hadoop.hdfs.security.token.block.ExportedBlockKeys;
 import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
 import org.apache.hadoop.hdfs.server.common.IncorrectVersionException;
@@ -99,9 +108,9 @@ import org.apache.hadoop.io.Text;
 import org.apache.hadoop.ipc.ProtobufRpcEngine;
 import org.apache.hadoop.ipc.ProtocolSignature;
 import org.apache.hadoop.ipc.RPC;
+import org.apache.hadoop.ipc.RpcPayloadHeader.RpcKind;
 import org.apache.hadoop.ipc.Server;
 import org.apache.hadoop.ipc.WritableRpcEngine;
-import org.apache.hadoop.ipc.RpcPayloadHeader.RpcKind;
 import org.apache.hadoop.net.Node;
 import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.Groups;
@@ -168,6 +177,21 @@ class NameNodeRpcServer implements NamenodeProtocols {
     BlockingService NNPbService = NamenodeProtocolService
         .newReflectiveBlockingService(namenodeProtocolXlator);
+
+    RefreshAuthorizationPolicyProtocolServerSideTranslatorPB refreshAuthPolicyXlator = 
+        new RefreshAuthorizationPolicyProtocolServerSideTranslatorPB(this);
+    BlockingService refreshAuthService = RefreshAuthorizationPolicyProtocolService
+        .newReflectiveBlockingService(refreshAuthPolicyXlator);
+
+    RefreshUserMappingsProtocolServerSideTranslatorPB refreshUserMappingXlator = 
+        new RefreshUserMappingsProtocolServerSideTranslatorPB(this);
+    BlockingService refreshUserMappingService = RefreshUserMappingsProtocolService
+        .newReflectiveBlockingService(refreshUserMappingXlator);
+
+    GetUserMappingsProtocolServerSideTranslatorPB getUserMappingXlator = 
+        new GetUserMappingsProtocolServerSideTranslatorPB(this);
+    BlockingService getUserMappingService = GetUserMappingsProtocolService
+        .newReflectiveBlockingService(getUserMappingXlator);
 
     WritableRpcEngine.ensureInitialized();
 
     InetSocketAddress dnSocketAddr = nn.getServiceRpcServerAddress(conf);
@@ -182,18 +206,18 @@ class NameNodeRpcServer implements NamenodeProtocols {
           dnSocketAddr.getHostName(), dnSocketAddr.getPort(), 
           serviceHandlerCount,
           false, conf, namesystem.getDelegationTokenSecretManager());
-      this.serviceRpcServer.addProtocol(RpcKind.RPC_WRITABLE, 
-          RefreshAuthorizationPolicyProtocol.class, this);
-      this.serviceRpcServer.addProtocol(RpcKind.RPC_WRITABLE, 
-          RefreshUserMappingsProtocol.class, this);
-      this.serviceRpcServer.addProtocol(RpcKind.RPC_WRITABLE, 
-          GetUserMappingsProtocol.class, this);
       this.serviceRpcServer.addProtocol(RpcKind.RPC_WRITABLE, 
           HAServiceProtocol.class, this);
       DFSUtil.addPBProtocol(conf, NamenodeProtocolPB.class, NNPbService,
           serviceRpcServer);
       DFSUtil.addPBProtocol(conf, DatanodeProtocolPB.class, dnProtoPbService,
           serviceRpcServer);
+      DFSUtil.addPBProtocol(conf, RefreshAuthorizationPolicyProtocolPB.class,
+          refreshAuthService, serviceRpcServer);
+      DFSUtil.addPBProtocol(conf, RefreshUserMappingsProtocolPB.class, 
+          refreshUserMappingService, serviceRpcServer);
+      DFSUtil.addPBProtocol(conf, GetUserMappingsProtocolPB.class, 
+          getUserMappingService, serviceRpcServer);
+
       this.serviceRPCAddress = this.serviceRpcServer.getListenerAddress();
       nn.setRpcServiceServerAddress(conf, serviceRPCAddress);
@@ -207,18 +231,18 @@ class NameNodeRpcServer implements NamenodeProtocols {
         clientNNPbService, socAddr.getHostName(),
             socAddr.getPort(), handlerCount, false, conf,
             namesystem.getDelegationTokenSecretManager());
-    this.clientRpcServer.addProtocol(RpcKind.RPC_WRITABLE,
-        RefreshAuthorizationPolicyProtocol.class, this);
-    this.clientRpcServer.addProtocol(RpcKind.RPC_WRITABLE,
-        RefreshUserMappingsProtocol.class, this);
-    this.clientRpcServer.addProtocol(RpcKind.RPC_WRITABLE,
-        GetUserMappingsProtocol.class, this);
    this.clientRpcServer.addProtocol(RpcKind.RPC_WRITABLE,
        HAServiceProtocol.class, this);
    DFSUtil.addPBProtocol(conf, NamenodeProtocolPB.class, NNPbService,
        clientRpcServer);
    DFSUtil.addPBProtocol(conf, DatanodeProtocolPB.class, dnProtoPbService,
        clientRpcServer);
+    DFSUtil.addPBProtocol(conf, RefreshAuthorizationPolicyProtocolPB.class, 
+        refreshAuthService, clientRpcServer);
+    DFSUtil.addPBProtocol(conf, RefreshUserMappingsProtocolPB.class, 
+        refreshUserMappingService, clientRpcServer);
+    DFSUtil.addPBProtocol(conf, GetUserMappingsProtocolPB.class, 
+        getUserMappingService, clientRpcServer);
+
    // set service-level authorization security policy
    if (serviceAuthEnabled =
@@ -43,14 +43,14 @@ import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants.DatanodeReportType;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants.UpgradeAction;
+import org.apache.hadoop.hdfs.protocolPB.RefreshAuthorizationPolicyProtocolClientSideTranslatorPB;
+import org.apache.hadoop.hdfs.protocolPB.RefreshUserMappingsProtocolClientSideTranslatorPB;
 import org.apache.hadoop.hdfs.server.common.UpgradeStatusReport;
 import org.apache.hadoop.hdfs.server.namenode.NameNode;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.net.NetUtils;
-import org.apache.hadoop.security.RefreshUserMappingsProtocol;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.security.authorize.RefreshAuthorizationPolicyProtocol;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.ToolRunner;
@@ -790,13 +790,9 @@ public class DFSAdmin extends FsShell {
         conf.get(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY, ""));
 
     // Create the client
-    RefreshAuthorizationPolicyProtocol refreshProtocol = 
-      (RefreshAuthorizationPolicyProtocol) 
-      RPC.getProxy(RefreshAuthorizationPolicyProtocol.class, 
-                   RefreshAuthorizationPolicyProtocol.versionID, 
-                   NameNode.getAddress(conf), getUGI(), conf,
-                   NetUtils.getSocketFactory(conf, 
-                                             RefreshAuthorizationPolicyProtocol.class));
+    RefreshAuthorizationPolicyProtocolClientSideTranslatorPB refreshProtocol =
+        new RefreshAuthorizationPolicyProtocolClientSideTranslatorPB(
+            NameNode.getAddress(conf), getUGI(), conf);
     
     // Refresh the authorization policy in-effect
     refreshProtocol.refreshServiceAcl();
@@ -820,13 +816,9 @@ public class DFSAdmin extends FsShell {
         conf.get(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY, ""));
 
     // Create the client
-    RefreshUserMappingsProtocol refreshProtocol = 
-      (RefreshUserMappingsProtocol) 
-      RPC.getProxy(RefreshUserMappingsProtocol.class, 
-                   RefreshUserMappingsProtocol.versionID, 
-                   NameNode.getAddress(conf), getUGI(), conf,
-                   NetUtils.getSocketFactory(conf, 
-                                             RefreshUserMappingsProtocol.class));
+    RefreshUserMappingsProtocolClientSideTranslatorPB refreshProtocol =
+        new RefreshUserMappingsProtocolClientSideTranslatorPB(
+            NameNode.getAddress(conf), getUGI(), conf);
     
     // Refresh the user-to-groups mappings
     refreshProtocol.refreshUserToGroupsMappings();
@@ -851,13 +843,9 @@ public class DFSAdmin extends FsShell {
         conf.get(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY, ""));
 
     // Create the client
-    RefreshUserMappingsProtocol refreshProtocol = 
-      (RefreshUserMappingsProtocol) 
-      RPC.getProxy(RefreshUserMappingsProtocol.class, 
-                   RefreshUserMappingsProtocol.versionID, 
-                   NameNode.getAddress(conf), getUGI(), conf,
-                   NetUtils.getSocketFactory(conf, 
-                                             RefreshUserMappingsProtocol.class));
+    RefreshUserMappingsProtocolClientSideTranslatorPB refreshProtocol =
+        new RefreshUserMappingsProtocolClientSideTranslatorPB(
+            NameNode.getAddress(conf), getUGI(), conf);
     
     // Refresh the user-to-groups mappings
     refreshProtocol.refreshSuperUserGroupsConfiguration();
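All three DFSAdmin refresh commands now construct a PB translator directly instead of going through an RPC.getProxy() dynamic proxy; the command-line surface is unchanged. A hedged sketch of driving the same code path programmatically (DFSAdmin is a Tool, and these are its standard flag names):

    // Sketch: equivalent to 'hdfs dfsadmin -refreshUserToGroupsMappings'.
    int rc = ToolRunner.run(new DFSAdmin(new HdfsConfiguration()),
        new String[] { "-refreshUserToGroupsMappings" });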
@@ -23,8 +23,11 @@ import java.net.InetSocketAddress;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
+import org.apache.hadoop.hdfs.protocolPB.GetUserMappingsProtocolClientSideTranslatorPB;
 import org.apache.hadoop.hdfs.server.namenode.NameNode;
+import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.tools.GetGroupsBase;
+import org.apache.hadoop.tools.GetUserMappingsProtocol;
 import org.apache.hadoop.util.ToolRunner;
 
 /**
|
||||||
return NameNode.getAddress(conf);
|
return NameNode.getAddress(conf);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
protected GetUserMappingsProtocol getUgmProtocol() throws IOException {
|
||||||
|
return new GetUserMappingsProtocolClientSideTranslatorPB(
|
||||||
|
NameNode.getAddress(getConf()), UserGroupInformation.getCurrentUser(),
|
||||||
|
getConf());
|
||||||
|
}
|
||||||
|
|
||||||
public static void main(String[] argv) throws Exception {
|
public static void main(String[] argv) throws Exception {
|
||||||
int res = ToolRunner.run(new GetGroups(new HdfsConfiguration()), argv);
|
int res = ToolRunner.run(new GetGroups(new HdfsConfiguration()), argv);
|
||||||
System.exit(res);
|
System.exit(res);
|
||||||
|
|
|
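With getUgmProtocol() overridden, GetGroupsBase obtains its user-to-groups client from the PB translator rather than a Writable proxy. Invocation mirrors the main() above; a sketch with illustrative user names:

    // Sketch: prints each user's group list, as the 'hdfs groups' command does.
    int res = ToolRunner.run(new GetGroups(new HdfsConfiguration()),
        new String[] { "alice", "bob" });  // user names here are hypothetical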
@@ -0,0 +1,49 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+option java_package = "org.apache.hadoop.hdfs.protocol.proto";
+option java_outer_classname = "GetUserMappingsProtocolProtos";
+option java_generic_services = true;
+option java_generate_equals_and_hash = true;
+
+/**
+ * Get groups for user request.
+ */
+message GetGroupsForUserRequestProto {
+  required string user = 1;
+}
+
+/**
+ * Response for get groups.
+ */
+message GetGroupsForUserResponseProto {
+  repeated string groups = 1;
+}
+
+
+/**
+ * Protocol implemented by the Name Node and Job Tracker which maps users to
+ * groups.
+ */
+service GetUserMappingsProtocolService {
+  /**
+   * Get the groups which are mapped to the given user.
+   */
+  rpc getGroupsForUser(GetGroupsForUserRequestProto)
+      returns(GetGroupsForUserResponseProto);
+}
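Because java_generic_services is on, protoc also generates a GetUserMappingsProtocolService.BlockingInterface, which is what the PB client- and server-side translators implement. A sketch of a direct call through any object implementing that interface (the stub variable is an assumption standing in for the translator's underlying proxy; the controller argument may be null or a shared NULL_CONTROLLER, as in the translators above):

    // Sketch: build the request proto and read the repeated 'groups' field.
    GetGroupsForUserRequestProto req =
        GetGroupsForUserRequestProto.newBuilder().setUser("alice").build();
    GetGroupsForUserResponseProto resp = stub.getGroupsForUser(null, req);
    List<String> groups = resp.getGroupsList();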
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+option java_package = "org.apache.hadoop.hdfs.protocol.proto";
+option java_outer_classname = "RefreshAuthorizationPolicyProtocolProtos";
+option java_generic_services = true;
+option java_generate_equals_and_hash = true;
+
+/**
+ * Refresh service acl request.
+ */
+message RefreshServiceAclRequestProto {
+}
+
+/**
+ * void response
+ */
+message RefreshServiceAclResponseProto {
+}
+
+/**
+ * Protocol which is used to refresh the authorization policy in use currently.
+ */
+service RefreshAuthorizationPolicyProtocolService {
+  /**
+   * Refresh the service-level authorization policy in-effect.
+   */
+  rpc refreshServiceAcl(RefreshServiceAclRequestProto)
+      returns(RefreshServiceAclResponseProto);
+}
@@ -0,0 +1,63 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+option java_package = "org.apache.hadoop.hdfs.protocol.proto";
+option java_outer_classname = "RefreshUserMappingsProtocolProtos";
+option java_generic_services = true;
+option java_generate_equals_and_hash = true;
+
+/**
+ * Refresh user to group mappings request.
+ */
+message RefreshUserToGroupsMappingsRequestProto {
+}
+
+/**
+ * void response
+ */
+message RefreshUserToGroupsMappingsResponseProto {
+}
+
+/**
+ * Refresh superuser configuration request.
+ */
+message RefreshSuperUserGroupsConfigurationRequestProto {
+}
+
+/**
+ * void response
+ */
+message RefreshSuperUserGroupsConfigurationResponseProto {
+}
+
+/**
+ * Protocol to refresh the user mappings.
+ */
+service RefreshUserMappingsProtocolService {
+  /**
+   * Refresh user to group mappings.
+   */
+  rpc refreshUserToGroupsMappings(RefreshUserToGroupsMappingsRequestProto)
+      returns(RefreshUserToGroupsMappingsResponseProto);
+
+  /**
+   * Refresh superuser proxy group list.
+   */
+  rpc refreshSuperUserGroupsConfiguration(RefreshSuperUserGroupsConfigurationRequestProto)
+      returns(RefreshSuperUserGroupsConfigurationResponseProto);
+}
@@ -73,28 +73,6 @@ Trunk (unreleased changes)
     findBugs, correct links to findBugs artifacts and no links to the
     artifacts when there are no warnings. (Tom White via vinodkv).
 
-    MAPREDUCE-3183. hadoop-assemblies/src/main/resources/assemblies/hadoop-mapreduce-dist.xml 
-    missing license header. (Hitesh Shah via tucu).
-
-    MAPREDUCE-3003. Publish MR JARs to Maven snapshot repository. (tucu)
-
-    MAPREDUCE-3204. mvn site:site fails on MapReduce. (tucu)
-
-    MAPREDUCE-3014. Rename and invert logic of '-cbuild' profile to 'native' and off 
-    by default. (tucu)
-
-    MAPREDUCE-3477. Hadoop site documentation cannot be built anymore. (jeagles via tucu)
-
-    MAPREDUCE-3500. MRJobConfig creates an LD_LIBRARY_PATH using the platform ARCH. (tucu)
-
-    MAPREDUCE-3389. MRApps loads the 'mrapp-generated-classpath' file with 
-    classpath from the build machine. (tucu)
-
-    MAPREDUCE-3544. gridmix build is broken, requires hadoop-archives to be added as 
-    ivy dependency. (tucu)
-
-    MAPREDUCE-3557. MR1 test fail to compile because of missing hadoop-archives dependency.
-    (tucu)
-
 Release 0.23.1 - Unreleased
 
@@ -414,6 +392,39 @@ Release 0.23.1 - Unreleased
 
     MAPREDUCE-3615. Fix some ant test failures. (Thomas Graves via sseth)
 
+    MAPREDUCE-3326. Added detailed information about queue's to the
+    CapacityScheduler web-ui. (Jason Lowe via acmurthy)
+
+    MAPREDUCE-3548. Added more unit tests for MR AM & JHS web-services.
+    (Thomas Graves via acmurthy)
+
+    MAPREDUCE-3617. Removed wrong default value for
+    yarn.resourcemanager.principal and yarn.nodemanager.principal. (Jonathan
+    Eagles via acmurthy)
+
+    MAPREDUCE-3183. hadoop-assemblies/src/main/resources/assemblies/hadoop-mapreduce-dist.xml 
+    missing license header. (Hitesh Shah via tucu).
+
+    MAPREDUCE-3003. Publish MR JARs to Maven snapshot repository. (tucu)
+
+    MAPREDUCE-3204. mvn site:site fails on MapReduce. (tucu)
+
+    MAPREDUCE-3014. Rename and invert logic of '-cbuild' profile to 'native' and off 
+    by default. (tucu)
+
+    MAPREDUCE-3477. Hadoop site documentation cannot be built anymore. (jeagles via tucu)
+
+    MAPREDUCE-3500. MRJobConfig creates an LD_LIBRARY_PATH using the platform ARCH. (tucu)
+
+    MAPREDUCE-3389. MRApps loads the 'mrapp-generated-classpath' file with 
+    classpath from the build machine. (tucu)
+
+    MAPREDUCE-3544. gridmix build is broken, requires hadoop-archives to be added as 
+    ivy dependency. (tucu)
+
+    MAPREDUCE-3557. MR1 test fail to compile because of missing hadoop-archives dependency.
+    (tucu)
+
 Release 0.23.0 - 2011-11-01
 
 INCOMPATIBLE CHANGES
@@ -136,7 +136,6 @@
       <plugin>
         <groupId>org.codehaus.mojo</groupId>
         <artifactId>exec-maven-plugin</artifactId>
-        <version>1.2</version>
         <executions>
           <execution>
             <phase>compile</phase>
@@ -76,14 +76,90 @@ public class AMWebServices {
   }
 
   Boolean hasAccess(Job job, HttpServletRequest request) {
-    UserGroupInformation callerUgi = UserGroupInformation
-        .createRemoteUser(request.getRemoteUser());
-    if (!job.checkAccess(callerUgi, JobACL.VIEW_JOB)) {
+    String remoteUser = request.getRemoteUser();
+    UserGroupInformation callerUGI = null;
+    if (remoteUser != null) {
+      callerUGI = UserGroupInformation.createRemoteUser(remoteUser);
+    }
+    if (callerUGI != null && !job.checkAccess(callerUGI, JobACL.VIEW_JOB)) {
       return false;
     }
     return true;
   }
 
+  /**
+   * convert a job id string to an actual job and handle all the error checking.
+   */
+  public static Job getJobFromJobIdString(String jid, AppContext appCtx) throws NotFoundException {
+    JobId jobId;
+    Job job;
+    try {
+      jobId = MRApps.toJobID(jid);
+    } catch (YarnException e) {
+      throw new NotFoundException(e.getMessage());
+    }
+    if (jobId == null) {
+      throw new NotFoundException("job, " + jid + ", is not found");
+    }
+    job = appCtx.getJob(jobId);
+    if (job == null) {
+      throw new NotFoundException("job, " + jid + ", is not found");
+    }
+    return job;
+  }
+
+  /**
+   * convert a task id string to an actual task and handle all the error
+   * checking.
+   */
+  public static Task getTaskFromTaskIdString(String tid, Job job) throws NotFoundException {
+    TaskId taskID;
+    Task task;
+    try {
+      taskID = MRApps.toTaskID(tid);
+    } catch (YarnException e) {
+      throw new NotFoundException(e.getMessage());
+    } catch (NumberFormatException ne) {
+      throw new NotFoundException(ne.getMessage());
+    }
+    if (taskID == null) {
+      throw new NotFoundException("taskid " + tid + " not found or invalid");
+    }
+    task = job.getTask(taskID);
+    if (task == null) {
+      throw new NotFoundException("task not found with id " + tid);
+    }
+    return task;
+  }
+
+  /**
+   * convert a task attempt id string to an actual task attempt and handle all
+   * the error checking.
+   */
+  public static TaskAttempt getTaskAttemptFromTaskAttemptString(String attId, Task task)
+      throws NotFoundException {
+    TaskAttemptId attemptId;
+    TaskAttempt ta;
+    try {
+      attemptId = MRApps.toTaskAttemptID(attId);
+    } catch (YarnException e) {
+      throw new NotFoundException(e.getMessage());
+    } catch (NumberFormatException ne) {
+      throw new NotFoundException(ne.getMessage());
+    }
+    if (attemptId == null) {
+      throw new NotFoundException("task attempt id " + attId
+          + " not found or invalid");
+    }
+    ta = task.getAttempt(attemptId);
+    if (ta == null) {
+      throw new NotFoundException("Error getting info on task attempt id "
+          + attId);
+    }
+    return ta;
+  }
+
+
   /**
    * check for job access.
    *
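These three static helpers centralize the id-parsing and null-checking that each resource method previously duplicated, mapping every failure to a JAX-RS NotFoundException (an HTTP 404). Every rewritten endpoint below collapses to the same three-line shape:

    // Sketch: the shape each rewritten endpoint now takes.
    Job job = getJobFromJobIdString(jid, appCtx);    // 404 on bad/unknown job id
    checkAccess(job, hsr);                           // ACL check for the caller
    Task task = getTaskFromTaskIdString(tid, job);   // 404 on bad/unknown task id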
@@ -130,16 +206,8 @@ public class AMWebServices {
   @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
   public JobInfo getJob(@Context HttpServletRequest hsr,
       @PathParam("jobid") String jid) {
-    JobId jobId = MRApps.toJobID(jid);
-    if (jobId == null) {
-      throw new NotFoundException("job, " + jid + ", is not found");
-    }
-    Job job = appCtx.getJob(jobId);
-    if (job == null) {
-      throw new NotFoundException("job, " + jid + ", is not found");
-    }
+    Job job = getJobFromJobIdString(jid, appCtx);
     return new JobInfo(job, hasAccess(job, hsr));
-
   }
 
   @GET
@@ -147,63 +215,25 @@ public class AMWebServices {
   @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
   public JobCounterInfo getJobCounters(@Context HttpServletRequest hsr,
       @PathParam("jobid") String jid) {
-    JobId jobId = MRApps.toJobID(jid);
-    if (jobId == null) {
-      throw new NotFoundException("job, " + jid + ", is not found");
-    }
-    Job job = appCtx.getJob(jobId);
-    if (job == null) {
-      throw new NotFoundException("job, " + jid + ", is not found");
-    }
+    Job job = getJobFromJobIdString(jid, appCtx);
     checkAccess(job, hsr);
     return new JobCounterInfo(this.appCtx, job);
   }
 
-  @GET
-  @Path("/jobs/{jobid}/tasks/{taskid}/counters")
-  @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
-  public JobTaskCounterInfo getSingleTaskCounters(
-      @Context HttpServletRequest hsr, @PathParam("jobid") String jid,
-      @PathParam("taskid") String tid) {
-    JobId jobId = MRApps.toJobID(jid);
-    if (jobId == null) {
-      throw new NotFoundException("job, " + jid + ", is not found");
-    }
-    Job job = this.appCtx.getJob(jobId);
-    if (job == null) {
-      throw new NotFoundException("job, " + jid + ", is not found");
-    }
-    checkAccess(job, hsr);
-    TaskId taskID = MRApps.toTaskID(tid);
-    if (taskID == null) {
-      throw new NotFoundException("taskid " + tid + " not found or invalid");
-    }
-    Task task = job.getTask(taskID);
-    if (task == null) {
-      throw new NotFoundException("task not found with id " + tid);
-    }
-    return new JobTaskCounterInfo(task);
-  }
-
   @GET
   @Path("/jobs/{jobid}/conf")
   @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
   public ConfInfo getJobConf(@Context HttpServletRequest hsr,
       @PathParam("jobid") String jid) {
-    JobId jobId = MRApps.toJobID(jid);
-    if (jobId == null) {
-      throw new NotFoundException("job, " + jid + ", is not found");
-    }
-    Job job = appCtx.getJob(jobId);
-    if (job == null) {
-      throw new NotFoundException("job, " + jid + ", is not found");
-    }
+    Job job = getJobFromJobIdString(jid, appCtx);
     checkAccess(job, hsr);
     ConfInfo info;
     try {
       info = new ConfInfo(job, this.conf);
     } catch (IOException e) {
-      throw new NotFoundException("unable to load configuration for job: " + jid);
+      throw new NotFoundException("unable to load configuration for job: "
+          + jid);
     }
     return info;
   }
@@ -213,10 +243,8 @@ public class AMWebServices {
   @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
   public TasksInfo getJobTasks(@Context HttpServletRequest hsr,
       @PathParam("jobid") String jid, @QueryParam("type") String type) {
-    Job job = this.appCtx.getJob(MRApps.toJobID(jid));
-    if (job == null) {
-      throw new NotFoundException("job, " + jid + ", is not found");
-    }
+    Job job = getJobFromJobIdString(jid, appCtx);
     checkAccess(job, hsr);
     TasksInfo allTasks = new TasksInfo();
     for (Task task : job.getTasks().values()) {
@@ -225,7 +253,8 @@ public class AMWebServices {
       try {
         ttype = MRApps.taskType(type);
       } catch (YarnException e) {
-        throw new BadRequestException("tasktype must be either m or r"); }
+        throw new BadRequestException("tasktype must be either m or r");
+      }
     }
     if (ttype != null && task.getType() != ttype) {
       continue;
|
||||||
@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
|
@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
|
||||||
public TaskInfo getJobTask(@Context HttpServletRequest hsr,
|
public TaskInfo getJobTask(@Context HttpServletRequest hsr,
|
||||||
@PathParam("jobid") String jid, @PathParam("taskid") String tid) {
|
@PathParam("jobid") String jid, @PathParam("taskid") String tid) {
|
||||||
Job job = this.appCtx.getJob(MRApps.toJobID(jid));
|
|
||||||
if (job == null) {
|
|
||||||
throw new NotFoundException("job, " + jid + ", is not found");
|
|
||||||
}
|
|
||||||
checkAccess(job, hsr);
|
|
||||||
TaskId taskID = MRApps.toTaskID(tid);
|
|
||||||
if (taskID == null) {
|
|
||||||
throw new NotFoundException("taskid " + tid + " not found or invalid");
|
|
||||||
}
|
|
||||||
Task task = job.getTask(taskID);
|
|
||||||
if (task == null) {
|
|
||||||
throw new NotFoundException("task not found with id " + tid);
|
|
||||||
}
|
|
||||||
return new TaskInfo(task);
|
|
||||||
|
|
||||||
|
Job job = getJobFromJobIdString(jid, appCtx);
|
||||||
|
checkAccess(job, hsr);
|
||||||
|
Task task = getTaskFromTaskIdString(tid, job);
|
||||||
|
return new TaskInfo(task);
|
||||||
|
}
|
||||||
|
|
||||||
|
@GET
|
||||||
|
@Path("/jobs/{jobid}/tasks/{taskid}/counters")
|
||||||
|
@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
|
||||||
|
public JobTaskCounterInfo getSingleTaskCounters(
|
||||||
|
@Context HttpServletRequest hsr, @PathParam("jobid") String jid,
|
||||||
|
@PathParam("taskid") String tid) {
|
||||||
|
|
||||||
|
Job job = getJobFromJobIdString(jid, appCtx);
|
||||||
|
checkAccess(job, hsr);
|
||||||
|
Task task = getTaskFromTaskIdString(tid, job);
|
||||||
|
return new JobTaskCounterInfo(task);
|
||||||
}
|
}
|
||||||
|
|
||||||
@GET
|
@GET
|
||||||
|
@@ -263,19 +295,11 @@ public class AMWebServices {
   public TaskAttemptsInfo getJobTaskAttempts(@Context HttpServletRequest hsr,
       @PathParam("jobid") String jid, @PathParam("taskid") String tid) {
     TaskAttemptsInfo attempts = new TaskAttemptsInfo();
-    Job job = this.appCtx.getJob(MRApps.toJobID(jid));
-    if (job == null) {
-      throw new NotFoundException("job, " + jid + ", is not found");
-    }
+    Job job = getJobFromJobIdString(jid, appCtx);
     checkAccess(job, hsr);
-    TaskId taskID = MRApps.toTaskID(tid);
-    if (taskID == null) {
-      throw new NotFoundException("taskid " + tid + " not found or invalid");
-    }
-    Task task = job.getTask(taskID);
-    if (task == null) {
-      throw new NotFoundException("task not found with id " + tid);
-    }
+    Task task = getTaskFromTaskIdString(tid, job);
     for (TaskAttempt ta : task.getAttempts().values()) {
       if (ta != null) {
         if (task.getType() == TaskType.REDUCE) {
@@ -294,29 +318,11 @@ public class AMWebServices {
   public TaskAttemptInfo getJobTaskAttemptId(@Context HttpServletRequest hsr,
       @PathParam("jobid") String jid, @PathParam("taskid") String tid,
       @PathParam("attemptid") String attId) {
-    Job job = this.appCtx.getJob(MRApps.toJobID(jid));
-    if (job == null) {
-      throw new NotFoundException("job, " + jid + ", is not found");
-    }
+    Job job = getJobFromJobIdString(jid, appCtx);
     checkAccess(job, hsr);
-    TaskId taskID = MRApps.toTaskID(tid);
-    if (taskID == null) {
-      throw new NotFoundException("taskid " + tid + " not found or invalid");
-    }
-    Task task = job.getTask(taskID);
-    if (task == null) {
-      throw new NotFoundException("task not found with id " + tid);
-    }
-    TaskAttemptId attemptId = MRApps.toTaskAttemptID(attId);
-    if (attemptId == null) {
-      throw new NotFoundException("task attempt id " + attId
-          + " not found or invalid");
-    }
-    TaskAttempt ta = task.getAttempt(attemptId);
-    if (ta == null) {
-      throw new NotFoundException("Error getting info on task attempt id "
-          + attId);
-    }
+    Task task = getTaskFromTaskIdString(tid, job);
+    TaskAttempt ta = getTaskAttemptFromTaskAttemptString(attId, task);
     if (task.getType() == TaskType.REDUCE) {
       return new ReduceTaskAttemptInfo(ta, task.getType());
     } else {
@@ -330,33 +336,11 @@ public class AMWebServices {
   public JobTaskAttemptCounterInfo getJobTaskAttemptIdCounters(
       @Context HttpServletRequest hsr, @PathParam("jobid") String jid,
       @PathParam("taskid") String tid, @PathParam("attemptid") String attId) {
-    JobId jobId = MRApps.toJobID(jid);
-    if (jobId == null) {
-      throw new NotFoundException("job, " + jid + ", is not found");
-    }
-    Job job = this.appCtx.getJob(jobId);
-    if (job == null) {
-      throw new NotFoundException("job, " + jid + ", is not found");
-    }
+    Job job = getJobFromJobIdString(jid, appCtx);
     checkAccess(job, hsr);
-    TaskId taskID = MRApps.toTaskID(tid);
-    if (taskID == null) {
-      throw new NotFoundException("taskid " + tid + " not found or invalid");
-    }
-    Task task = job.getTask(taskID);
-    if (task == null) {
-      throw new NotFoundException("task not found with id " + tid);
-    }
-    TaskAttemptId attemptId = MRApps.toTaskAttemptID(attId);
-    if (attemptId == null) {
-      throw new NotFoundException("task attempt id " + attId
-          + " not found or invalid");
-    }
-    TaskAttempt ta = task.getAttempt(attemptId);
-    if (ta == null) {
-      throw new NotFoundException("Error getting info on task attempt id "
-          + attId);
-    }
+    Task task = getTaskFromTaskIdString(tid, job);
+    TaskAttempt ta = getTaskAttemptFromTaskAttemptString(attId, task);
     return new JobTaskAttemptCounterInfo(ta);
   }
 }
@@ -32,7 +32,6 @@ public class AppInfo {
   protected String appId;
   protected String name;
   protected String user;
-  protected String hostname;
   protected long startedOn;
   protected long elapsedTime;
 
@@ -44,7 +43,7 @@ public class AppInfo {
     this.name = context.getApplicationName().toString();
     this.user = context.getUser().toString();
     this.startedOn = context.getStartTime();
-    this.elapsedTime = Times.elapsed(context.getStartTime(), 0);
+    this.elapsedTime = Times.elapsed(this.startedOn, 0);
   }
 
   public String getId() {
@@ -30,7 +30,7 @@ import org.apache.hadoop.fs.FileContext;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
 
-@XmlRootElement
+@XmlRootElement(name = "conf")
 @XmlAccessorType(XmlAccessType.FIELD)
 public class ConfInfo {
 
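Without an explicit name, JAXB derives the root element from the decapitalized class name (confInfo); pinning it keeps the serialized element name stable as <conf>. A hedged sketch of how the annotation surfaces if the bean is marshalled directly (the confInfo instance here is assumed):

    // Sketch: marshalling a ConfInfo bean now emits a <conf> root element.
    JAXBContext ctx = JAXBContext.newInstance(ConfInfo.class);
    ctx.createMarshaller().marshal(confInfo, System.out);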
@@ -27,7 +27,7 @@ import org.apache.hadoop.mapreduce.v2.api.records.Counter;
 @XmlAccessorType(XmlAccessType.FIELD)
 public class CounterInfo {
 
-  protected String counterName;
+  protected String name;
   protected long totalCounterValue;
   protected long mapCounterValue;
   protected long reduceCounterValue;
@@ -36,7 +36,7 @@ public class CounterInfo {
   }
 
   public CounterInfo(Counter counter, Counter mc, Counter rc) {
-    this.counterName = counter.getName();
+    this.name = counter.getName();
     this.totalCounterValue = counter.getValue();
     this.mapCounterValue = mc == null ? 0 : mc.getValue();
     this.reduceCounterValue = rc == null ? 0 : rc.getValue();
@@ -46,14 +46,14 @@ public class JobCounterInfo {
   protected Counters reduce = null;
 
   protected String id;
-  protected ArrayList<CounterGroupInfo> counterGroups;
+  protected ArrayList<CounterGroupInfo> counterGroup;
 
   public JobCounterInfo() {
   }
 
   public JobCounterInfo(AppContext ctx, Job job) {
     getCounters(ctx, job);
-    counterGroups = new ArrayList<CounterGroupInfo>();
+    counterGroup = new ArrayList<CounterGroupInfo>();
     this.id = MRApps.toString(job.getID());
 
     int numGroups = 0;
@@ -68,7 +68,7 @@ public class JobCounterInfo {
           ++numGroups;
 
           CounterGroupInfo cginfo = new CounterGroupInfo(g.getName(), g, mg, rg);
-          counterGroups.add(cginfo);
+          counterGroup.add(cginfo);
         }
       }
     }
@@ -30,6 +30,7 @@ import javax.xml.bind.annotation.XmlTransient;
 
 import org.apache.hadoop.mapreduce.JobACL;
 import org.apache.hadoop.mapreduce.v2.api.records.JobReport;
+import org.apache.hadoop.mapreduce.v2.api.records.JobState;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
@@ -51,12 +52,12 @@ public class JobInfo {
   protected String id;
   protected String name;
   protected String user;
-  protected String state;
+  protected JobState state;
   protected int mapsTotal;
   protected int mapsCompleted;
-  protected float mapProgress;
   protected int reducesTotal;
   protected int reducesCompleted;
+  protected float mapProgress;
   protected float reduceProgress;
 
   @XmlTransient
@@ -83,18 +84,12 @@ public class JobInfo {
   protected int successfulMapAttempts = 0;
   protected ArrayList<ConfEntryInfo> acls;
 
-  @XmlTransient
-  protected int numMaps;
-  @XmlTransient
-  protected int numReduces;
-
   public JobInfo() {
   }
 
   public JobInfo(Job job, Boolean hasAccess) {
     this.id = MRApps.toString(job.getID());
     JobReport report = job.getReport();
-    countTasksAndAttempts(job);
     this.startTime = report.getStartTime();
     this.finishTime = report.getFinishTime();
     this.elapsedTime = Times.elapsed(this.startTime, this.finishTime);
@@ -103,7 +98,7 @@ public class JobInfo {
     }
     this.name = job.getName().toString();
     this.user = job.getUserName();
-    this.state = job.getState().toString();
+    this.state = job.getState();
     this.mapsTotal = job.getTotalMaps();
     this.mapsCompleted = job.getCompletedMaps();
     this.mapProgress = report.getMapProgress() * 100;
@@ -115,6 +110,9 @@ public class JobInfo {
 
     this.acls = new ArrayList<ConfEntryInfo>();
     if (hasAccess) {
+      this.diagnostics = "";
+      countTasksAndAttempts(job);
+
       this.uberized = job.isUber();
 
       List<String> diagnostics = job.getDiagnostics();
|
||||||
}
|
}
|
||||||
|
|
||||||
public String getState() {
|
public String getState() {
|
||||||
return this.state;
|
return this.state.toString();
|
||||||
}
|
}
|
||||||
|
|
||||||
public String getUser() {
|
public String getUser() {
|
||||||
|
@@ -272,8 +270,6 @@ public class JobInfo {
    * the job to get counts for.
    */
   private void countTasksAndAttempts(Job job) {
-    numReduces = 0;
-    numMaps = 0;
     final Map<TaskId, Task> tasks = job.getTasks();
     if (tasks == null) {
       return;
@@ -38,23 +38,22 @@ public class JobTaskAttemptCounterInfo {
   protected Counters total = null;

   protected String id;
-  protected ArrayList<TaskCounterGroupInfo> taskCounterGroups;
+  protected ArrayList<TaskCounterGroupInfo> taskAttemptCounterGroup;

   public JobTaskAttemptCounterInfo() {
   }

   public JobTaskAttemptCounterInfo(TaskAttempt taskattempt) {

-    long value = 0;
     this.id = MRApps.toString(taskattempt.getID());
     total = taskattempt.getCounters();
-    taskCounterGroups = new ArrayList<TaskCounterGroupInfo>();
+    taskAttemptCounterGroup = new ArrayList<TaskCounterGroupInfo>();
     if (total != null) {
       for (CounterGroup g : total.getAllCounterGroups().values()) {
         if (g != null) {
           TaskCounterGroupInfo cginfo = new TaskCounterGroupInfo(g.getName(), g);
           if (cginfo != null) {
-            taskCounterGroups.add(cginfo);
+            taskAttemptCounterGroup.add(cginfo);
           }
         }
       }

@@ -38,7 +38,7 @@ public class JobTaskCounterInfo {
   protected Counters total = null;

   protected String id;
-  protected ArrayList<TaskCounterGroupInfo> taskCounterGroups;
+  protected ArrayList<TaskCounterGroupInfo> taskCounterGroup;

   public JobTaskCounterInfo() {
   }
@@ -46,12 +46,12 @@ public class JobTaskCounterInfo {
   public JobTaskCounterInfo(Task task) {
     total = task.getCounters();
     this.id = MRApps.toString(task.getID());
-    taskCounterGroups = new ArrayList<TaskCounterGroupInfo>();
+    taskCounterGroup = new ArrayList<TaskCounterGroupInfo>();
     if (total != null) {
       for (CounterGroup g : total.getAllCounterGroups().values()) {
         if (g != null) {
           TaskCounterGroupInfo cginfo = new TaskCounterGroupInfo(g.getName(), g);
-          taskCounterGroups.add(cginfo);
+          taskCounterGroup.add(cginfo);
         }
       }
     }

@@ -25,6 +25,7 @@ import javax.xml.bind.annotation.XmlRootElement;
 import javax.xml.bind.annotation.XmlSeeAlso;
 import javax.xml.bind.annotation.XmlTransient;

+import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
 import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
@@ -43,7 +44,7 @@ public class TaskAttemptInfo {
   protected float progress;
   protected String id;
   protected String rack;
-  protected String state;
+  protected TaskAttemptState state;
   protected String nodeHttpAddress;
   protected String diagnostics;
   protected String type;
@@ -69,7 +70,7 @@ public class TaskAttemptInfo {
         .getAssignedContainerID());
     this.assignedContainer = ta.getAssignedContainerID();
     this.progress = ta.getProgress() * 100;
-    this.state = ta.getState().toString();
+    this.state = ta.getState();
     this.elapsedTime = Times
         .elapsed(this.startTime, this.finishTime, isRunning);
     if (this.elapsedTime == -1) {
@@ -95,7 +96,7 @@ public class TaskAttemptInfo {
   }

   public String getState() {
-    return this.state;
+    return this.state.toString();
   }

   public String getId() {

@@ -23,21 +23,21 @@ import javax.xml.bind.annotation.XmlAccessType;
 import javax.xml.bind.annotation.XmlAccessorType;
 import javax.xml.bind.annotation.XmlRootElement;

-@XmlRootElement(name = "taskattempts")
+@XmlRootElement(name = "taskAttempts")
 @XmlAccessorType(XmlAccessType.FIELD)
 public class TaskAttemptsInfo {

-  protected ArrayList<TaskAttemptInfo> taskattempt = new ArrayList<TaskAttemptInfo>();
+  protected ArrayList<TaskAttemptInfo> taskAttempt = new ArrayList<TaskAttemptInfo>();

   public TaskAttemptsInfo() {
   } // JAXB needs this

   public void add(TaskAttemptInfo taskattemptInfo) {
-    taskattempt.add(taskattemptInfo);
+    taskAttempt.add(taskattemptInfo);
   }

   public ArrayList<TaskAttemptInfo> getTaskAttempts() {
-    return taskattempt;
+    return taskAttempt;
   }

 }

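The `taskattempts`/`taskattempt` to `taskAttempts`/`taskAttempt` renames above are visible on the wire, not just in the Java names: with field access, JAXB derives the XML and JSON keys from the @XmlRootElement value and the field names, which is why the new TestAMWebServicesAttempts further down looks elements up by "taskAttempts" and "taskAttempt". A minimal standalone sketch of that mapping (the NamingSketch class name and the sample attempt id are ours, not part of the patch):

import java.util.ArrayList;

import javax.xml.bind.JAXBContext;
import javax.xml.bind.Marshaller;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlRootElement;

// With XmlAccessType.FIELD, each list entry is emitted under the field's
// name, so renaming the field renames every element in the payload.
@XmlRootElement(name = "taskAttempts")
@XmlAccessorType(XmlAccessType.FIELD)
public class NamingSketch {
  protected ArrayList<String> taskAttempt = new ArrayList<String>();

  public static void main(String[] args) throws Exception {
    NamingSketch info = new NamingSketch();
    info.taskAttempt.add("attempt_0_0001_m_000000_0"); // hypothetical id
    Marshaller m = JAXBContext.newInstance(NamingSketch.class)
        .createMarshaller();
    m.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, Boolean.TRUE);
    m.marshal(info, System.out); // prints <taskAttempts><taskAttempt>...</taskAttempts>
  }
}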
@@ -24,6 +24,7 @@ import javax.xml.bind.annotation.XmlTransient;

 import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskReport;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskState;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
 import org.apache.hadoop.mapreduce.v2.app.job.Task;
 import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
@@ -39,7 +40,7 @@ public class TaskInfo {
   protected long elapsedTime;
   protected float progress;
   protected String id;
-  protected String state;
+  protected TaskState state;
   protected String type;
   protected String successfulAttempt;

@@ -62,7 +63,7 @@ public class TaskInfo {
     if (this.elapsedTime == -1) {
       this.elapsedTime = 0;
     }
-    this.state = report.getTaskState().toString();
+    this.state = report.getTaskState();
     this.progress = report.getProgress() * 100;
     this.id = MRApps.toString(task.getID());
     this.taskNum = task.getID().getId();
@@ -79,7 +80,7 @@ public class TaskInfo {
   }

   public String getState() {
-    return this.state;
+    return this.state.toString();
   }

   public String getId() {

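JobInfo, TaskAttemptInfo and TaskInfo above all get the same treatment: the bean field keeps the typed state record and only the getter flattens it to a String, so the marshalled value can no longer drift from the enum. A hedged sketch of the pattern with a stand-in State enum (the real code uses the JobState/TaskState/TaskAttemptState records):

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlRootElement;

// Stand-in for the pattern: store the enum, stringify at the accessor.
@XmlRootElement(name = "task")
@XmlAccessorType(XmlAccessType.FIELD)
public class StateSketch {
  enum State { NEW, RUNNING, SUCCEEDED, FAILED, KILLED }

  // JAXB marshals the enum constant's name, e.g. <state>RUNNING</state>.
  protected State state = State.RUNNING;

  // Callers that expect a plain string still get one.
  public String getState() {
    return this.state.toString();
  }
}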
@@ -1,39 +1,41 @@
 /**
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements. See the NOTICE file
  * distributed with this work for additional information
  * regarding copyright ownership. The ASF licenses this file
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License. You may obtain a copy of the License at
  *
  *     http://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */

 package org.apache.hadoop.mapreduce.v2.app;

-import com.google.common.collect.Iterators;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
 import java.util.Collection;
 import java.util.Collections;
+import java.util.HashMap;
 import java.util.Iterator;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;

+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapred.JobACLsManager;
 import org.apache.hadoop.mapred.ShuffleHandler;
 import org.apache.hadoop.mapreduce.FileSystemCounter;
 import org.apache.hadoop.mapreduce.JobACL;
 import org.apache.hadoop.mapreduce.JobCounter;
+import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.mapreduce.TaskCounter;
+import org.apache.hadoop.mapreduce.TypeConverter;
 import org.apache.hadoop.mapreduce.v2.api.records.AMInfo;
 import org.apache.hadoop.mapreduce.v2.api.records.Counters;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
@@ -48,7 +50,6 @@ import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskReport;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskState;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
-import org.apache.hadoop.mapreduce.TypeConverter;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
 import org.apache.hadoop.mapreduce.v2.app.job.Task;
 import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
@@ -63,28 +64,33 @@ import org.apache.hadoop.yarn.api.records.ContainerId;
 import org.apache.hadoop.yarn.util.BuilderUtils;
 import org.apache.hadoop.yarn.util.Records;

+import com.google.common.collect.Iterators;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+
 public class MockJobs extends MockApps {
-  static final Iterator<JobState> JOB_STATES = Iterators.cycle(
-      JobState.values());
-  static final Iterator<TaskState> TASK_STATES = Iterators.cycle(
-      TaskState.values());
-  static final Iterator<TaskAttemptState> TASK_ATTEMPT_STATES = Iterators.cycle(
-      TaskAttemptState.values());
-  static final Iterator<TaskType> TASK_TYPES = Iterators.cycle(
-      TaskType.values());
-  static final Iterator<JobCounter> JOB_COUNTERS = Iterators.cycle(
-      JobCounter.values());
-  static final Iterator<FileSystemCounter> FS_COUNTERS = Iterators.cycle(
-      FileSystemCounter.values());
-  static final Iterator<TaskCounter> TASK_COUNTERS = Iterators.cycle(
-      TaskCounter.values());
+  static final Iterator<JobState> JOB_STATES = Iterators.cycle(JobState
+      .values());
+  static final Iterator<TaskState> TASK_STATES = Iterators.cycle(TaskState
+      .values());
+  static final Iterator<TaskAttemptState> TASK_ATTEMPT_STATES = Iterators
+      .cycle(TaskAttemptState.values());
+  static final Iterator<TaskType> TASK_TYPES = Iterators.cycle(TaskType
+      .values());
+  static final Iterator<JobCounter> JOB_COUNTERS = Iterators.cycle(JobCounter
+      .values());
+  static final Iterator<FileSystemCounter> FS_COUNTERS = Iterators
+      .cycle(FileSystemCounter.values());
+  static final Iterator<TaskCounter> TASK_COUNTERS = Iterators
+      .cycle(TaskCounter.values());
   static final Iterator<String> FS_SCHEMES = Iterators.cycle("FILE", "HDFS",
       "LAFS", "CEPH");
-  static final Iterator<String> USER_COUNTER_GROUPS = Iterators.cycle(
-      "com.company.project.subproject.component.subcomponent.UserDefinedSpecificSpecialTask$Counters",
-      "PigCounters");
-  static final Iterator<String> USER_COUNTERS = Iterators.cycle(
-      "counter1", "counter2", "counter3");
+  static final Iterator<String> USER_COUNTER_GROUPS = Iterators
+      .cycle(
+          "com.company.project.subproject.component.subcomponent.UserDefinedSpecificSpecialTask$Counters",
+          "PigCounters");
+  static final Iterator<String> USER_COUNTERS = Iterators.cycle("counter1",
+      "counter2", "counter3");
   static final Iterator<Phase> PHASES = Iterators.cycle(Phase.values());
   static final Iterator<String> DIAGS = Iterators.cycle(
       "Error: java.lang.OutOfMemoryError: Java heap space",
@@ -101,8 +107,7 @@ public class MockJobs extends MockApps {
   }

   public static Map<JobId, Job> newJobs(ApplicationId appID, int numJobsPerApp,
-      int numTasksPerJob,
-      int numAttemptsPerTask) {
+      int numTasksPerJob, int numAttemptsPerTask) {
     Map<JobId, Job> map = Maps.newHashMap();
     for (int j = 0; j < numJobsPerApp; ++j) {
       Job job = newJob(appID, j, numTasksPerJob, numAttemptsPerTask);
@@ -121,10 +126,12 @@ public class MockJobs extends MockApps {
   public static JobReport newJobReport(JobId id) {
     JobReport report = Records.newRecord(JobReport.class);
     report.setJobId(id);
-    report.setStartTime(System.currentTimeMillis() - (int)(Math.random() * DT));
-    report.setFinishTime(System.currentTimeMillis() + (int)(Math.random() * DT) + 1);
-    report.setMapProgress((float)Math.random());
-    report.setReduceProgress((float)Math.random());
+    report
+        .setStartTime(System.currentTimeMillis() - (int) (Math.random() * DT));
+    report.setFinishTime(System.currentTimeMillis()
+        + (int) (Math.random() * DT) + 1);
+    report.setMapProgress((float) Math.random());
+    report.setReduceProgress((float) Math.random());
     report.setJobState(JOB_STATES.next());
     return report;
   }
@@ -132,9 +139,11 @@ public class MockJobs extends MockApps {
   public static TaskReport newTaskReport(TaskId id) {
     TaskReport report = Records.newRecord(TaskReport.class);
     report.setTaskId(id);
-    report.setStartTime(System.currentTimeMillis() - (int)(Math.random() * DT));
-    report.setFinishTime(System.currentTimeMillis() + (int)(Math.random() * DT) + 1);
-    report.setProgress((float)Math.random());
+    report
+        .setStartTime(System.currentTimeMillis() - (int) (Math.random() * DT));
+    report.setFinishTime(System.currentTimeMillis()
+        + (int) (Math.random() * DT) + 1);
+    report.setProgress((float) Math.random());
     report.setCounters(newCounters());
     report.setTaskState(TASK_STATES.next());
     return report;
@@ -143,41 +152,42 @@ public class MockJobs extends MockApps {
   public static TaskAttemptReport newTaskAttemptReport(TaskAttemptId id) {
     TaskAttemptReport report = Records.newRecord(TaskAttemptReport.class);
     report.setTaskAttemptId(id);
-    report.setStartTime(System.currentTimeMillis() - (int)(Math.random() * DT));
-    report.setFinishTime(System.currentTimeMillis() + (int)(Math.random() * DT) + 1);
+    report
+        .setStartTime(System.currentTimeMillis() - (int) (Math.random() * DT));
+    report.setFinishTime(System.currentTimeMillis()
+        + (int) (Math.random() * DT) + 1);
     report.setPhase(PHASES.next());
     report.setTaskAttemptState(TASK_ATTEMPT_STATES.next());
-    report.setProgress((float)Math.random());
+    report.setProgress((float) Math.random());
     report.setCounters(newCounters());
     return report;
   }

   @SuppressWarnings("deprecation")
   public static Counters newCounters() {
-    org.apache.hadoop.mapred.Counters hc =
-        new org.apache.hadoop.mapred.Counters();
+    org.apache.hadoop.mapred.Counters hc = new org.apache.hadoop.mapred.Counters();
     for (JobCounter c : JobCounter.values()) {
-      hc.findCounter(c).setValue((long)(Math.random() * 1000));
+      hc.findCounter(c).setValue((long) (Math.random() * 1000));
     }
     for (TaskCounter c : TaskCounter.values()) {
-      hc.findCounter(c).setValue((long)(Math.random() * 1000));
+      hc.findCounter(c).setValue((long) (Math.random() * 1000));
     }
     int nc = FileSystemCounter.values().length * 4;
     for (int i = 0; i < nc; ++i) {
       for (FileSystemCounter c : FileSystemCounter.values()) {
-        hc.findCounter(FS_SCHEMES.next(), c).
-            setValue((long)(Math.random() * DT));
+        hc.findCounter(FS_SCHEMES.next(), c).setValue(
+            (long) (Math.random() * DT));
       }
     }
     for (int i = 0; i < 2 * 3; ++i) {
-      hc.findCounter(USER_COUNTER_GROUPS.next(), USER_COUNTERS.next()).
-          setValue((long)(Math.random() * 100000));
+      hc.findCounter(USER_COUNTER_GROUPS.next(), USER_COUNTERS.next())
+          .setValue((long) (Math.random() * 100000));
     }
     return TypeConverter.toYarn(hc);
   }

   public static Map<TaskAttemptId, TaskAttempt> newTaskAttempts(TaskId tid,
       int m) {
     Map<TaskAttemptId, TaskAttempt> map = Maps.newHashMap();
     for (int i = 0; i < m; ++i) {
       TaskAttempt ta = newTaskAttempt(tid, i);
@@ -237,9 +247,10 @@ public class MockJobs extends MockApps {
       @Override
       public boolean isFinished() {
         switch (report.getTaskAttemptState()) {
         case SUCCEEDED:
         case FAILED:
-        case KILLED: return true;
+        case KILLED:
+          return true;
         }
         return false;
       }
@@ -247,8 +258,8 @@ public class MockJobs extends MockApps {
       @Override
       public ContainerId getAssignedContainerID() {
         ContainerId id = Records.newRecord(ContainerId.class);
-        ApplicationAttemptId appAttemptId =
-            Records.newRecord(ApplicationAttemptId.class);
+        ApplicationAttemptId appAttemptId = Records
+            .newRecord(ApplicationAttemptId.class);
         appAttemptId.setApplicationId(taid.getTaskId().getJobId().getAppId());
         appAttemptId.setAttemptId(0);
         id.setApplicationAttemptId(appAttemptId);
@@ -280,10 +291,10 @@ public class MockJobs extends MockApps {
         return 0;
       }

       @Override
       public String getNodeRackName() {
         return "/default-rack";
       }
     };
   }

@@ -342,9 +353,10 @@ public class MockJobs extends MockApps {
       @Override
       public boolean isFinished() {
         switch (report.getTaskState()) {
         case SUCCEEDED:
         case KILLED:
-        case FAILED: return true;
+        case FAILED:
+          return true;
         }
         return false;
       }
@@ -398,12 +410,26 @@ public class MockJobs extends MockApps {
   }

   public static Job newJob(ApplicationId appID, int i, int n, int m) {
+    return newJob(appID, i, n, m, null);
+  }
+
+  public static Job newJob(ApplicationId appID, int i, int n, int m, Path confFile) {
     final JobId id = newJobID(appID, i);
     final String name = newJobName();
     final JobReport report = newJobReport(id);
     final Map<TaskId, Task> tasks = newTasks(id, n, m);
     final TaskCount taskCount = getTaskCount(tasks.values());
     final Counters counters = getCounters(tasks.values());
+    final Path configFile = confFile;
+
+    Map<JobACL, AccessControlList> tmpJobACLs = new HashMap<JobACL, AccessControlList>();
+    Configuration conf = new Configuration();
+    conf.set(JobACL.VIEW_JOB.getAclName(), "testuser");
+    conf.setBoolean(MRConfig.MR_ACLS_ENABLED, true);
+
+    JobACLsManager aclsManager = new JobACLsManager(conf);
+    tmpJobACLs = aclsManager.constructJobACLs(conf);
+    final Map<JobACL, AccessControlList> jobACLs = tmpJobACLs;
     return new Job() {
       @Override
       public JobId getID() {
@@ -483,7 +509,7 @@ public class MockJobs extends MockApps {

       @Override
       public List<String> getDiagnostics() {
-        return Collections.<String>emptyList();
+        return Collections.<String> emptyList();
       }

       @Override
@@ -504,12 +530,12 @@ public class MockJobs extends MockApps {

       @Override
       public Path getConfFile() {
-        throw new UnsupportedOperationException("Not supported yet.");
+        return configFile;
       }

       @Override
       public Map<JobACL, AccessControlList> getJobACLs() {
-        return Collections.<JobACL, AccessControlList>emptyMap();
+        return jobACLs;
       }

       @Override
@@ -523,9 +549,8 @@ public class MockJobs extends MockApps {
   }

   private static AMInfo createAMInfo(int attempt) {
-    ApplicationAttemptId appAttemptId =
-        BuilderUtils.newApplicationAttemptId(
-            BuilderUtils.newApplicationId(100, 1), attempt);
+    ApplicationAttemptId appAttemptId = BuilderUtils.newApplicationAttemptId(
+        BuilderUtils.newApplicationId(100, 1), attempt);
     ContainerId containerId = BuilderUtils.newContainerId(appAttemptId, 1);
     return MRBuilderUtils.newAMInfo(appAttemptId, System.currentTimeMillis(),
         containerId, NM_HOST, NM_PORT, NM_HTTP_PORT);

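MockJobs leans heavily on Guava's Iterators.cycle(...), which returns an endless round-robin iterator; each generated mock report simply calls next() to receive the following state, counter group or diagnostic. A small self-contained sketch of the idiom (the CycleSketch class and its stand-in enum are ours, not the real JobState record):

import java.util.Iterator;

import com.google.common.collect.Iterators;

public class CycleSketch {
  // Stand-in for the real JobState record used by MockJobs.
  enum JobState { NEW, RUNNING, SUCCEEDED, FAILED, KILLED }

  // Iterators.cycle(...) wraps around forever, so next() never runs out.
  static final Iterator<JobState> JOB_STATES = Iterators.cycle(JobState
      .values());

  public static void main(String[] args) {
    for (int i = 0; i < 7; ++i) {
      System.out.println(JOB_STATES.next()); // NEW, RUNNING, ... then NEW again
    }
  }
}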
@@ -0,0 +1,359 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.mapreduce.v2.app.webapp;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.StringReader;
+import java.util.Map;
+
+import javax.ws.rs.core.MediaType;
+import javax.xml.parsers.DocumentBuilder;
+import javax.xml.parsers.DocumentBuilderFactory;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.mapreduce.v2.api.records.JobId;
+import org.apache.hadoop.mapreduce.v2.app.AppContext;
+import org.apache.hadoop.mapreduce.v2.app.MockJobs;
+import org.apache.hadoop.mapreduce.v2.app.job.Job;
+import org.apache.hadoop.yarn.Clock;
+import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.event.EventHandler;
+import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
+import org.apache.hadoop.yarn.webapp.WebServicesTestUtils;
+import org.codehaus.jettison.json.JSONException;
+import org.codehaus.jettison.json.JSONObject;
+import org.junit.Before;
+import org.junit.Test;
+import org.w3c.dom.Document;
+import org.w3c.dom.Element;
+import org.w3c.dom.NodeList;
+import org.xml.sax.InputSource;
+
+import com.google.inject.Guice;
+import com.google.inject.Injector;
+import com.google.inject.servlet.GuiceServletContextListener;
+import com.google.inject.servlet.ServletModule;
+import com.sun.jersey.api.client.ClientResponse;
+import com.sun.jersey.api.client.ClientResponse.Status;
+import com.sun.jersey.api.client.UniformInterfaceException;
+import com.sun.jersey.api.client.WebResource;
+import com.sun.jersey.guice.spi.container.servlet.GuiceContainer;
+import com.sun.jersey.test.framework.JerseyTest;
+import com.sun.jersey.test.framework.WebAppDescriptor;
+
+/**
+ * Test the MapReduce Application master info web services api's. Also test
+ * non-existent urls.
+ *
+ *  /ws/v1/mapreduce
+ *  /ws/v1/mapreduce/info
+ */
+public class TestAMWebServices extends JerseyTest {
+
+  private static Configuration conf = new Configuration();
+  private static TestAppContext appContext;
+
+  static class TestAppContext implements AppContext {
+    final ApplicationAttemptId appAttemptID;
+    final ApplicationId appID;
+    final String user = MockJobs.newUserName();
+    final Map<JobId, Job> jobs;
+    final long startTime = System.currentTimeMillis();
+
+    TestAppContext(int appid, int numJobs, int numTasks, int numAttempts) {
+      appID = MockJobs.newAppID(appid);
+      appAttemptID = MockJobs.newAppAttemptID(appID, 0);
+      jobs = MockJobs.newJobs(appID, numJobs, numTasks, numAttempts);
+    }
+
+    TestAppContext() {
+      this(0, 1, 1, 1);
+    }
+
+    @Override
+    public ApplicationAttemptId getApplicationAttemptId() {
+      return appAttemptID;
+    }
+
+    @Override
+    public ApplicationId getApplicationID() {
+      return appID;
+    }
+
+    @Override
+    public CharSequence getUser() {
+      return user;
+    }
+
+    @Override
+    public Job getJob(JobId jobID) {
+      return jobs.get(jobID);
+    }
+
+    @Override
+    public Map<JobId, Job> getAllJobs() {
+      return jobs; // OK
+    }
+
+    @SuppressWarnings("rawtypes")
+    @Override
+    public EventHandler getEventHandler() {
+      return null;
+    }
+
+    @Override
+    public Clock getClock() {
+      return null;
+    }
+
+    @Override
+    public String getApplicationName() {
+      return "TestApp";
+    }
+
+    @Override
+    public long getStartTime() {
+      return startTime;
+    }
+  }
+
+  private Injector injector = Guice.createInjector(new ServletModule() {
+    @Override
+    protected void configureServlets() {
+
+      appContext = new TestAppContext();
+      bind(JAXBContextResolver.class);
+      bind(AMWebServices.class);
+      bind(GenericExceptionHandler.class);
+      bind(AppContext.class).toInstance(appContext);
+      bind(Configuration.class).toInstance(conf);
+
+      serve("/*").with(GuiceContainer.class);
+    }
+  });
+
+  public class GuiceServletConfig extends GuiceServletContextListener {
+
+    @Override
+    protected Injector getInjector() {
+      return injector;
+    }
+  }
+
+  @Before
+  @Override
+  public void setUp() throws Exception {
+    super.setUp();
+  }
+
+  public TestAMWebServices() {
+    super(new WebAppDescriptor.Builder(
+        "org.apache.hadoop.mapreduce.v2.app.webapp")
+        .contextListenerClass(GuiceServletConfig.class)
+        .filterClass(com.google.inject.servlet.GuiceFilter.class)
+        .contextPath("jersey-guice-filter").servletPath("/").build());
+  }
+
+  @Test
+  public void testAM() throws JSONException, Exception {
+    WebResource r = resource();
+    ClientResponse response = r.path("ws").path("v1").path("mapreduce")
+        .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+    JSONObject json = response.getEntity(JSONObject.class);
+    assertEquals("incorrect number of elements", 1, json.length());
+    verifyAMInfo(json.getJSONObject("info"), appContext);
+  }
+
+  @Test
+  public void testAMSlash() throws JSONException, Exception {
+    WebResource r = resource();
+    ClientResponse response = r.path("ws").path("v1").path("mapreduce/")
+        .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+    JSONObject json = response.getEntity(JSONObject.class);
+    assertEquals("incorrect number of elements", 1, json.length());
+    verifyAMInfo(json.getJSONObject("info"), appContext);
+  }
+
+  @Test
+  public void testAMDefault() throws JSONException, Exception {
+    WebResource r = resource();
+    ClientResponse response = r.path("ws").path("v1").path("mapreduce/")
+        .get(ClientResponse.class);
+    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+    JSONObject json = response.getEntity(JSONObject.class);
+    assertEquals("incorrect number of elements", 1, json.length());
+    verifyAMInfo(json.getJSONObject("info"), appContext);
+  }
+
+  @Test
+  public void testAMXML() throws JSONException, Exception {
+    WebResource r = resource();
+    ClientResponse response = r.path("ws").path("v1").path("mapreduce")
+        .accept(MediaType.APPLICATION_XML).get(ClientResponse.class);
+    assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
+    String xml = response.getEntity(String.class);
+    verifyAMInfoXML(xml, appContext);
+  }
+
+  @Test
+  public void testInfo() throws JSONException, Exception {
+    WebResource r = resource();
+    ClientResponse response = r.path("ws").path("v1").path("mapreduce")
+        .path("info").accept(MediaType.APPLICATION_JSON)
+        .get(ClientResponse.class);
+    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+    JSONObject json = response.getEntity(JSONObject.class);
+    assertEquals("incorrect number of elements", 1, json.length());
+    verifyAMInfo(json.getJSONObject("info"), appContext);
+  }
+
+  @Test
+  public void testInfoSlash() throws JSONException, Exception {
+    WebResource r = resource();
+    ClientResponse response = r.path("ws").path("v1").path("mapreduce")
+        .path("info/").accept(MediaType.APPLICATION_JSON)
+        .get(ClientResponse.class);
+    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+    JSONObject json = response.getEntity(JSONObject.class);
+    assertEquals("incorrect number of elements", 1, json.length());
+    verifyAMInfo(json.getJSONObject("info"), appContext);
+  }
+
+  @Test
+  public void testInfoDefault() throws JSONException, Exception {
+    WebResource r = resource();
+    ClientResponse response = r.path("ws").path("v1").path("mapreduce")
+        .path("info/").get(ClientResponse.class);
+    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+    JSONObject json = response.getEntity(JSONObject.class);
+    assertEquals("incorrect number of elements", 1, json.length());
+    verifyAMInfo(json.getJSONObject("info"), appContext);
+  }
+
+  @Test
+  public void testInfoXML() throws JSONException, Exception {
+    WebResource r = resource();
+    ClientResponse response = r.path("ws").path("v1").path("mapreduce")
+        .path("info/").accept(MediaType.APPLICATION_XML)
+        .get(ClientResponse.class);
+    assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
+    String xml = response.getEntity(String.class);
+    verifyAMInfoXML(xml, appContext);
+  }
+
+  @Test
+  public void testInvalidUri() throws JSONException, Exception {
+    WebResource r = resource();
+    String responseStr = "";
+    try {
+      responseStr = r.path("ws").path("v1").path("mapreduce").path("bogus")
+          .accept(MediaType.APPLICATION_JSON).get(String.class);
+      fail("should have thrown exception on invalid uri");
+    } catch (UniformInterfaceException ue) {
+      ClientResponse response = ue.getResponse();
+      assertEquals(Status.NOT_FOUND, response.getClientResponseStatus());
+      WebServicesTestUtils.checkStringMatch(
+          "error string exists and shouldn't", "", responseStr);
+    }
+  }
+
+  @Test
+  public void testInvalidUri2() throws JSONException, Exception {
+    WebResource r = resource();
+    String responseStr = "";
+    try {
+      responseStr = r.path("ws").path("v1").path("invalid")
+          .accept(MediaType.APPLICATION_JSON).get(String.class);
+      fail("should have thrown exception on invalid uri");
+    } catch (UniformInterfaceException ue) {
+      ClientResponse response = ue.getResponse();
+      assertEquals(Status.NOT_FOUND, response.getClientResponseStatus());
+      WebServicesTestUtils.checkStringMatch(
+          "error string exists and shouldn't", "", responseStr);
+    }
+  }
+
+  @Test
+  public void testInvalidAccept() throws JSONException, Exception {
+    WebResource r = resource();
+    String responseStr = "";
+    try {
+      responseStr = r.path("ws").path("v1").path("mapreduce")
+          .accept(MediaType.TEXT_PLAIN).get(String.class);
+      fail("should have thrown exception on invalid uri");
+    } catch (UniformInterfaceException ue) {
+      ClientResponse response = ue.getResponse();
+      assertEquals(Status.INTERNAL_SERVER_ERROR,
+          response.getClientResponseStatus());
+      WebServicesTestUtils.checkStringMatch(
+          "error string exists and shouldn't", "", responseStr);
+    }
+  }
+
+  public void verifyAMInfo(JSONObject info, TestAppContext ctx)
+      throws JSONException {
+    assertEquals("incorrect number of elements", 5, info.length());
+
+    verifyAMInfoGeneric(ctx, info.getString("appId"), info.getString("user"),
+        info.getString("name"), info.getLong("startedOn"),
+        info.getLong("elapsedTime"));
+  }
+
+  public void verifyAMInfoXML(String xml, TestAppContext ctx)
+      throws JSONException, Exception {
+    DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
+    DocumentBuilder db = dbf.newDocumentBuilder();
+    InputSource is = new InputSource();
+    is.setCharacterStream(new StringReader(xml));
+    Document dom = db.parse(is);
+    NodeList nodes = dom.getElementsByTagName("info");
+    assertEquals("incorrect number of elements", 1, nodes.getLength());
+
+    for (int i = 0; i < nodes.getLength(); i++) {
+      Element element = (Element) nodes.item(i);
+      verifyAMInfoGeneric(ctx,
+          WebServicesTestUtils.getXmlString(element, "appId"),
+          WebServicesTestUtils.getXmlString(element, "user"),
+          WebServicesTestUtils.getXmlString(element, "name"),
+          WebServicesTestUtils.getXmlLong(element, "startedOn"),
+          WebServicesTestUtils.getXmlLong(element, "elapsedTime"));
+    }
+  }
+
+  public void verifyAMInfoGeneric(TestAppContext ctx, String id, String user,
+      String name, long startedOn, long elapsedTime) {
+
+    WebServicesTestUtils.checkStringMatch("id", ctx.getApplicationID()
+        .toString(), id);
+    WebServicesTestUtils.checkStringMatch("user", ctx.getUser().toString(),
+        user);
+    WebServicesTestUtils.checkStringMatch("name", ctx.getApplicationName(),
+        name);
+
+    assertEquals("startedOn incorrect", ctx.getStartTime(), startedOn);
+    assertTrue("elapsedTime not greater then 0", (elapsedTime > 0));
+
+  }
+}

@ -0,0 +1,732 @@
|
||||||
|
/**
|
||||||
|
* Licensed to the Apache Software Foundation (ASF) under one
|
||||||
|
* or more contributor license agreements. See the NOTICE file
|
||||||
|
* distributed with this work for additional information
|
||||||
|
* regarding copyright ownership. The ASF licenses this file
|
||||||
|
* to you under the Apache License, Version 2.0 (the
|
||||||
|
* "License"); you may not use this file except in compliance
|
||||||
|
* with the License. You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing, software
|
||||||
|
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
* See the License for the specific language governing permissions and
|
||||||
|
* limitations under the License.
|
||||||
|
*/
|
||||||
|
|
||||||
|
package org.apache.hadoop.mapreduce.v2.app.webapp;
|
||||||
|
|
||||||
|
import static org.junit.Assert.assertEquals;
|
||||||
|
import static org.junit.Assert.assertNotNull;
|
||||||
|
import static org.junit.Assert.assertTrue;
|
||||||
|
import static org.junit.Assert.fail;
|
||||||
|
|
||||||
|
import java.io.StringReader;
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.Map;
|
||||||
|
|
||||||
|
import javax.ws.rs.core.MediaType;
|
||||||
|
import javax.xml.parsers.DocumentBuilder;
|
||||||
|
import javax.xml.parsers.DocumentBuilderFactory;
|
||||||
|
|
||||||
|
import org.apache.hadoop.conf.Configuration;
|
||||||
|
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
|
||||||
|
import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
|
||||||
|
import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
|
||||||
|
import org.apache.hadoop.mapreduce.v2.app.AppContext;
|
||||||
|
import org.apache.hadoop.mapreduce.v2.app.MockJobs;
|
||||||
|
import org.apache.hadoop.mapreduce.v2.app.job.Job;
|
||||||
|
import org.apache.hadoop.mapreduce.v2.app.job.Task;
|
||||||
|
import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
|
||||||
|
import org.apache.hadoop.mapreduce.v2.util.MRApps;
|
||||||
|
import org.apache.hadoop.yarn.Clock;
|
||||||
|
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
|
||||||
|
import org.apache.hadoop.yarn.api.records.ApplicationId;
|
||||||
|
import org.apache.hadoop.yarn.event.EventHandler;
|
||||||
|
import org.apache.hadoop.yarn.util.ConverterUtils;
|
||||||
|
import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
|
||||||
|
import org.apache.hadoop.yarn.webapp.WebServicesTestUtils;
|
||||||
|
import org.codehaus.jettison.json.JSONArray;
|
||||||
|
import org.codehaus.jettison.json.JSONException;
|
||||||
|
import org.codehaus.jettison.json.JSONObject;
|
||||||
|
import org.junit.Before;
|
||||||
|
import org.junit.Test;
|
||||||
|
import org.w3c.dom.Document;
|
||||||
|
import org.w3c.dom.Element;
|
||||||
|
import org.w3c.dom.NodeList;
|
||||||
|
import org.xml.sax.InputSource;
|
||||||
|
|
||||||
|
import com.google.inject.Guice;
|
||||||
|
import com.google.inject.Injector;
|
||||||
|
import com.google.inject.servlet.GuiceServletContextListener;
|
||||||
|
import com.google.inject.servlet.ServletModule;
|
||||||
|
import com.sun.jersey.api.client.ClientResponse;
|
||||||
|
import com.sun.jersey.api.client.UniformInterfaceException;
|
||||||
|
import com.sun.jersey.api.client.WebResource;
|
||||||
|
import com.sun.jersey.api.client.ClientResponse.Status;
|
||||||
|
import com.sun.jersey.guice.spi.container.servlet.GuiceContainer;
|
||||||
|
import com.sun.jersey.test.framework.JerseyTest;
|
||||||
|
import com.sun.jersey.test.framework.WebAppDescriptor;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Test the app master web service Rest API for getting task attempts, a
|
||||||
|
* specific task attempt, and task attempt counters
|
||||||
|
*
|
||||||
|
* /ws/v1/mapreduce/jobs/{jobid}/tasks/{taskid}/attempts
|
||||||
|
* /ws/v1/mapreduce/jobs/{jobid}/tasks/{taskid}/attempts/{attemptid}
|
||||||
|
* /ws/v1/mapreduce/jobs/{jobid}/tasks/{taskid}/attempts/{attemptid}/counters
|
||||||
|
*/
|
||||||
|
public class TestAMWebServicesAttempts extends JerseyTest {
|
||||||
|
|
||||||
|
private static Configuration conf = new Configuration();
|
||||||
|
private static TestAppContext appContext;
|
||||||
|
|
||||||
|
static class TestAppContext implements AppContext {
|
||||||
|
final ApplicationAttemptId appAttemptID;
|
||||||
|
final ApplicationId appID;
|
||||||
|
final String user = MockJobs.newUserName();
|
||||||
|
final Map<JobId, Job> jobs;
|
||||||
|
final long startTime = System.currentTimeMillis();
|
||||||
|
|
||||||
|
TestAppContext(int appid, int numJobs, int numTasks, int numAttempts) {
|
||||||
|
appID = MockJobs.newAppID(appid);
|
||||||
|
appAttemptID = MockJobs.newAppAttemptID(appID, 0);
|
||||||
|
jobs = MockJobs.newJobs(appID, numJobs, numTasks, numAttempts);
|
||||||
|
}
|
||||||
|
|
||||||
|
TestAppContext() {
|
||||||
|
this(0, 1, 2, 1);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public ApplicationAttemptId getApplicationAttemptId() {
|
||||||
|
return appAttemptID;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public ApplicationId getApplicationID() {
|
||||||
|
return appID;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public CharSequence getUser() {
|
||||||
|
return user;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public Job getJob(JobId jobID) {
|
||||||
|
return jobs.get(jobID);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public Map<JobId, Job> getAllJobs() {
|
||||||
|
return jobs; // OK
|
||||||
|
}
|
||||||
|
|
||||||
|
@SuppressWarnings("rawtypes")
|
||||||
|
@Override
|
||||||
|
public EventHandler getEventHandler() {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public Clock getClock() {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public String getApplicationName() {
|
||||||
|
return "TestApp";
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public long getStartTime() {
|
||||||
|
return startTime;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private Injector injector = Guice.createInjector(new ServletModule() {
|
||||||
|
@Override
|
||||||
|
protected void configureServlets() {
|
||||||
|
|
||||||
|
appContext = new TestAppContext();
|
||||||
|
bind(JAXBContextResolver.class);
|
||||||
|
bind(AMWebServices.class);
|
||||||
|
bind(GenericExceptionHandler.class);
|
||||||
|
bind(AppContext.class).toInstance(appContext);
|
||||||
|
bind(Configuration.class).toInstance(conf);
|
||||||
|
|
||||||
|
serve("/*").with(GuiceContainer.class);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
public class GuiceServletConfig extends GuiceServletContextListener {
|
||||||
|
|
||||||
|
@Override
|
||||||
|
protected Injector getInjector() {
|
||||||
|
return injector;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Before
|
||||||
|
@Override
|
||||||
|
public void setUp() throws Exception {
|
||||||
|
super.setUp();
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
public TestAMWebServicesAttempts() {
|
||||||
|
super(new WebAppDescriptor.Builder(
|
||||||
|
"org.apache.hadoop.mapreduce.v2.app.webapp")
|
||||||
|
.contextListenerClass(GuiceServletConfig.class)
|
||||||
|
.filterClass(com.google.inject.servlet.GuiceFilter.class)
|
||||||
|
.contextPath("jersey-guice-filter").servletPath("/").build());
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void testTaskAttempts() throws JSONException, Exception {
|
||||||
|
WebResource r = resource();
|
||||||
|
Map<JobId, Job> jobsMap = appContext.getAllJobs();
|
||||||
|
for (JobId id : jobsMap.keySet()) {
|
||||||
|
String jobId = MRApps.toString(id);
|
||||||
|
for (Task task : jobsMap.get(id).getTasks().values()) {
|
||||||
|
|
||||||
|
String tid = MRApps.toString(task.getID());
|
||||||
|
ClientResponse response = r.path("ws").path("v1").path("mapreduce")
|
||||||
|
.path("jobs").path(jobId).path("tasks").path(tid).path("attempts")
|
||||||
|
.accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
|
||||||
|
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
|
||||||
|
JSONObject json = response.getEntity(JSONObject.class);
|
||||||
|
verifyAMTaskAttempts(json, task);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void testTaskAttemptsSlash() throws JSONException, Exception {
|
||||||
|
WebResource r = resource();
|
||||||
|
Map<JobId, Job> jobsMap = appContext.getAllJobs();
|
||||||
|
for (JobId id : jobsMap.keySet()) {
|
||||||
|
String jobId = MRApps.toString(id);
|
||||||
|
for (Task task : jobsMap.get(id).getTasks().values()) {
|
||||||
|
|
||||||
|
String tid = MRApps.toString(task.getID());
|
||||||
|
ClientResponse response = r.path("ws").path("v1").path("mapreduce")
|
||||||
|
.path("jobs").path(jobId).path("tasks").path(tid).path("attempts/")
|
||||||
|
.accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
|
||||||
|
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
|
||||||
|
JSONObject json = response.getEntity(JSONObject.class);
|
||||||
|
verifyAMTaskAttempts(json, task);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void testTaskAttemptsDefault() throws JSONException, Exception {
|
||||||
|
WebResource r = resource();
|
||||||
|
Map<JobId, Job> jobsMap = appContext.getAllJobs();
|
||||||
|
for (JobId id : jobsMap.keySet()) {
|
||||||
|
String jobId = MRApps.toString(id);
|
||||||
|
for (Task task : jobsMap.get(id).getTasks().values()) {
|
||||||
|
|
||||||
|
String tid = MRApps.toString(task.getID());
|
||||||
|
ClientResponse response = r.path("ws").path("v1").path("mapreduce")
|
||||||
|
.path("jobs").path(jobId).path("tasks").path(tid).path("attempts")
|
||||||
|
.get(ClientResponse.class);
|
||||||
|
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
|
||||||
|
JSONObject json = response.getEntity(JSONObject.class);
|
||||||
|
verifyAMTaskAttempts(json, task);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
  @Test
  public void testTaskAttemptsXML() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      for (Task task : jobsMap.get(id).getTasks().values()) {

        String tid = MRApps.toString(task.getID());
        ClientResponse response = r.path("ws").path("v1").path("mapreduce")
            .path("jobs").path(jobId).path("tasks").path(tid).path("attempts")
            .accept(MediaType.APPLICATION_XML).get(ClientResponse.class);

        assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
        String xml = response.getEntity(String.class);
        DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
        DocumentBuilder db = dbf.newDocumentBuilder();
        InputSource is = new InputSource();
        is.setCharacterStream(new StringReader(xml));
        Document dom = db.parse(is);
        NodeList attempts = dom.getElementsByTagName("taskAttempts");
        assertEquals("incorrect number of elements", 1, attempts.getLength());

        NodeList nodes = dom.getElementsByTagName("taskAttempt");
        verifyAMTaskAttemptsXML(nodes, task);
      }
    }
  }

  @Test
  public void testTaskAttemptId() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();

    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);

      for (Task task : jobsMap.get(id).getTasks().values()) {
        String tid = MRApps.toString(task.getID());

        for (TaskAttempt att : task.getAttempts().values()) {
          TaskAttemptId attemptid = att.getID();
          String attid = MRApps.toString(attemptid);

          ClientResponse response = r.path("ws").path("v1").path("mapreduce")
              .path("jobs").path(jobId).path("tasks").path(tid)
              .path("attempts").path(attid).accept(MediaType.APPLICATION_JSON)
              .get(ClientResponse.class);
          assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
          JSONObject json = response.getEntity(JSONObject.class);
          assertEquals("incorrect number of elements", 1, json.length());
          JSONObject info = json.getJSONObject("taskAttempt");
          verifyAMTaskAttempt(info, att, task.getType());
        }
      }
    }
  }

  @Test
  public void testTaskAttemptIdSlash() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();

    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);

      for (Task task : jobsMap.get(id).getTasks().values()) {
        String tid = MRApps.toString(task.getID());

        for (TaskAttempt att : task.getAttempts().values()) {
          TaskAttemptId attemptid = att.getID();
          String attid = MRApps.toString(attemptid);

          ClientResponse response = r.path("ws").path("v1").path("mapreduce")
              .path("jobs").path(jobId).path("tasks").path(tid)
              .path("attempts").path(attid + "/")
              .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
          assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
          JSONObject json = response.getEntity(JSONObject.class);
          assertEquals("incorrect number of elements", 1, json.length());
          JSONObject info = json.getJSONObject("taskAttempt");
          verifyAMTaskAttempt(info, att, task.getType());
        }
      }
    }
  }

  @Test
  public void testTaskAttemptIdDefault() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();

    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);

      for (Task task : jobsMap.get(id).getTasks().values()) {
        String tid = MRApps.toString(task.getID());

        for (TaskAttempt att : task.getAttempts().values()) {
          TaskAttemptId attemptid = att.getID();
          String attid = MRApps.toString(attemptid);

          ClientResponse response = r.path("ws").path("v1").path("mapreduce")
              .path("jobs").path(jobId).path("tasks").path(tid)
              .path("attempts").path(attid).get(ClientResponse.class);
          assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
          JSONObject json = response.getEntity(JSONObject.class);
          assertEquals("incorrect number of elements", 1, json.length());
          JSONObject info = json.getJSONObject("taskAttempt");
          verifyAMTaskAttempt(info, att, task.getType());
        }
      }
    }
  }

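  // The Slash and Default variants above are deliberate near-copies of
  // testTaskAttemptId: the first checks that a trailing "/" on the URI still
  // resolves to the same resource, the second that the service falls back to
  // JSON when the request carries no Accept header at all.
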
  @Test
  public void testTaskAttemptIdXML() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      for (Task task : jobsMap.get(id).getTasks().values()) {

        String tid = MRApps.toString(task.getID());
        for (TaskAttempt att : task.getAttempts().values()) {
          TaskAttemptId attemptid = att.getID();
          String attid = MRApps.toString(attemptid);

          ClientResponse response = r.path("ws").path("v1").path("mapreduce")
              .path("jobs").path(jobId).path("tasks").path(tid)
              .path("attempts").path(attid).accept(MediaType.APPLICATION_XML)
              .get(ClientResponse.class);

          assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
          String xml = response.getEntity(String.class);
          DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
          DocumentBuilder db = dbf.newDocumentBuilder();
          InputSource is = new InputSource();
          is.setCharacterStream(new StringReader(xml));
          Document dom = db.parse(is);
          NodeList nodes = dom.getElementsByTagName("taskAttempt");
          for (int i = 0; i < nodes.getLength(); i++) {
            Element element = (Element) nodes.item(i);
            verifyAMTaskAttemptXML(element, att, task.getType());
          }
        }
      }
    }
  }

  @Test
  public void testTaskAttemptIdBogus() throws JSONException, Exception {
    testTaskAttemptIdErrorGeneric("bogusid",
        "java.lang.Exception: Error parsing attempt ID: bogusid");
  }

  @Test
  public void testTaskAttemptIdNonExist() throws JSONException, Exception {
    testTaskAttemptIdErrorGeneric(
        "attempt_12345_0_0_r_1_0",
        "java.lang.Exception: Error getting info on task attempt id attempt_12345_0_0_r_1_0");
  }

  @Test
  public void testTaskAttemptIdInvalid() throws JSONException, Exception {
    testTaskAttemptIdErrorGeneric("attempt_12345_0_0_d_1_0",
        "java.lang.Exception: Unknown task symbol: d");
  }

  @Test
  public void testTaskAttemptIdInvalid2() throws JSONException, Exception {
    testTaskAttemptIdErrorGeneric("attempt_12345_0_r_1_0",
        "java.lang.Exception: For input string: \"r\"");
  }

  @Test
  public void testTaskAttemptIdInvalid3() throws JSONException, Exception {
    testTaskAttemptIdErrorGeneric("attempt_12345_0_0_r_1",
        "java.lang.Exception: Error parsing attempt ID: attempt_12345_0_0_r_1");
  }

  private void testTaskAttemptIdErrorGeneric(String attid, String error)
      throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();

    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);

      for (Task task : jobsMap.get(id).getTasks().values()) {
        String tid = MRApps.toString(task.getID());

        try {
          r.path("ws").path("v1").path("mapreduce").path("jobs").path(jobId)
              .path("tasks").path(tid).path("attempts").path(attid)
              .accept(MediaType.APPLICATION_JSON).get(JSONObject.class);
          fail("should have thrown exception on invalid uri");
        } catch (UniformInterfaceException ue) {
          ClientResponse response = ue.getResponse();
          assertEquals(Status.NOT_FOUND, response.getClientResponseStatus());
          assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
          JSONObject msg = response.getEntity(JSONObject.class);
          JSONObject exception = msg.getJSONObject("RemoteException");
          assertEquals("incorrect number of elements", 3, exception.length());
          String message = exception.getString("message");
          String type = exception.getString("exception");
          String classname = exception.getString("javaClassName");
          WebServicesTestUtils.checkStringMatch("exception message", error,
              message);
          WebServicesTestUtils.checkStringMatch("exception type",
              "NotFoundException", type);
          WebServicesTestUtils.checkStringMatch("exception classname",
              "org.apache.hadoop.yarn.webapp.NotFoundException", classname);
        }
      }
    }
  }

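  // Illustrative only: the error body unwrapped above looks roughly like this
  // (the message varies per test; the other two fields are asserted exactly):
  //
  //   {"RemoteException": {
  //     "message": "java.lang.Exception: Error parsing attempt ID: bogusid",
  //     "exception": "NotFoundException",
  //     "javaClassName": "org.apache.hadoop.yarn.webapp.NotFoundException"}}
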
  public void verifyAMTaskAttemptXML(Element element, TaskAttempt att,
      TaskType ttype) {
    verifyTaskAttemptGeneric(att, ttype,
        WebServicesTestUtils.getXmlString(element, "id"),
        WebServicesTestUtils.getXmlString(element, "state"),
        WebServicesTestUtils.getXmlString(element, "type"),
        WebServicesTestUtils.getXmlString(element, "rack"),
        WebServicesTestUtils.getXmlString(element, "nodeHttpAddress"),
        WebServicesTestUtils.getXmlString(element, "diagnostics"),
        WebServicesTestUtils.getXmlString(element, "assignedContainerId"),
        WebServicesTestUtils.getXmlLong(element, "startTime"),
        WebServicesTestUtils.getXmlLong(element, "finishTime"),
        WebServicesTestUtils.getXmlLong(element, "elapsedTime"),
        WebServicesTestUtils.getXmlFloat(element, "progress"));

    if (ttype == TaskType.REDUCE) {
      verifyReduceTaskAttemptGeneric(att,
          WebServicesTestUtils.getXmlLong(element, "shuffleFinishTime"),
          WebServicesTestUtils.getXmlLong(element, "mergeFinishTime"),
          WebServicesTestUtils.getXmlLong(element, "elapsedShuffleTime"),
          WebServicesTestUtils.getXmlLong(element, "elapsedMergeTime"),
          WebServicesTestUtils.getXmlLong(element, "elapsedReduceTime"));
    }
  }

  public void verifyAMTaskAttempt(JSONObject info, TaskAttempt att,
      TaskType ttype) throws JSONException {
    if (ttype == TaskType.REDUCE) {
      assertEquals("incorrect number of elements", 16, info.length());
    } else {
      assertEquals("incorrect number of elements", 11, info.length());
    }

    verifyTaskAttemptGeneric(att, ttype, info.getString("id"),
        info.getString("state"), info.getString("type"),
        info.getString("rack"), info.getString("nodeHttpAddress"),
        info.getString("diagnostics"), info.getString("assignedContainerId"),
        info.getLong("startTime"), info.getLong("finishTime"),
        info.getLong("elapsedTime"), (float) info.getDouble("progress"));

    if (ttype == TaskType.REDUCE) {
      verifyReduceTaskAttemptGeneric(att, info.getLong("shuffleFinishTime"),
          info.getLong("mergeFinishTime"), info.getLong("elapsedShuffleTime"),
          info.getLong("elapsedMergeTime"), info.getLong("elapsedReduceTime"));
    }
  }

  public void verifyAMTaskAttempts(JSONObject json, Task task)
      throws JSONException {
    assertEquals("incorrect number of elements", 1, json.length());
    JSONObject attempts = json.getJSONObject("taskAttempts");
    assertEquals("incorrect number of elements", 1, attempts.length());
    JSONArray arr = attempts.getJSONArray("taskAttempt");
    for (TaskAttempt att : task.getAttempts().values()) {
      TaskAttemptId id = att.getID();
      String attid = MRApps.toString(id);
      boolean found = false;

      for (int i = 0; i < arr.length(); i++) {
        JSONObject info = arr.getJSONObject(i);
        if (attid.matches(info.getString("id"))) {
          found = true;
          verifyAMTaskAttempt(info, att, task.getType());
        }
      }
      assertTrue("task attempt with id: " + attid
          + " not in web service output", found);
    }
  }

  public void verifyAMTaskAttemptsXML(NodeList nodes, Task task) {
    assertEquals("incorrect number of elements", 1, nodes.getLength());

    for (TaskAttempt att : task.getAttempts().values()) {
      TaskAttemptId id = att.getID();
      String attid = MRApps.toString(id);
      boolean found = false;
      for (int i = 0; i < nodes.getLength(); i++) {
        Element element = (Element) nodes.item(i);

        if (attid.matches(WebServicesTestUtils.getXmlString(element, "id"))) {
          found = true;
          verifyAMTaskAttemptXML(element, att, task.getType());
        }
      }
      assertTrue("task attempt with id: " + attid
          + " not in web service output", found);
    }
  }

  public void verifyTaskAttemptGeneric(TaskAttempt ta, TaskType ttype,
      String id, String state, String type, String rack,
      String nodeHttpAddress, String diagnostics, String assignedContainerId,
      long startTime, long finishTime, long elapsedTime, float progress) {

    TaskAttemptId attid = ta.getID();
    String attemptId = MRApps.toString(attid);

    WebServicesTestUtils.checkStringMatch("id", attemptId, id);
    WebServicesTestUtils.checkStringMatch("type", ttype.toString(), type);
    WebServicesTestUtils.checkStringMatch("state", ta.getState().toString(),
        state);
    WebServicesTestUtils.checkStringMatch("rack", ta.getNodeRackName(), rack);
    WebServicesTestUtils.checkStringMatch("nodeHttpAddress",
        ta.getNodeHttpAddress(), nodeHttpAddress);

    String expectDiag = "";
    List<String> diagnosticsList = ta.getDiagnostics();
    if (diagnosticsList != null && !diagnosticsList.isEmpty()) {
      StringBuilder b = new StringBuilder();
      for (String diag : diagnosticsList) {
        b.append(diag);
      }
      expectDiag = b.toString();
    }
    WebServicesTestUtils.checkStringMatch("diagnostics", expectDiag,
        diagnostics);
    WebServicesTestUtils.checkStringMatch("assignedContainerId",
        ConverterUtils.toString(ta.getAssignedContainerID()),
        assignedContainerId);

    assertEquals("startTime wrong", ta.getLaunchTime(), startTime);
    assertEquals("finishTime wrong", ta.getFinishTime(), finishTime);
    assertEquals("elapsedTime wrong", finishTime - startTime, elapsedTime);
    assertEquals("progress wrong", ta.getProgress() * 100, progress, 1e-3f);
  }

  public void verifyReduceTaskAttemptGeneric(TaskAttempt ta,
      long shuffleFinishTime, long mergeFinishTime, long elapsedShuffleTime,
      long elapsedMergeTime, long elapsedReduceTime) {

    assertEquals("shuffleFinishTime wrong", ta.getShuffleFinishTime(),
        shuffleFinishTime);
    assertEquals("mergeFinishTime wrong", ta.getSortFinishTime(),
        mergeFinishTime);
    assertEquals("elapsedShuffleTime wrong",
        ta.getShuffleFinishTime() - ta.getLaunchTime(), elapsedShuffleTime);
    assertEquals("elapsedMergeTime wrong",
        ta.getSortFinishTime() - ta.getShuffleFinishTime(), elapsedMergeTime);
    assertEquals("elapsedReduceTime wrong",
        ta.getFinishTime() - ta.getSortFinishTime(), elapsedReduceTime);
  }

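  // Worked example of the arithmetic checked above, with invented timestamps:
  // launchTime=1000, shuffleFinishTime=1400, sortFinishTime=1600,
  // finishTime=1900 gives elapsedShuffleTime=400, elapsedMergeTime=200 and
  // elapsedReduceTime=300, each a non-negative later-minus-earlier interval.
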
  @Test
  public void testTaskAttemptIdCounters() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();

    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);

      for (Task task : jobsMap.get(id).getTasks().values()) {
        String tid = MRApps.toString(task.getID());

        for (TaskAttempt att : task.getAttempts().values()) {
          TaskAttemptId attemptid = att.getID();
          String attid = MRApps.toString(attemptid);

          ClientResponse response = r.path("ws").path("v1").path("mapreduce")
              .path("jobs").path(jobId).path("tasks").path(tid)
              .path("attempts").path(attid).path("counters")
              .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
          assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
          JSONObject json = response.getEntity(JSONObject.class);
          assertEquals("incorrect number of elements", 1, json.length());
          JSONObject info = json.getJSONObject("JobTaskAttemptCounters");
          verifyAMJobTaskAttemptCounters(info, att);
        }
      }
    }
  }

  @Test
  public void testTaskAttemptIdXMLCounters() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      for (Task task : jobsMap.get(id).getTasks().values()) {

        String tid = MRApps.toString(task.getID());
        for (TaskAttempt att : task.getAttempts().values()) {
          TaskAttemptId attemptid = att.getID();
          String attid = MRApps.toString(attemptid);

          ClientResponse response = r.path("ws").path("v1").path("mapreduce")
              .path("jobs").path(jobId).path("tasks").path(tid)
              .path("attempts").path(attid).path("counters")
              .accept(MediaType.APPLICATION_XML).get(ClientResponse.class);

          assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
          String xml = response.getEntity(String.class);
          DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
          DocumentBuilder db = dbf.newDocumentBuilder();
          InputSource is = new InputSource();
          is.setCharacterStream(new StringReader(xml));
          Document dom = db.parse(is);
          NodeList nodes = dom.getElementsByTagName("JobTaskAttemptCounters");

          verifyAMTaskCountersXML(nodes, att);
        }
      }
    }
  }

  public void verifyAMJobTaskAttemptCounters(JSONObject info, TaskAttempt att)
      throws JSONException {

    assertEquals("incorrect number of elements", 2, info.length());

    WebServicesTestUtils.checkStringMatch("id", MRApps.toString(att.getID()),
        info.getString("id"));

    // just do simple verification of the fields - not that the data in the
    // fields is correct
    JSONArray counterGroups = info.getJSONArray("taskAttemptCounterGroup");
    for (int i = 0; i < counterGroups.length(); i++) {
      JSONObject counterGroup = counterGroups.getJSONObject(i);
      String name = counterGroup.getString("counterGroupName");
      assertTrue("name not set", (name != null && !name.isEmpty()));
      JSONArray counters = counterGroup.getJSONArray("counter");
      for (int j = 0; j < counters.length(); j++) {
        JSONObject counter = counters.getJSONObject(j);
        String counterName = counter.getString("name");
        assertTrue("name not set",
            (counterName != null && !counterName.isEmpty()));
        long value = counter.getLong("value");
        assertTrue("value not >= 0", value >= 0);
      }
    }
  }

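  // Illustrative only: a counters document of the kind verified above; the
  // group and counter names are plausible examples, not asserted values:
  //
  //   {"JobTaskAttemptCounters": {"id": "attempt_...",
  //     "taskAttemptCounterGroup": [
  //       {"counterGroupName": "org.apache.hadoop.mapreduce.FileSystemCounter",
  //        "counter": [{"name": "FILE_BYTES_READ", "value": 2879}, ...]}]}}
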
  public void verifyAMTaskCountersXML(NodeList nodes, TaskAttempt att) {

    for (int i = 0; i < nodes.getLength(); i++) {

      Element element = (Element) nodes.item(i);
      WebServicesTestUtils.checkStringMatch("id", MRApps.toString(att.getID()),
          WebServicesTestUtils.getXmlString(element, "id"));
      // just do simple verification of the fields - not that the data in the
      // fields is correct
      NodeList groups = element.getElementsByTagName("taskAttemptCounterGroup");

      for (int j = 0; j < groups.getLength(); j++) {
        Element counters = (Element) groups.item(j);
        assertNotNull("should have counters in the web service info", counters);
        String name = WebServicesTestUtils.getXmlString(counters,
            "counterGroupName");
        assertTrue("name not set", (name != null && !name.isEmpty()));
        NodeList counterArr = counters.getElementsByTagName("counter");
        for (int z = 0; z < counterArr.getLength(); z++) {
          Element counter = (Element) counterArr.item(z);
          String counterName = WebServicesTestUtils.getXmlString(counter,
              "name");
          assertTrue("counter name not set",
              (counterName != null && !counterName.isEmpty()));

          long value = WebServicesTestUtils.getXmlLong(counter, "value");
          assertTrue("value not >= 0", value >= 0);
        }
      }
    }
  }

}
@ -0,0 +1,336 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.mapreduce.v2.app.webapp;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.io.StringReader;
import java.util.Map;

import javax.ws.rs.core.MediaType;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.MRJobConfig;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.app.AppContext;
import org.apache.hadoop.mapreduce.v2.app.MockJobs;
import org.apache.hadoop.mapreduce.v2.app.job.Job;
import org.apache.hadoop.mapreduce.v2.util.MRApps;
import org.apache.hadoop.yarn.Clock;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.event.EventHandler;
import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
import org.apache.hadoop.yarn.webapp.WebServicesTestUtils;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.Test;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;
import org.xml.sax.InputSource;

import com.google.common.collect.Maps;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.servlet.GuiceServletContextListener;
import com.google.inject.servlet.ServletModule;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.WebResource;
import com.sun.jersey.guice.spi.container.servlet.GuiceContainer;
import com.sun.jersey.test.framework.JerseyTest;
import com.sun.jersey.test.framework.WebAppDescriptor;

/**
 * Test the app master web service REST API for getting the job conf. This
 * requires creating a temporary configuration file.
 *
 *   /ws/v1/mapreduce/jobs/{jobid}/conf
 */
public class TestAMWebServicesJobConf extends JerseyTest {

  private static Configuration conf = new Configuration();
  private static TestAppContext appContext;

  private static File testConfDir = new File("target",
      TestAMWebServicesJobConf.class.getSimpleName() + "confDir");

  static class TestAppContext implements AppContext {
    final ApplicationAttemptId appAttemptID;
    final ApplicationId appID;
    final String user = MockJobs.newUserName();
    final Map<JobId, Job> jobs;
    final long startTime = System.currentTimeMillis();

    TestAppContext(int appid, int numTasks, int numAttempts, Path confPath) {
      appID = MockJobs.newAppID(appid);
      appAttemptID = MockJobs.newAppAttemptID(appID, 0);
      Map<JobId, Job> map = Maps.newHashMap();
      Job job = MockJobs.newJob(appID, 0, numTasks, numAttempts, confPath);
      map.put(job.getID(), job);
      jobs = map;
    }

    @Override
    public ApplicationAttemptId getApplicationAttemptId() {
      return appAttemptID;
    }

    @Override
    public ApplicationId getApplicationID() {
      return appID;
    }

    @Override
    public CharSequence getUser() {
      return user;
    }

    @Override
    public Job getJob(JobId jobID) {
      return jobs.get(jobID);
    }

    @Override
    public Map<JobId, Job> getAllJobs() {
      return jobs; // OK
    }

    @SuppressWarnings("rawtypes")
    @Override
    public EventHandler getEventHandler() {
      return null;
    }

    @Override
    public Clock getClock() {
      return null;
    }

    @Override
    public String getApplicationName() {
      return "TestApp";
    }

    @Override
    public long getStartTime() {
      return startTime;
    }
  }

  private Injector injector = Guice.createInjector(new ServletModule() {
    @Override
    protected void configureServlets() {

      Path confPath = new Path(testConfDir.toString(),
          MRJobConfig.JOB_CONF_FILE);
      Configuration config = new Configuration();

      FileSystem localFs;
      try {
        localFs = FileSystem.getLocal(config);
        confPath = localFs.makeQualified(confPath);

        OutputStream out = localFs.create(confPath);
        try {
          conf.writeXml(out);
        } finally {
          out.close();
        }
        if (!localFs.exists(confPath)) {
          fail("error creating config file: " + confPath);
        }

      } catch (IOException e) {
        fail("error creating config file: " + e.getMessage());
      }

      appContext = new TestAppContext(0, 2, 1, confPath);

      bind(JAXBContextResolver.class);
      bind(AMWebServices.class);
      bind(GenericExceptionHandler.class);
      bind(AppContext.class).toInstance(appContext);
      bind(Configuration.class).toInstance(conf);

      serve("/*").with(GuiceContainer.class);
    }
  });

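  // How the harness above fits together: configureServlets() first writes the
  // shared Configuration out as a real job.xml under testConfDir, since the
  // conf endpoint reads the file back from the file system, then binds the
  // mock AppContext pointing at that file and serves AMWebServices through a
  // GuiceContainer so JerseyTest can drive it with real HTTP requests.
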
  public class GuiceServletConfig extends GuiceServletContextListener {

    @Override
    protected Injector getInjector() {
      return injector;
    }
  }

  @Before
  @Override
  public void setUp() throws Exception {
    super.setUp();
    testConfDir.mkdir();
  }

  @AfterClass
  public static void stop() {
    FileUtil.fullyDelete(testConfDir);
  }

  public TestAMWebServicesJobConf() {
    super(new WebAppDescriptor.Builder(
        "org.apache.hadoop.mapreduce.v2.app.webapp")
        .contextListenerClass(GuiceServletConfig.class)
        .filterClass(com.google.inject.servlet.GuiceFilter.class)
        .contextPath("jersey-guice-filter").servletPath("/").build());
  }

  @Test
  public void testJobConf() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);

      ClientResponse response = r.path("ws").path("v1").path("mapreduce")
          .path("jobs").path(jobId).path("conf")
          .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
      assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
      JSONObject json = response.getEntity(JSONObject.class);
      assertEquals("incorrect number of elements", 1, json.length());
      JSONObject info = json.getJSONObject("conf");
      verifyAMJobConf(info, jobsMap.get(id));
    }
  }

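  // Illustrative only: a conf response has roughly this shape (the property
  // shown is an example; the test only checks that names and values are
  // non-empty):
  //
  //   {"conf": {"path": "file:/target/.../job.xml",
  //     "property": [{"name": "hadoop.tmp.dir",
  //                   "value": "/tmp/hadoop"}, ...]}}
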
  @Test
  public void testJobConfSlash() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);

      ClientResponse response = r.path("ws").path("v1").path("mapreduce")
          .path("jobs").path(jobId).path("conf/")
          .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
      assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
      JSONObject json = response.getEntity(JSONObject.class);
      assertEquals("incorrect number of elements", 1, json.length());
      JSONObject info = json.getJSONObject("conf");
      verifyAMJobConf(info, jobsMap.get(id));
    }
  }

  @Test
  public void testJobConfDefault() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);

      ClientResponse response = r.path("ws").path("v1").path("mapreduce")
          .path("jobs").path(jobId).path("conf").get(ClientResponse.class);
      assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
      JSONObject json = response.getEntity(JSONObject.class);
      assertEquals("incorrect number of elements", 1, json.length());
      JSONObject info = json.getJSONObject("conf");
      verifyAMJobConf(info, jobsMap.get(id));
    }
  }

  @Test
  public void testJobConfXML() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);

      ClientResponse response = r.path("ws").path("v1").path("mapreduce")
          .path("jobs").path(jobId).path("conf")
          .accept(MediaType.APPLICATION_XML).get(ClientResponse.class);
      assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
      String xml = response.getEntity(String.class);
      DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
      DocumentBuilder db = dbf.newDocumentBuilder();
      InputSource is = new InputSource();
      is.setCharacterStream(new StringReader(xml));
      Document dom = db.parse(is);
      NodeList info = dom.getElementsByTagName("conf");
      verifyAMJobConfXML(info, jobsMap.get(id));
    }
  }

  public void verifyAMJobConf(JSONObject info, Job job) throws JSONException {

    assertEquals("incorrect number of elements", 2, info.length());

    WebServicesTestUtils.checkStringMatch("path", job.getConfFile().toString(),
        info.getString("path"));
    // just do simple verification of the fields - not that the data in the
    // fields is correct
    JSONArray properties = info.getJSONArray("property");
    for (int i = 0; i < properties.length(); i++) {
      JSONObject prop = properties.getJSONObject(i);
      String name = prop.getString("name");
      String value = prop.getString("value");
      assertTrue("name not set", (name != null && !name.isEmpty()));
      assertTrue("value not set", (value != null && !value.isEmpty()));
    }
  }

  public void verifyAMJobConfXML(NodeList nodes, Job job) {

    assertEquals("incorrect number of elements", 1, nodes.getLength());

    for (int i = 0; i < nodes.getLength(); i++) {
      Element element = (Element) nodes.item(i);
      WebServicesTestUtils.checkStringMatch("path", job.getConfFile()
          .toString(), WebServicesTestUtils.getXmlString(element, "path"));

      // just do simple verification of the fields - not that the data in the
      // fields is correct
      NodeList properties = element.getElementsByTagName("property");

      for (int j = 0; j < properties.getLength(); j++) {
        Element property = (Element) properties.item(j);
        assertNotNull("should have properties in the web service info",
            property);
        String name = WebServicesTestUtils.getXmlString(property, "name");
        String value = WebServicesTestUtils.getXmlString(property, "value");
        assertTrue("name not set", (name != null && !name.isEmpty()));
        assertTrue("value not set", (value != null && !value.isEmpty()));
      }
    }
  }

}
@ -0,0 +1,780 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.mapreduce.v2.app.webapp;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.io.StringReader;
import java.util.List;
import java.util.Map;

import javax.ws.rs.core.MediaType;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.JobACL;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.api.records.JobReport;
import org.apache.hadoop.mapreduce.v2.app.AppContext;
import org.apache.hadoop.mapreduce.v2.app.MockJobs;
import org.apache.hadoop.mapreduce.v2.app.job.Job;
import org.apache.hadoop.mapreduce.v2.util.MRApps;
import org.apache.hadoop.security.authorize.AccessControlList;
import org.apache.hadoop.yarn.Clock;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.event.EventHandler;
import org.apache.hadoop.yarn.util.Times;
import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
import org.apache.hadoop.yarn.webapp.WebServicesTestUtils;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import org.junit.Before;
import org.junit.Test;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;
import org.xml.sax.InputSource;

import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.servlet.GuiceServletContextListener;
import com.google.inject.servlet.ServletModule;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.ClientResponse.Status;
import com.sun.jersey.api.client.UniformInterfaceException;
import com.sun.jersey.api.client.WebResource;
import com.sun.jersey.guice.spi.container.servlet.GuiceContainer;
import com.sun.jersey.test.framework.JerseyTest;
import com.sun.jersey.test.framework.WebAppDescriptor;

/**
 * Test the app master web service REST API for getting jobs, a specific job,
 * and job counters.
 *
 *   /ws/v1/mapreduce/jobs
 *   /ws/v1/mapreduce/jobs/{jobid}
 *   /ws/v1/mapreduce/jobs/{jobid}/counters
 */
public class TestAMWebServicesJobs extends JerseyTest {

  private static Configuration conf = new Configuration();
  private static TestAppContext appContext;

  static class TestAppContext implements AppContext {
    final ApplicationAttemptId appAttemptID;
    final ApplicationId appID;
    final String user = MockJobs.newUserName();
    final Map<JobId, Job> jobs;
    final long startTime = System.currentTimeMillis();

    TestAppContext(int appid, int numJobs, int numTasks, int numAttempts) {
      appID = MockJobs.newAppID(appid);
      appAttemptID = MockJobs.newAppAttemptID(appID, 0);
      jobs = MockJobs.newJobs(appID, numJobs, numTasks, numAttempts);
    }

    TestAppContext() {
      this(0, 1, 2, 1);
    }

    @Override
    public ApplicationAttemptId getApplicationAttemptId() {
      return appAttemptID;
    }

    @Override
    public ApplicationId getApplicationID() {
      return appID;
    }

    @Override
    public CharSequence getUser() {
      return user;
    }

    @Override
    public Job getJob(JobId jobID) {
      return jobs.get(jobID);
    }

    @Override
    public Map<JobId, Job> getAllJobs() {
      return jobs; // OK
    }

    @SuppressWarnings("rawtypes")
    @Override
    public EventHandler getEventHandler() {
      return null;
    }

    @Override
    public Clock getClock() {
      return null;
    }

    @Override
    public String getApplicationName() {
      return "TestApp";
    }

    @Override
    public long getStartTime() {
      return startTime;
    }
  }

  private Injector injector = Guice.createInjector(new ServletModule() {
    @Override
    protected void configureServlets() {

      appContext = new TestAppContext();
      bind(JAXBContextResolver.class);
      bind(AMWebServices.class);
      bind(GenericExceptionHandler.class);
      bind(AppContext.class).toInstance(appContext);
      bind(Configuration.class).toInstance(conf);

      serve("/*").with(GuiceContainer.class);
    }
  });

  public class GuiceServletConfig extends GuiceServletContextListener {

    @Override
    protected Injector getInjector() {
      return injector;
    }
  }

  @Before
  @Override
  public void setUp() throws Exception {
    super.setUp();
  }

  public TestAMWebServicesJobs() {
    super(new WebAppDescriptor.Builder(
        "org.apache.hadoop.mapreduce.v2.app.webapp")
        .contextListenerClass(GuiceServletConfig.class)
        .filterClass(com.google.inject.servlet.GuiceFilter.class)
        .contextPath("jersey-guice-filter").servletPath("/").build());
  }

  @Test
  public void testJobs() throws JSONException, Exception {
    WebResource r = resource();
    ClientResponse response = r.path("ws").path("v1").path("mapreduce")
        .path("jobs").accept(MediaType.APPLICATION_JSON)
        .get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
    JSONObject json = response.getEntity(JSONObject.class);
    assertEquals("incorrect number of elements", 1, json.length());
    JSONObject jobs = json.getJSONObject("jobs");
    JSONArray arr = jobs.getJSONArray("job");
    JSONObject info = arr.getJSONObject(0);
    Job job = appContext.getJob(MRApps.toJobID(info.getString("id")));
    verifyAMJob(info, job);
  }

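  // Illustrative only: the listing unwrapped above nests its entries as
  // {"jobs": {"job": [...]}}; the mock context holds a single job, so the
  // test takes element 0, resolves it back through the AppContext by id, and
  // lets verifyAMJob compare all thirty reported fields against the source.
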
  @Test
  public void testJobsSlash() throws JSONException, Exception {
    WebResource r = resource();
    ClientResponse response = r.path("ws").path("v1").path("mapreduce")
        .path("jobs/").accept(MediaType.APPLICATION_JSON)
        .get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
    JSONObject json = response.getEntity(JSONObject.class);
    assertEquals("incorrect number of elements", 1, json.length());
    JSONObject jobs = json.getJSONObject("jobs");
    JSONArray arr = jobs.getJSONArray("job");
    JSONObject info = arr.getJSONObject(0);
    Job job = appContext.getJob(MRApps.toJobID(info.getString("id")));
    verifyAMJob(info, job);
  }

  @Test
  public void testJobsDefault() throws JSONException, Exception {
    WebResource r = resource();
    ClientResponse response = r.path("ws").path("v1").path("mapreduce")
        .path("jobs").get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
    JSONObject json = response.getEntity(JSONObject.class);
    assertEquals("incorrect number of elements", 1, json.length());
    JSONObject jobs = json.getJSONObject("jobs");
    JSONArray arr = jobs.getJSONArray("job");
    JSONObject info = arr.getJSONObject(0);
    Job job = appContext.getJob(MRApps.toJobID(info.getString("id")));
    verifyAMJob(info, job);
  }

  @Test
  public void testJobsXML() throws Exception {
    WebResource r = resource();
    ClientResponse response = r.path("ws").path("v1").path("mapreduce")
        .path("jobs").accept(MediaType.APPLICATION_XML)
        .get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
    String xml = response.getEntity(String.class);
    DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
    DocumentBuilder db = dbf.newDocumentBuilder();
    InputSource is = new InputSource();
    is.setCharacterStream(new StringReader(xml));
    Document dom = db.parse(is);
    NodeList jobs = dom.getElementsByTagName("jobs");
    assertEquals("incorrect number of elements", 1, jobs.getLength());
    NodeList job = dom.getElementsByTagName("job");
    assertEquals("incorrect number of elements", 1, job.getLength());
    verifyAMJobXML(job, appContext);
  }

  @Test
  public void testJobId() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);

      ClientResponse response = r.path("ws").path("v1").path("mapreduce")
          .path("jobs").path(jobId).accept(MediaType.APPLICATION_JSON)
          .get(ClientResponse.class);
      assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
      JSONObject json = response.getEntity(JSONObject.class);
      assertEquals("incorrect number of elements", 1, json.length());
      JSONObject info = json.getJSONObject("job");
      verifyAMJob(info, jobsMap.get(id));
    }
  }

  @Test
  public void testJobIdSlash() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);

      ClientResponse response = r.path("ws").path("v1").path("mapreduce")
          .path("jobs").path(jobId + "/").accept(MediaType.APPLICATION_JSON)
          .get(ClientResponse.class);
      assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
      JSONObject json = response.getEntity(JSONObject.class);
      assertEquals("incorrect number of elements", 1, json.length());
      JSONObject info = json.getJSONObject("job");
      verifyAMJob(info, jobsMap.get(id));
    }
  }

  @Test
  public void testJobIdDefault() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);

      ClientResponse response = r.path("ws").path("v1").path("mapreduce")
          .path("jobs").path(jobId).get(ClientResponse.class);
      assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
      JSONObject json = response.getEntity(JSONObject.class);
      assertEquals("incorrect number of elements", 1, json.length());
      JSONObject info = json.getJSONObject("job");
      verifyAMJob(info, jobsMap.get(id));
    }
  }

  @Test
  public void testJobIdNonExist() throws JSONException, Exception {
    WebResource r = resource();

    try {
      r.path("ws").path("v1").path("mapreduce").path("jobs")
          .path("job_1234_1_2").get(JSONObject.class);
      fail("should have thrown exception on invalid uri");
    } catch (UniformInterfaceException ue) {
      ClientResponse response = ue.getResponse();
      assertEquals(Status.NOT_FOUND, response.getClientResponseStatus());
      assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
      JSONObject msg = response.getEntity(JSONObject.class);
      JSONObject exception = msg.getJSONObject("RemoteException");
      assertEquals("incorrect number of elements", 3, exception.length());
      String message = exception.getString("message");
      String type = exception.getString("exception");
      String classname = exception.getString("javaClassName");
      WebServicesTestUtils.checkStringMatch("exception message",
          "java.lang.Exception: job, job_1234_1_2, is not found", message);
      WebServicesTestUtils.checkStringMatch("exception type",
          "NotFoundException", type);
      WebServicesTestUtils.checkStringMatch("exception classname",
          "org.apache.hadoop.yarn.webapp.NotFoundException", classname);
    }
  }

  @Test
  public void testJobIdInvalid() throws JSONException, Exception {
    WebResource r = resource();

    try {
      r.path("ws").path("v1").path("mapreduce").path("jobs").path("job_foo")
          .get(JSONObject.class);
      fail("should have thrown exception on invalid uri");
    } catch (UniformInterfaceException ue) {
      ClientResponse response = ue.getResponse();
      assertEquals(Status.BAD_REQUEST, response.getClientResponseStatus());
      assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
      JSONObject msg = response.getEntity(JSONObject.class);
      JSONObject exception = msg.getJSONObject("RemoteException");
      assertEquals("incorrect number of elements", 3, exception.length());
      String message = exception.getString("message");
      String type = exception.getString("exception");
      String classname = exception.getString("javaClassName");
      WebServicesTestUtils.checkStringMatch("exception message",
          "For input string: \"foo\"", message);
      WebServicesTestUtils.checkStringMatch("exception type",
          "NumberFormatException", type);
      WebServicesTestUtils.checkStringMatch("exception classname",
          "java.lang.NumberFormatException", classname);
    }
  }

  @Test
  public void testJobIdInvalidBogus() throws JSONException, Exception {
    WebResource r = resource();

    try {
      r.path("ws").path("v1").path("mapreduce").path("jobs").path("bogusfoo")
          .get(JSONObject.class);
      fail("should have thrown exception on invalid uri");
    } catch (UniformInterfaceException ue) {
      ClientResponse response = ue.getResponse();
      assertEquals(Status.NOT_FOUND, response.getClientResponseStatus());
      assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
      JSONObject msg = response.getEntity(JSONObject.class);
      JSONObject exception = msg.getJSONObject("RemoteException");
      assertEquals("incorrect number of elements", 3, exception.length());
      String message = exception.getString("message");
      String type = exception.getString("exception");
      String classname = exception.getString("javaClassName");
      WebServicesTestUtils.checkStringMatch("exception message",
          "java.lang.Exception: Error parsing job ID: bogusfoo", message);
      WebServicesTestUtils.checkStringMatch("exception type",
          "NotFoundException", type);
      WebServicesTestUtils.checkStringMatch("exception classname",
          "org.apache.hadoop.yarn.webapp.NotFoundException", classname);
    }
  }

  @Test
  public void testJobIdXML() throws Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);

      ClientResponse response = r.path("ws").path("v1").path("mapreduce")
          .path("jobs").path(jobId).accept(MediaType.APPLICATION_XML)
          .get(ClientResponse.class);
      assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
      String xml = response.getEntity(String.class);
      DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
      DocumentBuilder db = dbf.newDocumentBuilder();
      InputSource is = new InputSource();
      is.setCharacterStream(new StringReader(xml));
      Document dom = db.parse(is);
      NodeList job = dom.getElementsByTagName("job");
      verifyAMJobXML(job, appContext);
    }
  }

  public void verifyAMJob(JSONObject info, Job job) throws JSONException {

    assertEquals("incorrect number of elements", 30, info.length());

    // fields accessible to everyone
    verifyAMJobGeneric(job, info.getString("id"), info.getString("user"),
        info.getString("name"), info.getString("state"),
        info.getLong("startTime"), info.getLong("finishTime"),
        info.getLong("elapsedTime"), info.getInt("mapsTotal"),
        info.getInt("mapsCompleted"), info.getInt("reducesTotal"),
        info.getInt("reducesCompleted"),
        (float) info.getDouble("reduceProgress"),
        (float) info.getDouble("mapProgress"));

    String diagnostics = "";
    if (info.has("diagnostics")) {
      diagnostics = info.getString("diagnostics");
    }

    // restricted access fields - if security and acls set
    verifyAMJobGenericSecure(job, info.getInt("mapsPending"),
        info.getInt("mapsRunning"), info.getInt("reducesPending"),
        info.getInt("reducesRunning"), info.getBoolean("uberized"),
        diagnostics, info.getInt("newReduceAttempts"),
        info.getInt("runningReduceAttempts"),
        info.getInt("failedReduceAttempts"),
        info.getInt("killedReduceAttempts"),
        info.getInt("successfulReduceAttempts"), info.getInt("newMapAttempts"),
        info.getInt("runningMapAttempts"), info.getInt("failedMapAttempts"),
        info.getInt("killedMapAttempts"), info.getInt("successfulMapAttempts"));

    Map<JobACL, AccessControlList> allacls = job.getJobACLs();
    if (allacls != null) {

      for (Map.Entry<JobACL, AccessControlList> entry : allacls.entrySet()) {
        String expectName = entry.getKey().getAclName();
        String expectValue = entry.getValue().getAclString();
        boolean found = false;
        // make sure ws includes it
        if (info.has("acls")) {
          JSONArray arr = info.getJSONArray("acls");

          for (int i = 0; i < arr.length(); i++) {
            JSONObject aclInfo = arr.getJSONObject(i);
            if (expectName.matches(aclInfo.getString("name"))) {
              found = true;
              WebServicesTestUtils.checkStringMatch("value", expectValue,
                  aclInfo.getString("value"));
            }
          }
        } else {
          fail("should have acls in the web service info");
        }
        assertTrue("acl: " + expectName + " not found in webservice output",
            found);
      }
    }
  }

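  // Note on verifyAMJob above: the always-visible fields go through
  // verifyAMJobGeneric, while the pending/running counts, attempt breakdown,
  // uberized flag, diagnostics and acls go through verifyAMJobGenericSecure,
  // mirroring the service, which only exposes those fields to callers the
  // job ACLs authorize when security is enabled.
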
  public void verifyAMJobXML(NodeList nodes, TestAppContext appContext) {

    assertEquals("incorrect number of elements", 1, nodes.getLength());

    for (int i = 0; i < nodes.getLength(); i++) {
      Element element = (Element) nodes.item(i);

      Job job = appContext.getJob(MRApps.toJobID(WebServicesTestUtils
          .getXmlString(element, "id")));
      assertNotNull("Job not found - output incorrect", job);

      verifyAMJobGeneric(job, WebServicesTestUtils.getXmlString(element, "id"),
          WebServicesTestUtils.getXmlString(element, "user"),
          WebServicesTestUtils.getXmlString(element, "name"),
          WebServicesTestUtils.getXmlString(element, "state"),
          WebServicesTestUtils.getXmlLong(element, "startTime"),
          WebServicesTestUtils.getXmlLong(element, "finishTime"),
          WebServicesTestUtils.getXmlLong(element, "elapsedTime"),
          WebServicesTestUtils.getXmlInt(element, "mapsTotal"),
          WebServicesTestUtils.getXmlInt(element, "mapsCompleted"),
          WebServicesTestUtils.getXmlInt(element, "reducesTotal"),
          WebServicesTestUtils.getXmlInt(element, "reducesCompleted"),
          WebServicesTestUtils.getXmlFloat(element, "reduceProgress"),
          WebServicesTestUtils.getXmlFloat(element, "mapProgress"));

      // restricted access fields - if security and acls set
      verifyAMJobGenericSecure(job,
          WebServicesTestUtils.getXmlInt(element, "mapsPending"),
          WebServicesTestUtils.getXmlInt(element, "mapsRunning"),
          WebServicesTestUtils.getXmlInt(element, "reducesPending"),
          WebServicesTestUtils.getXmlInt(element, "reducesRunning"),
          WebServicesTestUtils.getXmlBoolean(element, "uberized"),
          WebServicesTestUtils.getXmlString(element, "diagnostics"),
          WebServicesTestUtils.getXmlInt(element, "newReduceAttempts"),
          WebServicesTestUtils.getXmlInt(element, "runningReduceAttempts"),
          WebServicesTestUtils.getXmlInt(element, "failedReduceAttempts"),
          WebServicesTestUtils.getXmlInt(element, "killedReduceAttempts"),
          WebServicesTestUtils.getXmlInt(element, "successfulReduceAttempts"),
          WebServicesTestUtils.getXmlInt(element, "newMapAttempts"),
          WebServicesTestUtils.getXmlInt(element, "runningMapAttempts"),
          WebServicesTestUtils.getXmlInt(element, "failedMapAttempts"),
          WebServicesTestUtils.getXmlInt(element, "killedMapAttempts"),
          WebServicesTestUtils.getXmlInt(element, "successfulMapAttempts"));

      Map<JobACL, AccessControlList> allacls = job.getJobACLs();
      if (allacls != null) {
        for (Map.Entry<JobACL, AccessControlList> entry : allacls.entrySet()) {
          String expectName = entry.getKey().getAclName();
          String expectValue = entry.getValue().getAclString();
          boolean found = false;
          // make sure ws includes it
          NodeList id = element.getElementsByTagName("acls");
          if (id != null) {
            for (int j = 0; j < id.getLength(); j++) {
              Element aclElem = (Element) id.item(j);
              if (aclElem == null) {
                fail("should have acls in the web service info");
              }
              if (expectName.matches(WebServicesTestUtils.getXmlString(aclElem,
                  "name"))) {
                found = true;
                WebServicesTestUtils.checkStringMatch("value", expectValue,
                    WebServicesTestUtils.getXmlString(aclElem, "value"));
              }
            }
          } else {
            fail("should have acls in the web service info");
          }
          assertTrue("acl: " + expectName + " not found in webservice output",
              found);
        }
      }
    }
  }

public void verifyAMJobGeneric(Job job, String id, String user, String name,
|
||||||
|
String state, long startTime, long finishTime, long elapsedTime,
|
||||||
|
int mapsTotal, int mapsCompleted, int reducesTotal, int reducesCompleted,
|
||||||
|
float reduceProgress, float mapProgress) {
|
||||||
|
JobReport report = job.getReport();
|
||||||
|
|
||||||
|
WebServicesTestUtils.checkStringMatch("id", MRApps.toString(job.getID()),
|
||||||
|
id);
|
||||||
|
WebServicesTestUtils.checkStringMatch("user", job.getUserName().toString(),
|
||||||
|
user);
|
||||||
|
WebServicesTestUtils.checkStringMatch("name", job.getName(), name);
|
||||||
|
WebServicesTestUtils.checkStringMatch("state", job.getState().toString(),
|
||||||
|
state);
|
||||||
|
|
||||||
|
assertEquals("startTime incorrect", report.getStartTime(), startTime);
|
||||||
|
assertEquals("finishTime incorrect", report.getFinishTime(), finishTime);
|
||||||
|
assertEquals("elapsedTime incorrect",
|
||||||
|
Times.elapsed(report.getStartTime(), report.getFinishTime()),
|
||||||
|
elapsedTime);
|
||||||
|
assertEquals("mapsTotal incorrect", job.getTotalMaps(), mapsTotal);
|
||||||
|
assertEquals("mapsCompleted incorrect", job.getCompletedMaps(),
|
||||||
|
mapsCompleted);
|
||||||
|
assertEquals("reducesTotal incorrect", job.getTotalReduces(), reducesTotal);
|
||||||
|
assertEquals("reducesCompleted incorrect", job.getCompletedReduces(),
|
||||||
|
reducesCompleted);
|
||||||
|
assertEquals("mapProgress incorrect", report.getMapProgress() * 100,
|
||||||
|
mapProgress, 0);
|
||||||
|
assertEquals("reduceProgress incorrect", report.getReduceProgress() * 100,
|
||||||
|
reduceProgress, 0);
|
||||||
|
}
|
||||||
|
|
||||||
|
public void verifyAMJobGenericSecure(Job job, int mapsPending,
|
||||||
|
int mapsRunning, int reducesPending, int reducesRunning,
|
||||||
|
Boolean uberized, String diagnostics, int newReduceAttempts,
|
||||||
|
int runningReduceAttempts, int failedReduceAttempts,
|
||||||
|
int killedReduceAttempts, int successfulReduceAttempts,
|
||||||
|
int newMapAttempts, int runningMapAttempts, int failedMapAttempts,
|
||||||
|
int killedMapAttempts, int successfulMapAttempts) {
|
||||||
|
|
||||||
|
String diagString = "";
|
||||||
|
List<String> diagList = job.getDiagnostics();
|
||||||
|
if (diagList != null && !diagList.isEmpty()) {
|
||||||
|
StringBuffer b = new StringBuffer();
|
||||||
|
for (String diag : diagList) {
|
||||||
|
b.append(diag);
|
||||||
|
}
|
||||||
|
diagString = b.toString();
|
||||||
|
}
|
||||||
|
WebServicesTestUtils.checkStringMatch("diagnostics", diagString,
|
||||||
|
diagnostics);
|
||||||
|
|
||||||
|
assertEquals("isUber incorrect", job.isUber(), uberized);
|
||||||
|
|
||||||
|
// unfortunately the following fields are all calculated in JobInfo
|
||||||
|
// so not easily accessible without doing all the calculations again.
|
||||||
|
// For now just make sure they are present.
|
||||||
|
assertTrue("mapsPending not >= 0", mapsPending >= 0);
|
||||||
|
assertTrue("mapsRunning not >= 0", mapsRunning >= 0);
|
||||||
|
assertTrue("reducesPending not >= 0", reducesPending >= 0);
|
||||||
|
assertTrue("reducesRunning not >= 0", reducesRunning >= 0);
|
||||||
|
|
||||||
|
assertTrue("newReduceAttempts not >= 0", newReduceAttempts >= 0);
|
||||||
|
assertTrue("runningReduceAttempts not >= 0", runningReduceAttempts >= 0);
|
||||||
|
assertTrue("failedReduceAttempts not >= 0", failedReduceAttempts >= 0);
|
||||||
|
assertTrue("killedReduceAttempts not >= 0", killedReduceAttempts >= 0);
|
||||||
|
assertTrue("successfulReduceAttempts not >= 0",
|
||||||
|
successfulReduceAttempts >= 0);
|
||||||
|
|
||||||
|
assertTrue("newMapAttempts not >= 0", newMapAttempts >= 0);
|
||||||
|
assertTrue("runningMapAttempts not >= 0", runningMapAttempts >= 0);
|
||||||
|
assertTrue("failedMapAttempts not >= 0", failedMapAttempts >= 0);
|
||||||
|
assertTrue("killedMapAttempts not >= 0", killedMapAttempts >= 0);
|
||||||
|
assertTrue("successfulMapAttempts not >= 0", successfulMapAttempts >= 0);
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void testJobCounters() throws JSONException, Exception {
|
||||||
|
WebResource r = resource();
|
||||||
|
Map<JobId, Job> jobsMap = appContext.getAllJobs();
|
||||||
|
for (JobId id : jobsMap.keySet()) {
|
||||||
|
String jobId = MRApps.toString(id);
|
||||||
|
|
||||||
|
ClientResponse response = r.path("ws").path("v1").path("mapreduce")
|
||||||
|
.path("jobs").path(jobId).path("counters")
|
||||||
|
.accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
|
||||||
|
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
|
||||||
|
JSONObject json = response.getEntity(JSONObject.class);
|
||||||
|
assertEquals("incorrect number of elements", 1, json.length());
|
||||||
|
JSONObject info = json.getJSONObject("jobCounters");
|
||||||
|
verifyAMJobCounters(info, jobsMap.get(id));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void testJobCountersSlash() throws JSONException, Exception {
|
||||||
|
WebResource r = resource();
|
||||||
|
Map<JobId, Job> jobsMap = appContext.getAllJobs();
|
||||||
|
for (JobId id : jobsMap.keySet()) {
|
||||||
|
String jobId = MRApps.toString(id);
|
||||||
|
|
||||||
|
ClientResponse response = r.path("ws").path("v1").path("mapreduce")
|
||||||
|
.path("jobs").path(jobId).path("counters/")
|
||||||
|
.accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
|
||||||
|
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
|
||||||
|
JSONObject json = response.getEntity(JSONObject.class);
|
||||||
|
assertEquals("incorrect number of elements", 1, json.length());
|
||||||
|
JSONObject info = json.getJSONObject("jobCounters");
|
||||||
|
verifyAMJobCounters(info, jobsMap.get(id));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void testJobCountersDefault() throws JSONException, Exception {
|
||||||
|
WebResource r = resource();
|
||||||
|
Map<JobId, Job> jobsMap = appContext.getAllJobs();
|
||||||
|
for (JobId id : jobsMap.keySet()) {
|
||||||
|
String jobId = MRApps.toString(id);
|
||||||
|
|
||||||
|
ClientResponse response = r.path("ws").path("v1").path("mapreduce")
|
||||||
|
.path("jobs").path(jobId).path("counters/").get(ClientResponse.class);
|
||||||
|
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
|
||||||
|
JSONObject json = response.getEntity(JSONObject.class);
|
||||||
|
assertEquals("incorrect number of elements", 1, json.length());
|
||||||
|
JSONObject info = json.getJSONObject("jobCounters");
|
||||||
|
verifyAMJobCounters(info, jobsMap.get(id));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void testJobCountersXML() throws Exception {
|
||||||
|
WebResource r = resource();
|
||||||
|
Map<JobId, Job> jobsMap = appContext.getAllJobs();
|
||||||
|
for (JobId id : jobsMap.keySet()) {
|
||||||
|
String jobId = MRApps.toString(id);
|
||||||
|
|
||||||
|
ClientResponse response = r.path("ws").path("v1").path("mapreduce")
|
||||||
|
.path("jobs").path(jobId).path("counters")
|
||||||
|
.accept(MediaType.APPLICATION_XML).get(ClientResponse.class);
|
||||||
|
assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
|
||||||
|
String xml = response.getEntity(String.class);
|
||||||
|
DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
|
||||||
|
DocumentBuilder db = dbf.newDocumentBuilder();
|
||||||
|
InputSource is = new InputSource();
|
||||||
|
is.setCharacterStream(new StringReader(xml));
|
||||||
|
Document dom = db.parse(is);
|
||||||
|
NodeList info = dom.getElementsByTagName("jobCounters");
|
||||||
|
verifyAMJobCountersXML(info, jobsMap.get(id));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
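  // Illustrative shape of the jobCounters payload that the two verify
  // methods below walk over. Field names are taken from the assertions;
  // the values are made up:
  //
  //   {"jobCounters": {
  //      "id": "job_...",
  //      "counterGroup": [
  //        {"counterGroupName": "...",
  //         "counter": [
  //           {"name": "...", "mapCounterValue": 0,
  //            "reduceCounterValue": 0, "totalCounterValue": 0} ] } ] } }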
  public void verifyAMJobCounters(JSONObject info, Job job)
      throws JSONException {

    assertEquals("incorrect number of elements", 2, info.length());

    WebServicesTestUtils.checkStringMatch("id", MRApps.toString(job.getID()),
        info.getString("id"));
    // just do simple verification of fields - not that the data in the
    // fields is correct
    JSONArray counterGroups = info.getJSONArray("counterGroup");
    for (int i = 0; i < counterGroups.length(); i++) {
      JSONObject counterGroup = counterGroups.getJSONObject(i);
      String name = counterGroup.getString("counterGroupName");
      assertTrue("name not set", (name != null && !name.isEmpty()));
      JSONArray counters = counterGroup.getJSONArray("counter");
      for (int j = 0; j < counters.length(); j++) {
        // index with j, not i: i walks the counter groups, j walks the
        // counters inside the current group
        JSONObject counter = counters.getJSONObject(j);
        String counterName = counter.getString("name");
        assertTrue("counter name not set",
            (counterName != null && !counterName.isEmpty()));

        long mapValue = counter.getLong("mapCounterValue");
        assertTrue("mapCounterValue not >= 0", mapValue >= 0);

        long reduceValue = counter.getLong("reduceCounterValue");
        assertTrue("reduceCounterValue not >= 0", reduceValue >= 0);

        long totalValue = counter.getLong("totalCounterValue");
        assertTrue("totalCounterValue not >= 0", totalValue >= 0);

      }
    }
  }

  public void verifyAMJobCountersXML(NodeList nodes, Job job) {

    for (int i = 0; i < nodes.getLength(); i++) {
      Element element = (Element) nodes.item(i);

      assertNotNull("Job not found - output incorrect", job);

      WebServicesTestUtils.checkStringMatch("id", MRApps.toString(job.getID()),
          WebServicesTestUtils.getXmlString(element, "id"));
      // just do simple verification of fields - not that the data in the
      // fields is correct
      NodeList groups = element.getElementsByTagName("counterGroup");

      for (int j = 0; j < groups.getLength(); j++) {
        Element counters = (Element) groups.item(j);
        assertNotNull("should have counters in the web service info", counters);
        String name = WebServicesTestUtils.getXmlString(counters,
            "counterGroupName");
        assertTrue("name not set", (name != null && !name.isEmpty()));
        NodeList counterArr = counters.getElementsByTagName("counter");
        for (int z = 0; z < counterArr.getLength(); z++) {
          Element counter = (Element) counterArr.item(z);
          String counterName = WebServicesTestUtils.getXmlString(counter,
              "name");
          assertTrue("counter name not set",
              (counterName != null && !counterName.isEmpty()));

          long mapValue = WebServicesTestUtils.getXmlLong(counter,
              "mapCounterValue");
          assertTrue("mapCounterValue not >= 0", mapValue >= 0);

          long reduceValue = WebServicesTestUtils.getXmlLong(counter,
              "reduceCounterValue");
          assertTrue("reduceCounterValue not >= 0", reduceValue >= 0);

          long totalValue = WebServicesTestUtils.getXmlLong(counter,
              "totalCounterValue");
          assertTrue("totalCounterValue not >= 0", totalValue >= 0);
        }
      }
    }
  }

}
@@ -0,0 +1,821 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.mapreduce.v2.app.webapp;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.io.StringReader;
import java.util.Map;

import javax.ws.rs.core.MediaType;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskReport;
import org.apache.hadoop.mapreduce.v2.app.AppContext;
import org.apache.hadoop.mapreduce.v2.app.MockJobs;
import org.apache.hadoop.mapreduce.v2.app.job.Job;
import org.apache.hadoop.mapreduce.v2.app.job.Task;
import org.apache.hadoop.mapreduce.v2.util.MRApps;
import org.apache.hadoop.yarn.Clock;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.event.EventHandler;
import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
import org.apache.hadoop.yarn.webapp.WebServicesTestUtils;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import org.junit.Before;
import org.junit.Test;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;
import org.xml.sax.InputSource;

import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.servlet.GuiceServletContextListener;
import com.google.inject.servlet.ServletModule;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.ClientResponse.Status;
import com.sun.jersey.api.client.UniformInterfaceException;
import com.sun.jersey.api.client.WebResource;
import com.sun.jersey.guice.spi.container.servlet.GuiceContainer;
import com.sun.jersey.test.framework.JerseyTest;
import com.sun.jersey.test.framework.WebAppDescriptor;

/**
 * Test the app master web service Rest API for getting tasks, a specific task,
 * and task counters.
 *
 * /ws/v1/mapreduce/jobs/{jobid}/tasks
 * /ws/v1/mapreduce/jobs/{jobid}/tasks/{taskid}
 * /ws/v1/mapreduce/jobs/{jobid}/tasks/{taskid}/counters
 */
public class TestAMWebServicesTasks extends JerseyTest {

  private static Configuration conf = new Configuration();
  private static TestAppContext appContext;

  static class TestAppContext implements AppContext {
    final ApplicationAttemptId appAttemptID;
    final ApplicationId appID;
    final String user = MockJobs.newUserName();
    final Map<JobId, Job> jobs;
    final long startTime = System.currentTimeMillis();

    TestAppContext(int appid, int numJobs, int numTasks, int numAttempts) {
      appID = MockJobs.newAppID(appid);
      appAttemptID = MockJobs.newAppAttemptID(appID, 0);
      jobs = MockJobs.newJobs(appID, numJobs, numTasks, numAttempts);
    }

    TestAppContext() {
      this(0, 1, 2, 1);
    }

    @Override
    public ApplicationAttemptId getApplicationAttemptId() {
      return appAttemptID;
    }

    @Override
    public ApplicationId getApplicationID() {
      return appID;
    }

    @Override
    public CharSequence getUser() {
      return user;
    }

    @Override
    public Job getJob(JobId jobID) {
      return jobs.get(jobID);
    }

    @Override
    public Map<JobId, Job> getAllJobs() {
      return jobs; // OK
    }

    @SuppressWarnings("rawtypes")
    @Override
    public EventHandler getEventHandler() {
      return null;
    }

    @Override
    public Clock getClock() {
      return null;
    }

    @Override
    public String getApplicationName() {
      return "TestApp";
    }

    @Override
    public long getStartTime() {
      return startTime;
    }
  }

  private Injector injector = Guice.createInjector(new ServletModule() {
    @Override
    protected void configureServlets() {

      appContext = new TestAppContext();
      bind(JAXBContextResolver.class);
      bind(AMWebServices.class);
      bind(GenericExceptionHandler.class);
      bind(AppContext.class).toInstance(appContext);
      bind(Configuration.class).toInstance(conf);

      serve("/*").with(GuiceContainer.class);
    }
  });

  public class GuiceServletConfig extends GuiceServletContextListener {

    @Override
    protected Injector getInjector() {
      return injector;
    }
  }

  @Before
  @Override
  public void setUp() throws Exception {
    super.setUp();
  }

  public TestAMWebServicesTasks() {
    super(new WebAppDescriptor.Builder(
        "org.apache.hadoop.mapreduce.v2.app.webapp")
        .contextListenerClass(GuiceServletConfig.class)
        .filterClass(com.google.inject.servlet.GuiceFilter.class)
        .contextPath("jersey-guice-filter").servletPath("/").build());
  }

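  // A sketch of the request/response pair these tests exercise, assuming
  // the response shape implied by the assertions below (values illustrative):
  //
  //   GET /ws/v1/mapreduce/jobs/{jobid}/tasks   (Accept: application/json)
  //
  //   {"tasks": {"task": [ { ... }, { ... } ]}}
  //
  // Each entry in the "task" array is checked by verifyAMSingleTask further
  // down in this class; the optional "type" query parameter ("m" or "r")
  // filters the array down to map or reduce tasks.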
  @Test
  public void testTasks() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      ClientResponse response = r.path("ws").path("v1").path("mapreduce")
          .path("jobs").path(jobId).path("tasks")
          .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
      assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
      JSONObject json = response.getEntity(JSONObject.class);
      assertEquals("incorrect number of elements", 1, json.length());
      JSONObject tasks = json.getJSONObject("tasks");
      JSONArray arr = tasks.getJSONArray("task");
      assertEquals("incorrect number of elements", 2, arr.length());

      verifyAMTask(arr, jobsMap.get(id), null);
    }
  }

  @Test
  public void testTasksDefault() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      ClientResponse response = r.path("ws").path("v1").path("mapreduce")
          .path("jobs").path(jobId).path("tasks").get(ClientResponse.class);
      assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
      JSONObject json = response.getEntity(JSONObject.class);
      assertEquals("incorrect number of elements", 1, json.length());
      JSONObject tasks = json.getJSONObject("tasks");
      JSONArray arr = tasks.getJSONArray("task");
      assertEquals("incorrect number of elements", 2, arr.length());

      verifyAMTask(arr, jobsMap.get(id), null);
    }
  }

  @Test
  public void testTasksSlash() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      ClientResponse response = r.path("ws").path("v1").path("mapreduce")
          .path("jobs").path(jobId).path("tasks/")
          .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
      assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
      JSONObject json = response.getEntity(JSONObject.class);
      assertEquals("incorrect number of elements", 1, json.length());
      JSONObject tasks = json.getJSONObject("tasks");
      JSONArray arr = tasks.getJSONArray("task");
      assertEquals("incorrect number of elements", 2, arr.length());

      verifyAMTask(arr, jobsMap.get(id), null);
    }
  }

  @Test
  public void testTasksXML() throws JSONException, Exception {

    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      ClientResponse response = r.path("ws").path("v1").path("mapreduce")
          .path("jobs").path(jobId).path("tasks")
          .accept(MediaType.APPLICATION_XML).get(ClientResponse.class);
      assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
      String xml = response.getEntity(String.class);
      DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
      DocumentBuilder db = dbf.newDocumentBuilder();
      InputSource is = new InputSource();
      is.setCharacterStream(new StringReader(xml));
      Document dom = db.parse(is);
      NodeList tasks = dom.getElementsByTagName("tasks");
      assertEquals("incorrect number of elements", 1, tasks.getLength());
      NodeList task = dom.getElementsByTagName("task");
      verifyAMTaskXML(task, jobsMap.get(id));
    }
  }

  @Test
  public void testTasksQueryMap() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      String type = "m";
      ClientResponse response = r.path("ws").path("v1").path("mapreduce")
          .path("jobs").path(jobId).path("tasks").queryParam("type", type)
          .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
      assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
      JSONObject json = response.getEntity(JSONObject.class);
      assertEquals("incorrect number of elements", 1, json.length());
      JSONObject tasks = json.getJSONObject("tasks");
      JSONArray arr = tasks.getJSONArray("task");
      assertEquals("incorrect number of elements", 1, arr.length());
      verifyAMTask(arr, jobsMap.get(id), type);
    }
  }

  @Test
  public void testTasksQueryReduce() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      String type = "r";
      ClientResponse response = r.path("ws").path("v1").path("mapreduce")
          .path("jobs").path(jobId).path("tasks").queryParam("type", type)
          .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
      assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
      JSONObject json = response.getEntity(JSONObject.class);
      assertEquals("incorrect number of elements", 1, json.length());
      JSONObject tasks = json.getJSONObject("tasks");
      JSONArray arr = tasks.getJSONArray("task");
      assertEquals("incorrect number of elements", 1, arr.length());
      verifyAMTask(arr, jobsMap.get(id), type);
    }
  }

  @Test
  public void testTasksQueryInvalid() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      // tasktype must be exactly either "m" or "r"
      String tasktype = "reduce";

      try {
        r.path("ws").path("v1").path("mapreduce").path("jobs").path(jobId)
            .path("tasks").queryParam("type", tasktype)
            .accept(MediaType.APPLICATION_JSON).get(JSONObject.class);
        fail("should have thrown exception on invalid uri");
      } catch (UniformInterfaceException ue) {
        ClientResponse response = ue.getResponse();
        assertEquals(Status.BAD_REQUEST, response.getClientResponseStatus());
        assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
        JSONObject msg = response.getEntity(JSONObject.class);
        JSONObject exception = msg.getJSONObject("RemoteException");
        assertEquals("incorrect number of elements", 3, exception.length());
        String message = exception.getString("message");
        String type = exception.getString("exception");
        String classname = exception.getString("javaClassName");
        WebServicesTestUtils.checkStringMatch("exception message",
            "java.lang.Exception: tasktype must be either m or r", message);
        WebServicesTestUtils.checkStringMatch("exception type",
            "BadRequestException", type);
        WebServicesTestUtils.checkStringMatch("exception classname",
            "org.apache.hadoop.yarn.webapp.BadRequestException", classname);
      }
    }
  }

  @Test
  public void testTaskId() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      for (Task task : jobsMap.get(id).getTasks().values()) {

        String tid = MRApps.toString(task.getID());
        ClientResponse response = r.path("ws").path("v1").path("mapreduce")
            .path("jobs").path(jobId).path("tasks").path(tid)
            .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
        assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
        JSONObject json = response.getEntity(JSONObject.class);
        assertEquals("incorrect number of elements", 1, json.length());
        JSONObject info = json.getJSONObject("task");
        verifyAMSingleTask(info, task);
      }
    }
  }

  @Test
  public void testTaskIdSlash() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      for (Task task : jobsMap.get(id).getTasks().values()) {

        String tid = MRApps.toString(task.getID());
        ClientResponse response = r.path("ws").path("v1").path("mapreduce")
            .path("jobs").path(jobId).path("tasks").path(tid + "/")
            .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
        assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
        JSONObject json = response.getEntity(JSONObject.class);
        assertEquals("incorrect number of elements", 1, json.length());
        JSONObject info = json.getJSONObject("task");
        verifyAMSingleTask(info, task);
      }
    }
  }

  @Test
  public void testTaskIdDefault() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      for (Task task : jobsMap.get(id).getTasks().values()) {

        String tid = MRApps.toString(task.getID());
        ClientResponse response = r.path("ws").path("v1").path("mapreduce")
            .path("jobs").path(jobId).path("tasks").path(tid)
            .get(ClientResponse.class);
        assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
        JSONObject json = response.getEntity(JSONObject.class);
        assertEquals("incorrect number of elements", 1, json.length());
        JSONObject info = json.getJSONObject("task");
        verifyAMSingleTask(info, task);
      }
    }
  }

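  // The next group of tests probes malformed task ids. Summarizing the cases
  // exercised below (all are expected to come back as 404/NotFoundException):
  //   "bogustaskid"       - unparseable id       -> "Error parsing task ID: ..."
  //   "task_1234_0_0_m_0" - well formed, absent  -> "task not found with id ..."
  //   "task_1234_0_0_d_0" - bad type letter      -> "Unknown task symbol: d"
  //   "task_1234_0_m_0"   - field out of place   -> "For input string: \"m\""
  //   "task_1234_0_0_m"   - missing trailing id  -> "Error parsing task ID: ..."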
  @Test
  public void testTaskIdBogus() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      String tid = "bogustaskid";
      try {
        r.path("ws").path("v1").path("mapreduce").path("jobs").path(jobId)
            .path("tasks").path(tid).get(JSONObject.class);
        fail("should have thrown exception on invalid uri");
      } catch (UniformInterfaceException ue) {
        ClientResponse response = ue.getResponse();
        assertEquals(Status.NOT_FOUND, response.getClientResponseStatus());
        assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
        JSONObject msg = response.getEntity(JSONObject.class);
        JSONObject exception = msg.getJSONObject("RemoteException");
        assertEquals("incorrect number of elements", 3, exception.length());
        String message = exception.getString("message");
        String type = exception.getString("exception");
        String classname = exception.getString("javaClassName");
        WebServicesTestUtils.checkStringMatch("exception message",
            "java.lang.Exception: Error parsing task ID: bogustaskid", message);
        WebServicesTestUtils.checkStringMatch("exception type",
            "NotFoundException", type);
        WebServicesTestUtils.checkStringMatch("exception classname",
            "org.apache.hadoop.yarn.webapp.NotFoundException", classname);
      }
    }
  }

  @Test
  public void testTaskIdNonExist() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      String tid = "task_1234_0_0_m_0";
      try {
        r.path("ws").path("v1").path("mapreduce").path("jobs").path(jobId)
            .path("tasks").path(tid).get(JSONObject.class);
        fail("should have thrown exception on invalid uri");
      } catch (UniformInterfaceException ue) {
        ClientResponse response = ue.getResponse();
        assertEquals(Status.NOT_FOUND, response.getClientResponseStatus());
        assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
        JSONObject msg = response.getEntity(JSONObject.class);
        JSONObject exception = msg.getJSONObject("RemoteException");
        assertEquals("incorrect number of elements", 3, exception.length());
        String message = exception.getString("message");
        String type = exception.getString("exception");
        String classname = exception.getString("javaClassName");
        WebServicesTestUtils.checkStringMatch("exception message",
            "java.lang.Exception: task not found with id task_1234_0_0_m_0",
            message);
        WebServicesTestUtils.checkStringMatch("exception type",
            "NotFoundException", type);
        WebServicesTestUtils.checkStringMatch("exception classname",
            "org.apache.hadoop.yarn.webapp.NotFoundException", classname);
      }
    }
  }

  @Test
  public void testTaskIdInvalid() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      String tid = "task_1234_0_0_d_0";
      try {
        r.path("ws").path("v1").path("mapreduce").path("jobs").path(jobId)
            .path("tasks").path(tid).get(JSONObject.class);
        fail("should have thrown exception on invalid uri");
      } catch (UniformInterfaceException ue) {
        ClientResponse response = ue.getResponse();
        assertEquals(Status.NOT_FOUND, response.getClientResponseStatus());
        assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
        JSONObject msg = response.getEntity(JSONObject.class);
        JSONObject exception = msg.getJSONObject("RemoteException");
        assertEquals("incorrect number of elements", 3, exception.length());
        String message = exception.getString("message");
        String type = exception.getString("exception");
        String classname = exception.getString("javaClassName");
        WebServicesTestUtils.checkStringMatch("exception message",
            "java.lang.Exception: Unknown task symbol: d", message);
        WebServicesTestUtils.checkStringMatch("exception type",
            "NotFoundException", type);
        WebServicesTestUtils.checkStringMatch("exception classname",
            "org.apache.hadoop.yarn.webapp.NotFoundException", classname);
      }
    }
  }

  @Test
  public void testTaskIdInvalid2() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      String tid = "task_1234_0_m_0";
      try {
        r.path("ws").path("v1").path("mapreduce").path("jobs").path(jobId)
            .path("tasks").path(tid).get(JSONObject.class);
        fail("should have thrown exception on invalid uri");
      } catch (UniformInterfaceException ue) {
        ClientResponse response = ue.getResponse();
        assertEquals(Status.NOT_FOUND, response.getClientResponseStatus());
        assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
        JSONObject msg = response.getEntity(JSONObject.class);
        JSONObject exception = msg.getJSONObject("RemoteException");
        assertEquals("incorrect number of elements", 3, exception.length());
        String message = exception.getString("message");
        String type = exception.getString("exception");
        String classname = exception.getString("javaClassName");
        WebServicesTestUtils.checkStringMatch("exception message",
            "java.lang.Exception: For input string: \"m\"", message);
        WebServicesTestUtils.checkStringMatch("exception type",
            "NotFoundException", type);
        WebServicesTestUtils.checkStringMatch("exception classname",
            "org.apache.hadoop.yarn.webapp.NotFoundException", classname);
      }
    }
  }

  @Test
  public void testTaskIdInvalid3() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      String tid = "task_1234_0_0_m";
      try {
        r.path("ws").path("v1").path("mapreduce").path("jobs").path(jobId)
            .path("tasks").path(tid).get(JSONObject.class);
        fail("should have thrown exception on invalid uri");
      } catch (UniformInterfaceException ue) {
        ClientResponse response = ue.getResponse();
        assertEquals(Status.NOT_FOUND, response.getClientResponseStatus());
        assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
        JSONObject msg = response.getEntity(JSONObject.class);
        JSONObject exception = msg.getJSONObject("RemoteException");
        assertEquals("incorrect number of elements", 3, exception.length());
        String message = exception.getString("message");
        String type = exception.getString("exception");
        String classname = exception.getString("javaClassName");
        WebServicesTestUtils.checkStringMatch("exception message",
            "java.lang.Exception: Error parsing task ID: task_1234_0_0_m",
            message);
        WebServicesTestUtils.checkStringMatch("exception type",
            "NotFoundException", type);
        WebServicesTestUtils.checkStringMatch("exception classname",
            "org.apache.hadoop.yarn.webapp.NotFoundException", classname);
      }
    }
  }

  @Test
  public void testTaskIdXML() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      for (Task task : jobsMap.get(id).getTasks().values()) {

        String tid = MRApps.toString(task.getID());
        ClientResponse response = r.path("ws").path("v1").path("mapreduce")
            .path("jobs").path(jobId).path("tasks").path(tid)
            .accept(MediaType.APPLICATION_XML).get(ClientResponse.class);

        assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
        String xml = response.getEntity(String.class);
        DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
        DocumentBuilder db = dbf.newDocumentBuilder();
        InputSource is = new InputSource();
        is.setCharacterStream(new StringReader(xml));
        Document dom = db.parse(is);
        NodeList nodes = dom.getElementsByTagName("task");
        for (int i = 0; i < nodes.getLength(); i++) {
          Element element = (Element) nodes.item(i);
          verifyAMSingleTaskXML(element, task);
        }
      }
    }
  }

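  // Illustrative single-task JSON object checked by verifyAMSingleTask below
  // (eight fields, per the length assertion; the values here are made up):
  //
  //   {"id": "task_...", "state": "RUNNING", "type": "MAP",
  //    "successfulAttempt": "...", "startTime": 0, "finishTime": 0,
  //    "elapsedTime": 0, "progress": 0.0}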
  public void verifyAMSingleTask(JSONObject info, Task task)
      throws JSONException {
    assertEquals("incorrect number of elements", 8, info.length());

    verifyTaskGeneric(task, info.getString("id"), info.getString("state"),
        info.getString("type"), info.getString("successfulAttempt"),
        info.getLong("startTime"), info.getLong("finishTime"),
        info.getLong("elapsedTime"), (float) info.getDouble("progress"));
  }

  public void verifyAMTask(JSONArray arr, Job job, String type)
      throws JSONException {
    for (Task task : job.getTasks().values()) {
      TaskId id = task.getID();
      String tid = MRApps.toString(id);
      Boolean found = false;
      if (type != null && task.getType() == MRApps.taskType(type)) {

        for (int i = 0; i < arr.length(); i++) {
          JSONObject info = arr.getJSONObject(i);
          if (tid.matches(info.getString("id"))) {
            found = true;
            verifyAMSingleTask(info, task);
          }
        }
        assertTrue("task with id: " + tid + " not in web service output", found);
      }
    }
  }

  public void verifyTaskGeneric(Task task, String id, String state,
      String type, String successfulAttempt, long startTime, long finishTime,
      long elapsedTime, float progress) {

    TaskId taskid = task.getID();
    String tid = MRApps.toString(taskid);
    TaskReport report = task.getReport();

    WebServicesTestUtils.checkStringMatch("id", tid, id);
    WebServicesTestUtils.checkStringMatch("type", task.getType().toString(),
        type);
    WebServicesTestUtils.checkStringMatch("state", report.getTaskState()
        .toString(), state);
    // not easily checked without duplicating logic, just make sure it's here
    assertNotNull("successfulAttempt null", successfulAttempt);
    assertEquals("startTime wrong", report.getStartTime(), startTime);
    assertEquals("finishTime wrong", report.getFinishTime(), finishTime);
    assertEquals("elapsedTime wrong", finishTime - startTime, elapsedTime);
    assertEquals("progress wrong", report.getProgress() * 100, progress, 1e-3f);
  }

  public void verifyAMSingleTaskXML(Element element, Task task) {
    verifyTaskGeneric(task, WebServicesTestUtils.getXmlString(element, "id"),
        WebServicesTestUtils.getXmlString(element, "state"),
        WebServicesTestUtils.getXmlString(element, "type"),
        WebServicesTestUtils.getXmlString(element, "successfulAttempt"),
        WebServicesTestUtils.getXmlLong(element, "startTime"),
        WebServicesTestUtils.getXmlLong(element, "finishTime"),
        WebServicesTestUtils.getXmlLong(element, "elapsedTime"),
        WebServicesTestUtils.getXmlFloat(element, "progress"));
  }

  public void verifyAMTaskXML(NodeList nodes, Job job) {

    assertEquals("incorrect number of elements", 2, nodes.getLength());

    for (Task task : job.getTasks().values()) {
      TaskId id = task.getID();
      String tid = MRApps.toString(id);
      Boolean found = false;
      for (int i = 0; i < nodes.getLength(); i++) {
        Element element = (Element) nodes.item(i);

        if (tid.matches(WebServicesTestUtils.getXmlString(element, "id"))) {
          found = true;
          verifyAMSingleTaskXML(element, task);
        }
      }
      assertTrue("task with id: " + tid + " not in web service output", found);
    }
  }

  @Test
  public void testTaskIdCounters() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      for (Task task : jobsMap.get(id).getTasks().values()) {

        String tid = MRApps.toString(task.getID());
        ClientResponse response = r.path("ws").path("v1").path("mapreduce")
            .path("jobs").path(jobId).path("tasks").path(tid).path("counters")
            .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
        assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
        JSONObject json = response.getEntity(JSONObject.class);
        assertEquals("incorrect number of elements", 1, json.length());
        JSONObject info = json.getJSONObject("jobTaskCounters");
        verifyAMJobTaskCounters(info, task);
      }
    }
  }

  @Test
  public void testTaskIdCountersSlash() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      for (Task task : jobsMap.get(id).getTasks().values()) {

        String tid = MRApps.toString(task.getID());
        ClientResponse response = r.path("ws").path("v1").path("mapreduce")
            .path("jobs").path(jobId).path("tasks").path(tid).path("counters/")
            .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
        assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
        JSONObject json = response.getEntity(JSONObject.class);
        assertEquals("incorrect number of elements", 1, json.length());
        JSONObject info = json.getJSONObject("jobTaskCounters");
        verifyAMJobTaskCounters(info, task);
      }
    }
  }

  @Test
  public void testTaskIdCountersDefault() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      for (Task task : jobsMap.get(id).getTasks().values()) {

        String tid = MRApps.toString(task.getID());
        ClientResponse response = r.path("ws").path("v1").path("mapreduce")
            .path("jobs").path(jobId).path("tasks").path(tid).path("counters")
            .get(ClientResponse.class);
        assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
        JSONObject json = response.getEntity(JSONObject.class);
        assertEquals("incorrect number of elements", 1, json.length());
        JSONObject info = json.getJSONObject("jobTaskCounters");
        verifyAMJobTaskCounters(info, task);
      }
    }
  }

  @Test
  public void testJobTaskCountersXML() throws Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      for (Task task : jobsMap.get(id).getTasks().values()) {

        String tid = MRApps.toString(task.getID());
        ClientResponse response = r.path("ws").path("v1").path("mapreduce")
            .path("jobs").path(jobId).path("tasks").path(tid).path("counters")
            .accept(MediaType.APPLICATION_XML).get(ClientResponse.class);
        assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
        String xml = response.getEntity(String.class);
        DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
        DocumentBuilder db = dbf.newDocumentBuilder();
        InputSource is = new InputSource();
        is.setCharacterStream(new StringReader(xml));
        Document dom = db.parse(is);
        NodeList info = dom.getElementsByTagName("jobTaskCounters");
        verifyAMTaskCountersXML(info, task);
      }
    }
  }

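  // Illustrative shape of the jobTaskCounters payload verified below (field
  // names from the assertions; values made up):
  //
  //   {"jobTaskCounters": {
  //      "id": "task_...",
  //      "taskCounterGroup": [
  //        {"counterGroupName": "...",
  //         "counter": [ {"name": "...", "value": 0} ] } ] } }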
  public void verifyAMJobTaskCounters(JSONObject info, Task task)
      throws JSONException {

    assertEquals("incorrect number of elements", 2, info.length());

    WebServicesTestUtils.checkStringMatch("id", MRApps.toString(task.getID()),
        info.getString("id"));
    // just do simple verification of fields - not that the data in the
    // fields is correct
    JSONArray counterGroups = info.getJSONArray("taskCounterGroup");
    for (int i = 0; i < counterGroups.length(); i++) {
      JSONObject counterGroup = counterGroups.getJSONObject(i);
      String name = counterGroup.getString("counterGroupName");
      assertTrue("name not set", (name != null && !name.isEmpty()));
      JSONArray counters = counterGroup.getJSONArray("counter");
      for (int j = 0; j < counters.length(); j++) {
        // index with j, not i: i walks the counter groups, j walks the
        // counters inside the current group
        JSONObject counter = counters.getJSONObject(j);
        String counterName = counter.getString("name");
        assertTrue("counter name not set",
            (counterName != null && !counterName.isEmpty()));
        long value = counter.getLong("value");
        assertTrue("value not >= 0", value >= 0);
      }
    }
  }

  public void verifyAMTaskCountersXML(NodeList nodes, Task task) {

    for (int i = 0; i < nodes.getLength(); i++) {

      Element element = (Element) nodes.item(i);
      WebServicesTestUtils.checkStringMatch("id",
          MRApps.toString(task.getID()),
          WebServicesTestUtils.getXmlString(element, "id"));
      // just do simple verification of fields - not that the data in the
      // fields is correct
      NodeList groups = element.getElementsByTagName("taskCounterGroup");

      for (int j = 0; j < groups.getLength(); j++) {
        Element counters = (Element) groups.item(j);
        assertNotNull("should have counters in the web service info", counters);
        String name = WebServicesTestUtils.getXmlString(counters,
            "counterGroupName");
        assertTrue("name not set", (name != null && !name.isEmpty()));
        NodeList counterArr = counters.getElementsByTagName("counter");
        for (int z = 0; z < counterArr.getLength(); z++) {
          Element counter = (Element) counterArr.item(z);
          String counterName = WebServicesTestUtils.getXmlString(counter,
              "name");
          assertTrue("counter name not set",
              (counterName != null && !counterName.isEmpty()));

          long value = WebServicesTestUtils.getXmlLong(counter, "value");
          assertTrue("value not >= 0", value >= 0);

        }
      }
    }
  }

}
@ -54,7 +54,6 @@
|
||||||
<plugin>
|
<plugin>
|
||||||
<groupId>org.apache.avro</groupId>
|
<groupId>org.apache.avro</groupId>
|
||||||
<artifactId>avro-maven-plugin</artifactId>
|
<artifactId>avro-maven-plugin</artifactId>
|
||||||
<version>1.5.3</version>
|
|
||||||
<executions>
|
<executions>
|
||||||
<execution>
|
<execution>
|
||||||
<phase>generate-sources</phase>
|
<phase>generate-sources</phase>
|
||||||
|
|
|
@ -31,14 +31,13 @@ import javax.ws.rs.core.UriInfo;
|
||||||
|
|
||||||
import org.apache.hadoop.conf.Configuration;
|
import org.apache.hadoop.conf.Configuration;
|
||||||
import org.apache.hadoop.mapreduce.v2.api.records.AMInfo;
|
import org.apache.hadoop.mapreduce.v2.api.records.AMInfo;
|
||||||
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
|
|
||||||
import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
|
|
||||||
import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
|
import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
|
||||||
import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
|
import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
|
||||||
import org.apache.hadoop.mapreduce.v2.app.AppContext;
|
import org.apache.hadoop.mapreduce.v2.app.AppContext;
|
||||||
import org.apache.hadoop.mapreduce.v2.app.job.Job;
|
import org.apache.hadoop.mapreduce.v2.app.job.Job;
|
||||||
import org.apache.hadoop.mapreduce.v2.app.job.Task;
|
import org.apache.hadoop.mapreduce.v2.app.job.Task;
|
||||||
import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
|
import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
|
||||||
|
import org.apache.hadoop.mapreduce.v2.app.webapp.AMWebServices;
|
||||||
import org.apache.hadoop.mapreduce.v2.app.webapp.dao.ConfInfo;
|
import org.apache.hadoop.mapreduce.v2.app.webapp.dao.ConfInfo;
|
||||||
import org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobCounterInfo;
|
import org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobCounterInfo;
|
||||||
import org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobTaskAttemptCounterInfo;
|
import org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobTaskAttemptCounterInfo;
|
||||||
|
@ -131,7 +130,7 @@ public class HsWebServices {
|
||||||
try {
|
try {
|
||||||
sBegin = Long.parseLong(startedBegin);
|
sBegin = Long.parseLong(startedBegin);
|
||||||
} catch (NumberFormatException e) {
|
} catch (NumberFormatException e) {
|
||||||
throw new BadRequestException(e.getMessage());
|
throw new BadRequestException("Invalid number format: " + e.getMessage());
|
||||||
}
|
}
|
||||||
if (sBegin < 0) {
|
if (sBegin < 0) {
|
||||||
throw new BadRequestException("startedTimeBegin must be greater than 0");
|
throw new BadRequestException("startedTimeBegin must be greater than 0");
|
||||||
|
@ -142,7 +141,7 @@ public class HsWebServices {
|
||||||
try {
|
try {
|
||||||
sEnd = Long.parseLong(startedEnd);
|
sEnd = Long.parseLong(startedEnd);
|
||||||
} catch (NumberFormatException e) {
|
} catch (NumberFormatException e) {
|
||||||
throw new BadRequestException(e.getMessage());
|
throw new BadRequestException("Invalid number format: " + e.getMessage());
|
||||||
}
|
}
|
||||||
if (sEnd < 0) {
|
if (sEnd < 0) {
|
||||||
throw new BadRequestException("startedTimeEnd must be greater than 0");
|
throw new BadRequestException("startedTimeEnd must be greater than 0");
|
||||||
|
@@ -158,10 +157,10 @@ public class HsWebServices {
       try {
         fBegin = Long.parseLong(finishBegin);
       } catch (NumberFormatException e) {
-        throw new BadRequestException(e.getMessage());
+        throw new BadRequestException("Invalid number format: " + e.getMessage());
       }
       if (fBegin < 0) {
-        throw new BadRequestException("finishTimeBegin must be greater than 0");
+        throw new BadRequestException("finishedTimeBegin must be greater than 0");
       }
     }
     if (finishEnd != null && !finishEnd.isEmpty()) {
@@ -169,15 +168,15 @@ public class HsWebServices {
       try {
         fEnd = Long.parseLong(finishEnd);
       } catch (NumberFormatException e) {
-        throw new BadRequestException(e.getMessage());
+        throw new BadRequestException("Invalid number format: " + e.getMessage());
       }
       if (fEnd < 0) {
-        throw new BadRequestException("finishTimeEnd must be greater than 0");
+        throw new BadRequestException("finishedTimeEnd must be greater than 0");
       }
     }
     if (fBegin > fEnd) {
       throw new BadRequestException(
-          "finishTimeEnd must be greater than finishTimeBegin");
+          "finishedTimeEnd must be greater than finishedTimeBegin");
     }
 
     for (Job job : appCtx.getAllJobs().values()) {
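The four hunks above make two fixes to the time-range filters on the job-list
resource: unparseable values now get an explicit "Invalid number format"
message instead of the bare NumberFormatException text, and the finish-time
error strings are renamed to match the finishedTimeBegin/finishedTimeEnd query
parameters they describe. The validation itself is the same four-step pattern
each time; a minimal sketch of that pattern as a helper (hypothetical: the
patch keeps the four inline blocks):

    // Hypothetical helper illustrating the repeated parse-and-validate steps.
    private long parseTimeParam(String value, String paramName) {
      long ts;
      try {
        ts = Long.parseLong(value);
      } catch (NumberFormatException e) {
        throw new BadRequestException("Invalid number format: " + e.getMessage());
      }
      if (ts < 0) {
        throw new BadRequestException(paramName + " must be greater than 0");
      }
      return ts;
    }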
@@ -200,7 +199,7 @@ public class HsWebServices {
       }
 
       if (userQuery != null && !userQuery.isEmpty()) {
-        if (!jobInfo.getName().equals(userQuery)) {
+        if (!jobInfo.getUserName().equals(userQuery)) {
           continue;
         }
       }
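The one-line change above is a behavior fix rather than a cleanup: the user
query filter compared against jobInfo.getName(), so filtering the job list by
user silently matched job names instead of owners. With the fix the filter
reads:

    // Skip jobs whose owner does not match the requested user.
    if (userQuery != null && !userQuery.isEmpty()) {
      if (!jobInfo.getUserName().equals(userQuery)) {
        continue;
      }
    }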
@@ -224,14 +223,8 @@ public class HsWebServices {
   @Path("/mapreduce/jobs/{jobid}")
   @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
   public JobInfo getJob(@PathParam("jobid") String jid) {
-    JobId jobId = MRApps.toJobID(jid);
-    if (jobId == null) {
-      throw new NotFoundException("job, " + jid + ", is not found");
-    }
-    Job job = appCtx.getJob(jobId);
-    if (job == null) {
-      throw new NotFoundException("job, " + jid + ", is not found");
-    }
+    Job job = AMWebServices.getJobFromJobIdString(jid, appCtx);
     return new JobInfo(job);
   }
 
@@ -239,14 +232,8 @@ public class HsWebServices {
   @Path("/mapreduce/jobs/{jobid}/attempts")
   @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
   public AMAttemptsInfo getJobAttempts(@PathParam("jobid") String jid) {
-    JobId jobId = MRApps.toJobID(jid);
-    if (jobId == null) {
-      throw new NotFoundException("job, " + jid + ", is not found");
-    }
-    Job job = appCtx.getJob(jobId);
-    if (job == null) {
-      throw new NotFoundException("job, " + jid + ", is not found");
-    }
+    Job job = AMWebServices.getJobFromJobIdString(jid, appCtx);
     AMAttemptsInfo amAttempts = new AMAttemptsInfo();
     for (AMInfo amInfo : job.getAMInfos()) {
       AMAttemptInfo attempt = new AMAttemptInfo(amInfo, MRApps.toString(job
@@ -261,53 +248,17 @@ public class HsWebServices {
   @Path("/mapreduce/jobs/{jobid}/counters")
   @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
   public JobCounterInfo getJobCounters(@PathParam("jobid") String jid) {
-    JobId jobId = MRApps.toJobID(jid);
-    if (jobId == null) {
-      throw new NotFoundException("job, " + jid + ", is not found");
-    }
-    Job job = appCtx.getJob(jobId);
-    if (job == null) {
-      throw new NotFoundException("job, " + jid + ", is not found");
-    }
-    return new JobCounterInfo(this.appCtx, job);
-  }
-
-  @GET
-  @Path("/mapreduce/jobs/{jobid}/tasks/{taskid}/counters")
-  @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
-  public JobTaskCounterInfo getSingleTaskCounters(
-      @PathParam("jobid") String jid, @PathParam("taskid") String tid) {
-    JobId jobId = MRApps.toJobID(jid);
-    if (jobId == null) {
-      throw new NotFoundException("job, " + jid + ", is not found");
-    }
-    Job job = this.appCtx.getJob(jobId);
-    if (job == null) {
-      throw new NotFoundException("job, " + jid + ", is not found");
-    }
-    TaskId taskID = MRApps.toTaskID(tid);
-    if (taskID == null) {
-      throw new NotFoundException("taskid " + tid + " not found or invalid");
-    }
-    Task task = job.getTask(taskID);
-    if (task == null) {
-      throw new NotFoundException("task not found with id " + tid);
-    }
-    return new JobTaskCounterInfo(task);
+    Job job = AMWebServices.getJobFromJobIdString(jid, appCtx);
+    return new JobCounterInfo(this.appCtx, job);
   }
 
   @GET
   @Path("/mapreduce/jobs/{jobid}/conf")
   @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
   public ConfInfo getJobConf(@PathParam("jobid") String jid) {
-    JobId jobId = MRApps.toJobID(jid);
-    if (jobId == null) {
-      throw new NotFoundException("job, " + jid + ", is not found");
-    }
-    Job job = appCtx.getJob(jobId);
-    if (job == null) {
-      throw new NotFoundException("job, " + jid + ", is not found");
-    }
+    Job job = AMWebServices.getJobFromJobIdString(jid, appCtx);
     ConfInfo info;
     try {
       info = new ConfInfo(job, this.conf);
@@ -315,7 +266,6 @@ public class HsWebServices {
       throw new NotFoundException("unable to load configuration for job: "
           + jid);
     }
-
     return info;
   }
 
@@ -324,10 +274,8 @@ public class HsWebServices {
   @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
   public TasksInfo getJobTasks(@PathParam("jobid") String jid,
       @QueryParam("type") String type) {
-    Job job = this.appCtx.getJob(MRApps.toJobID(jid));
-    if (job == null) {
-      throw new NotFoundException("job, " + jid + ", is not found");
-    }
+    Job job = AMWebServices.getJobFromJobIdString(jid, appCtx);
     TasksInfo allTasks = new TasksInfo();
     for (Task task : job.getTasks().values()) {
       TaskType ttype = null;
@@ -351,10 +299,20 @@ public class HsWebServices {
   @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
   public TaskInfo getJobTask(@PathParam("jobid") String jid,
       @PathParam("taskid") String tid) {
-    Job job = this.appCtx.getJob(MRApps.toJobID(jid));
-    if (job == null) {
-      throw new NotFoundException("job, " + jid + ", is not found");
-    }
+    Job job = AMWebServices.getJobFromJobIdString(jid, appCtx);
+    Task task = AMWebServices.getTaskFromTaskIdString(tid, job);
+    return new TaskInfo(task);
+
+  }
+
+  @GET
+  @Path("/mapreduce/jobs/{jobid}/tasks/{taskid}/counters")
+  @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
+  public JobTaskCounterInfo getSingleTaskCounters(
+      @PathParam("jobid") String jid, @PathParam("taskid") String tid) {
+
+    Job job = AMWebServices.getJobFromJobIdString(jid, appCtx);
     TaskId taskID = MRApps.toTaskID(tid);
     if (taskID == null) {
       throw new NotFoundException("taskid " + tid + " not found or invalid");
@@ -363,8 +321,7 @@ public class HsWebServices {
     if (task == null) {
       throw new NotFoundException("task not found with id " + tid);
     }
-    return new TaskInfo(task);
-
+    return new JobTaskCounterInfo(task);
   }
 
   @GET
@@ -372,19 +329,10 @@ public class HsWebServices {
   @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
   public TaskAttemptsInfo getJobTaskAttempts(@PathParam("jobid") String jid,
       @PathParam("taskid") String tid) {
 
     TaskAttemptsInfo attempts = new TaskAttemptsInfo();
-    Job job = this.appCtx.getJob(MRApps.toJobID(jid));
-    if (job == null) {
-      throw new NotFoundException("job, " + jid + ", is not found");
-    }
-    TaskId taskID = MRApps.toTaskID(tid);
-    if (taskID == null) {
-      throw new NotFoundException("taskid " + tid + " not found or invalid");
-    }
-    Task task = job.getTask(taskID);
-    if (task == null) {
-      throw new NotFoundException("task not found with id " + tid);
-    }
+    Job job = AMWebServices.getJobFromJobIdString(jid, appCtx);
+    Task task = AMWebServices.getTaskFromTaskIdString(tid, job);
     for (TaskAttempt ta : task.getAttempts().values()) {
       if (ta != null) {
         if (task.getType() == TaskType.REDUCE) {
@@ -402,28 +350,11 @@ public class HsWebServices {
   @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
   public TaskAttemptInfo getJobTaskAttemptId(@PathParam("jobid") String jid,
       @PathParam("taskid") String tid, @PathParam("attemptid") String attId) {
-    Job job = this.appCtx.getJob(MRApps.toJobID(jid));
-    if (job == null) {
-      throw new NotFoundException("job, " + jid + ", is not found");
-    }
-    TaskId taskID = MRApps.toTaskID(tid);
-    if (taskID == null) {
-      throw new NotFoundException("taskid " + tid + " not found or invalid");
-    }
-    Task task = job.getTask(taskID);
-    if (task == null) {
-      throw new NotFoundException("task not found with id " + tid);
-    }
-    TaskAttemptId attemptId = MRApps.toTaskAttemptID(attId);
-    if (attemptId == null) {
-      throw new NotFoundException("task attempt id " + attId
-          + " not found or invalid");
-    }
-    TaskAttempt ta = task.getAttempt(attemptId);
-    if (ta == null) {
-      throw new NotFoundException("Error getting info on task attempt id "
-          + attId);
-    }
+    Job job = AMWebServices.getJobFromJobIdString(jid, appCtx);
+    Task task = AMWebServices.getTaskFromTaskIdString(tid, job);
+    TaskAttempt ta = AMWebServices.getTaskAttemptFromTaskAttemptString(attId,
+        task);
     if (task.getType() == TaskType.REDUCE) {
       return new ReduceTaskAttemptInfo(ta, task.getType());
     } else {
@@ -437,32 +368,11 @@ public class HsWebServices {
   public JobTaskAttemptCounterInfo getJobTaskAttemptIdCounters(
       @PathParam("jobid") String jid, @PathParam("taskid") String tid,
       @PathParam("attemptid") String attId) {
-    JobId jobId = MRApps.toJobID(jid);
-    if (jobId == null) {
-      throw new NotFoundException("job, " + jid + ", is not found");
-    }
-    Job job = this.appCtx.getJob(jobId);
-    if (job == null) {
-      throw new NotFoundException("job, " + jid + ", is not found");
-    }
-    TaskId taskID = MRApps.toTaskID(tid);
-    if (taskID == null) {
-      throw new NotFoundException("taskid " + tid + " not found or invalid");
-    }
-    Task task = job.getTask(taskID);
-    if (task == null) {
-      throw new NotFoundException("task not found with id " + tid);
-    }
-    TaskAttemptId attemptId = MRApps.toTaskAttemptID(attId);
-    if (attemptId == null) {
-      throw new NotFoundException("task attempt id " + attId
-          + " not found or invalid");
-    }
-    TaskAttempt ta = task.getAttempt(attemptId);
-    if (ta == null) {
-      throw new NotFoundException("Error getting info on task attempt id "
-          + attId);
-    }
+    Job job = AMWebServices.getJobFromJobIdString(jid, appCtx);
+    Task task = AMWebServices.getTaskFromTaskIdString(tid, job);
+    TaskAttempt ta = AMWebServices.getTaskAttemptFromTaskAttemptString(attId,
+        task);
     return new JobTaskAttemptCounterInfo(ta);
   }
 
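All of the HsWebServices handlers above now delegate ID parsing and lookup to
static helpers on AMWebServices rather than open-coding MRApps.toJobID /
appCtx.getJob plus the null checks in every method. Only the call sites appear
in this diff; a sketch of the expected helper shape, assuming it preserves the
not-found behavior of the deleted inline code:

    // Sketch only: resolve a job ID string or fail with the 404-mapped
    // NotFoundException, exactly as the removed inline blocks did.
    public static Job getJobFromJobIdString(String jid, AppContext appCtx)
        throws NotFoundException {
      JobId jobId = MRApps.toJobID(jid);
      if (jobId == null) {
        throw new NotFoundException("job, " + jid + ", is not found");
      }
      Job job = appCtx.getJob(jobId);
      if (job == null) {
        throw new NotFoundException("job, " + jid + ", is not found");
      }
      return job;
    }

getTaskFromTaskIdString and getTaskAttemptFromTaskAttemptString presumably
follow the same resolve-or-throw pattern for tasks and attempts.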
@@ -42,6 +42,8 @@ import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskAttemptsInfo;
 import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskCounterGroupInfo;
 import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskCounterInfo;
 import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TasksInfo;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskInfo;
+
 import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.AMAttemptInfo;
 import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.AMAttemptsInfo;
 import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.HistoryInfo;
@@ -57,13 +59,12 @@ public class JAXBContextResolver implements ContextResolver<JAXBContext> {
 
   // you have to specify all the dao classes here
   private final Class[] cTypes = { HistoryInfo.class, JobInfo.class,
-      JobsInfo.class, TasksInfo.class, TaskAttemptsInfo.class, ConfInfo.class,
-      CounterInfo.class, JobTaskCounterInfo.class,
-      JobTaskAttemptCounterInfo.class,
-      TaskCounterInfo.class, JobCounterInfo.class, ReduceTaskAttemptInfo.class,
-      TaskAttemptInfo.class, TaskAttemptsInfo.class, CounterGroupInfo.class,
-      TaskCounterGroupInfo.class,
-      AMAttemptInfo.class, AMAttemptsInfo.class};
+      JobsInfo.class, TaskInfo.class, TasksInfo.class, TaskAttemptsInfo.class,
+      ConfInfo.class, CounterInfo.class, JobTaskCounterInfo.class,
+      JobTaskAttemptCounterInfo.class, TaskCounterInfo.class,
+      JobCounterInfo.class, ReduceTaskAttemptInfo.class, TaskAttemptInfo.class,
+      TaskAttemptsInfo.class, CounterGroupInfo.class,
+      TaskCounterGroupInfo.class, AMAttemptInfo.class, AMAttemptsInfo.class };
 
   public JAXBContextResolver() throws Exception {
     this.types = new HashSet<Class>(Arrays.asList(cTypes));
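The cTypes reshuffle above exists to add TaskInfo.class: the resolver's
comment ("you have to specify all the dao classes here") is literal, since a
bean missing from this array fails at response-marshalling time rather than
at compile time. A sketch of what the constructor presumably does with the
array, assuming the Jersey 1.x JSON-JAXB bridge these webapps are built on:

    // Sketch, not shown in this hunk: every class in cTypes is registered
    // with the JAXB context so it can be rendered as both JSON and XML.
    this.context = new JSONJAXBContext(JSONConfiguration.natural().build(),
        cTypes);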
@@ -26,6 +26,7 @@ import javax.xml.bind.annotation.XmlRootElement;
 import javax.xml.bind.annotation.XmlTransient;
 
 import org.apache.hadoop.mapreduce.v2.api.records.AMInfo;
+import org.apache.hadoop.yarn.api.records.ContainerId;
 import org.apache.hadoop.yarn.api.records.NodeId;
 import org.apache.hadoop.yarn.util.BuilderUtils;
 
@@ -48,21 +49,28 @@ public class AMAttemptInfo {
 
   public AMAttemptInfo(AMInfo amInfo, String jobId, String user, String host,
       String pathPrefix) {
-    this.nodeHttpAddress = amInfo.getNodeManagerHost() + ":"
-        + amInfo.getNodeManagerHttpPort();
-    NodeId nodeId = BuilderUtils.newNodeId(amInfo.getNodeManagerHost(),
-        amInfo.getNodeManagerPort());
-    this.nodeId = nodeId.toString();
+    this.nodeHttpAddress = "";
+    this.nodeId = "";
+    String nmHost = amInfo.getNodeManagerHost();
+    int nmPort = amInfo.getNodeManagerHttpPort();
+    if (nmHost != null) {
+      this.nodeHttpAddress = nmHost + ":" + nmPort;
+      NodeId nodeId = BuilderUtils.newNodeId(nmHost, nmPort);
+      this.nodeId = nodeId.toString();
+    }
     this.id = amInfo.getAppAttemptId().getAttemptId();
     this.startTime = amInfo.getStartTime();
-    this.containerId = amInfo.getContainerId().toString();
-    this.logsLink = join(
-        host,
-        pathPrefix,
-        ujoin("logs", nodeId.toString(), amInfo.getContainerId().toString(),
-            jobId, user));
-    this.shortLogsLink = ujoin("logs", nodeId.toString(), amInfo
-        .getContainerId().toString(), jobId, user);
+    this.containerId = "";
+    this.logsLink = "";
+    this.shortLogsLink = "";
+    ContainerId containerId = amInfo.getContainerId();
+    if (containerId != null) {
+      this.containerId = containerId.toString();
+      this.logsLink = join(host, pathPrefix,
+          ujoin("logs", this.nodeId, this.containerId, jobId, user));
+      this.shortLogsLink = ujoin("logs", this.nodeId, this.containerId,
+          jobId, user);
+    }
   }
 
   public String getNodeHttpAddress() {
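The AMAttemptInfo constructor rewrite above replaces two unconditional
dereferences with guarded ones: getNodeManagerHost() and getContainerId() can
legitimately come back null for an incomplete attempt record, and the old
code would have thrown NullPointerException while building the REST response.
Fields now default to "" and are filled in only when the source value exists.
The core of the pattern, reduced to one field:

    // Guard before deriving: nodeHttpAddress/nodeId stay "" on a null host.
    String nmHost = amInfo.getNodeManagerHost();
    if (nmHost != null) {
      this.nodeHttpAddress = nmHost + ":" + amInfo.getNodeManagerHttpPort();
    }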
@@ -92,6 +92,7 @@ public class JobInfo {
     this.user = job.getUserName();
     this.state = job.getState().toString();
     this.uberized = job.isUber();
+    this.diagnostics = "";
     List<String> diagnostics = job.getDiagnostics();
     if (diagnostics != null && !diagnostics.isEmpty()) {
      StringBuffer b = new StringBuffer();
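Same defaulting idiom in JobInfo as in AMAttemptInfo: the field is
initialized before the conditional branch, so the serialized job object
always carries a string-valued diagnostics entry even when
job.getDiagnostics() returns null or an empty list and the StringBuffer
branch never runs.

    this.diagnostics = "";  // never null in the JSON/XML payload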
@@ -0,0 +1,360 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.mapreduce.v2.hs.webapp;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
+import java.io.StringReader;
+import java.util.Map;
+
+import javax.ws.rs.core.MediaType;
+import javax.xml.parsers.DocumentBuilder;
+import javax.xml.parsers.DocumentBuilderFactory;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.mapreduce.v2.api.records.JobId;
+import org.apache.hadoop.mapreduce.v2.app.AppContext;
+import org.apache.hadoop.mapreduce.v2.app.MockJobs;
+import org.apache.hadoop.mapreduce.v2.app.job.Job;
+import org.apache.hadoop.mapreduce.v2.hs.HistoryContext;
+import org.apache.hadoop.mapreduce.v2.hs.JobHistory;
+import org.apache.hadoop.util.VersionInfo;
+import org.apache.hadoop.yarn.Clock;
+import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.event.EventHandler;
+import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
+import org.apache.hadoop.yarn.webapp.WebApp;
+import org.apache.hadoop.yarn.webapp.WebServicesTestUtils;
+import org.codehaus.jettison.json.JSONException;
+import org.codehaus.jettison.json.JSONObject;
+import org.junit.Before;
+import org.junit.Test;
+import org.w3c.dom.Document;
+import org.w3c.dom.Element;
+import org.w3c.dom.NodeList;
+import org.xml.sax.InputSource;
+
+import com.google.inject.Guice;
+import com.google.inject.Injector;
+import com.google.inject.servlet.GuiceServletContextListener;
+import com.google.inject.servlet.ServletModule;
+import com.sun.jersey.api.client.ClientResponse;
+import com.sun.jersey.api.client.ClientResponse.Status;
+import com.sun.jersey.api.client.UniformInterfaceException;
+import com.sun.jersey.api.client.WebResource;
+import com.sun.jersey.guice.spi.container.servlet.GuiceContainer;
+import com.sun.jersey.test.framework.JerseyTest;
+import com.sun.jersey.test.framework.WebAppDescriptor;
+
+/**
+ * Test the History Server info web services api's. Also test non-existent urls.
+ *
+ *  /ws/v1/history
+ *  /ws/v1/history/info
+ */
+public class TestHsWebServices extends JerseyTest {
+
+  private static Configuration conf = new Configuration();
+  private static TestAppContext appContext;
+  private static HsWebApp webApp;
+
+  static class TestAppContext implements AppContext {
+    final ApplicationAttemptId appAttemptID;
+    final ApplicationId appID;
+    final String user = MockJobs.newUserName();
+    final Map<JobId, Job> jobs;
+    final long startTime = System.currentTimeMillis();
+
+    TestAppContext(int appid, int numJobs, int numTasks, int numAttempts) {
+      appID = MockJobs.newAppID(appid);
+      appAttemptID = MockJobs.newAppAttemptID(appID, 0);
+      jobs = MockJobs.newJobs(appID, numJobs, numTasks, numAttempts);
+    }
+
+    TestAppContext() {
+      this(0, 1, 1, 1);
+    }
+
+    @Override
+    public ApplicationAttemptId getApplicationAttemptId() {
+      return appAttemptID;
+    }
+
+    @Override
+    public ApplicationId getApplicationID() {
+      return appID;
+    }
+
+    @Override
+    public CharSequence getUser() {
+      return user;
+    }
+
+    @Override
+    public Job getJob(JobId jobID) {
+      return jobs.get(jobID);
+    }
+
+    @Override
+    public Map<JobId, Job> getAllJobs() {
+      return jobs; // OK
+    }
+
+    @SuppressWarnings("rawtypes")
+    @Override
+    public EventHandler getEventHandler() {
+      return null;
+    }
+
+    @Override
+    public Clock getClock() {
+      return null;
+    }
+
+    @Override
+    public String getApplicationName() {
+      return "TestApp";
+    }
+
+    @Override
+    public long getStartTime() {
+      return startTime;
+    }
+  }
+
+  private Injector injector = Guice.createInjector(new ServletModule() {
+    @Override
+    protected void configureServlets() {
+
+      appContext = new TestAppContext();
+      JobHistory jobHistoryService = new JobHistory();
+      HistoryContext historyContext = (HistoryContext) jobHistoryService;
+      webApp = new HsWebApp(historyContext);
+
+      bind(JAXBContextResolver.class);
+      bind(HsWebServices.class);
+      bind(GenericExceptionHandler.class);
+      bind(WebApp.class).toInstance(webApp);
+      bind(AppContext.class).toInstance(appContext);
+      bind(Configuration.class).toInstance(conf);
+
+      serve("/*").with(GuiceContainer.class);
+    }
+  });
+
+  public class GuiceServletConfig extends GuiceServletContextListener {
+
+    @Override
+    protected Injector getInjector() {
+      return injector;
+    }
+  }
+
+  @Before
+  @Override
+  public void setUp() throws Exception {
+    super.setUp();
+  }
+
+  public TestHsWebServices() {
+    super(new WebAppDescriptor.Builder(
+        "org.apache.hadoop.mapreduce.v2.hs.webapp")
+        .contextListenerClass(GuiceServletConfig.class)
+        .filterClass(com.google.inject.servlet.GuiceFilter.class)
+        .contextPath("jersey-guice-filter").servletPath("/").build());
+  }
+
+  @Test
+  public void testHS() throws JSONException, Exception {
+    WebResource r = resource();
+    ClientResponse response = r.path("ws").path("v1").path("history")
+        .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+    JSONObject json = response.getEntity(JSONObject.class);
+    assertEquals("incorrect number of elements", 1, json.length());
+    verifyHSInfo(json.getJSONObject("historyInfo"), appContext);
+  }
+
+  @Test
+  public void testHSSlash() throws JSONException, Exception {
+    WebResource r = resource();
+    ClientResponse response = r.path("ws").path("v1").path("history/")
+        .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+    JSONObject json = response.getEntity(JSONObject.class);
+    assertEquals("incorrect number of elements", 1, json.length());
+    verifyHSInfo(json.getJSONObject("historyInfo"), appContext);
+  }
+
+  @Test
+  public void testHSDefault() throws JSONException, Exception {
+    WebResource r = resource();
+    ClientResponse response = r.path("ws").path("v1").path("history/")
+        .get(ClientResponse.class);
+    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+    JSONObject json = response.getEntity(JSONObject.class);
+    assertEquals("incorrect number of elements", 1, json.length());
+    verifyHSInfo(json.getJSONObject("historyInfo"), appContext);
+  }
+
+  @Test
+  public void testHSXML() throws JSONException, Exception {
+    WebResource r = resource();
+    ClientResponse response = r.path("ws").path("v1").path("history")
+        .accept(MediaType.APPLICATION_XML).get(ClientResponse.class);
+    assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
+    String xml = response.getEntity(String.class);
+    verifyHSInfoXML(xml, appContext);
+  }
+
+  @Test
+  public void testInfo() throws JSONException, Exception {
+    WebResource r = resource();
+    ClientResponse response = r.path("ws").path("v1").path("history")
+        .path("info").accept(MediaType.APPLICATION_JSON)
+        .get(ClientResponse.class);
+    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+    JSONObject json = response.getEntity(JSONObject.class);
+    assertEquals("incorrect number of elements", 1, json.length());
+    verifyHSInfo(json.getJSONObject("historyInfo"), appContext);
+  }
+
+  @Test
+  public void testInfoSlash() throws JSONException, Exception {
+    WebResource r = resource();
+    ClientResponse response = r.path("ws").path("v1").path("history")
+        .path("info/").accept(MediaType.APPLICATION_JSON)
+        .get(ClientResponse.class);
+    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+    JSONObject json = response.getEntity(JSONObject.class);
+    assertEquals("incorrect number of elements", 1, json.length());
+    verifyHSInfo(json.getJSONObject("historyInfo"), appContext);
+  }
+
+  @Test
+  public void testInfoDefault() throws JSONException, Exception {
+    WebResource r = resource();
+    ClientResponse response = r.path("ws").path("v1").path("history")
+        .path("info/").get(ClientResponse.class);
+    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+    JSONObject json = response.getEntity(JSONObject.class);
+    assertEquals("incorrect number of elements", 1, json.length());
+    verifyHSInfo(json.getJSONObject("historyInfo"), appContext);
+  }
+
+  @Test
+  public void testInfoXML() throws JSONException, Exception {
+    WebResource r = resource();
+    ClientResponse response = r.path("ws").path("v1").path("history")
+        .path("info/").accept(MediaType.APPLICATION_XML)
+        .get(ClientResponse.class);
+    assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
+    String xml = response.getEntity(String.class);
+    verifyHSInfoXML(xml, appContext);
+  }
+
+  @Test
+  public void testInvalidUri() throws JSONException, Exception {
+    WebResource r = resource();
+    String responseStr = "";
+    try {
+      responseStr = r.path("ws").path("v1").path("history").path("bogus")
+          .accept(MediaType.APPLICATION_JSON).get(String.class);
+      fail("should have thrown exception on invalid uri");
+    } catch (UniformInterfaceException ue) {
+      ClientResponse response = ue.getResponse();
+      assertEquals(Status.NOT_FOUND, response.getClientResponseStatus());
+      WebServicesTestUtils.checkStringMatch(
+          "error string exists and shouldn't", "", responseStr);
+    }
+  }
+
+  @Test
+  public void testInvalidUri2() throws JSONException, Exception {
+    WebResource r = resource();
+    String responseStr = "";
+    try {
+      responseStr = r.path("ws").path("v1").path("invalid")
+          .accept(MediaType.APPLICATION_JSON).get(String.class);
+      fail("should have thrown exception on invalid uri");
+    } catch (UniformInterfaceException ue) {
+      ClientResponse response = ue.getResponse();
+      assertEquals(Status.NOT_FOUND, response.getClientResponseStatus());
+      WebServicesTestUtils.checkStringMatch(
+          "error string exists and shouldn't", "", responseStr);
+    }
+  }
+
+  @Test
+  public void testInvalidAccept() throws JSONException, Exception {
+    WebResource r = resource();
+    String responseStr = "";
+    try {
+      responseStr = r.path("ws").path("v1").path("history")
+          .accept(MediaType.TEXT_PLAIN).get(String.class);
+      fail("should have thrown exception on invalid uri");
+    } catch (UniformInterfaceException ue) {
+      ClientResponse response = ue.getResponse();
+      assertEquals(Status.INTERNAL_SERVER_ERROR,
+          response.getClientResponseStatus());
+      WebServicesTestUtils.checkStringMatch(
+          "error string exists and shouldn't", "", responseStr);
+    }
+  }
+
+  public void verifyHsInfoGeneric(String hadoopVersionBuiltOn,
+      String hadoopBuildVersion, String hadoopVersion) {
+    WebServicesTestUtils.checkStringMatch("hadoopVersionBuiltOn",
+        VersionInfo.getDate(), hadoopVersionBuiltOn);
+    WebServicesTestUtils.checkStringMatch("hadoopBuildVersion",
+        VersionInfo.getBuildVersion(), hadoopBuildVersion);
+    WebServicesTestUtils.checkStringMatch("hadoopVersion",
+        VersionInfo.getVersion(), hadoopVersion);
+  }
+
+  public void verifyHSInfo(JSONObject info, TestAppContext ctx)
+      throws JSONException {
+    assertEquals("incorrect number of elements", 3, info.length());
+
+    verifyHsInfoGeneric(info.getString("hadoopVersionBuiltOn"),
+        info.getString("hadoopBuildVersion"), info.getString("hadoopVersion"));
+  }
+
+  public void verifyHSInfoXML(String xml, TestAppContext ctx)
+      throws JSONException, Exception {
+    DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
+    DocumentBuilder db = dbf.newDocumentBuilder();
+    InputSource is = new InputSource();
+    is.setCharacterStream(new StringReader(xml));
+    Document dom = db.parse(is);
+    NodeList nodes = dom.getElementsByTagName("historyInfo");
+    assertEquals("incorrect number of elements", 1, nodes.getLength());
+
+    for (int i = 0; i < nodes.getLength(); i++) {
+      Element element = (Element) nodes.item(i);
+      verifyHsInfoGeneric(
+          WebServicesTestUtils.getXmlString(element, "hadoopVersionBuiltOn"),
+          WebServicesTestUtils.getXmlString(element, "hadoopBuildVersion"),
+          WebServicesTestUtils.getXmlString(element, "hadoopVersion"));
+    }
+  }
+
+}
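TestHsWebServices drives the endpoints through an embedded Jersey container,
but the same resources can be probed against a live history server with the
identical client API. A self-contained sketch; the host and port are
deployment assumptions (19888 is the default history-server web port set by
mapreduce.jobhistory.webapp.address):

    import javax.ws.rs.core.MediaType;

    import org.codehaus.jettison.json.JSONObject;

    import com.sun.jersey.api.client.Client;
    import com.sun.jersey.api.client.WebResource;

    public class HsInfoProbe {
      public static void main(String[] args) {
        // Same resource path the tests exercise: /ws/v1/history/info
        WebResource r = Client.create().resource("http://jhs-host:19888");
        JSONObject json = r.path("ws").path("v1").path("history").path("info")
            .accept(MediaType.APPLICATION_JSON).get(JSONObject.class);
        System.out.println(json);
      }
    }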
@ -0,0 +1,745 @@
|
||||||
|
/**
|
||||||
|
* Licensed to the Apache Software Foundation (ASF) under one
|
||||||
|
* or more contributor license agreements. See the NOTICE file
|
||||||
|
* distributed with this work for additional information
|
||||||
|
* regarding copyright ownership. The ASF licenses this file
|
||||||
|
* to you under the Apache License, Version 2.0 (the
|
||||||
|
* "License"); you may not use this file except in compliance
|
||||||
|
* with the License. You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing, software
|
||||||
|
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
* See the License for the specific language governing permissions and
|
||||||
|
* limitations under the License.
|
||||||
|
*/
|
||||||
|
|
||||||
|
package org.apache.hadoop.mapreduce.v2.hs.webapp;
|
||||||
|
|
||||||
|
import static org.junit.Assert.assertEquals;
|
||||||
|
import static org.junit.Assert.assertNotNull;
|
||||||
|
import static org.junit.Assert.assertTrue;
|
||||||
|
import static org.junit.Assert.fail;
|
||||||
|
import static org.mockito.Mockito.mock;
|
||||||
|
import static org.mockito.Mockito.when;
|
||||||
|
|
||||||
|
import java.io.StringReader;
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.Map;
|
||||||
|
|
||||||
|
import javax.ws.rs.core.MediaType;
|
||||||
|
import javax.xml.parsers.DocumentBuilder;
|
||||||
|
import javax.xml.parsers.DocumentBuilderFactory;
|
||||||
|
|
||||||
|
import org.apache.hadoop.conf.Configuration;
|
||||||
|
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
|
||||||
|
import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
|
||||||
|
import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
|
||||||
|
import org.apache.hadoop.mapreduce.v2.app.AppContext;
|
||||||
|
import org.apache.hadoop.mapreduce.v2.app.MockJobs;
|
||||||
|
import org.apache.hadoop.mapreduce.v2.app.job.Job;
|
||||||
|
import org.apache.hadoop.mapreduce.v2.app.job.Task;
|
||||||
|
import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
|
||||||
|
import org.apache.hadoop.mapreduce.v2.util.MRApps;
|
||||||
|
import org.apache.hadoop.yarn.Clock;
|
||||||
|
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
|
||||||
|
import org.apache.hadoop.yarn.api.records.ApplicationId;
|
||||||
|
import org.apache.hadoop.yarn.event.EventHandler;
|
||||||
|
import org.apache.hadoop.yarn.util.ConverterUtils;
|
||||||
|
import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
|
||||||
|
import org.apache.hadoop.yarn.webapp.WebApp;
|
||||||
|
import org.apache.hadoop.yarn.webapp.WebServicesTestUtils;
|
||||||
|
import org.codehaus.jettison.json.JSONArray;
|
||||||
|
import org.codehaus.jettison.json.JSONException;
|
||||||
|
import org.codehaus.jettison.json.JSONObject;
|
||||||
|
import org.junit.Before;
|
||||||
|
import org.junit.Test;
|
||||||
|
import org.w3c.dom.Document;
|
||||||
|
import org.w3c.dom.Element;
|
||||||
|
import org.w3c.dom.NodeList;
|
||||||
|
import org.xml.sax.InputSource;
|
||||||
|
|
||||||
|
import com.google.inject.Guice;
|
||||||
|
import com.google.inject.Injector;
|
||||||
|
import com.google.inject.servlet.GuiceServletContextListener;
|
||||||
|
import com.google.inject.servlet.ServletModule;
|
||||||
|
import com.sun.jersey.api.client.ClientResponse;
|
||||||
|
import com.sun.jersey.api.client.ClientResponse.Status;
|
||||||
|
import com.sun.jersey.api.client.UniformInterfaceException;
|
||||||
|
import com.sun.jersey.api.client.WebResource;
|
||||||
|
import com.sun.jersey.guice.spi.container.servlet.GuiceContainer;
|
||||||
|
import com.sun.jersey.test.framework.JerseyTest;
|
||||||
|
import com.sun.jersey.test.framework.WebAppDescriptor;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Test the history server Rest API for getting task attempts, a
|
||||||
|
* specific task attempt, and task attempt counters
|
||||||
|
*
|
||||||
|
* /ws/v1/history/mapreduce/jobs/{jobid}/tasks/{taskid}/attempts
|
||||||
|
* /ws/v1/history/mapreduce/jobs/{jobid}/tasks/{taskid}/attempts/{attemptid}
|
||||||
|
* /ws/v1/history/mapreduce/jobs/{jobid}/tasks/{taskid}/attempts/{attemptid}/
|
||||||
|
* counters
|
||||||
|
*/
|
||||||
|
public class TestHsWebServicesAttempts extends JerseyTest {
|
||||||
|
|
||||||
|
private static Configuration conf = new Configuration();
|
||||||
|
private static TestAppContext appContext;
|
||||||
|
private static HsWebApp webApp;
|
||||||
|
|
||||||
|
static class TestAppContext implements AppContext {
|
||||||
|
final ApplicationAttemptId appAttemptID;
|
||||||
|
final ApplicationId appID;
|
||||||
|
final String user = MockJobs.newUserName();
|
||||||
|
final Map<JobId, Job> jobs;
|
||||||
|
final long startTime = System.currentTimeMillis();
|
||||||
|
|
||||||
|
TestAppContext(int appid, int numJobs, int numTasks, int numAttempts) {
|
||||||
|
appID = MockJobs.newAppID(appid);
|
||||||
|
appAttemptID = MockJobs.newAppAttemptID(appID, 0);
|
||||||
|
jobs = MockJobs.newJobs(appID, numJobs, numTasks, numAttempts);
|
||||||
|
}
|
||||||
|
|
||||||
|
TestAppContext() {
|
||||||
|
this(0, 1, 2, 1);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public ApplicationAttemptId getApplicationAttemptId() {
|
||||||
|
return appAttemptID;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public ApplicationId getApplicationID() {
|
||||||
|
return appID;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public CharSequence getUser() {
|
||||||
|
return user;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public Job getJob(JobId jobID) {
|
||||||
|
return jobs.get(jobID);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public Map<JobId, Job> getAllJobs() {
|
||||||
|
return jobs; // OK
|
||||||
|
}
|
||||||
|
|
||||||
|
@SuppressWarnings("rawtypes")
|
||||||
|
@Override
|
||||||
|
public EventHandler getEventHandler() {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public Clock getClock() {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public String getApplicationName() {
|
||||||
|
return "TestApp";
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public long getStartTime() {
|
||||||
|
return startTime;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private Injector injector = Guice.createInjector(new ServletModule() {
|
||||||
|
@Override
|
||||||
|
protected void configureServlets() {
|
||||||
|
|
||||||
|
appContext = new TestAppContext();
|
||||||
|
webApp = mock(HsWebApp.class);
|
||||||
|
when(webApp.name()).thenReturn("hsmockwebapp");
|
||||||
|
|
||||||
|
bind(JAXBContextResolver.class);
|
||||||
|
bind(HsWebServices.class);
|
||||||
|
bind(GenericExceptionHandler.class);
|
||||||
|
bind(WebApp.class).toInstance(webApp);
|
||||||
|
bind(AppContext.class).toInstance(appContext);
|
||||||
|
bind(Configuration.class).toInstance(conf);
|
||||||
|
|
||||||
|
serve("/*").with(GuiceContainer.class);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
public class GuiceServletConfig extends GuiceServletContextListener {
|
||||||
|
|
||||||
|
@Override
|
||||||
|
protected Injector getInjector() {
|
||||||
|
return injector;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Before
|
||||||
|
@Override
|
||||||
|
public void setUp() throws Exception {
|
||||||
|
super.setUp();
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
public TestHsWebServicesAttempts() {
|
||||||
|
super(new WebAppDescriptor.Builder(
|
||||||
|
"org.apache.hadoop.mapreduce.v2.hs.webapp")
|
||||||
|
.contextListenerClass(GuiceServletConfig.class)
|
||||||
|
.filterClass(com.google.inject.servlet.GuiceFilter.class)
|
||||||
|
.contextPath("jersey-guice-filter").servletPath("/").build());
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void testTaskAttempts() throws JSONException, Exception {
|
||||||
|
WebResource r = resource();
|
||||||
|
Map<JobId, Job> jobsMap = appContext.getAllJobs();
|
||||||
|
for (JobId id : jobsMap.keySet()) {
|
||||||
|
String jobId = MRApps.toString(id);
|
||||||
|
for (Task task : jobsMap.get(id).getTasks().values()) {
|
||||||
|
|
||||||
|
String tid = MRApps.toString(task.getID());
|
||||||
|
ClientResponse response = r.path("ws").path("v1").path("history")
|
||||||
|
.path("mapreduce").path("jobs").path(jobId).path("tasks").path(tid)
|
||||||
|
.path("attempts").accept(MediaType.APPLICATION_JSON)
|
||||||
|
.get(ClientResponse.class);
|
||||||
|
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
|
||||||
|
JSONObject json = response.getEntity(JSONObject.class);
|
||||||
|
verifyHsTaskAttempts(json, task);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void testTaskAttemptsSlash() throws JSONException, Exception {
|
||||||
|
WebResource r = resource();
|
||||||
|
Map<JobId, Job> jobsMap = appContext.getAllJobs();
|
||||||
|
for (JobId id : jobsMap.keySet()) {
|
||||||
|
String jobId = MRApps.toString(id);
|
||||||
|
for (Task task : jobsMap.get(id).getTasks().values()) {
|
||||||
|
|
||||||
|
String tid = MRApps.toString(task.getID());
|
||||||
|
ClientResponse response = r.path("ws").path("v1").path("history")
|
||||||
|
.path("mapreduce").path("jobs").path(jobId).path("tasks").path(tid)
|
||||||
|
.path("attempts/").accept(MediaType.APPLICATION_JSON)
|
||||||
|
.get(ClientResponse.class);
|
||||||
|
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
|
||||||
|
JSONObject json = response.getEntity(JSONObject.class);
|
||||||
|
verifyHsTaskAttempts(json, task);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void testTaskAttemptsDefault() throws JSONException, Exception {
|
||||||
|
WebResource r = resource();
|
||||||
|
Map<JobId, Job> jobsMap = appContext.getAllJobs();
|
||||||
|
for (JobId id : jobsMap.keySet()) {
|
||||||
|
String jobId = MRApps.toString(id);
|
||||||
|
for (Task task : jobsMap.get(id).getTasks().values()) {
|
||||||
|
|
||||||
|
String tid = MRApps.toString(task.getID());
|
||||||
|
ClientResponse response = r.path("ws").path("v1").path("history")
|
||||||
|
.path("mapreduce").path("jobs").path(jobId).path("tasks").path(tid)
|
||||||
|
.path("attempts").get(ClientResponse.class);
|
||||||
|
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
|
||||||
|
JSONObject json = response.getEntity(JSONObject.class);
|
||||||
|
verifyHsTaskAttempts(json, task);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void testTaskAttemptsXML() throws JSONException, Exception {
|
||||||
|
WebResource r = resource();
|
||||||
|
Map<JobId, Job> jobsMap = appContext.getAllJobs();
|
||||||
|
for (JobId id : jobsMap.keySet()) {
|
||||||
|
String jobId = MRApps.toString(id);
|
||||||
|
for (Task task : jobsMap.get(id).getTasks().values()) {
|
||||||
|
|
||||||
|
String tid = MRApps.toString(task.getID());
|
||||||
|
ClientResponse response = r.path("ws").path("v1").path("history")
|
||||||
|
.path("mapreduce").path("jobs").path(jobId).path("tasks").path(tid)
|
||||||
|
.path("attempts").accept(MediaType.APPLICATION_XML)
|
||||||
|
.get(ClientResponse.class);
|
||||||
|
|
||||||
|
assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
|
||||||
|
String xml = response.getEntity(String.class);
|
||||||
|
DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
|
||||||
|
DocumentBuilder db = dbf.newDocumentBuilder();
|
||||||
|
InputSource is = new InputSource();
|
||||||
|
is.setCharacterStream(new StringReader(xml));
|
||||||
|
Document dom = db.parse(is);
|
||||||
|
NodeList attempts = dom.getElementsByTagName("taskAttempts");
|
||||||
|
assertEquals("incorrect number of elements", 1, attempts.getLength());
|
||||||
|
|
||||||
|
NodeList nodes = dom.getElementsByTagName("taskAttempt");
|
||||||
|
verifyHsTaskAttemptsXML(nodes, task);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void testTaskAttemptId() throws JSONException, Exception {
|
||||||
|
WebResource r = resource();
|
||||||
|
Map<JobId, Job> jobsMap = appContext.getAllJobs();
|
||||||
|
|
||||||
|
for (JobId id : jobsMap.keySet()) {
|
||||||
|
String jobId = MRApps.toString(id);
|
||||||
|
|
||||||
|
for (Task task : jobsMap.get(id).getTasks().values()) {
|
||||||
|
String tid = MRApps.toString(task.getID());
|
||||||
|
|
||||||
|
for (TaskAttempt att : task.getAttempts().values()) {
|
||||||
|
TaskAttemptId attemptid = att.getID();
|
||||||
|
String attid = MRApps.toString(attemptid);
|
||||||
|
|
||||||
|
ClientResponse response = r.path("ws").path("v1").path("history")
|
||||||
|
.path("mapreduce").path("jobs").path(jobId).path("tasks")
|
||||||
|
.path(tid).path("attempts").path(attid)
|
||||||
|
.accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
|
||||||
|
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
|
||||||
|
JSONObject json = response.getEntity(JSONObject.class);
|
||||||
|
assertEquals("incorrect number of elements", 1, json.length());
|
||||||
|
JSONObject info = json.getJSONObject("taskAttempt");
|
||||||
|
verifyHsTaskAttempt(info, att, task.getType());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void testTaskAttemptIdSlash() throws JSONException, Exception {
|
||||||
|
WebResource r = resource();
|
||||||
|
Map<JobId, Job> jobsMap = appContext.getAllJobs();
|
||||||
|
|
||||||
|
for (JobId id : jobsMap.keySet()) {
|
||||||
|
String jobId = MRApps.toString(id);
|
||||||
|
|
||||||
|
for (Task task : jobsMap.get(id).getTasks().values()) {
|
||||||
|
String tid = MRApps.toString(task.getID());
|
||||||
|
|
||||||
|
for (TaskAttempt att : task.getAttempts().values()) {
|
||||||
|
TaskAttemptId attemptid = att.getID();
|
||||||
|
String attid = MRApps.toString(attemptid);
|
||||||
|
|
||||||
|
ClientResponse response = r.path("ws").path("v1").path("history")
|
||||||
|
.path("mapreduce").path("jobs").path(jobId).path("tasks")
|
||||||
|
.path(tid).path("attempts").path(attid + "/")
|
||||||
|
.accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
|
||||||
|
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
|
||||||
|
JSONObject json = response.getEntity(JSONObject.class);
|
||||||
|
assertEquals("incorrect number of elements", 1, json.length());
|
||||||
|
JSONObject info = json.getJSONObject("taskAttempt");
|
||||||
|
verifyHsTaskAttempt(info, att, task.getType());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void testTaskAttemptIdDefault() throws JSONException, Exception {
|
||||||
|
WebResource r = resource();
|
||||||
|
Map<JobId, Job> jobsMap = appContext.getAllJobs();
|
||||||
|
|
||||||
|
for (JobId id : jobsMap.keySet()) {
|
||||||
|
String jobId = MRApps.toString(id);
|
||||||
|
|
||||||
|
for (Task task : jobsMap.get(id).getTasks().values()) {
|
||||||
|
String tid = MRApps.toString(task.getID());
|
||||||
|
|
||||||
|
for (TaskAttempt att : task.getAttempts().values()) {
|
||||||
|
TaskAttemptId attemptid = att.getID();
|
||||||
|
String attid = MRApps.toString(attemptid);
|
||||||
|
|
||||||
|
ClientResponse response = r.path("ws").path("v1").path("history")
|
||||||
|
.path("mapreduce").path("jobs").path(jobId).path("tasks")
|
||||||
|
.path(tid).path("attempts").path(attid).get(ClientResponse.class);
|
||||||
|
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
|
||||||
|
JSONObject json = response.getEntity(JSONObject.class);
|
||||||
|
assertEquals("incorrect number of elements", 1, json.length());
|
||||||
|
JSONObject info = json.getJSONObject("taskAttempt");
|
||||||
|
verifyHsTaskAttempt(info, att, task.getType());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void testTaskAttemptIdXML() throws JSONException, Exception {
|
||||||
|
WebResource r = resource();
|
||||||
|
Map<JobId, Job> jobsMap = appContext.getAllJobs();
|
||||||
|
for (JobId id : jobsMap.keySet()) {
|
||||||
|
String jobId = MRApps.toString(id);
|
||||||
|
for (Task task : jobsMap.get(id).getTasks().values()) {
|
||||||
|
|
||||||
|
String tid = MRApps.toString(task.getID());
|
||||||
|
for (TaskAttempt att : task.getAttempts().values()) {
|
||||||
|
TaskAttemptId attemptid = att.getID();
|
||||||
|
String attid = MRApps.toString(attemptid);
|
||||||
|
|
||||||
|
ClientResponse response = r.path("ws").path("v1").path("history")
|
||||||
|
.path("mapreduce").path("jobs").path(jobId).path("tasks")
|
||||||
|
.path(tid).path("attempts").path(attid)
|
||||||
|
              .accept(MediaType.APPLICATION_XML).get(ClientResponse.class);

          assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
          String xml = response.getEntity(String.class);
          DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
          DocumentBuilder db = dbf.newDocumentBuilder();
          InputSource is = new InputSource();
          is.setCharacterStream(new StringReader(xml));
          Document dom = db.parse(is);
          NodeList nodes = dom.getElementsByTagName("taskAttempt");
          for (int i = 0; i < nodes.getLength(); i++) {
            Element element = (Element) nodes.item(i);
            verifyHsTaskAttemptXML(element, att, task.getType());
          }
        }
      }
    }
  }

  @Test
  public void testTaskAttemptIdBogus() throws JSONException, Exception {

    testTaskAttemptIdErrorGeneric("bogusid",
        "java.lang.Exception: Error parsing attempt ID: bogusid");
  }

  @Test
  public void testTaskAttemptIdNonExist() throws JSONException, Exception {

    testTaskAttemptIdErrorGeneric(
        "attempt_12345_0_0_r_1_0",
        "java.lang.Exception: Error getting info on task attempt id attempt_12345_0_0_r_1_0");
  }

  @Test
  public void testTaskAttemptIdInvalid() throws JSONException, Exception {

    testTaskAttemptIdErrorGeneric("attempt_12345_0_0_d_1_0",
        "java.lang.Exception: Unknown task symbol: d");
  }

  @Test
  public void testTaskAttemptIdInvalid2() throws JSONException, Exception {

    testTaskAttemptIdErrorGeneric("attempt_12345_0_r_1_0",
        "java.lang.Exception: For input string: \"r\"");
  }

  @Test
  public void testTaskAttemptIdInvalid3() throws JSONException, Exception {

    testTaskAttemptIdErrorGeneric("attempt_12345_0_0_r_1",
        "java.lang.Exception: Error parsing attempt ID: attempt_12345_0_0_r_1");
  }

  private void testTaskAttemptIdErrorGeneric(String attid, String error)
      throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();

    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);

      for (Task task : jobsMap.get(id).getTasks().values()) {
        String tid = MRApps.toString(task.getID());

        try {
          r.path("ws").path("v1").path("history").path("mapreduce")
              .path("jobs").path(jobId).path("tasks").path(tid)
              .path("attempts").path(attid).accept(MediaType.APPLICATION_JSON)
              .get(JSONObject.class);
          fail("should have thrown exception on invalid uri");
        } catch (UniformInterfaceException ue) {
          ClientResponse response = ue.getResponse();
          assertEquals(Status.NOT_FOUND, response.getClientResponseStatus());
          assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
          JSONObject msg = response.getEntity(JSONObject.class);
          JSONObject exception = msg.getJSONObject("RemoteException");
          assertEquals("incorrect number of elements", 3, exception.length());
          String message = exception.getString("message");
          String type = exception.getString("exception");
          String classname = exception.getString("javaClassName");
          WebServicesTestUtils.checkStringMatch("exception message", error,
              message);
          WebServicesTestUtils.checkStringMatch("exception type",
              "NotFoundException", type);
          WebServicesTestUtils.checkStringMatch("exception classname",
              "org.apache.hadoop.yarn.webapp.NotFoundException", classname);
        }
      }
    }
  }

  public void verifyHsTaskAttemptXML(Element element, TaskAttempt att,
      TaskType ttype) {
    verifyTaskAttemptGeneric(att, ttype,
        WebServicesTestUtils.getXmlString(element, "id"),
        WebServicesTestUtils.getXmlString(element, "state"),
        WebServicesTestUtils.getXmlString(element, "type"),
        WebServicesTestUtils.getXmlString(element, "rack"),
        WebServicesTestUtils.getXmlString(element, "nodeHttpAddress"),
        WebServicesTestUtils.getXmlString(element, "diagnostics"),
        WebServicesTestUtils.getXmlString(element, "assignedContainerId"),
        WebServicesTestUtils.getXmlLong(element, "startTime"),
        WebServicesTestUtils.getXmlLong(element, "finishTime"),
        WebServicesTestUtils.getXmlLong(element, "elapsedTime"),
        WebServicesTestUtils.getXmlFloat(element, "progress"));

    if (ttype == TaskType.REDUCE) {
      verifyReduceTaskAttemptGeneric(att,
          WebServicesTestUtils.getXmlLong(element, "shuffleFinishTime"),
          WebServicesTestUtils.getXmlLong(element, "mergeFinishTime"),
          WebServicesTestUtils.getXmlLong(element, "elapsedShuffleTime"),
          WebServicesTestUtils.getXmlLong(element, "elapsedMergeTime"),
          WebServicesTestUtils.getXmlLong(element, "elapsedReduceTime"));
    }
  }

  public void verifyHsTaskAttempt(JSONObject info, TaskAttempt att,
      TaskType ttype) throws JSONException {
    if (ttype == TaskType.REDUCE) {
      assertEquals("incorrect number of elements", 16, info.length());
    } else {
      assertEquals("incorrect number of elements", 11, info.length());
    }

    verifyTaskAttemptGeneric(att, ttype, info.getString("id"),
        info.getString("state"), info.getString("type"),
        info.getString("rack"), info.getString("nodeHttpAddress"),
        info.getString("diagnostics"), info.getString("assignedContainerId"),
        info.getLong("startTime"), info.getLong("finishTime"),
        info.getLong("elapsedTime"), (float) info.getDouble("progress"));

    if (ttype == TaskType.REDUCE) {
      verifyReduceTaskAttemptGeneric(att, info.getLong("shuffleFinishTime"),
          info.getLong("mergeFinishTime"), info.getLong("elapsedShuffleTime"),
          info.getLong("elapsedMergeTime"), info.getLong("elapsedReduceTime"));
    }
  }

  public void verifyHsTaskAttempts(JSONObject json, Task task)
      throws JSONException {
    assertEquals("incorrect number of elements", 1, json.length());
    JSONObject attempts = json.getJSONObject("taskAttempts");
    assertEquals("incorrect number of elements", 1, attempts.length());
    JSONArray arr = attempts.getJSONArray("taskAttempt");
    for (TaskAttempt att : task.getAttempts().values()) {
      TaskAttemptId id = att.getID();
      String attid = MRApps.toString(id);
      Boolean found = false;

      for (int i = 0; i < arr.length(); i++) {
        JSONObject info = arr.getJSONObject(i);
        if (attid.matches(info.getString("id"))) {
          found = true;
          verifyHsTaskAttempt(info, att, task.getType());
        }
      }
      assertTrue("task attempt with id: " + attid
          + " not in web service output", found);
    }
  }

  public void verifyHsTaskAttemptsXML(NodeList nodes, Task task) {
    assertEquals("incorrect number of elements", 1, nodes.getLength());

    for (TaskAttempt att : task.getAttempts().values()) {
      TaskAttemptId id = att.getID();
      String attid = MRApps.toString(id);
      Boolean found = false;
      for (int i = 0; i < nodes.getLength(); i++) {
        Element element = (Element) nodes.item(i);

        if (attid.matches(WebServicesTestUtils.getXmlString(element, "id"))) {
          found = true;
          verifyHsTaskAttemptXML(element, att, task.getType());
        }
      }
      assertTrue("task attempt with id: " + attid
          + " not in web service output", found);
    }
  }

  public void verifyTaskAttemptGeneric(TaskAttempt ta, TaskType ttype,
      String id, String state, String type, String rack,
      String nodeHttpAddress, String diagnostics, String assignedContainerId,
      long startTime, long finishTime, long elapsedTime, float progress) {

    TaskAttemptId attid = ta.getID();
    String attemptId = MRApps.toString(attid);

    WebServicesTestUtils.checkStringMatch("id", attemptId, id);
    WebServicesTestUtils.checkStringMatch("type", ttype.toString(), type);
    WebServicesTestUtils.checkStringMatch("state", ta.getState().toString(),
        state);
    WebServicesTestUtils.checkStringMatch("rack", ta.getNodeRackName(), rack);
    WebServicesTestUtils.checkStringMatch("nodeHttpAddress",
        ta.getNodeHttpAddress(), nodeHttpAddress);

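    // The web service flattens the attempt's diagnostics list into a single
    // string, so concatenate the expected entries the same way before
    // comparing against the reported value.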
    String expectDiag = "";
    List<String> diagnosticsList = ta.getDiagnostics();
    if (diagnosticsList != null && !diagnosticsList.isEmpty()) {
      StringBuffer b = new StringBuffer();
      for (String diag : diagnosticsList) {
        b.append(diag);
      }
      expectDiag = b.toString();
    }
    WebServicesTestUtils.checkStringMatch("diagnostics", expectDiag,
        diagnostics);
    WebServicesTestUtils.checkStringMatch("assignedContainerId",
        ConverterUtils.toString(ta.getAssignedContainerID()),
        assignedContainerId);

    assertEquals("startTime wrong", ta.getLaunchTime(), startTime);
    assertEquals("finishTime wrong", ta.getFinishTime(), finishTime);
    assertEquals("elapsedTime wrong", finishTime - startTime, elapsedTime);
    assertEquals("progress wrong", ta.getProgress() * 100, progress, 1e-3f);
  }

  public void verifyReduceTaskAttemptGeneric(TaskAttempt ta,
      long shuffleFinishTime, long mergeFinishTime, long elapsedShuffleTime,
      long elapsedMergeTime, long elapsedReduceTime) {

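    // Elapsed times are derived from adjacent phase boundaries: shuffle runs
    // from launch to shuffleFinishTime, merge from shuffleFinishTime to
    // sortFinishTime, and reduce from sortFinishTime to finishTime, so each
    // expected value is the later timestamp minus the earlier one.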
assertEquals("shuffleFinishTime wrong", ta.getShuffleFinishTime(),
|
||||||
|
shuffleFinishTime);
|
||||||
|
assertEquals("mergeFinishTime wrong", ta.getSortFinishTime(),
|
||||||
|
mergeFinishTime);
|
||||||
|
assertEquals("elapsedShuffleTime wrong",
|
||||||
|
ta.getLaunchTime() - ta.getShuffleFinishTime(), elapsedShuffleTime);
|
||||||
|
assertEquals("elapsedMergeTime wrong",
|
||||||
|
ta.getShuffleFinishTime() - ta.getSortFinishTime(), elapsedMergeTime);
|
||||||
|
assertEquals("elapsedReduceTime wrong",
|
||||||
|
ta.getSortFinishTime() - ta.getFinishTime(), elapsedReduceTime);
|
||||||
|
}
|
||||||
|
|
||||||
|
  @Test
  public void testTaskAttemptIdCounters() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();

    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);

      for (Task task : jobsMap.get(id).getTasks().values()) {
        String tid = MRApps.toString(task.getID());

        for (TaskAttempt att : task.getAttempts().values()) {
          TaskAttemptId attemptid = att.getID();
          String attid = MRApps.toString(attemptid);

          ClientResponse response = r.path("ws").path("v1").path("history")
              .path("mapreduce").path("jobs").path(jobId).path("tasks")
              .path(tid).path("attempts").path(attid).path("counters")
              .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
          assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
          JSONObject json = response.getEntity(JSONObject.class);
          assertEquals("incorrect number of elements", 1, json.length());
          JSONObject info = json.getJSONObject("JobTaskAttemptCounters");
          verifyHsJobTaskAttemptCounters(info, att);
        }
      }
    }
  }

  @Test
  public void testTaskAttemptIdXMLCounters() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      for (Task task : jobsMap.get(id).getTasks().values()) {

        String tid = MRApps.toString(task.getID());
        for (TaskAttempt att : task.getAttempts().values()) {
          TaskAttemptId attemptid = att.getID();
          String attid = MRApps.toString(attemptid);

          ClientResponse response = r.path("ws").path("v1").path("history")
              .path("mapreduce").path("jobs").path(jobId).path("tasks")
              .path(tid).path("attempts").path(attid).path("counters")
              .accept(MediaType.APPLICATION_XML).get(ClientResponse.class);

          assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
          String xml = response.getEntity(String.class);
          DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
          DocumentBuilder db = dbf.newDocumentBuilder();
          InputSource is = new InputSource();
          is.setCharacterStream(new StringReader(xml));
          Document dom = db.parse(is);
          NodeList nodes = dom.getElementsByTagName("JobTaskAttemptCounters");

          verifyHsTaskCountersXML(nodes, att);
        }
      }
    }
  }

  public void verifyHsJobTaskAttemptCounters(JSONObject info, TaskAttempt att)
      throws JSONException {

    assertEquals("incorrect number of elements", 2, info.length());

    WebServicesTestUtils.checkStringMatch("id", MRApps.toString(att.getID()),
        info.getString("id"));

    // just do simple verification of fields - not that the data in the
    // fields is correct
    JSONArray counterGroups = info.getJSONArray("taskAttemptCounterGroup");
    for (int i = 0; i < counterGroups.length(); i++) {
      JSONObject counterGroup = counterGroups.getJSONObject(i);
      String name = counterGroup.getString("counterGroupName");
      assertTrue("name not set", (name != null && !name.isEmpty()));
      JSONArray counters = counterGroup.getJSONArray("counter");
      for (int j = 0; j < counters.length(); j++) {
        JSONObject counter = counters.getJSONObject(j);
        String counterName = counter.getString("name");
        assertTrue("counter name not set",
            (counterName != null && !counterName.isEmpty()));
        long value = counter.getLong("value");
        assertTrue("value not >= 0", value >= 0);
      }
    }
  }

  public void verifyHsTaskCountersXML(NodeList nodes, TaskAttempt att) {

    for (int i = 0; i < nodes.getLength(); i++) {

      Element element = (Element) nodes.item(i);
      WebServicesTestUtils.checkStringMatch("id", MRApps.toString(att.getID()),
          WebServicesTestUtils.getXmlString(element, "id"));
      // just do simple verification of fields - not that the data in the
      // fields is correct
      NodeList groups = element.getElementsByTagName("taskAttemptCounterGroup");

      for (int j = 0; j < groups.getLength(); j++) {
        Element counters = (Element) groups.item(j);
        assertNotNull("should have counters in the web service info", counters);
        String name = WebServicesTestUtils.getXmlString(counters,
            "counterGroupName");
        assertTrue("name not set", (name != null && !name.isEmpty()));
        NodeList counterArr = counters.getElementsByTagName("counter");
        for (int z = 0; z < counterArr.getLength(); z++) {
          Element counter = (Element) counterArr.item(z);
          String counterName = WebServicesTestUtils.getXmlString(counter,
              "name");
          assertTrue("counter name not set",
              (counterName != null && !counterName.isEmpty()));

          long value = WebServicesTestUtils.getXmlLong(counter, "value");
          assertTrue("value not >= 0", value >= 0);

        }
      }
    }
  }

}
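For reference, the task-attempt resource exercised by the tests above can also be queried outside the JerseyTest harness with a plain Jersey 1.x client. The sketch below is illustrative only: it assumes a JobHistory server on localhost at 19888 (the default web port), and the job/task/attempt IDs are placeholders.

import com.sun.jersey.api.client.Client;

public class HsTaskAttemptQuery {
  public static void main(String[] args) {
    // Build the same /ws/v1/history/mapreduce/.../attempts/{attemptid} URI
    // that the tests assemble with WebResource.path() calls.
    Client client = Client.create();
    String json = client
        .resource("http://localhost:19888/ws/v1/history/mapreduce")
        .path("jobs").path("job_1326232085508_0001")
        .path("tasks").path("task_1326232085508_0001_r_000000")
        .path("attempts").path("attempt_1326232085508_0001_r_000000_0")
        .accept("application/json")
        .get(String.class);
    System.out.println(json);
  }
}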
@@ -0,0 +1,345 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.mapreduce.v2.hs.webapp;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.io.StringReader;
import java.util.Map;

import javax.ws.rs.core.MediaType;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.MRJobConfig;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.app.AppContext;
import org.apache.hadoop.mapreduce.v2.app.MockJobs;
import org.apache.hadoop.mapreduce.v2.app.job.Job;
import org.apache.hadoop.mapreduce.v2.util.MRApps;
import org.apache.hadoop.yarn.Clock;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.event.EventHandler;
import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
import org.apache.hadoop.yarn.webapp.WebApp;
import org.apache.hadoop.yarn.webapp.WebServicesTestUtils;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.Test;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;
import org.xml.sax.InputSource;

import com.google.common.collect.Maps;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.servlet.GuiceServletContextListener;
import com.google.inject.servlet.ServletModule;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.WebResource;
import com.sun.jersey.guice.spi.container.servlet.GuiceContainer;
import com.sun.jersey.test.framework.JerseyTest;
import com.sun.jersey.test.framework.WebAppDescriptor;

/**
 * Test the history server Rest API for getting the job conf. This
 * requires creating a temporary configuration file.
 *
 * /ws/v1/history/mapreduce/jobs/{jobid}/conf
 */
public class TestHsWebServicesJobConf extends JerseyTest {

  private static Configuration conf = new Configuration();
  private static TestAppContext appContext;
  private static HsWebApp webApp;

  private static File testConfDir = new File("target",
      TestHsWebServicesJobConf.class.getSimpleName() + "confDir");

  static class TestAppContext implements AppContext {
    final ApplicationAttemptId appAttemptID;
    final ApplicationId appID;
    final String user = MockJobs.newUserName();
    final Map<JobId, Job> jobs;
    final long startTime = System.currentTimeMillis();

    TestAppContext(int appid, int numTasks, int numAttempts, Path confPath) {
      appID = MockJobs.newAppID(appid);
      appAttemptID = MockJobs.newAppAttemptID(appID, 0);
      Map<JobId, Job> map = Maps.newHashMap();
      Job job = MockJobs.newJob(appID, 0, numTasks, numAttempts, confPath);
      map.put(job.getID(), job);
      jobs = map;
    }

    @Override
    public ApplicationAttemptId getApplicationAttemptId() {
      return appAttemptID;
    }

    @Override
    public ApplicationId getApplicationID() {
      return appID;
    }

    @Override
    public CharSequence getUser() {
      return user;
    }

    @Override
    public Job getJob(JobId jobID) {
      return jobs.get(jobID);
    }

    @Override
    public Map<JobId, Job> getAllJobs() {
      return jobs; // OK
    }

    @SuppressWarnings("rawtypes")
    @Override
    public EventHandler getEventHandler() {
      return null;
    }

    @Override
    public Clock getClock() {
      return null;
    }

    @Override
    public String getApplicationName() {
      return "TestApp";
    }

    @Override
    public long getStartTime() {
      return startTime;
    }
  }

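  // The servlet module below writes the test Configuration out to a real
  // job.xml under testConfDir before wiring up Guice, so the /conf endpoint
  // has an actual file to load when the test issues its requests.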
  private Injector injector = Guice.createInjector(new ServletModule() {
    @Override
    protected void configureServlets() {

      Path confPath = new Path(testConfDir.toString(),
          MRJobConfig.JOB_CONF_FILE);
      Configuration config = new Configuration();

      FileSystem localFs;
      try {
        localFs = FileSystem.getLocal(config);
        confPath = localFs.makeQualified(confPath);

        OutputStream out = localFs.create(confPath);
        try {
          conf.writeXml(out);
        } finally {
          out.close();
        }
        if (!localFs.exists(confPath)) {
          fail("error creating config file: " + confPath);
        }

      } catch (IOException e) {
        fail("error creating config file: " + e.getMessage());
      }

      appContext = new TestAppContext(0, 2, 1, confPath);

      webApp = mock(HsWebApp.class);
      when(webApp.name()).thenReturn("hsmockwebapp");

      bind(JAXBContextResolver.class);
      bind(HsWebServices.class);
      bind(GenericExceptionHandler.class);
      bind(WebApp.class).toInstance(webApp);
      bind(AppContext.class).toInstance(appContext);
      bind(Configuration.class).toInstance(conf);

      serve("/*").with(GuiceContainer.class);
    }
  });

  public class GuiceServletConfig extends GuiceServletContextListener {

    @Override
    protected Injector getInjector() {
      return injector;
    }
  }

  @Before
  @Override
  public void setUp() throws Exception {
    super.setUp();
    testConfDir.mkdir();

  }

  @AfterClass
  public static void stop() {
    FileUtil.fullyDelete(testConfDir);
  }

  public TestHsWebServicesJobConf() {
    super(new WebAppDescriptor.Builder(
        "org.apache.hadoop.mapreduce.v2.hs.webapp")
        .contextListenerClass(GuiceServletConfig.class)
        .filterClass(com.google.inject.servlet.GuiceFilter.class)
        .contextPath("jersey-guice-filter").servletPath("/").build());
  }

  @Test
  public void testJobConf() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);

      ClientResponse response = r.path("ws").path("v1").path("history")
          .path("mapreduce")
          .path("jobs").path(jobId).path("conf")
          .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
      assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
      JSONObject json = response.getEntity(JSONObject.class);
      assertEquals("incorrect number of elements", 1, json.length());
      JSONObject info = json.getJSONObject("conf");
      verifyHsJobConf(info, jobsMap.get(id));
    }
  }

  @Test
  public void testJobConfSlash() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);

      ClientResponse response = r.path("ws").path("v1").path("history").path("mapreduce")
          .path("jobs").path(jobId).path("conf/")
          .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
      assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
      JSONObject json = response.getEntity(JSONObject.class);
      assertEquals("incorrect number of elements", 1, json.length());
      JSONObject info = json.getJSONObject("conf");
      verifyHsJobConf(info, jobsMap.get(id));
    }
  }

  @Test
  public void testJobConfDefault() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);

      ClientResponse response = r.path("ws").path("v1").path("history").path("mapreduce")
          .path("jobs").path(jobId).path("conf").get(ClientResponse.class);
      assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
      JSONObject json = response.getEntity(JSONObject.class);
      assertEquals("incorrect number of elements", 1, json.length());
      JSONObject info = json.getJSONObject("conf");
      verifyHsJobConf(info, jobsMap.get(id));
    }
  }

  @Test
  public void testJobConfXML() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);

      ClientResponse response = r.path("ws").path("v1").path("history").path("mapreduce")
          .path("jobs").path(jobId).path("conf")
          .accept(MediaType.APPLICATION_XML).get(ClientResponse.class);
      assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
      String xml = response.getEntity(String.class);
      DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
      DocumentBuilder db = dbf.newDocumentBuilder();
      InputSource is = new InputSource();
      is.setCharacterStream(new StringReader(xml));
      Document dom = db.parse(is);
      NodeList info = dom.getElementsByTagName("conf");
      verifyHsJobConfXML(info, jobsMap.get(id));
    }
  }

  public void verifyHsJobConf(JSONObject info, Job job) throws JSONException {

    assertEquals("incorrect number of elements", 2, info.length());

    WebServicesTestUtils.checkStringMatch("path", job.getConfFile().toString(),
        info.getString("path"));
    // just do simple verification of fields - not that the data in the
    // fields is correct
    JSONArray properties = info.getJSONArray("property");
    for (int i = 0; i < properties.length(); i++) {
      JSONObject prop = properties.getJSONObject(i);
      String name = prop.getString("name");
      String value = prop.getString("value");
      assertTrue("name not set", (name != null && !name.isEmpty()));
      assertTrue("value not set", (value != null && !value.isEmpty()));
    }
  }

  public void verifyHsJobConfXML(NodeList nodes, Job job) {

    assertEquals("incorrect number of elements", 1, nodes.getLength());

    for (int i = 0; i < nodes.getLength(); i++) {
      Element element = (Element) nodes.item(i);
      WebServicesTestUtils.checkStringMatch("path", job.getConfFile()
          .toString(), WebServicesTestUtils.getXmlString(element, "path"));

      // just do simple verification of fields - not that the data in the
      // fields is correct
      NodeList properties = element.getElementsByTagName("property");

      for (int j = 0; j < properties.getLength(); j++) {
        Element property = (Element) properties.item(j);
        assertNotNull("should have properties in the web service info",
            property);
        String name = WebServicesTestUtils.getXmlString(property, "name");
        String value = WebServicesTestUtils.getXmlString(property, "value");
        assertTrue("name not set", (name != null && !name.isEmpty()));
        assertTrue("value not set", (value != null && !value.isEmpty()));
      }
    }
  }

}
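The conf resource verified above can be fetched the same way from outside the harness. Again a sketch only: the history server address and the job ID below are assumptions, not values from this commit.

import com.sun.jersey.api.client.Client;

public class HsJobConfQuery {
  public static void main(String[] args) {
    Client client = Client.create();
    // Returns the archived job configuration; in XML form each <property>
    // element carries the name/value pair that verifyHsJobConfXML checks.
    String xml = client
        .resource("http://localhost:19888/ws/v1/history/mapreduce")
        .path("jobs").path("job_1326232085508_0001").path("conf")
        .accept("application/xml")
        .get(String.class);
    System.out.println(xml);
  }
}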
@@ -0,0 +1,755 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.mapreduce.v2.hs.webapp;

import static org.apache.hadoop.yarn.util.StringHelper.join;
import static org.apache.hadoop.yarn.util.StringHelper.ujoin;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import java.io.StringReader;
import java.util.Map;

import javax.ws.rs.core.MediaType;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.v2.api.records.AMInfo;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.app.AppContext;
import org.apache.hadoop.mapreduce.v2.app.MockJobs;
import org.apache.hadoop.mapreduce.v2.app.job.Job;
import org.apache.hadoop.mapreduce.v2.util.MRApps;
import org.apache.hadoop.yarn.Clock;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.event.EventHandler;
import org.apache.hadoop.yarn.util.BuilderUtils;
import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
import org.apache.hadoop.yarn.webapp.WebApp;
import org.apache.hadoop.yarn.webapp.WebServicesTestUtils;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import org.junit.Before;
import org.junit.Test;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;
import org.xml.sax.InputSource;

import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.servlet.GuiceServletContextListener;
import com.google.inject.servlet.ServletModule;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.ClientResponse.Status;
import com.sun.jersey.api.client.UniformInterfaceException;
import com.sun.jersey.api.client.WebResource;
import com.sun.jersey.guice.spi.container.servlet.GuiceContainer;
import com.sun.jersey.test.framework.JerseyTest;
import com.sun.jersey.test.framework.WebAppDescriptor;

/**
 * Test the history server Rest API for getting jobs, a specific job, job
 * counters, and job attempts.
 *
 * /ws/v1/history/mapreduce/jobs
 * /ws/v1/history/mapreduce/jobs/{jobid}
 * /ws/v1/history/mapreduce/jobs/{jobid}/counters
 * /ws/v1/history/mapreduce/jobs/{jobid}/attempts
 */
public class TestHsWebServicesJobs extends JerseyTest {

  private static Configuration conf = new Configuration();
  private static TestAppContext appContext;
  private static HsWebApp webApp;

  static class TestAppContext implements AppContext {
    final ApplicationAttemptId appAttemptID;
    final ApplicationId appID;
    final String user = MockJobs.newUserName();
    final Map<JobId, Job> jobs;
    final long startTime = System.currentTimeMillis();

    TestAppContext(int appid, int numJobs, int numTasks, int numAttempts) {
      appID = MockJobs.newAppID(appid);
      appAttemptID = MockJobs.newAppAttemptID(appID, 0);
      jobs = MockJobs.newJobs(appID, numJobs, numTasks, numAttempts);
    }

    TestAppContext() {
      this(0, 1, 2, 1);
    }

    @Override
    public ApplicationAttemptId getApplicationAttemptId() {
      return appAttemptID;
    }

    @Override
    public ApplicationId getApplicationID() {
      return appID;
    }

    @Override
    public CharSequence getUser() {
      return user;
    }

    @Override
    public Job getJob(JobId jobID) {
      return jobs.get(jobID);
    }

    @Override
    public Map<JobId, Job> getAllJobs() {
      return jobs; // OK
    }

    @SuppressWarnings("rawtypes")
    @Override
    public EventHandler getEventHandler() {
      return null;
    }

    @Override
    public Clock getClock() {
      return null;
    }

    @Override
    public String getApplicationName() {
      return "TestApp";
    }

    @Override
    public long getStartTime() {
      return startTime;
    }
  }

  private Injector injector = Guice.createInjector(new ServletModule() {
    @Override
    protected void configureServlets() {

      appContext = new TestAppContext();
      webApp = mock(HsWebApp.class);
      when(webApp.name()).thenReturn("hsmockwebapp");

      bind(JAXBContextResolver.class);
      bind(HsWebServices.class);
      bind(GenericExceptionHandler.class);
      bind(WebApp.class).toInstance(webApp);
      bind(AppContext.class).toInstance(appContext);
      bind(Configuration.class).toInstance(conf);

      serve("/*").with(GuiceContainer.class);
    }
  });

  public class GuiceServletConfig extends GuiceServletContextListener {

    @Override
    protected Injector getInjector() {
      return injector;
    }
  }

  @Before
  @Override
  public void setUp() throws Exception {
    super.setUp();

  }

  public TestHsWebServicesJobs() {
    super(new WebAppDescriptor.Builder(
        "org.apache.hadoop.mapreduce.v2.hs.webapp")
        .contextListenerClass(GuiceServletConfig.class)
        .filterClass(com.google.inject.servlet.GuiceFilter.class)
        .contextPath("jersey-guice-filter").servletPath("/").build());
  }

  @Test
  public void testJobs() throws JSONException, Exception {
    WebResource r = resource();
    ClientResponse response = r.path("ws").path("v1").path("history")
        .path("mapreduce").path("jobs").accept(MediaType.APPLICATION_JSON)
        .get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
    JSONObject json = response.getEntity(JSONObject.class);
    assertEquals("incorrect number of elements", 1, json.length());
    JSONObject jobs = json.getJSONObject("jobs");
    JSONArray arr = jobs.getJSONArray("job");
    assertEquals("incorrect number of elements", 1, arr.length());
    JSONObject info = arr.getJSONObject(0);
    Job job = appContext.getJob(MRApps.toJobID(info.getString("id")));
    VerifyJobsUtils.verifyHsJob(info, job);

  }

  @Test
  public void testJobsSlash() throws JSONException, Exception {
    WebResource r = resource();
    ClientResponse response = r.path("ws").path("v1").path("history")
        .path("mapreduce").path("jobs/").accept(MediaType.APPLICATION_JSON)
        .get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
    JSONObject json = response.getEntity(JSONObject.class);
    assertEquals("incorrect number of elements", 1, json.length());
    JSONObject jobs = json.getJSONObject("jobs");
    JSONArray arr = jobs.getJSONArray("job");
    assertEquals("incorrect number of elements", 1, arr.length());
    JSONObject info = arr.getJSONObject(0);
    Job job = appContext.getJob(MRApps.toJobID(info.getString("id")));
    VerifyJobsUtils.verifyHsJob(info, job);

  }

  @Test
  public void testJobsDefault() throws JSONException, Exception {
    WebResource r = resource();
    ClientResponse response = r.path("ws").path("v1").path("history")
        .path("mapreduce").path("jobs").get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
    JSONObject json = response.getEntity(JSONObject.class);
    assertEquals("incorrect number of elements", 1, json.length());
    JSONObject jobs = json.getJSONObject("jobs");
    JSONArray arr = jobs.getJSONArray("job");
    assertEquals("incorrect number of elements", 1, arr.length());
    JSONObject info = arr.getJSONObject(0);
    Job job = appContext.getJob(MRApps.toJobID(info.getString("id")));
    VerifyJobsUtils.verifyHsJob(info, job);

  }

  @Test
  public void testJobsXML() throws Exception {
    WebResource r = resource();
    ClientResponse response = r.path("ws").path("v1").path("history")
        .path("mapreduce").path("jobs").accept(MediaType.APPLICATION_XML)
        .get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
    String xml = response.getEntity(String.class);
    DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
    DocumentBuilder db = dbf.newDocumentBuilder();
    InputSource is = new InputSource();
    is.setCharacterStream(new StringReader(xml));
    Document dom = db.parse(is);
    NodeList jobs = dom.getElementsByTagName("jobs");
    assertEquals("incorrect number of elements", 1, jobs.getLength());
    NodeList job = dom.getElementsByTagName("job");
    assertEquals("incorrect number of elements", 1, job.getLength());
    verifyHsJobXML(job, appContext);

  }

  public void verifyHsJobXML(NodeList nodes, TestAppContext appContext) {

    assertEquals("incorrect number of elements", 1, nodes.getLength());

    for (int i = 0; i < nodes.getLength(); i++) {
      Element element = (Element) nodes.item(i);

      Job job = appContext.getJob(MRApps.toJobID(WebServicesTestUtils
          .getXmlString(element, "id")));
      assertNotNull("Job not found - output incorrect", job);

      VerifyJobsUtils.verifyHsJobGeneric(job,
          WebServicesTestUtils.getXmlString(element, "id"),
          WebServicesTestUtils.getXmlString(element, "user"),
          WebServicesTestUtils.getXmlString(element, "name"),
          WebServicesTestUtils.getXmlString(element, "state"),
          WebServicesTestUtils.getXmlString(element, "queue"),
          WebServicesTestUtils.getXmlLong(element, "startTime"),
          WebServicesTestUtils.getXmlLong(element, "finishTime"),
          WebServicesTestUtils.getXmlInt(element, "mapsTotal"),
          WebServicesTestUtils.getXmlInt(element, "mapsCompleted"),
          WebServicesTestUtils.getXmlInt(element, "reducesTotal"),
          WebServicesTestUtils.getXmlInt(element, "reducesCompleted"));

      // restricted access fields - if security and acls set
      VerifyJobsUtils.verifyHsJobGenericSecure(job,
          WebServicesTestUtils.getXmlBoolean(element, "uberized"),
          WebServicesTestUtils.getXmlString(element, "diagnostics"),
          WebServicesTestUtils.getXmlLong(element, "avgMapTime"),
          WebServicesTestUtils.getXmlLong(element, "avgReduceTime"),
          WebServicesTestUtils.getXmlLong(element, "avgShuffleTime"),
          WebServicesTestUtils.getXmlLong(element, "avgMergeTime"),
          WebServicesTestUtils.getXmlInt(element, "failedReduceAttempts"),
          WebServicesTestUtils.getXmlInt(element, "killedReduceAttempts"),
          WebServicesTestUtils.getXmlInt(element, "successfulReduceAttempts"),
          WebServicesTestUtils.getXmlInt(element, "failedMapAttempts"),
          WebServicesTestUtils.getXmlInt(element, "killedMapAttempts"),
          WebServicesTestUtils.getXmlInt(element, "successfulMapAttempts"));
    }
  }

  @Test
  public void testJobId() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);

      ClientResponse response = r.path("ws").path("v1").path("history")
          .path("mapreduce").path("jobs").path(jobId)
          .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
      assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
      JSONObject json = response.getEntity(JSONObject.class);
      assertEquals("incorrect number of elements", 1, json.length());
      JSONObject info = json.getJSONObject("job");
      VerifyJobsUtils.verifyHsJob(info, jobsMap.get(id));
    }

  }

  @Test
  public void testJobIdSlash() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);

      ClientResponse response = r.path("ws").path("v1").path("history")
          .path("mapreduce").path("jobs").path(jobId + "/")
          .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
      assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
      JSONObject json = response.getEntity(JSONObject.class);
      assertEquals("incorrect number of elements", 1, json.length());
      JSONObject info = json.getJSONObject("job");
      VerifyJobsUtils.verifyHsJob(info, jobsMap.get(id));
    }
  }

  @Test
  public void testJobIdDefault() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);

      ClientResponse response = r.path("ws").path("v1").path("history")
          .path("mapreduce").path("jobs").path(jobId).get(ClientResponse.class);
      assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
      JSONObject json = response.getEntity(JSONObject.class);
      assertEquals("incorrect number of elements", 1, json.length());
      JSONObject info = json.getJSONObject("job");
      VerifyJobsUtils.verifyHsJob(info, jobsMap.get(id));
    }

  }

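  // The three tests below exercise the error contract: failures surface as a
  // RemoteException JSON object with exactly three fields (message,
  // exception, javaClassName), serialized by the bound
  // GenericExceptionHandler.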
  @Test
  public void testJobIdNonExist() throws JSONException, Exception {
    WebResource r = resource();

    try {
      r.path("ws").path("v1").path("history").path("mapreduce").path("jobs")
          .path("job_1234_1_2").get(JSONObject.class);
      fail("should have thrown exception on invalid uri");
    } catch (UniformInterfaceException ue) {
      ClientResponse response = ue.getResponse();
      assertEquals(Status.NOT_FOUND, response.getClientResponseStatus());
      assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
      JSONObject msg = response.getEntity(JSONObject.class);
      JSONObject exception = msg.getJSONObject("RemoteException");
      assertEquals("incorrect number of elements", 3, exception.length());
      String message = exception.getString("message");
      String type = exception.getString("exception");
      String classname = exception.getString("javaClassName");
      WebServicesTestUtils.checkStringMatch("exception message",
          "java.lang.Exception: job, job_1234_1_2, is not found", message);
      WebServicesTestUtils.checkStringMatch("exception type",
          "NotFoundException", type);
      WebServicesTestUtils.checkStringMatch("exception classname",
          "org.apache.hadoop.yarn.webapp.NotFoundException", classname);
    }
  }

  @Test
  public void testJobIdInvalid() throws JSONException, Exception {
    WebResource r = resource();

    try {
      r.path("ws").path("v1").path("history").path("mapreduce").path("jobs")
          .path("job_foo").get(JSONObject.class);
      fail("should have thrown exception on invalid uri");
    } catch (UniformInterfaceException ue) {
      ClientResponse response = ue.getResponse();
      assertEquals(Status.BAD_REQUEST, response.getClientResponseStatus());
      assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
      JSONObject msg = response.getEntity(JSONObject.class);
      JSONObject exception = msg.getJSONObject("RemoteException");
      assertEquals("incorrect number of elements", 3, exception.length());
      String message = exception.getString("message");
      String type = exception.getString("exception");
      String classname = exception.getString("javaClassName");
      WebServicesTestUtils.checkStringMatch("exception message",
          "For input string: \"foo\"", message);
      WebServicesTestUtils.checkStringMatch("exception type",
          "NumberFormatException", type);
      WebServicesTestUtils.checkStringMatch("exception classname",
          "java.lang.NumberFormatException", classname);
    }
  }

  @Test
  public void testJobIdInvalidBogus() throws JSONException, Exception {
    WebResource r = resource();

    try {
      r.path("ws").path("v1").path("history").path("mapreduce").path("jobs")
          .path("bogusfoo").get(JSONObject.class);
      fail("should have thrown exception on invalid uri");
    } catch (UniformInterfaceException ue) {
      ClientResponse response = ue.getResponse();
      assertEquals(Status.NOT_FOUND, response.getClientResponseStatus());
      assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
      JSONObject msg = response.getEntity(JSONObject.class);
      JSONObject exception = msg.getJSONObject("RemoteException");
      assertEquals("incorrect number of elements", 3, exception.length());
      String message = exception.getString("message");
      String type = exception.getString("exception");
      String classname = exception.getString("javaClassName");
      WebServicesTestUtils.checkStringMatch("exception message",
          "java.lang.Exception: Error parsing job ID: bogusfoo", message);
      WebServicesTestUtils.checkStringMatch("exception type",
          "NotFoundException", type);
      WebServicesTestUtils.checkStringMatch("exception classname",
          "org.apache.hadoop.yarn.webapp.NotFoundException", classname);
    }
  }

  @Test
  public void testJobIdXML() throws Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);

      ClientResponse response = r.path("ws").path("v1").path("history")
          .path("mapreduce").path("jobs").path(jobId)
          .accept(MediaType.APPLICATION_XML).get(ClientResponse.class);
      assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
      String xml = response.getEntity(String.class);
      DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
      DocumentBuilder db = dbf.newDocumentBuilder();
      InputSource is = new InputSource();
      is.setCharacterStream(new StringReader(xml));
      Document dom = db.parse(is);
      NodeList job = dom.getElementsByTagName("job");
      verifyHsJobXML(job, appContext);
    }

  }

  @Test
  public void testJobCounters() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);

      ClientResponse response = r.path("ws").path("v1").path("history")
          .path("mapreduce").path("jobs").path(jobId).path("counters")
          .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
      assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
      JSONObject json = response.getEntity(JSONObject.class);
      assertEquals("incorrect number of elements", 1, json.length());
      JSONObject info = json.getJSONObject("jobCounters");
      verifyHsJobCounters(info, jobsMap.get(id));
    }
  }

  @Test
  public void testJobCountersSlash() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);

      ClientResponse response = r.path("ws").path("v1").path("history")
          .path("mapreduce").path("jobs").path(jobId).path("counters/")
          .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
      assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
      JSONObject json = response.getEntity(JSONObject.class);
      assertEquals("incorrect number of elements", 1, json.length());
      JSONObject info = json.getJSONObject("jobCounters");
      verifyHsJobCounters(info, jobsMap.get(id));
    }
  }

  @Test
  public void testJobCountersDefault() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);

      ClientResponse response = r.path("ws").path("v1").path("history")
          .path("mapreduce").path("jobs").path(jobId).path("counters/")
          .get(ClientResponse.class);
      assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
      JSONObject json = response.getEntity(JSONObject.class);
      assertEquals("incorrect number of elements", 1, json.length());
      JSONObject info = json.getJSONObject("jobCounters");
      verifyHsJobCounters(info, jobsMap.get(id));
    }
  }

  @Test
  public void testJobCountersXML() throws Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);

      ClientResponse response = r.path("ws").path("v1").path("history")
          .path("mapreduce").path("jobs").path(jobId).path("counters")
          .accept(MediaType.APPLICATION_XML).get(ClientResponse.class);
      assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
      String xml = response.getEntity(String.class);
      DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
      DocumentBuilder db = dbf.newDocumentBuilder();
      InputSource is = new InputSource();
      is.setCharacterStream(new StringReader(xml));
      Document dom = db.parse(is);
      NodeList info = dom.getElementsByTagName("jobCounters");
      verifyHsJobCountersXML(info, jobsMap.get(id));
    }
  }

  public void verifyHsJobCounters(JSONObject info, Job job)
      throws JSONException {

    assertEquals("incorrect number of elements", 2, info.length());

    WebServicesTestUtils.checkStringMatch("id", MRApps.toString(job.getID()),
        info.getString("id"));
    // just do simple verification of fields - not that the data in the
    // fields is correct
    JSONArray counterGroups = info.getJSONArray("counterGroup");
    for (int i = 0; i < counterGroups.length(); i++) {
      JSONObject counterGroup = counterGroups.getJSONObject(i);
      String name = counterGroup.getString("counterGroupName");
      assertTrue("name not set", (name != null && !name.isEmpty()));
      JSONArray counters = counterGroup.getJSONArray("counter");
      for (int j = 0; j < counters.length(); j++) {
        JSONObject counter = counters.getJSONObject(j);
        String counterName = counter.getString("name");
        assertTrue("counter name not set",
            (counterName != null && !counterName.isEmpty()));

        long mapValue = counter.getLong("mapCounterValue");
        assertTrue("mapCounterValue not >= 0", mapValue >= 0);

        long reduceValue = counter.getLong("reduceCounterValue");
        assertTrue("reduceCounterValue not >= 0", reduceValue >= 0);

        long totalValue = counter.getLong("totalCounterValue");
        assertTrue("totalCounterValue not >= 0", totalValue >= 0);

      }
    }
  }

  public void verifyHsJobCountersXML(NodeList nodes, Job job) {

    for (int i = 0; i < nodes.getLength(); i++) {
      Element element = (Element) nodes.item(i);

      assertNotNull("Job not found - output incorrect", job);

      WebServicesTestUtils.checkStringMatch("id", MRApps.toString(job.getID()),
          WebServicesTestUtils.getXmlString(element, "id"));
      // just do simple verification of fields - not that the data in the
      // fields is correct
      NodeList groups = element.getElementsByTagName("counterGroup");

      for (int j = 0; j < groups.getLength(); j++) {
        Element counters = (Element) groups.item(j);
        assertNotNull("should have counters in the web service info", counters);
        String name = WebServicesTestUtils.getXmlString(counters,
            "counterGroupName");
        assertTrue("name not set", (name != null && !name.isEmpty()));
        NodeList counterArr = counters.getElementsByTagName("counter");
        for (int z = 0; z < counterArr.getLength(); z++) {
          Element counter = (Element) counterArr.item(z);
          String counterName = WebServicesTestUtils.getXmlString(counter,
              "name");
          assertTrue("counter name not set",
              (counterName != null && !counterName.isEmpty()));

          long mapValue = WebServicesTestUtils.getXmlLong(counter,
              "mapCounterValue");
          assertTrue("mapCounterValue not >= 0", mapValue >= 0);

          long reduceValue = WebServicesTestUtils.getXmlLong(counter,
              "reduceCounterValue");
          assertTrue("reduceCounterValue not >= 0", reduceValue >= 0);

          long totalValue = WebServicesTestUtils.getXmlLong(counter,
              "totalCounterValue");
          assertTrue("totalCounterValue not >= 0", totalValue >= 0);
        }
      }
    }
  }

@Test
|
||||||
|
public void testJobAttempts() throws JSONException, Exception {
|
||||||
|
WebResource r = resource();
|
||||||
|
Map<JobId, Job> jobsMap = appContext.getAllJobs();
|
||||||
|
for (JobId id : jobsMap.keySet()) {
|
||||||
|
String jobId = MRApps.toString(id);
|
||||||
|
|
||||||
|
ClientResponse response = r.path("ws").path("v1").path("history")
|
||||||
|
.path("mapreduce").path("jobs").path(jobId).path("attempts")
|
||||||
|
.accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
|
||||||
|
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
|
||||||
|
JSONObject json = response.getEntity(JSONObject.class);
|
||||||
|
assertEquals("incorrect number of elements", 1, json.length());
|
||||||
|
JSONObject info = json.getJSONObject("attempts");
|
||||||
|
verifyHsJobAttempts(info, jobsMap.get(id));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void testJobAttemptsSlash() throws JSONException, Exception {
|
||||||
|
WebResource r = resource();
|
||||||
|
Map<JobId, Job> jobsMap = appContext.getAllJobs();
|
||||||
|
for (JobId id : jobsMap.keySet()) {
|
||||||
|
String jobId = MRApps.toString(id);
|
||||||
|
|
||||||
|
ClientResponse response = r.path("ws").path("v1").path("history")
|
||||||
|
.path("mapreduce").path("jobs").path(jobId).path("attempts/")
|
||||||
|
.accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
|
||||||
|
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
|
||||||
|
JSONObject json = response.getEntity(JSONObject.class);
|
||||||
|
assertEquals("incorrect number of elements", 1, json.length());
|
||||||
|
JSONObject info = json.getJSONObject("attempts");
|
||||||
|
verifyHsJobAttempts(info, jobsMap.get(id));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void testJobAttemptsDefault() throws JSONException, Exception {
|
||||||
|
WebResource r = resource();
|
||||||
|
Map<JobId, Job> jobsMap = appContext.getAllJobs();
|
||||||
|
for (JobId id : jobsMap.keySet()) {
|
||||||
|
String jobId = MRApps.toString(id);
|
||||||
|
|
||||||
|
ClientResponse response = r.path("ws").path("v1").path("history")
|
||||||
|
.path("mapreduce").path("jobs").path(jobId).path("attempts")
|
||||||
|
.get(ClientResponse.class);
|
||||||
|
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
|
||||||
|
JSONObject json = response.getEntity(JSONObject.class);
|
||||||
|
assertEquals("incorrect number of elements", 1, json.length());
|
||||||
|
JSONObject info = json.getJSONObject("attempts");
|
||||||
|
verifyHsJobAttempts(info, jobsMap.get(id));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void testJobAttemptsXML() throws Exception {
|
||||||
|
WebResource r = resource();
|
||||||
|
Map<JobId, Job> jobsMap = appContext.getAllJobs();
|
||||||
|
for (JobId id : jobsMap.keySet()) {
|
||||||
|
String jobId = MRApps.toString(id);
|
||||||
|
|
||||||
|
ClientResponse response = r.path("ws").path("v1").path("history")
|
||||||
|
.path("mapreduce").path("jobs").path(jobId).path("attempts")
|
||||||
|
.accept(MediaType.APPLICATION_XML).get(ClientResponse.class);
|
||||||
|
assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
|
||||||
|
String xml = response.getEntity(String.class);
|
||||||
|
DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
|
||||||
|
DocumentBuilder db = dbf.newDocumentBuilder();
|
||||||
|
InputSource is = new InputSource();
|
||||||
|
is.setCharacterStream(new StringReader(xml));
|
||||||
|
Document dom = db.parse(is);
|
||||||
|
NodeList attempts = dom.getElementsByTagName("attempts");
|
||||||
|
assertEquals("incorrect number of elements", 1, attempts.getLength());
|
||||||
|
NodeList info = dom.getElementsByTagName("attempt");
|
||||||
|
verifyHsJobAttemptsXML(info, jobsMap.get(id));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public void verifyHsJobAttempts(JSONObject info, Job job)
|
||||||
|
throws JSONException {
|
||||||
|
|
||||||
|
JSONArray attempts = info.getJSONArray("attempt");
|
||||||
|
assertEquals("incorrect number of elements", 2, attempts.length());
|
||||||
|
for (int i = 0; i < attempts.length(); i++) {
|
||||||
|
JSONObject attempt = attempts.getJSONObject(i);
|
||||||
|
verifyHsJobAttemptsGeneric(job, attempt.getString("nodeHttpAddress"),
|
||||||
|
attempt.getString("nodeId"), attempt.getInt("id"),
|
||||||
|
attempt.getLong("startTime"), attempt.getString("containerId"),
|
||||||
|
attempt.getString("logsLink"));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public void verifyHsJobAttemptsXML(NodeList nodes, Job job) {
|
||||||
|
|
||||||
|
assertEquals("incorrect number of elements", 2, nodes.getLength());
|
||||||
|
for (int i = 0; i < nodes.getLength(); i++) {
|
||||||
|
Element element = (Element) nodes.item(i);
|
||||||
|
verifyHsJobAttemptsGeneric(job,
|
||||||
|
WebServicesTestUtils.getXmlString(element, "nodeHttpAddress"),
|
||||||
|
WebServicesTestUtils.getXmlString(element, "nodeId"),
|
||||||
|
WebServicesTestUtils.getXmlInt(element, "id"),
|
||||||
|
WebServicesTestUtils.getXmlLong(element, "startTime"),
|
||||||
|
WebServicesTestUtils.getXmlString(element, "containerId"),
|
||||||
|
WebServicesTestUtils.getXmlString(element, "logsLink"));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public void verifyHsJobAttemptsGeneric(Job job, String nodeHttpAddress,
|
||||||
|
String nodeId, int id, long startTime, String containerId, String logsLink) {
|
||||||
|
boolean attemptFound = false;
|
||||||
|
for (AMInfo amInfo : job.getAMInfos()) {
|
||||||
|
if (amInfo.getAppAttemptId().getAttemptId() == id) {
|
||||||
|
attemptFound = true;
|
||||||
|
String nmHost = amInfo.getNodeManagerHost();
|
||||||
|
int nmPort = amInfo.getNodeManagerHttpPort();
|
||||||
|
WebServicesTestUtils.checkStringMatch("nodeHttpAddress", nmHost + ":"
|
||||||
|
+ nmPort, nodeHttpAddress);
|
||||||
|
WebServicesTestUtils.checkStringMatch("nodeId",
|
||||||
|
BuilderUtils.newNodeId(nmHost, nmPort).toString(), nodeId);
|
||||||
|
assertTrue("startime not greater than 0", startTime > 0);
|
||||||
|
WebServicesTestUtils.checkStringMatch("containerId", amInfo
|
||||||
|
.getContainerId().toString(), containerId);
|
||||||
|
|
||||||
|
String localLogsLink = join(
|
||||||
|
"hsmockwebapp",
|
||||||
|
ujoin("logs", nodeId, containerId, MRApps.toString(job.getID()),
|
||||||
|
job.getUserName()));
|
||||||
|
|
||||||
|
assertTrue("logsLink", logsLink.contains(localLogsLink));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
assertTrue("attempt: " + id + " was not found", attemptFound);
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
|
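The counter payload these verifiers walk has the shape jobCounters -> counterGroup[] -> counter{name, mapCounterValue, reduceCounterValue, totalCounterValue}. As a rough sketch of reading the same resource outside the JerseyTest harness - the host, port, and job id here are hypothetical, and it assumes the jersey-json module is on the classpath so JSONObject entities unmarshal:

import javax.ws.rs.core.MediaType;
import com.sun.jersey.api.client.Client;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.WebResource;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONObject;

public class JobCountersSketch {
  public static void main(String[] args) throws Exception {
    Client client = Client.create();
    // Hypothetical history server address (19888 is the usual webapp port).
    WebResource r = client.resource("http://historyserver:19888");
    ClientResponse response = r.path("ws").path("v1").path("history")
        .path("mapreduce").path("jobs").path(args[0]).path("counters")
        .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
    JSONObject counters = response.getEntity(JSONObject.class)
        .getJSONObject("jobCounters");
    JSONArray groups = counters.getJSONArray("counterGroup");
    for (int i = 0; i < groups.length(); i++) {
      JSONObject group = groups.getJSONObject(i);
      JSONArray arr = group.getJSONArray("counter");
      for (int j = 0; j < arr.length(); j++) {
        JSONObject c = arr.getJSONObject(j);
        // Print each counter with its aggregate value, as the tests assert.
        System.out.println(group.getString("counterGroupName") + " / "
            + c.getString("name") + " = " + c.getLong("totalCounterValue"));
      }
    }
  }
}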
@@ -0,0 +1,656 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.mapreduce.v2.hs.webapp;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import java.util.ArrayList;
import java.util.Collections;
import java.util.Map;

import javax.ws.rs.core.MediaType;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.app.AppContext;
import org.apache.hadoop.mapreduce.v2.app.MockJobs;
import org.apache.hadoop.mapreduce.v2.app.job.Job;
import org.apache.hadoop.mapreduce.v2.util.MRApps;
import org.apache.hadoop.yarn.Clock;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.event.EventHandler;
import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
import org.apache.hadoop.yarn.webapp.WebApp;
import org.apache.hadoop.yarn.webapp.WebServicesTestUtils;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import org.junit.Before;
import org.junit.Test;

import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.servlet.GuiceServletContextListener;
import com.google.inject.servlet.ServletModule;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.ClientResponse.Status;
import com.sun.jersey.api.client.WebResource;
import com.sun.jersey.guice.spi.container.servlet.GuiceContainer;
import com.sun.jersey.test.framework.JerseyTest;
import com.sun.jersey.test.framework.WebAppDescriptor;

/**
 * Test the history server Rest API for getting jobs with various query
 * parameters.
 *
 * /ws/v1/history/mapreduce/jobs?{query=value}
 */
public class TestHsWebServicesJobsQuery extends JerseyTest {

  private static Configuration conf = new Configuration();
  private static TestAppContext appContext;
  private static HsWebApp webApp;

  static class TestAppContext implements AppContext {
    final ApplicationAttemptId appAttemptID;
    final ApplicationId appID;
    final String user = MockJobs.newUserName();
    final Map<JobId, Job> jobs;
    final long startTime = System.currentTimeMillis();

    TestAppContext(int appid, int numJobs, int numTasks, int numAttempts) {
      appID = MockJobs.newAppID(appid);
      appAttemptID = MockJobs.newAppAttemptID(appID, 0);
      jobs = MockJobs.newJobs(appID, numJobs, numTasks, numAttempts);
    }

    TestAppContext() {
      this(0, 3, 2, 1);
    }

    @Override
    public ApplicationAttemptId getApplicationAttemptId() {
      return appAttemptID;
    }

    @Override
    public ApplicationId getApplicationID() {
      return appID;
    }

    @Override
    public CharSequence getUser() {
      return user;
    }

    @Override
    public Job getJob(JobId jobID) {
      return jobs.get(jobID);
    }

    @Override
    public Map<JobId, Job> getAllJobs() {
      return jobs; // OK
    }

    @SuppressWarnings("rawtypes")
    @Override
    public EventHandler getEventHandler() {
      return null;
    }

    @Override
    public Clock getClock() {
      return null;
    }

    @Override
    public String getApplicationName() {
      return "TestApp";
    }

    @Override
    public long getStartTime() {
      return startTime;
    }
  }

  private Injector injector = Guice.createInjector(new ServletModule() {
    @Override
    protected void configureServlets() {

      appContext = new TestAppContext();
      webApp = mock(HsWebApp.class);
      when(webApp.name()).thenReturn("hsmockwebapp");

      bind(JAXBContextResolver.class);
      bind(HsWebServices.class);
      bind(GenericExceptionHandler.class);
      bind(WebApp.class).toInstance(webApp);
      bind(AppContext.class).toInstance(appContext);
      bind(Configuration.class).toInstance(conf);

      serve("/*").with(GuiceContainer.class);
    }
  });

  public class GuiceServletConfig extends GuiceServletContextListener {

    @Override
    protected Injector getInjector() {
      return injector;
    }
  }

  @Before
  @Override
  public void setUp() throws Exception {
    super.setUp();
  }

  public TestHsWebServicesJobsQuery() {
    super(new WebAppDescriptor.Builder(
        "org.apache.hadoop.mapreduce.v2.hs.webapp")
        .contextListenerClass(GuiceServletConfig.class)
        .filterClass(com.google.inject.servlet.GuiceFilter.class)
        .contextPath("jersey-guice-filter").servletPath("/").build());
  }

  @Test
  public void testJobsQueryUserNone() throws JSONException, Exception {
    WebResource r = resource();
    ClientResponse response = r.path("ws").path("v1").path("history")
        .path("mapreduce").path("jobs").queryParam("user", "bogus")
        .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
    JSONObject json = response.getEntity(JSONObject.class);
    assertEquals("incorrect number of elements", 1, json.length());
    assertEquals("jobs is not null", JSONObject.NULL, json.get("jobs"));
  }

  @Test
  public void testJobsQueryUser() throws JSONException, Exception {
    WebResource r = resource();
    ClientResponse response = r.path("ws").path("v1").path("history")
        .path("mapreduce").path("jobs").queryParam("user", "mock")
        .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
    JSONObject json = response.getEntity(JSONObject.class);
    assertEquals("incorrect number of elements", 1, json.length());
    JSONObject jobs = json.getJSONObject("jobs");
    JSONArray arr = jobs.getJSONArray("job");
    assertEquals("incorrect number of elements", 3, arr.length());
    // just verify one of them.
    JSONObject info = arr.getJSONObject(0);
    Job job = appContext.getJob(MRApps.toJobID(info.getString("id")));
    VerifyJobsUtils.verifyHsJob(info, job);
  }

  @Test
  public void testJobsQueryLimit() throws JSONException, Exception {
    WebResource r = resource();
    ClientResponse response = r.path("ws").path("v1").path("history")
        .path("mapreduce").path("jobs").queryParam("limit", "2")
        .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
    JSONObject json = response.getEntity(JSONObject.class);
    assertEquals("incorrect number of elements", 1, json.length());
    JSONObject jobs = json.getJSONObject("jobs");
    JSONArray arr = jobs.getJSONArray("job");
    // make sure we get 2 back
    assertEquals("incorrect number of elements", 2, arr.length());
  }

  @Test
  public void testJobsQueryLimitInvalid() throws JSONException, Exception {
    WebResource r = resource();

    ClientResponse response = r.path("ws").path("v1").path("history")
        .path("mapreduce").path("jobs").queryParam("limit", "-1")
        .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);

    assertEquals(Status.BAD_REQUEST, response.getClientResponseStatus());
    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
    JSONObject msg = response.getEntity(JSONObject.class);
    JSONObject exception = msg.getJSONObject("RemoteException");
    assertEquals("incorrect number of elements", 3, exception.length());
    String message = exception.getString("message");
    String type = exception.getString("exception");
    String classname = exception.getString("javaClassName");
    WebServicesTestUtils.checkStringMatch("exception message",
        "java.lang.Exception: limit value must be greater then 0", message);
    WebServicesTestUtils.checkStringMatch("exception type",
        "BadRequestException", type);
    WebServicesTestUtils.checkStringMatch("exception classname",
        "org.apache.hadoop.yarn.webapp.BadRequestException", classname);
  }

  @Test
  public void testJobsQueryQueue() throws JSONException, Exception {
    WebResource r = resource();
    ClientResponse response = r.path("ws").path("v1").path("history")
        .path("mapreduce").path("jobs").queryParam("queue", "mockqueue")
        .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
    JSONObject json = response.getEntity(JSONObject.class);
    assertEquals("incorrect number of elements", 1, json.length());
    JSONObject jobs = json.getJSONObject("jobs");
    JSONArray arr = jobs.getJSONArray("job");
    assertEquals("incorrect number of elements", 3, arr.length());
  }

  @Test
  public void testJobsQueryQueueNonExist() throws JSONException, Exception {
    WebResource r = resource();
    ClientResponse response = r.path("ws").path("v1").path("history")
        .path("mapreduce").path("jobs").queryParam("queue", "bogus")
        .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
    JSONObject json = response.getEntity(JSONObject.class);
    assertEquals("incorrect number of elements", 1, json.length());
    assertEquals("jobs is not null", JSONObject.NULL, json.get("jobs"));
  }

  @Test
  public void testJobsQueryStartTimeEnd() throws JSONException, Exception {
    WebResource r = resource();
    // the mockJobs start time is the current time - some random amount
    Long now = System.currentTimeMillis();
    ClientResponse response = r.path("ws").path("v1").path("history")
        .path("mapreduce").path("jobs")
        .queryParam("startedTimeEnd", String.valueOf(now))
        .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
    JSONObject json = response.getEntity(JSONObject.class);
    assertEquals("incorrect number of elements", 1, json.length());
    JSONObject jobs = json.getJSONObject("jobs");
    JSONArray arr = jobs.getJSONArray("job");
    assertEquals("incorrect number of elements", 3, arr.length());
  }

  @Test
  public void testJobsQueryStartTimeBegin() throws JSONException, Exception {
    WebResource r = resource();
    // the mockJobs start time is the current time - some random amount
    Long now = System.currentTimeMillis();
    ClientResponse response = r.path("ws").path("v1").path("history")
        .path("mapreduce").path("jobs")
        .queryParam("startedTimeBegin", String.valueOf(now))
        .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
    JSONObject json = response.getEntity(JSONObject.class);
    assertEquals("incorrect number of elements", 1, json.length());
    assertEquals("jobs is not null", JSONObject.NULL, json.get("jobs"));
  }

  @Test
  public void testJobsQueryStartTimeBeginEnd() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    int size = jobsMap.size();
    ArrayList<Long> startTime = new ArrayList<Long>(size);
    // figure out the middle start time
    for (Map.Entry<JobId, Job> entry : jobsMap.entrySet()) {
      startTime.add(entry.getValue().getReport().getStartTime());
    }
    Collections.sort(startTime);

    assertTrue("Error: we must have at least 3 jobs", size >= 3);
    long midStartTime = startTime.get(size - 2);

    ClientResponse response = r.path("ws").path("v1").path("history")
        .path("mapreduce").path("jobs")
        .queryParam("startedTimeBegin", String.valueOf(40000))
        .queryParam("startedTimeEnd", String.valueOf(midStartTime))
        .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
    JSONObject json = response.getEntity(JSONObject.class);
    assertEquals("incorrect number of elements", 1, json.length());
    JSONObject jobs = json.getJSONObject("jobs");
    JSONArray arr = jobs.getJSONArray("job");
    assertEquals("incorrect number of elements", size - 1, arr.length());
  }

  @Test
  public void testJobsQueryStartTimeBeginEndInvalid() throws JSONException,
      Exception {
    WebResource r = resource();
    Long now = System.currentTimeMillis();
    ClientResponse response = r.path("ws").path("v1").path("history")
        .path("mapreduce").path("jobs")
        .queryParam("startedTimeBegin", String.valueOf(now))
        .queryParam("startedTimeEnd", String.valueOf(40000))
        .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
    assertEquals(Status.BAD_REQUEST, response.getClientResponseStatus());
    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
    JSONObject msg = response.getEntity(JSONObject.class);
    JSONObject exception = msg.getJSONObject("RemoteException");
    assertEquals("incorrect number of elements", 3, exception.length());
    String message = exception.getString("message");
    String type = exception.getString("exception");
    String classname = exception.getString("javaClassName");
    WebServicesTestUtils
        .checkStringMatch(
            "exception message",
            "java.lang.Exception: startedTimeEnd must be greater than startTimeBegin",
            message);
    WebServicesTestUtils.checkStringMatch("exception type",
        "BadRequestException", type);
    WebServicesTestUtils.checkStringMatch("exception classname",
        "org.apache.hadoop.yarn.webapp.BadRequestException", classname);
  }

  @Test
  public void testJobsQueryStartTimeInvalidformat() throws JSONException,
      Exception {
    WebResource r = resource();
    ClientResponse response = r.path("ws").path("v1").path("history")
        .path("mapreduce").path("jobs").queryParam("startedTimeBegin", "efsd")
        .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
    assertEquals(Status.BAD_REQUEST, response.getClientResponseStatus());
    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
    JSONObject msg = response.getEntity(JSONObject.class);
    JSONObject exception = msg.getJSONObject("RemoteException");
    assertEquals("incorrect number of elements", 3, exception.length());
    String message = exception.getString("message");
    String type = exception.getString("exception");
    String classname = exception.getString("javaClassName");
    WebServicesTestUtils
        .checkStringMatch(
            "exception message",
            "java.lang.Exception: Invalid number format: For input string: \"efsd\"",
            message);
    WebServicesTestUtils.checkStringMatch("exception type",
        "BadRequestException", type);
    WebServicesTestUtils.checkStringMatch("exception classname",
        "org.apache.hadoop.yarn.webapp.BadRequestException", classname);
  }

  @Test
  public void testJobsQueryStartTimeEndInvalidformat() throws JSONException,
      Exception {
    WebResource r = resource();
    ClientResponse response = r.path("ws").path("v1").path("history")
        .path("mapreduce").path("jobs").queryParam("startedTimeEnd", "efsd")
        .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
    assertEquals(Status.BAD_REQUEST, response.getClientResponseStatus());
    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
    JSONObject msg = response.getEntity(JSONObject.class);
    JSONObject exception = msg.getJSONObject("RemoteException");
    assertEquals("incorrect number of elements", 3, exception.length());
    String message = exception.getString("message");
    String type = exception.getString("exception");
    String classname = exception.getString("javaClassName");
    WebServicesTestUtils
        .checkStringMatch(
            "exception message",
            "java.lang.Exception: Invalid number format: For input string: \"efsd\"",
            message);
    WebServicesTestUtils.checkStringMatch("exception type",
        "BadRequestException", type);
    WebServicesTestUtils.checkStringMatch("exception classname",
        "org.apache.hadoop.yarn.webapp.BadRequestException", classname);
  }

  @Test
  public void testJobsQueryStartTimeNegative() throws JSONException, Exception {
    WebResource r = resource();
    ClientResponse response = r.path("ws").path("v1").path("history")
        .path("mapreduce").path("jobs")
        .queryParam("startedTimeBegin", String.valueOf(-1000))
        .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
    assertEquals(Status.BAD_REQUEST, response.getClientResponseStatus());
    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
    JSONObject msg = response.getEntity(JSONObject.class);
    JSONObject exception = msg.getJSONObject("RemoteException");
    assertEquals("incorrect number of elements", 3, exception.length());
    String message = exception.getString("message");
    String type = exception.getString("exception");
    String classname = exception.getString("javaClassName");
    WebServicesTestUtils
        .checkStringMatch("exception message",
            "java.lang.Exception: startedTimeBegin must be greater than 0",
            message);
    WebServicesTestUtils.checkStringMatch("exception type",
        "BadRequestException", type);
    WebServicesTestUtils.checkStringMatch("exception classname",
        "org.apache.hadoop.yarn.webapp.BadRequestException", classname);
  }

  @Test
  public void testJobsQueryStartTimeEndNegative() throws JSONException,
      Exception {
    WebResource r = resource();
    ClientResponse response = r.path("ws").path("v1").path("history")
        .path("mapreduce").path("jobs")
        .queryParam("startedTimeEnd", String.valueOf(-1000))
        .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
    assertEquals(Status.BAD_REQUEST, response.getClientResponseStatus());
    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
    JSONObject msg = response.getEntity(JSONObject.class);
    JSONObject exception = msg.getJSONObject("RemoteException");
    assertEquals("incorrect number of elements", 3, exception.length());
    String message = exception.getString("message");
    String type = exception.getString("exception");
    String classname = exception.getString("javaClassName");
    WebServicesTestUtils.checkStringMatch("exception message",
        "java.lang.Exception: startedTimeEnd must be greater than 0", message);
    WebServicesTestUtils.checkStringMatch("exception type",
        "BadRequestException", type);
    WebServicesTestUtils.checkStringMatch("exception classname",
        "org.apache.hadoop.yarn.webapp.BadRequestException", classname);
  }

  @Test
  public void testJobsQueryFinishTimeEndNegative() throws JSONException,
      Exception {
    WebResource r = resource();
    ClientResponse response = r.path("ws").path("v1").path("history")
        .path("mapreduce").path("jobs")
        .queryParam("finishedTimeEnd", String.valueOf(-1000))
        .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
    assertEquals(Status.BAD_REQUEST, response.getClientResponseStatus());
    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
    JSONObject msg = response.getEntity(JSONObject.class);
    JSONObject exception = msg.getJSONObject("RemoteException");
    assertEquals("incorrect number of elements", 3, exception.length());
    String message = exception.getString("message");
    String type = exception.getString("exception");
    String classname = exception.getString("javaClassName");
    WebServicesTestUtils.checkStringMatch("exception message",
        "java.lang.Exception: finishedTimeEnd must be greater than 0", message);
    WebServicesTestUtils.checkStringMatch("exception type",
        "BadRequestException", type);
    WebServicesTestUtils.checkStringMatch("exception classname",
        "org.apache.hadoop.yarn.webapp.BadRequestException", classname);
  }

  @Test
  public void testJobsQueryFinishTimeBeginNegative() throws JSONException,
      Exception {
    WebResource r = resource();
    ClientResponse response = r.path("ws").path("v1").path("history")
        .path("mapreduce").path("jobs")
        .queryParam("finishedTimeBegin", String.valueOf(-1000))
        .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
    assertEquals(Status.BAD_REQUEST, response.getClientResponseStatus());
    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
    JSONObject msg = response.getEntity(JSONObject.class);
    JSONObject exception = msg.getJSONObject("RemoteException");
    assertEquals("incorrect number of elements", 3, exception.length());
    String message = exception.getString("message");
    String type = exception.getString("exception");
    String classname = exception.getString("javaClassName");
    WebServicesTestUtils.checkStringMatch("exception message",
        "java.lang.Exception: finishedTimeBegin must be greater than 0",
        message);
    WebServicesTestUtils.checkStringMatch("exception type",
        "BadRequestException", type);
    WebServicesTestUtils.checkStringMatch("exception classname",
        "org.apache.hadoop.yarn.webapp.BadRequestException", classname);
  }

  @Test
  public void testJobsQueryFinishTimeBeginEndInvalid() throws JSONException,
      Exception {
    WebResource r = resource();
    Long now = System.currentTimeMillis();
    ClientResponse response = r.path("ws").path("v1").path("history")
        .path("mapreduce").path("jobs")
        .queryParam("finishedTimeBegin", String.valueOf(now))
        .queryParam("finishedTimeEnd", String.valueOf(40000))
        .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
    assertEquals(Status.BAD_REQUEST, response.getClientResponseStatus());
    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
    JSONObject msg = response.getEntity(JSONObject.class);
    JSONObject exception = msg.getJSONObject("RemoteException");
    assertEquals("incorrect number of elements", 3, exception.length());
    String message = exception.getString("message");
    String type = exception.getString("exception");
    String classname = exception.getString("javaClassName");
    WebServicesTestUtils
        .checkStringMatch(
            "exception message",
            "java.lang.Exception: finishedTimeEnd must be greater than finishedTimeBegin",
            message);
    WebServicesTestUtils.checkStringMatch("exception type",
        "BadRequestException", type);
    WebServicesTestUtils.checkStringMatch("exception classname",
        "org.apache.hadoop.yarn.webapp.BadRequestException", classname);
  }

  @Test
  public void testJobsQueryFinishTimeInvalidformat() throws JSONException,
      Exception {
    WebResource r = resource();
    ClientResponse response = r.path("ws").path("v1").path("history")
        .path("mapreduce").path("jobs").queryParam("finishedTimeBegin", "efsd")
        .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
    assertEquals(Status.BAD_REQUEST, response.getClientResponseStatus());
    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
    JSONObject msg = response.getEntity(JSONObject.class);
    JSONObject exception = msg.getJSONObject("RemoteException");
    assertEquals("incorrect number of elements", 3, exception.length());
    String message = exception.getString("message");
    String type = exception.getString("exception");
    String classname = exception.getString("javaClassName");
    WebServicesTestUtils
        .checkStringMatch(
            "exception message",
            "java.lang.Exception: Invalid number format: For input string: \"efsd\"",
            message);
    WebServicesTestUtils.checkStringMatch("exception type",
        "BadRequestException", type);
    WebServicesTestUtils.checkStringMatch("exception classname",
        "org.apache.hadoop.yarn.webapp.BadRequestException", classname);
  }

  @Test
  public void testJobsQueryFinishTimeEndInvalidformat() throws JSONException,
      Exception {
    WebResource r = resource();
    ClientResponse response = r.path("ws").path("v1").path("history")
        .path("mapreduce").path("jobs").queryParam("finishedTimeEnd", "efsd")
        .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
    assertEquals(Status.BAD_REQUEST, response.getClientResponseStatus());
    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
    JSONObject msg = response.getEntity(JSONObject.class);
    JSONObject exception = msg.getJSONObject("RemoteException");
    assertEquals("incorrect number of elements", 3, exception.length());
    String message = exception.getString("message");
    String type = exception.getString("exception");
    String classname = exception.getString("javaClassName");
    WebServicesTestUtils
        .checkStringMatch(
            "exception message",
            "java.lang.Exception: Invalid number format: For input string: \"efsd\"",
            message);
    WebServicesTestUtils.checkStringMatch("exception type",
        "BadRequestException", type);
    WebServicesTestUtils.checkStringMatch("exception classname",
        "org.apache.hadoop.yarn.webapp.BadRequestException", classname);
  }

  @Test
  public void testJobsQueryFinishTimeBegin() throws JSONException, Exception {
    WebResource r = resource();
    // the mockJobs finish time is the current time + some random amount
    Long now = System.currentTimeMillis();
    ClientResponse response = r.path("ws").path("v1").path("history")
        .path("mapreduce").path("jobs")
        .queryParam("finishedTimeBegin", String.valueOf(now))
        .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
    JSONObject json = response.getEntity(JSONObject.class);
    assertEquals("incorrect number of elements", 1, json.length());
    JSONObject jobs = json.getJSONObject("jobs");
    JSONArray arr = jobs.getJSONArray("job");
    assertEquals("incorrect number of elements", 3, arr.length());
  }

  @Test
  public void testJobsQueryFinishTimeEnd() throws JSONException, Exception {
    WebResource r = resource();
    // the mockJobs finish time is the current time + some random amount
    Long now = System.currentTimeMillis();
    ClientResponse response = r.path("ws").path("v1").path("history")
        .path("mapreduce").path("jobs")
        .queryParam("finishedTimeEnd", String.valueOf(now))
        .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
    JSONObject json = response.getEntity(JSONObject.class);
    assertEquals("incorrect number of elements", 1, json.length());
    assertEquals("jobs is not null", JSONObject.NULL, json.get("jobs"));
  }

  @Test
  public void testJobsQueryFinishTimeBeginEnd() throws JSONException, Exception {
    WebResource r = resource();

    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    int size = jobsMap.size();
    // figure out the mid end time - we expect at least 3 jobs
    ArrayList<Long> finishTime = new ArrayList<Long>(size);
    for (Map.Entry<JobId, Job> entry : jobsMap.entrySet()) {
      finishTime.add(entry.getValue().getReport().getFinishTime());
    }
    Collections.sort(finishTime);

    assertTrue("Error: we must have at least 3 jobs", size >= 3);
    long midFinishTime = finishTime.get(size - 2);

    ClientResponse response = r.path("ws").path("v1").path("history")
        .path("mapreduce").path("jobs")
        .queryParam("finishedTimeBegin", String.valueOf(40000))
        .queryParam("finishedTimeEnd", String.valueOf(midFinishTime))
        .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
    JSONObject json = response.getEntity(JSONObject.class);
    assertEquals("incorrect number of elements", 1, json.length());
    JSONObject jobs = json.getJSONObject("jobs");
    JSONArray arr = jobs.getJSONArray("job");
    assertEquals("incorrect number of elements", size - 1, arr.length());
  }

}
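All of the filters exercised above can be combined on a single request. A minimal sketch, with a hypothetical host and user, of a combined user/limit/started-time query (same Jersey client setup and classpath assumptions as the earlier sketch):

import javax.ws.rs.core.MediaType;
import com.sun.jersey.api.client.Client;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.WebResource;

public class JobsQuerySketch {
  public static void main(String[] args) {
    Client client = Client.create();
    // Hypothetical history server address.
    WebResource r = client.resource("http://historyserver:19888");
    long dayAgo = System.currentTimeMillis() - 24L * 60 * 60 * 1000;
    ClientResponse response = r.path("ws").path("v1").path("history")
        .path("mapreduce").path("jobs")
        .queryParam("user", "alice") // hypothetical user
        .queryParam("limit", "10")
        .queryParam("startedTimeBegin", String.valueOf(dayAgo))
        .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
    // As the tests above show, an empty result comes back as a null "jobs"
    // element, and invalid values (negative times, bad number formats)
    // produce a BAD_REQUEST RemoteException payload.
    System.out.println(response.getEntity(String.class));
  }
}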
@@ -0,0 +1,835 @@
|
||||||
|
/**
|
||||||
|
* Licensed to the Apache Software Foundation (ASF) under one
|
||||||
|
* or more contributor license agreements. See the NOTICE file
|
||||||
|
* distributed with this work for additional information
|
||||||
|
* regarding copyright ownership. The ASF licenses this file
|
||||||
|
* to you under the Apache License, Version 2.0 (the
|
||||||
|
* "License"); you may not use this file except in compliance
|
||||||
|
* with the License. You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing, software
|
||||||
|
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
* See the License for the specific language governing permissions and
|
||||||
|
* limitations under the License.
|
||||||
|
*/
|
||||||
|
|
||||||
|
package org.apache.hadoop.mapreduce.v2.hs.webapp;
|
||||||
|
|
||||||
|
import static org.junit.Assert.assertEquals;
|
||||||
|
import static org.junit.Assert.assertNotNull;
|
||||||
|
import static org.junit.Assert.assertTrue;
|
||||||
|
import static org.junit.Assert.fail;
|
||||||
|
import static org.mockito.Mockito.mock;
|
||||||
|
import static org.mockito.Mockito.when;
|
||||||
|
|
||||||
|
import java.io.StringReader;
|
||||||
|
import java.util.Map;
|
||||||
|
|
||||||
|
import javax.ws.rs.core.MediaType;
|
||||||
|
import javax.xml.parsers.DocumentBuilder;
|
||||||
|
import javax.xml.parsers.DocumentBuilderFactory;
|
||||||
|
|
||||||
|
import org.apache.hadoop.conf.Configuration;
|
||||||
|
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
|
||||||
|
import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
|
||||||
|
import org.apache.hadoop.mapreduce.v2.api.records.TaskReport;
|
||||||
|
import org.apache.hadoop.mapreduce.v2.app.AppContext;
|
||||||
|
import org.apache.hadoop.mapreduce.v2.app.MockJobs;
|
||||||
|
import org.apache.hadoop.mapreduce.v2.app.job.Job;
|
||||||
|
import org.apache.hadoop.mapreduce.v2.app.job.Task;
|
||||||
|
import org.apache.hadoop.mapreduce.v2.util.MRApps;
|
||||||
|
import org.apache.hadoop.yarn.Clock;
|
||||||
|
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
|
||||||
|
import org.apache.hadoop.yarn.api.records.ApplicationId;
|
||||||
|
import org.apache.hadoop.yarn.event.EventHandler;
|
||||||
|
import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
|
||||||
|
import org.apache.hadoop.yarn.webapp.WebApp;
|
||||||
|
import org.apache.hadoop.yarn.webapp.WebServicesTestUtils;
|
||||||
|
import org.codehaus.jettison.json.JSONArray;
|
||||||
|
import org.codehaus.jettison.json.JSONException;
|
||||||
|
import org.codehaus.jettison.json.JSONObject;
|
||||||
|
import org.junit.Before;
|
||||||
|
import org.junit.Test;
|
||||||
|
import org.w3c.dom.Document;
|
||||||
|
import org.w3c.dom.Element;
|
||||||
|
import org.w3c.dom.NodeList;
|
||||||
|
import org.xml.sax.InputSource;
|
||||||
|
|
||||||
|
import com.google.inject.Guice;
|
||||||
|
import com.google.inject.Injector;
|
||||||
|
import com.google.inject.servlet.GuiceServletContextListener;
|
||||||
|
import com.google.inject.servlet.ServletModule;
|
||||||
|
import com.sun.jersey.api.client.ClientResponse;
|
||||||
|
import com.sun.jersey.api.client.ClientResponse.Status;
|
||||||
|
import com.sun.jersey.api.client.UniformInterfaceException;
|
||||||
|
import com.sun.jersey.api.client.WebResource;
|
||||||
|
import com.sun.jersey.guice.spi.container.servlet.GuiceContainer;
|
||||||
|
import com.sun.jersey.test.framework.JerseyTest;
|
||||||
|
import com.sun.jersey.test.framework.WebAppDescriptor;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Test the history server Rest API for getting tasks, a specific task,
|
||||||
|
* and task counters.
|
||||||
|
*
|
||||||
|
* /ws/v1/history/mapreduce/jobs/{jobid}/tasks
|
||||||
|
* /ws/v1/history/mapreduce/jobs/{jobid}/tasks/{taskid}
|
||||||
|
* /ws/v1/history/mapreduce/jobs/{jobid}/tasks/{taskid}/counters
|
||||||
|
*/
|
||||||
|
public class TestHsWebServicesTasks extends JerseyTest {
|
||||||
|
|
||||||
|
private static Configuration conf = new Configuration();
|
||||||
|
private static TestAppContext appContext;
|
||||||
|
private static HsWebApp webApp;
|
||||||
|
|
||||||
|
static class TestAppContext implements AppContext {
|
||||||
|
final ApplicationAttemptId appAttemptID;
|
||||||
|
final ApplicationId appID;
|
||||||
|
final String user = MockJobs.newUserName();
|
||||||
|
final Map<JobId, Job> jobs;
|
||||||
|
final long startTime = System.currentTimeMillis();
|
||||||
|
|
||||||
|
TestAppContext(int appid, int numJobs, int numTasks, int numAttempts) {
|
||||||
|
appID = MockJobs.newAppID(appid);
|
||||||
|
appAttemptID = MockJobs.newAppAttemptID(appID, 0);
|
||||||
|
jobs = MockJobs.newJobs(appID, numJobs, numTasks, numAttempts);
|
||||||
|
}
|
||||||
|
|
||||||
|
TestAppContext() {
|
||||||
|
this(0, 1, 2, 1);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public ApplicationAttemptId getApplicationAttemptId() {
|
||||||
|
return appAttemptID;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public ApplicationId getApplicationID() {
|
||||||
|
return appID;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public CharSequence getUser() {
|
||||||
|
return user;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public Job getJob(JobId jobID) {
|
||||||
|
return jobs.get(jobID);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public Map<JobId, Job> getAllJobs() {
|
||||||
|
return jobs; // OK
|
||||||
|
}
|
||||||
|
|
||||||
|
@SuppressWarnings("rawtypes")
|
||||||
|
@Override
|
||||||
|
public EventHandler getEventHandler() {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public Clock getClock() {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public String getApplicationName() {
|
||||||
|
return "TestApp";
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public long getStartTime() {
|
||||||
|
return startTime;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private Injector injector = Guice.createInjector(new ServletModule() {
|
||||||
|
@Override
|
||||||
|
protected void configureServlets() {
|
||||||
|
|
||||||
|
appContext = new TestAppContext();
|
||||||
|
webApp = mock(HsWebApp.class);
|
||||||
|
when(webApp.name()).thenReturn("hsmockwebapp");
|
||||||
|
|
||||||
|
bind(JAXBContextResolver.class);
|
||||||
|
bind(HsWebServices.class);
|
||||||
|
bind(GenericExceptionHandler.class);
|
||||||
|
bind(WebApp.class).toInstance(webApp);
|
||||||
|
bind(AppContext.class).toInstance(appContext);
|
||||||
|
bind(Configuration.class).toInstance(conf);
|
||||||
|
|
||||||
|
serve("/*").with(GuiceContainer.class);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
public class GuiceServletConfig extends GuiceServletContextListener {
|
||||||
|
|
||||||
|
@Override
|
||||||
|
protected Injector getInjector() {
|
||||||
|
return injector;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Before
|
||||||
|
@Override
|
||||||
|
public void setUp() throws Exception {
|
||||||
|
super.setUp();
|
||||||
|
}
|
||||||
|
|
||||||
|
public TestHsWebServicesTasks() {
|
||||||
|
super(new WebAppDescriptor.Builder(
|
||||||
|
"org.apache.hadoop.mapreduce.v2.hs.webapp")
|
||||||
|
.contextListenerClass(GuiceServletConfig.class)
|
||||||
|
.filterClass(com.google.inject.servlet.GuiceFilter.class)
|
||||||
|
.contextPath("jersey-guice-filter").servletPath("/").build());
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void testTasks() throws JSONException, Exception {
|
||||||
|
WebResource r = resource();
|
||||||
|
Map<JobId, Job> jobsMap = appContext.getAllJobs();
|
||||||
|
for (JobId id : jobsMap.keySet()) {
|
||||||
|
String jobId = MRApps.toString(id);
|
||||||
|
ClientResponse response = r.path("ws").path("v1").path("history")
|
||||||
|
.path("mapreduce").path("jobs").path(jobId).path("tasks")
|
||||||
|
.accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
|
||||||
|
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
|
||||||
|
JSONObject json = response.getEntity(JSONObject.class);
|
||||||
|
assertEquals("incorrect number of elements", 1, json.length());
|
||||||
|
JSONObject tasks = json.getJSONObject("tasks");
|
||||||
|
JSONArray arr = tasks.getJSONArray("task");
|
||||||
|
assertEquals("incorrect number of elements", 2, arr.length());
|
||||||
|
|
||||||
|
verifyHsTask(arr, jobsMap.get(id), null);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void testTasksDefault() throws JSONException, Exception {
|
||||||
|
WebResource r = resource();
|
||||||
|
Map<JobId, Job> jobsMap = appContext.getAllJobs();
|
||||||
|
for (JobId id : jobsMap.keySet()) {
|
||||||
|
String jobId = MRApps.toString(id);
|
||||||
|
ClientResponse response = r.path("ws").path("v1").path("history")
|
||||||
|
.path("mapreduce").path("jobs").path(jobId).path("tasks")
|
||||||
|
.get(ClientResponse.class);
|
||||||
|
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
|
||||||
|
JSONObject json = response.getEntity(JSONObject.class);
|
||||||
|
assertEquals("incorrect number of elements", 1, json.length());
|
||||||
|
JSONObject tasks = json.getJSONObject("tasks");
|
||||||
|
JSONArray arr = tasks.getJSONArray("task");
|
||||||
|
assertEquals("incorrect number of elements", 2, arr.length());
|
||||||
|
|
||||||
|
verifyHsTask(arr, jobsMap.get(id), null);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void testTasksSlash() throws JSONException, Exception {
|
||||||
|
WebResource r = resource();
|
||||||
|
Map<JobId, Job> jobsMap = appContext.getAllJobs();
|
||||||
|
for (JobId id : jobsMap.keySet()) {
|
||||||
|
String jobId = MRApps.toString(id);
|
||||||
|
ClientResponse response = r.path("ws").path("v1").path("history")
|
||||||
|
.path("mapreduce").path("jobs").path(jobId).path("tasks/")
|
||||||
|
.accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
|
||||||
|
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
|
||||||
|
JSONObject json = response.getEntity(JSONObject.class);
|
||||||
|
assertEquals("incorrect number of elements", 1, json.length());
|
||||||
|
JSONObject tasks = json.getJSONObject("tasks");
|
||||||
|
JSONArray arr = tasks.getJSONArray("task");
|
||||||
|
assertEquals("incorrect number of elements", 2, arr.length());
|
||||||
|
|
||||||
|
verifyHsTask(arr, jobsMap.get(id), null);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void testTasksXML() throws JSONException, Exception {
|
||||||
|
|
||||||
|
WebResource r = resource();
|
||||||
|
Map<JobId, Job> jobsMap = appContext.getAllJobs();
|
||||||
|
for (JobId id : jobsMap.keySet()) {
|
||||||
|
String jobId = MRApps.toString(id);
|
||||||
|
ClientResponse response = r.path("ws").path("v1").path("history")
|
||||||
|
.path("mapreduce").path("jobs").path(jobId).path("tasks")
|
||||||
|
.accept(MediaType.APPLICATION_XML).get(ClientResponse.class);
|
||||||
|
assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
|
||||||
|
String xml = response.getEntity(String.class);
|
||||||
|
DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
|
||||||
|
DocumentBuilder db = dbf.newDocumentBuilder();
|
||||||
|
InputSource is = new InputSource();
|
||||||
|
is.setCharacterStream(new StringReader(xml));
|
||||||
|
Document dom = db.parse(is);
|
||||||
|
NodeList tasks = dom.getElementsByTagName("tasks");
|
||||||
|
assertEquals("incorrect number of elements", 1, tasks.getLength());
|
||||||
|
NodeList task = dom.getElementsByTagName("task");
|
||||||
|
verifyHsTaskXML(task, jobsMap.get(id));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void testTasksQueryMap() throws JSONException, Exception {
|
||||||
|
WebResource r = resource();
|
||||||
|
Map<JobId, Job> jobsMap = appContext.getAllJobs();
|
||||||
|
for (JobId id : jobsMap.keySet()) {
|
||||||
|
String jobId = MRApps.toString(id);
|
||||||
|
String type = "m";
|
||||||
|
ClientResponse response = r.path("ws").path("v1").path("history")
|
||||||
|
.path("mapreduce").path("jobs").path(jobId).path("tasks")
|
||||||
|
.queryParam("type", type).accept(MediaType.APPLICATION_JSON)
|
||||||
|
.get(ClientResponse.class);
|
||||||
|
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
|
||||||
|
JSONObject json = response.getEntity(JSONObject.class);
|
||||||
|
assertEquals("incorrect number of elements", 1, json.length());
|
||||||
|
JSONObject tasks = json.getJSONObject("tasks");
|
||||||
|
JSONArray arr = tasks.getJSONArray("task");
|
||||||
|
assertEquals("incorrect number of elements", 1, arr.length());
|
||||||
|
verifyHsTask(arr, jobsMap.get(id), type);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void testTasksQueryReduce() throws JSONException, Exception {
|
||||||
|
WebResource r = resource();
|
||||||
|
Map<JobId, Job> jobsMap = appContext.getAllJobs();
|
||||||
|
for (JobId id : jobsMap.keySet()) {
|
||||||
|
String jobId = MRApps.toString(id);
|
||||||
|
String type = "r";
|
||||||
|
ClientResponse response = r.path("ws").path("v1").path("history")
|
||||||
|
.path("mapreduce").path("jobs").path(jobId).path("tasks")
|
||||||
|
.queryParam("type", type).accept(MediaType.APPLICATION_JSON)
|
||||||
|
.get(ClientResponse.class);
|
||||||
|
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
|
||||||
|
JSONObject json = response.getEntity(JSONObject.class);
|
||||||
|
assertEquals("incorrect number of elements", 1, json.length());
|
||||||
|
JSONObject tasks = json.getJSONObject("tasks");
|
||||||
|
JSONArray arr = tasks.getJSONArray("task");
|
||||||
|
assertEquals("incorrect number of elements", 1, arr.length());
|
||||||
|
verifyHsTask(arr, jobsMap.get(id), type);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void testTasksQueryInvalid() throws JSONException, Exception {
|
||||||
|
WebResource r = resource();
|
||||||
|
Map<JobId, Job> jobsMap = appContext.getAllJobs();
|
||||||
|
for (JobId id : jobsMap.keySet()) {
|
||||||
|
String jobId = MRApps.toString(id);
|
||||||
|
// tasktype must be exactly either "m" or "r"
|
||||||
|
String tasktype = "reduce";
|
||||||
|
|
||||||
|
try {
|
||||||
|
r.path("ws").path("v1").path("history").path("mapreduce").path("jobs")
|
||||||
|
.path(jobId).path("tasks").queryParam("type", tasktype)
|
||||||
|
.accept(MediaType.APPLICATION_JSON).get(JSONObject.class);
|
||||||
|
fail("should have thrown exception on invalid uri");
|
||||||
|
} catch (UniformInterfaceException ue) {
|
||||||
|
ClientResponse response = ue.getResponse();
|
||||||
|
assertEquals(Status.BAD_REQUEST, response.getClientResponseStatus());
|
||||||
|
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
|
||||||
|
JSONObject msg = response.getEntity(JSONObject.class);
|
||||||
|
JSONObject exception = msg.getJSONObject("RemoteException");
|
||||||
|
assertEquals("incorrect number of elements", 3, exception.length());
|
||||||
|
String message = exception.getString("message");
|
||||||
|
String type = exception.getString("exception");
|
||||||
|
String classname = exception.getString("javaClassName");
|
||||||
|
WebServicesTestUtils.checkStringMatch("exception message",
|
||||||
|
"java.lang.Exception: tasktype must be either m or r", message);
|
||||||
|
WebServicesTestUtils.checkStringMatch("exception type",
|
||||||
|
"BadRequestException", type);
|
||||||
|
WebServicesTestUtils.checkStringMatch("exception classname",
|
||||||
|
"org.apache.hadoop.yarn.webapp.BadRequestException", classname);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
  @Test
  public void testTaskId() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      for (Task task : jobsMap.get(id).getTasks().values()) {

        String tid = MRApps.toString(task.getID());
        ClientResponse response = r.path("ws").path("v1").path("history")
            .path("mapreduce").path("jobs").path(jobId).path("tasks").path(tid)
            .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
        assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
        JSONObject json = response.getEntity(JSONObject.class);
        assertEquals("incorrect number of elements", 1, json.length());
        JSONObject info = json.getJSONObject("task");
        verifyHsSingleTask(info, task);
      }
    }
  }

  @Test
  public void testTaskIdSlash() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      for (Task task : jobsMap.get(id).getTasks().values()) {

        String tid = MRApps.toString(task.getID());
        ClientResponse response = r.path("ws").path("v1").path("history")
            .path("mapreduce").path("jobs").path(jobId).path("tasks")
            .path(tid + "/").accept(MediaType.APPLICATION_JSON)
            .get(ClientResponse.class);
        assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
        JSONObject json = response.getEntity(JSONObject.class);
        assertEquals("incorrect number of elements", 1, json.length());
        JSONObject info = json.getJSONObject("task");
        verifyHsSingleTask(info, task);
      }
    }
  }

  @Test
  public void testTaskIdDefault() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      for (Task task : jobsMap.get(id).getTasks().values()) {

        String tid = MRApps.toString(task.getID());
        ClientResponse response = r.path("ws").path("v1").path("history")
            .path("mapreduce").path("jobs").path(jobId).path("tasks").path(tid)
            .get(ClientResponse.class);
        assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
        JSONObject json = response.getEntity(JSONObject.class);
        assertEquals("incorrect number of elements", 1, json.length());
        JSONObject info = json.getJSONObject("task");
        verifyHsSingleTask(info, task);
      }
    }
  }

  @Test
  public void testTaskIdBogus() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      String tid = "bogustaskid";
      try {
        r.path("ws").path("v1").path("history").path("mapreduce").path("jobs")
            .path(jobId).path("tasks").path(tid).get(JSONObject.class);
        fail("should have thrown exception on invalid uri");
      } catch (UniformInterfaceException ue) {
        ClientResponse response = ue.getResponse();
        assertEquals(Status.NOT_FOUND, response.getClientResponseStatus());
        assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
        JSONObject msg = response.getEntity(JSONObject.class);
        JSONObject exception = msg.getJSONObject("RemoteException");
        assertEquals("incorrect number of elements", 3, exception.length());
        String message = exception.getString("message");
        String type = exception.getString("exception");
        String classname = exception.getString("javaClassName");
        WebServicesTestUtils.checkStringMatch("exception message",
            "java.lang.Exception: Error parsing task ID: bogustaskid", message);
        WebServicesTestUtils.checkStringMatch("exception type",
            "NotFoundException", type);
        WebServicesTestUtils.checkStringMatch("exception classname",
            "org.apache.hadoop.yarn.webapp.NotFoundException", classname);
      }
    }
  }

  @Test
  public void testTaskIdNonExist() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      String tid = "task_1234_0_0_m_0";
      try {
        r.path("ws").path("v1").path("history").path("mapreduce").path("jobs")
            .path(jobId).path("tasks").path(tid).get(JSONObject.class);
        fail("should have thrown exception on invalid uri");
      } catch (UniformInterfaceException ue) {
        ClientResponse response = ue.getResponse();
        assertEquals(Status.NOT_FOUND, response.getClientResponseStatus());
        assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
        JSONObject msg = response.getEntity(JSONObject.class);
        JSONObject exception = msg.getJSONObject("RemoteException");
        assertEquals("incorrect number of elements", 3, exception.length());
        String message = exception.getString("message");
        String type = exception.getString("exception");
        String classname = exception.getString("javaClassName");
        WebServicesTestUtils.checkStringMatch("exception message",
            "java.lang.Exception: task not found with id task_1234_0_0_m_0",
            message);
        WebServicesTestUtils.checkStringMatch("exception type",
            "NotFoundException", type);
        WebServicesTestUtils.checkStringMatch("exception classname",
            "org.apache.hadoop.yarn.webapp.NotFoundException", classname);
      }
    }
  }

  @Test
  public void testTaskIdInvalid() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      String tid = "task_1234_0_0_d_0";
      try {
        r.path("ws").path("v1").path("history").path("mapreduce").path("jobs")
            .path(jobId).path("tasks").path(tid).get(JSONObject.class);
        fail("should have thrown exception on invalid uri");
      } catch (UniformInterfaceException ue) {
        ClientResponse response = ue.getResponse();
        assertEquals(Status.NOT_FOUND, response.getClientResponseStatus());
        assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
        JSONObject msg = response.getEntity(JSONObject.class);
        JSONObject exception = msg.getJSONObject("RemoteException");
        assertEquals("incorrect number of elements", 3, exception.length());
        String message = exception.getString("message");
        String type = exception.getString("exception");
        String classname = exception.getString("javaClassName");
        WebServicesTestUtils.checkStringMatch("exception message",
            "java.lang.Exception: Unknown task symbol: d", message);
        WebServicesTestUtils.checkStringMatch("exception type",
            "NotFoundException", type);
        WebServicesTestUtils.checkStringMatch("exception classname",
            "org.apache.hadoop.yarn.webapp.NotFoundException", classname);
      }
    }
  }

  @Test
  public void testTaskIdInvalid2() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      String tid = "task_1234_0_m_0";
      try {
        r.path("ws").path("v1").path("history").path("mapreduce").path("jobs")
            .path(jobId).path("tasks").path(tid).get(JSONObject.class);
        fail("should have thrown exception on invalid uri");
      } catch (UniformInterfaceException ue) {
        ClientResponse response = ue.getResponse();
        assertEquals(Status.NOT_FOUND, response.getClientResponseStatus());
        assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
        JSONObject msg = response.getEntity(JSONObject.class);
        JSONObject exception = msg.getJSONObject("RemoteException");
        assertEquals("incorrect number of elements", 3, exception.length());
        String message = exception.getString("message");
        String type = exception.getString("exception");
        String classname = exception.getString("javaClassName");
        WebServicesTestUtils.checkStringMatch("exception message",
            "java.lang.Exception: For input string: \"m\"", message);
        WebServicesTestUtils.checkStringMatch("exception type",
            "NotFoundException", type);
        WebServicesTestUtils.checkStringMatch("exception classname",
            "org.apache.hadoop.yarn.webapp.NotFoundException", classname);
      }
    }
  }

  @Test
  public void testTaskIdInvalid3() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      String tid = "task_1234_0_0_m";
      try {
        r.path("ws").path("v1").path("history").path("mapreduce").path("jobs")
            .path(jobId).path("tasks").path(tid).get(JSONObject.class);
        fail("should have thrown exception on invalid uri");
      } catch (UniformInterfaceException ue) {
        ClientResponse response = ue.getResponse();
        assertEquals(Status.NOT_FOUND, response.getClientResponseStatus());
        assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
        JSONObject msg = response.getEntity(JSONObject.class);
        JSONObject exception = msg.getJSONObject("RemoteException");
        assertEquals("incorrect number of elements", 3, exception.length());
        String message = exception.getString("message");
        String type = exception.getString("exception");
        String classname = exception.getString("javaClassName");
        WebServicesTestUtils.checkStringMatch("exception message",
            "java.lang.Exception: Error parsing task ID: task_1234_0_0_m",
            message);
        WebServicesTestUtils.checkStringMatch("exception type",
            "NotFoundException", type);
        WebServicesTestUtils.checkStringMatch("exception classname",
            "org.apache.hadoop.yarn.webapp.NotFoundException", classname);
      }
    }
  }

  @Test
  public void testTaskIdXML() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      for (Task task : jobsMap.get(id).getTasks().values()) {

        String tid = MRApps.toString(task.getID());
        ClientResponse response = r.path("ws").path("v1").path("history")
            .path("mapreduce").path("jobs").path(jobId).path("tasks").path(tid)
            .accept(MediaType.APPLICATION_XML).get(ClientResponse.class);

        assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
        String xml = response.getEntity(String.class);
        DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
        DocumentBuilder db = dbf.newDocumentBuilder();
        InputSource is = new InputSource();
        is.setCharacterStream(new StringReader(xml));
        Document dom = db.parse(is);
        NodeList nodes = dom.getElementsByTagName("task");
        for (int i = 0; i < nodes.getLength(); i++) {
          Element element = (Element) nodes.item(i);
          verifyHsSingleTaskXML(element, task);
        }
      }
    }
  }

  public void verifyHsSingleTask(JSONObject info, Task task)
      throws JSONException {
    assertEquals("incorrect number of elements", 8, info.length());

    verifyTaskGeneric(task, info.getString("id"), info.getString("state"),
        info.getString("type"), info.getString("successfulAttempt"),
        info.getLong("startTime"), info.getLong("finishTime"),
        info.getLong("elapsedTime"), (float) info.getDouble("progress"));
  }

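  // Checks that every task of the requested type in the job shows up, and
  // verifies, in the JSON array returned by the tasks web service.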
  public void verifyHsTask(JSONArray arr, Job job, String type)
      throws JSONException {
    for (Task task : job.getTasks().values()) {
      TaskId id = task.getID();
      String tid = MRApps.toString(id);
      Boolean found = false;
      if (type != null && task.getType() == MRApps.taskType(type)) {

        for (int i = 0; i < arr.length(); i++) {
          JSONObject info = arr.getJSONObject(i);
          if (tid.matches(info.getString("id"))) {
            found = true;
            verifyHsSingleTask(info, task);
          }
        }
        assertTrue("task with id: " + tid + " not in web service output", found);
      }
    }
  }

  public void verifyTaskGeneric(Task task, String id, String state,
      String type, String successfulAttempt, long startTime, long finishTime,
      long elapsedTime, float progress) {

    TaskId taskid = task.getID();
    String tid = MRApps.toString(taskid);
    TaskReport report = task.getReport();

    WebServicesTestUtils.checkStringMatch("id", tid, id);
    WebServicesTestUtils.checkStringMatch("type", task.getType().toString(),
        type);
    WebServicesTestUtils.checkStringMatch("state", report.getTaskState()
        .toString(), state);
    // not easily checked without duplicating logic, just make sure it's here
    assertNotNull("successfulAttempt null", successfulAttempt);
    assertEquals("startTime wrong", report.getStartTime(), startTime);
    assertEquals("finishTime wrong", report.getFinishTime(), finishTime);
    assertEquals("elapsedTime wrong", finishTime - startTime, elapsedTime);
    assertEquals("progress wrong", report.getProgress() * 100, progress, 1e-3f);
  }

  public void verifyHsSingleTaskXML(Element element, Task task) {
    verifyTaskGeneric(task, WebServicesTestUtils.getXmlString(element, "id"),
        WebServicesTestUtils.getXmlString(element, "state"),
        WebServicesTestUtils.getXmlString(element, "type"),
        WebServicesTestUtils.getXmlString(element, "successfulAttempt"),
        WebServicesTestUtils.getXmlLong(element, "startTime"),
        WebServicesTestUtils.getXmlLong(element, "finishTime"),
        WebServicesTestUtils.getXmlLong(element, "elapsedTime"),
        WebServicesTestUtils.getXmlFloat(element, "progress"));
  }

  public void verifyHsTaskXML(NodeList nodes, Job job) {

    assertEquals("incorrect number of elements", 2, nodes.getLength());

    for (Task task : job.getTasks().values()) {
      TaskId id = task.getID();
      String tid = MRApps.toString(id);
      Boolean found = false;
      for (int i = 0; i < nodes.getLength(); i++) {
        Element element = (Element) nodes.item(i);

        if (tid.matches(WebServicesTestUtils.getXmlString(element, "id"))) {
          found = true;
          verifyHsSingleTaskXML(element, task);
        }
      }
      assertTrue("task with id: " + tid + " not in web service output", found);
    }
  }

  @Test
  public void testTaskIdCounters() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      for (Task task : jobsMap.get(id).getTasks().values()) {

        String tid = MRApps.toString(task.getID());
        ClientResponse response = r.path("ws").path("v1").path("history")
            .path("mapreduce").path("jobs").path(jobId).path("tasks").path(tid)
            .path("counters").accept(MediaType.APPLICATION_JSON)
            .get(ClientResponse.class);
        assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
        JSONObject json = response.getEntity(JSONObject.class);
        assertEquals("incorrect number of elements", 1, json.length());
        JSONObject info = json.getJSONObject("jobTaskCounters");
        verifyHsJobTaskCounters(info, task);
      }
    }
  }

  @Test
  public void testTaskIdCountersSlash() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      for (Task task : jobsMap.get(id).getTasks().values()) {

        String tid = MRApps.toString(task.getID());
        ClientResponse response = r.path("ws").path("v1").path("history")
            .path("mapreduce").path("jobs").path(jobId).path("tasks").path(tid)
            .path("counters/").accept(MediaType.APPLICATION_JSON)
            .get(ClientResponse.class);
        assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
        JSONObject json = response.getEntity(JSONObject.class);
        assertEquals("incorrect number of elements", 1, json.length());
        JSONObject info = json.getJSONObject("jobTaskCounters");
        verifyHsJobTaskCounters(info, task);
      }
    }
  }

  @Test
  public void testTaskIdCountersDefault() throws JSONException, Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      for (Task task : jobsMap.get(id).getTasks().values()) {

        String tid = MRApps.toString(task.getID());
        ClientResponse response = r.path("ws").path("v1").path("history")
            .path("mapreduce").path("jobs").path(jobId).path("tasks").path(tid)
            .path("counters").get(ClientResponse.class);
        assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
        JSONObject json = response.getEntity(JSONObject.class);
        assertEquals("incorrect number of elements", 1, json.length());
        JSONObject info = json.getJSONObject("jobTaskCounters");
        verifyHsJobTaskCounters(info, task);
      }
    }
  }

  @Test
  public void testJobTaskCountersXML() throws Exception {
    WebResource r = resource();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      for (Task task : jobsMap.get(id).getTasks().values()) {

        String tid = MRApps.toString(task.getID());
        ClientResponse response = r.path("ws").path("v1").path("history")
            .path("mapreduce").path("jobs").path(jobId).path("tasks").path(tid)
            .path("counters").accept(MediaType.APPLICATION_XML)
            .get(ClientResponse.class);
        assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
        String xml = response.getEntity(String.class);
        DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
        DocumentBuilder db = dbf.newDocumentBuilder();
        InputSource is = new InputSource();
        is.setCharacterStream(new StringReader(xml));
        Document dom = db.parse(is);
        NodeList info = dom.getElementsByTagName("jobTaskCounters");
        verifyHsTaskCountersXML(info, task);
      }
    }
  }

  public void verifyHsJobTaskCounters(JSONObject info, Task task)
      throws JSONException {

    assertEquals("incorrect number of elements", 2, info.length());

    WebServicesTestUtils.checkStringMatch("id", MRApps.toString(task.getID()),
        info.getString("id"));
    // just do simple verification that the fields are present - not that
    // the data in the fields is correct
    JSONArray counterGroups = info.getJSONArray("taskCounterGroup");
    for (int i = 0; i < counterGroups.length(); i++) {
      JSONObject counterGroup = counterGroups.getJSONObject(i);
      String name = counterGroup.getString("counterGroupName");
      assertTrue("name not set", (name != null && !name.isEmpty()));
      JSONArray counters = counterGroup.getJSONArray("counter");
      for (int j = 0; j < counters.length(); j++) {
        // index with j (the counter index), not i (the group index)
        JSONObject counter = counters.getJSONObject(j);
        String counterName = counter.getString("name");
        assertTrue("name not set",
            (counterName != null && !counterName.isEmpty()));
        long value = counter.getLong("value");
        assertTrue("value not >= 0", value >= 0);
      }
    }
  }

  public void verifyHsTaskCountersXML(NodeList nodes, Task task) {

    for (int i = 0; i < nodes.getLength(); i++) {

      Element element = (Element) nodes.item(i);
      WebServicesTestUtils.checkStringMatch("id",
          MRApps.toString(task.getID()),
          WebServicesTestUtils.getXmlString(element, "id"));
      // just do simple verification that the fields are present - not that
      // the data in the fields is correct
      NodeList groups = element.getElementsByTagName("taskCounterGroup");

      for (int j = 0; j < groups.getLength(); j++) {
        Element counters = (Element) groups.item(j);
        assertNotNull("should have counters in the web service info", counters);
        String name = WebServicesTestUtils.getXmlString(counters,
            "counterGroupName");
        assertTrue("name not set", (name != null && !name.isEmpty()));
        NodeList counterArr = counters.getElementsByTagName("counter");
        for (int z = 0; z < counterArr.getLength(); z++) {
          Element counter = (Element) counterArr.item(z);
          String counterName = WebServicesTestUtils.getXmlString(counter,
              "name");
          assertTrue("counter name not set",
              (counterName != null && !counterName.isEmpty()));

          long value = WebServicesTestUtils.getXmlLong(counter, "value");
          assertTrue("value not >= 0", value >= 0);

        }
      }
    }
  }

}

@@ -0,0 +1,133 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.mapreduce.v2.hs.webapp;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import java.util.List;

import org.apache.hadoop.mapreduce.v2.api.records.JobReport;
import org.apache.hadoop.mapreduce.v2.app.job.Job;
import org.apache.hadoop.mapreduce.v2.util.MRApps;
import org.apache.hadoop.yarn.webapp.WebServicesTestUtils;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;

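/**
 * Shared assertions used by the history server web services tests to check
 * the job info returned by the web services against the Job it came from.
 */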
public class VerifyJobsUtils {

  public static void verifyHsJob(JSONObject info, Job job) throws JSONException {

    // this is 23 instead of 24 because acls not being checked since
    // we are using mock job instead of CompletedJob
    assertEquals("incorrect number of elements", 23, info.length());

    // everyone access fields
    verifyHsJobGeneric(job, info.getString("id"), info.getString("user"),
        info.getString("name"), info.getString("state"),
        info.getString("queue"), info.getLong("startTime"),
        info.getLong("finishTime"), info.getInt("mapsTotal"),
        info.getInt("mapsCompleted"), info.getInt("reducesTotal"),
        info.getInt("reducesCompleted"));

    String diagnostics = "";
    if (info.has("diagnostics")) {
      diagnostics = info.getString("diagnostics");
    }

    // restricted access fields - if security and acls set
    verifyHsJobGenericSecure(job, info.getBoolean("uberized"), diagnostics,
        info.getLong("avgMapTime"), info.getLong("avgReduceTime"),
        info.getLong("avgShuffleTime"), info.getLong("avgMergeTime"),
        info.getInt("failedReduceAttempts"),
        info.getInt("killedReduceAttempts"),
        info.getInt("successfulReduceAttempts"),
        info.getInt("failedMapAttempts"), info.getInt("killedMapAttempts"),
        info.getInt("successfulMapAttempts"));

    // acls not being checked since
    // we are using mock job instead of CompletedJob
  }

  public static void verifyHsJobGeneric(Job job, String id, String user,
      String name, String state, String queue, long startTime, long finishTime,
      int mapsTotal, int mapsCompleted, int reducesTotal, int reducesCompleted) {
    JobReport report = job.getReport();

    WebServicesTestUtils.checkStringMatch("id", MRApps.toString(job.getID()),
        id);
    WebServicesTestUtils.checkStringMatch("user", job.getUserName().toString(),
        user);
    WebServicesTestUtils.checkStringMatch("name", job.getName(), name);
    WebServicesTestUtils.checkStringMatch("state", job.getState().toString(),
        state);
    WebServicesTestUtils.checkStringMatch("queue", job.getQueueName(), queue);

    assertEquals("startTime incorrect", report.getStartTime(), startTime);
    assertEquals("finishTime incorrect", report.getFinishTime(), finishTime);

    assertEquals("mapsTotal incorrect", job.getTotalMaps(), mapsTotal);
    assertEquals("mapsCompleted incorrect", job.getCompletedMaps(),
        mapsCompleted);
    assertEquals("reducesTotal incorrect", job.getTotalReduces(), reducesTotal);
    assertEquals("reducesCompleted incorrect", job.getCompletedReduces(),
        reducesCompleted);
  }

  public static void verifyHsJobGenericSecure(Job job, Boolean uberized,
      String diagnostics, long avgMapTime, long avgReduceTime,
      long avgShuffleTime, long avgMergeTime, int failedReduceAttempts,
      int killedReduceAttempts, int successfulReduceAttempts,
      int failedMapAttempts, int killedMapAttempts, int successfulMapAttempts) {

    String diagString = "";
    List<String> diagList = job.getDiagnostics();
    if (diagList != null && !diagList.isEmpty()) {
      StringBuffer b = new StringBuffer();
      for (String diag : diagList) {
        b.append(diag);
      }
      diagString = b.toString();
    }
    WebServicesTestUtils.checkStringMatch("diagnostics", diagString,
        diagnostics);

    assertEquals("isUber incorrect", job.isUber(), uberized);

    // unfortunately the following fields are all calculated in JobInfo
    // so not easily accessible without doing all the calculations again.
    // For now just make sure they are present.

    assertTrue("failedReduceAttempts not >= 0", failedReduceAttempts >= 0);
    assertTrue("killedReduceAttempts not >= 0", killedReduceAttempts >= 0);
    assertTrue("successfulReduceAttempts not >= 0",
        successfulReduceAttempts >= 0);

    assertTrue("failedMapAttempts not >= 0", failedMapAttempts >= 0);
    assertTrue("killedMapAttempts not >= 0", killedMapAttempts >= 0);
    assertTrue("successfulMapAttempts not >= 0", successfulMapAttempts >= 0);

    assertTrue("avgMapTime not >= 0", avgMapTime >= 0);
    assertTrue("avgReduceTime not >= 0", avgReduceTime >= 0);
    assertTrue("avgShuffleTime not >= 0", avgShuffleTime >= 0);
    assertTrue("avgMergeTime not >= 0", avgMergeTime >= 0);

  }

}

@@ -30,121 +30,14 @@
     <!-- Needed for generating FindBugs warnings using parent pom -->
   </properties>

-  <dependencyManagement>
-    <dependencies>
-      <!-- begin MNG-4223 workaround -->
-      <dependency>
-        <groupId>org.apache.hadoop</groupId>
-        <artifactId>hadoop-yarn</artifactId>
-        <version>${project.version}</version>
-        <type>pom</type>
-      </dependency>
-      <dependency>
-        <groupId>org.apache.hadoop</groupId>
-        <artifactId>hadoop-yarn-api</artifactId>
-        <version>${project.version}</version>
-      </dependency>
-      <dependency>
-        <groupId>org.apache.hadoop</groupId>
-        <artifactId>hadoop-yarn-server</artifactId>
-        <version>${project.version}</version>
-        <type>pom</type>
-      </dependency>
-      <dependency>
-        <groupId>org.apache.hadoop</groupId>
-        <artifactId>hadoop-yarn-server-web-proxy</artifactId>
-        <version>${project.version}</version>
-      </dependency>
-      <dependency>
-        <groupId>org.apache.hadoop</groupId>
-        <artifactId>hadoop-yarn-server-common</artifactId>
-        <version>${project.version}</version>
-      </dependency>
-      <!-- end MNG-4223 workaround -->
-      <dependency>
-        <groupId>org.apache.hadoop</groupId>
-        <!-- mr security depends on hdfs -->
-        <artifactId>hadoop-hdfs</artifactId>
-        <version>${project.version}</version>
-      </dependency>
-      <dependency>
-        <groupId>org.apache.hadoop</groupId>
-        <artifactId>hadoop-yarn-common</artifactId>
-        <version>${project.version}</version>
-      </dependency>
-      <dependency>
-        <groupId>org.apache.hadoop</groupId>
-        <artifactId>hadoop-yarn-common</artifactId>
-        <version>${project.version}</version>
-        <type>test-jar</type>
-      </dependency>
-      <dependency>
-        <groupId>org.apache.hadoop</groupId>
-        <artifactId>hadoop-yarn-server-tests</artifactId>
-        <version>${project.version}</version>
-        <type>test-jar</type>
-      </dependency>
-      <dependency>
-        <groupId>org.apache.hadoop</groupId>
-        <artifactId>hadoop-yarn-server-nodemanager</artifactId>
-        <version>${project.version}</version>
-      </dependency>
-      <dependency>
-        <groupId>org.apache.hadoop</groupId>
-        <artifactId>hadoop-yarn-server-resourcemanager</artifactId>
-        <version>${project.version}</version>
-      </dependency>
-      <dependency>
-        <groupId>org.apache.hadoop</groupId>
-        <artifactId>hadoop-yarn-server-resourcemanager</artifactId>
-        <version>${project.version}</version>
-        <type>test-jar</type>
-      </dependency>
-      <dependency>
-        <groupId>org.apache.hadoop</groupId>
-        <artifactId>hadoop-mapreduce-client-core</artifactId>
-        <version>${project.version}</version>
-      </dependency>
-      <dependency>
-        <groupId>org.apache.hadoop</groupId>
-        <artifactId>hadoop-mapreduce-client-common</artifactId>
-        <version>${project.version}</version>
-      </dependency>
-      <dependency>
-        <groupId>org.apache.hadoop</groupId>
-        <artifactId>hadoop-mapreduce-client-app</artifactId>
-        <version>${project.version}</version>
-      </dependency>
-      <dependency>
-        <groupId>org.apache.hadoop</groupId>
-        <artifactId>hadoop-mapreduce-client-app</artifactId>
-        <version>${project.version}</version>
-        <type>test-jar</type>
-      </dependency>
-      <dependency>
-        <groupId>org.apache.hadoop</groupId>
-        <artifactId>hadoop-mapreduce-client-hs</artifactId>
-        <version>${project.version}</version>
-      </dependency>
-      <dependency>
-        <groupId>org.apache.hadoop</groupId>
-        <artifactId>hadoop-mapreduce-client-shuffle</artifactId>
-        <version>${project.version}</version>
-      </dependency>
-    </dependencies>
-  </dependencyManagement>
-
   <dependencies>
     <dependency>
       <groupId>com.google.protobuf</groupId>
       <artifactId>protobuf-java</artifactId>
-      <version>2.4.0a</version>
     </dependency>
     <dependency>
       <groupId>org.apache.avro</groupId>
       <artifactId>avro</artifactId>
-      <version>1.5.3</version>
       <exclusions>
         <exclusion>
           <groupId>org.mortbay.jetty</groupId>

@@ -175,7 +68,6 @@
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-common</artifactId>
-      <version>${project.version}</version>
       <scope>provided</scope>
       <exclusions>
         <exclusion>

@@ -204,28 +96,23 @@
     <dependency>
       <groupId>org.slf4j</groupId>
       <artifactId>slf4j-api</artifactId>
-      <version>1.6.1</version>
     </dependency>
     <dependency>
       <groupId>org.slf4j</groupId>
       <artifactId>slf4j-log4j12</artifactId>
-      <version>1.6.1</version>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-annotations</artifactId>
-      <version>${project.version}</version>
     </dependency>
     <dependency>
       <groupId>org.mockito</groupId>
       <artifactId>mockito-all</artifactId>
-      <version>1.8.5</version>
       <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-common</artifactId>
-      <version>${project.version}</version>
       <type>test-jar</type>
       <scope>test</scope>
     </dependency>

@@ -233,27 +120,22 @@
       <groupId>org.apache.hadoop</groupId>
       <!-- needed for security and runtime -->
       <artifactId>hadoop-hdfs</artifactId>
-      <version>${project.version}</version>
     </dependency>
     <dependency>
       <groupId>com.google.inject.extensions</groupId>
       <artifactId>guice-servlet</artifactId>
-      <version>3.0</version>
     </dependency>
     <dependency>
       <groupId>junit</groupId>
       <artifactId>junit</artifactId>
-      <version>4.8.2</version>
     </dependency>
     <dependency>
       <groupId>org.jboss.netty</groupId>
       <artifactId>netty</artifactId>
-      <version>3.2.3.Final</version>
     </dependency>
     <dependency>
       <groupId>com.cenqua.clover</groupId>
       <artifactId>clover</artifactId>
-      <version>3.0.2</version>
     </dependency>

   </dependencies>

@@ -33,43 +33,36 @@
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-yarn-api</artifactId>
-      <version>${project.version}</version>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-yarn-common</artifactId>
-      <version>${project.version}</version>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-yarn-server-nodemanager</artifactId>
       <scope>test</scope>
-      <version>${project.version}</version>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-yarn-server-resourcemanager</artifactId>
       <scope>test</scope>
-      <version>${project.version}</version>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-yarn-server-common</artifactId>
       <scope>test</scope>
-      <version>${project.version}</version>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-mapreduce-client-core</artifactId>
       <scope>test</scope>
-      <version>${project.version}</version>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-yarn-server-tests</artifactId>
       <type>test-jar</type>
       <scope>test</scope>
-      <version>${project.version}</version>
     </dependency>
   </dependencies>


@@ -33,17 +33,6 @@
     <dependency>
       <groupId>log4j</groupId>
       <artifactId>log4j</artifactId>
-      <version>1.2.12</version>
-      <exclusions>
-        <exclusion>
-          <groupId>com.sun.jdmk</groupId>
-          <artifactId>jmxtools</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>com.sun.jmx</groupId>
-          <artifactId>jmxri</artifactId>
-        </exclusion>
-      </exclusions>
     </dependency>

     <dependency>

@@ -80,6 +80,10 @@ public class ResponseInfo implements Iterable<ResponseInfo.Item> {
     return this;
   }

+  public void clear() {
+    items.clear();
+  }
+
   @Override
   public Iterator<Item> iterator() {
     return items.iterator();

@@ -50,6 +50,9 @@ public class WebServicesTestUtils {
   public static String getXmlString(Element element, String name) {
     NodeList id = element.getElementsByTagName(name);
     Element line = (Element) id.item(0);
+    if (line == null) {
+      return null;
+    }
     Node first = line.getFirstChild();
     // handle empty <key></key>
     if (first == null) {

@@ -79,7 +79,6 @@
   <property>
     <description>The Kerberos principal for the resource manager.</description>
     <name>yarn.resourcemanager.principal</name>
-    <value>rm/sightbusy-lx@LOCALHOST</value>
   </property>

   <property>

@@ -430,7 +429,7 @@
   <property>
     <description>The kerberos principal for the node manager.</description>
     <name>yarn.nodemanager.principal</name>
-    <value>nm/sightbusy-lx@LOCALHOST</value>
+    <value></value>
   </property>

   <property>

@@ -45,7 +45,6 @@
       <plugin>
         <groupId>org.codehaus.mojo</groupId>
         <artifactId>make-maven-plugin</artifactId>
-        <version>1.0-beta-1</version>
         <executions>
           <execution>
             <id>compile</id>

@@ -102,7 +101,6 @@
       <plugin>
         <groupId>org.codehaus.mojo</groupId>
         <artifactId>exec-maven-plugin</artifactId>
-        <version>1.2</version>
         <executions>
           <execution>
             <phase>compile</phase>

@@ -1,83 +0,0 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.yarn.server.nodemanager;

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.Application;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.ApplicationEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.ApplicationState;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container;
import org.apache.hadoop.yarn.util.BuilderUtils;

public class MockApp implements Application {

  final String user;
  final ApplicationId appId;
  Map<ContainerId, Container> containers = new HashMap<ContainerId, Container>();
  ApplicationState appState;
  Application app;

  public MockApp(int uniqId) {
    this("mockUser", 1234, uniqId);
  }

  public MockApp(String user, long clusterTimeStamp, int uniqId) {
    super();
    this.user = user;
    // Add an application and the corresponding containers
    RecordFactory recordFactory = RecordFactoryProvider
        .getRecordFactory(new Configuration());
    this.appId = BuilderUtils.newApplicationId(recordFactory, clusterTimeStamp,
        uniqId);
    appState = ApplicationState.NEW;
  }

  public void setState(ApplicationState state) {
    this.appState = state;
  }

  public String getUser() {
    return user;
  }

  public Map<ContainerId, Container> getContainers() {
    return containers;
  }

  public ApplicationId getAppId() {
    return appId;
  }

  public ApplicationState getApplicationState() {
    return appState;
  }

  public void handle(ApplicationEvent event) {}

}

@@ -1,120 +0,0 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.yarn.server.nodemanager;

import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
import org.apache.hadoop.yarn.api.records.ContainerStatus;
import org.apache.hadoop.yarn.event.Dispatcher;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerState;
import org.apache.hadoop.yarn.util.BuilderUtils;

public class MockContainer implements Container {

  private ContainerId id;
  private ContainerState state;
  private String user;
  private ContainerLaunchContext launchContext;
  private final Map<Path, String> resource = new HashMap<Path, String>();
  private RecordFactory recordFactory;

  public MockContainer(ApplicationAttemptId appAttemptId,
      Dispatcher dispatcher, Configuration conf, String user,
      ApplicationId appId, int uniqId) {

    this.user = user;
    this.recordFactory = RecordFactoryProvider.getRecordFactory(conf);
    this.id = BuilderUtils.newContainerId(recordFactory, appId, appAttemptId,
        uniqId);
    this.launchContext = recordFactory
        .newRecordInstance(ContainerLaunchContext.class);
    launchContext.setContainerId(id);
    launchContext.setUser(user);
    this.state = ContainerState.NEW;

  }

  public void setState(ContainerState state) {
    this.state = state;
  }

  @Override
  public ContainerId getContainerID() {
    return id;
  }

  @Override
  public String getUser() {
    return user;
  }

  @Override
  public ContainerState getContainerState() {
    return state;
  }

  @Override
  public ContainerLaunchContext getLaunchContext() {
    return launchContext;
  }

  @Override
  public Credentials getCredentials() {
    return null;
  }

  @Override
  public Map<Path, String> getLocalizedResources() {
    return resource;
  }

  @Override
  public ContainerStatus cloneAndGetContainerStatus() {
    ContainerStatus containerStatus = recordFactory
        .newRecordInstance(ContainerStatus.class);
    containerStatus
        .setState(org.apache.hadoop.yarn.api.records.ContainerState.RUNNING);
    containerStatus.setContainerId(this.launchContext.getContainerId());
    containerStatus.setDiagnostics("testing");
    containerStatus.setExitStatus(0);
    return containerStatus;
  }

  @Override
  public String toString() {
    return "";
  }

  @Override
  public void handle(ContainerEvent event) {
  }

}

@@ -0,0 +1,80 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.yarn.server.nodemanager.webapp;

import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.Application;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.ApplicationEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.ApplicationState;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container;
import org.apache.hadoop.yarn.util.BuilderUtils;

public class MockApp implements Application {

  final String user;
  final ApplicationId appId;
  Map<ContainerId, Container> containers = new HashMap<ContainerId, Container>();
  ApplicationState appState;
  Application app;

  public MockApp(int uniqId) {
    this("mockUser", 1234, uniqId);
  }

  public MockApp(String user, long clusterTimeStamp, int uniqId) {
    super();
    this.user = user;
    // Add an application and the corresponding containers
    RecordFactory recordFactory = RecordFactoryProvider
        .getRecordFactory(new Configuration());
    this.appId = BuilderUtils.newApplicationId(recordFactory, clusterTimeStamp,
        uniqId);
    appState = ApplicationState.NEW;
  }

  public void setState(ApplicationState state) {
    this.appState = state;
  }

  public String getUser() {
    return user;
  }

  public Map<ContainerId, Container> getContainers() {
    return containers;
  }

  public ApplicationId getAppId() {
    return appId;
  }

  public ApplicationState getApplicationState() {
    return appState;
  }

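  // Application events are ignored; tests drive the mock's state directly
  // through setState().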
  public void handle(ApplicationEvent event) {}

}
@@ -0,0 +1,120 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.nodemanager.webapp;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.security.Credentials;
+import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.api.records.ContainerId;
+import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
+import org.apache.hadoop.yarn.api.records.ContainerStatus;
+import org.apache.hadoop.yarn.event.Dispatcher;
+import org.apache.hadoop.yarn.factories.RecordFactory;
+import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
+import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container;
+import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerEvent;
+import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerState;
+import org.apache.hadoop.yarn.util.BuilderUtils;
+
+public class MockContainer implements Container {
+
+  private ContainerId id;
+  private ContainerState state;
+  private String user;
+  private ContainerLaunchContext launchContext;
+  private final Map<Path, String> resource = new HashMap<Path, String>();
+  private RecordFactory recordFactory;
+
+  public MockContainer(ApplicationAttemptId appAttemptId,
+      Dispatcher dispatcher, Configuration conf, String user,
+      ApplicationId appId, int uniqId) {
+
+    this.user = user;
+    this.recordFactory = RecordFactoryProvider.getRecordFactory(conf);
+    this.id = BuilderUtils.newContainerId(recordFactory, appId, appAttemptId,
+        uniqId);
+    this.launchContext = recordFactory
+        .newRecordInstance(ContainerLaunchContext.class);
+    launchContext.setContainerId(id);
+    launchContext.setUser(user);
+    this.state = ContainerState.NEW;
+
+  }
+
+  public void setState(ContainerState state) {
+    this.state = state;
+  }
+
+  @Override
+  public ContainerId getContainerID() {
+    return id;
+  }
+
+  @Override
+  public String getUser() {
+    return user;
+  }
+
+  @Override
+  public ContainerState getContainerState() {
+    return state;
+  }
+
+  @Override
+  public ContainerLaunchContext getLaunchContext() {
+    return launchContext;
+  }
+
+  @Override
+  public Credentials getCredentials() {
+    return null;
+  }
+
+  @Override
+  public Map<Path, String> getLocalizedResources() {
+    return resource;
+  }
+
+  @Override
+  public ContainerStatus cloneAndGetContainerStatus() {
+    ContainerStatus containerStatus = recordFactory
+        .newRecordInstance(ContainerStatus.class);
+    containerStatus
+        .setState(org.apache.hadoop.yarn.api.records.ContainerState.RUNNING);
+    containerStatus.setContainerId(this.launchContext.getContainerId());
+    containerStatus.setDiagnostics("testing");
+    containerStatus.setExitStatus(0);
+    return containerStatus;
+  }
+
+  @Override
+  public String toString() {
+    return "";
+  }
+
+  @Override
+  public void handle(ContainerEvent event) {
+  }
+
+}
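Together these two mocks let NodeManager webapp tests render pages without a live container manager: MockApp fabricates an ApplicationId via BuilderUtils and tracks state plus a container map, while MockContainer fabricates a ContainerId and always reports a RUNNING status. A rough sketch of how a test might wire them up follows; the attempt-id construction and the dispatcher are assumptions for illustration, not part of this change.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.event.AsyncDispatcher;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;

public class MockWiringSketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    RecordFactory rf = RecordFactoryProvider.getRecordFactory(conf);
    MockApp app = new MockApp(1); // user "mockUser", cluster timestamp 1234

    // Assumed way to build an attempt id; the real tests may do it differently.
    ApplicationAttemptId appAttemptId =
        rf.newRecordInstance(ApplicationAttemptId.class);
    appAttemptId.setApplicationId(app.getAppId());
    appAttemptId.setAttemptId(1);

    // Attach one mock container to the mock application.
    MockContainer container = new MockContainer(appAttemptId,
        new AsyncDispatcher(), conf, app.getUser(), app.getAppId(), 1);
    app.getContainers().put(container.getContainerID(), container);
  }
}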
@@ -38,8 +38,6 @@ import org.apache.hadoop.yarn.event.AsyncDispatcher;
 import org.apache.hadoop.yarn.event.Dispatcher;
 import org.apache.hadoop.yarn.server.nodemanager.Context;
 import org.apache.hadoop.yarn.server.nodemanager.LocalDirsHandlerService;
-import org.apache.hadoop.yarn.server.nodemanager.MockApp;
-import org.apache.hadoop.yarn.server.nodemanager.MockContainer;
 import org.apache.hadoop.yarn.server.nodemanager.NodeHealthCheckerService;
 import org.apache.hadoop.yarn.server.nodemanager.NodeManager;
 import org.apache.hadoop.yarn.server.nodemanager.ResourceView;
@@ -39,8 +39,6 @@ import org.apache.hadoop.yarn.event.AsyncDispatcher;
 import org.apache.hadoop.yarn.event.Dispatcher;
 import org.apache.hadoop.yarn.server.nodemanager.Context;
 import org.apache.hadoop.yarn.server.nodemanager.LocalDirsHandlerService;
-import org.apache.hadoop.yarn.server.nodemanager.MockApp;
-import org.apache.hadoop.yarn.server.nodemanager.MockContainer;
 import org.apache.hadoop.yarn.server.nodemanager.NodeHealthCheckerService;
 import org.apache.hadoop.yarn.server.nodemanager.NodeManager;
 import org.apache.hadoop.yarn.server.nodemanager.ResourceView;
@@ -129,7 +129,6 @@
       <plugin>
         <groupId>org.codehaus.mojo</groupId>
         <artifactId>exec-maven-plugin</artifactId>
-        <version>1.2</version>
         <executions>
           <execution>
             <phase>compile</phase>
@@ -20,18 +20,22 @@ package org.apache.hadoop.yarn.server.resourcemanager.webapp;
 
 import static org.apache.hadoop.yarn.util.StringHelper.join;
 
+import java.util.ArrayList;
+
 import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
 import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CSQueue;
 import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacityScheduler;
-import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.ParentQueue;
 import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.CapacitySchedulerInfo;
+import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.CapacitySchedulerLeafQueueInfo;
 import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.CapacitySchedulerQueueInfo;
+import org.apache.hadoop.yarn.webapp.ResponseInfo;
 import org.apache.hadoop.yarn.webapp.SubView;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.DIV;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.LI;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.UL;
 import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
+import org.apache.hadoop.yarn.webapp.view.InfoBlock;
 
 import com.google.inject.Inject;
 import com.google.inject.servlet.RequestScoped;
@@ -45,23 +49,61 @@ class CapacitySchedulerPage extends RmView {
   static final float EPSILON = 1e-8f;
 
   @RequestScoped
-  static class Parent {
-    CSQueue queue;
+  static class CSQInfo {
+    CapacitySchedulerInfo csinfo;
+    CapacitySchedulerQueueInfo qinfo;
+  }
+
+  static class LeafQueueInfoBlock extends HtmlBlock {
+    final CapacitySchedulerLeafQueueInfo lqinfo;
+
+    @Inject LeafQueueInfoBlock(ViewContext ctx, CSQInfo info) {
+      super(ctx);
+      lqinfo = (CapacitySchedulerLeafQueueInfo) info.qinfo;
+    }
+
+    @Override
+    protected void render(Block html) {
+      ResponseInfo ri = info("\'" + lqinfo.getQueuePath().substring(5) + "\' Queue Status").
+          _("Queue State:", lqinfo.getQueueState()).
+          _("Capacity:", percent(lqinfo.getCapacity() / 100)).
+          _("Max Capacity:", percent(lqinfo.getMaxCapacity() / 100)).
+          _("Used Capacity:", percent(lqinfo.getUsedCapacity() / 100)).
+          _("Absolute Capacity:", percent(lqinfo.getAbsoluteCapacity() / 100)).
+          _("Absolute Max Capacity:", percent(lqinfo.getAbsoluteMaxCapacity() / 100)).
+          _("Utilization:", percent(lqinfo.getUtilization() / 100)).
+          _("Used Resources:", lqinfo.getUsedResources().toString()).
+          _("Num Active Applications:", Integer.toString(lqinfo.getNumActiveApplications())).
+          _("Num Pending Applications:", Integer.toString(lqinfo.getNumPendingApplications())).
+          _("Num Containers:", Integer.toString(lqinfo.getNumContainers())).
+          _("Max Applications:", Integer.toString(lqinfo.getMaxApplications())).
+          _("Max Applications Per User:", Integer.toString(lqinfo.getMaxApplicationsPerUser())).
+          _("Max Active Applications:", Integer.toString(lqinfo.getMaxActiveApplications())).
+          _("Max Active Applications Per User:", Integer.toString(lqinfo.getMaxActiveApplicationsPerUser())).
+          _("User Limit:", Integer.toString(lqinfo.getUserLimit()) + "%").
+          _("User Limit Factor:", String.format("%.1f", lqinfo.getUserLimitFactor()));
+
+      html._(InfoBlock.class);
+
+      // clear the info contents so this queue's info doesn't accumulate into another queue's info
+      ri.clear();
+    }
   }
 
   public static class QueueBlock extends HtmlBlock {
-    final Parent parent;
-    final CapacitySchedulerInfo sinfo;
+    final CSQInfo csqinfo;
 
-    @Inject QueueBlock(Parent parent) {
-      this.parent = parent;
-      sinfo = new CapacitySchedulerInfo(parent.queue);
+    @Inject QueueBlock(CSQInfo info) {
+      csqinfo = info;
     }
 
     @Override
     public void render(Block html) {
+      ArrayList<CapacitySchedulerQueueInfo> subQueues =
+          (csqinfo.qinfo == null) ? csqinfo.csinfo.getSubQueues()
+              : csqinfo.qinfo.getSubQueues();
       UL<Hamlet> ul = html.ul();
-      for (CapacitySchedulerQueueInfo info : sinfo.getSubQueues()) {
+      for (CapacitySchedulerQueueInfo info : subQueues) {
         float used = info.getUsedCapacity() / 100;
         float set = info.getCapacity() / 100;
         float delta = Math.abs(set - used) + 0.001f;
@@ -76,11 +118,12 @@ class CapacitySchedulerPage extends RmView {
               used > set ? OVER : UNDER, ';',
               used > set ? left(set/max) : left(used/max)))._('.')._().
           span(".q", info.getQueuePath().substring(5))._();
-        if (info.getQueue() instanceof ParentQueue) {
-          // this could be optimized better
-          parent.queue = info.getQueue();
-          li.
-            _(QueueBlock.class);
+        csqinfo.qinfo = info;
+        if (info.getSubQueues() == null) {
+          li.ul("#lq").li()._(LeafQueueInfoBlock.class)._()._();
+        } else {
+          li._(QueueBlock.class);
         }
         li._();
       }
@@ -91,11 +134,11 @@ class CapacitySchedulerPage extends RmView {
 
   static class QueuesBlock extends HtmlBlock {
     final CapacityScheduler cs;
-    final Parent parent;
+    final CSQInfo csqinfo;
 
-    @Inject QueuesBlock(ResourceManager rm, Parent parent) {
+    @Inject QueuesBlock(ResourceManager rm, CSQInfo info) {
       cs = (CapacityScheduler) rm.getResourceScheduler();
-      this.parent = parent;
+      csqinfo = info;
     }
 
     @Override
@@ -115,8 +158,10 @@ class CapacitySchedulerPage extends RmView {
             span(".q", "default")._()._();
       } else {
         CSQueue root = cs.getRootQueue();
-        parent.queue = root;
-        CapacitySchedulerInfo sinfo = new CapacitySchedulerInfo(parent.queue);
+        CapacitySchedulerInfo sinfo = new CapacitySchedulerInfo(root);
+        csqinfo.csinfo = sinfo;
+        csqinfo.qinfo = null;
 
         float used = sinfo.getUsedCapacity() / 100;
         float set = sinfo.getCapacity() / 100;
         float delta = Math.abs(set - used) + 0.001f;
@@ -144,13 +189,16 @@ class CapacitySchedulerPage extends RmView {
           "#cs ul { list-style: none }",
           "#cs a { font-weight: normal; margin: 2px; position: relative }",
           "#cs a span { font-weight: normal; font-size: 80% }",
-          "#cs-wrapper .ui-widget-header { padding: 0.2em 0.5em }")._().
+          "#cs-wrapper .ui-widget-header { padding: 0.2em 0.5em }",
+          "table.info tr th {width: 50%}")._(). // to center info table
       script("/static/jt/jquery.jstree.js").
       script().$type("text/javascript").
         _("$(function() {",
           "  $('#cs a span').addClass('ui-corner-all').css('position', 'absolute');",
           "  $('#cs').bind('loaded.jstree', function (e, data) {",
-          "    data.inst.open_all(); }).",
+          "    data.inst.open_all();",
+          "    data.inst.close_node('#lq', true);",
+          "   }).",
           "  jstree({",
           "    core: { animation: 188, html_titles: true },",
           "    plugins: ['themeroller', 'html_data', 'ui'],",
@@ -160,8 +208,9 @@ class CapacitySchedulerPage extends RmView {
           "  });",
           "  $('#cs').bind('select_node.jstree', function(e, data) {",
           "    var q = $('.q', data.rslt.obj).first().text();",
           "    if (q == 'root') q = '';",
-          "    $('#apps').dataTable().fnFilter(q, 3);",
+          "    else q = '^' + q.substr(q.lastIndexOf('.') + 1) + '$';",
+          "    $('#apps').dataTable().fnFilter(q, 3, true);",
           "  });",
           "  $('#cs').show();",
           "});")._();
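The filter change just above is subtle: the page used to pass the selected queue name to fnFilter(q, 3) as a plain substring, so clicking queue "a" also matched applications in queues like "a1" or "ba". It now takes the last path component, anchors it as ^name$, and switches fnFilter into regex mode via the third argument. A tiny Java illustration of why the anchoring matters (queue names invented):

import java.util.regex.Pattern;

public class AnchoredFilterSketch {
  public static void main(String[] args) {
    String selected = "a";                  // short name of the clicked queue
    String anchored = "^" + selected + "$"; // what the page now builds
    System.out.println(Pattern.compile(anchored).matcher("a").find());  // true
    System.out.println(Pattern.compile(anchored).matcher("a1").find()); // false
    System.out.println("a1".contains(selected)); // true: the old substring match
  }
}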
@@ -26,9 +26,8 @@ import javax.xml.bind.annotation.XmlRootElement;
 import javax.xml.bind.annotation.XmlTransient;
 import javax.xml.bind.annotation.XmlType;
 
-import org.apache.hadoop.yarn.api.records.QueueState;
 import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CSQueue;
-import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.ParentQueue;
+import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.LeafQueue;
 
 @XmlRootElement(name = "capacityScheduler")
 @XmlType(name = "capacityScheduler")
@@ -83,21 +82,11 @@ public class CapacitySchedulerInfo extends SchedulerInfo {
     CSQueue parentQueue = parent;
     ArrayList<CapacitySchedulerQueueInfo> queuesInfo = new ArrayList<CapacitySchedulerQueueInfo>();
     for (CSQueue queue : parentQueue.getChildQueues()) {
-      float usedCapacity = queue.getUsedCapacity() * 100;
-      float capacity = queue.getCapacity() * 100;
-      String queueName = queue.getQueueName();
-      String queuePath = queue.getQueuePath();
-      float max = queue.getMaximumCapacity();
-      if (max < EPSILON || max > 1f)
-        max = 1f;
-      float maxCapacity = max * 100;
-      QueueState state = queue.getState();
-      CapacitySchedulerQueueInfo info = new CapacitySchedulerQueueInfo(
-          capacity, usedCapacity, maxCapacity, queueName, state, queuePath);
-
-      if (queue instanceof ParentQueue) {
-        info.isParent = true;
-        info.queue = queue;
+      CapacitySchedulerQueueInfo info;
+      if (queue instanceof LeafQueue) {
+        info = new CapacitySchedulerLeafQueueInfo((LeafQueue)queue);
+      } else {
+        info = new CapacitySchedulerQueueInfo(queue);
         info.subQueues = getQueues(queue);
       }
       queuesInfo.add(info);
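The rewrite above swaps per-field copying for constructor-based DAOs and dispatches on the queue type: a LeafQueue becomes the richer CapacitySchedulerLeafQueueInfo (added next), while any other queue keeps the base DAO and recurses into its children. A stripped-down sketch of the same dispatch-and-recurse shape, using hypothetical stand-in types rather than the real scheduler classes:

import java.util.ArrayList;
import java.util.List;

public class QueueWalkSketch {
  // Stand-ins for CSQueue/LeafQueue and the two DAO classes.
  interface Queue { List<Queue> children(); }
  static class Info { List<Info> subQueues; }
  static class LeafInfo extends Info { }

  static Info toInfo(Queue q) {
    if (q.children() == null || q.children().isEmpty()) {
      return new LeafInfo();          // leaf: richer DAO, recursion stops
    }
    Info info = new Info();           // parent: base DAO plus sub-queues
    info.subQueues = new ArrayList<Info>();
    for (Queue child : q.children()) {
      info.subQueues.add(toInfo(child));
    }
    return info;
  }
}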
@@ -0,0 +1,91 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.server.resourcemanager.webapp.dao;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlRootElement;
+
+import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.LeafQueue;
+
+@XmlRootElement
+@XmlAccessorType(XmlAccessType.FIELD)
+public class CapacitySchedulerLeafQueueInfo extends CapacitySchedulerQueueInfo {
+
+  protected int numActiveApplications;
+  protected int numPendingApplications;
+  protected int numContainers;
+  protected int maxApplications;
+  protected int maxApplicationsPerUser;
+  protected int maxActiveApplications;
+  protected int maxActiveApplicationsPerUser;
+  protected int userLimit;
+  protected float userLimitFactor;
+
+  CapacitySchedulerLeafQueueInfo() {
+  };
+
+  CapacitySchedulerLeafQueueInfo(LeafQueue q) {
+    super(q);
+    numActiveApplications = q.getNumActiveApplications();
+    numPendingApplications = q.getNumPendingApplications();
+    numContainers = q.getNumContainers();
+    maxApplications = q.getMaxApplications();
+    maxApplicationsPerUser = q.getMaxApplicationsPerUser();
+    maxActiveApplications = q.getMaximumActiveApplications();
+    maxActiveApplicationsPerUser = q.getMaximumActiveApplicationsPerUser();
+    userLimit = q.getUserLimit();
+    userLimitFactor = q.getUserLimitFactor();
+  }
+
+  public int getNumActiveApplications() {
+    return numActiveApplications;
+  }
+
+  public int getNumPendingApplications() {
+    return numPendingApplications;
+  }
+
+  public int getNumContainers() {
+    return numContainers;
+  }
+
+  public int getMaxApplications() {
+    return maxApplications;
+  }
+
+  public int getMaxApplicationsPerUser() {
+    return maxApplicationsPerUser;
+  }
+
+  public int getMaxActiveApplications() {
+    return maxActiveApplications;
+  }
+
+  public int getMaxActiveApplicationsPerUser() {
+    return maxActiveApplicationsPerUser;
+  }
+
+  public int getUserLimit() {
+    return userLimit;
+  }
+
+  public float getUserLimitFactor() {
+    return userLimitFactor;
+  }
+}
@@ -22,50 +22,54 @@ import java.util.ArrayList;
 import javax.xml.bind.annotation.XmlAccessType;
 import javax.xml.bind.annotation.XmlAccessorType;
 import javax.xml.bind.annotation.XmlRootElement;
+import javax.xml.bind.annotation.XmlSeeAlso;
 import javax.xml.bind.annotation.XmlTransient;
 
-import org.apache.hadoop.yarn.api.records.QueueState;
 import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CSQueue;
 
 @XmlRootElement
 @XmlAccessorType(XmlAccessType.FIELD)
+@XmlSeeAlso({CapacitySchedulerLeafQueueInfo.class})
 public class CapacitySchedulerQueueInfo {
 
   @XmlTransient
-  protected String queuePath;
-  @XmlTransient
-  protected Boolean isParent = false;
-
-  // bit odd to store this but makes html easier for now
-  @XmlTransient
-  protected CSQueue queue;
+  static final float EPSILON = 1e-8f;
 
+  @XmlTransient
+  protected String queuePath;
+
   protected float capacity;
   protected float usedCapacity;
   protected float maxCapacity;
+  protected float absoluteCapacity;
+  protected float absoluteMaxCapacity;
+  protected float utilization;
+  protected int numApplications;
+  protected String usedResources;
   protected String queueName;
-  protected QueueState state;
+  protected String state;
   protected ArrayList<CapacitySchedulerQueueInfo> subQueues;
 
   CapacitySchedulerQueueInfo() {
   };
 
-  CapacitySchedulerQueueInfo(float cap, float used, float max, String name,
-      QueueState state, String path) {
-    this.capacity = cap;
-    this.usedCapacity = used;
-    this.maxCapacity = max;
-    this.queueName = name;
-    this.state = state;
-    this.queuePath = path;
-  }
-
-  public Boolean isParent() {
-    return this.isParent;
-  }
-
-  public CSQueue getQueue() {
-    return this.queue;
+  CapacitySchedulerQueueInfo(CSQueue q) {
+    queuePath = q.getQueuePath();
+    capacity = q.getCapacity() * 100;
+    usedCapacity = q.getUsedCapacity() * 100;
+    maxCapacity = q.getMaximumCapacity();
+    if (maxCapacity < EPSILON || maxCapacity > 1f)
+      maxCapacity = 1f;
+    maxCapacity *= 100;
+    absoluteCapacity = cap(q.getAbsoluteCapacity(), 0f, 1f) * 100;
+    absoluteMaxCapacity = cap(q.getAbsoluteMaximumCapacity(), 0f, 1f) * 100;
+    utilization = q.getUtilization() * 100;
+    numApplications = q.getNumApplications();
+    usedResources = q.getUsedResources().toString();
+    queueName = q.getQueueName();
+    state = q.getState().toString();
   }
 
   public float getCapacity() {
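A serialization detail in the hunk above: subQueues is declared with the base type, so JAXB would not discover the leaf subclass on its own; @XmlSeeAlso({CapacitySchedulerLeafQueueInfo.class}) registers it with the JAXB context so leaf entries keep their extra fields in the REST output. A minimal, self-contained illustration of the mechanism with toy classes (not the real DAOs):

import java.io.StringWriter;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlSeeAlso;

@XmlRootElement
@XmlSeeAlso({SeeAlsoSketch.Leaf.class}) // without this, marshalling a Leaf held
public class SeeAlsoSketch {            // behind a Base reference fails
  public Base queue = new Leaf();

  public static class Base { public String name = "root"; }
  public static class Leaf extends Base { public int userLimit = 100; }

  public static void main(String[] args) throws Exception {
    StringWriter out = new StringWriter();
    JAXBContext.newInstance(SeeAlsoSketch.class)
        .createMarshaller().marshal(new SeeAlsoSketch(), out);
    System.out.println(out); // Leaf's userLimit shows up in the XML
  }
}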
@@ -80,12 +84,32 @@ public class CapacitySchedulerQueueInfo {
     return this.maxCapacity;
   }
 
+  public float getAbsoluteCapacity() {
+    return absoluteCapacity;
+  }
+
+  public float getAbsoluteMaxCapacity() {
+    return absoluteMaxCapacity;
+  }
+
+  public float getUtilization() {
+    return utilization;
+  }
+
+  public int getNumApplications() {
+    return numApplications;
+  }
+
+  public String getUsedResources() {
+    return usedResources;
+  }
+
   public String getQueueName() {
     return this.queueName;
   }
 
   public String getQueueState() {
-    return this.state.toString();
+    return this.state;
   }
 
   public String getQueuePath() {
@@ -96,4 +120,14 @@ public class CapacitySchedulerQueueInfo {
     return this.subQueues;
   }
+
+  /**
+   * Limit a value to a specified range.
+   * @param val the value to be capped
+   * @param low the lower bound of the range (inclusive)
+   * @param hi the upper bound of the range (inclusive)
+   * @return the capped value
+   */
+  static float cap(float val, float low, float hi) {
+    return Math.min(Math.max(val, low), hi);
+  }
 }
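The cap helper exists because the absolute capacities are products of floats that can drift slightly outside [0, 1]; clamping before converting to a percentage keeps the reported values in range. Usage, with invented inputs (cap is package-private, so this would live in the same dao package):

// cap(val, low, hi) == min(max(val, low), hi)
float drifted = 1.0000002f; // accumulated float error past 1.0
float pct = CapacitySchedulerQueueInfo.cap(drifted, 0f, 1f) * 100; // == 100.0f
float neg = CapacitySchedulerQueueInfo.cap(-0.01f, 0f, 1f) * 100;  // ==   0.0f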
@@ -210,17 +210,21 @@ public class TestRMWebServicesCapacitySched extends JerseyTest {
         Element qElem = (Element) queues.item(j);
         String qName = WebServicesTestUtils.getXmlString(qElem, "queueName");
         String q = CapacitySchedulerConfiguration.ROOT + "." + qName;
-        verifySubQueueXML(qElem, q);
+        verifySubQueueXML(qElem, q, 100);
       }
     }
   }
 
-  public void verifySubQueueXML(Element qElem, String q) throws Exception {
+  public void verifySubQueueXML(Element qElem, String q, float parentAbsCapacity)
+      throws Exception {
+    float absCapacity = WebServicesTestUtils.getXmlFloat(qElem, "absoluteCapacity");
     verifySubQueueGeneric(q,
         WebServicesTestUtils.getXmlFloat(qElem, "usedCapacity"),
         WebServicesTestUtils.getXmlFloat(qElem, "capacity"),
         WebServicesTestUtils.getXmlFloat(qElem, "maxCapacity"),
+        absCapacity,
+        WebServicesTestUtils.getXmlFloat(qElem, "absoluteMaxCapacity"),
+        parentAbsCapacity,
         WebServicesTestUtils.getXmlString(qElem, "queueName"),
         WebServicesTestUtils.getXmlString(qElem, "state"));
@@ -230,8 +234,12 @@ public class TestRMWebServicesCapacitySched extends JerseyTest {
         Element subqElem = (Element) queues.item(j);
         String qName = WebServicesTestUtils.getXmlString(subqElem, "queueName");
         String q2 = q + "." + qName;
-        verifySubQueueXML(subqElem, q2);
+        verifySubQueueXML(subqElem, q2, absCapacity);
       }
+    } else {
+      verifyLeafQueueGeneric(q,
+          WebServicesTestUtils.getXmlInt(qElem, "userLimit"),
+          WebServicesTestUtils.getXmlFloat(qElem, "userLimitFactor"));
     }
   }
@@ -254,7 +262,7 @@ public class TestRMWebServicesCapacitySched extends JerseyTest {
       for (int i = 0; i < arr.length(); i++) {
         JSONObject obj = arr.getJSONObject(i);
         String q = CapacitySchedulerConfiguration.ROOT + "." + obj.getString("queueName");
-        verifySubQueue(obj, q);
+        verifySubQueue(obj, q, 100);
       }
     }
@@ -268,31 +276,46 @@ public class TestRMWebServicesCapacitySched extends JerseyTest {
     assertTrue("queueName doesn't match", "root".matches(queueName));
   }
 
-  private void verifySubQueue(JSONObject info, String q) throws JSONException,
-      Exception {
-    if (info.has("subQueues")) {
-      assertEquals("incorrect number of elements", 6, info.length());
-    } else {
-      assertEquals("incorrect number of elements", 5, info.length());
+  private void verifySubQueue(JSONObject info, String q, float parentAbsCapacity)
+      throws JSONException, Exception {
+    int numExpectedElements = 11;
+    boolean isParentQueue = true;
+    if (!info.has("subQueues")) {
+      numExpectedElements = 20;
+      isParentQueue = false;
     }
+    assertEquals("incorrect number of elements", numExpectedElements, info.length());
+
+    float absCapacity = (float) info.getDouble("absoluteCapacity");
 
     verifySubQueueGeneric(q, (float) info.getDouble("usedCapacity"),
         (float) info.getDouble("capacity"),
-        (float) info.getDouble("maxCapacity"), info.getString("queueName"),
+        (float) info.getDouble("maxCapacity"),
+        absCapacity,
+        (float) info.getDouble("absoluteMaxCapacity"),
+        parentAbsCapacity,
+        info.getString("queueName"),
         info.getString("state"));
 
-    if (info.has("subQueues")) {
+    if (isParentQueue) {
       JSONArray arr = info.getJSONArray("subQueues");
       // test subqueues
       for (int i = 0; i < arr.length(); i++) {
         JSONObject obj = arr.getJSONObject(i);
         String q2 = q + "." + obj.getString("queueName");
-        verifySubQueue(obj, q2);
+        verifySubQueue(obj, q2, absCapacity);
       }
+    } else {
+      verifyLeafQueueGeneric(q, info.getInt("userLimit"),
+          (float) info.getDouble("userLimitFactor"));
     }
   }
 
   private void verifySubQueueGeneric(String q, float usedCapacity,
-      float capacity, float maxCapacity, String qname, String state)
+      float capacity, float maxCapacity,
+      float absCapacity, float absMaxCapacity,
+      float parentAbsCapacity,
+      String qname, String state)
       throws Exception {
     String[] qArr = q.split("\\.");
     assertTrue("q name invalid: " + q, qArr.length > 1);
@@ -302,15 +325,28 @@ public class TestRMWebServicesCapacitySched extends JerseyTest {
     assertEquals("capacity doesn't match", csConf.getCapacity(q), capacity,
         1e-3f);
     float expectCapacity = csConf.getMaximumCapacity(q);
+    float expectAbsMaxCapacity = parentAbsCapacity * (maxCapacity/100);
     if (CapacitySchedulerConfiguration.UNDEFINED == expectCapacity) {
       expectCapacity = 100;
+      expectAbsMaxCapacity = 100;
     }
     assertEquals("maxCapacity doesn't match", expectCapacity, maxCapacity,
         1e-3f);
+    assertEquals("absoluteCapacity doesn't match",
+        parentAbsCapacity * (capacity/100), absCapacity, 1e-3f);
+    assertEquals("absoluteMaxCapacity doesn't match",
+        expectAbsMaxCapacity, absMaxCapacity, 1e-3f);
     assertTrue("queueName doesn't match, got: " + qname + " expected: " + q,
         qshortName.matches(qname));
     assertTrue("state doesn't match",
         (csConf.getState(q).toString()).matches(state));
 
   }
 
+  private void verifyLeafQueueGeneric(String q, int userLimit,
+      float userLimitFactor) throws Exception {
+    assertEquals("userLimit doesn't match", csConf.getUserLimit(q), userLimit);
+    assertEquals("userLimitFactor doesn't match",
+        csConf.getUserLimitFactor(q), userLimitFactor, 1e-3f);
+  }
 }
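The new assertions encode the invariant behind the absoluteCapacity fields: a queue's absolute share is its parent's absolute share scaled by its own configured percentage, and the same holds for the maximum. A worked example with invented numbers:

// root is 100%; queue A is configured at 40% of root; sub-queue A1 at 50%
// of A with an 80% maxCapacity.
float rootAbs  = 100f;
float aAbs     = rootAbs * (40f / 100); // 40.0: A's absoluteCapacity
float a1Abs    = aAbs * (50f / 100);    // 20.0: parentAbsCapacity * (capacity/100)
float a1AbsMax = aAbs * (80f / 100);    // 32.0: parentAbsCapacity * (maxCapacity/100)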
@@ -34,7 +34,6 @@
       <groupId>javax.servlet</groupId>
       <artifactId>servlet-api</artifactId>
       <scope>compile</scope>
-      <version>2.5</version>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
@@ -33,48 +33,10 @@
     <yarn.basedir>${basedir}</yarn.basedir>
   </properties>
 
-  <distributionManagement>
-    <repository>
-      <id>apache.releases.https</id>
-      <name>Apache Release Distribution Repository</name>
-      <url>https://repository.apache.org/service/local/staging/deploy/maven2</url>
-    </repository>
-    <snapshotRepository>
-      <id>apache.snapshots.https</id>
-      <name>Apache Development Snapshot Repository</name>
-      <url>https://repository.apache.org/content/repositories/snapshots</url>
-    </snapshotRepository>
-  </distributionManagement>
-
-  <repositories>
-    <repository>
-      <id>repository.jboss.org</id>
-      <url>http://repository.jboss.org/nexus/content/groups/public/</url>
-      <snapshots>
-        <enabled>false</enabled>
-      </snapshots>
-    </repository>
-    <repository>
-      <id>apache.snapshots</id>
-      <url>http://repository.apache.org/snapshots</url>
-      <!-- until we move to hadoop-common/hdfs trunk and/or maven 3 -->
-      <!-- cf. MNG-4326 -->
-      <snapshots>
-        <enabled>true</enabled>
-      </snapshots>
-    </repository>
-  </repositories>
-
   <dependencies>
-    <dependency>
-      <groupId>com.google.protobuf</groupId>
-      <artifactId>protobuf-java</artifactId>
-      <version>2.4.0a</version>
-    </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-common</artifactId>
-      <version>${project.version}</version>
       <scope>provided</scope>
       <exclusions>
         <exclusion>
@@ -103,295 +65,80 @@
     <dependency>
       <groupId>org.slf4j</groupId>
      <artifactId>slf4j-api</artifactId>
-      <version>1.6.1</version>
     </dependency>
     <dependency>
       <groupId>org.slf4j</groupId>
       <artifactId>slf4j-log4j12</artifactId>
-      <version>1.6.1</version>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-annotations</artifactId>
-      <version>${project.version}</version>
     </dependency>
     <dependency>
       <groupId>org.mockito</groupId>
       <artifactId>mockito-all</artifactId>
-      <version>1.8.5</version>
       <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-common</artifactId>
-      <version>${project.version}</version>
       <type>test-jar</type>
       <scope>test</scope>
     </dependency>
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <!-- needed for security and runtime -->
-      <artifactId>hadoop-hdfs</artifactId>
-      <version>${project.version}</version>
-    </dependency>
     <dependency>
       <groupId>com.google.inject.extensions</groupId>
       <artifactId>guice-servlet</artifactId>
-      <version>3.0</version>
-    </dependency>
-    <dependency>
-      <groupId>junit</groupId>
-      <artifactId>junit</artifactId>
-      <version>4.8.2</version>
     </dependency>
     <dependency>
       <groupId>org.jboss.netty</groupId>
       <artifactId>netty</artifactId>
-      <version>3.2.3.Final</version>
     </dependency>
     <dependency>
       <groupId>com.cenqua.clover</groupId>
       <artifactId>clover</artifactId>
-      <version>3.0.2</version>
     </dependency>
     <dependency>
       <groupId>com.google.protobuf</groupId>
       <artifactId>protobuf-java</artifactId>
-      <version>2.4.0a</version>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-common</artifactId>
-      <version>${project.version}</version>
-      <exclusions>
-        <exclusion>
-          <groupId>commons-el</groupId>
-          <artifactId>commons-el</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>tomcat</groupId>
-          <artifactId>jasper-runtime</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>tomcat</groupId>
-          <artifactId>jasper-compiler</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>org.mortbay.jetty</groupId>
-          <artifactId>jsp-2.1-jetty</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>hsqldb</groupId>
-          <artifactId>hsqldb</artifactId>
-        </exclusion>
-      </exclusions>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-annotations</artifactId>
-      <version>${project.version}</version>
     </dependency>
     <dependency>
       <groupId>junit</groupId>
       <artifactId>junit</artifactId>
-      <version>4.8.2</version>
       <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>commons-io</groupId>
       <artifactId>commons-io</artifactId>
-      <version>2.1</version>
-    </dependency>
-    <dependency>
-      <groupId>org.mockito</groupId>
-      <artifactId>mockito-all</artifactId>
-      <version>1.8.5</version>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-common</artifactId>
-      <version>${project.version}</version>
-      <type>test-jar</type>
-      <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-hdfs</artifactId>
-      <version>${project.version}</version>
-      <scope>runtime</scope>
+      <scope>provided</scope>
     </dependency>
     <dependency>
       <groupId>com.google.inject</groupId>
       <artifactId>guice</artifactId>
-      <version>3.0</version>
     </dependency>
     <dependency>
       <groupId>com.sun.jersey.jersey-test-framework</groupId>
       <artifactId>jersey-test-framework-core</artifactId>
-      <version>1.8</version>
       <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>com.sun.jersey.jersey-test-framework</groupId>
       <artifactId>jersey-test-framework-grizzly2</artifactId>
-      <version>1.8</version>
     </dependency>
     <dependency>
       <groupId>com.sun.jersey</groupId>
       <artifactId>jersey-server</artifactId>
-      <version>1.8</version>
     </dependency>
     <dependency>
       <groupId>com.sun.jersey.contribs</groupId>
       <artifactId>jersey-guice</artifactId>
-      <version>1.8</version>
-    </dependency>
-    <dependency>
-      <groupId>org.jboss.netty</groupId>
-      <artifactId>netty</artifactId>
-      <version>3.2.3.Final</version>
-    </dependency>
-    <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-api</artifactId>
-      <version>1.6.1</version>
-    </dependency>
-    <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-log4j12</artifactId>
-      <version>1.6.1</version>
-    </dependency>
-    <dependency>
-      <groupId>junit</groupId>
-      <artifactId>junit</artifactId>
-      <version>4.8.2</version>
     </dependency>
   </dependencies>
 
-  <dependencyManagement>
-    <dependencies>
-      <dependency>
-        <groupId>org.apache.hadoop</groupId>
-        <artifactId>hadoop-yarn-api</artifactId>
-        <version>${project.version}</version>
-      </dependency>
-      <dependency>
-        <groupId>org.apache.hadoop</groupId>
-        <artifactId>hadoop-yarn-common</artifactId>
-        <version>${project.version}</version>
-      </dependency>
-      <dependency>
-        <groupId>org.apache.hadoop</groupId>
-        <artifactId>hadoop-yarn-common</artifactId>
-        <version>${project.version}</version>
-        <type>test-jar</type>
-      </dependency>
-      <dependency>
-        <groupId>org.apache.hadoop</groupId>
-        <artifactId>hadoop-yarn-server-common</artifactId>
-        <version>${project.version}</version>
-      </dependency>
-      <dependency>
-        <groupId>org.apache.hadoop</groupId>
-        <artifactId>hadoop-yarn-server-web-proxy</artifactId>
-        <version>${project.version}</version>
-      </dependency>
-      <dependency>
-        <groupId>org.apache.hadoop</groupId>
-        <artifactId>hadoop-yarn-server-resourcemanager</artifactId>
-        <version>${project.version}</version>
-      </dependency>
-      <dependency>
-        <groupId>org.apache.hadoop</groupId>
-        <artifactId>hadoop-yarn-server-nodemanager</artifactId>
-        <version>${project.version}</version>
-      </dependency>
-      <dependency>
-        <groupId>org.apache.zookeeper</groupId>
-        <artifactId>zookeeper</artifactId>
-        <version>3.4.2</version>
-        <exclusions>
-          <exclusion>
-            <!-- otherwise seems to drag in junit 3.8.1 via jline -->
-            <groupId>junit</groupId>
-            <artifactId>junit</artifactId>
-          </exclusion>
-          <exclusion>
-            <groupId>com.sun.jdmk</groupId>
-            <artifactId>jmxtools</artifactId>
-          </exclusion>
-          <exclusion>
-            <groupId>com.sun.jmx</groupId>
-            <artifactId>jmxri</artifactId>
-          </exclusion>
-        </exclusions>
-      </dependency>
-    </dependencies>
-  </dependencyManagement>
-
   <build>
-    <pluginManagement>
-      <plugins>
-        <plugin>
-          <groupId>org.codehaus.mojo</groupId>
-          <artifactId>findbugs-maven-plugin</artifactId>
-          <version>2.3.2</version>
-        </plugin>
-        <plugin>
-          <groupId>org.apache.maven.plugins</groupId>
-          <artifactId>maven-compiler-plugin</artifactId>
-          <!-- pre 2.1 ignores project.build.sourceEncoding -->
-          <version>2.3.2</version>
-          <configuration>
-            <source>1.6</source>
-            <target>1.6</target>
-          </configuration>
-        </plugin>
-        <plugin>
-          <artifactId>maven-clean-plugin</artifactId>
-          <version>2.4.1</version>
-        </plugin>
-        <plugin>
-          <groupId>com.atlassian.maven.plugins</groupId>
-          <artifactId>maven-clover2-plugin</artifactId>
-          <version>3.0.2</version>
-          <configuration>
-            <licenseLocation>/home/y/conf/clover/clover.license</licenseLocation>
-          </configuration>
-        </plugin>
-        <plugin>
-          <groupId>org.apache.maven.plugins</groupId>
-          <artifactId>maven-antrun-plugin</artifactId>
-          <version>1.6</version>
-        </plugin>
-        <plugin>
-          <groupId>org.codehaus.mojo</groupId>
-          <artifactId>exec-maven-plugin</artifactId>
-          <version>1.2</version>
-        </plugin>
-        <plugin>
-          <groupId>org.codehaus.mojo</groupId>
-          <artifactId>build-helper-maven-plugin</artifactId>
-          <version>1.5</version>
-        </plugin>
-        <plugin>
-          <groupId>org.apache.maven.plugins</groupId>
-          <artifactId>maven-install-plugin</artifactId>
-          <version>2.3.1</version>
-        </plugin>
-        <plugin>
-          <groupId>org.apache.maven.plugins</groupId>
-          <artifactId>maven-jar-plugin</artifactId>
-          <version>2.3.1</version>
-        </plugin>
-        <plugin>
-          <groupId>org.apache.maven.plugins</groupId>
-          <artifactId>maven-source-plugin</artifactId>
-          <version>2.1.2</version>
-        </plugin>
-      </plugins>
-    </pluginManagement>
     <plugins>
       <plugin>
         <groupId>org.codehaus.mojo</groupId>
@@ -48,12 +48,10 @@
     <dependency>
       <groupId>com.google.protobuf</groupId>
       <artifactId>protobuf-java</artifactId>
-      <version>2.4.0a</version>
     </dependency>
     <dependency>
       <groupId>org.apache.avro</groupId>
       <artifactId>avro</artifactId>
-      <version>1.5.3</version>
       <exclusions>
         <exclusion>
           <groupId>org.mortbay.jetty</groupId>
@@ -84,7 +82,6 @@
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-common</artifactId>
-      <version>${project.version}</version>
       <scope>provided</scope>
       <exclusions>
         <exclusion>
@@ -113,28 +110,23 @@
     <dependency>
       <groupId>org.slf4j</groupId>
       <artifactId>slf4j-api</artifactId>
-      <version>1.6.1</version>
     </dependency>
     <dependency>
       <groupId>org.slf4j</groupId>
       <artifactId>slf4j-log4j12</artifactId>
-      <version>1.6.1</version>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-annotations</artifactId>
-      <version>${project.version}</version>
     </dependency>
     <dependency>
       <groupId>org.mockito</groupId>
       <artifactId>mockito-all</artifactId>
-      <version>1.8.5</version>
       <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-common</artifactId>
-      <version>${project.version}</version>
       <type>test-jar</type>
       <scope>test</scope>
     </dependency>
@ -142,118 +134,43 @@
|
||||||
<groupId>org.apache.hadoop</groupId>
|
<groupId>org.apache.hadoop</groupId>
|
||||||
<!-- needed for security and runtime -->
|
<!-- needed for security and runtime -->
|
||||||
<artifactId>hadoop-hdfs</artifactId>
|
<artifactId>hadoop-hdfs</artifactId>
|
||||||
<version>${project.version}</version>
|
|
||||||
</dependency>
|
</dependency>
|
||||||
<dependency>
|
<dependency>
|
||||||
<groupId>com.google.inject</groupId>
|
<groupId>com.google.inject</groupId>
|
||||||
<artifactId>guice</artifactId>
|
<artifactId>guice</artifactId>
|
||||||
<version>3.0</version>
|
|
||||||
</dependency>
|
</dependency>
|
||||||
<dependency>
|
<dependency>
|
||||||
<groupId>com.sun.jersey</groupId>
|
<groupId>com.sun.jersey</groupId>
|
||||||
<artifactId>jersey-server</artifactId>
|
<artifactId>jersey-server</artifactId>
|
||||||
<version>1.8</version>
|
|
||||||
</dependency>
|
</dependency>
|
||||||
<dependency>
|
<dependency>
|
||||||
<groupId>com.sun.jersey.contribs</groupId>
|
<groupId>com.sun.jersey.contribs</groupId>
|
||||||
<artifactId>jersey-guice</artifactId>
|
<artifactId>jersey-guice</artifactId>
|
||||||
<version>1.8</version>
|
|
||||||
</dependency>
|
</dependency>
|
||||||
<dependency>
|
<dependency>
|
||||||
<groupId>com.google.inject.extensions</groupId>
|
<groupId>com.google.inject.extensions</groupId>
|
||||||
<artifactId>guice-servlet</artifactId>
|
<artifactId>guice-servlet</artifactId>
|
||||||
<version>3.0</version>
|
|
||||||
</dependency>
|
</dependency>
|
||||||
<dependency>
|
<dependency>
|
||||||
<groupId>junit</groupId>
|
<groupId>junit</groupId>
|
||||||
<artifactId>junit</artifactId>
|
<artifactId>junit</artifactId>
|
||||||
<version>4.8.2</version>
|
|
||||||
</dependency>
|
</dependency>
|
||||||
<dependency>
|
<dependency>
|
||||||
<groupId>org.jboss.netty</groupId>
|
<groupId>org.jboss.netty</groupId>
|
||||||
<artifactId>netty</artifactId>
|
<artifactId>netty</artifactId>
|
||||||
<version>3.2.3.Final</version>
|
|
||||||
</dependency>
|
</dependency>
|
||||||
<dependency>
|
<dependency>
|
||||||
<groupId>commons-io</groupId>
|
<groupId>commons-io</groupId>
|
||||||
<artifactId>commons-io</artifactId>
|
<artifactId>commons-io</artifactId>
|
||||||
<version>2.1</version>
|
|
||||||
</dependency>
|
</dependency>
|
||||||
<dependency>
|
<dependency>
|
||||||
<groupId>com.cenqua.clover</groupId>
|
<groupId>com.cenqua.clover</groupId>
|
||||||
<artifactId>clover</artifactId>
|
<artifactId>clover</artifactId>
|
||||||
<version>3.0.2</version>
|
|
||||||
</dependency>
|
</dependency>
|
||||||
|
|
||||||
</dependencies>
|
</dependencies>
|
||||||
|
|
||||||
<build>
|
<build>
|
||||||
<pluginManagement>
|
|
||||||
<plugins>
|
|
||||||
<plugin>
|
|
||||||
<groupId>org.codehaus.mojo</groupId>
|
|
||||||
<artifactId>findbugs-maven-plugin</artifactId>
|
|
||||||
<version>2.3.2</version>
|
|
||||||
</plugin>
|
|
||||||
<plugin>
|
|
||||||
<artifactId>maven-clean-plugin</artifactId>
|
|
||||||
<version>2.4.1</version>
|
|
||||||
</plugin>
|
|
||||||
<plugin>
|
|
||||||
<groupId>com.atlassian.maven.plugins</groupId>
|
|
||||||
<artifactId>maven-clover2-plugin</artifactId>
|
|
||||||
<version>3.0.2</version>
|
|
||||||
<configuration>
|
|
||||||
<licenseLocation>/home/y/conf/clover/clover.license</licenseLocation>
|
|
||||||
</configuration>
|
|
||||||
</plugin>
|
|
||||||
<plugin>
|
|
||||||
<groupId>org.apache.maven.plugins</groupId>
|
|
||||||
<artifactId>maven-compiler-plugin</artifactId>
|
|
||||||
<!-- pre 2.1 ignores project.build.sourceEncoding -->
|
|
||||||
<version>2.3.2</version>
|
|
||||||
<configuration>
|
|
||||||
<source>1.6</source>
|
|
||||||
<target>1.6</target>
|
|
||||||
</configuration>
|
|
||||||
</plugin>
|
|
||||||
<plugin>
|
|
||||||
<groupId>org.apache.maven.plugins</groupId>
|
|
||||||
<artifactId>maven-assembly-plugin</artifactId>
|
|
||||||
<version>2.2.1</version>
|
|
||||||
</plugin>
|
|
||||||
<plugin>
|
|
||||||
<groupId>org.apache.maven.plugins</groupId>
|
|
||||||
<artifactId>maven-antrun-plugin</artifactId>
|
|
||||||
<version>1.6</version>
|
|
||||||
</plugin>
|
|
||||||
<plugin>
|
|
||||||
<groupId>org.codehaus.mojo</groupId>
|
|
||||||
<artifactId>exec-maven-plugin</artifactId>
|
|
||||||
<version>1.2</version>
|
|
||||||
</plugin>
|
|
||||||
<plugin>
|
|
||||||
<groupId>org.codehaus.mojo</groupId>
|
|
||||||
<artifactId>build-helper-maven-plugin</artifactId>
|
|
||||||
<version>1.5</version>
|
|
||||||
</plugin>
|
|
||||||
<plugin>
|
|
||||||
<groupId>org.apache.maven.plugins</groupId>
|
|
||||||
<artifactId>maven-install-plugin</artifactId>
|
|
||||||
<version>2.3.1</version>
|
|
||||||
</plugin>
|
|
||||||
<plugin>
|
|
||||||
<groupId>org.apache.maven.plugins</groupId>
|
|
||||||
<artifactId>maven-jar-plugin</artifactId>
|
|
||||||
<version>2.3.1</version>
|
|
||||||
</plugin>
|
|
||||||
<plugin>
|
|
||||||
<groupId>org.apache.maven.plugins</groupId>
|
|
||||||
<artifactId>maven-source-plugin</artifactId>
|
|
||||||
<version>2.1.2</version>
|
|
||||||
</plugin>
|
|
||||||
</plugins>
|
|
||||||
</pluginManagement>
|
|
||||||
<plugins>
|
<plugins>
|
||||||
<plugin>
|
<plugin>
|
||||||
<artifactId>maven-antrun-plugin</artifactId>
|
<artifactId>maven-antrun-plugin</artifactId>
|
||||||
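
Note: the version removals above all follow the same Maven pattern — once the parent POM pins an artifact under <dependencyManagement>, child modules may omit <version> and inherit the managed one. A minimal sketch of the two halves (the module name child-module is hypothetical; the slf4j coordinates mirror the hunk above):

    <!-- parent POM: pin the version once -->
    <dependencyManagement>
      <dependencies>
        <dependency>
          <groupId>org.slf4j</groupId>
          <artifactId>slf4j-api</artifactId>
          <version>1.6.1</version>
        </dependency>
      </dependencies>
    </dependencyManagement>

    <!-- child-module POM: no <version>; Maven resolves 1.6.1 from the managed entry -->
    <dependencies>
      <dependency>
        <groupId>org.slf4j</groupId>
        <artifactId>slf4j-api</artifactId>
      </dependency>
    </dependencies>

Upgrading slf4j then means touching one line in the parent rather than every module POM.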

@@ -213,7 +213,7 @@
           <artifactItem>
             <groupId>org.apache.hadoop</groupId>
             <artifactId>hadoop-annotations</artifactId>
-            <version>${hadoop.annotations.version}</version>
+            <version>${project.version}</version>
             <overWrite>false</overWrite>
             <outputDirectory>${project.build.directory}</outputDirectory>
             <destFileName>hadoop-annotations.jar</destFileName>

@@ -39,7 +39,6 @@

     <!-- These 2 versions are defined here becuase they are used -->
     <!-- JDIFF generation from embedded ant in the antrun plugin -->
-    <hadoop.annotations.version>${project.version}</hadoop.annotations.version>
     <jdiff.version>1.0.9</jdiff.version>

     <hadoop.assemblies.version>${project.version}</hadoop.assemblies.version>
@@ -65,7 +64,7 @@
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-annotations</artifactId>
-      <version>${hadoop.annotations.version}</version>
+      <version>${project.version}</version>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
@@ -99,6 +98,17 @@
       <artifactId>hadoop-mapreduce-client-app</artifactId>
       <version>${project.version}</version>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-app</artifactId>
+      <version>${project.version}</version>
+      <type>test-jar</type>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-yarn-api</artifactId>
@@ -117,6 +127,37 @@
       <version>${project.version}</version>
     </dependency>

+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-mapreduce-client-shuffle</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-yarn</artifactId>
+      <version>${project.version}</version>
+      <type>pom</type>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-yarn-server</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-yarn-server-web-proxy</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-yarn-server-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-yarn-server-tests</artifactId>
@@ -124,6 +165,35 @@
       <type>test-jar</type>
     </dependency>

+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-yarn-common</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-yarn-common</artifactId>
+      <version>${project.version}</version>
+      <type>test-jar</type>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-yarn-server-nodemanager</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-yarn-server-resourcemanager</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-yarn-server-resourcemanager</artifactId>
+      <version>${project.version}</version>
+      <type>test-jar</type>
+    </dependency>
+
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
@@ -143,6 +213,17 @@
       <version>${project.version}</version>
     </dependency>

+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-streaming</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-archives</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+
     <dependency>
       <groupId>com.google.guava</groupId>
       <artifactId>guava</artifactId>
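
With the MapReduce and YARN artifacts above entered into <dependencyManagement>, a consuming module needs only coordinates; a test-jar must repeat <type>test-jar</type>, since managed entries are matched on groupId:artifactId:type. A minimal sketch, assuming a hypothetical module that uses the YARN common classes and their test utilities:

    <!-- hypothetical consumer module: versions come from the managed entries -->
    <dependencies>
      <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-yarn-common</artifactId>
      </dependency>
      <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-yarn-common</artifactId>
        <type>test-jar</type>
        <scope>test</scope>
      </dependency>
    </dependencies>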
@@ -214,6 +295,12 @@
       <groupId>com.sun.jersey</groupId>
       <artifactId>jersey-json</artifactId>
       <version>1.8</version>
+      <exclusions>
+        <exclusion>
+          <groupId>javax.xml.stream</groupId>
+          <artifactId>stax-api</artifactId>
+        </exclusion>
+      </exclusions>
     </dependency>
     <dependency>
       <groupId>com.sun.jersey</groupId>
@@ -221,6 +308,48 @@
       <version>1.8</version>
     </dependency>

+    <dependency>
+      <groupId>com.google.inject</groupId>
+      <artifactId>guice</artifactId>
+      <version>3.0</version>
+    </dependency>
+
+    <dependency>
+      <groupId>com.google.inject.extensions</groupId>
+      <artifactId>guice-servlet</artifactId>
+      <version>3.0</version>
+    </dependency>
+
+    <dependency>
+      <groupId>com.sun.jersey.contribs</groupId>
+      <artifactId>jersey-guice</artifactId>
+      <version>1.8</version>
+    </dependency>
+
+    <dependency>
+      <groupId>com.sun.jersey.jersey-test-framework</groupId>
+      <artifactId>jersey-test-framework-core</artifactId>
+      <version>1.8</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>com.sun.jersey.jersey-test-framework</groupId>
+      <artifactId>jersey-test-framework-grizzly2</artifactId>
+      <version>1.8</version>
+    </dependency>
+
+    <dependency>
+      <groupId>org.jboss.netty</groupId>
+      <artifactId>netty</artifactId>
+      <version>3.2.3.Final</version>
+    </dependency>
+
+    <dependency>
+      <groupId>commons-io</groupId>
+      <artifactId>commons-io</artifactId>
+      <version>2.1</version>
+    </dependency>
+
     <dependency>
       <groupId>org.mortbay.jetty</groupId>
       <artifactId>jetty-servlet-tester</artifactId>
@@ -335,7 +464,7 @@
     <dependency>
       <groupId>junit</groupId>
       <artifactId>junit</artifactId>
-      <version>4.8.1</version>
+      <version>4.8.2</version>
     </dependency>
     <dependency>
       <groupId>commons-lang</groupId>
@@ -360,12 +489,12 @@
     <dependency>
       <groupId>org.slf4j</groupId>
       <artifactId>slf4j-api</artifactId>
-      <version>1.5.11</version>
+      <version>1.6.1</version>
     </dependency>
     <dependency>
       <groupId>org.slf4j</groupId>
       <artifactId>slf4j-log4j12</artifactId>
-      <version>1.5.11</version>
+      <version>1.6.1</version>
     </dependency>
     <dependency>
       <groupId>org.eclipse.jdt</groupId>
@@ -437,16 +566,58 @@
       <artifactId>json-simple</artifactId>
       <version>1.1</version>
     </dependency>
+
+    <dependency>
+      <groupId>com.cenqua.clover</groupId>
+      <artifactId>clover</artifactId>
+      <version>3.0.2</version>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.zookeeper</groupId>
+      <artifactId>zookeeper</artifactId>
+      <version>3.4.2</version>
+      <exclusions>
+        <exclusion>
+          <!-- otherwise seems to drag in junit 3.8.1 via jline -->
+          <groupId>junit</groupId>
+          <artifactId>junit</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>com.sun.jdmk</groupId>
+          <artifactId>jmxtools</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>com.sun.jmx</groupId>
+          <artifactId>jmxri</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.bookkeeper</groupId>
+      <artifactId>bookkeeper-server</artifactId>
+      <version>4.0.0</version>
+      <scope>compile</scope>
+    </dependency>
+
   </dependencies>
 </dependencyManagement>

 <build>
   <pluginManagement>
     <plugins>
+      <plugin>
+        <artifactId>maven-clean-plugin</artifactId>
+        <version>2.4.1</version>
+      </plugin>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-compiler-plugin</artifactId>
         <version>2.3.2</version>
+        <configuration>
+          <source>1.6</source>
+          <target>1.6</target>
+        </configuration>
       </plugin>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
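
A side benefit of placing the zookeeper exclusions in <dependencyManagement>: Maven applies managed exclusions to every inheriting module, so consumers get ZooKeeper 3.4.2 without jline's transitive junit 3.8.1 (which could otherwise shadow the junit 4.8.2 pinned earlier) and without repeating the exclusion list. A sketch of a hypothetical consumer:

    <!-- hypothetical consumer module: inherits the version and all three
         managed exclusions from the entry above -->
    <dependency>
      <groupId>org.apache.zookeeper</groupId>
      <artifactId>zookeeper</artifactId>
      <scope>test</scope>
    </dependency>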
@@ -463,6 +634,11 @@
       <artifactId>maven-surefire-plugin</artifactId>
       <version>2.10</version>
     </plugin>
+    <plugin>
+      <groupId>org.apache.maven.plugins</groupId>
+      <artifactId>maven-install-plugin</artifactId>
+      <version>2.3.1</version>
+    </plugin>
     <plugin>
       <groupId>org.apache.maven.plugins</groupId>
       <artifactId>maven-jar-plugin</artifactId>
@@ -471,18 +647,13 @@
     <plugin>
       <groupId>org.apache.maven.plugins</groupId>
       <artifactId>maven-assembly-plugin</artifactId>
-      <version>2.2-beta-3</version>
+      <version>2.2.1</version>
     </plugin>
     <plugin>
       <groupId>org.apache.maven.plugins</groupId>
       <artifactId>maven-javadoc-plugin</artifactId>
       <version>2.7</version>
     </plugin>
-    <plugin>
-      <groupId>org.apache.maven.plugins</groupId>
-      <artifactId>maven-antrun-plugin</artifactId>
-      <version>1.6</version>
-    </plugin>
     <plugin>
       <groupId>org.apache.maven.plugins</groupId>
       <artifactId>maven-war-plugin</artifactId>
@@ -528,11 +699,6 @@
       <artifactId>jspc-maven-plugin</artifactId>
       <version>2.0-alpha-3</version>
     </plugin>
-    <plugin>
-      <groupId>org.apache.maven.plugins</groupId>
-      <artifactId>maven-site-plugin</artifactId>
-      <version>3.0</version>
-    </plugin>
     <plugin>
       <groupId>org.apache.maven.plugins</groupId>
       <artifactId>maven-project-info-reports-plugin</artifactId>
@@ -566,14 +732,6 @@
         </execution>
       </executions>
     </plugin>
-    <plugin>
-      <groupId>org.apache.maven.plugins</groupId>
-      <artifactId>maven-compiler-plugin</artifactId>
-      <configuration>
-        <source>1.6</source>
-        <target>1.6</target>
-      </configuration>
-    </plugin>
     <plugin>
       <groupId>org.apache.maven.plugins</groupId>
       <artifactId>maven-surefire-plugin</artifactId>

@@ -0,0 +1,68 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License. See accompanying LICENSE file.
+-->
+<project>
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.apache.hadoop</groupId>
+    <artifactId>hadoop-project-dist</artifactId>
+    <version>0.24.0-SNAPSHOT</version>
+    <relativePath>../../hadoop-project-dist</relativePath>
+  </parent>
+  <groupId>org.apache.hadoop</groupId>
+  <artifactId>hadoop-tools-dist</artifactId>
+  <version>0.24.0-SNAPSHOT</version>
+  <description>Apache Hadoop Tools Dist</description>
+  <name>Apache Hadoop Tools Dist</name>
+  <packaging>jar</packaging>
+
+  <properties>
+    <hadoop.component>tools</hadoop.component>
+    <is.hadoop.component>false</is.hadoop.component>
+  </properties>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-streaming</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-archives</artifactId>
+      <scope>compile</scope>
+    </dependency>
+  </dependencies>
+
+  <build>
+    <plugins>
+      <plugin>
+        <artifactId>maven-deploy-plugin</artifactId>
+        <configuration>
+          <skip>true</skip>
+        </configuration>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.rat</groupId>
+        <artifactId>apache-rat-plugin</artifactId>
+        <configuration>
+          <includes>
+            <include>pom.xml</include>
+          </includes>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+
+</project>

@@ -30,6 +30,7 @@
   <modules>
     <module>hadoop-streaming</module>
    <module>hadoop-archives</module>
+    <module>hadoop-tools-dist</module>
   </modules>

   <build>
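
The new hadoop-tools-dist module is a thin aggregator: each tool is declared as a compile-scope dependency (with versions coming from the managed entries added earlier) and the module is registered in the hadoop-tools reactor. A future tool would presumably be wired in the same two places; hadoop-sometool below is a hypothetical artifactId:

    <!-- hadoop-tools-dist/pom.xml: hadoop-sometool is hypothetical -->
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-sometool</artifactId>
      <scope>compile</scope>
    </dependency>

    <!-- hadoop-tools/pom.xml: register it in the reactor -->
    <module>hadoop-sometool</module>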

pom.xml
@@ -108,9 +108,6 @@
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-site-plugin</artifactId>
         <version>3.0</version>
-        <configuration>
-          <generateReports>true</generateReports>
-        </configuration>
       </plugin>
     </plugins>
   </pluginManagement>
@@ -164,7 +161,7 @@
         </includes>
       </configuration>
     </plugin>
     <plugin>
       <artifactId>maven-site-plugin</artifactId>
       <version>3.0</version>
       <executions>
@@ -173,6 +170,9 @@
           <goals>
             <goal>attach-descriptor</goal>
           </goals>
+          <configuration>
+            <generateReports>true</generateReports>
+          </configuration>
         </execution>
       </executions>
     </plugin>
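
Net effect of these last two hunks: generateReports moves from a plugin-wide <configuration> under <pluginManagement> to an execution-scoped one, so it now applies only to the attach-descriptor execution. The resulting plugin definition, sketched under the assumption that no other executions are configured:

    <plugin>
      <artifactId>maven-site-plugin</artifactId>
      <version>3.0</version>
      <executions>
        <execution>
          <goals>
            <goal>attach-descriptor</goal>
          </goals>
          <!-- execution-scoped: applies only to this execution -->
          <configuration>
            <generateReports>true</generateReports>
          </configuration>
        </execution>
      </executions>
    </plugin>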