diff --git a/hadoop-assemblies/pom.xml b/hadoop-assemblies/pom.xml
index d69870b2ddf..c33fbce5b6e 100644
--- a/hadoop-assemblies/pom.xml
+++ b/hadoop-assemblies/pom.xml
@@ -34,26 +34,6 @@
-    <pluginManagement>
-      <plugins>
-        <plugin>
-          <groupId>org.apache.maven.plugins</groupId>
-          <artifactId>maven-enforcer-plugin</artifactId>
-          <version>1.0</version>
-        </plugin>
-        <plugin>
-          <groupId>org.apache.maven.plugins</groupId>
-          <artifactId>maven-assembly-plugin</artifactId>
-          <version>2.2-beta-3</version>
-        </plugin>
-        <plugin>
-          <groupId>org.apache.rat</groupId>
-          <artifactId>apache-rat-plugin</artifactId>
-          <version>0.7</version>
-        </plugin>
-      </plugins>
-    </pluginManagement>
-
     <plugins>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
diff --git a/hadoop-assemblies/src/main/resources/assemblies/hadoop-dist.xml b/hadoop-assemblies/src/main/resources/assemblies/hadoop-dist.xml
index 788ba4a8f9c..4762861e639 100644
--- a/hadoop-assemblies/src/main/resources/assemblies/hadoop-dist.xml
+++ b/hadoop-assemblies/src/main/resources/assemblies/hadoop-dist.xml
@@ -94,6 +94,9 @@
         <include>${project.artifactId}-${project.version}-sources.jar</include>
         <include>${project.artifactId}-${project.version}-test-sources.jar</include>
       </includes>
+      <excludes>
+        <exclude>hadoop-tools-dist-*.jar</exclude>
+      </excludes>
     </fileSet>
     <fileSet>
       <directory>${basedir}/dev-support/jdiff</directory>
diff --git a/hadoop-assemblies/src/main/resources/assemblies/hadoop-httpfs-dist.xml b/hadoop-assemblies/src/main/resources/assemblies/hadoop-httpfs-dist.xml
index 79bad491229..6468a8ae65c 100644
--- a/hadoop-assemblies/src/main/resources/assemblies/hadoop-httpfs-dist.xml
+++ b/hadoop-assemblies/src/main/resources/assemblies/hadoop-httpfs-dist.xml
@@ -27,14 +27,6 @@
         <include>*</include>
       </includes>
     </fileSet>
-    <fileSet>
-      <directory>${basedir}</directory>
-      <outputDirectory>/</outputDirectory>
-      <includes>
-        <include>*.txt</include>
-      </includes>
-    </fileSet>
     <fileSet>
       <directory>${basedir}/src/main/sbin</directory>
       <outputDirectory>/sbin</outputDirectory>
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index 4764b9db94d..443176aeb46 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -46,16 +46,9 @@ Trunk (unreleased changes)
   if the override value is the same as the final parameter value.
(Ravi Prakash via suresh)
- HADOOP-7737. normalize hadoop-mapreduce & hadoop-dist dist/tar build with
- common/hdfs. (tucu)
-
- HADOOP-7743. Add Maven profile to create a full source tarball. (tucu)
-
HADOOP-7729. Send back valid HTTP response if user hits IPC port with
HTTP GET. (todd)
- HADOOP-7758. Make GlobFilter class public. (tucu)
-
HADOOP-7728. Enable task memory management to be configurable in hadoop
config setup script. (ramya)
@@ -67,9 +60,7 @@ Trunk (unreleased changes)
HADOOP-7688. Add servlet handler check in HttpServer.start().
(Uma Maheswara Rao G via szetszwo)
- HADOOP-7590. Mavenize streaming and MR examples. (tucu)
-
- HADOOP-7862. Move the support for multiple protocols to lower layer so
+ HADOOP-7862. Move the support for multiple protocols to lower layer so
that Writable, PB and Avro can all use it (Sanjay)
HADOOP-7876. Provided access to encoded key in DelegationKey for
@@ -91,7 +82,11 @@ Trunk (unreleased changes)
HADOOP-7808. Port HADOOP-7510 - Add configurable option to use original
hostname in token instead of IP to allow server IP change.
(Daryn Sharp via suresh)
-
+
+ HADOOP-7957. Classes deriving GetGroupsBase should be able to override
+ proxy creation. (jitendra)
+
+ HADOOP-4515. Configuration#getBoolean must not be case sensitive. (Sho Shimauchi via harsh)
BUGS
@@ -132,29 +127,14 @@ Trunk (unreleased changes)
HADOOP-7833. Fix findbugs warnings in protobuf generated code.
(John Lee via suresh)
- HADOOP-7853. multiple javax security configurations cause conflicts.
- (daryn via tucu)
-
- HDFS-2614. hadoop dist tarball is missing hdfs headers. (tucu)
-
- HADOOP-7874. native libs should be under lib/native/ dir. (tucu)
-
- HADOOP-7887. KerberosAuthenticatorHandler is not setting
- KerberosName name rules from configuration. (tucu)
-
HADOOP-7888. TestFailoverProxy fails intermittently on trunk. (Jason Lowe
via atm)
HADOOP-7897. ProtobufRpcEngine client side exception mechanism is not
consistent with WritableRpcEngine. (suresh)
- HADOOP-7902. skipping name rules setting (if already set) should be done
- on UGI initialization only. (tucu)
-
HADOOP-7913 Fix bug in ProtoBufRpcEngine (sanjay)
- HADOOP-7810. move hadoop archive to core from tools. (tucu)
-
HADOOP-7892. IPC logs too verbose after "RpcKind" introduction (todd)
HADOOP-7931. o.a.h.ipc.WritableRpcEngine should have a way to force
@@ -164,8 +144,6 @@ Trunk (unreleased changes)
HADOOP-7761. Improve the performance of raw comparisons. (todd)
- HADOOP-7917. compilation of protobuf files fails in windows/cygwin. (tucu)
-
Release 0.23.1 - Unreleased
INCOMPATIBLE CHANGES
@@ -213,6 +191,17 @@ Release 0.23.1 - Unreleased
HADOOP-7933. Add a getDelegationTokens api to FileSystem which checks
for known tokens in the passed Credentials object. (sseth)
+ HADOOP-7737. normalize hadoop-mapreduce & hadoop-dist dist/tar build with
+ common/hdfs. (tucu)
+
+ HADOOP-7743. Add Maven profile to create a full source tarball. (tucu)
+
+ HADOOP-7758. Make GlobFilter class public. (tucu)
+
+ HADOOP-7590. Mavenize streaming and MR examples. (tucu)
+
+ HADOOP-7934. Normalize dependencies versions across all modules. (tucu)
+
OPTIMIZATIONS
BUG FIXES
@@ -252,6 +241,27 @@ Release 0.23.1 - Unreleased
HADOOP-7949. Updated maxIdleTime default in the code to match
core-default.xml (eli)
+ HADOOP-7853. multiple javax security configurations cause conflicts.
+ (daryn via tucu)
+
+ HDFS-2614. hadoop dist tarball is missing hdfs headers. (tucu)
+
+ HADOOP-7874. native libs should be under lib/native/ dir. (tucu)
+
+ HADOOP-7887. KerberosAuthenticatorHandler is not setting
+ KerberosName name rules from configuration. (tucu)
+
+ HADOOP-7902. skipping name rules setting (if already set) should be done
+ on UGI initialization only. (tucu)
+
+ HADOOP-7810. move hadoop archive to core from tools. (tucu)
+
+ HADOOP-7917. compilation of protobuf files fails in windows/cygwin. (tucu)
+
+ HADOOP-7907. hadoop-tools JARs are not part of the distro. (tucu)
+
+ HADOOP-7936. There's a Hoop README in the root dir of the tarball. (tucu)
+
Release 0.23.0 - 2011-11-01
INCOMPATIBLE CHANGES
@@ -973,6 +983,9 @@ Release 0.22.1 - Unreleased
BUG FIXES
+ HADOOP-7937. Forward port SequenceFile#syncFs and friends from Hadoop 1.x.
+ (tomwhite)
+
Release 0.22.0 - 2011-11-29
INCOMPATIBLE CHANGES
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
index 41514a3f36c..79e0793253c 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
@@ -826,6 +826,12 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
*/
public boolean getBoolean(String name, boolean defaultValue) {
String valueString = getTrimmed(name);
+ if (null == valueString || "".equals(valueString)) {
+ return defaultValue;
+ }
+
+ valueString = valueString.toLowerCase();
+
if ("true".equals(valueString))
return true;
else if ("false".equals(valueString))
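
A quick illustration of the HADOOP-4515 behavior added above (the property
names are made up for the example): getBoolean now treats an empty value like
an unset one and matches "TRUE"/"FALSE" regardless of case.

    import org.apache.hadoop.conf.Configuration;

    Configuration conf = new Configuration();
    conf.set("example.flag", "TRUE");   // hypothetical property name
    conf.set("example.empty", "");
    conf.getBoolean("example.flag", false);  // true: matching is now case-insensitive
    conf.getBoolean("example.empty", true);  // true: empty value falls back to the default
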
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java
index ce3030e660e..a64bd1bf9e2 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java
@@ -1193,6 +1193,13 @@ public class SequenceFile {
}
}
+ /** flush all currently written data to the file system */
+ public void syncFs() throws IOException {
+ if (out != null) {
+ out.sync(); // flush contents to file system
+ }
+ }
+
/** Returns the configuration of this file. */
Configuration getConf() { return conf; }
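
A minimal usage sketch for the forward-ported Writer#syncFs() (HADOOP-7937);
the path and key/value types are illustrative, not part of the patch.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.SequenceFile;
    import org.apache.hadoop.io.Text;

    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);
    SequenceFile.Writer writer = SequenceFile.createWriter(fs, conf,
        new Path("/tmp/example.seq"), LongWritable.class, Text.class);
    writer.append(new LongWritable(1L), new Text("record"));
    writer.syncFs();   // flush buffered records to the file system
    writer.close();
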
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/GetGroupsBase.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/GetGroupsBase.java
index 4d627cbb5f8..da7830de6b5 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/GetGroupsBase.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/GetGroupsBase.java
@@ -94,7 +94,7 @@ public abstract class GetGroupsBase extends Configured implements Tool {
* @return A {@link GetUserMappingsProtocol} client proxy.
* @throws IOException
*/
- private GetUserMappingsProtocol getUgmProtocol() throws IOException {
+ protected GetUserMappingsProtocol getUgmProtocol() throws IOException {
GetUserMappingsProtocol userGroupMappingProtocol =
RPC.getProxy(GetUserMappingsProtocol.class,
GetUserMappingsProtocol.versionID,
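
Widening getUgmProtocol() to protected lets tools deriving GetGroupsBase
substitute their own proxy; the HDFS GetGroups change later in this patch does
exactly that. A hypothetical subclass sketch (the class name is illustrative,
the translator is the one added elsewhere in this patch):

    import java.io.IOException;
    import java.net.InetSocketAddress;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdfs.protocolPB.GetUserMappingsProtocolClientSideTranslatorPB;
    import org.apache.hadoop.hdfs.server.namenode.NameNode;
    import org.apache.hadoop.security.UserGroupInformation;
    import org.apache.hadoop.tools.GetGroupsBase;
    import org.apache.hadoop.tools.GetUserMappingsProtocol;

    public class PbGetGroups extends GetGroupsBase {
      protected PbGetGroups(Configuration conf) {
        super(conf);
      }

      @Override
      protected InetSocketAddress getProtocolAddress(Configuration conf)
          throws IOException {
        return NameNode.getAddress(conf);
      }

      @Override
      protected GetUserMappingsProtocol getUgmProtocol() throws IOException {
        // Replace the default WritableRpcEngine proxy with a PB translator.
        return new GetUserMappingsProtocolClientSideTranslatorPB(
            getProtocolAddress(getConf()), UserGroupInformation.getCurrentUser(),
            getConf());
      }
    }
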
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java
index 24d3a691f31..bbe82914b21 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java
@@ -451,6 +451,9 @@ public class TestConfiguration extends TestCase {
appendProperty("test.bool3", " true ");
appendProperty("test.bool4", " false ");
appendProperty("test.bool5", "foo");
+ appendProperty("test.bool6", "TRUE");
+ appendProperty("test.bool7", "FALSE");
+ appendProperty("test.bool8", "");
endConfig();
Path fileResource = new Path(CONFIG);
conf.addResource(fileResource);
@@ -459,6 +462,9 @@ public class TestConfiguration extends TestCase {
assertEquals(true, conf.getBoolean("test.bool3", false));
assertEquals(false, conf.getBoolean("test.bool4", true));
assertEquals(true, conf.getBoolean("test.bool5", true));
+ assertEquals(true, conf.getBoolean("test.bool6", false));
+ assertEquals(false, conf.getBoolean("test.bool7", true));
+ assertEquals(false, conf.getBoolean("test.bool8", false));
}
public void testFloatValues() throws IOException {
diff --git a/hadoop-dist/pom.xml b/hadoop-dist/pom.xml
index 888d5333270..de47700c993 100644
--- a/hadoop-dist/pom.xml
+++ b/hadoop-dist/pom.xml
@@ -118,6 +118,7 @@
run cp -r $ROOT/hadoop-hdfs-project/hadoop-hdfs/target/hadoop-hdfs-${project.version}/* .
run cp -r $ROOT/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/hadoop-hdfs-httpfs-${project.version}/* .
run cp -r $ROOT/hadoop-mapreduce-project/target/hadoop-mapreduce-${project.version}/* .
+ run cp -r $ROOT/hadoop-tools/hadoop-tools-dist/target/hadoop-tools-dist-${project.version}/* .
echo
echo "Hadoop dist layout available at: ${project.build.directory}/hadoop-${project.version}"
echo
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml b/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml
index 956540dcf5d..28abf116246 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/pom.xml
@@ -270,11 +270,11 @@
-
-          <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-eclipse-plugin</artifactId>
-            <version>2.6</version>
+
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-eclipse-plugin</artifactId>
+        <version>2.6</version>
       </plugin>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 0f274bd1652..f45b7fa1cc9 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -27,9 +27,6 @@ Trunk (unreleased changes)
HDFS-2430. The number of failed or low-resource volumes the NN can tolerate
should be configurable. (atm)
- HDFS-2178. Contributing Hoop to HDFS, replacement for HDFS proxy with
- read/write capabilities. (tucu)
-
HDFS-2642. Protobuf translators for DatanodeProtocol. (jitendra)
HDFS-2647. Used protobuf based RPC for InterDatanodeProtocol,
@@ -45,6 +42,9 @@ Trunk (unreleased changes)
HDFS-2661. Enable protobuf RPC for DatanodeProtocol. (jitendra)
+ HDFS-2697. Move RefreshAuthPolicy, RefreshUserMappings, GetUserMappings
+ protocol to protocol buffers. (jitendra)
+
IMPROVEMENTS
   HADOOP-7524 Change RPC to allow multiple protocols including multiple
@@ -76,11 +76,6 @@ Trunk (unreleased changes)
HDFS-2181 Separate HDFS Client wire protocol data types (sanjay)
- HDFS-2294. Download of commons-daemon TAR should not be under target (tucu)
-
- HDFS-2322. the build fails in Windows because commons-daemon TAR cannot be
- fetched. (tucu)
-
HDFS-2489. Move Finalize and Register to separate file out of
DatanodeCommand.java. (suresh)
@@ -109,8 +104,6 @@ Trunk (unreleased changes)
HDFS-2597 ClientNameNodeProtocol in Protocol Buffers (sanjay)
- HDFS-2511. Add dev script to generate HDFS protobufs. (tucu)
-
HDFS-2651 ClientNameNodeProtocol Translators for Protocol Buffers (sanjay)
HDFS-2650. Replace @inheritDoc with @Override. (Hari Mankude via suresh).
@@ -172,9 +165,6 @@ Trunk (unreleased changes)
HDFS-2532. TestDfsOverAvroRpc timing out in trunk (Uma Maheswara Rao G
via todd)
- HDFS-2606. webhdfs client filesystem impl must set the content-type
- header for create/append. (tucu)
-
HDFS-1765. Block Replication should respect under-replication
block priority. (Uma Maheswara Rao G via eli)
@@ -186,19 +176,6 @@ Trunk (unreleased changes)
HDFS-2700. Fix failing TestDataNodeMultipleRegistrations in trunk
(Uma Maheswara Rao G via todd)
- HDFS-2658. HttpFS introduced 70 javadoc warnings. (tucu)
-
- HDFS-2646. Hadoop HttpFS introduced 4 findbug warnings. (tucu)
-
- HDFS-2657. TestHttpFSServer and TestServerWebApp are failing on trunk.
- (tucu)
-
- HttpFS server should check that upload requests have correct
- content-type. (tucu)
-
- HDFS-2707. HttpFS should read the hadoop-auth secret from a file
- instead inline from the configuration. (tucu)
-
Release 0.23.1 - UNRELEASED
INCOMPATIBLE CHANGES
@@ -214,6 +191,9 @@ Release 0.23.1 - UNRELEASED
HDFS-2545. Change WebHDFS to support multiple namenodes in federation.
(szetszwo)
+ HDFS-2178. Contributing Hoop to HDFS, replacement for HDFS proxy with
+ read/write capabilities. (tucu)
+
IMPROVEMENTS
HDFS-2560. Refactor BPOfferService to be a static inner class (todd)
@@ -265,6 +245,13 @@ Release 0.23.1 - UNRELEASED
HDFS-2710. Add HDFS tests related to HADOOP-7933. (sid via suresh)
+ HDFS-2294. Download of commons-daemon TAR should not be under target (tucu)
+
+ HDFS-2322. the build fails in Windows because commons-daemon TAR cannot be
+ fetched. (tucu)
+
+ HDFS-2511. Add dev script to generate HDFS protobufs. (tucu)
+
OPTIMIZATIONS
HDFS-2130. Switch default checksum to CRC32C. (todd)
@@ -312,6 +299,22 @@ Release 0.23.1 - UNRELEASED
HDFS-2706. Use configuration for blockInvalidateLimit if it is set.
(szetszwo)
+ HDFS-2606. webhdfs client filesystem impl must set the content-type
+ header for create/append. (tucu)
+
+ HDFS-2658. HttpFS introduced 70 javadoc warnings. (tucu)
+
+ HDFS-2646. Hadoop HttpFS introduced 4 findbug warnings. (tucu)
+
+ HDFS-2657. TestHttpFSServer and TestServerWebApp are failing on trunk.
+ (tucu)
+
+ HttpFS server should check that upload requests have correct
+ content-type. (tucu)
+
+ HDFS-2707. HttpFS should read the hadoop-auth secret from a file
+ instead inline from the configuration. (tucu)
+
Release 0.23.0 - 2011-11-01
INCOMPATIBLE CHANGES
diff --git a/hadoop-hdfs-project/hadoop-hdfs/pom.xml b/hadoop-hdfs-project/hadoop-hdfs/pom.xml
index a6fbee13c60..f0971d6dc53 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/pom.xml
+++ b/hadoop-hdfs-project/hadoop-hdfs/pom.xml
@@ -322,6 +322,49 @@
+          <execution>
+            <id>xprepare-package-hadoop-daemon</id>
+            <phase>prepare-package</phase>
+            <goals>
+              <goal>run</goal>
+            </goals>
+            <configuration>
+              <target>
+                <!-- ... -->
+              </target>
+            </configuration>
+          </execution>
@@ -360,55 +403,6 @@
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-antrun-plugin</artifactId>
-        <executions>
-          <execution>
-            <id>xprepare-package-hadoop-daemon</id>
-            <phase>prepare-package</phase>
-            <goals>
-              <goal>run</goal>
-            </goals>
-            <configuration>
-              <target>
-                <!-- ... -->
-              </target>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/contrib/bkjournal/pom.xml b/hadoop-hdfs-project/hadoop-hdfs/src/contrib/bkjournal/pom.xml
index f908347f0a1..1fba8466ed2 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/contrib/bkjournal/pom.xml
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/contrib/bkjournal/pom.xml
@@ -41,27 +41,28 @@
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-common</artifactId>
-      <version>0.24.0-SNAPSHOT</version>
       <scope>provided</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-hdfs</artifactId>
-      <version>0.24.0-SNAPSHOT</version>
       <scope>provided</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-hdfs</artifactId>
-      <version>0.24.0-SNAPSHOT</version>
       <type>test-jar</type>
       <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.bookkeeper</groupId>
       <artifactId>bookkeeper-server</artifactId>
-      <version>4.0.0</version>
       <scope>compile</scope>
     </dependency>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <scope>test</scope>
+    </dependency>
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/GetUserMappingsProtocolClientSideTranslatorPB.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/GetUserMappingsProtocolClientSideTranslatorPB.java
new file mode 100644
index 00000000000..d34d2ffeb8d
--- /dev/null
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/GetUserMappingsProtocolClientSideTranslatorPB.java
@@ -0,0 +1,89 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdfs.protocolPB;
+
+import java.io.Closeable;
+import java.io.IOException;
+import java.net.InetSocketAddress;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdfs.protocol.proto.GetUserMappingsProtocolProtos.GetGroupsForUserRequestProto;
+import org.apache.hadoop.hdfs.protocol.proto.GetUserMappingsProtocolProtos.GetGroupsForUserResponseProto;
+import org.apache.hadoop.hdfs.protocolR23Compatible.ProtocolSignatureWritable;
+import org.apache.hadoop.hdfs.server.namenode.NameNode;
+import org.apache.hadoop.ipc.ProtobufHelper;
+import org.apache.hadoop.ipc.ProtobufRpcEngine;
+import org.apache.hadoop.ipc.ProtocolSignature;
+import org.apache.hadoop.ipc.RPC;
+import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.tools.GetUserMappingsProtocol;
+
+import com.google.protobuf.RpcController;
+import com.google.protobuf.ServiceException;
+
+public class GetUserMappingsProtocolClientSideTranslatorPB implements
+ GetUserMappingsProtocol, Closeable {
+
+ /** RpcController is not used and hence is set to null */
+ private final static RpcController NULL_CONTROLLER = null;
+ private final GetUserMappingsProtocolPB rpcProxy;
+
+ public GetUserMappingsProtocolClientSideTranslatorPB(
+ InetSocketAddress nameNodeAddr, UserGroupInformation ugi,
+ Configuration conf) throws IOException {
+ RPC.setProtocolEngine(conf, GetUserMappingsProtocolPB.class,
+ ProtobufRpcEngine.class);
+ rpcProxy = RPC.getProxy(GetUserMappingsProtocolPB.class,
+ RPC.getProtocolVersion(GetUserMappingsProtocolPB.class),
+ nameNodeAddr, ugi, conf,
+ NetUtils.getSocketFactory(conf, GetUserMappingsProtocol.class));
+ }
+
+ @Override
+ public long getProtocolVersion(String protocol, long clientVersion)
+ throws IOException {
+ return rpcProxy.getProtocolVersion(protocol, clientVersion);
+ }
+
+ @Override
+ public ProtocolSignature getProtocolSignature(String protocol,
+ long clientVersion, int clientMethodsHash) throws IOException {
+ return ProtocolSignatureWritable.convert(rpcProxy.getProtocolSignature2(
+ protocol, clientVersion, clientMethodsHash));
+ }
+
+ @Override
+ public void close() throws IOException {
+ RPC.stopProxy(rpcProxy);
+ }
+
+ @Override
+ public String[] getGroupsForUser(String user) throws IOException {
+ GetGroupsForUserRequestProto request = GetGroupsForUserRequestProto
+ .newBuilder().setUser(user).build();
+ GetGroupsForUserResponseProto resp;
+ try {
+ resp = rpcProxy.getGroupsForUser(NULL_CONTROLLER, request);
+ } catch (ServiceException se) {
+ throw ProtobufHelper.getRemoteException(se);
+ }
+ return resp.getGroupsList().toArray(new String[resp.getGroupsCount()]);
+ }
+}
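
A hedged usage sketch for the translator above: because it implements the
plain GetUserMappingsProtocol interface, callers stay protocol-agnostic while
the protobuf marshalling happens underneath (the user name "alice" is
illustrative).

    import java.util.Arrays;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdfs.HdfsConfiguration;
    import org.apache.hadoop.hdfs.protocolPB.GetUserMappingsProtocolClientSideTranslatorPB;
    import org.apache.hadoop.hdfs.server.namenode.NameNode;
    import org.apache.hadoop.security.UserGroupInformation;

    Configuration conf = new HdfsConfiguration();
    GetUserMappingsProtocolClientSideTranslatorPB client =
        new GetUserMappingsProtocolClientSideTranslatorPB(
            NameNode.getAddress(conf), UserGroupInformation.getCurrentUser(), conf);
    String[] groups = client.getGroupsForUser("alice");
    System.out.println(Arrays.toString(groups));
    client.close();   // stops the underlying RPC proxy
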
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/GetUserMappingsProtocolPB.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/GetUserMappingsProtocolPB.java
new file mode 100644
index 00000000000..54eeb0a6494
--- /dev/null
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/GetUserMappingsProtocolPB.java
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdfs.protocolPB;
+
+import java.io.IOException;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.hdfs.protocol.proto.GetUserMappingsProtocolProtos.GetUserMappingsProtocolService;
+import org.apache.hadoop.hdfs.protocolR23Compatible.ProtocolSignatureWritable;
+import org.apache.hadoop.ipc.ProtocolInfo;
+import org.apache.hadoop.ipc.VersionedProtocol;
+
+@ProtocolInfo(
+ protocolName = "org.apache.hadoop.tools.GetUserMappingsProtocol",
+ protocolVersion = 1)
+@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
+@InterfaceStability.Evolving
+public interface GetUserMappingsProtocolPB extends
+ GetUserMappingsProtocolService.BlockingInterface, VersionedProtocol {
+
+ /**
+ * This method is defined to get the protocol signature using
+ * the R23 protocol - hence we have added the suffix of 2 to the method name
+ * to avoid conflict.
+ */
+ public ProtocolSignatureWritable getProtocolSignature2(String protocol,
+ long clientVersion, int clientMethodsHash) throws IOException;
+}
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/GetUserMappingsProtocolServerSideTranslatorPB.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/GetUserMappingsProtocolServerSideTranslatorPB.java
new file mode 100644
index 00000000000..4dc771dc61b
--- /dev/null
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/GetUserMappingsProtocolServerSideTranslatorPB.java
@@ -0,0 +1,96 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdfs.protocolPB;
+
+import java.io.IOException;
+
+import org.apache.hadoop.hdfs.protocol.proto.GetUserMappingsProtocolProtos.GetGroupsForUserRequestProto;
+import org.apache.hadoop.hdfs.protocol.proto.GetUserMappingsProtocolProtos.GetGroupsForUserResponseProto;
+import org.apache.hadoop.hdfs.protocolR23Compatible.ProtocolSignatureWritable;
+import org.apache.hadoop.ipc.ProtocolSignature;
+import org.apache.hadoop.ipc.RPC;
+import org.apache.hadoop.tools.GetUserMappingsProtocol;
+
+import com.google.protobuf.RpcController;
+import com.google.protobuf.ServiceException;
+
+public class GetUserMappingsProtocolServerSideTranslatorPB implements
+ GetUserMappingsProtocolPB {
+
+ private final GetUserMappingsProtocol impl;
+
+ public GetUserMappingsProtocolServerSideTranslatorPB(
+ GetUserMappingsProtocol impl) {
+ this.impl = impl;
+ }
+
+ @Override
+ public long getProtocolVersion(String protocol, long clientVersion)
+ throws IOException {
+ return RPC.getProtocolVersion(GetUserMappingsProtocolPB.class);
+ }
+
+ @Override
+ public ProtocolSignature getProtocolSignature(String protocol,
+ long clientVersion, int clientMethodsHash) throws IOException {
+ /**
+ * Don't forward this to the server. The protocol version and signature
+ * are those of {@link GetUserMappingsProtocol}
+ */
+ if (!protocol.equals(RPC
+ .getProtocolName(GetUserMappingsProtocolPB.class))) {
+ throw new IOException("Namenode Serverside implements "
+ + RPC.getProtocolName(GetUserMappingsProtocolPB.class)
+ + ". The following requested protocol is unknown: " + protocol);
+ }
+
+ return ProtocolSignature.getProtocolSignature(clientMethodsHash,
+ RPC.getProtocolVersion(GetUserMappingsProtocolPB.class),
+ GetUserMappingsProtocolPB.class);
+ }
+
+ @Override
+ public ProtocolSignatureWritable getProtocolSignature2(String protocol,
+ long clientVersion, int clientMethodsHash) throws IOException {
+ /**
+ * Don't forward this to the server. The protocol version and signature
+ * are those of {@link GetUserMappingsProtocolPB}
+ */
+ return ProtocolSignatureWritable.convert(this.getProtocolSignature(
+ protocol, clientVersion, clientMethodsHash));
+ }
+
+ @Override
+ public GetGroupsForUserResponseProto getGroupsForUser(
+ RpcController controller, GetGroupsForUserRequestProto request)
+ throws ServiceException {
+ String[] groups;
+ try {
+ groups = impl.getGroupsForUser(request.getUser());
+ } catch (IOException e) {
+ throw new ServiceException(e);
+ }
+ GetGroupsForUserResponseProto.Builder builder = GetGroupsForUserResponseProto
+ .newBuilder();
+ for (String g : groups) {
+ builder.addGroups(g);
+ }
+ return builder.build();
+ }
+}
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/RefreshAuthorizationPolicyProtocolClientSideTranslatorPB.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/RefreshAuthorizationPolicyProtocolClientSideTranslatorPB.java
new file mode 100644
index 00000000000..400d62eba1f
--- /dev/null
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/RefreshAuthorizationPolicyProtocolClientSideTranslatorPB.java
@@ -0,0 +1,86 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdfs.protocolPB;
+
+import java.io.Closeable;
+import java.io.IOException;
+import java.net.InetSocketAddress;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdfs.protocol.proto.RefreshAuthorizationPolicyProtocolProtos.RefreshServiceAclRequestProto;
+import org.apache.hadoop.hdfs.protocolR23Compatible.ProtocolSignatureWritable;
+import org.apache.hadoop.hdfs.server.namenode.NameNode;
+import org.apache.hadoop.ipc.ProtobufHelper;
+import org.apache.hadoop.ipc.ProtobufRpcEngine;
+import org.apache.hadoop.ipc.ProtocolSignature;
+import org.apache.hadoop.ipc.RPC;
+import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authorize.RefreshAuthorizationPolicyProtocol;
+
+import com.google.protobuf.RpcController;
+import com.google.protobuf.ServiceException;
+
+public class RefreshAuthorizationPolicyProtocolClientSideTranslatorPB implements
+ RefreshAuthorizationPolicyProtocol, Closeable {
+
+ /** RpcController is not used and hence is set to null */
+ private final static RpcController NULL_CONTROLLER = null;
+ private final RefreshAuthorizationPolicyProtocolPB rpcProxy;
+
+ public RefreshAuthorizationPolicyProtocolClientSideTranslatorPB(
+ InetSocketAddress nameNodeAddr, UserGroupInformation ugi,
+ Configuration conf) throws IOException {
+ RPC.setProtocolEngine(conf, RefreshAuthorizationPolicyProtocolPB.class,
+ ProtobufRpcEngine.class);
+ rpcProxy = RPC.getProxy(RefreshAuthorizationPolicyProtocolPB.class,
+ RPC.getProtocolVersion(RefreshAuthorizationPolicyProtocolPB.class),
+ nameNodeAddr, ugi, conf,
+ NetUtils.getSocketFactory(conf, RefreshAuthorizationPolicyProtocol.class));
+ }
+
+ @Override
+ public long getProtocolVersion(String protocol, long clientVersion)
+ throws IOException {
+ return rpcProxy.getProtocolVersion(protocol, clientVersion);
+ }
+
+ @Override
+ public ProtocolSignature getProtocolSignature(String protocol,
+ long clientVersion, int clientMethodsHash) throws IOException {
+ return ProtocolSignatureWritable.convert(rpcProxy.getProtocolSignature2(
+ protocol, clientVersion, clientMethodsHash));
+ }
+
+ @Override
+ public void close() throws IOException {
+ RPC.stopProxy(rpcProxy);
+ }
+
+ @Override
+ public void refreshServiceAcl() throws IOException {
+ RefreshServiceAclRequestProto request = RefreshServiceAclRequestProto
+ .newBuilder().build();
+ try {
+ rpcProxy.refreshServiceAcl(NULL_CONTROLLER, request);
+ } catch (ServiceException se) {
+ throw ProtobufHelper.getRemoteException(se);
+ }
+ }
+}
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/RefreshAuthorizationPolicyProtocolPB.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/RefreshAuthorizationPolicyProtocolPB.java
new file mode 100644
index 00000000000..842926c9324
--- /dev/null
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/RefreshAuthorizationPolicyProtocolPB.java
@@ -0,0 +1,49 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdfs.protocolPB;
+
+import java.io.IOException;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.fs.CommonConfigurationKeys;
+import org.apache.hadoop.hdfs.protocol.proto.RefreshAuthorizationPolicyProtocolProtos.RefreshAuthorizationPolicyProtocolService;
+import org.apache.hadoop.hdfs.protocolR23Compatible.ProtocolSignatureWritable;
+import org.apache.hadoop.ipc.ProtocolInfo;
+import org.apache.hadoop.ipc.VersionedProtocol;
+import org.apache.hadoop.security.KerberosInfo;
+
+@KerberosInfo(
+ serverPrincipal=CommonConfigurationKeys.HADOOP_SECURITY_SERVICE_USER_NAME_KEY)
+@ProtocolInfo(
+ protocolName = "org.apache.hadoop.security.authorize.RefreshAuthorizationPolicyProtocol",
+ protocolVersion = 1)
+@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
+@InterfaceStability.Evolving
+public interface RefreshAuthorizationPolicyProtocolPB extends
+ RefreshAuthorizationPolicyProtocolService.BlockingInterface, VersionedProtocol {
+
+ /**
+ * This method is defined to get the protocol signature using
+ * the R23 protocol - hence we have added the suffix of 2 to the method name
+ * to avoid conflict.
+ */
+ public ProtocolSignatureWritable getProtocolSignature2(String protocol,
+ long clientVersion, int clientMethodsHash) throws IOException;
+}
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/RefreshAuthorizationPolicyProtocolServerSideTranslatorPB.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/RefreshAuthorizationPolicyProtocolServerSideTranslatorPB.java
new file mode 100644
index 00000000000..3ae8c7165dd
--- /dev/null
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/RefreshAuthorizationPolicyProtocolServerSideTranslatorPB.java
@@ -0,0 +1,90 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdfs.protocolPB;
+
+import java.io.IOException;
+
+import org.apache.hadoop.hdfs.protocol.proto.RefreshAuthorizationPolicyProtocolProtos.RefreshServiceAclRequestProto;
+import org.apache.hadoop.hdfs.protocol.proto.RefreshAuthorizationPolicyProtocolProtos.RefreshServiceAclResponseProto;
+import org.apache.hadoop.hdfs.protocolR23Compatible.ProtocolSignatureWritable;
+import org.apache.hadoop.ipc.ProtocolSignature;
+import org.apache.hadoop.ipc.RPC;
+import org.apache.hadoop.security.authorize.RefreshAuthorizationPolicyProtocol;
+
+import com.google.protobuf.RpcController;
+import com.google.protobuf.ServiceException;
+
+public class RefreshAuthorizationPolicyProtocolServerSideTranslatorPB implements
+ RefreshAuthorizationPolicyProtocolPB {
+
+ private final RefreshAuthorizationPolicyProtocol impl;
+
+ public RefreshAuthorizationPolicyProtocolServerSideTranslatorPB(
+ RefreshAuthorizationPolicyProtocol impl) {
+ this.impl = impl;
+ }
+
+ @Override
+ public long getProtocolVersion(String protocol, long clientVersion)
+ throws IOException {
+ return RPC.getProtocolVersion(RefreshAuthorizationPolicyProtocolPB.class);
+ }
+
+ @Override
+ public ProtocolSignature getProtocolSignature(String protocol,
+ long clientVersion, int clientMethodsHash) throws IOException {
+ /**
+ * Don't forward this to the server. The protocol version and signature
+ * are those of {@link RefreshAuthorizationPolicyProtocol}
+ */
+ if (!protocol.equals(RPC
+ .getProtocolName(RefreshAuthorizationPolicyProtocolPB.class))) {
+ throw new IOException("Namenode Serverside implements "
+ + RPC.getProtocolName(RefreshAuthorizationPolicyProtocolPB.class)
+ + ". The following requested protocol is unknown: " + protocol);
+ }
+
+ return ProtocolSignature.getProtocolSignature(clientMethodsHash,
+ RPC.getProtocolVersion(RefreshAuthorizationPolicyProtocolPB.class),
+ RefreshAuthorizationPolicyProtocolPB.class);
+ }
+
+ @Override
+ public ProtocolSignatureWritable getProtocolSignature2(String protocol,
+ long clientVersion, int clientMethodsHash) throws IOException {
+ /**
+ * Don't forward this to the server. The protocol version and signature
+ * are those of {@link RefreshAuthorizationPolicyProtocolPB}
+ */
+ return ProtocolSignatureWritable.convert(this.getProtocolSignature(
+ protocol, clientVersion, clientMethodsHash));
+ }
+
+ @Override
+ public RefreshServiceAclResponseProto refreshServiceAcl(
+ RpcController controller, RefreshServiceAclRequestProto request)
+ throws ServiceException {
+ try {
+ impl.refreshServiceAcl();
+ } catch (IOException e) {
+ throw new ServiceException(e);
+ }
+ return RefreshServiceAclResponseProto.newBuilder().build();
+ }
+}
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/RefreshUserMappingsProtocolClientSideTranslatorPB.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/RefreshUserMappingsProtocolClientSideTranslatorPB.java
new file mode 100644
index 00000000000..65d588326bc
--- /dev/null
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/RefreshUserMappingsProtocolClientSideTranslatorPB.java
@@ -0,0 +1,98 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdfs.protocolPB;
+
+import java.io.Closeable;
+import java.io.IOException;
+import java.net.InetSocketAddress;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdfs.protocol.proto.RefreshUserMappingsProtocolProtos.RefreshSuperUserGroupsConfigurationRequestProto;
+import org.apache.hadoop.hdfs.protocol.proto.RefreshUserMappingsProtocolProtos.RefreshUserToGroupsMappingsRequestProto;
+import org.apache.hadoop.hdfs.protocolR23Compatible.ProtocolSignatureWritable;
+import org.apache.hadoop.hdfs.server.namenode.NameNode;
+import org.apache.hadoop.ipc.ProtobufHelper;
+import org.apache.hadoop.ipc.ProtobufRpcEngine;
+import org.apache.hadoop.ipc.ProtocolSignature;
+import org.apache.hadoop.ipc.RPC;
+import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.security.RefreshUserMappingsProtocol;
+import org.apache.hadoop.security.UserGroupInformation;
+
+import com.google.protobuf.RpcController;
+import com.google.protobuf.ServiceException;
+
+public class RefreshUserMappingsProtocolClientSideTranslatorPB implements
+ RefreshUserMappingsProtocol, Closeable {
+
+ /** RpcController is not used and hence is set to null */
+ private final static RpcController NULL_CONTROLLER = null;
+ private final RefreshUserMappingsProtocolPB rpcProxy;
+
+ public RefreshUserMappingsProtocolClientSideTranslatorPB(
+ InetSocketAddress nameNodeAddr, UserGroupInformation ugi,
+ Configuration conf) throws IOException {
+ RPC.setProtocolEngine(conf, RefreshUserMappingsProtocolPB.class,
+ ProtobufRpcEngine.class);
+ rpcProxy = RPC.getProxy(RefreshUserMappingsProtocolPB.class,
+ RPC.getProtocolVersion(RefreshUserMappingsProtocolPB.class),
+ nameNodeAddr, ugi, conf,
+ NetUtils.getSocketFactory(conf, RefreshUserMappingsProtocol.class));
+ }
+
+ @Override
+ public long getProtocolVersion(String protocol, long clientVersion)
+ throws IOException {
+ return rpcProxy.getProtocolVersion(protocol, clientVersion);
+ }
+
+ @Override
+ public ProtocolSignature getProtocolSignature(String protocol,
+ long clientVersion, int clientMethodsHash) throws IOException {
+ return ProtocolSignatureWritable.convert(rpcProxy.getProtocolSignature2(
+ protocol, clientVersion, clientMethodsHash));
+ }
+
+ @Override
+ public void close() throws IOException {
+ RPC.stopProxy(rpcProxy);
+ }
+
+ @Override
+ public void refreshUserToGroupsMappings() throws IOException {
+ RefreshUserToGroupsMappingsRequestProto request =
+ RefreshUserToGroupsMappingsRequestProto.newBuilder().build();
+ try {
+ rpcProxy.refreshUserToGroupsMappings(NULL_CONTROLLER, request);
+ } catch (ServiceException se) {
+ throw ProtobufHelper.getRemoteException(se);
+ }
+ }
+
+ @Override
+ public void refreshSuperUserGroupsConfiguration() throws IOException {
+ RefreshSuperUserGroupsConfigurationRequestProto request =
+ RefreshSuperUserGroupsConfigurationRequestProto.newBuilder().build();
+ try {
+ rpcProxy.refreshSuperUserGroupsConfiguration(NULL_CONTROLLER, request);
+ } catch (ServiceException se) {
+ throw ProtobufHelper.getRemoteException(se);
+ }
+ }
+}
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/RefreshUserMappingsProtocolPB.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/RefreshUserMappingsProtocolPB.java
new file mode 100644
index 00000000000..16a77ff6862
--- /dev/null
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/RefreshUserMappingsProtocolPB.java
@@ -0,0 +1,49 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdfs.protocolPB;
+
+import java.io.IOException;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.fs.CommonConfigurationKeys;
+import org.apache.hadoop.hdfs.protocol.proto.RefreshUserMappingsProtocolProtos.RefreshUserMappingsProtocolService;
+import org.apache.hadoop.hdfs.protocolR23Compatible.ProtocolSignatureWritable;
+import org.apache.hadoop.ipc.ProtocolInfo;
+import org.apache.hadoop.ipc.VersionedProtocol;
+import org.apache.hadoop.security.KerberosInfo;
+
+@KerberosInfo(
+ serverPrincipal=CommonConfigurationKeys.HADOOP_SECURITY_SERVICE_USER_NAME_KEY)
+@ProtocolInfo(
+ protocolName = "org.apache.hadoop.security.RefreshUserMappingsProtocol",
+ protocolVersion = 1)
+@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
+@InterfaceStability.Evolving
+public interface RefreshUserMappingsProtocolPB extends
+ RefreshUserMappingsProtocolService.BlockingInterface, VersionedProtocol {
+
+ /**
+ * This method is defined to get the protocol signature using
+ * the R23 protocol - hence we have added the suffix of 2 to the method name
+ * to avoid conflict.
+ */
+ public ProtocolSignatureWritable getProtocolSignature2(String protocol,
+ long clientVersion, int clientMethodsHash) throws IOException;
+}
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/RefreshUserMappingsProtocolServerSideTranslatorPB.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/RefreshUserMappingsProtocolServerSideTranslatorPB.java
new file mode 100644
index 00000000000..005c654f2ee
--- /dev/null
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/RefreshUserMappingsProtocolServerSideTranslatorPB.java
@@ -0,0 +1,105 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdfs.protocolPB;
+
+import java.io.IOException;
+
+import org.apache.hadoop.hdfs.protocol.proto.RefreshUserMappingsProtocolProtos.RefreshSuperUserGroupsConfigurationRequestProto;
+import org.apache.hadoop.hdfs.protocol.proto.RefreshUserMappingsProtocolProtos.RefreshSuperUserGroupsConfigurationResponseProto;
+import org.apache.hadoop.hdfs.protocol.proto.RefreshUserMappingsProtocolProtos.RefreshUserToGroupsMappingsRequestProto;
+import org.apache.hadoop.hdfs.protocol.proto.RefreshUserMappingsProtocolProtos.RefreshUserToGroupsMappingsResponseProto;
+import org.apache.hadoop.hdfs.protocolR23Compatible.ProtocolSignatureWritable;
+import org.apache.hadoop.ipc.ProtocolSignature;
+import org.apache.hadoop.ipc.RPC;
+import org.apache.hadoop.security.RefreshUserMappingsProtocol;
+
+import com.google.protobuf.RpcController;
+import com.google.protobuf.ServiceException;
+
+public class RefreshUserMappingsProtocolServerSideTranslatorPB implements RefreshUserMappingsProtocolPB {
+
+ private final RefreshUserMappingsProtocol impl;
+
+ public RefreshUserMappingsProtocolServerSideTranslatorPB(RefreshUserMappingsProtocol impl) {
+ this.impl = impl;
+ }
+
+ @Override
+ public RefreshUserToGroupsMappingsResponseProto
+ refreshUserToGroupsMappings(RpcController controller,
+ RefreshUserToGroupsMappingsRequestProto request)
+ throws ServiceException {
+ try {
+ impl.refreshUserToGroupsMappings();
+ } catch (IOException e) {
+ throw new ServiceException(e);
+ }
+ return RefreshUserToGroupsMappingsResponseProto.newBuilder().build();
+ }
+
+ @Override
+ public RefreshSuperUserGroupsConfigurationResponseProto
+ refreshSuperUserGroupsConfiguration(RpcController controller,
+ RefreshSuperUserGroupsConfigurationRequestProto request)
+ throws ServiceException {
+ try {
+ impl.refreshSuperUserGroupsConfiguration();
+ } catch (IOException e) {
+ throw new ServiceException(e);
+ }
+ return RefreshSuperUserGroupsConfigurationResponseProto.newBuilder()
+ .build();
+ }
+
+ @Override
+ public long getProtocolVersion(String protocol, long clientVersion)
+ throws IOException {
+ return RPC.getProtocolVersion(RefreshUserMappingsProtocolPB.class);
+ }
+
+ @Override
+ public ProtocolSignature getProtocolSignature(String protocol,
+ long clientVersion, int clientMethodsHash) throws IOException {
+ /**
+ * Don't forward this to the server. The protocol version and signature
+ * are those of {@link RefreshUserMappingsProtocol}
+ */
+ if (!protocol.equals(RPC
+ .getProtocolName(RefreshUserMappingsProtocolPB.class))) {
+ throw new IOException("Namenode Serverside implements "
+ + RPC.getProtocolName(RefreshUserMappingsProtocolPB.class)
+ + ". The following requested protocol is unknown: " + protocol);
+ }
+
+ return ProtocolSignature.getProtocolSignature(clientMethodsHash,
+ RPC.getProtocolVersion(RefreshUserMappingsProtocolPB.class),
+ RefreshUserMappingsProtocolPB.class);
+ }
+
+ @Override
+ public ProtocolSignatureWritable getProtocolSignature2(String protocol,
+ long clientVersion, int clientMethodsHash) throws IOException {
+ /**
+ * Don't forward this to the server. The protocol version and signature
+ * are those of {@link RefreshUserMappingsProtocolPB}
+ */
+ return ProtocolSignatureWritable.convert(this.getProtocolSignature(
+ protocol, clientVersion, clientMethodsHash));
+ }
+}
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRpcServer.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRpcServer.java
index b7433ef7b65..8bbcc3f60b8 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRpcServer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRpcServer.java
@@ -65,12 +65,21 @@ import org.apache.hadoop.hdfs.protocol.HdfsConstants.UpgradeAction;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.ClientNamenodeProtocol;
import org.apache.hadoop.hdfs.protocol.proto.NamenodeProtocolProtos.NamenodeProtocolService;
import org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.DatanodeProtocolService;
+import org.apache.hadoop.hdfs.protocol.proto.GetUserMappingsProtocolProtos.GetUserMappingsProtocolService;
+import org.apache.hadoop.hdfs.protocol.proto.RefreshAuthorizationPolicyProtocolProtos.RefreshAuthorizationPolicyProtocolService;
+import org.apache.hadoop.hdfs.protocol.proto.RefreshUserMappingsProtocolProtos.RefreshUserMappingsProtocolService;
import org.apache.hadoop.hdfs.protocolPB.DatanodeProtocolPB;
import org.apache.hadoop.hdfs.protocolPB.DatanodeProtocolServerSideTranslatorPB;
+import org.apache.hadoop.hdfs.protocolPB.GetUserMappingsProtocolPB;
+import org.apache.hadoop.hdfs.protocolPB.GetUserMappingsProtocolServerSideTranslatorPB;
import org.apache.hadoop.hdfs.protocolPB.NamenodeProtocolPB;
import org.apache.hadoop.hdfs.protocolPB.NamenodeProtocolServerSideTranslatorPB;
import org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolPB;
import org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB;
+import org.apache.hadoop.hdfs.protocolPB.RefreshAuthorizationPolicyProtocolPB;
+import org.apache.hadoop.hdfs.protocolPB.RefreshAuthorizationPolicyProtocolServerSideTranslatorPB;
+import org.apache.hadoop.hdfs.protocolPB.RefreshUserMappingsProtocolPB;
+import org.apache.hadoop.hdfs.protocolPB.RefreshUserMappingsProtocolServerSideTranslatorPB;
import org.apache.hadoop.hdfs.security.token.block.ExportedBlockKeys;
import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
import org.apache.hadoop.hdfs.server.common.IncorrectVersionException;
@@ -99,9 +108,9 @@ import org.apache.hadoop.io.Text;
import org.apache.hadoop.ipc.ProtobufRpcEngine;
import org.apache.hadoop.ipc.ProtocolSignature;
import org.apache.hadoop.ipc.RPC;
+import org.apache.hadoop.ipc.RpcPayloadHeader.RpcKind;
import org.apache.hadoop.ipc.Server;
import org.apache.hadoop.ipc.WritableRpcEngine;
-import org.apache.hadoop.ipc.RpcPayloadHeader.RpcKind;
import org.apache.hadoop.net.Node;
import org.apache.hadoop.security.AccessControlException;
import org.apache.hadoop.security.Groups;
@@ -167,7 +176,22 @@ class NameNodeRpcServer implements NamenodeProtocols {
new NamenodeProtocolServerSideTranslatorPB(this);
BlockingService NNPbService = NamenodeProtocolService
.newReflectiveBlockingService(namenodeProtocolXlator);
-
+
+ RefreshAuthorizationPolicyProtocolServerSideTranslatorPB refreshAuthPolicyXlator =
+ new RefreshAuthorizationPolicyProtocolServerSideTranslatorPB(this);
+ BlockingService refreshAuthService = RefreshAuthorizationPolicyProtocolService
+ .newReflectiveBlockingService(refreshAuthPolicyXlator);
+
+ RefreshUserMappingsProtocolServerSideTranslatorPB refreshUserMappingXlator =
+ new RefreshUserMappingsProtocolServerSideTranslatorPB(this);
+ BlockingService refreshUserMappingService = RefreshUserMappingsProtocolService
+ .newReflectiveBlockingService(refreshUserMappingXlator);
+
+ GetUserMappingsProtocolServerSideTranslatorPB getUserMappingXlator =
+ new GetUserMappingsProtocolServerSideTranslatorPB(this);
+ BlockingService getUserMappingService = GetUserMappingsProtocolService
+ .newReflectiveBlockingService(getUserMappingXlator);
+
WritableRpcEngine.ensureInitialized();
InetSocketAddress dnSocketAddr = nn.getServiceRpcServerAddress(conf);
@@ -182,19 +206,19 @@ class NameNodeRpcServer implements NamenodeProtocols {
dnSocketAddr.getHostName(), dnSocketAddr.getPort(),
serviceHandlerCount,
false, conf, namesystem.getDelegationTokenSecretManager());
- this.serviceRpcServer.addProtocol(RpcKind.RPC_WRITABLE,
- RefreshAuthorizationPolicyProtocol.class, this);
- this.serviceRpcServer.addProtocol(RpcKind.RPC_WRITABLE,
- RefreshUserMappingsProtocol.class, this);
- this.serviceRpcServer.addProtocol(RpcKind.RPC_WRITABLE,
- GetUserMappingsProtocol.class, this);
this.serviceRpcServer.addProtocol(RpcKind.RPC_WRITABLE,
HAServiceProtocol.class, this);
DFSUtil.addPBProtocol(conf, NamenodeProtocolPB.class, NNPbService,
serviceRpcServer);
DFSUtil.addPBProtocol(conf, DatanodeProtocolPB.class, dnProtoPbService,
serviceRpcServer);
-
+ DFSUtil.addPBProtocol(conf, RefreshAuthorizationPolicyProtocolPB.class,
+ refreshAuthService, serviceRpcServer);
+ DFSUtil.addPBProtocol(conf, RefreshUserMappingsProtocolPB.class,
+ refreshUserMappingService, serviceRpcServer);
+ DFSUtil.addPBProtocol(conf, GetUserMappingsProtocolPB.class,
+ getUserMappingService, serviceRpcServer);
+
this.serviceRPCAddress = this.serviceRpcServer.getListenerAddress();
nn.setRpcServiceServerAddress(conf, serviceRPCAddress);
} else {
@@ -207,18 +231,18 @@ class NameNodeRpcServer implements NamenodeProtocols {
clientNNPbService, socAddr.getHostName(),
socAddr.getPort(), handlerCount, false, conf,
namesystem.getDelegationTokenSecretManager());
- this.clientRpcServer.addProtocol(RpcKind.RPC_WRITABLE,
- RefreshAuthorizationPolicyProtocol.class, this);
- this.clientRpcServer.addProtocol(RpcKind.RPC_WRITABLE,
- RefreshUserMappingsProtocol.class, this);
- this.clientRpcServer.addProtocol(RpcKind.RPC_WRITABLE,
- GetUserMappingsProtocol.class, this);
this.clientRpcServer.addProtocol(RpcKind.RPC_WRITABLE,
HAServiceProtocol.class, this);
DFSUtil.addPBProtocol(conf, NamenodeProtocolPB.class, NNPbService,
clientRpcServer);
DFSUtil.addPBProtocol(conf, DatanodeProtocolPB.class, dnProtoPbService,
clientRpcServer);
+ DFSUtil.addPBProtocol(conf, RefreshAuthorizationPolicyProtocolPB.class,
+ refreshAuthService, clientRpcServer);
+ DFSUtil.addPBProtocol(conf, RefreshUserMappingsProtocolPB.class,
+ refreshUserMappingService, clientRpcServer);
+ DFSUtil.addPBProtocol(conf, GetUserMappingsProtocolPB.class,
+ getUserMappingService, clientRpcServer);
// set service-level authorization security policy
if (serviceAuthEnabled =
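
Condensed from the hunk above, the wiring pattern for each of the three new PB
services (conf and clientRpcServer come from the surrounding method;
BlockingService is com.google.protobuf.BlockingService): build a server-side
translator around the NameNode, wrap it in a reflective BlockingService, and
register it on the RPC server.

    GetUserMappingsProtocolServerSideTranslatorPB getUserMappingXlator =
        new GetUserMappingsProtocolServerSideTranslatorPB(this);
    BlockingService getUserMappingService = GetUserMappingsProtocolService
        .newReflectiveBlockingService(getUserMappingXlator);
    DFSUtil.addPBProtocol(conf, GetUserMappingsProtocolPB.class,
        getUserMappingService, clientRpcServer);
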
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java
index 2144203965c..2d2e4fc0f76 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java
@@ -43,14 +43,14 @@ import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.hdfs.protocol.HdfsConstants;
import org.apache.hadoop.hdfs.protocol.HdfsConstants.DatanodeReportType;
import org.apache.hadoop.hdfs.protocol.HdfsConstants.UpgradeAction;
+import org.apache.hadoop.hdfs.protocolPB.RefreshAuthorizationPolicyProtocolClientSideTranslatorPB;
+import org.apache.hadoop.hdfs.protocolPB.RefreshUserMappingsProtocolClientSideTranslatorPB;
import org.apache.hadoop.hdfs.server.common.UpgradeStatusReport;
import org.apache.hadoop.hdfs.server.namenode.NameNode;
import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.net.NetUtils;
-import org.apache.hadoop.security.RefreshUserMappingsProtocol;
import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.security.authorize.RefreshAuthorizationPolicyProtocol;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.ToolRunner;
@@ -790,13 +790,9 @@ public class DFSAdmin extends FsShell {
conf.get(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY, ""));
// Create the client
- RefreshAuthorizationPolicyProtocol refreshProtocol =
- (RefreshAuthorizationPolicyProtocol)
- RPC.getProxy(RefreshAuthorizationPolicyProtocol.class,
- RefreshAuthorizationPolicyProtocol.versionID,
- NameNode.getAddress(conf), getUGI(), conf,
- NetUtils.getSocketFactory(conf,
- RefreshAuthorizationPolicyProtocol.class));
+ RefreshAuthorizationPolicyProtocolClientSideTranslatorPB refreshProtocol =
+ new RefreshAuthorizationPolicyProtocolClientSideTranslatorPB(
+ NameNode.getAddress(conf), getUGI(), conf);
// Refresh the authorization policy in-effect
refreshProtocol.refreshServiceAcl();
@@ -820,13 +816,9 @@ public class DFSAdmin extends FsShell {
conf.get(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY, ""));
// Create the client
- RefreshUserMappingsProtocol refreshProtocol =
- (RefreshUserMappingsProtocol)
- RPC.getProxy(RefreshUserMappingsProtocol.class,
- RefreshUserMappingsProtocol.versionID,
- NameNode.getAddress(conf), getUGI(), conf,
- NetUtils.getSocketFactory(conf,
- RefreshUserMappingsProtocol.class));
+ RefreshUserMappingsProtocolClientSideTranslatorPB refreshProtocol =
+ new RefreshUserMappingsProtocolClientSideTranslatorPB(
+ NameNode.getAddress(conf), getUGI(), conf);
// Refresh the user-to-groups mappings
refreshProtocol.refreshUserToGroupsMappings();
@@ -851,13 +843,9 @@ public class DFSAdmin extends FsShell {
conf.get(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY, ""));
// Create the client
- RefreshUserMappingsProtocol refreshProtocol =
- (RefreshUserMappingsProtocol)
- RPC.getProxy(RefreshUserMappingsProtocol.class,
- RefreshUserMappingsProtocol.versionID,
- NameNode.getAddress(conf), getUGI(), conf,
- NetUtils.getSocketFactory(conf,
- RefreshUserMappingsProtocol.class));
+ RefreshUserMappingsProtocolClientSideTranslatorPB refreshProtocol =
+ new RefreshUserMappingsProtocolClientSideTranslatorPB(
+ NameNode.getAddress(conf), getUGI(), conf);
// Refresh the user-to-groups mappings
refreshProtocol.refreshSuperUserGroupsConfiguration();
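
All three dfsadmin refresh commands now construct a client-side translator instead of casting a raw RPC.getProxy result, so the tool keeps programming against the old interface while the translator owns the PB stub. A sketch of the shape such a translator method typically has (rpcProxy and NULL_CONTROLLER are illustrative names, not the patched class's code):

    // Illustrative translator method: old signature in front, PB stub behind.
    public void refreshUserToGroupsMappings() throws IOException {
      RefreshUserToGroupsMappingsRequestProto req =
          RefreshUserToGroupsMappingsRequestProto.newBuilder().build();
      try {
        rpcProxy.refreshUserToGroupsMappings(NULL_CONTROLLER, req);
      } catch (ServiceException se) {
        // PB stubs throw ServiceException; resurface the old checked IOException.
        throw new IOException(se);
      }
    }
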
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/GetGroups.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/GetGroups.java
index 1f2b98fd4ac..5ad227d9e02 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/GetGroups.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/GetGroups.java
@@ -23,8 +23,11 @@ import java.net.InetSocketAddress;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.HdfsConfiguration;
+import org.apache.hadoop.hdfs.protocolPB.GetUserMappingsProtocolClientSideTranslatorPB;
import org.apache.hadoop.hdfs.server.namenode.NameNode;
+import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.tools.GetGroupsBase;
+import org.apache.hadoop.tools.GetUserMappingsProtocol;
import org.apache.hadoop.util.ToolRunner;
/**
@@ -51,6 +54,13 @@ public class GetGroups extends GetGroupsBase {
throws IOException {
return NameNode.getAddress(conf);
}
+
+ @Override
+ protected GetUserMappingsProtocol getUgmProtocol() throws IOException {
+ return new GetUserMappingsProtocolClientSideTranslatorPB(
+ NameNode.getAddress(getConf()), UserGroupInformation.getCurrentUser(),
+ getConf());
+ }
public static void main(String[] argv) throws Exception {
int res = ToolRunner.run(new GetGroups(new HdfsConfiguration()), argv);
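
The getUgmProtocol() override is the HADOOP-7957 hook noted in CHANGES.txt: GetGroupsBase no longer hard-wires proxy creation, and this subclass hands back the PB translator. The base class presumably consumes the hook along these lines (a sketch, not the actual base-class code):

    // Sketch: how GetGroupsBase would drive the overridable proxy.
    GetUserMappingsProtocol proxy = getUgmProtocol();
    for (String user : args) {
      String[] groups = proxy.getGroupsForUser(user);
      System.out.println(user + " : " + java.util.Arrays.toString(groups));
    }
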
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/GetUserMappingsProtocol.proto b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/GetUserMappingsProtocol.proto
new file mode 100644
index 00000000000..d3e2321f5a8
--- /dev/null
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/GetUserMappingsProtocol.proto
@@ -0,0 +1,49 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+option java_package = "org.apache.hadoop.hdfs.protocol.proto";
+option java_outer_classname = "GetUserMappingsProtocolProtos";
+option java_generic_services = true;
+option java_generate_equals_and_hash = true;
+
+/**
+ * Get groups for user request.
+ */
+message GetGroupsForUserRequestProto {
+ required string user = 1;
+}
+
+/**
+ * Response for get groups.
+ */
+message GetGroupsForUserResponseProto {
+ repeated string groups = 1;
+}
+
+
+/**
+ * Protocol implemented by the Name Node and Job Tracker which maps users to
+ * groups.
+ */
+service GetUserMappingsProtocolService {
+ /**
+ * Get the groups which are mapped to the given user.
+ */
+ rpc getGroupsForUser(GetGroupsForUserRequestProto)
+ returns(GetGroupsForUserResponseProto);
+}
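
Since java_generic_services is enabled, protoc also emits a GetUserMappingsProtocolService.BlockingInterface; a direct call through a stub of that interface looks roughly like this (stub acquisition elided, names follow protoc's generated-code conventions):

    // Sketch: invoking the generated blocking stub for the service above.
    GetGroupsForUserRequestProto request =
        GetGroupsForUserRequestProto.newBuilder().setUser("alice").build();
    try {
      GetGroupsForUserResponseProto response =
          stub.getGroupsForUser(null, request); // null RpcController, sketch only
      List<String> groups = response.getGroupsList();
    } catch (ServiceException e) {
      throw new IOException(e);
    }
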
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/RefreshAuthorizationPolicyProtocol.proto b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/RefreshAuthorizationPolicyProtocol.proto
new file mode 100644
index 00000000000..45bfaf8576c
--- /dev/null
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/RefreshAuthorizationPolicyProtocol.proto
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+option java_package = "org.apache.hadoop.hdfs.protocol.proto";
+option java_outer_classname = "RefreshAuthorizationPolicyProtocolProtos";
+option java_generic_services = true;
+option java_generate_equals_and_hash = true;
+
+/**
+ * Refresh service acl request.
+ */
+message RefreshServiceAclRequestProto {
+}
+
+/**
+ * void response
+ */
+message RefreshServiceAclResponseProto {
+}
+
+/**
+ * Protocol which is used to refresh the authorization policy in use currently.
+ */
+service RefreshAuthorizationPolicyProtocolService {
+ /**
+ * Refresh the service-level authorization policy in-effect.
+ */
+ rpc refreshServiceAcl(RefreshServiceAclRequestProto)
+ returns(RefreshServiceAclResponseProto);
+}
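
On the server side such a generated service is bound by implementing its BlockingInterface and delegating to the existing implementation. A minimal sketch (the class below is illustrative, not part of this patch):

    // Sketch of a server-side binding for the service above.
    class RefreshAuthPolicyTranslatorSketch
        implements RefreshAuthorizationPolicyProtocolService.BlockingInterface {
      private final RefreshAuthorizationPolicyProtocol impl;

      RefreshAuthPolicyTranslatorSketch(RefreshAuthorizationPolicyProtocol impl) {
        this.impl = impl;
      }

      @Override
      public RefreshServiceAclResponseProto refreshServiceAcl(
          RpcController controller, RefreshServiceAclRequestProto request)
          throws ServiceException {
        try {
          impl.refreshServiceAcl();      // delegate to the real implementation
        } catch (IOException e) {
          throw new ServiceException(e); // the PB layer carries errors this way
        }
        return RefreshServiceAclResponseProto.newBuilder().build();
      }
    }
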
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/RefreshUserMappingsProtocol.proto b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/RefreshUserMappingsProtocol.proto
new file mode 100644
index 00000000000..4e45d523b52
--- /dev/null
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/RefreshUserMappingsProtocol.proto
@@ -0,0 +1,63 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+option java_package = "org.apache.hadoop.hdfs.protocol.proto";
+option java_outer_classname = "RefreshUserMappingsProtocolProtos";
+option java_generic_services = true;
+option java_generate_equals_and_hash = true;
+
+/**
+ * Refresh user to group mappings request.
+ */
+message RefreshUserToGroupsMappingsRequestProto {
+}
+
+/**
+ * void response
+ */
+message RefreshUserToGroupsMappingsResponseProto {
+}
+
+/**
+ * Refresh superuser configuration request.
+ */
+message RefreshSuperUserGroupsConfigurationRequestProto {
+}
+
+/**
+ * void response
+ */
+message RefreshSuperUserGroupsConfigurationResponseProto {
+}
+
+/**
+ * Protocol to refresh the user mappings.
+ */
+service RefreshUserMappingsProtocolService {
+ /**
+ * Refresh user to group mappings.
+ */
+ rpc refreshUserToGroupsMappings(RefreshUserToGroupsMappingsRequestProto)
+ returns(RefreshUserToGroupsMappingsResponseProto);
+
+ /**
+ * Refresh superuser proxy group list.
+ */
+ rpc refreshSuperUserGroupsConfiguration(RefreshSuperUserGroupsConfigurationRequestProto)
+ returns(RefreshSuperUserGroupsConfigurationResponseProto);
+}
diff --git a/hadoop-mapreduce-project/CHANGES.txt b/hadoop-mapreduce-project/CHANGES.txt
index ea291382f6d..2a77b38fb25 100644
--- a/hadoop-mapreduce-project/CHANGES.txt
+++ b/hadoop-mapreduce-project/CHANGES.txt
@@ -73,28 +73,6 @@ Trunk (unreleased changes)
findBugs, correct links to findBugs artifacts and no links to the
artifacts when there are no warnings. (Tom White via vinodkv).
- MAPREDUCE-3183. hadoop-assemblies/src/main/resources/assemblies/hadoop-mapreduce-dist.xml
- missing license header. (Hitesh Shah via tucu).
-
- MAPREDUCE-3003. Publish MR JARs to Maven snapshot repository. (tucu)
-
- MAPREDUCE-3204. mvn site:site fails on MapReduce. (tucu)
-
- MAPREDUCE-3014. Rename and invert logic of '-cbuild' profile to 'native' and off
- by default. (tucu)
-
- MAPREDUCE-3477. Hadoop site documentation cannot be built anymore. (jeagles via tucu)
-
- MAPREDUCE-3500. MRJobConfig creates an LD_LIBRARY_PATH using the platform ARCH. (tucu)
-
- MAPREDUCE-3389. MRApps loads the 'mrapp-generated-classpath' file with
- classpath from the build machine. (tucu)
-
- MAPREDUCE-3544. gridmix build is broken, requires hadoop-archives to be added as
- ivy dependency. (tucu)
-
- MAPREDUCE-3557. MR1 test fail to compile because of missing hadoop-archives dependency.
- (tucu)
Release 0.23.1 - Unreleased
@@ -414,6 +392,39 @@ Release 0.23.1 - Unreleased
MAPREDUCE-3615. Fix some ant test failures. (Thomas Graves via sseth)
+ MAPREDUCE-3326. Added detailed information about queue's to the
+ CapacityScheduler web-ui. (Jason Lowe via acmurthy)
+
+ MAPREDUCE-3548. Added more unit tests for MR AM & JHS web-services.
+ (Thomas Graves via acmurthy)
+
+ MAPREDUCE-3617. Removed wrong default value for
+ yarn.resourcemanager.principal and yarn.nodemanager.principal. (Jonathan
+ Eagles via acmurthy)
+
+ MAPREDUCE-3183. hadoop-assemblies/src/main/resources/assemblies/hadoop-mapreduce-dist.xml
+ missing license header. (Hitesh Shah via tucu).
+
+ MAPREDUCE-3003. Publish MR JARs to Maven snapshot repository. (tucu)
+
+ MAPREDUCE-3204. mvn site:site fails on MapReduce. (tucu)
+
+ MAPREDUCE-3014. Rename and invert logic of '-cbuild' profile to 'native' and off
+ by default. (tucu)
+
+ MAPREDUCE-3477. Hadoop site documentation cannot be built anymore. (jeagles via tucu)
+
+ MAPREDUCE-3500. MRJobConfig creates an LD_LIBRARY_PATH using the platform ARCH. (tucu)
+
+ MAPREDUCE-3389. MRApps loads the 'mrapp-generated-classpath' file with
+ classpath from the build machine. (tucu)
+
+ MAPREDUCE-3544. gridmix build is broken, requires hadoop-archives to be added as
+ ivy dependency. (tucu)
+
+    MAPREDUCE-3557. MR1 tests fail to compile because of missing hadoop-archives dependency.
+ (tucu)
+
Release 0.23.0 - 2011-11-01
INCOMPATIBLE CHANGES
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/pom.xml b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/pom.xml
index 6fc0d114282..88032cad0a5 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/pom.xml
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/pom.xml
@@ -136,7 +136,6 @@
org.codehaus.mojo
exec-maven-plugin
- 1.2
compile
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AMWebServices.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AMWebServices.java
index 72ee762fc6e..2481b64bd2c 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AMWebServices.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AMWebServices.java
@@ -76,14 +76,90 @@ public class AMWebServices {
}
Boolean hasAccess(Job job, HttpServletRequest request) {
- UserGroupInformation callerUgi = UserGroupInformation
- .createRemoteUser(request.getRemoteUser());
- if (!job.checkAccess(callerUgi, JobACL.VIEW_JOB)) {
+ String remoteUser = request.getRemoteUser();
+ UserGroupInformation callerUGI = null;
+ if (remoteUser != null) {
+ callerUGI = UserGroupInformation.createRemoteUser(remoteUser);
+ }
+ if (callerUGI != null && !job.checkAccess(callerUGI, JobACL.VIEW_JOB)) {
return false;
}
return true;
}
+ /**
+ * convert a job id string to an actual job and handle all the error checking.
+ */
+ public static Job getJobFromJobIdString(String jid, AppContext appCtx) throws NotFoundException {
+ JobId jobId;
+ Job job;
+ try {
+ jobId = MRApps.toJobID(jid);
+ } catch (YarnException e) {
+ throw new NotFoundException(e.getMessage());
+ }
+ if (jobId == null) {
+ throw new NotFoundException("job, " + jid + ", is not found");
+ }
+ job = appCtx.getJob(jobId);
+ if (job == null) {
+ throw new NotFoundException("job, " + jid + ", is not found");
+ }
+ return job;
+ }
+
+ /**
+ * convert a task id string to an actual task and handle all the error
+ * checking.
+ */
+ public static Task getTaskFromTaskIdString(String tid, Job job) throws NotFoundException {
+ TaskId taskID;
+ Task task;
+ try {
+ taskID = MRApps.toTaskID(tid);
+ } catch (YarnException e) {
+ throw new NotFoundException(e.getMessage());
+ } catch (NumberFormatException ne) {
+ throw new NotFoundException(ne.getMessage());
+ }
+ if (taskID == null) {
+ throw new NotFoundException("taskid " + tid + " not found or invalid");
+ }
+ task = job.getTask(taskID);
+ if (task == null) {
+ throw new NotFoundException("task not found with id " + tid);
+ }
+ return task;
+ }
+
+ /**
+ * convert a task attempt id string to an actual task attempt and handle all
+ * the error checking.
+ */
+ public static TaskAttempt getTaskAttemptFromTaskAttemptString(String attId, Task task)
+ throws NotFoundException {
+ TaskAttemptId attemptId;
+ TaskAttempt ta;
+ try {
+ attemptId = MRApps.toTaskAttemptID(attId);
+ } catch (YarnException e) {
+ throw new NotFoundException(e.getMessage());
+ } catch (NumberFormatException ne) {
+ throw new NotFoundException(ne.getMessage());
+ }
+ if (attemptId == null) {
+ throw new NotFoundException("task attempt id " + attId
+ + " not found or invalid");
+ }
+ ta = task.getAttempt(attemptId);
+ if (ta == null) {
+ throw new NotFoundException("Error getting info on task attempt id "
+ + attId);
+ }
+ return ta;
+ }
+
+
/**
* check for job access.
*
@@ -130,16 +206,8 @@ public class AMWebServices {
@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
public JobInfo getJob(@Context HttpServletRequest hsr,
@PathParam("jobid") String jid) {
- JobId jobId = MRApps.toJobID(jid);
- if (jobId == null) {
- throw new NotFoundException("job, " + jid + ", is not found");
- }
- Job job = appCtx.getJob(jobId);
- if (job == null) {
- throw new NotFoundException("job, " + jid + ", is not found");
- }
+ Job job = getJobFromJobIdString(jid, appCtx);
return new JobInfo(job, hasAccess(job, hsr));
-
}
@GET
@@ -147,63 +215,25 @@ public class AMWebServices {
@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
public JobCounterInfo getJobCounters(@Context HttpServletRequest hsr,
@PathParam("jobid") String jid) {
- JobId jobId = MRApps.toJobID(jid);
- if (jobId == null) {
- throw new NotFoundException("job, " + jid + ", is not found");
- }
- Job job = appCtx.getJob(jobId);
- if (job == null) {
- throw new NotFoundException("job, " + jid + ", is not found");
- }
+ Job job = getJobFromJobIdString(jid, appCtx);
checkAccess(job, hsr);
return new JobCounterInfo(this.appCtx, job);
}
- @GET
- @Path("/jobs/{jobid}/tasks/{taskid}/counters")
- @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
- public JobTaskCounterInfo getSingleTaskCounters(
- @Context HttpServletRequest hsr, @PathParam("jobid") String jid,
- @PathParam("taskid") String tid) {
- JobId jobId = MRApps.toJobID(jid);
- if (jobId == null) {
- throw new NotFoundException("job, " + jid + ", is not found");
- }
- Job job = this.appCtx.getJob(jobId);
- if (job == null) {
- throw new NotFoundException("job, " + jid + ", is not found");
- }
- checkAccess(job, hsr);
- TaskId taskID = MRApps.toTaskID(tid);
- if (taskID == null) {
- throw new NotFoundException("taskid " + tid + " not found or invalid");
- }
- Task task = job.getTask(taskID);
- if (task == null) {
- throw new NotFoundException("task not found with id " + tid);
- }
- return new JobTaskCounterInfo(task);
- }
-
@GET
@Path("/jobs/{jobid}/conf")
@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
public ConfInfo getJobConf(@Context HttpServletRequest hsr,
@PathParam("jobid") String jid) {
- JobId jobId = MRApps.toJobID(jid);
- if (jobId == null) {
- throw new NotFoundException("job, " + jid + ", is not found");
- }
- Job job = appCtx.getJob(jobId);
- if (job == null) {
- throw new NotFoundException("job, " + jid + ", is not found");
- }
+
+ Job job = getJobFromJobIdString(jid, appCtx);
checkAccess(job, hsr);
ConfInfo info;
try {
info = new ConfInfo(job, this.conf);
} catch (IOException e) {
- throw new NotFoundException("unable to load configuration for job: " + jid);
+ throw new NotFoundException("unable to load configuration for job: "
+ + jid);
}
return info;
}
@@ -213,10 +243,8 @@ public class AMWebServices {
@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
public TasksInfo getJobTasks(@Context HttpServletRequest hsr,
@PathParam("jobid") String jid, @QueryParam("type") String type) {
- Job job = this.appCtx.getJob(MRApps.toJobID(jid));
- if (job == null) {
- throw new NotFoundException("job, " + jid + ", is not found");
- }
+
+ Job job = getJobFromJobIdString(jid, appCtx);
checkAccess(job, hsr);
TasksInfo allTasks = new TasksInfo();
for (Task task : job.getTasks().values()) {
@@ -225,7 +253,8 @@ public class AMWebServices {
try {
ttype = MRApps.taskType(type);
} catch (YarnException e) {
- throw new BadRequestException("tasktype must be either m or r"); }
+ throw new BadRequestException("tasktype must be either m or r");
+ }
}
if (ttype != null && task.getType() != ttype) {
continue;
@@ -240,21 +269,24 @@ public class AMWebServices {
@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
public TaskInfo getJobTask(@Context HttpServletRequest hsr,
@PathParam("jobid") String jid, @PathParam("taskid") String tid) {
- Job job = this.appCtx.getJob(MRApps.toJobID(jid));
- if (job == null) {
- throw new NotFoundException("job, " + jid + ", is not found");
- }
- checkAccess(job, hsr);
- TaskId taskID = MRApps.toTaskID(tid);
- if (taskID == null) {
- throw new NotFoundException("taskid " + tid + " not found or invalid");
- }
- Task task = job.getTask(taskID);
- if (task == null) {
- throw new NotFoundException("task not found with id " + tid);
- }
- return new TaskInfo(task);
+ Job job = getJobFromJobIdString(jid, appCtx);
+ checkAccess(job, hsr);
+ Task task = getTaskFromTaskIdString(tid, job);
+ return new TaskInfo(task);
+ }
+
+ @GET
+ @Path("/jobs/{jobid}/tasks/{taskid}/counters")
+ @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
+ public JobTaskCounterInfo getSingleTaskCounters(
+ @Context HttpServletRequest hsr, @PathParam("jobid") String jid,
+ @PathParam("taskid") String tid) {
+
+ Job job = getJobFromJobIdString(jid, appCtx);
+ checkAccess(job, hsr);
+ Task task = getTaskFromTaskIdString(tid, job);
+ return new JobTaskCounterInfo(task);
}
@GET
@@ -263,19 +295,11 @@ public class AMWebServices {
public TaskAttemptsInfo getJobTaskAttempts(@Context HttpServletRequest hsr,
@PathParam("jobid") String jid, @PathParam("taskid") String tid) {
TaskAttemptsInfo attempts = new TaskAttemptsInfo();
- Job job = this.appCtx.getJob(MRApps.toJobID(jid));
- if (job == null) {
- throw new NotFoundException("job, " + jid + ", is not found");
- }
+
+ Job job = getJobFromJobIdString(jid, appCtx);
checkAccess(job, hsr);
- TaskId taskID = MRApps.toTaskID(tid);
- if (taskID == null) {
- throw new NotFoundException("taskid " + tid + " not found or invalid");
- }
- Task task = job.getTask(taskID);
- if (task == null) {
- throw new NotFoundException("task not found with id " + tid);
- }
+ Task task = getTaskFromTaskIdString(tid, job);
+
for (TaskAttempt ta : task.getAttempts().values()) {
if (ta != null) {
if (task.getType() == TaskType.REDUCE) {
@@ -294,29 +318,11 @@ public class AMWebServices {
public TaskAttemptInfo getJobTaskAttemptId(@Context HttpServletRequest hsr,
@PathParam("jobid") String jid, @PathParam("taskid") String tid,
@PathParam("attemptid") String attId) {
- Job job = this.appCtx.getJob(MRApps.toJobID(jid));
- if (job == null) {
- throw new NotFoundException("job, " + jid + ", is not found");
- }
+
+ Job job = getJobFromJobIdString(jid, appCtx);
checkAccess(job, hsr);
- TaskId taskID = MRApps.toTaskID(tid);
- if (taskID == null) {
- throw new NotFoundException("taskid " + tid + " not found or invalid");
- }
- Task task = job.getTask(taskID);
- if (task == null) {
- throw new NotFoundException("task not found with id " + tid);
- }
- TaskAttemptId attemptId = MRApps.toTaskAttemptID(attId);
- if (attemptId == null) {
- throw new NotFoundException("task attempt id " + attId
- + " not found or invalid");
- }
- TaskAttempt ta = task.getAttempt(attemptId);
- if (ta == null) {
- throw new NotFoundException("Error getting info on task attempt id "
- + attId);
- }
+ Task task = getTaskFromTaskIdString(tid, job);
+ TaskAttempt ta = getTaskAttemptFromTaskAttemptString(attId, task);
if (task.getType() == TaskType.REDUCE) {
return new ReduceTaskAttemptInfo(ta, task.getType());
} else {
@@ -330,33 +336,11 @@ public class AMWebServices {
public JobTaskAttemptCounterInfo getJobTaskAttemptIdCounters(
@Context HttpServletRequest hsr, @PathParam("jobid") String jid,
@PathParam("taskid") String tid, @PathParam("attemptid") String attId) {
- JobId jobId = MRApps.toJobID(jid);
- if (jobId == null) {
- throw new NotFoundException("job, " + jid + ", is not found");
- }
- Job job = this.appCtx.getJob(jobId);
- if (job == null) {
- throw new NotFoundException("job, " + jid + ", is not found");
- }
+
+ Job job = getJobFromJobIdString(jid, appCtx);
checkAccess(job, hsr);
- TaskId taskID = MRApps.toTaskID(tid);
- if (taskID == null) {
- throw new NotFoundException("taskid " + tid + " not found or invalid");
- }
- Task task = job.getTask(taskID);
- if (task == null) {
- throw new NotFoundException("task not found with id " + tid);
- }
- TaskAttemptId attemptId = MRApps.toTaskAttemptID(attId);
- if (attemptId == null) {
- throw new NotFoundException("task attempt id " + attId
- + " not found or invalid");
- }
- TaskAttempt ta = task.getAttempt(attemptId);
- if (ta == null) {
- throw new NotFoundException("Error getting info on task attempt id "
- + attId);
- }
+ Task task = getTaskFromTaskIdString(tid, job);
+ TaskAttempt ta = getTaskAttemptFromTaskAttemptString(attId, task);
return new JobTaskAttemptCounterInfo(ta);
}
}
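
With the three lookup helpers in place, every endpoint reduces to the same steps: resolve, check access, build the DAO. Any future attempt-scoped endpoint would follow the identical shape; the path and method below are hypothetical, shown only to illustrate the pattern:

    // Hypothetical endpoint composed from the shared helpers.
    @GET
    @Path("/jobs/{jobid}/tasks/{taskid}/attempts/{attemptid}/example")
    @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
    public JobTaskAttemptCounterInfo getExample(@Context HttpServletRequest hsr,
        @PathParam("jobid") String jid, @PathParam("taskid") String tid,
        @PathParam("attemptid") String attId) {
      Job job = getJobFromJobIdString(jid, appCtx);  // 404 on bad/missing job
      checkAccess(job, hsr);                         // rejects unauthorized callers
      Task task = getTaskFromTaskIdString(tid, job); // 404 on bad/missing task
      TaskAttempt ta = getTaskAttemptFromTaskAttemptString(attId, task);
      return new JobTaskAttemptCounterInfo(ta);
    }
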
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/AppInfo.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/AppInfo.java
index 3e9a7e1b800..426676a7119 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/AppInfo.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/AppInfo.java
@@ -32,7 +32,6 @@ public class AppInfo {
protected String appId;
protected String name;
protected String user;
- protected String hostname;
protected long startedOn;
protected long elapsedTime;
@@ -44,7 +43,7 @@ public class AppInfo {
this.name = context.getApplicationName().toString();
this.user = context.getUser().toString();
this.startedOn = context.getStartTime();
- this.elapsedTime = Times.elapsed(context.getStartTime(), 0);
+ this.elapsedTime = Times.elapsed(this.startedOn, 0);
}
public String getId() {
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/ConfInfo.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/ConfInfo.java
index 0cc7bc46f82..d7f88bde60d 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/ConfInfo.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/ConfInfo.java
@@ -30,7 +30,7 @@ import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.v2.app.job.Job;
-@XmlRootElement
+@XmlRootElement(name = "conf")
@XmlAccessorType(XmlAccessType.FIELD)
public class ConfInfo {
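
Without an explicit name, JAXB derives the root element from the class name (confInfo here); pinning it to "conf" keeps the XML and JSON keys stable for API clients. The effect can be checked in isolation (a standalone sketch, independent of the project's classes):

    // Minimal JAXB demo of @XmlRootElement(name = "conf").
    import javax.xml.bind.JAXBContext;
    import javax.xml.bind.annotation.XmlRootElement;

    public class ConfRootNameDemo {
      @XmlRootElement(name = "conf")
      static class Conf {
        public String path = "/tmp/job.xml"; // sample field
      }

      public static void main(String[] args) throws Exception {
        JAXBContext.newInstance(Conf.class).createMarshaller()
            .marshal(new Conf(), System.out);
        // prints roughly: <conf><path>/tmp/job.xml</path></conf>
      }
    }
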
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/CounterInfo.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/CounterInfo.java
index 97c3563d62b..e632f344bb9 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/CounterInfo.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/CounterInfo.java
@@ -27,7 +27,7 @@ import org.apache.hadoop.mapreduce.v2.api.records.Counter;
@XmlAccessorType(XmlAccessType.FIELD)
public class CounterInfo {
- protected String counterName;
+ protected String name;
protected long totalCounterValue;
protected long mapCounterValue;
protected long reduceCounterValue;
@@ -36,7 +36,7 @@ public class CounterInfo {
}
public CounterInfo(Counter counter, Counter mc, Counter rc) {
- this.counterName = counter.getName();
+ this.name = counter.getName();
this.totalCounterValue = counter.getValue();
this.mapCounterValue = mc == null ? 0 : mc.getValue();
this.reduceCounterValue = rc == null ? 0 : rc.getValue();
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobCounterInfo.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobCounterInfo.java
index 6276e6a443b..73dc2065543 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobCounterInfo.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobCounterInfo.java
@@ -46,14 +46,14 @@ public class JobCounterInfo {
protected Counters reduce = null;
protected String id;
-  protected ArrayList<CounterGroupInfo> counterGroups;
+  protected ArrayList<CounterGroupInfo> counterGroup;
public JobCounterInfo() {
}
public JobCounterInfo(AppContext ctx, Job job) {
getCounters(ctx, job);
-    counterGroups = new ArrayList<CounterGroupInfo>();
+    counterGroup = new ArrayList<CounterGroupInfo>();
this.id = MRApps.toString(job.getID());
int numGroups = 0;
@@ -68,7 +68,7 @@ public class JobCounterInfo {
++numGroups;
CounterGroupInfo cginfo = new CounterGroupInfo(g.getName(), g, mg, rg);
- counterGroups.add(cginfo);
+ counterGroup.add(cginfo);
}
}
}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobInfo.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobInfo.java
index c46fc07dd6e..da57f86c114 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobInfo.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobInfo.java
@@ -30,6 +30,7 @@ import javax.xml.bind.annotation.XmlTransient;
import org.apache.hadoop.mapreduce.JobACL;
import org.apache.hadoop.mapreduce.v2.api.records.JobReport;
+import org.apache.hadoop.mapreduce.v2.api.records.JobState;
import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
import org.apache.hadoop.mapreduce.v2.app.job.Job;
@@ -51,12 +52,12 @@ public class JobInfo {
protected String id;
protected String name;
protected String user;
- protected String state;
+ protected JobState state;
protected int mapsTotal;
protected int mapsCompleted;
- protected float mapProgress;
protected int reducesTotal;
protected int reducesCompleted;
+ protected float mapProgress;
protected float reduceProgress;
@XmlTransient
@@ -83,18 +84,12 @@ public class JobInfo {
protected int successfulMapAttempts = 0;
protected ArrayList acls;
- @XmlTransient
- protected int numMaps;
- @XmlTransient
- protected int numReduces;
-
public JobInfo() {
}
public JobInfo(Job job, Boolean hasAccess) {
this.id = MRApps.toString(job.getID());
JobReport report = job.getReport();
- countTasksAndAttempts(job);
this.startTime = report.getStartTime();
this.finishTime = report.getFinishTime();
this.elapsedTime = Times.elapsed(this.startTime, this.finishTime);
@@ -103,7 +98,7 @@ public class JobInfo {
}
this.name = job.getName().toString();
this.user = job.getUserName();
- this.state = job.getState().toString();
+ this.state = job.getState();
this.mapsTotal = job.getTotalMaps();
this.mapsCompleted = job.getCompletedMaps();
this.mapProgress = report.getMapProgress() * 100;
@@ -115,6 +110,9 @@ public class JobInfo {
this.acls = new ArrayList();
if (hasAccess) {
+ this.diagnostics = "";
+ countTasksAndAttempts(job);
+
this.uberized = job.isUber();
    List<String> diagnostics = job.getDiagnostics();
@@ -213,7 +211,7 @@ public class JobInfo {
}
public String getState() {
- return this.state;
+ return this.state.toString();
}
public String getUser() {
@@ -267,13 +265,11 @@ public class JobInfo {
/**
* Go through a job and update the member variables with counts for
* information to output in the page.
- *
+ *
* @param job
* the job to get counts for.
*/
private void countTasksAndAttempts(Job job) {
- numReduces = 0;
- numMaps = 0;
    final Map<TaskId, Task> tasks = job.getTasks();
if (tasks == null) {
return;
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobTaskAttemptCounterInfo.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobTaskAttemptCounterInfo.java
index da23b7a24d0..a14e5feb878 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobTaskAttemptCounterInfo.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobTaskAttemptCounterInfo.java
@@ -38,23 +38,22 @@ public class JobTaskAttemptCounterInfo {
protected Counters total = null;
protected String id;
-  protected ArrayList<TaskCounterGroupInfo> taskCounterGroups;
+  protected ArrayList<TaskCounterGroupInfo> taskAttemptCounterGroup;
public JobTaskAttemptCounterInfo() {
}
public JobTaskAttemptCounterInfo(TaskAttempt taskattempt) {
- long value = 0;
this.id = MRApps.toString(taskattempt.getID());
total = taskattempt.getCounters();
-    taskCounterGroups = new ArrayList<TaskCounterGroupInfo>();
+    taskAttemptCounterGroup = new ArrayList<TaskCounterGroupInfo>();
if (total != null) {
for (CounterGroup g : total.getAllCounterGroups().values()) {
if (g != null) {
TaskCounterGroupInfo cginfo = new TaskCounterGroupInfo(g.getName(), g);
if (cginfo != null) {
- taskCounterGroups.add(cginfo);
+ taskAttemptCounterGroup.add(cginfo);
}
}
}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobTaskCounterInfo.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobTaskCounterInfo.java
index 7ba57f13a08..bcdde8c03cb 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobTaskCounterInfo.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobTaskCounterInfo.java
@@ -38,7 +38,7 @@ public class JobTaskCounterInfo {
protected Counters total = null;
protected String id;
-  protected ArrayList<TaskCounterGroupInfo> taskCounterGroups;
+  protected ArrayList<TaskCounterGroupInfo> taskCounterGroup;
public JobTaskCounterInfo() {
}
@@ -46,12 +46,12 @@ public class JobTaskCounterInfo {
public JobTaskCounterInfo(Task task) {
total = task.getCounters();
this.id = MRApps.toString(task.getID());
-    taskCounterGroups = new ArrayList<TaskCounterGroupInfo>();
+    taskCounterGroup = new ArrayList<TaskCounterGroupInfo>();
if (total != null) {
for (CounterGroup g : total.getAllCounterGroups().values()) {
if (g != null) {
TaskCounterGroupInfo cginfo = new TaskCounterGroupInfo(g.getName(), g);
- taskCounterGroups.add(cginfo);
+ taskCounterGroup.add(cginfo);
}
}
}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskAttemptInfo.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskAttemptInfo.java
index 231e36bdf05..c35411b0a2e 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskAttemptInfo.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskAttemptInfo.java
@@ -25,6 +25,7 @@ import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlSeeAlso;
import javax.xml.bind.annotation.XmlTransient;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState;
import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
import org.apache.hadoop.mapreduce.v2.util.MRApps;
@@ -43,7 +44,7 @@ public class TaskAttemptInfo {
protected float progress;
protected String id;
protected String rack;
- protected String state;
+ protected TaskAttemptState state;
protected String nodeHttpAddress;
protected String diagnostics;
protected String type;
@@ -69,7 +70,7 @@ public class TaskAttemptInfo {
.getAssignedContainerID());
this.assignedContainer = ta.getAssignedContainerID();
this.progress = ta.getProgress() * 100;
- this.state = ta.getState().toString();
+ this.state = ta.getState();
this.elapsedTime = Times
.elapsed(this.startTime, this.finishTime, isRunning);
if (this.elapsedTime == -1) {
@@ -95,7 +96,7 @@ public class TaskAttemptInfo {
}
public String getState() {
- return this.state;
+ return this.state.toString();
}
public String getId() {
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskAttemptsInfo.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskAttemptsInfo.java
index b8a48fe489d..c92488fe81b 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskAttemptsInfo.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskAttemptsInfo.java
@@ -23,21 +23,21 @@ import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlRootElement;
-@XmlRootElement(name = "taskattempts")
+@XmlRootElement(name = "taskAttempts")
@XmlAccessorType(XmlAccessType.FIELD)
public class TaskAttemptsInfo {
-  protected ArrayList<TaskAttemptInfo> taskattempt = new ArrayList<TaskAttemptInfo>();
+  protected ArrayList<TaskAttemptInfo> taskAttempt = new ArrayList<TaskAttemptInfo>();
public TaskAttemptsInfo() {
} // JAXB needs this
public void add(TaskAttemptInfo taskattemptInfo) {
- taskattempt.add(taskattemptInfo);
+ taskAttempt.add(taskattemptInfo);
}
  public ArrayList<TaskAttemptInfo> getTaskAttempts() {
- return taskattempt;
+ return taskAttempt;
}
}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskInfo.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskInfo.java
index d38635af2cd..aab8b56d911 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskInfo.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskInfo.java
@@ -24,6 +24,7 @@ import javax.xml.bind.annotation.XmlTransient;
import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState;
import org.apache.hadoop.mapreduce.v2.api.records.TaskReport;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskState;
import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
import org.apache.hadoop.mapreduce.v2.app.job.Task;
import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
@@ -39,7 +40,7 @@ public class TaskInfo {
protected long elapsedTime;
protected float progress;
protected String id;
- protected String state;
+ protected TaskState state;
protected String type;
protected String successfulAttempt;
@@ -62,7 +63,7 @@ public class TaskInfo {
if (this.elapsedTime == -1) {
this.elapsedTime = 0;
}
- this.state = report.getTaskState().toString();
+ this.state = report.getTaskState();
this.progress = report.getProgress() * 100;
this.id = MRApps.toString(task.getID());
this.taskNum = task.getID().getId();
@@ -79,7 +80,7 @@ public class TaskInfo {
}
public String getState() {
- return this.state;
+ return this.state.toString();
}
public String getId() {
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MockJobs.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MockJobs.java
index ad3e4a87120..183f589aa6f 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MockJobs.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MockJobs.java
@@ -1,39 +1,41 @@
/**
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements. See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership. The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License. You may obtain a copy of the License at
-*
-* http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-*/
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package org.apache.hadoop.mapreduce.v2.app;
-import com.google.common.collect.Iterators;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Maps;
import java.util.Collection;
import java.util.Collections;
+import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
+import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapred.JobACLsManager;
import org.apache.hadoop.mapred.ShuffleHandler;
import org.apache.hadoop.mapreduce.FileSystemCounter;
import org.apache.hadoop.mapreduce.JobACL;
import org.apache.hadoop.mapreduce.JobCounter;
+import org.apache.hadoop.mapreduce.MRConfig;
import org.apache.hadoop.mapreduce.TaskCounter;
+import org.apache.hadoop.mapreduce.TypeConverter;
import org.apache.hadoop.mapreduce.v2.api.records.AMInfo;
import org.apache.hadoop.mapreduce.v2.api.records.Counters;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
@@ -48,7 +50,6 @@ import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskReport;
import org.apache.hadoop.mapreduce.v2.api.records.TaskState;
import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
-import org.apache.hadoop.mapreduce.TypeConverter;
import org.apache.hadoop.mapreduce.v2.app.job.Job;
import org.apache.hadoop.mapreduce.v2.app.job.Task;
import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
@@ -63,33 +64,38 @@ import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.util.BuilderUtils;
import org.apache.hadoop.yarn.util.Records;
+import com.google.common.collect.Iterators;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+
public class MockJobs extends MockApps {
-  static final Iterator<JobState> JOB_STATES = Iterators.cycle(
-      JobState.values());
-  static final Iterator<TaskState> TASK_STATES = Iterators.cycle(
-      TaskState.values());
-  static final Iterator<TaskAttemptState> TASK_ATTEMPT_STATES = Iterators.cycle(
-      TaskAttemptState.values());
-  static final Iterator<TaskType> TASK_TYPES = Iterators.cycle(
-      TaskType.values());
-  static final Iterator<JobCounter> JOB_COUNTERS = Iterators.cycle(
-      JobCounter.values());
-  static final Iterator<FileSystemCounter> FS_COUNTERS = Iterators.cycle(
-      FileSystemCounter.values());
-  static final Iterator<TaskCounter> TASK_COUNTERS = Iterators.cycle(
-      TaskCounter.values());
+  static final Iterator<JobState> JOB_STATES = Iterators.cycle(JobState
+      .values());
+  static final Iterator<TaskState> TASK_STATES = Iterators.cycle(TaskState
+      .values());
+  static final Iterator<TaskAttemptState> TASK_ATTEMPT_STATES = Iterators
+      .cycle(TaskAttemptState.values());
+  static final Iterator<TaskType> TASK_TYPES = Iterators.cycle(TaskType
+      .values());
+  static final Iterator<JobCounter> JOB_COUNTERS = Iterators.cycle(JobCounter
+      .values());
+  static final Iterator<FileSystemCounter> FS_COUNTERS = Iterators
+      .cycle(FileSystemCounter.values());
+  static final Iterator<TaskCounter> TASK_COUNTERS = Iterators
+      .cycle(TaskCounter.values());
   static final Iterator<String> FS_SCHEMES = Iterators.cycle("FILE", "HDFS",
       "LAFS", "CEPH");
-  static final Iterator<String> USER_COUNTER_GROUPS = Iterators.cycle(
-      "com.company.project.subproject.component.subcomponent.UserDefinedSpecificSpecialTask$Counters",
-      "PigCounters");
-  static final Iterator<String> USER_COUNTERS = Iterators.cycle(
-      "counter1", "counter2", "counter3");
+  static final Iterator<String> USER_COUNTER_GROUPS = Iterators
+      .cycle(
+          "com.company.project.subproject.component.subcomponent.UserDefinedSpecificSpecialTask$Counters",
+          "PigCounters");
+  static final Iterator<String> USER_COUNTERS = Iterators.cycle("counter1",
+      "counter2", "counter3");
   static final Iterator<Phase> PHASES = Iterators.cycle(Phase.values());
   static final Iterator<String> DIAGS = Iterators.cycle(
       "Error: java.lang.OutOfMemoryError: Java heap space",
       "Lost task tracker: tasktracker.domain/127.0.0.1:40879");
-
+
public static final String NM_HOST = "localhost";
public static final int NM_PORT = 1234;
public static final int NM_HTTP_PORT = 9999;
@@ -101,8 +107,7 @@ public class MockJobs extends MockApps {
}
  public static Map<JobId, Job> newJobs(ApplicationId appID, int numJobsPerApp,
-      int numTasksPerJob,
-      int numAttemptsPerTask) {
+      int numTasksPerJob, int numAttemptsPerTask) {
    Map<JobId, Job> map = Maps.newHashMap();
for (int j = 0; j < numJobsPerApp; ++j) {
Job job = newJob(appID, j, numTasksPerJob, numAttemptsPerTask);
@@ -121,10 +126,12 @@ public class MockJobs extends MockApps {
public static JobReport newJobReport(JobId id) {
JobReport report = Records.newRecord(JobReport.class);
report.setJobId(id);
- report.setStartTime(System.currentTimeMillis() - (int)(Math.random() * DT));
- report.setFinishTime(System.currentTimeMillis() + (int)(Math.random() * DT) + 1);
- report.setMapProgress((float)Math.random());
- report.setReduceProgress((float)Math.random());
+ report
+ .setStartTime(System.currentTimeMillis() - (int) (Math.random() * DT));
+ report.setFinishTime(System.currentTimeMillis()
+ + (int) (Math.random() * DT) + 1);
+ report.setMapProgress((float) Math.random());
+ report.setReduceProgress((float) Math.random());
report.setJobState(JOB_STATES.next());
return report;
}
@@ -132,9 +139,11 @@ public class MockJobs extends MockApps {
public static TaskReport newTaskReport(TaskId id) {
TaskReport report = Records.newRecord(TaskReport.class);
report.setTaskId(id);
- report.setStartTime(System.currentTimeMillis() - (int)(Math.random() * DT));
- report.setFinishTime(System.currentTimeMillis() + (int)(Math.random() * DT) + 1);
- report.setProgress((float)Math.random());
+ report
+ .setStartTime(System.currentTimeMillis() - (int) (Math.random() * DT));
+ report.setFinishTime(System.currentTimeMillis()
+ + (int) (Math.random() * DT) + 1);
+ report.setProgress((float) Math.random());
report.setCounters(newCounters());
report.setTaskState(TASK_STATES.next());
return report;
@@ -143,41 +152,42 @@ public class MockJobs extends MockApps {
public static TaskAttemptReport newTaskAttemptReport(TaskAttemptId id) {
TaskAttemptReport report = Records.newRecord(TaskAttemptReport.class);
report.setTaskAttemptId(id);
- report.setStartTime(System.currentTimeMillis() - (int)(Math.random() * DT));
- report.setFinishTime(System.currentTimeMillis() + (int)(Math.random() * DT) + 1);
+ report
+ .setStartTime(System.currentTimeMillis() - (int) (Math.random() * DT));
+ report.setFinishTime(System.currentTimeMillis()
+ + (int) (Math.random() * DT) + 1);
report.setPhase(PHASES.next());
report.setTaskAttemptState(TASK_ATTEMPT_STATES.next());
- report.setProgress((float)Math.random());
+ report.setProgress((float) Math.random());
report.setCounters(newCounters());
return report;
}
@SuppressWarnings("deprecation")
public static Counters newCounters() {
- org.apache.hadoop.mapred.Counters hc =
- new org.apache.hadoop.mapred.Counters();
+ org.apache.hadoop.mapred.Counters hc = new org.apache.hadoop.mapred.Counters();
for (JobCounter c : JobCounter.values()) {
- hc.findCounter(c).setValue((long)(Math.random() * 1000));
+ hc.findCounter(c).setValue((long) (Math.random() * 1000));
}
for (TaskCounter c : TaskCounter.values()) {
- hc.findCounter(c).setValue((long)(Math.random() * 1000));
+ hc.findCounter(c).setValue((long) (Math.random() * 1000));
}
int nc = FileSystemCounter.values().length * 4;
for (int i = 0; i < nc; ++i) {
for (FileSystemCounter c : FileSystemCounter.values()) {
- hc.findCounter(FS_SCHEMES.next(), c).
- setValue((long)(Math.random() * DT));
+ hc.findCounter(FS_SCHEMES.next(), c).setValue(
+ (long) (Math.random() * DT));
}
}
for (int i = 0; i < 2 * 3; ++i) {
- hc.findCounter(USER_COUNTER_GROUPS.next(), USER_COUNTERS.next()).
- setValue((long)(Math.random() * 100000));
+ hc.findCounter(USER_COUNTER_GROUPS.next(), USER_COUNTERS.next())
+ .setValue((long) (Math.random() * 100000));
}
return TypeConverter.toYarn(hc);
}
  public static Map<TaskAttemptId, TaskAttempt> newTaskAttempts(TaskId tid,
-      int m) {
+      int m) {
    Map<TaskAttemptId, TaskAttempt> map = Maps.newHashMap();
for (int i = 0; i < m; ++i) {
TaskAttempt ta = newTaskAttempt(tid, i);
@@ -237,9 +247,10 @@ public class MockJobs extends MockApps {
@Override
public boolean isFinished() {
switch (report.getTaskAttemptState()) {
- case SUCCEEDED:
- case FAILED:
- case KILLED: return true;
+ case SUCCEEDED:
+ case FAILED:
+ case KILLED:
+ return true;
}
return false;
}
@@ -247,8 +258,8 @@ public class MockJobs extends MockApps {
@Override
public ContainerId getAssignedContainerID() {
ContainerId id = Records.newRecord(ContainerId.class);
- ApplicationAttemptId appAttemptId =
- Records.newRecord(ApplicationAttemptId.class);
+ ApplicationAttemptId appAttemptId = Records
+ .newRecord(ApplicationAttemptId.class);
appAttemptId.setApplicationId(taid.getTaskId().getJobId().getAppId());
appAttemptId.setAttemptId(0);
id.setApplicationAttemptId(appAttemptId);
@@ -280,10 +291,10 @@ public class MockJobs extends MockApps {
return 0;
}
- @Override
- public String getNodeRackName() {
- return "/default-rack";
- }
+ @Override
+ public String getNodeRackName() {
+ return "/default-rack";
+ }
};
}
@@ -342,9 +353,10 @@ public class MockJobs extends MockApps {
@Override
public boolean isFinished() {
switch (report.getTaskState()) {
- case SUCCEEDED:
- case KILLED:
- case FAILED: return true;
+ case SUCCEEDED:
+ case KILLED:
+ case FAILED:
+ return true;
}
return false;
}
@@ -398,12 +410,26 @@ public class MockJobs extends MockApps {
}
public static Job newJob(ApplicationId appID, int i, int n, int m) {
+ return newJob(appID, i, n, m, null);
+ }
+
+ public static Job newJob(ApplicationId appID, int i, int n, int m, Path confFile) {
final JobId id = newJobID(appID, i);
final String name = newJobName();
final JobReport report = newJobReport(id);
    final Map<TaskId, Task> tasks = newTasks(id, n, m);
final TaskCount taskCount = getTaskCount(tasks.values());
final Counters counters = getCounters(tasks.values());
+ final Path configFile = confFile;
+
+    Map<JobACL, AccessControlList> tmpJobACLs = new HashMap<JobACL, AccessControlList>();
+ Configuration conf = new Configuration();
+ conf.set(JobACL.VIEW_JOB.getAclName(), "testuser");
+ conf.setBoolean(MRConfig.MR_ACLS_ENABLED, true);
+
+ JobACLsManager aclsManager = new JobACLsManager(conf);
+ tmpJobACLs = aclsManager.constructJobACLs(conf);
+    final Map<JobACL, AccessControlList> jobACLs = tmpJobACLs;
return new Job() {
@Override
public JobId getID() {
@@ -483,7 +509,7 @@ public class MockJobs extends MockApps {
@Override
    public List<String> getDiagnostics() {
-      return Collections.emptyList();
+      return Collections.<String> emptyList();
}
@Override
@@ -504,12 +530,12 @@ public class MockJobs extends MockApps {
@Override
public Path getConfFile() {
- throw new UnsupportedOperationException("Not supported yet.");
+ return configFile;
}
@Override
    public Map<JobACL, AccessControlList> getJobACLs() {
- return Collections.emptyMap();
+ return jobACLs;
}
@Override
@@ -521,11 +547,10 @@ public class MockJobs extends MockApps {
}
};
}
-
+
private static AMInfo createAMInfo(int attempt) {
- ApplicationAttemptId appAttemptId =
- BuilderUtils.newApplicationAttemptId(
- BuilderUtils.newApplicationId(100, 1), attempt);
+ ApplicationAttemptId appAttemptId = BuilderUtils.newApplicationAttemptId(
+ BuilderUtils.newApplicationId(100, 1), attempt);
ContainerId containerId = BuilderUtils.newContainerId(appAttemptId, 1);
return MRBuilderUtils.newAMInfo(appAttemptId, System.currentTimeMillis(),
containerId, NM_HOST, NM_PORT, NM_HTTP_PORT);
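
The extra newJob overload gives web-service tests a job whose conf file and ACLs behave like the real thing: VIEW_JOB is granted to "testuser" through JobACLsManager with MR ACLs enabled, so the web layer's hasAccess checks can actually be exercised. Typical use from a test (the conf path is hypothetical):

    // Sketch: a mock job with a conf file and a populated ACL map.
    ApplicationId appId = MockJobs.newAppID(0);
    Path confPath = new Path("/tmp/job_0_0001/job.xml"); // hypothetical path
    Job job = MockJobs.newJob(appId, 0, 4, 2, confPath);
    assert confPath.equals(job.getConfFile());            // no longer throws
    assert job.getJobACLs().containsKey(JobACL.VIEW_JOB);  // ACLs now populated
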
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServices.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServices.java
new file mode 100644
index 00000000000..27346789769
--- /dev/null
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServices.java
@@ -0,0 +1,359 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.mapreduce.v2.app.webapp;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.StringReader;
+import java.util.Map;
+
+import javax.ws.rs.core.MediaType;
+import javax.xml.parsers.DocumentBuilder;
+import javax.xml.parsers.DocumentBuilderFactory;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.mapreduce.v2.api.records.JobId;
+import org.apache.hadoop.mapreduce.v2.app.AppContext;
+import org.apache.hadoop.mapreduce.v2.app.MockJobs;
+import org.apache.hadoop.mapreduce.v2.app.job.Job;
+import org.apache.hadoop.yarn.Clock;
+import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.event.EventHandler;
+import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
+import org.apache.hadoop.yarn.webapp.WebServicesTestUtils;
+import org.codehaus.jettison.json.JSONException;
+import org.codehaus.jettison.json.JSONObject;
+import org.junit.Before;
+import org.junit.Test;
+import org.w3c.dom.Document;
+import org.w3c.dom.Element;
+import org.w3c.dom.NodeList;
+import org.xml.sax.InputSource;
+
+import com.google.inject.Guice;
+import com.google.inject.Injector;
+import com.google.inject.servlet.GuiceServletContextListener;
+import com.google.inject.servlet.ServletModule;
+import com.sun.jersey.api.client.ClientResponse;
+import com.sun.jersey.api.client.ClientResponse.Status;
+import com.sun.jersey.api.client.UniformInterfaceException;
+import com.sun.jersey.api.client.WebResource;
+import com.sun.jersey.guice.spi.container.servlet.GuiceContainer;
+import com.sun.jersey.test.framework.JerseyTest;
+import com.sun.jersey.test.framework.WebAppDescriptor;
+
+/**
+ * Test the MapReduce Application master info web services APIs. Also test
+ * non-existent URLs.
+ *
+ * /ws/v1/mapreduce
+ * /ws/v1/mapreduce/info
+ */
+public class TestAMWebServices extends JerseyTest {
+
+ private static Configuration conf = new Configuration();
+ private static TestAppContext appContext;
+
+ static class TestAppContext implements AppContext {
+ final ApplicationAttemptId appAttemptID;
+ final ApplicationId appID;
+ final String user = MockJobs.newUserName();
+ final Map<JobId, Job> jobs;
+ final long startTime = System.currentTimeMillis();
+
+ TestAppContext(int appid, int numJobs, int numTasks, int numAttempts) {
+ appID = MockJobs.newAppID(appid);
+ appAttemptID = MockJobs.newAppAttemptID(appID, 0);
+ jobs = MockJobs.newJobs(appID, numJobs, numTasks, numAttempts);
+ }
+
+ TestAppContext() {
+ this(0, 1, 1, 1);
+ }
+
+ @Override
+ public ApplicationAttemptId getApplicationAttemptId() {
+ return appAttemptID;
+ }
+
+ @Override
+ public ApplicationId getApplicationID() {
+ return appID;
+ }
+
+ @Override
+ public CharSequence getUser() {
+ return user;
+ }
+
+ @Override
+ public Job getJob(JobId jobID) {
+ return jobs.get(jobID);
+ }
+
+ @Override
+ public Map<JobId, Job> getAllJobs() {
+ return jobs; // OK
+ }
+
+ @SuppressWarnings("rawtypes")
+ @Override
+ public EventHandler getEventHandler() {
+ return null;
+ }
+
+ @Override
+ public Clock getClock() {
+ return null;
+ }
+
+ @Override
+ public String getApplicationName() {
+ return "TestApp";
+ }
+
+ @Override
+ public long getStartTime() {
+ return startTime;
+ }
+ }
+
+ private Injector injector = Guice.createInjector(new ServletModule() {
+ @Override
+ protected void configureServlets() {
+
+ appContext = new TestAppContext();
+ bind(JAXBContextResolver.class);
+ bind(AMWebServices.class);
+ bind(GenericExceptionHandler.class);
+ bind(AppContext.class).toInstance(appContext);
+ bind(Configuration.class).toInstance(conf);
+
+ serve("/*").with(GuiceContainer.class);
+ }
+ });
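+ // The module above backs the embedded Jersey test container: every
+ // request ("/*") is routed through GuiceContainer, so AMWebServices is
+ // resolved with the mock AppContext and Configuration bound here rather
+ // than with a live application master.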
+
+ public class GuiceServletConfig extends GuiceServletContextListener {
+
+ @Override
+ protected Injector getInjector() {
+ return injector;
+ }
+ }
+
+ @Before
+ @Override
+ public void setUp() throws Exception {
+ super.setUp();
+ }
+
+ public TestAMWebServices() {
+ super(new WebAppDescriptor.Builder(
+ "org.apache.hadoop.mapreduce.v2.app.webapp")
+ .contextListenerClass(GuiceServletConfig.class)
+ .filterClass(com.google.inject.servlet.GuiceFilter.class)
+ .contextPath("jersey-guice-filter").servletPath("/").build());
+ }
+
+ @Test
+ public void testAM() throws JSONException, Exception {
+ WebResource r = resource();
+ ClientResponse response = r.path("ws").path("v1").path("mapreduce")
+ .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ verifyAMInfo(json.getJSONObject("info"), appContext);
+ }
+
+ @Test
+ public void testAMSlash() throws JSONException, Exception {
+ WebResource r = resource();
+ ClientResponse response = r.path("ws").path("v1").path("mapreduce/")
+ .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ verifyAMInfo(json.getJSONObject("info"), appContext);
+ }
+
+ @Test
+ public void testAMDefault() throws JSONException, Exception {
+ WebResource r = resource();
+ ClientResponse response = r.path("ws").path("v1").path("mapreduce/")
+ .get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ verifyAMInfo(json.getJSONObject("info"), appContext);
+ }
+
+ @Test
+ public void testAMXML() throws JSONException, Exception {
+ WebResource r = resource();
+ ClientResponse response = r.path("ws").path("v1").path("mapreduce")
+ .accept(MediaType.APPLICATION_XML).get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
+ String xml = response.getEntity(String.class);
+ verifyAMInfoXML(xml, appContext);
+ }
+
+ @Test
+ public void testInfo() throws JSONException, Exception {
+ WebResource r = resource();
+ ClientResponse response = r.path("ws").path("v1").path("mapreduce")
+ .path("info").accept(MediaType.APPLICATION_JSON)
+ .get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ verifyAMInfo(json.getJSONObject("info"), appContext);
+ }
+
+ @Test
+ public void testInfoSlash() throws JSONException, Exception {
+ WebResource r = resource();
+ ClientResponse response = r.path("ws").path("v1").path("mapreduce")
+ .path("info/").accept(MediaType.APPLICATION_JSON)
+ .get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ verifyAMInfo(json.getJSONObject("info"), appContext);
+ }
+
+ @Test
+ public void testInfoDefault() throws JSONException, Exception {
+ WebResource r = resource();
+ ClientResponse response = r.path("ws").path("v1").path("mapreduce")
+ .path("info/").get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ verifyAMInfo(json.getJSONObject("info"), appContext);
+ }
+
+ @Test
+ public void testInfoXML() throws JSONException, Exception {
+ WebResource r = resource();
+ ClientResponse response = r.path("ws").path("v1").path("mapreduce")
+ .path("info/").accept(MediaType.APPLICATION_XML)
+ .get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
+ String xml = response.getEntity(String.class);
+ verifyAMInfoXML(xml, appContext);
+ }
+
+ @Test
+ public void testInvalidUri() throws JSONException, Exception {
+ WebResource r = resource();
+ String responseStr = "";
+ try {
+ responseStr = r.path("ws").path("v1").path("mapreduce").path("bogus")
+ .accept(MediaType.APPLICATION_JSON).get(String.class);
+ fail("should have thrown exception on invalid uri");
+ } catch (UniformInterfaceException ue) {
+ ClientResponse response = ue.getResponse();
+ assertEquals(Status.NOT_FOUND, response.getClientResponseStatus());
+ WebServicesTestUtils.checkStringMatch(
+ "error string exists and shouldn't", "", responseStr);
+ }
+ }
+
+ @Test
+ public void testInvalidUri2() throws JSONException, Exception {
+ WebResource r = resource();
+ String responseStr = "";
+ try {
+ responseStr = r.path("ws").path("v1").path("invalid")
+ .accept(MediaType.APPLICATION_JSON).get(String.class);
+ fail("should have thrown exception on invalid uri");
+ } catch (UniformInterfaceException ue) {
+ ClientResponse response = ue.getResponse();
+ assertEquals(Status.NOT_FOUND, response.getClientResponseStatus());
+ WebServicesTestUtils.checkStringMatch(
+ "error string exists and shouldn't", "", responseStr);
+ }
+ }
+
+ @Test
+ public void testInvalidAccept() throws JSONException, Exception {
+ WebResource r = resource();
+ String responseStr = "";
+ try {
+ responseStr = r.path("ws").path("v1").path("mapreduce")
+ .accept(MediaType.TEXT_PLAIN).get(String.class);
+ fail("should have thrown exception on invalid uri");
+ } catch (UniformInterfaceException ue) {
+ ClientResponse response = ue.getResponse();
+ assertEquals(Status.INTERNAL_SERVER_ERROR,
+ response.getClientResponseStatus());
+ WebServicesTestUtils.checkStringMatch(
+ "error string exists and shouldn't", "", responseStr);
+ }
+ }
+
+ public void verifyAMInfo(JSONObject info, TestAppContext ctx)
+ throws JSONException {
+ assertEquals("incorrect number of elements", 5, info.length());
+
+ verifyAMInfoGeneric(ctx, info.getString("appId"), info.getString("user"),
+ info.getString("name"), info.getLong("startedOn"),
+ info.getLong("elapsedTime"));
+ }
+
+ public void verifyAMInfoXML(String xml, TestAppContext ctx)
+ throws JSONException, Exception {
+ DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
+ DocumentBuilder db = dbf.newDocumentBuilder();
+ InputSource is = new InputSource();
+ is.setCharacterStream(new StringReader(xml));
+ Document dom = db.parse(is);
+ NodeList nodes = dom.getElementsByTagName("info");
+ assertEquals("incorrect number of elements", 1, nodes.getLength());
+
+ for (int i = 0; i < nodes.getLength(); i++) {
+ Element element = (Element) nodes.item(i);
+ verifyAMInfoGeneric(ctx,
+ WebServicesTestUtils.getXmlString(element, "appId"),
+ WebServicesTestUtils.getXmlString(element, "user"),
+ WebServicesTestUtils.getXmlString(element, "name"),
+ WebServicesTestUtils.getXmlLong(element, "startedOn"),
+ WebServicesTestUtils.getXmlLong(element, "elapsedTime"));
+ }
+ }
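+ // The XML variant parsed above presumably serializes the same five
+ // fields as child elements of <info>, e.g. (values hypothetical):
+ // <info><appId>...</appId><user>...</user><name>TestApp</name>
+ // <startedOn>1326238244047</startedOn><elapsedTime>32407</elapsedTime></info>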
+
+ public void verifyAMInfoGeneric(TestAppContext ctx, String id, String user,
+ String name, long startedOn, long elapsedTime) {
+
+ WebServicesTestUtils.checkStringMatch("id", ctx.getApplicationID()
+ .toString(), id);
+ WebServicesTestUtils.checkStringMatch("user", ctx.getUser().toString(),
+ user);
+ WebServicesTestUtils.checkStringMatch("name", ctx.getApplicationName(),
+ name);
+
+ assertEquals("startedOn incorrect", ctx.getStartTime(), startedOn);
+ assertTrue("elapsedTime not greater then 0", (elapsedTime > 0));
+
+ }
+}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesAttempts.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesAttempts.java
new file mode 100644
index 00000000000..215d7718e15
--- /dev/null
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesAttempts.java
@@ -0,0 +1,732 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.mapreduce.v2.app.webapp;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.StringReader;
+import java.util.List;
+import java.util.Map;
+
+import javax.ws.rs.core.MediaType;
+import javax.xml.parsers.DocumentBuilder;
+import javax.xml.parsers.DocumentBuilderFactory;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.mapreduce.v2.api.records.JobId;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
+import org.apache.hadoop.mapreduce.v2.app.AppContext;
+import org.apache.hadoop.mapreduce.v2.app.MockJobs;
+import org.apache.hadoop.mapreduce.v2.app.job.Job;
+import org.apache.hadoop.mapreduce.v2.app.job.Task;
+import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
+import org.apache.hadoop.mapreduce.v2.util.MRApps;
+import org.apache.hadoop.yarn.Clock;
+import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.event.EventHandler;
+import org.apache.hadoop.yarn.util.ConverterUtils;
+import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
+import org.apache.hadoop.yarn.webapp.WebServicesTestUtils;
+import org.codehaus.jettison.json.JSONArray;
+import org.codehaus.jettison.json.JSONException;
+import org.codehaus.jettison.json.JSONObject;
+import org.junit.Before;
+import org.junit.Test;
+import org.w3c.dom.Document;
+import org.w3c.dom.Element;
+import org.w3c.dom.NodeList;
+import org.xml.sax.InputSource;
+
+import com.google.inject.Guice;
+import com.google.inject.Injector;
+import com.google.inject.servlet.GuiceServletContextListener;
+import com.google.inject.servlet.ServletModule;
+import com.sun.jersey.api.client.ClientResponse;
+import com.sun.jersey.api.client.UniformInterfaceException;
+import com.sun.jersey.api.client.WebResource;
+import com.sun.jersey.api.client.ClientResponse.Status;
+import com.sun.jersey.guice.spi.container.servlet.GuiceContainer;
+import com.sun.jersey.test.framework.JerseyTest;
+import com.sun.jersey.test.framework.WebAppDescriptor;
+
+/**
+ * Test the app master web service REST API for getting task attempts, a
+ * specific task attempt, and task attempt counters.
+ *
+ * /ws/v1/mapreduce/jobs/{jobid}/tasks/{taskid}/attempts
+ * /ws/v1/mapreduce/jobs/{jobid}/tasks/{taskid}/attempts/{attemptid}
+ * /ws/v1/mapreduce/jobs/{jobid}/tasks/{taskid}/attempts/{attemptid}/counters
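+ *
+ * Each JSON taskAttempt object carries the fields checked in
+ * verifyAMTaskAttempt: id, state, type, rack, nodeHttpAddress, diagnostics,
+ * assignedContainerId, startTime, finishTime, elapsedTime and progress
+ * (11 fields), plus shuffleFinishTime, mergeFinishTime, elapsedShuffleTime,
+ * elapsedMergeTime and elapsedReduceTime for REDUCE attempts (16 fields).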
+ */
+public class TestAMWebServicesAttempts extends JerseyTest {
+
+ private static Configuration conf = new Configuration();
+ private static TestAppContext appContext;
+
+ static class TestAppContext implements AppContext {
+ final ApplicationAttemptId appAttemptID;
+ final ApplicationId appID;
+ final String user = MockJobs.newUserName();
+ final Map<JobId, Job> jobs;
+ final long startTime = System.currentTimeMillis();
+
+ TestAppContext(int appid, int numJobs, int numTasks, int numAttempts) {
+ appID = MockJobs.newAppID(appid);
+ appAttemptID = MockJobs.newAppAttemptID(appID, 0);
+ jobs = MockJobs.newJobs(appID, numJobs, numTasks, numAttempts);
+ }
+
+ TestAppContext() {
+ this(0, 1, 2, 1);
+ }
+
+ @Override
+ public ApplicationAttemptId getApplicationAttemptId() {
+ return appAttemptID;
+ }
+
+ @Override
+ public ApplicationId getApplicationID() {
+ return appID;
+ }
+
+ @Override
+ public CharSequence getUser() {
+ return user;
+ }
+
+ @Override
+ public Job getJob(JobId jobID) {
+ return jobs.get(jobID);
+ }
+
+ @Override
+ public Map<JobId, Job> getAllJobs() {
+ return jobs; // OK
+ }
+
+ @SuppressWarnings("rawtypes")
+ @Override
+ public EventHandler getEventHandler() {
+ return null;
+ }
+
+ @Override
+ public Clock getClock() {
+ return null;
+ }
+
+ @Override
+ public String getApplicationName() {
+ return "TestApp";
+ }
+
+ @Override
+ public long getStartTime() {
+ return startTime;
+ }
+ }
+
+ private Injector injector = Guice.createInjector(new ServletModule() {
+ @Override
+ protected void configureServlets() {
+
+ appContext = new TestAppContext();
+ bind(JAXBContextResolver.class);
+ bind(AMWebServices.class);
+ bind(GenericExceptionHandler.class);
+ bind(AppContext.class).toInstance(appContext);
+ bind(Configuration.class).toInstance(conf);
+
+ serve("/*").with(GuiceContainer.class);
+ }
+ });
+
+ public class GuiceServletConfig extends GuiceServletContextListener {
+
+ @Override
+ protected Injector getInjector() {
+ return injector;
+ }
+ }
+
+ @Before
+ @Override
+ public void setUp() throws Exception {
+ super.setUp();
+
+ }
+
+ public TestAMWebServicesAttempts() {
+ super(new WebAppDescriptor.Builder(
+ "org.apache.hadoop.mapreduce.v2.app.webapp")
+ .contextListenerClass(GuiceServletConfig.class)
+ .filterClass(com.google.inject.servlet.GuiceFilter.class)
+ .contextPath("jersey-guice-filter").servletPath("/").build());
+ }
+
+ @Test
+ public void testTaskAttempts() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+ for (Task task : jobsMap.get(id).getTasks().values()) {
+
+ String tid = MRApps.toString(task.getID());
+ ClientResponse response = r.path("ws").path("v1").path("mapreduce")
+ .path("jobs").path(jobId).path("tasks").path(tid).path("attempts")
+ .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ verifyAMTaskAttempts(json, task);
+ }
+ }
+ }
+
+ @Test
+ public void testTaskAttemptsSlash() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+ for (Task task : jobsMap.get(id).getTasks().values()) {
+
+ String tid = MRApps.toString(task.getID());
+ ClientResponse response = r.path("ws").path("v1").path("mapreduce")
+ .path("jobs").path(jobId).path("tasks").path(tid).path("attempts/")
+ .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ verifyAMTaskAttempts(json, task);
+ }
+ }
+ }
+
+ @Test
+ public void testTaskAttemptsDefault() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+ for (Task task : jobsMap.get(id).getTasks().values()) {
+
+ String tid = MRApps.toString(task.getID());
+ ClientResponse response = r.path("ws").path("v1").path("mapreduce")
+ .path("jobs").path(jobId).path("tasks").path(tid).path("attempts")
+ .get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ verifyAMTaskAttempts(json, task);
+ }
+ }
+ }
+
+ @Test
+ public void testTaskAttemptsXML() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+ for (Task task : jobsMap.get(id).getTasks().values()) {
+
+ String tid = MRApps.toString(task.getID());
+ ClientResponse response = r.path("ws").path("v1").path("mapreduce")
+ .path("jobs").path(jobId).path("tasks").path(tid).path("attempts")
+ .accept(MediaType.APPLICATION_XML).get(ClientResponse.class);
+
+ assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
+ String xml = response.getEntity(String.class);
+ DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
+ DocumentBuilder db = dbf.newDocumentBuilder();
+ InputSource is = new InputSource();
+ is.setCharacterStream(new StringReader(xml));
+ Document dom = db.parse(is);
+ NodeList attempts = dom.getElementsByTagName("taskAttempts");
+ assertEquals("incorrect number of elements", 1, attempts.getLength());
+
+ NodeList nodes = dom.getElementsByTagName("taskAttempt");
+ verifyAMTaskAttemptsXML(nodes, task);
+ }
+ }
+ }
+
+ @Test
+ public void testTaskAttemptId() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+
+ for (Task task : jobsMap.get(id).getTasks().values()) {
+ String tid = MRApps.toString(task.getID());
+
+ for (TaskAttempt att : task.getAttempts().values()) {
+ TaskAttemptId attemptid = att.getID();
+ String attid = MRApps.toString(attemptid);
+
+ ClientResponse response = r.path("ws").path("v1").path("mapreduce")
+ .path("jobs").path(jobId).path("tasks").path(tid)
+ .path("attempts").path(attid).accept(MediaType.APPLICATION_JSON)
+ .get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ JSONObject info = json.getJSONObject("taskAttempt");
+ verifyAMTaskAttempt(info, att, task.getType());
+ }
+ }
+ }
+ }
+
+ @Test
+ public void testTaskAttemptIdSlash() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+
+ for (Task task : jobsMap.get(id).getTasks().values()) {
+ String tid = MRApps.toString(task.getID());
+
+ for (TaskAttempt att : task.getAttempts().values()) {
+ TaskAttemptId attemptid = att.getID();
+ String attid = MRApps.toString(attemptid);
+
+ ClientResponse response = r.path("ws").path("v1").path("mapreduce")
+ .path("jobs").path(jobId).path("tasks").path(tid)
+ .path("attempts").path(attid + "/")
+ .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ JSONObject info = json.getJSONObject("taskAttempt");
+ verifyAMTaskAttempt(info, att, task.getType());
+ }
+ }
+ }
+ }
+
+ @Test
+ public void testTaskAttemptIdDefault() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+
+ for (Task task : jobsMap.get(id).getTasks().values()) {
+ String tid = MRApps.toString(task.getID());
+
+ for (TaskAttempt att : task.getAttempts().values()) {
+ TaskAttemptId attemptid = att.getID();
+ String attid = MRApps.toString(attemptid);
+
+ ClientResponse response = r.path("ws").path("v1").path("mapreduce")
+ .path("jobs").path(jobId).path("tasks").path(tid)
+ .path("attempts").path(attid).get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ JSONObject info = json.getJSONObject("taskAttempt");
+ verifyAMTaskAttempt(info, att, task.getType());
+ }
+ }
+ }
+ }
+
+ @Test
+ public void testTaskAttemptIdXML() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+ for (Task task : jobsMap.get(id).getTasks().values()) {
+
+ String tid = MRApps.toString(task.getID());
+ for (TaskAttempt att : task.getAttempts().values()) {
+ TaskAttemptId attemptid = att.getID();
+ String attid = MRApps.toString(attemptid);
+
+ ClientResponse response = r.path("ws").path("v1").path("mapreduce")
+ .path("jobs").path(jobId).path("tasks").path(tid)
+ .path("attempts").path(attid).accept(MediaType.APPLICATION_XML)
+ .get(ClientResponse.class);
+
+ assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
+ String xml = response.getEntity(String.class);
+ DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
+ DocumentBuilder db = dbf.newDocumentBuilder();
+ InputSource is = new InputSource();
+ is.setCharacterStream(new StringReader(xml));
+ Document dom = db.parse(is);
+ NodeList nodes = dom.getElementsByTagName("taskAttempt");
+ for (int i = 0; i < nodes.getLength(); i++) {
+ Element element = (Element) nodes.item(i);
+ verifyAMTaskAttemptXML(element, att, task.getType());
+ }
+ }
+ }
+ }
+ }
+
+ @Test
+ public void testTaskAttemptIdBogus() throws JSONException, Exception {
+
+ testTaskAttemptIdErrorGeneric("bogusid",
+ "java.lang.Exception: Error parsing attempt ID: bogusid");
+ }
+
+ @Test
+ public void testTaskAttemptIdNonExist() throws JSONException, Exception {
+
+ testTaskAttemptIdErrorGeneric(
+ "attempt_12345_0_0_r_1_0",
+ "java.lang.Exception: Error getting info on task attempt id attempt_12345_0_0_r_1_0");
+ }
+
+ @Test
+ public void testTaskAttemptIdInvalid() throws JSONException, Exception {
+
+ testTaskAttemptIdErrorGeneric("attempt_12345_0_0_d_1_0",
+ "java.lang.Exception: Unknown task symbol: d");
+ }
+
+ @Test
+ public void testTaskAttemptIdInvalid2() throws JSONException, Exception {
+
+ testTaskAttemptIdErrorGeneric("attempt_12345_0_r_1_0",
+ "java.lang.Exception: For input string: \"r\"");
+ }
+
+ @Test
+ public void testTaskAttemptIdInvalid3() throws JSONException, Exception {
+
+ testTaskAttemptIdErrorGeneric("attempt_12345_0_0_r_1",
+ "java.lang.Exception: Error parsing attempt ID: attempt_12345_0_0_r_1");
+ }
+
+ private void testTaskAttemptIdErrorGeneric(String attid, String error)
+ throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+
+ for (Task task : jobsMap.get(id).getTasks().values()) {
+ String tid = MRApps.toString(task.getID());
+
+ try {
+ r.path("ws").path("v1").path("mapreduce").path("jobs").path(jobId)
+ .path("tasks").path(tid).path("attempts").path(attid)
+ .accept(MediaType.APPLICATION_JSON).get(JSONObject.class);
+ fail("should have thrown exception on invalid uri");
+ } catch (UniformInterfaceException ue) {
+ ClientResponse response = ue.getResponse();
+ assertEquals(Status.NOT_FOUND, response.getClientResponseStatus());
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject msg = response.getEntity(JSONObject.class);
+ JSONObject exception = msg.getJSONObject("RemoteException");
+ assertEquals("incorrect number of elements", 3, exception.length());
+ String message = exception.getString("message");
+ String type = exception.getString("exception");
+ String classname = exception.getString("javaClassName");
+ WebServicesTestUtils.checkStringMatch("exception message", error,
+ message);
+ WebServicesTestUtils.checkStringMatch("exception type",
+ "NotFoundException", type);
+ WebServicesTestUtils.checkStringMatch("exception classname",
+ "org.apache.hadoop.yarn.webapp.NotFoundException", classname);
+ }
+ }
+ }
+ }
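+ // Every error case above expects the standard YARN RemoteException body,
+ // e.g. (message value hypothetical):
+ // {"RemoteException": {"message": "java.lang.Exception: ...",
+ // "exception": "NotFoundException",
+ // "javaClassName": "org.apache.hadoop.yarn.webapp.NotFoundException"}}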
+
+ public void verifyAMTaskAttemptXML(Element element, TaskAttempt att,
+ TaskType ttype) {
+ verifyTaskAttemptGeneric(att, ttype,
+ WebServicesTestUtils.getXmlString(element, "id"),
+ WebServicesTestUtils.getXmlString(element, "state"),
+ WebServicesTestUtils.getXmlString(element, "type"),
+ WebServicesTestUtils.getXmlString(element, "rack"),
+ WebServicesTestUtils.getXmlString(element, "nodeHttpAddress"),
+ WebServicesTestUtils.getXmlString(element, "diagnostics"),
+ WebServicesTestUtils.getXmlString(element, "assignedContainerId"),
+ WebServicesTestUtils.getXmlLong(element, "startTime"),
+ WebServicesTestUtils.getXmlLong(element, "finishTime"),
+ WebServicesTestUtils.getXmlLong(element, "elapsedTime"),
+ WebServicesTestUtils.getXmlFloat(element, "progress"));
+
+ if (ttype == TaskType.REDUCE) {
+ verifyReduceTaskAttemptGeneric(att,
+ WebServicesTestUtils.getXmlLong(element, "shuffleFinishTime"),
+ WebServicesTestUtils.getXmlLong(element, "mergeFinishTime"),
+ WebServicesTestUtils.getXmlLong(element, "elapsedShuffleTime"),
+ WebServicesTestUtils.getXmlLong(element, "elapsedMergeTime"),
+ WebServicesTestUtils.getXmlLong(element, "elapsedReduceTime"));
+ }
+ }
+
+ public void verifyAMTaskAttempt(JSONObject info, TaskAttempt att,
+ TaskType ttype) throws JSONException {
+ if (ttype == TaskType.REDUCE) {
+ assertEquals("incorrect number of elements", 16, info.length());
+ } else {
+ assertEquals("incorrect number of elements", 11, info.length());
+ }
+
+ verifyTaskAttemptGeneric(att, ttype, info.getString("id"),
+ info.getString("state"), info.getString("type"),
+ info.getString("rack"), info.getString("nodeHttpAddress"),
+ info.getString("diagnostics"), info.getString("assignedContainerId"),
+ info.getLong("startTime"), info.getLong("finishTime"),
+ info.getLong("elapsedTime"), (float) info.getDouble("progress"));
+
+ if (ttype == TaskType.REDUCE) {
+ verifyReduceTaskAttemptGeneric(att, info.getLong("shuffleFinishTime"),
+ info.getLong("mergeFinishTime"), info.getLong("elapsedShuffleTime"),
+ info.getLong("elapsedMergeTime"), info.getLong("elapsedReduceTime"));
+ }
+ }
+
+ public void verifyAMTaskAttempts(JSONObject json, Task task)
+ throws JSONException {
+ assertEquals("incorrect number of elements", 1, json.length());
+ JSONObject attempts = json.getJSONObject("taskAttempts");
+ assertEquals("incorrect number of elements", 1, json.length());
+ JSONArray arr = attempts.getJSONArray("taskAttempt");
+ for (TaskAttempt att : task.getAttempts().values()) {
+ TaskAttemptId id = att.getID();
+ String attid = MRApps.toString(id);
+ Boolean found = false;
+
+ for (int i = 0; i < arr.length(); i++) {
+ JSONObject info = arr.getJSONObject(i);
+ if (attid.matches(info.getString("id"))) {
+ found = true;
+ verifyAMTaskAttempt(info, att, task.getType());
+ }
+ }
+ assertTrue("task attempt with id: " + attid
+ + " not in web service output", found);
+ }
+ }
+
+ public void verifyAMTaskAttemptsXML(NodeList nodes, Task task) {
+ assertEquals("incorrect number of elements", 1, nodes.getLength());
+
+ for (TaskAttempt att : task.getAttempts().values()) {
+ TaskAttemptId id = att.getID();
+ String attid = MRApps.toString(id);
+ Boolean found = false;
+ for (int i = 0; i < nodes.getLength(); i++) {
+ Element element = (Element) nodes.item(i);
+
+ if (attid.matches(WebServicesTestUtils.getXmlString(element, "id"))) {
+ found = true;
+ verifyAMTaskAttemptXML(element, att, task.getType());
+ }
+ }
+ assertTrue("task with id: " + attid + " not in web service output", found);
+ }
+ }
+
+ public void verifyTaskAttemptGeneric(TaskAttempt ta, TaskType ttype,
+ String id, String state, String type, String rack,
+ String nodeHttpAddress, String diagnostics, String assignedContainerId,
+ long startTime, long finishTime, long elapsedTime, float progress) {
+
+ TaskAttemptId attid = ta.getID();
+ String attemptId = MRApps.toString(attid);
+
+ WebServicesTestUtils.checkStringMatch("id", attemptId, id);
+ WebServicesTestUtils.checkStringMatch("type", ttype.toString(), type);
+ WebServicesTestUtils.checkStringMatch("state", ta.getState().toString(),
+ state);
+ WebServicesTestUtils.checkStringMatch("rack", ta.getNodeRackName(), rack);
+ WebServicesTestUtils.checkStringMatch("nodeHttpAddress",
+ ta.getNodeHttpAddress(), nodeHttpAddress);
+
+ String expectDiag = "";
+ List<String> diagnosticsList = ta.getDiagnostics();
+ if (diagnosticsList != null && !diagnosticsList.isEmpty()) {
+ StringBuffer b = new StringBuffer();
+ for (String diag : diagnosticsList) {
+ b.append(diag);
+ }
+ expectDiag = b.toString();
+ }
+ WebServicesTestUtils.checkStringMatch("diagnostics", expectDiag,
+ diagnostics);
+ WebServicesTestUtils.checkStringMatch("assignedContainerId",
+ ConverterUtils.toString(ta.getAssignedContainerID()),
+ assignedContainerId);
+
+ assertEquals("startTime wrong", ta.getLaunchTime(), startTime);
+ assertEquals("finishTime wrong", ta.getFinishTime(), finishTime);
+ assertEquals("elapsedTime wrong", finishTime - startTime, elapsedTime);
+ assertEquals("progress wrong", ta.getProgress() * 100, progress, 1e-3f);
+ }
+
+ public void verifyReduceTaskAttemptGeneric(TaskAttempt ta,
+ long shuffleFinishTime, long mergeFinishTime, long elapsedShuffleTime,
+ long elapsedMergeTime, long elapsedReduceTime) {
+
+ assertEquals("shuffleFinishTime wrong", ta.getShuffleFinishTime(),
+ shuffleFinishTime);
+ assertEquals("mergeFinishTime wrong", ta.getSortFinishTime(),
+ mergeFinishTime);
+ assertEquals("elapsedShuffleTime wrong",
+ ta.getLaunchTime() - ta.getShuffleFinishTime(), elapsedShuffleTime);
+ assertEquals("elapsedMergeTime wrong",
+ ta.getShuffleFinishTime() - ta.getSortFinishTime(), elapsedMergeTime);
+ assertEquals("elapsedReduceTime wrong",
+ ta.getSortFinishTime() - ta.getFinishTime(), elapsedReduceTime);
+ }
+
+ @Test
+ public void testTaskAttemptIdCounters() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+
+ for (Task task : jobsMap.get(id).getTasks().values()) {
+ String tid = MRApps.toString(task.getID());
+
+ for (TaskAttempt att : task.getAttempts().values()) {
+ TaskAttemptId attemptid = att.getID();
+ String attid = MRApps.toString(attemptid);
+
+ ClientResponse response = r.path("ws").path("v1").path("mapreduce")
+ .path("jobs").path(jobId).path("tasks").path(tid)
+ .path("attempts").path(attid).path("counters")
+ .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ JSONObject info = json.getJSONObject("JobTaskAttemptCounters");
+ verifyAMJobTaskAttemptCounters(info, att);
+ }
+ }
+ }
+ }
+
+ @Test
+ public void testTaskAttemptIdXMLCounters() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+ for (Task task : jobsMap.get(id).getTasks().values()) {
+
+ String tid = MRApps.toString(task.getID());
+ for (TaskAttempt att : task.getAttempts().values()) {
+ TaskAttemptId attemptid = att.getID();
+ String attid = MRApps.toString(attemptid);
+
+ ClientResponse response = r.path("ws").path("v1").path("mapreduce")
+ .path("jobs").path(jobId).path("tasks").path(tid)
+ .path("attempts").path(attid).path("counters")
+ .accept(MediaType.APPLICATION_XML).get(ClientResponse.class);
+
+ assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
+ String xml = response.getEntity(String.class);
+ DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
+ DocumentBuilder db = dbf.newDocumentBuilder();
+ InputSource is = new InputSource();
+ is.setCharacterStream(new StringReader(xml));
+ Document dom = db.parse(is);
+ NodeList nodes = dom.getElementsByTagName("JobTaskAttemptCounters");
+
+ verifyAMTaskCountersXML(nodes, att);
+ }
+ }
+ }
+ }
+
+ public void verifyAMJobTaskAttemptCounters(JSONObject info, TaskAttempt att)
+ throws JSONException {
+
+ assertEquals("incorrect number of elements", 2, info.length());
+
+ WebServicesTestUtils.checkStringMatch("id", MRApps.toString(att.getID()),
+ info.getString("id"));
+
+ // just do simple verification of the fields - not that the data in
+ // the fields is correct
+ JSONArray counterGroups = info.getJSONArray("taskAttemptCounterGroup");
+ for (int i = 0; i < counterGroups.length(); i++) {
+ JSONObject counterGroup = counterGroups.getJSONObject(i);
+ String name = counterGroup.getString("counterGroupName");
+ assertTrue("name not set", (name != null && !name.isEmpty()));
+ JSONArray counters = counterGroup.getJSONArray("counter");
+ for (int j = 0; j < counters.length(); j++) {
+ JSONObject counter = counters.getJSONObject(j);
+ String counterName = counter.getString("name");
+ assertTrue("counter name not set",
+ (counterName != null && !counterName.isEmpty()));
+ long value = counter.getLong("value");
+ assertTrue("value not >= 0", value >= 0);
+ }
+ }
+ }
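+ // Shape of the counters body verified above, e.g. (values hypothetical):
+ // {"JobTaskAttemptCounters": {"id": "attempt_...",
+ // "taskAttemptCounterGroup": [{"counterGroupName": "...",
+ // "counter": [{"name": "...", "value": 0}]}]}}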
+
+ public void verifyAMTaskCountersXML(NodeList nodes, TaskAttempt att) {
+
+ for (int i = 0; i < nodes.getLength(); i++) {
+
+ Element element = (Element) nodes.item(i);
+ WebServicesTestUtils.checkStringMatch("id", MRApps.toString(att.getID()),
+ WebServicesTestUtils.getXmlString(element, "id"));
+ // just do simple verification of the fields - not that the data in
+ // the fields is correct
+ NodeList groups = element.getElementsByTagName("taskAttemptCounterGroup");
+
+ for (int j = 0; j < groups.getLength(); j++) {
+ Element counters = (Element) groups.item(j);
+ assertNotNull("should have counters in the web service info", counters);
+ String name = WebServicesTestUtils.getXmlString(counters,
+ "counterGroupName");
+ assertTrue("name not set", (name != null && !name.isEmpty()));
+ NodeList counterArr = counters.getElementsByTagName("counter");
+ for (int z = 0; z < counterArr.getLength(); z++) {
+ Element counter = (Element) counterArr.item(z);
+ String counterName = WebServicesTestUtils.getXmlString(counter,
+ "name");
+ assertTrue("counter name not set",
+ (counterName != null && !counterName.isEmpty()));
+
+ long value = WebServicesTestUtils.getXmlLong(counter, "value");
+ assertTrue("value not >= 0", value >= 0);
+
+ }
+ }
+ }
+ }
+
+}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesJobConf.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesJobConf.java
new file mode 100644
index 00000000000..9434fa8505c
--- /dev/null
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesJobConf.java
@@ -0,0 +1,336 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.mapreduce.v2.app.webapp;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.io.StringReader;
+import java.util.Map;
+
+import javax.ws.rs.core.MediaType;
+import javax.xml.parsers.DocumentBuilder;
+import javax.xml.parsers.DocumentBuilderFactory;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapreduce.MRJobConfig;
+import org.apache.hadoop.mapreduce.v2.api.records.JobId;
+import org.apache.hadoop.mapreduce.v2.app.AppContext;
+import org.apache.hadoop.mapreduce.v2.app.MockJobs;
+import org.apache.hadoop.mapreduce.v2.app.job.Job;
+import org.apache.hadoop.mapreduce.v2.util.MRApps;
+import org.apache.hadoop.yarn.Clock;
+import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.event.EventHandler;
+import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
+import org.apache.hadoop.yarn.webapp.WebServicesTestUtils;
+import org.codehaus.jettison.json.JSONArray;
+import org.codehaus.jettison.json.JSONException;
+import org.codehaus.jettison.json.JSONObject;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.Test;
+import org.w3c.dom.Document;
+import org.w3c.dom.Element;
+import org.w3c.dom.NodeList;
+import org.xml.sax.InputSource;
+
+import com.google.common.collect.Maps;
+import com.google.inject.Guice;
+import com.google.inject.Injector;
+import com.google.inject.servlet.GuiceServletContextListener;
+import com.google.inject.servlet.ServletModule;
+import com.sun.jersey.api.client.ClientResponse;
+import com.sun.jersey.api.client.WebResource;
+import com.sun.jersey.guice.spi.container.servlet.GuiceContainer;
+import com.sun.jersey.test.framework.JerseyTest;
+import com.sun.jersey.test.framework.WebAppDescriptor;
+
+/**
+ * Test the app master web service REST API for getting the job conf. This
+ * requires creating a temporary configuration file.
+ *
+ * /ws/v1/mapreduce/jobs/{jobid}/conf
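+ *
+ * The returned "conf" object has a path field plus a property array of
+ * name/value pairs (see verifyAMJobConf), e.g. (values hypothetical):
+ *
+ * {"conf": {"path": "...", "property": [{"name": "...", "value": "..."}]}}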
+ */
+public class TestAMWebServicesJobConf extends JerseyTest {
+
+ private static Configuration conf = new Configuration();
+ private static TestAppContext appContext;
+
+ private static File testConfDir = new File("target",
+ TestAMWebServicesJobConf.class.getSimpleName() + "confDir");
+
+ static class TestAppContext implements AppContext {
+ final ApplicationAttemptId appAttemptID;
+ final ApplicationId appID;
+ final String user = MockJobs.newUserName();
+ final Map<JobId, Job> jobs;
+ final long startTime = System.currentTimeMillis();
+
+ TestAppContext(int appid, int numTasks, int numAttempts, Path confPath) {
+ appID = MockJobs.newAppID(appid);
+ appAttemptID = MockJobs.newAppAttemptID(appID, 0);
+ Map<JobId, Job> map = Maps.newHashMap();
+ Job job = MockJobs.newJob(appID, 0, numTasks, numAttempts, confPath);
+ map.put(job.getID(), job);
+ jobs = map;
+ }
+
+ @Override
+ public ApplicationAttemptId getApplicationAttemptId() {
+ return appAttemptID;
+ }
+
+ @Override
+ public ApplicationId getApplicationID() {
+ return appID;
+ }
+
+ @Override
+ public CharSequence getUser() {
+ return user;
+ }
+
+ @Override
+ public Job getJob(JobId jobID) {
+ return jobs.get(jobID);
+ }
+
+ @Override
+ public Map<JobId, Job> getAllJobs() {
+ return jobs; // OK
+ }
+
+ @SuppressWarnings("rawtypes")
+ @Override
+ public EventHandler getEventHandler() {
+ return null;
+ }
+
+ @Override
+ public Clock getClock() {
+ return null;
+ }
+
+ @Override
+ public String getApplicationName() {
+ return "TestApp";
+ }
+
+ @Override
+ public long getStartTime() {
+ return startTime;
+ }
+ }
+
+ private Injector injector = Guice.createInjector(new ServletModule() {
+ @Override
+ protected void configureServlets() {
+
+ Path confPath = new Path(testConfDir.toString(),
+ MRJobConfig.JOB_CONF_FILE);
+ Configuration config = new Configuration();
+
+ FileSystem localFs;
+ try {
+ localFs = FileSystem.getLocal(config);
+ confPath = localFs.makeQualified(confPath);
+
+ OutputStream out = localFs.create(confPath);
+ try {
+ conf.writeXml(out);
+ } finally {
+ out.close();
+ }
+ if (!localFs.exists(confPath)) {
+ fail("error creating config file: " + confPath);
+ }
+
+ } catch (IOException e) {
+ fail("error creating config file: " + e.getMessage());
+ }
+
+ appContext = new TestAppContext(0, 2, 1, confPath);
+
+ bind(JAXBContextResolver.class);
+ bind(AMWebServices.class);
+ bind(GenericExceptionHandler.class);
+ bind(AppContext.class).toInstance(appContext);
+ bind(Configuration.class).toInstance(conf);
+
+ serve("/*").with(GuiceContainer.class);
+ }
+ });
+
+ public class GuiceServletConfig extends GuiceServletContextListener {
+
+ @Override
+ protected Injector getInjector() {
+ return injector;
+ }
+ }
+
+ @Before
+ @Override
+ public void setUp() throws Exception {
+ super.setUp();
+ testConfDir.mkdir();
+
+ }
+
+ @AfterClass
+ public static void stop() {
+ FileUtil.fullyDelete(testConfDir);
+ }
+
+ public TestAMWebServicesJobConf() {
+ super(new WebAppDescriptor.Builder(
+ "org.apache.hadoop.mapreduce.v2.app.webapp")
+ .contextListenerClass(GuiceServletConfig.class)
+ .filterClass(com.google.inject.servlet.GuiceFilter.class)
+ .contextPath("jersey-guice-filter").servletPath("/").build());
+ }
+
+ @Test
+ public void testJobConf() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+
+ ClientResponse response = r.path("ws").path("v1").path("mapreduce")
+ .path("jobs").path(jobId).path("conf")
+ .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ JSONObject info = json.getJSONObject("conf");
+ verifyAMJobConf(info, jobsMap.get(id));
+ }
+ }
+
+ @Test
+ public void testJobConfSlash() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+
+ ClientResponse response = r.path("ws").path("v1").path("mapreduce")
+ .path("jobs").path(jobId).path("conf/")
+ .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ JSONObject info = json.getJSONObject("conf");
+ verifyAMJobConf(info, jobsMap.get(id));
+ }
+ }
+
+ @Test
+ public void testJobConfDefault() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+
+ ClientResponse response = r.path("ws").path("v1").path("mapreduce")
+ .path("jobs").path(jobId).path("conf").get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ JSONObject info = json.getJSONObject("conf");
+ verifyAMJobConf(info, jobsMap.get(id));
+ }
+ }
+
+ @Test
+ public void testJobConfXML() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+
+ ClientResponse response = r.path("ws").path("v1").path("mapreduce")
+ .path("jobs").path(jobId).path("conf")
+ .accept(MediaType.APPLICATION_XML).get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
+ String xml = response.getEntity(String.class);
+ DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
+ DocumentBuilder db = dbf.newDocumentBuilder();
+ InputSource is = new InputSource();
+ is.setCharacterStream(new StringReader(xml));
+ Document dom = db.parse(is);
+ NodeList info = dom.getElementsByTagName("conf");
+ verifyAMJobConfXML(info, jobsMap.get(id));
+ }
+ }
+
+ public void verifyAMJobConf(JSONObject info, Job job) throws JSONException {
+
+ assertEquals("incorrect number of elements", 2, info.length());
+
+ WebServicesTestUtils.checkStringMatch("path", job.getConfFile().toString(),
+ info.getString("path"));
+ // just do simple verification of the fields - not that the data in
+ // the fields is correct
+ JSONArray properties = info.getJSONArray("property");
+ for (int i = 0; i < properties.length(); i++) {
+ JSONObject prop = properties.getJSONObject(i);
+ String name = prop.getString("name");
+ String value = prop.getString("value");
+ assertTrue("name not set", (name != null && !name.isEmpty()));
+ assertTrue("value not set", (value != null && !value.isEmpty()));
+ }
+ }
+
+ public void verifyAMJobConfXML(NodeList nodes, Job job) {
+
+ assertEquals("incorrect number of elements", 1, nodes.getLength());
+
+ for (int i = 0; i < nodes.getLength(); i++) {
+ Element element = (Element) nodes.item(i);
+ WebServicesTestUtils.checkStringMatch("path", job.getConfFile()
+ .toString(), WebServicesTestUtils.getXmlString(element, "path"));
+
+ // just do simple verification of the fields - not that the data in
+ // the fields is correct
+ NodeList properties = element.getElementsByTagName("property");
+
+ for (int j = 0; j < properties.getLength(); j++) {
+ Element property = (Element) properties.item(j);
+ assertNotNull("should have counters in the web service info", property);
+ String name = WebServicesTestUtils.getXmlString(property, "name");
+ String value = WebServicesTestUtils.getXmlString(property, "value");
+ assertTrue("name not set", (name != null && !name.isEmpty()));
+ assertTrue("name not set", (value != null && !value.isEmpty()));
+ }
+ }
+ }
+
+}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesJobs.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesJobs.java
new file mode 100644
index 00000000000..605ccf534fc
--- /dev/null
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesJobs.java
@@ -0,0 +1,780 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.mapreduce.v2.app.webapp;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.StringReader;
+import java.util.List;
+import java.util.Map;
+
+import javax.ws.rs.core.MediaType;
+import javax.xml.parsers.DocumentBuilder;
+import javax.xml.parsers.DocumentBuilderFactory;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.mapreduce.JobACL;
+import org.apache.hadoop.mapreduce.v2.api.records.JobId;
+import org.apache.hadoop.mapreduce.v2.api.records.JobReport;
+import org.apache.hadoop.mapreduce.v2.app.AppContext;
+import org.apache.hadoop.mapreduce.v2.app.MockJobs;
+import org.apache.hadoop.mapreduce.v2.app.job.Job;
+import org.apache.hadoop.mapreduce.v2.util.MRApps;
+import org.apache.hadoop.security.authorize.AccessControlList;
+import org.apache.hadoop.yarn.Clock;
+import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.event.EventHandler;
+import org.apache.hadoop.yarn.util.Times;
+import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
+import org.apache.hadoop.yarn.webapp.WebServicesTestUtils;
+import org.codehaus.jettison.json.JSONArray;
+import org.codehaus.jettison.json.JSONException;
+import org.codehaus.jettison.json.JSONObject;
+import org.junit.Before;
+import org.junit.Test;
+import org.w3c.dom.Document;
+import org.w3c.dom.Element;
+import org.w3c.dom.NodeList;
+import org.xml.sax.InputSource;
+
+import com.google.inject.Guice;
+import com.google.inject.Injector;
+import com.google.inject.servlet.GuiceServletContextListener;
+import com.google.inject.servlet.ServletModule;
+import com.sun.jersey.api.client.ClientResponse;
+import com.sun.jersey.api.client.UniformInterfaceException;
+import com.sun.jersey.api.client.WebResource;
+import com.sun.jersey.api.client.ClientResponse.Status;
+import com.sun.jersey.guice.spi.container.servlet.GuiceContainer;
+import com.sun.jersey.test.framework.JerseyTest;
+import com.sun.jersey.test.framework.WebAppDescriptor;
+
+/**
+ * Test the app master web service REST API for getting jobs, a specific job,
+ * and job counters.
+ *
+ * /ws/v1/mapreduce/jobs
+ * /ws/v1/mapreduce/jobs/{jobid}
+ * /ws/v1/mapreduce/jobs/{jobid}/counters
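+ *
+ * The jobs listing wraps a "job" array, so responses unmarshal as
+ * (values hypothetical; see testJobs):
+ *
+ * {"jobs": {"job": [{"id": "job_...", ...}]}}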
+ */
+public class TestAMWebServicesJobs extends JerseyTest {
+
+ private static Configuration conf = new Configuration();
+ private static TestAppContext appContext;
+
+ static class TestAppContext implements AppContext {
+ final ApplicationAttemptId appAttemptID;
+ final ApplicationId appID;
+ final String user = MockJobs.newUserName();
+ final Map<JobId, Job> jobs;
+ final long startTime = System.currentTimeMillis();
+
+ TestAppContext(int appid, int numJobs, int numTasks, int numAttempts) {
+ appID = MockJobs.newAppID(appid);
+ appAttemptID = MockJobs.newAppAttemptID(appID, 0);
+ jobs = MockJobs.newJobs(appID, numJobs, numTasks, numAttempts);
+ }
+
+ TestAppContext() {
+ this(0, 1, 2, 1);
+ }
+
+ @Override
+ public ApplicationAttemptId getApplicationAttemptId() {
+ return appAttemptID;
+ }
+
+ @Override
+ public ApplicationId getApplicationID() {
+ return appID;
+ }
+
+ @Override
+ public CharSequence getUser() {
+ return user;
+ }
+
+ @Override
+ public Job getJob(JobId jobID) {
+ return jobs.get(jobID);
+ }
+
+ @Override
+ public Map<JobId, Job> getAllJobs() {
+ return jobs; // OK
+ }
+
+ @SuppressWarnings("rawtypes")
+ @Override
+ public EventHandler getEventHandler() {
+ return null;
+ }
+
+ @Override
+ public Clock getClock() {
+ return null;
+ }
+
+ @Override
+ public String getApplicationName() {
+ return "TestApp";
+ }
+
+ @Override
+ public long getStartTime() {
+ return startTime;
+ }
+ }
+
+ private Injector injector = Guice.createInjector(new ServletModule() {
+ @Override
+ protected void configureServlets() {
+
+ appContext = new TestAppContext();
+ bind(JAXBContextResolver.class);
+ bind(AMWebServices.class);
+ bind(GenericExceptionHandler.class);
+ bind(AppContext.class).toInstance(appContext);
+ bind(Configuration.class).toInstance(conf);
+
+ serve("/*").with(GuiceContainer.class);
+ }
+ });
+
+ public class GuiceServletConfig extends GuiceServletContextListener {
+
+ @Override
+ protected Injector getInjector() {
+ return injector;
+ }
+ }
+
+ @Before
+ @Override
+ public void setUp() throws Exception {
+ super.setUp();
+
+ }
+
+ public TestAMWebServicesJobs() {
+ super(new WebAppDescriptor.Builder(
+ "org.apache.hadoop.mapreduce.v2.app.webapp")
+ .contextListenerClass(GuiceServletConfig.class)
+ .filterClass(com.google.inject.servlet.GuiceFilter.class)
+ .contextPath("jersey-guice-filter").servletPath("/").build());
+ }
+
+ @Test
+ public void testJobs() throws JSONException, Exception {
+ WebResource r = resource();
+ ClientResponse response = r.path("ws").path("v1").path("mapreduce")
+ .path("jobs").accept(MediaType.APPLICATION_JSON)
+ .get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ JSONObject jobs = json.getJSONObject("jobs");
+ JSONArray arr = jobs.getJSONArray("job");
+ JSONObject info = arr.getJSONObject(0);
+ Job job = appContext.getJob(MRApps.toJobID(info.getString("id")));
+ verifyAMJob(info, job);
+
+ }
+
+ @Test
+ public void testJobsSlash() throws JSONException, Exception {
+ WebResource r = resource();
+ ClientResponse response = r.path("ws").path("v1").path("mapreduce")
+ .path("jobs/").accept(MediaType.APPLICATION_JSON)
+ .get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ JSONObject jobs = json.getJSONObject("jobs");
+ JSONArray arr = jobs.getJSONArray("job");
+ JSONObject info = arr.getJSONObject(0);
+ Job job = appContext.getJob(MRApps.toJobID(info.getString("id")));
+ verifyAMJob(info, job);
+
+ }
+
+ @Test
+ public void testJobsDefault() throws JSONException, Exception {
+ WebResource r = resource();
+ ClientResponse response = r.path("ws").path("v1").path("mapreduce")
+ .path("jobs").get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ JSONObject jobs = json.getJSONObject("jobs");
+ JSONArray arr = jobs.getJSONArray("job");
+ JSONObject info = arr.getJSONObject(0);
+ Job job = appContext.getJob(MRApps.toJobID(info.getString("id")));
+ verifyAMJob(info, job);
+
+ }
+
+ @Test
+ public void testJobsXML() throws Exception {
+ WebResource r = resource();
+ ClientResponse response = r.path("ws").path("v1").path("mapreduce")
+ .path("jobs").accept(MediaType.APPLICATION_XML)
+ .get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
+ String xml = response.getEntity(String.class);
+ DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
+ DocumentBuilder db = dbf.newDocumentBuilder();
+ InputSource is = new InputSource();
+ is.setCharacterStream(new StringReader(xml));
+ Document dom = db.parse(is);
+ NodeList jobs = dom.getElementsByTagName("jobs");
+ assertEquals("incorrect number of elements", 1, jobs.getLength());
+ NodeList job = dom.getElementsByTagName("job");
+ assertEquals("incorrect number of elements", 1, job.getLength());
+ verifyAMJobXML(job, appContext);
+
+ }
+
+ @Test
+ public void testJobId() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+
+ ClientResponse response = r.path("ws").path("v1").path("mapreduce")
+ .path("jobs").path(jobId).accept(MediaType.APPLICATION_JSON)
+ .get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ JSONObject info = json.getJSONObject("job");
+ verifyAMJob(info, jobsMap.get(id));
+ }
+
+ }
+
+ @Test
+ public void testJobIdSlash() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+
+ ClientResponse response = r.path("ws").path("v1").path("mapreduce")
+ .path("jobs").path(jobId + "/").accept(MediaType.APPLICATION_JSON)
+ .get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ JSONObject info = json.getJSONObject("job");
+ verifyAMJob(info, jobsMap.get(id));
+ }
+ }
+
+ @Test
+ public void testJobIdDefault() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+
+ ClientResponse response = r.path("ws").path("v1").path("mapreduce")
+ .path("jobs").path(jobId).get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ JSONObject info = json.getJSONObject("job");
+ verifyAMJob(info, jobsMap.get(id));
+ }
+
+ }
+
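+ // Error responses arrive wrapped in a RemoteException envelope with
+ // exactly three fields - message, exception, and javaClassName - which
+ // the negative tests below unpack and match against expected values.
+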
+ @Test
+ public void testJobIdNonExist() throws JSONException, Exception {
+ WebResource r = resource();
+
+ try {
+ r.path("ws").path("v1").path("mapreduce").path("jobs")
+ .path("job_1234_1_2").get(JSONObject.class);
+ fail("should have thrown exception on invalid uri");
+ } catch (UniformInterfaceException ue) {
+ ClientResponse response = ue.getResponse();
+ assertEquals(Status.NOT_FOUND, response.getClientResponseStatus());
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject msg = response.getEntity(JSONObject.class);
+ JSONObject exception = msg.getJSONObject("RemoteException");
+ assertEquals("incorrect number of elements", 3, exception.length());
+ String message = exception.getString("message");
+ String type = exception.getString("exception");
+ String classname = exception.getString("javaClassName");
+ WebServicesTestUtils.checkStringMatch("exception message",
+ "java.lang.Exception: job, job_1234_1_2, is not found", message);
+ WebServicesTestUtils.checkStringMatch("exception type",
+ "NotFoundException", type);
+ WebServicesTestUtils.checkStringMatch("exception classname",
+ "org.apache.hadoop.yarn.webapp.NotFoundException", classname);
+ }
+ }
+
+ @Test
+ public void testJobIdInvalid() throws JSONException, Exception {
+ WebResource r = resource();
+
+ try {
+ r.path("ws").path("v1").path("mapreduce").path("jobs").path("job_foo")
+ .get(JSONObject.class);
+ fail("should have thrown exception on invalid uri");
+ } catch (UniformInterfaceException ue) {
+ ClientResponse response = ue.getResponse();
+ assertEquals(Status.BAD_REQUEST, response.getClientResponseStatus());
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject msg = response.getEntity(JSONObject.class);
+ JSONObject exception = msg.getJSONObject("RemoteException");
+ assertEquals("incorrect number of elements", 3, exception.length());
+ String message = exception.getString("message");
+ String type = exception.getString("exception");
+ String classname = exception.getString("javaClassName");
+ WebServicesTestUtils.checkStringMatch("exception message",
+ "For input string: \"foo\"", message);
+ WebServicesTestUtils.checkStringMatch("exception type",
+ "NumberFormatException", type);
+ WebServicesTestUtils.checkStringMatch("exception classname",
+ "java.lang.NumberFormatException", classname);
+ }
+ }
+
+ @Test
+ public void testJobIdInvalidBogus() throws JSONException, Exception {
+ WebResource r = resource();
+
+ try {
+ r.path("ws").path("v1").path("mapreduce").path("jobs").path("bogusfoo")
+ .get(JSONObject.class);
+ fail("should have thrown exception on invalid uri");
+ } catch (UniformInterfaceException ue) {
+ ClientResponse response = ue.getResponse();
+ assertEquals(Status.NOT_FOUND, response.getClientResponseStatus());
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject msg = response.getEntity(JSONObject.class);
+ JSONObject exception = msg.getJSONObject("RemoteException");
+ assertEquals("incorrect number of elements", 3, exception.length());
+ String message = exception.getString("message");
+ String type = exception.getString("exception");
+ String classname = exception.getString("javaClassName");
+ WebServicesTestUtils.checkStringMatch("exception message",
+ "java.lang.Exception: Error parsing job ID: bogusfoo", message);
+ WebServicesTestUtils.checkStringMatch("exception type",
+ "NotFoundException", type);
+ WebServicesTestUtils.checkStringMatch("exception classname",
+ "org.apache.hadoop.yarn.webapp.NotFoundException", classname);
+ }
+ }
+
+ @Test
+ public void testJobIdXML() throws Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+
+ ClientResponse response = r.path("ws").path("v1").path("mapreduce")
+ .path("jobs").path(jobId).accept(MediaType.APPLICATION_XML)
+ .get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
+ String xml = response.getEntity(String.class);
+ DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
+ DocumentBuilder db = dbf.newDocumentBuilder();
+ InputSource is = new InputSource();
+ is.setCharacterStream(new StringReader(xml));
+ Document dom = db.parse(is);
+ NodeList job = dom.getElementsByTagName("job");
+ verifyAMJobXML(job, appContext);
+ }
+
+ }
+
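+ // Verifies one job object from the web service: the 30 fields split into
+ // the generally visible ones (verifyAMJobGeneric), the restricted ones
+ // (verifyAMJobGenericSecure), and the job ACLs checked inline below.
+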
+ public void verifyAMJob(JSONObject info, Job job) throws JSONException {
+
+ assertEquals("incorrect number of elements", 30, info.length());
+
+ // fields that are visible to everyone
+ verifyAMJobGeneric(job, info.getString("id"), info.getString("user"),
+ info.getString("name"), info.getString("state"),
+ info.getLong("startTime"), info.getLong("finishTime"),
+ info.getLong("elapsedTime"), info.getInt("mapsTotal"),
+ info.getInt("mapsCompleted"), info.getInt("reducesTotal"),
+ info.getInt("reducesCompleted"),
+ (float) info.getDouble("reduceProgress"),
+ (float) info.getDouble("mapProgress"));
+
+ String diagnostics = "";
+ if (info.has("diagnostics")) {
+ diagnostics = info.getString("diagnostics");
+ }
+
+ // restricted access fields - present only when security and ACLs are set
+ verifyAMJobGenericSecure(job, info.getInt("mapsPending"),
+ info.getInt("mapsRunning"), info.getInt("reducesPending"),
+ info.getInt("reducesRunning"), info.getBoolean("uberized"),
+ diagnostics, info.getInt("newReduceAttempts"),
+ info.getInt("runningReduceAttempts"),
+ info.getInt("failedReduceAttempts"),
+ info.getInt("killedReduceAttempts"),
+ info.getInt("successfulReduceAttempts"), info.getInt("newMapAttempts"),
+ info.getInt("runningMapAttempts"), info.getInt("failedMapAttempts"),
+ info.getInt("killedMapAttempts"), info.getInt("successfulMapAttempts"));
+
+ Map<JobACL, AccessControlList> allacls = job.getJobACLs();
+ if (allacls != null) {
+
+ for (Map.Entry<JobACL, AccessControlList> entry : allacls.entrySet()) {
+ String expectName = entry.getKey().getAclName();
+ String expectValue = entry.getValue().getAclString();
+ boolean found = false;
+ // make sure the web service output includes it
+ if (info.has("acls")) {
+ JSONArray arr = info.getJSONArray("acls");
+
+ for (int i = 0; i < arr.length(); i++) {
+ JSONObject aclInfo = arr.getJSONObject(i);
+ if (expectName.matches(aclInfo.getString("name"))) {
+ found = true;
+ WebServicesTestUtils.checkStringMatch("value", expectValue,
+ aclInfo.getString("value"));
+ }
+ }
+ } else {
+ fail("should have acls in the web service info");
+ }
+ assertTrue("acl: " + expectName + " not found in webservice output",
+ found);
+ }
+ }
+
+ }
+
+ public void verifyAMJobXML(NodeList nodes, TestAppContext appContext) {
+
+ assertEquals("incorrect number of elements", 1, nodes.getLength());
+
+ for (int i = 0; i < nodes.getLength(); i++) {
+ Element element = (Element) nodes.item(i);
+
+ Job job = appContext.getJob(MRApps.toJobID(WebServicesTestUtils
+ .getXmlString(element, "id")));
+ assertNotNull("Job not found - output incorrect", job);
+
+ verifyAMJobGeneric(job, WebServicesTestUtils.getXmlString(element, "id"),
+ WebServicesTestUtils.getXmlString(element, "user"),
+ WebServicesTestUtils.getXmlString(element, "name"),
+ WebServicesTestUtils.getXmlString(element, "state"),
+ WebServicesTestUtils.getXmlLong(element, "startTime"),
+ WebServicesTestUtils.getXmlLong(element, "finishTime"),
+ WebServicesTestUtils.getXmlLong(element, "elapsedTime"),
+ WebServicesTestUtils.getXmlInt(element, "mapsTotal"),
+ WebServicesTestUtils.getXmlInt(element, "mapsCompleted"),
+ WebServicesTestUtils.getXmlInt(element, "reducesTotal"),
+ WebServicesTestUtils.getXmlInt(element, "reducesCompleted"),
+ WebServicesTestUtils.getXmlFloat(element, "reduceProgress"),
+ WebServicesTestUtils.getXmlFloat(element, "mapProgress"));
+
+ // restricted access fields - present only when security and ACLs are set
+ verifyAMJobGenericSecure(job,
+ WebServicesTestUtils.getXmlInt(element, "mapsPending"),
+ WebServicesTestUtils.getXmlInt(element, "mapsRunning"),
+ WebServicesTestUtils.getXmlInt(element, "reducesPending"),
+ WebServicesTestUtils.getXmlInt(element, "reducesRunning"),
+ WebServicesTestUtils.getXmlBoolean(element, "uberized"),
+ WebServicesTestUtils.getXmlString(element, "diagnostics"),
+ WebServicesTestUtils.getXmlInt(element, "newReduceAttempts"),
+ WebServicesTestUtils.getXmlInt(element, "runningReduceAttempts"),
+ WebServicesTestUtils.getXmlInt(element, "failedReduceAttempts"),
+ WebServicesTestUtils.getXmlInt(element, "killedReduceAttempts"),
+ WebServicesTestUtils.getXmlInt(element, "successfulReduceAttempts"),
+ WebServicesTestUtils.getXmlInt(element, "newMapAttempts"),
+ WebServicesTestUtils.getXmlInt(element, "runningMapAttempts"),
+ WebServicesTestUtils.getXmlInt(element, "failedMapAttempts"),
+ WebServicesTestUtils.getXmlInt(element, "killedMapAttempts"),
+ WebServicesTestUtils.getXmlInt(element, "successfulMapAttempts"));
+
+ Map<JobACL, AccessControlList> allacls = job.getJobACLs();
+ if (allacls != null) {
+ for (Map.Entry<JobACL, AccessControlList> entry : allacls.entrySet()) {
+ String expectName = entry.getKey().getAclName();
+ String expectValue = entry.getValue().getAclString();
+ boolean found = false;
+ // make sure the web service output includes it
+ NodeList id = element.getElementsByTagName("acls");
+ if (id != null) {
+ for (int j = 0; j < id.getLength(); j++) {
+ Element aclElem = (Element) id.item(j);
+ if (aclElem == null) {
+ fail("should have acls in the web service info");
+ }
+ if (expectName.matches(WebServicesTestUtils.getXmlString(aclElem,
+ "name"))) {
+ found = true;
+ WebServicesTestUtils.checkStringMatch("value", expectValue,
+ WebServicesTestUtils.getXmlString(aclElem, "value"));
+ }
+ }
+ } else {
+ fail("should have acls in the web service info");
+ }
+ assertTrue("acl: " + expectName + " not found in webservice output",
+ found);
+ }
+ }
+ }
+ }
+
+ public void verifyAMJobGeneric(Job job, String id, String user, String name,
+ String state, long startTime, long finishTime, long elapsedTime,
+ int mapsTotal, int mapsCompleted, int reducesTotal, int reducesCompleted,
+ float reduceProgress, float mapProgress) {
+ JobReport report = job.getReport();
+
+ WebServicesTestUtils.checkStringMatch("id", MRApps.toString(job.getID()),
+ id);
+ WebServicesTestUtils.checkStringMatch("user", job.getUserName().toString(),
+ user);
+ WebServicesTestUtils.checkStringMatch("name", job.getName(), name);
+ WebServicesTestUtils.checkStringMatch("state", job.getState().toString(),
+ state);
+
+ assertEquals("startTime incorrect", report.getStartTime(), startTime);
+ assertEquals("finishTime incorrect", report.getFinishTime(), finishTime);
+ assertEquals("elapsedTime incorrect",
+ Times.elapsed(report.getStartTime(), report.getFinishTime()),
+ elapsedTime);
+ assertEquals("mapsTotal incorrect", job.getTotalMaps(), mapsTotal);
+ assertEquals("mapsCompleted incorrect", job.getCompletedMaps(),
+ mapsCompleted);
+ assertEquals("reducesTotal incorrect", job.getTotalReduces(), reducesTotal);
+ assertEquals("reducesCompleted incorrect", job.getCompletedReduces(),
+ reducesCompleted);
+ assertEquals("mapProgress incorrect", report.getMapProgress() * 100,
+ mapProgress, 0);
+ assertEquals("reduceProgress incorrect", report.getReduceProgress() * 100,
+ reduceProgress, 0);
+ }
+
+ public void verifyAMJobGenericSecure(Job job, int mapsPending,
+ int mapsRunning, int reducesPending, int reducesRunning,
+ Boolean uberized, String diagnostics, int newReduceAttempts,
+ int runningReduceAttempts, int failedReduceAttempts,
+ int killedReduceAttempts, int successfulReduceAttempts,
+ int newMapAttempts, int runningMapAttempts, int failedMapAttempts,
+ int killedMapAttempts, int successfulMapAttempts) {
+
+ String diagString = "";
+ List<String> diagList = job.getDiagnostics();
+ if (diagList != null && !diagList.isEmpty()) {
+ StringBuilder b = new StringBuilder();
+ for (String diag : diagList) {
+ b.append(diag);
+ }
+ diagString = b.toString();
+ }
+ WebServicesTestUtils.checkStringMatch("diagnostics", diagString,
+ diagnostics);
+
+ assertEquals("isUber incorrect", job.isUber(), uberized);
+
+ // unfortunately the following fields are all calculated in JobInfo
+ // so not easily accessible without doing all the calculations again.
+ // For now just make sure they are present.
+ assertTrue("mapsPending not >= 0", mapsPending >= 0);
+ assertTrue("mapsRunning not >= 0", mapsRunning >= 0);
+ assertTrue("reducesPending not >= 0", reducesPending >= 0);
+ assertTrue("reducesRunning not >= 0", reducesRunning >= 0);
+
+ assertTrue("newReduceAttempts not >= 0", newReduceAttempts >= 0);
+ assertTrue("runningReduceAttempts not >= 0", runningReduceAttempts >= 0);
+ assertTrue("failedReduceAttempts not >= 0", failedReduceAttempts >= 0);
+ assertTrue("killedReduceAttempts not >= 0", killedReduceAttempts >= 0);
+ assertTrue("successfulReduceAttempts not >= 0",
+ successfulReduceAttempts >= 0);
+
+ assertTrue("newMapAttempts not >= 0", newMapAttempts >= 0);
+ assertTrue("runningMapAttempts not >= 0", runningMapAttempts >= 0);
+ assertTrue("failedMapAttempts not >= 0", failedMapAttempts >= 0);
+ assertTrue("killedMapAttempts not >= 0", killedMapAttempts >= 0);
+ assertTrue("successfulMapAttempts not >= 0", successfulMapAttempts >= 0);
+
+ }
+
+ @Test
+ public void testJobCounters() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+
+ ClientResponse response = r.path("ws").path("v1").path("mapreduce")
+ .path("jobs").path(jobId).path("counters")
+ .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ JSONObject info = json.getJSONObject("jobCounters");
+ verifyAMJobCounters(info, jobsMap.get(id));
+ }
+ }
+
+ @Test
+ public void testJobCountersSlash() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+
+ ClientResponse response = r.path("ws").path("v1").path("mapreduce")
+ .path("jobs").path(jobId).path("counters/")
+ .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ JSONObject info = json.getJSONObject("jobCounters");
+ verifyAMJobCounters(info, jobsMap.get(id));
+ }
+ }
+
+ @Test
+ public void testJobCountersDefault() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+
+ ClientResponse response = r.path("ws").path("v1").path("mapreduce")
+ .path("jobs").path(jobId).path("counters/").get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ JSONObject info = json.getJSONObject("jobCounters");
+ verifyAMJobCounters(info, jobsMap.get(id));
+ }
+ }
+
+ @Test
+ public void testJobCountersXML() throws Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+
+ ClientResponse response = r.path("ws").path("v1").path("mapreduce")
+ .path("jobs").path(jobId).path("counters")
+ .accept(MediaType.APPLICATION_XML).get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
+ String xml = response.getEntity(String.class);
+ DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
+ DocumentBuilder db = dbf.newDocumentBuilder();
+ InputSource is = new InputSource();
+ is.setCharacterStream(new StringReader(xml));
+ Document dom = db.parse(is);
+ NodeList info = dom.getElementsByTagName("jobCounters");
+ verifyAMJobCountersXML(info, jobsMap.get(id));
+ }
+ }
+
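+ // Counter output is nested (illustrative; values fabricated):
+ // {"jobCounters": {"id": "...", "counterGroup": [ {"counterGroupName":
+ // "...", "counter": [ {"name": "...", "mapCounterValue": 0,
+ // "reduceCounterValue": 0, "totalCounterValue": 0} ]} ]}}
+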
+ public void verifyAMJobCounters(JSONObject info, Job job)
+ throws JSONException {
+
+ assertEquals("incorrect number of elements", 2, info.length());
+
+ WebServicesTestUtils.checkStringMatch("id", MRApps.toString(job.getID()),
+ info.getString("id"));
+ // just do a simple verification that the fields are present - not that
+ // the data in the fields is correct
+ JSONArray counterGroups = info.getJSONArray("counterGroup");
+ for (int i = 0; i < counterGroups.length(); i++) {
+ JSONObject counterGroup = counterGroups.getJSONObject(i);
+ String name = counterGroup.getString("counterGroupName");
+ assertTrue("name not set", (name != null && !name.isEmpty()));
+ JSONArray counters = counterGroup.getJSONArray("counter");
+ for (int j = 0; j < counters.length(); j++) {
+ JSONObject counter = counters.getJSONObject(j);
+ String counterName = counter.getString("name");
+ assertTrue("counter name not set",
+ (counterName != null && !counterName.isEmpty()));
+
+ long mapValue = counter.getLong("mapCounterValue");
+ assertTrue("mapCounterValue >= 0", mapValue >= 0);
+
+ long reduceValue = counter.getLong("reduceCounterValue");
+ assertTrue("reduceCounterValue >= 0", reduceValue >= 0);
+
+ long totalValue = counter.getLong("totalCounterValue");
+ assertTrue("totalCounterValue >= 0", totalValue >= 0);
+
+ }
+ }
+ }
+
+ public void verifyAMJobCountersXML(NodeList nodes, Job job) {
+
+ for (int i = 0; i < nodes.getLength(); i++) {
+ Element element = (Element) nodes.item(i);
+
+ assertNotNull("Job not found - output incorrect", job);
+
+ WebServicesTestUtils.checkStringMatch("id", MRApps.toString(job.getID()),
+ WebServicesTestUtils.getXmlString(element, "id"));
+ // just do a simple verification that the fields are present - not that
+ // the data in the fields is correct
+ NodeList groups = element.getElementsByTagName("counterGroup");
+
+ for (int j = 0; j < groups.getLength(); j++) {
+ Element counters = (Element) groups.item(j);
+ assertNotNull("should have counters in the web service info", counters);
+ String name = WebServicesTestUtils.getXmlString(counters,
+ "counterGroupName");
+ assertTrue("name not set", (name != null && !name.isEmpty()));
+ NodeList counterArr = counters.getElementsByTagName("counter");
+ for (int z = 0; z < counterArr.getLength(); z++) {
+ Element counter = (Element) counterArr.item(z);
+ String counterName = WebServicesTestUtils.getXmlString(counter,
+ "name");
+ assertTrue("counter name not set",
+ (counterName != null && !counterName.isEmpty()));
+
+ long mapValue = WebServicesTestUtils.getXmlLong(counter,
+ "mapCounterValue");
+ assertTrue("mapCounterValue not >= 0", mapValue >= 0);
+
+ long reduceValue = WebServicesTestUtils.getXmlLong(counter,
+ "reduceCounterValue");
+ assertTrue("reduceCounterValue >= 0", reduceValue >= 0);
+
+ long totalValue = WebServicesTestUtils.getXmlLong(counter,
+ "totalCounterValue");
+ assertTrue("totalCounterValue >= 0", totalValue >= 0);
+ }
+ }
+ }
+ }
+
+}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesTasks.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesTasks.java
new file mode 100644
index 00000000000..19e626ae6c6
--- /dev/null
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServicesTasks.java
@@ -0,0 +1,821 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.mapreduce.v2.app.webapp;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.StringReader;
+import java.util.Map;
+
+import javax.ws.rs.core.MediaType;
+import javax.xml.parsers.DocumentBuilder;
+import javax.xml.parsers.DocumentBuilderFactory;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.mapreduce.v2.api.records.JobId;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskReport;
+import org.apache.hadoop.mapreduce.v2.app.AppContext;
+import org.apache.hadoop.mapreduce.v2.app.MockJobs;
+import org.apache.hadoop.mapreduce.v2.app.job.Job;
+import org.apache.hadoop.mapreduce.v2.app.job.Task;
+import org.apache.hadoop.mapreduce.v2.util.MRApps;
+import org.apache.hadoop.yarn.Clock;
+import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.event.EventHandler;
+import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
+import org.apache.hadoop.yarn.webapp.WebServicesTestUtils;
+import org.codehaus.jettison.json.JSONArray;
+import org.codehaus.jettison.json.JSONException;
+import org.codehaus.jettison.json.JSONObject;
+import org.junit.Before;
+import org.junit.Test;
+import org.w3c.dom.Document;
+import org.w3c.dom.Element;
+import org.w3c.dom.NodeList;
+import org.xml.sax.InputSource;
+
+import com.google.inject.Guice;
+import com.google.inject.Injector;
+import com.google.inject.servlet.GuiceServletContextListener;
+import com.google.inject.servlet.ServletModule;
+import com.sun.jersey.api.client.ClientResponse;
+import com.sun.jersey.api.client.ClientResponse.Status;
+import com.sun.jersey.api.client.UniformInterfaceException;
+import com.sun.jersey.api.client.WebResource;
+import com.sun.jersey.guice.spi.container.servlet.GuiceContainer;
+import com.sun.jersey.test.framework.JerseyTest;
+import com.sun.jersey.test.framework.WebAppDescriptor;
+
+/**
+ * Test the app master web service REST API for getting tasks, a specific task,
+ * and task counters.
+ *
+ * /ws/v1/mapreduce/jobs/{jobid}/tasks
+ * /ws/v1/mapreduce/jobs/{jobid}/tasks/{taskid}
+ * /ws/v1/mapreduce/jobs/{jobid}/tasks/{taskid}/counters
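+ *
+ * A single task is serialized roughly as (illustrative; values fabricated):
+ * {"task": {"id": "...", "state": "...", "type": "MAP",
+ * "successfulAttempt": "...", "startTime": 0, "finishTime": 0,
+ * "elapsedTime": 0, "progress": 100.0}}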
+ */
+public class TestAMWebServicesTasks extends JerseyTest {
+
+ private static Configuration conf = new Configuration();
+ private static TestAppContext appContext;
+
+ static class TestAppContext implements AppContext {
+ final ApplicationAttemptId appAttemptID;
+ final ApplicationId appID;
+ final String user = MockJobs.newUserName();
+ final Map<JobId, Job> jobs;
+ final long startTime = System.currentTimeMillis();
+
+ TestAppContext(int appid, int numJobs, int numTasks, int numAttempts) {
+ appID = MockJobs.newAppID(appid);
+ appAttemptID = MockJobs.newAppAttemptID(appID, 0);
+ jobs = MockJobs.newJobs(appID, numJobs, numTasks, numAttempts);
+ }
+
+ TestAppContext() {
+ this(0, 1, 2, 1);
+ }
+
+ @Override
+ public ApplicationAttemptId getApplicationAttemptId() {
+ return appAttemptID;
+ }
+
+ @Override
+ public ApplicationId getApplicationID() {
+ return appID;
+ }
+
+ @Override
+ public CharSequence getUser() {
+ return user;
+ }
+
+ @Override
+ public Job getJob(JobId jobID) {
+ return jobs.get(jobID);
+ }
+
+ @Override
+ public Map<JobId, Job> getAllJobs() {
+ return jobs; // OK
+ }
+
+ @SuppressWarnings("rawtypes")
+ @Override
+ public EventHandler getEventHandler() {
+ return null;
+ }
+
+ @Override
+ public Clock getClock() {
+ return null;
+ }
+
+ @Override
+ public String getApplicationName() {
+ return "TestApp";
+ }
+
+ @Override
+ public long getStartTime() {
+ return startTime;
+ }
+ }
+
+ private Injector injector = Guice.createInjector(new ServletModule() {
+ @Override
+ protected void configureServlets() {
+
+ appContext = new TestAppContext();
+ bind(JAXBContextResolver.class);
+ bind(AMWebServices.class);
+ bind(GenericExceptionHandler.class);
+ bind(AppContext.class).toInstance(appContext);
+ bind(Configuration.class).toInstance(conf);
+
+ serve("/*").with(GuiceContainer.class);
+ }
+ });
+
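+ // Hands the injector above to the jersey-guice filter wired up in the
+ // constructor below, so requests go through the real AMWebServices
+ // resource backed by the mock TestAppContext.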
+ public class GuiceServletConfig extends GuiceServletContextListener {
+
+ @Override
+ protected Injector getInjector() {
+ return injector;
+ }
+ }
+
+ @Before
+ @Override
+ public void setUp() throws Exception {
+ super.setUp();
+
+ }
+
+ public TestAMWebServicesTasks() {
+ super(new WebAppDescriptor.Builder(
+ "org.apache.hadoop.mapreduce.v2.app.webapp")
+ .contextListenerClass(GuiceServletConfig.class)
+ .filterClass(com.google.inject.servlet.GuiceFilter.class)
+ .contextPath("jersey-guice-filter").servletPath("/").build());
+ }
+
+ @Test
+ public void testTasks() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+ ClientResponse response = r.path("ws").path("v1").path("mapreduce")
+ .path("jobs").path(jobId).path("tasks")
+ .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ JSONObject tasks = json.getJSONObject("tasks");
+ JSONArray arr = tasks.getJSONArray("task");
+ assertEquals("incorrect number of elements", 2, arr.length());
+
+ verifyAMTask(arr, jobsMap.get(id), null);
+ }
+ }
+
+ @Test
+ public void testTasksDefault() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+ ClientResponse response = r.path("ws").path("v1").path("mapreduce")
+ .path("jobs").path(jobId).path("tasks").get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ JSONObject tasks = json.getJSONObject("tasks");
+ JSONArray arr = tasks.getJSONArray("task");
+ assertEquals("incorrect number of elements", 2, arr.length());
+
+ verifyAMTask(arr, jobsMap.get(id), null);
+ }
+ }
+
+ @Test
+ public void testTasksSlash() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+ ClientResponse response = r.path("ws").path("v1").path("mapreduce")
+ .path("jobs").path(jobId).path("tasks/")
+ .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ JSONObject tasks = json.getJSONObject("tasks");
+ JSONArray arr = tasks.getJSONArray("task");
+ assertEquals("incorrect number of elements", 2, arr.length());
+
+ verifyAMTask(arr, jobsMap.get(id), null);
+ }
+ }
+
+ @Test
+ public void testTasksXML() throws JSONException, Exception {
+
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+ ClientResponse response = r.path("ws").path("v1").path("mapreduce")
+ .path("jobs").path(jobId).path("tasks")
+ .accept(MediaType.APPLICATION_XML).get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
+ String xml = response.getEntity(String.class);
+ DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
+ DocumentBuilder db = dbf.newDocumentBuilder();
+ InputSource is = new InputSource();
+ is.setCharacterStream(new StringReader(xml));
+ Document dom = db.parse(is);
+ NodeList tasks = dom.getElementsByTagName("tasks");
+ assertEquals("incorrect number of elements", 1, tasks.getLength());
+ NodeList task = dom.getElementsByTagName("task");
+ verifyAMTaskXML(task, jobsMap.get(id));
+ }
+ }
+
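+ // The "type" query parameter filters the task list: "m" selects map tasks
+ // and "r" reduce tasks; any other value is rejected with a 400 Bad Request
+ // (see testTasksQueryInvalid below).
+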
+ @Test
+ public void testTasksQueryMap() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+ String type = "m";
+ ClientResponse response = r.path("ws").path("v1").path("mapreduce")
+ .path("jobs").path(jobId).path("tasks").queryParam("type", type)
+ .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ JSONObject tasks = json.getJSONObject("tasks");
+ JSONArray arr = tasks.getJSONArray("task");
+ assertEquals("incorrect number of elements", 1, arr.length());
+ verifyAMTask(arr, jobsMap.get(id), type);
+ }
+ }
+
+ @Test
+ public void testTasksQueryReduce() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+ String type = "r";
+ ClientResponse response = r.path("ws").path("v1").path("mapreduce")
+ .path("jobs").path(jobId).path("tasks").queryParam("type", type)
+ .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ JSONObject tasks = json.getJSONObject("tasks");
+ JSONArray arr = tasks.getJSONArray("task");
+ assertEquals("incorrect number of elements", 1, arr.length());
+ verifyAMTask(arr, jobsMap.get(id), type);
+ }
+ }
+
+ @Test
+ public void testTasksQueryInvalid() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+ // tasktype must be exactly either "m" or "r"
+ String tasktype = "reduce";
+
+ try {
+ r.path("ws").path("v1").path("mapreduce").path("jobs").path(jobId)
+ .path("tasks").queryParam("type", tasktype)
+ .accept(MediaType.APPLICATION_JSON).get(JSONObject.class);
+ fail("should have thrown exception on invalid uri");
+ } catch (UniformInterfaceException ue) {
+ ClientResponse response = ue.getResponse();
+ assertEquals(Status.BAD_REQUEST, response.getClientResponseStatus());
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject msg = response.getEntity(JSONObject.class);
+ JSONObject exception = msg.getJSONObject("RemoteException");
+ assertEquals("incorrect number of elements", 3, exception.length());
+ String message = exception.getString("message");
+ String type = exception.getString("exception");
+ String classname = exception.getString("javaClassName");
+ WebServicesTestUtils.checkStringMatch("exception message",
+ "java.lang.Exception: tasktype must be either m or r", message);
+ WebServicesTestUtils.checkStringMatch("exception type",
+ "BadRequestException", type);
+ WebServicesTestUtils.checkStringMatch("exception classname",
+ "org.apache.hadoop.yarn.webapp.BadRequestException", classname);
+ }
+ }
+ }
+
+ @Test
+ public void testTaskId() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+ for (Task task : jobsMap.get(id).getTasks().values()) {
+
+ String tid = MRApps.toString(task.getID());
+ ClientResponse response = r.path("ws").path("v1").path("mapreduce")
+ .path("jobs").path(jobId).path("tasks").path(tid)
+ .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ JSONObject info = json.getJSONObject("task");
+ verifyAMSingleTask(info, task);
+ }
+ }
+ }
+
+ @Test
+ public void testTaskIdSlash() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+ for (Task task : jobsMap.get(id).getTasks().values()) {
+
+ String tid = MRApps.toString(task.getID());
+ ClientResponse response = r.path("ws").path("v1").path("mapreduce")
+ .path("jobs").path(jobId).path("tasks").path(tid + "/")
+ .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ JSONObject info = json.getJSONObject("task");
+ verifyAMSingleTask(info, task);
+ }
+ }
+ }
+
+ @Test
+ public void testTaskIdDefault() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+ for (Task task : jobsMap.get(id).getTasks().values()) {
+
+ String tid = MRApps.toString(task.getID());
+ ClientResponse response = r.path("ws").path("v1").path("mapreduce")
+ .path("jobs").path(jobId).path("tasks").path(tid)
+ .get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ JSONObject info = json.getJSONObject("task");
+ verifyAMSingleTask(info, task);
+ }
+ }
+ }
+
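+ // The next tests walk the task-id failure modes - an unparseable id, an
+ // id for a task that does not exist, a bad type symbol, and ids with the
+ // wrong number of parts - all surfaced as RemoteException envelopes.
+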
+ @Test
+ public void testTaskIdBogus() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+ String tid = "bogustaskid";
+ try {
+ r.path("ws").path("v1").path("mapreduce").path("jobs").path(jobId)
+ .path("tasks").path(tid).get(JSONObject.class);
+ fail("should have thrown exception on invalid uri");
+ } catch (UniformInterfaceException ue) {
+ ClientResponse response = ue.getResponse();
+ assertEquals(Status.NOT_FOUND, response.getClientResponseStatus());
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject msg = response.getEntity(JSONObject.class);
+ JSONObject exception = msg.getJSONObject("RemoteException");
+ assertEquals("incorrect number of elements", 3, exception.length());
+ String message = exception.getString("message");
+ String type = exception.getString("exception");
+ String classname = exception.getString("javaClassName");
+ WebServicesTestUtils.checkStringMatch("exception message",
+ "java.lang.Exception: Error parsing task ID: bogustaskid", message);
+ WebServicesTestUtils.checkStringMatch("exception type",
+ "NotFoundException", type);
+ WebServicesTestUtils.checkStringMatch("exception classname",
+ "org.apache.hadoop.yarn.webapp.NotFoundException", classname);
+ }
+ }
+ }
+
+ @Test
+ public void testTaskIdNonExist() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+ String tid = "task_1234_0_0_m_0";
+ try {
+ r.path("ws").path("v1").path("mapreduce").path("jobs").path(jobId)
+ .path("tasks").path(tid).get(JSONObject.class);
+ fail("should have thrown exception on invalid uri");
+ } catch (UniformInterfaceException ue) {
+ ClientResponse response = ue.getResponse();
+ assertEquals(Status.NOT_FOUND, response.getClientResponseStatus());
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject msg = response.getEntity(JSONObject.class);
+ JSONObject exception = msg.getJSONObject("RemoteException");
+ assertEquals("incorrect number of elements", 3, exception.length());
+ String message = exception.getString("message");
+ String type = exception.getString("exception");
+ String classname = exception.getString("javaClassName");
+ WebServicesTestUtils.checkStringMatch("exception message",
+ "java.lang.Exception: task not found with id task_1234_0_0_m_0",
+ message);
+ WebServicesTestUtils.checkStringMatch("exception type",
+ "NotFoundException", type);
+ WebServicesTestUtils.checkStringMatch("exception classname",
+ "org.apache.hadoop.yarn.webapp.NotFoundException", classname);
+ }
+ }
+ }
+
+ @Test
+ public void testTaskIdInvalid() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+ String tid = "task_1234_0_0_d_0";
+ try {
+ r.path("ws").path("v1").path("mapreduce").path("jobs").path(jobId)
+ .path("tasks").path(tid).get(JSONObject.class);
+ fail("should have thrown exception on invalid uri");
+ } catch (UniformInterfaceException ue) {
+ ClientResponse response = ue.getResponse();
+ assertEquals(Status.NOT_FOUND, response.getClientResponseStatus());
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject msg = response.getEntity(JSONObject.class);
+ JSONObject exception = msg.getJSONObject("RemoteException");
+ assertEquals("incorrect number of elements", 3, exception.length());
+ String message = exception.getString("message");
+ String type = exception.getString("exception");
+ String classname = exception.getString("javaClassName");
+ WebServicesTestUtils.checkStringMatch("exception message",
+ "java.lang.Exception: Unknown task symbol: d", message);
+ WebServicesTestUtils.checkStringMatch("exception type",
+ "NotFoundException", type);
+ WebServicesTestUtils.checkStringMatch("exception classname",
+ "org.apache.hadoop.yarn.webapp.NotFoundException", classname);
+ }
+ }
+ }
+
+ @Test
+ public void testTaskIdInvalid2() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+ String tid = "task_1234_0_m_0";
+ try {
+ r.path("ws").path("v1").path("mapreduce").path("jobs").path(jobId)
+ .path("tasks").path(tid).get(JSONObject.class);
+ fail("should have thrown exception on invalid uri");
+ } catch (UniformInterfaceException ue) {
+ ClientResponse response = ue.getResponse();
+ assertEquals(Status.NOT_FOUND, response.getClientResponseStatus());
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject msg = response.getEntity(JSONObject.class);
+ JSONObject exception = msg.getJSONObject("RemoteException");
+ assertEquals("incorrect number of elements", 3, exception.length());
+ String message = exception.getString("message");
+ String type = exception.getString("exception");
+ String classname = exception.getString("javaClassName");
+ WebServicesTestUtils.checkStringMatch("exception message",
+ "java.lang.Exception: For input string: \"m\"", message);
+ WebServicesTestUtils.checkStringMatch("exception type",
+ "NotFoundException", type);
+ WebServicesTestUtils.checkStringMatch("exception classname",
+ "org.apache.hadoop.yarn.webapp.NotFoundException", classname);
+ }
+ }
+ }
+
+ @Test
+ public void testTaskIdInvalid3() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+ String tid = "task_1234_0_0_m";
+ try {
+ r.path("ws").path("v1").path("mapreduce").path("jobs").path(jobId)
+ .path("tasks").path(tid).get(JSONObject.class);
+ fail("should have thrown exception on invalid uri");
+ } catch (UniformInterfaceException ue) {
+ ClientResponse response = ue.getResponse();
+ assertEquals(Status.NOT_FOUND, response.getClientResponseStatus());
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject msg = response.getEntity(JSONObject.class);
+ JSONObject exception = msg.getJSONObject("RemoteException");
+ assertEquals("incorrect number of elements", 3, exception.length());
+ String message = exception.getString("message");
+ String type = exception.getString("exception");
+ String classname = exception.getString("javaClassName");
+ WebServicesTestUtils.checkStringMatch("exception message",
+ "java.lang.Exception: Error parsing task ID: task_1234_0_0_m",
+ message);
+ WebServicesTestUtils.checkStringMatch("exception type",
+ "NotFoundException", type);
+ WebServicesTestUtils.checkStringMatch("exception classname",
+ "org.apache.hadoop.yarn.webapp.NotFoundException", classname);
+ }
+ }
+ }
+
+ @Test
+ public void testTaskIdXML() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+ for (Task task : jobsMap.get(id).getTasks().values()) {
+
+ String tid = MRApps.toString(task.getID());
+ ClientResponse response = r.path("ws").path("v1").path("mapreduce")
+ .path("jobs").path(jobId).path("tasks").path(tid)
+ .accept(MediaType.APPLICATION_XML).get(ClientResponse.class);
+
+ assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
+ String xml = response.getEntity(String.class);
+ DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
+ DocumentBuilder db = dbf.newDocumentBuilder();
+ InputSource is = new InputSource();
+ is.setCharacterStream(new StringReader(xml));
+ Document dom = db.parse(is);
+ NodeList nodes = dom.getElementsByTagName("task");
+ for (int i = 0; i < nodes.getLength(); i++) {
+ Element element = (Element) nodes.item(i);
+ verifyAMSingleTaskXML(element, task);
+ }
+ }
+ }
+ }
+
+ public void verifyAMSingleTask(JSONObject info, Task task)
+ throws JSONException {
+ assertEquals("incorrect number of elements", 8, info.length());
+
+ verifyTaskGeneric(task, info.getString("id"), info.getString("state"),
+ info.getString("type"), info.getString("successfulAttempt"),
+ info.getLong("startTime"), info.getLong("finishTime"),
+ info.getLong("elapsedTime"), (float) info.getDouble("progress"));
+ }
+
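+ // type mirrors the "type" query parameter ("m" or "r"); when it is null
+ // the unfiltered task list is being verified, so every task in the job
+ // must appear in the array.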
+ public void verifyAMTask(JSONArray arr, Job job, String type)
+ throws JSONException {
+ for (Task task : job.getTasks().values()) {
+ TaskId id = task.getID();
+ String tid = MRApps.toString(id);
+ boolean found = false;
+ if (type == null || task.getType() == MRApps.taskType(type)) {
+
+ for (int i = 0; i < arr.length(); i++) {
+ JSONObject info = arr.getJSONObject(i);
+ if (tid.matches(info.getString("id"))) {
+ found = true;
+ verifyAMSingleTask(info, task);
+ }
+ }
+ assertTrue("task with id: " + tid + " not in web service output", found);
+ }
+ }
+ }
+
+ public void verifyTaskGeneric(Task task, String id, String state,
+ String type, String successfulAttempt, long startTime, long finishTime,
+ long elapsedTime, float progress) {
+
+ TaskId taskid = task.getID();
+ String tid = MRApps.toString(taskid);
+ TaskReport report = task.getReport();
+
+ WebServicesTestUtils.checkStringMatch("id", tid, id);
+ WebServicesTestUtils.checkStringMatch("type", task.getType().toString(),
+ type);
+ WebServicesTestUtils.checkStringMatch("state", report.getTaskState()
+ .toString(), state);
+ // not easily checked without duplicating logic, just make sure it's here
+ assertNotNull("successfulAttempt null", successfulAttempt);
+ assertEquals("startTime wrong", report.getStartTime(), startTime);
+ assertEquals("finishTime wrong", report.getFinishTime(), finishTime);
+ assertEquals("elapsedTime wrong", finishTime - startTime, elapsedTime);
+ assertEquals("progress wrong", report.getProgress() * 100, progress, 1e-3f);
+ }
+
+ public void verifyAMSingleTaskXML(Element element, Task task) {
+ verifyTaskGeneric(task, WebServicesTestUtils.getXmlString(element, "id"),
+ WebServicesTestUtils.getXmlString(element, "state"),
+ WebServicesTestUtils.getXmlString(element, "type"),
+ WebServicesTestUtils.getXmlString(element, "successfulAttempt"),
+ WebServicesTestUtils.getXmlLong(element, "startTime"),
+ WebServicesTestUtils.getXmlLong(element, "finishTime"),
+ WebServicesTestUtils.getXmlLong(element, "elapsedTime"),
+ WebServicesTestUtils.getXmlFloat(element, "progress"));
+ }
+
+ public void verifyAMTaskXML(NodeList nodes, Job job) {
+
+ assertEquals("incorrect number of elements", 2, nodes.getLength());
+
+ for (Task task : job.getTasks().values()) {
+ TaskId id = task.getID();
+ String tid = MRApps.toString(id);
+ boolean found = false;
+ for (int i = 0; i < nodes.getLength(); i++) {
+ Element element = (Element) nodes.item(i);
+
+ if (tid.matches(WebServicesTestUtils.getXmlString(element, "id"))) {
+ found = true;
+ verifyAMSingleTaskXML(element, task);
+ }
+ }
+ assertTrue("task with id: " + tid + " not in web service output", found);
+ }
+ }
+
+ @Test
+ public void testTaskIdCounters() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+ for (Task task : jobsMap.get(id).getTasks().values()) {
+
+ String tid = MRApps.toString(task.getID());
+ ClientResponse response = r.path("ws").path("v1").path("mapreduce")
+ .path("jobs").path(jobId).path("tasks").path(tid).path("counters")
+ .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ JSONObject info = json.getJSONObject("jobTaskCounters");
+ verifyAMJobTaskCounters(info, task);
+ }
+ }
+ }
+
+ @Test
+ public void testTaskIdCountersSlash() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+ for (Task task : jobsMap.get(id).getTasks().values()) {
+
+ String tid = MRApps.toString(task.getID());
+ ClientResponse response = r.path("ws").path("v1").path("mapreduce")
+ .path("jobs").path(jobId).path("tasks").path(tid).path("counters/")
+ .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ JSONObject info = json.getJSONObject("jobTaskCounters");
+ verifyAMJobTaskCounters(info, task);
+ }
+ }
+ }
+
+ @Test
+ public void testTaskIdCountersDefault() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+ for (Task task : jobsMap.get(id).getTasks().values()) {
+
+ String tid = MRApps.toString(task.getID());
+ ClientResponse response = r.path("ws").path("v1").path("mapreduce")
+ .path("jobs").path(jobId).path("tasks").path(tid).path("counters")
+ .get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ JSONObject info = json.getJSONObject("jobTaskCounters");
+ verifyAMJobTaskCounters(info, task);
+ }
+ }
+ }
+
+ @Test
+ public void testJobTaskCountersXML() throws Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+ for (Task task : jobsMap.get(id).getTasks().values()) {
+
+ String tid = MRApps.toString(task.getID());
+ ClientResponse response = r.path("ws").path("v1").path("mapreduce")
+ .path("jobs").path(jobId).path("tasks").path(tid).path("counters")
+ .accept(MediaType.APPLICATION_XML).get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
+ String xml = response.getEntity(String.class);
+ DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
+ DocumentBuilder db = dbf.newDocumentBuilder();
+ InputSource is = new InputSource();
+ is.setCharacterStream(new StringReader(xml));
+ Document dom = db.parse(is);
+ NodeList info = dom.getElementsByTagName("jobTaskCounters");
+ verifyAMTaskCountersXML(info, task);
+ }
+ }
+ }
+
+ public void verifyAMJobTaskCounters(JSONObject info, Task task)
+ throws JSONException {
+
+ assertEquals("incorrect number of elements", 2, info.length());
+
+ WebServicesTestUtils.checkStringMatch("id", MRApps.toString(task.getID()),
+ info.getString("id"));
+ // just do a simple verification that the fields are present - not that
+ // the data in the fields is correct
+ JSONArray counterGroups = info.getJSONArray("taskCounterGroup");
+ for (int i = 0; i < counterGroups.length(); i++) {
+ JSONObject counterGroup = counterGroups.getJSONObject(i);
+ String name = counterGroup.getString("counterGroupName");
+ assertTrue("name not set", (name != null && !name.isEmpty()));
+ JSONArray counters = counterGroup.getJSONArray("counter");
+ for (int j = 0; j < counters.length(); j++) {
+ JSONObject counter = counters.getJSONObject(j);
+ String counterName = counter.getString("name");
+ assertTrue("name not set",
+ (counterName != null && !counterName.isEmpty()));
+ long value = counter.getLong("value");
+ assertTrue("value >= 0", value >= 0);
+ }
+ }
+ }
+
+ public void verifyAMTaskCountersXML(NodeList nodes, Task task) {
+
+ for (int i = 0; i < nodes.getLength(); i++) {
+
+ Element element = (Element) nodes.item(i);
+ WebServicesTestUtils.checkStringMatch("id",
+ MRApps.toString(task.getID()),
+ WebServicesTestUtils.getXmlString(element, "id"));
+ // just do a simple verification that the fields are present - not that
+ // the data in the fields is correct
+ NodeList groups = element.getElementsByTagName("taskCounterGroup");
+
+ for (int j = 0; j < groups.getLength(); j++) {
+ Element counters = (Element) groups.item(j);
+ assertNotNull("should have counters in the web service info", counters);
+ String name = WebServicesTestUtils.getXmlString(counters,
+ "counterGroupName");
+ assertTrue("name not set", (name != null && !name.isEmpty()));
+ NodeList counterArr = counters.getElementsByTagName("counter");
+ for (int z = 0; z < counterArr.getLength(); z++) {
+ Element counter = (Element) counterArr.item(z);
+ String counterName = WebServicesTestUtils.getXmlString(counter,
+ "name");
+ assertTrue("counter name not set",
+ (counterName != null && !counterName.isEmpty()));
+
+ long value = WebServicesTestUtils.getXmlLong(counter, "value");
+ assertTrue("value not >= 0", value >= 0);
+
+ }
+ }
+ }
+ }
+
+}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/pom.xml b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/pom.xml
index 2f083f5275a..ca194cc239f 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/pom.xml
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/pom.xml
@@ -54,7 +54,6 @@
<groupId>org.apache.avro</groupId>
<artifactId>avro-maven-plugin</artifactId>
- <version>1.5.3</version>
<phase>generate-sources</phase>
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsWebServices.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsWebServices.java
index 8463f27d94f..5153d2ee802 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsWebServices.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsWebServices.java
@@ -31,14 +31,13 @@ import javax.ws.rs.core.UriInfo;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.v2.api.records.AMInfo;
-import org.apache.hadoop.mapreduce.v2.api.records.JobId;
-import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
import org.apache.hadoop.mapreduce.v2.app.AppContext;
import org.apache.hadoop.mapreduce.v2.app.job.Job;
import org.apache.hadoop.mapreduce.v2.app.job.Task;
import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
+import org.apache.hadoop.mapreduce.v2.app.webapp.AMWebServices;
import org.apache.hadoop.mapreduce.v2.app.webapp.dao.ConfInfo;
import org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobCounterInfo;
import org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobTaskAttemptCounterInfo;
@@ -131,7 +130,7 @@ public class HsWebServices {
try {
sBegin = Long.parseLong(startedBegin);
} catch (NumberFormatException e) {
- throw new BadRequestException(e.getMessage());
+ throw new BadRequestException("Invalid number format: " + e.getMessage());
}
if (sBegin < 0) {
throw new BadRequestException("startedTimeBegin must be greater than 0");
@@ -142,7 +141,7 @@ public class HsWebServices {
try {
sEnd = Long.parseLong(startedEnd);
} catch (NumberFormatException e) {
- throw new BadRequestException(e.getMessage());
+ throw new BadRequestException("Invalid number format: " + e.getMessage());
}
if (sEnd < 0) {
throw new BadRequestException("startedTimeEnd must be greater than 0");
@@ -158,10 +157,10 @@ public class HsWebServices {
try {
fBegin = Long.parseLong(finishBegin);
} catch (NumberFormatException e) {
- throw new BadRequestException(e.getMessage());
+ throw new BadRequestException("Invalid number format: " + e.getMessage());
}
if (fBegin < 0) {
- throw new BadRequestException("finishTimeBegin must be greater than 0");
+ throw new BadRequestException("finishedTimeBegin must be greater than 0");
}
}
if (finishEnd != null && !finishEnd.isEmpty()) {
@@ -169,15 +168,15 @@ public class HsWebServices {
try {
fEnd = Long.parseLong(finishEnd);
} catch (NumberFormatException e) {
- throw new BadRequestException(e.getMessage());
+ throw new BadRequestException("Invalid number format: " + e.getMessage());
}
if (fEnd < 0) {
- throw new BadRequestException("finishTimeEnd must be greater than 0");
+ throw new BadRequestException("finishedTimeEnd must be greater than 0");
}
}
if (fBegin > fEnd) {
throw new BadRequestException(
- "finishTimeEnd must be greater than finishTimeBegin");
+ "finishedTimeEnd must be greater than finishedTimeBegin");
}
for (Job job : appCtx.getAllJobs().values()) {
@@ -200,7 +199,7 @@ public class HsWebServices {
}
if (userQuery != null && !userQuery.isEmpty()) {
- if (!jobInfo.getName().equals(userQuery)) {
+ if (!jobInfo.getUserName().equals(userQuery)) {
continue;
}
}
@@ -224,14 +223,8 @@ public class HsWebServices {
@Path("/mapreduce/jobs/{jobid}")
@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
public JobInfo getJob(@PathParam("jobid") String jid) {
- JobId jobId = MRApps.toJobID(jid);
- if (jobId == null) {
- throw new NotFoundException("job, " + jid + ", is not found");
- }
- Job job = appCtx.getJob(jobId);
- if (job == null) {
- throw new NotFoundException("job, " + jid + ", is not found");
- }
+
+ Job job = AMWebServices.getJobFromJobIdString(jid, appCtx);
return new JobInfo(job);
}
@@ -239,14 +232,8 @@ public class HsWebServices {
@Path("/mapreduce/jobs/{jobid}/attempts")
@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
public AMAttemptsInfo getJobAttempts(@PathParam("jobid") String jid) {
- JobId jobId = MRApps.toJobID(jid);
- if (jobId == null) {
- throw new NotFoundException("job, " + jid + ", is not found");
- }
- Job job = appCtx.getJob(jobId);
- if (job == null) {
- throw new NotFoundException("job, " + jid + ", is not found");
- }
+
+ Job job = AMWebServices.getJobFromJobIdString(jid, appCtx);
AMAttemptsInfo amAttempts = new AMAttemptsInfo();
for (AMInfo amInfo : job.getAMInfos()) {
AMAttemptInfo attempt = new AMAttemptInfo(amInfo, MRApps.toString(job
@@ -261,53 +248,17 @@ public class HsWebServices {
@Path("/mapreduce/jobs/{jobid}/counters")
@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
public JobCounterInfo getJobCounters(@PathParam("jobid") String jid) {
- JobId jobId = MRApps.toJobID(jid);
- if (jobId == null) {
- throw new NotFoundException("job, " + jid + ", is not found");
- }
- Job job = appCtx.getJob(jobId);
- if (job == null) {
- throw new NotFoundException("job, " + jid + ", is not found");
- }
- return new JobCounterInfo(this.appCtx, job);
- }
- @GET
- @Path("/mapreduce/jobs/{jobid}/tasks/{taskid}/counters")
- @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
- public JobTaskCounterInfo getSingleTaskCounters(
- @PathParam("jobid") String jid, @PathParam("taskid") String tid) {
- JobId jobId = MRApps.toJobID(jid);
- if (jobId == null) {
- throw new NotFoundException("job, " + jid + ", is not found");
- }
- Job job = this.appCtx.getJob(jobId);
- if (job == null) {
- throw new NotFoundException("job, " + jid + ", is not found");
- }
- TaskId taskID = MRApps.toTaskID(tid);
- if (taskID == null) {
- throw new NotFoundException("taskid " + tid + " not found or invalid");
- }
- Task task = job.getTask(taskID);
- if (task == null) {
- throw new NotFoundException("task not found with id " + tid);
- }
- return new JobTaskCounterInfo(task);
+ Job job = AMWebServices.getJobFromJobIdString(jid, appCtx);
+ return new JobCounterInfo(this.appCtx, job);
}
@GET
@Path("/mapreduce/jobs/{jobid}/conf")
@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
public ConfInfo getJobConf(@PathParam("jobid") String jid) {
- JobId jobId = MRApps.toJobID(jid);
- if (jobId == null) {
- throw new NotFoundException("job, " + jid + ", is not found");
- }
- Job job = appCtx.getJob(jobId);
- if (job == null) {
- throw new NotFoundException("job, " + jid + ", is not found");
- }
+
+ Job job = AMWebServices.getJobFromJobIdString(jid, appCtx);
ConfInfo info;
try {
info = new ConfInfo(job, this.conf);
@@ -315,7 +266,6 @@ public class HsWebServices {
throw new NotFoundException("unable to load configuration for job: "
+ jid);
}
-
return info;
}
@@ -324,10 +274,8 @@ public class HsWebServices {
@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
public TasksInfo getJobTasks(@PathParam("jobid") String jid,
@QueryParam("type") String type) {
- Job job = this.appCtx.getJob(MRApps.toJobID(jid));
- if (job == null) {
- throw new NotFoundException("job, " + jid + ", is not found");
- }
+
+ Job job = AMWebServices.getJobFromJobIdString(jid, appCtx);
TasksInfo allTasks = new TasksInfo();
for (Task task : job.getTasks().values()) {
TaskType ttype = null;
@@ -351,10 +299,20 @@ public class HsWebServices {
@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
public TaskInfo getJobTask(@PathParam("jobid") String jid,
@PathParam("taskid") String tid) {
- Job job = this.appCtx.getJob(MRApps.toJobID(jid));
- if (job == null) {
- throw new NotFoundException("job, " + jid + ", is not found");
- }
+
+ Job job = AMWebServices.getJobFromJobIdString(jid, appCtx);
+ Task task = AMWebServices.getTaskFromTaskIdString(tid, job);
+ return new TaskInfo(task);
+
+ }
+
+ @GET
+ @Path("/mapreduce/jobs/{jobid}/tasks/{taskid}/counters")
+ @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
+ public JobTaskCounterInfo getSingleTaskCounters(
+ @PathParam("jobid") String jid, @PathParam("taskid") String tid) {
+
+ Job job = AMWebServices.getJobFromJobIdString(jid, appCtx);
TaskId taskID = MRApps.toTaskID(tid);
if (taskID == null) {
throw new NotFoundException("taskid " + tid + " not found or invalid");
@@ -363,8 +321,7 @@ public class HsWebServices {
if (task == null) {
throw new NotFoundException("task not found with id " + tid);
}
- return new TaskInfo(task);
-
+ return new JobTaskCounterInfo(task);
}
@GET
@@ -372,19 +329,10 @@ public class HsWebServices {
@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
public TaskAttemptsInfo getJobTaskAttempts(@PathParam("jobid") String jid,
@PathParam("taskid") String tid) {
+
TaskAttemptsInfo attempts = new TaskAttemptsInfo();
- Job job = this.appCtx.getJob(MRApps.toJobID(jid));
- if (job == null) {
- throw new NotFoundException("job, " + jid + ", is not found");
- }
- TaskId taskID = MRApps.toTaskID(tid);
- if (taskID == null) {
- throw new NotFoundException("taskid " + tid + " not found or invalid");
- }
- Task task = job.getTask(taskID);
- if (task == null) {
- throw new NotFoundException("task not found with id " + tid);
- }
+ Job job = AMWebServices.getJobFromJobIdString(jid, appCtx);
+ Task task = AMWebServices.getTaskFromTaskIdString(tid, job);
for (TaskAttempt ta : task.getAttempts().values()) {
if (ta != null) {
if (task.getType() == TaskType.REDUCE) {
@@ -402,28 +350,11 @@ public class HsWebServices {
@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
public TaskAttemptInfo getJobTaskAttemptId(@PathParam("jobid") String jid,
@PathParam("taskid") String tid, @PathParam("attemptid") String attId) {
- Job job = this.appCtx.getJob(MRApps.toJobID(jid));
- if (job == null) {
- throw new NotFoundException("job, " + jid + ", is not found");
- }
- TaskId taskID = MRApps.toTaskID(tid);
- if (taskID == null) {
- throw new NotFoundException("taskid " + tid + " not found or invalid");
- }
- Task task = job.getTask(taskID);
- if (task == null) {
- throw new NotFoundException("task not found with id " + tid);
- }
- TaskAttemptId attemptId = MRApps.toTaskAttemptID(attId);
- if (attemptId == null) {
- throw new NotFoundException("task attempt id " + attId
- + " not found or invalid");
- }
- TaskAttempt ta = task.getAttempt(attemptId);
- if (ta == null) {
- throw new NotFoundException("Error getting info on task attempt id "
- + attId);
- }
+
+ Job job = AMWebServices.getJobFromJobIdString(jid, appCtx);
+ Task task = AMWebServices.getTaskFromTaskIdString(tid, job);
+ TaskAttempt ta = AMWebServices.getTaskAttemptFromTaskAttemptString(attId,
+ task);
if (task.getType() == TaskType.REDUCE) {
return new ReduceTaskAttemptInfo(ta, task.getType());
} else {
@@ -437,32 +368,11 @@ public class HsWebServices {
public JobTaskAttemptCounterInfo getJobTaskAttemptIdCounters(
@PathParam("jobid") String jid, @PathParam("taskid") String tid,
@PathParam("attemptid") String attId) {
- JobId jobId = MRApps.toJobID(jid);
- if (jobId == null) {
- throw new NotFoundException("job, " + jid + ", is not found");
- }
- Job job = this.appCtx.getJob(jobId);
- if (job == null) {
- throw new NotFoundException("job, " + jid + ", is not found");
- }
- TaskId taskID = MRApps.toTaskID(tid);
- if (taskID == null) {
- throw new NotFoundException("taskid " + tid + " not found or invalid");
- }
- Task task = job.getTask(taskID);
- if (task == null) {
- throw new NotFoundException("task not found with id " + tid);
- }
- TaskAttemptId attemptId = MRApps.toTaskAttemptID(attId);
- if (attemptId == null) {
- throw new NotFoundException("task attempt id " + attId
- + " not found or invalid");
- }
- TaskAttempt ta = task.getAttempt(attemptId);
- if (ta == null) {
- throw new NotFoundException("Error getting info on task attempt id "
- + attId);
- }
+
+ Job job = AMWebServices.getJobFromJobIdString(jid, appCtx);
+ Task task = AMWebServices.getTaskFromTaskIdString(tid, job);
+ TaskAttempt ta = AMWebServices.getTaskAttemptFromTaskAttemptString(attId,
+ task);
return new JobTaskAttemptCounterInfo(ta);
}
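
Note: the repeated id-validation blocks deleted above are consolidated into
static helpers on AMWebServices. A minimal sketch of the job-lookup variant,
assuming (as the deleted code shows) that MRApps.toJobID returns null for a
malformed id:

    // Illustrative sketch only; the real helper is
    // AMWebServices.getJobFromJobIdString, imported above.
    public static Job getJobFromJobIdString(String jid, AppContext appCtx)
        throws NotFoundException {
      JobId jobId = MRApps.toJobID(jid);   // null for a malformed id
      if (jobId == null) {
        throw new NotFoundException("job, " + jid + ", is not found");
      }
      Job job = appCtx.getJob(jobId);      // null for an unknown id
      if (job == null) {
        throw new NotFoundException("job, " + jid + ", is not found");
      }
      return job;
    }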
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/JAXBContextResolver.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/JAXBContextResolver.java
index 13c557e80bb..c24fefc129a 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/JAXBContextResolver.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/JAXBContextResolver.java
@@ -42,6 +42,8 @@ import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskAttemptsInfo;
import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskCounterGroupInfo;
import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskCounterInfo;
import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TasksInfo;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskInfo;
+
import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.AMAttemptInfo;
import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.AMAttemptsInfo;
import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.HistoryInfo;
@@ -57,13 +59,12 @@ public class JAXBContextResolver implements ContextResolver<JAXBContext> {
// you have to specify all the dao classes here
private final Class[] cTypes = { HistoryInfo.class, JobInfo.class,
- JobsInfo.class, TasksInfo.class, TaskAttemptsInfo.class, ConfInfo.class,
- CounterInfo.class, JobTaskCounterInfo.class,
- JobTaskAttemptCounterInfo.class,
- TaskCounterInfo.class, JobCounterInfo.class, ReduceTaskAttemptInfo.class,
- TaskAttemptInfo.class, TaskAttemptsInfo.class, CounterGroupInfo.class,
- TaskCounterGroupInfo.class,
- AMAttemptInfo.class, AMAttemptsInfo.class};
+ JobsInfo.class, TaskInfo.class, TasksInfo.class, TaskAttemptsInfo.class,
+ ConfInfo.class, CounterInfo.class, JobTaskCounterInfo.class,
+ JobTaskAttemptCounterInfo.class, TaskCounterInfo.class,
+ JobCounterInfo.class, ReduceTaskAttemptInfo.class, TaskAttemptInfo.class,
+ TaskAttemptsInfo.class, CounterGroupInfo.class,
+ TaskCounterGroupInfo.class, AMAttemptInfo.class, AMAttemptsInfo.class };
public JAXBContextResolver() throws Exception {
this.types = new HashSet<Class>(Arrays.asList(cTypes));
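
Note: Jersey consults this resolver whenever it marshals one of the classes in
cTypes, so the TaskInfo entry added above is what lets the new /tasks/{taskid}
responses serialize. A hedged usage sketch (getContext is the standard
javax.ws.rs.ext.ContextResolver method; the fallback behaviour is assumed):

    JAXBContextResolver resolver = new JAXBContextResolver(); // may throw Exception
    // Presumably returns the shared JAXBContext for a registered DAO type ...
    JAXBContext ctx = resolver.getContext(TaskInfo.class);
    // ... and null for anything else, letting Jersey use its default context.
    JAXBContext fallback = resolver.getContext(String.class);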
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/AMAttemptInfo.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/AMAttemptInfo.java
index d4cf3686b57..b91cb6e4f2c 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/AMAttemptInfo.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/AMAttemptInfo.java
@@ -26,6 +26,7 @@ import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlTransient;
import org.apache.hadoop.mapreduce.v2.api.records.AMInfo;
+import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.util.BuilderUtils;
@@ -48,21 +49,28 @@ public class AMAttemptInfo {
public AMAttemptInfo(AMInfo amInfo, String jobId, String user, String host,
String pathPrefix) {
- this.nodeHttpAddress = amInfo.getNodeManagerHost() + ":"
- + amInfo.getNodeManagerHttpPort();
- NodeId nodeId = BuilderUtils.newNodeId(amInfo.getNodeManagerHost(),
- amInfo.getNodeManagerPort());
- this.nodeId = nodeId.toString();
+ this.nodeHttpAddress = "";
+ this.nodeId = "";
+ String nmHost = amInfo.getNodeManagerHost();
+ int nmHttpPort = amInfo.getNodeManagerHttpPort();
+ int nmPort = amInfo.getNodeManagerPort();
+ if (nmHost != null) {
+ this.nodeHttpAddress = nmHost + ":" + nmHttpPort;
+ NodeId nodeId = BuilderUtils.newNodeId(nmHost, nmPort);
+ this.nodeId = nodeId.toString();
+ }
this.id = amInfo.getAppAttemptId().getAttemptId();
this.startTime = amInfo.getStartTime();
- this.containerId = amInfo.getContainerId().toString();
- this.logsLink = join(
- host,
- pathPrefix,
- ujoin("logs", nodeId.toString(), amInfo.getContainerId().toString(),
- jobId, user));
- this.shortLogsLink = ujoin("logs", nodeId.toString(), amInfo
- .getContainerId().toString(), jobId, user);
+ this.containerId = "";
+ this.logsLink = "";
+ this.shortLogsLink = "";
+ ContainerId containerId = amInfo.getContainerId();
+ if (containerId != null) {
+ this.containerId = containerId.toString();
+ this.logsLink = join(host, pathPrefix,
+ ujoin("logs", this.nodeId, this.containerId, jobId, user));
+ this.shortLogsLink = ujoin("logs", this.nodeId, this.containerId,
+ jobId, user);
+ }
}
public String getNodeHttpAddress() {
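
Note: with the guards above, an attempt for which no NodeManager host or
container id was ever reported now marshals with empty strings instead of
throwing an NPE. Roughly (element name and values here are hypothetical):

    {"amAttempt": {"id": 1, "startTime": 0, "containerId": "",
                   "nodeHttpAddress": "", "nodeId": "",
                   "logsLink": "", "shortLogsLink": ""}}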
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/JobInfo.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/JobInfo.java
index bcc9d31b406..03d97092e09 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/JobInfo.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/JobInfo.java
@@ -92,6 +92,7 @@ public class JobInfo {
this.user = job.getUserName();
this.state = job.getState().toString();
this.uberized = job.isUber();
+ this.diagnostics = "";
List<String> diagnostics = job.getDiagnostics();
if (diagnostics != null && !diagnostics.isEmpty()) {
StringBuffer b = new StringBuffer();
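
Note: the hunk above is cut off mid-block; judging from the identical
expectDiag pattern in the new tests below, the remainder concatenates the
diagnostics list into the field, which now defaults to "" instead of null:

    // Illustrative reconstruction of the tail of this block.
    for (String diag : diagnostics) {
      b.append(diag);
    }
    this.diagnostics = b.toString();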
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServices.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServices.java
new file mode 100644
index 00000000000..1071fbcdfa9
--- /dev/null
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServices.java
@@ -0,0 +1,360 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.mapreduce.v2.hs.webapp;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
+import java.io.StringReader;
+import java.util.Map;
+
+import javax.ws.rs.core.MediaType;
+import javax.xml.parsers.DocumentBuilder;
+import javax.xml.parsers.DocumentBuilderFactory;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.mapreduce.v2.api.records.JobId;
+import org.apache.hadoop.mapreduce.v2.app.AppContext;
+import org.apache.hadoop.mapreduce.v2.app.MockJobs;
+import org.apache.hadoop.mapreduce.v2.app.job.Job;
+import org.apache.hadoop.mapreduce.v2.hs.HistoryContext;
+import org.apache.hadoop.mapreduce.v2.hs.JobHistory;
+import org.apache.hadoop.util.VersionInfo;
+import org.apache.hadoop.yarn.Clock;
+import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.event.EventHandler;
+import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
+import org.apache.hadoop.yarn.webapp.WebApp;
+import org.apache.hadoop.yarn.webapp.WebServicesTestUtils;
+import org.codehaus.jettison.json.JSONException;
+import org.codehaus.jettison.json.JSONObject;
+import org.junit.Before;
+import org.junit.Test;
+import org.w3c.dom.Document;
+import org.w3c.dom.Element;
+import org.w3c.dom.NodeList;
+import org.xml.sax.InputSource;
+
+import com.google.inject.Guice;
+import com.google.inject.Injector;
+import com.google.inject.servlet.GuiceServletContextListener;
+import com.google.inject.servlet.ServletModule;
+import com.sun.jersey.api.client.ClientResponse;
+import com.sun.jersey.api.client.ClientResponse.Status;
+import com.sun.jersey.api.client.UniformInterfaceException;
+import com.sun.jersey.api.client.WebResource;
+import com.sun.jersey.guice.spi.container.servlet.GuiceContainer;
+import com.sun.jersey.test.framework.JerseyTest;
+import com.sun.jersey.test.framework.WebAppDescriptor;
+
+/**
+ * Test the History Server info web services APIs. Also test non-existent URLs.
+ *
+ * /ws/v1/history
+ * /ws/v1/history/info
+ */
+public class TestHsWebServices extends JerseyTest {
+
+ private static Configuration conf = new Configuration();
+ private static TestAppContext appContext;
+ private static HsWebApp webApp;
+
+ static class TestAppContext implements AppContext {
+ final ApplicationAttemptId appAttemptID;
+ final ApplicationId appID;
+ final String user = MockJobs.newUserName();
+ final Map<JobId, Job> jobs;
+ final long startTime = System.currentTimeMillis();
+
+ TestAppContext(int appid, int numJobs, int numTasks, int numAttempts) {
+ appID = MockJobs.newAppID(appid);
+ appAttemptID = MockJobs.newAppAttemptID(appID, 0);
+ jobs = MockJobs.newJobs(appID, numJobs, numTasks, numAttempts);
+ }
+
+ TestAppContext() {
+ this(0, 1, 1, 1);
+ }
+
+ @Override
+ public ApplicationAttemptId getApplicationAttemptId() {
+ return appAttemptID;
+ }
+
+ @Override
+ public ApplicationId getApplicationID() {
+ return appID;
+ }
+
+ @Override
+ public CharSequence getUser() {
+ return user;
+ }
+
+ @Override
+ public Job getJob(JobId jobID) {
+ return jobs.get(jobID);
+ }
+
+ @Override
+ public Map<JobId, Job> getAllJobs() {
+ return jobs; // OK
+ }
+
+ @SuppressWarnings("rawtypes")
+ @Override
+ public EventHandler getEventHandler() {
+ return null;
+ }
+
+ @Override
+ public Clock getClock() {
+ return null;
+ }
+
+ @Override
+ public String getApplicationName() {
+ return "TestApp";
+ }
+
+ @Override
+ public long getStartTime() {
+ return startTime;
+ }
+ }
+
+ private Injector injector = Guice.createInjector(new ServletModule() {
+ @Override
+ protected void configureServlets() {
+
+ appContext = new TestAppContext();
+ JobHistory jobHistoryService = new JobHistory();
+ HistoryContext historyContext = (HistoryContext) jobHistoryService;
+ webApp = new HsWebApp(historyContext);
+
+ bind(JAXBContextResolver.class);
+ bind(HsWebServices.class);
+ bind(GenericExceptionHandler.class);
+ bind(WebApp.class).toInstance(webApp);
+ bind(AppContext.class).toInstance(appContext);
+ bind(Configuration.class).toInstance(conf);
+
+ serve("/*").with(GuiceContainer.class);
+ }
+ });
+
+ public class GuiceServletConfig extends GuiceServletContextListener {
+
+ @Override
+ protected Injector getInjector() {
+ return injector;
+ }
+ }
+
+ @Before
+ @Override
+ public void setUp() throws Exception {
+ super.setUp();
+ }
+
+ public TestHsWebServices() {
+ super(new WebAppDescriptor.Builder(
+ "org.apache.hadoop.mapreduce.v2.hs.webapp")
+ .contextListenerClass(GuiceServletConfig.class)
+ .filterClass(com.google.inject.servlet.GuiceFilter.class)
+ .contextPath("jersey-guice-filter").servletPath("/").build());
+ }
+
+ @Test
+ public void testHS() throws JSONException, Exception {
+ WebResource r = resource();
+ ClientResponse response = r.path("ws").path("v1").path("history")
+ .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ verifyHSInfo(json.getJSONObject("historyInfo"), appContext);
+ }
+
+ @Test
+ public void testHSSlash() throws JSONException, Exception {
+ WebResource r = resource();
+ ClientResponse response = r.path("ws").path("v1").path("history/")
+ .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ verifyHSInfo(json.getJSONObject("historyInfo"), appContext);
+ }
+
+ @Test
+ public void testHSDefault() throws JSONException, Exception {
+ WebResource r = resource();
+ ClientResponse response = r.path("ws").path("v1").path("history/")
+ .get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ verifyHSInfo(json.getJSONObject("historyInfo"), appContext);
+ }
+
+ @Test
+ public void testHSXML() throws JSONException, Exception {
+ WebResource r = resource();
+ ClientResponse response = r.path("ws").path("v1").path("history")
+ .accept(MediaType.APPLICATION_XML).get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
+ String xml = response.getEntity(String.class);
+ verifyHSInfoXML(xml, appContext);
+ }
+
+ @Test
+ public void testInfo() throws JSONException, Exception {
+ WebResource r = resource();
+ ClientResponse response = r.path("ws").path("v1").path("history")
+ .path("info").accept(MediaType.APPLICATION_JSON)
+ .get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ verifyHSInfo(json.getJSONObject("historyInfo"), appContext);
+ }
+
+ @Test
+ public void testInfoSlash() throws JSONException, Exception {
+ WebResource r = resource();
+ ClientResponse response = r.path("ws").path("v1").path("history")
+ .path("info/").accept(MediaType.APPLICATION_JSON)
+ .get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ verifyHSInfo(json.getJSONObject("historyInfo"), appContext);
+ }
+
+ @Test
+ public void testInfoDefault() throws JSONException, Exception {
+ WebResource r = resource();
+ ClientResponse response = r.path("ws").path("v1").path("history")
+ .path("info/").get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ verifyHSInfo(json.getJSONObject("historyInfo"), appContext);
+ }
+
+ @Test
+ public void testInfoXML() throws JSONException, Exception {
+ WebResource r = resource();
+ ClientResponse response = r.path("ws").path("v1").path("history")
+ .path("info/").accept(MediaType.APPLICATION_XML)
+ .get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
+ String xml = response.getEntity(String.class);
+ verifyHSInfoXML(xml, appContext);
+ }
+
+ @Test
+ public void testInvalidUri() throws JSONException, Exception {
+ WebResource r = resource();
+ String responseStr = "";
+ try {
+ responseStr = r.path("ws").path("v1").path("history").path("bogus")
+ .accept(MediaType.APPLICATION_JSON).get(String.class);
+ fail("should have thrown exception on invalid uri");
+ } catch (UniformInterfaceException ue) {
+ ClientResponse response = ue.getResponse();
+ assertEquals(Status.NOT_FOUND, response.getClientResponseStatus());
+ WebServicesTestUtils.checkStringMatch(
+ "error string exists and shouldn't", "", responseStr);
+ }
+ }
+
+ @Test
+ public void testInvalidUri2() throws JSONException, Exception {
+ WebResource r = resource();
+ String responseStr = "";
+ try {
+ responseStr = r.path("ws").path("v1").path("invalid")
+ .accept(MediaType.APPLICATION_JSON).get(String.class);
+ fail("should have thrown exception on invalid uri");
+ } catch (UniformInterfaceException ue) {
+ ClientResponse response = ue.getResponse();
+ assertEquals(Status.NOT_FOUND, response.getClientResponseStatus());
+ WebServicesTestUtils.checkStringMatch(
+ "error string exists and shouldn't", "", responseStr);
+ }
+ }
+
+ @Test
+ public void testInvalidAccept() throws JSONException, Exception {
+ WebResource r = resource();
+ String responseStr = "";
+ try {
+ responseStr = r.path("ws").path("v1").path("history")
+ .accept(MediaType.TEXT_PLAIN).get(String.class);
+ fail("should have thrown exception on invalid uri");
+ } catch (UniformInterfaceException ue) {
+ ClientResponse response = ue.getResponse();
+ assertEquals(Status.INTERNAL_SERVER_ERROR,
+ response.getClientResponseStatus());
+ WebServicesTestUtils.checkStringMatch(
+ "error string exists and shouldn't", "", responseStr);
+ }
+ }
+
+ public void verifyHsInfoGeneric(String hadoopVersionBuiltOn,
+ String hadoopBuildVersion, String hadoopVersion) {
+ WebServicesTestUtils.checkStringMatch("hadoopVersionBuiltOn",
+ VersionInfo.getDate(), hadoopVersionBuiltOn);
+ WebServicesTestUtils.checkStringMatch("hadoopBuildVersion",
+ VersionInfo.getBuildVersion(), hadoopBuildVersion);
+ WebServicesTestUtils.checkStringMatch("hadoopVersion",
+ VersionInfo.getVersion(), hadoopVersion);
+ }
+
+ public void verifyHSInfo(JSONObject info, TestAppContext ctx)
+ throws JSONException {
+ assertEquals("incorrect number of elements", 3, info.length());
+
+ verifyHsInfoGeneric(info.getString("hadoopVersionBuiltOn"),
+ info.getString("hadoopBuildVersion"), info.getString("hadoopVersion"));
+ }
+
+ public void verifyHSInfoXML(String xml, TestAppContext ctx)
+ throws JSONException, Exception {
+ DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
+ DocumentBuilder db = dbf.newDocumentBuilder();
+ InputSource is = new InputSource();
+ is.setCharacterStream(new StringReader(xml));
+ Document dom = db.parse(is);
+ NodeList nodes = dom.getElementsByTagName("historyInfo");
+ assertEquals("incorrect number of elements", 1, nodes.getLength());
+
+ for (int i = 0; i < nodes.getLength(); i++) {
+ Element element = (Element) nodes.item(i);
+ verifyHsInfoGeneric(
+ WebServicesTestUtils.getXmlString(element, "hadoopVersionBuiltOn"),
+ WebServicesTestUtils.getXmlString(element, "hadoopBuildVersion"),
+ WebServicesTestUtils.getXmlString(element, "hadoopVersion"));
+ }
+ }
+
+}
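
Note: taken together, the assertions above pin the /ws/v1/history and
/ws/v1/history/info bodies to a three-field historyInfo object; the values are
whatever VersionInfo reports for the build (placeholders below):

    {"historyInfo": {
        "hadoopVersion": "...",
        "hadoopBuildVersion": "...",
        "hadoopVersionBuiltOn": "..."}}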
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesAttempts.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesAttempts.java
new file mode 100644
index 00000000000..4cc0b3259f5
--- /dev/null
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesAttempts.java
@@ -0,0 +1,745 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.mapreduce.v2.hs.webapp;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+import java.io.StringReader;
+import java.util.List;
+import java.util.Map;
+
+import javax.ws.rs.core.MediaType;
+import javax.xml.parsers.DocumentBuilder;
+import javax.xml.parsers.DocumentBuilderFactory;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.mapreduce.v2.api.records.JobId;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
+import org.apache.hadoop.mapreduce.v2.app.AppContext;
+import org.apache.hadoop.mapreduce.v2.app.MockJobs;
+import org.apache.hadoop.mapreduce.v2.app.job.Job;
+import org.apache.hadoop.mapreduce.v2.app.job.Task;
+import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
+import org.apache.hadoop.mapreduce.v2.util.MRApps;
+import org.apache.hadoop.yarn.Clock;
+import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.event.EventHandler;
+import org.apache.hadoop.yarn.util.ConverterUtils;
+import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
+import org.apache.hadoop.yarn.webapp.WebApp;
+import org.apache.hadoop.yarn.webapp.WebServicesTestUtils;
+import org.codehaus.jettison.json.JSONArray;
+import org.codehaus.jettison.json.JSONException;
+import org.codehaus.jettison.json.JSONObject;
+import org.junit.Before;
+import org.junit.Test;
+import org.w3c.dom.Document;
+import org.w3c.dom.Element;
+import org.w3c.dom.NodeList;
+import org.xml.sax.InputSource;
+
+import com.google.inject.Guice;
+import com.google.inject.Injector;
+import com.google.inject.servlet.GuiceServletContextListener;
+import com.google.inject.servlet.ServletModule;
+import com.sun.jersey.api.client.ClientResponse;
+import com.sun.jersey.api.client.ClientResponse.Status;
+import com.sun.jersey.api.client.UniformInterfaceException;
+import com.sun.jersey.api.client.WebResource;
+import com.sun.jersey.guice.spi.container.servlet.GuiceContainer;
+import com.sun.jersey.test.framework.JerseyTest;
+import com.sun.jersey.test.framework.WebAppDescriptor;
+
+/**
+ * Test the history server REST API for getting task attempts, a
+ * specific task attempt, and task attempt counters.
+ *
+ * /ws/v1/history/mapreduce/jobs/{jobid}/tasks/{taskid}/attempts
+ * /ws/v1/history/mapreduce/jobs/{jobid}/tasks/{taskid}/attempts/{attemptid}
+ * /ws/v1/history/mapreduce/jobs/{jobid}/tasks/{taskid}/attempts/{attemptid}/
+ * counters
+ */
+public class TestHsWebServicesAttempts extends JerseyTest {
+
+ private static Configuration conf = new Configuration();
+ private static TestAppContext appContext;
+ private static HsWebApp webApp;
+
+ static class TestAppContext implements AppContext {
+ final ApplicationAttemptId appAttemptID;
+ final ApplicationId appID;
+ final String user = MockJobs.newUserName();
+ final Map<JobId, Job> jobs;
+ final long startTime = System.currentTimeMillis();
+
+ TestAppContext(int appid, int numJobs, int numTasks, int numAttempts) {
+ appID = MockJobs.newAppID(appid);
+ appAttemptID = MockJobs.newAppAttemptID(appID, 0);
+ jobs = MockJobs.newJobs(appID, numJobs, numTasks, numAttempts);
+ }
+
+ TestAppContext() {
+ this(0, 1, 2, 1);
+ }
+
+ @Override
+ public ApplicationAttemptId getApplicationAttemptId() {
+ return appAttemptID;
+ }
+
+ @Override
+ public ApplicationId getApplicationID() {
+ return appID;
+ }
+
+ @Override
+ public CharSequence getUser() {
+ return user;
+ }
+
+ @Override
+ public Job getJob(JobId jobID) {
+ return jobs.get(jobID);
+ }
+
+ @Override
+ public Map<JobId, Job> getAllJobs() {
+ return jobs; // OK
+ }
+
+ @SuppressWarnings("rawtypes")
+ @Override
+ public EventHandler getEventHandler() {
+ return null;
+ }
+
+ @Override
+ public Clock getClock() {
+ return null;
+ }
+
+ @Override
+ public String getApplicationName() {
+ return "TestApp";
+ }
+
+ @Override
+ public long getStartTime() {
+ return startTime;
+ }
+ }
+
+ private Injector injector = Guice.createInjector(new ServletModule() {
+ @Override
+ protected void configureServlets() {
+
+ appContext = new TestAppContext();
+ webApp = mock(HsWebApp.class);
+ when(webApp.name()).thenReturn("hsmockwebapp");
+
+ bind(JAXBContextResolver.class);
+ bind(HsWebServices.class);
+ bind(GenericExceptionHandler.class);
+ bind(WebApp.class).toInstance(webApp);
+ bind(AppContext.class).toInstance(appContext);
+ bind(Configuration.class).toInstance(conf);
+
+ serve("/*").with(GuiceContainer.class);
+ }
+ });
+
+ public class GuiceServletConfig extends GuiceServletContextListener {
+
+ @Override
+ protected Injector getInjector() {
+ return injector;
+ }
+ }
+
+ @Before
+ @Override
+ public void setUp() throws Exception {
+ super.setUp();
+
+ }
+
+ public TestHsWebServicesAttempts() {
+ super(new WebAppDescriptor.Builder(
+ "org.apache.hadoop.mapreduce.v2.hs.webapp")
+ .contextListenerClass(GuiceServletConfig.class)
+ .filterClass(com.google.inject.servlet.GuiceFilter.class)
+ .contextPath("jersey-guice-filter").servletPath("/").build());
+ }
+
+ @Test
+ public void testTaskAttempts() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+ for (Task task : jobsMap.get(id).getTasks().values()) {
+
+ String tid = MRApps.toString(task.getID());
+ ClientResponse response = r.path("ws").path("v1").path("history")
+ .path("mapreduce").path("jobs").path(jobId).path("tasks").path(tid)
+ .path("attempts").accept(MediaType.APPLICATION_JSON)
+ .get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ verifyHsTaskAttempts(json, task);
+ }
+ }
+ }
+
+ @Test
+ public void testTaskAttemptsSlash() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+ for (Task task : jobsMap.get(id).getTasks().values()) {
+
+ String tid = MRApps.toString(task.getID());
+ ClientResponse response = r.path("ws").path("v1").path("history")
+ .path("mapreduce").path("jobs").path(jobId).path("tasks").path(tid)
+ .path("attempts/").accept(MediaType.APPLICATION_JSON)
+ .get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ verifyHsTaskAttempts(json, task);
+ }
+ }
+ }
+
+ @Test
+ public void testTaskAttemptsDefault() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+ for (Task task : jobsMap.get(id).getTasks().values()) {
+
+ String tid = MRApps.toString(task.getID());
+ ClientResponse response = r.path("ws").path("v1").path("history")
+ .path("mapreduce").path("jobs").path(jobId).path("tasks").path(tid)
+ .path("attempts").get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ verifyHsTaskAttempts(json, task);
+ }
+ }
+ }
+
+ @Test
+ public void testTaskAttemptsXML() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+ for (Task task : jobsMap.get(id).getTasks().values()) {
+
+ String tid = MRApps.toString(task.getID());
+ ClientResponse response = r.path("ws").path("v1").path("history")
+ .path("mapreduce").path("jobs").path(jobId).path("tasks").path(tid)
+ .path("attempts").accept(MediaType.APPLICATION_XML)
+ .get(ClientResponse.class);
+
+ assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
+ String xml = response.getEntity(String.class);
+ DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
+ DocumentBuilder db = dbf.newDocumentBuilder();
+ InputSource is = new InputSource();
+ is.setCharacterStream(new StringReader(xml));
+ Document dom = db.parse(is);
+ NodeList attempts = dom.getElementsByTagName("taskAttempts");
+ assertEquals("incorrect number of elements", 1, attempts.getLength());
+
+ NodeList nodes = dom.getElementsByTagName("taskAttempt");
+ verifyHsTaskAttemptsXML(nodes, task);
+ }
+ }
+ }
+
+ @Test
+ public void testTaskAttemptId() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+
+ for (Task task : jobsMap.get(id).getTasks().values()) {
+ String tid = MRApps.toString(task.getID());
+
+ for (TaskAttempt att : task.getAttempts().values()) {
+ TaskAttemptId attemptid = att.getID();
+ String attid = MRApps.toString(attemptid);
+
+ ClientResponse response = r.path("ws").path("v1").path("history")
+ .path("mapreduce").path("jobs").path(jobId).path("tasks")
+ .path(tid).path("attempts").path(attid)
+ .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ JSONObject info = json.getJSONObject("taskAttempt");
+ verifyHsTaskAttempt(info, att, task.getType());
+ }
+ }
+ }
+ }
+
+ @Test
+ public void testTaskAttemptIdSlash() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+
+ for (Task task : jobsMap.get(id).getTasks().values()) {
+ String tid = MRApps.toString(task.getID());
+
+ for (TaskAttempt att : task.getAttempts().values()) {
+ TaskAttemptId attemptid = att.getID();
+ String attid = MRApps.toString(attemptid);
+
+ ClientResponse response = r.path("ws").path("v1").path("history")
+ .path("mapreduce").path("jobs").path(jobId).path("tasks")
+ .path(tid).path("attempts").path(attid + "/")
+ .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ JSONObject info = json.getJSONObject("taskAttempt");
+ verifyHsTaskAttempt(info, att, task.getType());
+ }
+ }
+ }
+ }
+
+ @Test
+ public void testTaskAttemptIdDefault() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+
+ for (Task task : jobsMap.get(id).getTasks().values()) {
+ String tid = MRApps.toString(task.getID());
+
+ for (TaskAttempt att : task.getAttempts().values()) {
+ TaskAttemptId attemptid = att.getID();
+ String attid = MRApps.toString(attemptid);
+
+ ClientResponse response = r.path("ws").path("v1").path("history")
+ .path("mapreduce").path("jobs").path(jobId).path("tasks")
+ .path(tid).path("attempts").path(attid).get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ JSONObject info = json.getJSONObject("taskAttempt");
+ verifyHsTaskAttempt(info, att, task.getType());
+ }
+ }
+ }
+ }
+
+ @Test
+ public void testTaskAttemptIdXML() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+ for (Task task : jobsMap.get(id).getTasks().values()) {
+
+ String tid = MRApps.toString(task.getID());
+ for (TaskAttempt att : task.getAttempts().values()) {
+ TaskAttemptId attemptid = att.getID();
+ String attid = MRApps.toString(attemptid);
+
+ ClientResponse response = r.path("ws").path("v1").path("history")
+ .path("mapreduce").path("jobs").path(jobId).path("tasks")
+ .path(tid).path("attempts").path(attid)
+ .accept(MediaType.APPLICATION_XML).get(ClientResponse.class);
+
+ assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
+ String xml = response.getEntity(String.class);
+ DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
+ DocumentBuilder db = dbf.newDocumentBuilder();
+ InputSource is = new InputSource();
+ is.setCharacterStream(new StringReader(xml));
+ Document dom = db.parse(is);
+ NodeList nodes = dom.getElementsByTagName("taskAttempt");
+ for (int i = 0; i < nodes.getLength(); i++) {
+ Element element = (Element) nodes.item(i);
+ verifyHsTaskAttemptXML(element, att, task.getType());
+ }
+ }
+ }
+ }
+ }
+
+ @Test
+ public void testTaskAttemptIdBogus() throws JSONException, Exception {
+
+ testTaskAttemptIdErrorGeneric("bogusid",
+ "java.lang.Exception: Error parsing attempt ID: bogusid");
+ }
+
+ @Test
+ public void testTaskAttemptIdNonExist() throws JSONException, Exception {
+
+ testTaskAttemptIdErrorGeneric(
+ "attempt_12345_0_0_r_1_0",
+ "java.lang.Exception: Error getting info on task attempt id attempt_12345_0_0_r_1_0");
+ }
+
+ @Test
+ public void testTaskAttemptIdInvalid() throws JSONException, Exception {
+
+ testTaskAttemptIdErrorGeneric("attempt_12345_0_0_d_1_0",
+ "java.lang.Exception: Unknown task symbol: d");
+ }
+
+ @Test
+ public void testTaskAttemptIdInvalid2() throws JSONException, Exception {
+
+ testTaskAttemptIdErrorGeneric("attempt_12345_0_r_1_0",
+ "java.lang.Exception: For input string: \"r\"");
+ }
+
+ @Test
+ public void testTaskAttemptIdInvalid3() throws JSONException, Exception {
+
+ testTaskAttemptIdErrorGeneric("attempt_12345_0_0_r_1",
+ "java.lang.Exception: Error parsing attempt ID: attempt_12345_0_0_r_1");
+ }
+
+ private void testTaskAttemptIdErrorGeneric(String attid, String error)
+ throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+
+ for (Task task : jobsMap.get(id).getTasks().values()) {
+ String tid = MRApps.toString(task.getID());
+
+ try {
+ r.path("ws").path("v1").path("history").path("mapreduce")
+ .path("jobs").path(jobId).path("tasks").path(tid)
+ .path("attempts").path(attid).accept(MediaType.APPLICATION_JSON)
+ .get(JSONObject.class);
+ fail("should have thrown exception on invalid uri");
+ } catch (UniformInterfaceException ue) {
+ ClientResponse response = ue.getResponse();
+ assertEquals(Status.NOT_FOUND, response.getClientResponseStatus());
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject msg = response.getEntity(JSONObject.class);
+ JSONObject exception = msg.getJSONObject("RemoteException");
+ assertEquals("incorrect number of elements", 3, exception.length());
+ String message = exception.getString("message");
+ String type = exception.getString("exception");
+ String classname = exception.getString("javaClassName");
+ WebServicesTestUtils.checkStringMatch("exception message", error,
+ message);
+ WebServicesTestUtils.checkStringMatch("exception type",
+ "NotFoundException", type);
+ WebServicesTestUtils.checkStringMatch("exception classname",
+ "org.apache.hadoop.yarn.webapp.NotFoundException", classname);
+ }
+ }
+ }
+ }
+
+ public void verifyHsTaskAttemptXML(Element element, TaskAttempt att,
+ TaskType ttype) {
+ verifyTaskAttemptGeneric(att, ttype,
+ WebServicesTestUtils.getXmlString(element, "id"),
+ WebServicesTestUtils.getXmlString(element, "state"),
+ WebServicesTestUtils.getXmlString(element, "type"),
+ WebServicesTestUtils.getXmlString(element, "rack"),
+ WebServicesTestUtils.getXmlString(element, "nodeHttpAddress"),
+ WebServicesTestUtils.getXmlString(element, "diagnostics"),
+ WebServicesTestUtils.getXmlString(element, "assignedContainerId"),
+ WebServicesTestUtils.getXmlLong(element, "startTime"),
+ WebServicesTestUtils.getXmlLong(element, "finishTime"),
+ WebServicesTestUtils.getXmlLong(element, "elapsedTime"),
+ WebServicesTestUtils.getXmlFloat(element, "progress"));
+
+ if (ttype == TaskType.REDUCE) {
+ verifyReduceTaskAttemptGeneric(att,
+ WebServicesTestUtils.getXmlLong(element, "shuffleFinishTime"),
+ WebServicesTestUtils.getXmlLong(element, "mergeFinishTime"),
+ WebServicesTestUtils.getXmlLong(element, "elapsedShuffleTime"),
+ WebServicesTestUtils.getXmlLong(element, "elapsedMergeTime"),
+ WebServicesTestUtils.getXmlLong(element, "elapsedReduceTime"));
+ }
+ }
+
+ public void verifyHsTaskAttempt(JSONObject info, TaskAttempt att,
+ TaskType ttype) throws JSONException {
+ if (ttype == TaskType.REDUCE) {
+ assertEquals("incorrect number of elements", 16, info.length());
+ } else {
+ assertEquals("incorrect number of elements", 11, info.length());
+ }
+
+ verifyTaskAttemptGeneric(att, ttype, info.getString("id"),
+ info.getString("state"), info.getString("type"),
+ info.getString("rack"), info.getString("nodeHttpAddress"),
+ info.getString("diagnostics"), info.getString("assignedContainerId"),
+ info.getLong("startTime"), info.getLong("finishTime"),
+ info.getLong("elapsedTime"), (float) info.getDouble("progress"));
+
+ if (ttype == TaskType.REDUCE) {
+ verifyReduceTaskAttemptGeneric(att, info.getLong("shuffleFinishTime"),
+ info.getLong("mergeFinishTime"), info.getLong("elapsedShuffleTime"),
+ info.getLong("elapsedMergeTime"), info.getLong("elapsedReduceTime"));
+ }
+ }
+
+ public void verifyHsTaskAttempts(JSONObject json, Task task)
+ throws JSONException {
+ assertEquals("incorrect number of elements", 1, json.length());
+ JSONObject attempts = json.getJSONObject("taskAttempts");
+ assertEquals("incorrect number of elements", 1, json.length());
+ JSONArray arr = attempts.getJSONArray("taskAttempt");
+ for (TaskAttempt att : task.getAttempts().values()) {
+ TaskAttemptId id = att.getID();
+ String attid = MRApps.toString(id);
+ Boolean found = false;
+
+ for (int i = 0; i < arr.length(); i++) {
+ JSONObject info = arr.getJSONObject(i);
+ if (attid.matches(info.getString("id"))) {
+ found = true;
+ verifyHsTaskAttempt(info, att, task.getType());
+ }
+ }
+ assertTrue("task attempt with id: " + attid
+ + " not in web service output", found);
+ }
+ }
+
+ public void verifyHsTaskAttemptsXML(NodeList nodes, Task task) {
+ assertEquals("incorrect number of elements", 1, nodes.getLength());
+
+ for (TaskAttempt att : task.getAttempts().values()) {
+ TaskAttemptId id = att.getID();
+ String attid = MRApps.toString(id);
+ Boolean found = false;
+ for (int i = 0; i < nodes.getLength(); i++) {
+ Element element = (Element) nodes.item(i);
+
+ if (attid.matches(WebServicesTestUtils.getXmlString(element, "id"))) {
+ found = true;
+ verifyHsTaskAttemptXML(element, att, task.getType());
+ }
+ }
+ assertTrue("task with id: " + attid + " not in web service output", found);
+ }
+ }
+
+ public void verifyTaskAttemptGeneric(TaskAttempt ta, TaskType ttype,
+ String id, String state, String type, String rack,
+ String nodeHttpAddress, String diagnostics, String assignedContainerId,
+ long startTime, long finishTime, long elapsedTime, float progress) {
+
+ TaskAttemptId attid = ta.getID();
+ String attemptId = MRApps.toString(attid);
+
+ WebServicesTestUtils.checkStringMatch("id", attemptId, id);
+ WebServicesTestUtils.checkStringMatch("type", ttype.toString(), type);
+ WebServicesTestUtils.checkStringMatch("state", ta.getState().toString(),
+ state);
+ WebServicesTestUtils.checkStringMatch("rack", ta.getNodeRackName(), rack);
+ WebServicesTestUtils.checkStringMatch("nodeHttpAddress",
+ ta.getNodeHttpAddress(), nodeHttpAddress);
+
+ String expectDiag = "";
+ List<String> diagnosticsList = ta.getDiagnostics();
+ if (diagnosticsList != null && !diagnosticsList.isEmpty()) {
+ StringBuffer b = new StringBuffer();
+ for (String diag : diagnosticsList) {
+ b.append(diag);
+ }
+ expectDiag = b.toString();
+ }
+ WebServicesTestUtils.checkStringMatch("diagnostics", expectDiag,
+ diagnostics);
+ WebServicesTestUtils.checkStringMatch("assignedContainerId",
+ ConverterUtils.toString(ta.getAssignedContainerID()),
+ assignedContainerId);
+
+ assertEquals("startTime wrong", ta.getLaunchTime(), startTime);
+ assertEquals("finishTime wrong", ta.getFinishTime(), finishTime);
+ assertEquals("elapsedTime wrong", finishTime - startTime, elapsedTime);
+ assertEquals("progress wrong", ta.getProgress() * 100, progress, 1e-3f);
+ }
+
+ public void verifyReduceTaskAttemptGeneric(TaskAttempt ta,
+ long shuffleFinishTime, long mergeFinishTime, long elapsedShuffleTime,
+ long elapsedMergeTime, long elapsedReduceTime) {
+
+ assertEquals("shuffleFinishTime wrong", ta.getShuffleFinishTime(),
+ shuffleFinishTime);
+ assertEquals("mergeFinishTime wrong", ta.getSortFinishTime(),
+ mergeFinishTime);
+ assertEquals("elapsedShuffleTime wrong",
+ ta.getLaunchTime() - ta.getShuffleFinishTime(), elapsedShuffleTime);
+ assertEquals("elapsedMergeTime wrong",
+ ta.getShuffleFinishTime() - ta.getSortFinishTime(), elapsedMergeTime);
+ assertEquals("elapsedReduceTime wrong",
+ ta.getSortFinishTime() - ta.getFinishTime(), elapsedReduceTime);
+ }
+
+ @Test
+ public void testTaskAttemptIdCounters() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+
+ for (Task task : jobsMap.get(id).getTasks().values()) {
+ String tid = MRApps.toString(task.getID());
+
+ for (TaskAttempt att : task.getAttempts().values()) {
+ TaskAttemptId attemptid = att.getID();
+ String attid = MRApps.toString(attemptid);
+
+ ClientResponse response = r.path("ws").path("v1").path("history")
+ .path("mapreduce").path("jobs").path(jobId).path("tasks")
+ .path(tid).path("attempts").path(attid).path("counters")
+ .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ JSONObject info = json.getJSONObject("JobTaskAttemptCounters");
+ verifyHsJobTaskAttemptCounters(info, att);
+ }
+ }
+ }
+ }
+
+ @Test
+ public void testTaskAttemptIdXMLCounters() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+ for (Task task : jobsMap.get(id).getTasks().values()) {
+
+ String tid = MRApps.toString(task.getID());
+ for (TaskAttempt att : task.getAttempts().values()) {
+ TaskAttemptId attemptid = att.getID();
+ String attid = MRApps.toString(attemptid);
+
+ ClientResponse response = r.path("ws").path("v1").path("history")
+ .path("mapreduce").path("jobs").path(jobId).path("tasks")
+ .path(tid).path("attempts").path(attid).path("counters")
+ .accept(MediaType.APPLICATION_XML).get(ClientResponse.class);
+
+ assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
+ String xml = response.getEntity(String.class);
+ DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
+ DocumentBuilder db = dbf.newDocumentBuilder();
+ InputSource is = new InputSource();
+ is.setCharacterStream(new StringReader(xml));
+ Document dom = db.parse(is);
+ NodeList nodes = dom.getElementsByTagName("JobTaskAttemptCounters");
+
+ verifyHsTaskCountersXML(nodes, att);
+ }
+ }
+ }
+ }
+
+ public void verifyHsJobTaskAttemptCounters(JSONObject info, TaskAttempt att)
+ throws JSONException {
+
+ assertEquals("incorrect number of elements", 2, info.length());
+
+ WebServicesTestUtils.checkStringMatch("id", MRApps.toString(att.getID()),
+ info.getString("id"));
+
+ // just do simple verification that the fields exist - not that the data
+ // in the fields is correct
+ JSONArray counterGroups = info.getJSONArray("taskAttemptCounterGroup");
+ for (int i = 0; i < counterGroups.length(); i++) {
+ JSONObject counterGroup = counterGroups.getJSONObject(i);
+ String name = counterGroup.getString("counterGroupName");
+ assertTrue("name not set", (name != null && !name.isEmpty()));
+ JSONArray counters = counterGroup.getJSONArray("counter");
+ for (int j = 0; j < counters.length(); j++) {
+ JSONObject counter = counters.getJSONObject(j);
+ String counterName = counter.getString("name");
+ assertTrue("name not set",
+ (counterName != null && !counterName.isEmpty()));
+ long value = counter.getLong("value");
+ assertTrue("value >= 0", value >= 0);
+ }
+ }
+ }
+
+ public void verifyHsTaskCountersXML(NodeList nodes, TaskAttempt att) {
+
+ for (int i = 0; i < nodes.getLength(); i++) {
+
+ Element element = (Element) nodes.item(i);
+ WebServicesTestUtils.checkStringMatch("id", MRApps.toString(att.getID()),
+ WebServicesTestUtils.getXmlString(element, "id"));
+ // just do simple verification that the fields exist - not that the data
+ // in the fields is correct
+ NodeList groups = element.getElementsByTagName("taskAttemptCounterGroup");
+
+ for (int j = 0; j < groups.getLength(); j++) {
+ Element counters = (Element) groups.item(j);
+ assertNotNull("should have counters in the web service info", counters);
+ String name = WebServicesTestUtils.getXmlString(counters,
+ "counterGroupName");
+ assertTrue("name not set", (name != null && !name.isEmpty()));
+ NodeList counterArr = counters.getElementsByTagName("counter");
+ for (int z = 0; z < counterArr.getLength(); z++) {
+ Element counter = (Element) counterArr.item(z);
+ String counterName = WebServicesTestUtils.getXmlString(counter,
+ "name");
+ assertTrue("counter name not set",
+ (counterName != null && !counterName.isEmpty()));
+
+ long value = WebServicesTestUtils.getXmlLong(counter, "value");
+ assertTrue("value not >= 0", value >= 0);
+
+ }
+ }
+ }
+ }
+
+}
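
Note: testTaskAttemptIdErrorGeneric above also documents the error body that
GenericExceptionHandler emits for a NotFoundException; e.g. for the bogus-id
case (shape taken from the assertions, formatting illustrative):

    {"RemoteException": {
        "message": "java.lang.Exception: Error parsing attempt ID: bogusid",
        "exception": "NotFoundException",
        "javaClassName": "org.apache.hadoop.yarn.webapp.NotFoundException"}}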
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobConf.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobConf.java
new file mode 100644
index 00000000000..086281193ab
--- /dev/null
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobConf.java
@@ -0,0 +1,345 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.mapreduce.v2.hs.webapp;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.io.StringReader;
+import java.util.Map;
+
+import javax.ws.rs.core.MediaType;
+import javax.xml.parsers.DocumentBuilder;
+import javax.xml.parsers.DocumentBuilderFactory;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapreduce.MRJobConfig;
+import org.apache.hadoop.mapreduce.v2.api.records.JobId;
+import org.apache.hadoop.mapreduce.v2.app.AppContext;
+import org.apache.hadoop.mapreduce.v2.app.MockJobs;
+import org.apache.hadoop.mapreduce.v2.app.job.Job;
+import org.apache.hadoop.mapreduce.v2.util.MRApps;
+import org.apache.hadoop.yarn.Clock;
+import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.event.EventHandler;
+import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
+import org.apache.hadoop.yarn.webapp.WebApp;
+import org.apache.hadoop.yarn.webapp.WebServicesTestUtils;
+import org.codehaus.jettison.json.JSONArray;
+import org.codehaus.jettison.json.JSONException;
+import org.codehaus.jettison.json.JSONObject;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.Test;
+import org.w3c.dom.Document;
+import org.w3c.dom.Element;
+import org.w3c.dom.NodeList;
+import org.xml.sax.InputSource;
+
+import com.google.common.collect.Maps;
+import com.google.inject.Guice;
+import com.google.inject.Injector;
+import com.google.inject.servlet.GuiceServletContextListener;
+import com.google.inject.servlet.ServletModule;
+import com.sun.jersey.api.client.ClientResponse;
+import com.sun.jersey.api.client.WebResource;
+import com.sun.jersey.guice.spi.container.servlet.GuiceContainer;
+import com.sun.jersey.test.framework.JerseyTest;
+import com.sun.jersey.test.framework.WebAppDescriptor;
+
+/**
+ * Test the history server REST API for getting the job conf. This
+ * requires creating a temporary configuration file.
+ *
+ * /ws/v1/history/mapreduce/jobs/{jobid}/conf
+ */
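+// Hedged sketch of the JSON shape these tests verify (the property name and
+// value below are hypothetical; the "conf"/"path"/"property" nesting matches
+// the assertions in verifyHsJobConf()):
+//   {"conf": {"path": "<job.xml path>",
+//     "property": [{"name": "<key>", "value": "<value>"}]}}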
+public class TestHsWebServicesJobConf extends JerseyTest {
+
+ private static Configuration conf = new Configuration();
+ private static TestAppContext appContext;
+ private static HsWebApp webApp;
+
+ private static File testConfDir = new File("target",
+ TestHsWebServicesJobConf.class.getSimpleName() + "confDir");
+
+ static class TestAppContext implements AppContext {
+ final ApplicationAttemptId appAttemptID;
+ final ApplicationId appID;
+ final String user = MockJobs.newUserName();
+ final Map<JobId, Job> jobs;
+ final long startTime = System.currentTimeMillis();
+
+ TestAppContext(int appid, int numTasks, int numAttempts, Path confPath) {
+ appID = MockJobs.newAppID(appid);
+ appAttemptID = MockJobs.newAppAttemptID(appID, 0);
+ Map<JobId, Job> map = Maps.newHashMap();
+ Job job = MockJobs.newJob(appID, 0, numTasks, numAttempts, confPath);
+ map.put(job.getID(), job);
+ jobs = map;
+ }
+
+ @Override
+ public ApplicationAttemptId getApplicationAttemptId() {
+ return appAttemptID;
+ }
+
+ @Override
+ public ApplicationId getApplicationID() {
+ return appID;
+ }
+
+ @Override
+ public CharSequence getUser() {
+ return user;
+ }
+
+ @Override
+ public Job getJob(JobId jobID) {
+ return jobs.get(jobID);
+ }
+
+ @Override
+ public Map<JobId, Job> getAllJobs() {
+ return jobs; // OK
+ }
+
+ @SuppressWarnings("rawtypes")
+ @Override
+ public EventHandler getEventHandler() {
+ return null;
+ }
+
+ @Override
+ public Clock getClock() {
+ return null;
+ }
+
+ @Override
+ public String getApplicationName() {
+ return "TestApp";
+ }
+
+ @Override
+ public long getStartTime() {
+ return startTime;
+ }
+ }
+
+ private Injector injector = Guice.createInjector(new ServletModule() {
+ @Override
+ protected void configureServlets() {
+
+ Path confPath = new Path(testConfDir.toString(),
+ MRJobConfig.JOB_CONF_FILE);
+ Configuration config = new Configuration();
+
+ FileSystem localFs;
+ try {
+ localFs = FileSystem.getLocal(config);
+ confPath = localFs.makeQualified(confPath);
+
+ OutputStream out = localFs.create(confPath);
+ try {
+ conf.writeXml(out);
+ } finally {
+ out.close();
+ }
+ if (!localFs.exists(confPath)) {
+ fail("error creating config file: " + confPath);
+ }
+
+ } catch (IOException e) {
+ fail("error creating config file: " + e.getMessage());
+ }
+
+ appContext = new TestAppContext(0, 2, 1, confPath);
+
+ webApp = mock(HsWebApp.class);
+ when(webApp.name()).thenReturn("hsmockwebapp");
+
+ bind(JAXBContextResolver.class);
+ bind(HsWebServices.class);
+ bind(GenericExceptionHandler.class);
+ bind(WebApp.class).toInstance(webApp);
+ bind(AppContext.class).toInstance(appContext);
+ bind(Configuration.class).toInstance(conf);
+
+ serve("/*").with(GuiceContainer.class);
+ }
+ });
+
+ public class GuiceServletConfig extends GuiceServletContextListener {
+
+ @Override
+ protected Injector getInjector() {
+ return injector;
+ }
+ }
+
+ @Before
+ @Override
+ public void setUp() throws Exception {
+ super.setUp();
+ testConfDir.mkdir();
+
+ }
+
+ @AfterClass
+ static public void stop() {
+ FileUtil.fullyDelete(testConfDir);
+ }
+
+ public TestHsWebServicesJobConf() {
+ super(new WebAppDescriptor.Builder(
+ "org.apache.hadoop.mapreduce.v2.hs.webapp")
+ .contextListenerClass(GuiceServletConfig.class)
+ .filterClass(com.google.inject.servlet.GuiceFilter.class)
+ .contextPath("jersey-guice-filter").servletPath("/").build());
+ }
+
+ @Test
+ public void testJobConf() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+
+ ClientResponse response = r.path("ws").path("v1").path("history")
+ .path("mapreduce")
+ .path("jobs").path(jobId).path("conf")
+ .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ JSONObject info = json.getJSONObject("conf");
+ verifyHsJobConf(info, jobsMap.get(id));
+ }
+ }
+
+ @Test
+ public void testJobConfSlash() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+
+ ClientResponse response = r.path("ws").path("v1").path("history").path("mapreduce")
+ .path("jobs").path(jobId).path("conf/")
+ .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ JSONObject info = json.getJSONObject("conf");
+ verifyHsJobConf(info, jobsMap.get(id));
+ }
+ }
+
+ @Test
+ public void testJobConfDefault() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+
+ ClientResponse response = r.path("ws").path("v1").path("history").path("mapreduce")
+ .path("jobs").path(jobId).path("conf").get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ JSONObject info = json.getJSONObject("conf");
+ verifyHsJobConf(info, jobsMap.get(id));
+ }
+ }
+
+ @Test
+ public void testJobConfXML() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+
+ ClientResponse response = r.path("ws").path("v1").path("history").path("mapreduce")
+ .path("jobs").path(jobId).path("conf")
+ .accept(MediaType.APPLICATION_XML).get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
+ String xml = response.getEntity(String.class);
+ DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
+ DocumentBuilder db = dbf.newDocumentBuilder();
+ InputSource is = new InputSource();
+ is.setCharacterStream(new StringReader(xml));
+ Document dom = db.parse(is);
+ NodeList info = dom.getElementsByTagName("conf");
+ verifyHsJobConfXML(info, jobsMap.get(id));
+ }
+ }
+
+ public void verifyHsJobConf(JSONObject info, Job job) throws JSONException {
+
+ assertEquals("incorrect number of elements", 2, info.length());
+
+ WebServicesTestUtils.checkStringMatch("path", job.getConfFile().toString(),
+ info.getString("path"));
+ // just do simple verification that the fields exist - not that the
+ // data in the fields is correct
+ JSONArray properties = info.getJSONArray("property");
+ for (int i = 0; i < properties.length(); i++) {
+ JSONObject prop = properties.getJSONObject(i);
+ String name = prop.getString("name");
+ String value = prop.getString("value");
+ assertTrue("name not set", (name != null && !name.isEmpty()));
+ assertTrue("value not set", (value != null && !value.isEmpty()));
+ }
+ }
+
+ public void verifyHsJobConfXML(NodeList nodes, Job job) {
+
+ assertEquals("incorrect number of elements", 1, nodes.getLength());
+
+ for (int i = 0; i < nodes.getLength(); i++) {
+ Element element = (Element) nodes.item(i);
+ WebServicesTestUtils.checkStringMatch("path", job.getConfFile()
+ .toString(), WebServicesTestUtils.getXmlString(element, "path"));
+
+ // just do simple verification that the fields exist - not that the
+ // data in the fields is correct
+ NodeList properties = element.getElementsByTagName("property");
+
+ for (int j = 0; j < properties.getLength(); j++) {
+ Element property = (Element) properties.item(j);
+ assertNotNull("should have counters in the web service info", property);
+ String name = WebServicesTestUtils.getXmlString(property, "name");
+ String value = WebServicesTestUtils.getXmlString(property, "value");
+ assertTrue("name not set", (name != null && !name.isEmpty()));
+ assertTrue("name not set", (value != null && !value.isEmpty()));
+ }
+ }
+ }
+
+}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobs.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobs.java
new file mode 100644
index 00000000000..8c9b0603b85
--- /dev/null
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobs.java
@@ -0,0 +1,755 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.mapreduce.v2.hs.webapp;
+
+import static org.apache.hadoop.yarn.util.StringHelper.join;
+import static org.apache.hadoop.yarn.util.StringHelper.ujoin;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+import java.io.StringReader;
+import java.util.Map;
+
+import javax.ws.rs.core.MediaType;
+import javax.xml.parsers.DocumentBuilder;
+import javax.xml.parsers.DocumentBuilderFactory;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.mapreduce.v2.api.records.AMInfo;
+import org.apache.hadoop.mapreduce.v2.api.records.JobId;
+import org.apache.hadoop.mapreduce.v2.app.AppContext;
+import org.apache.hadoop.mapreduce.v2.app.MockJobs;
+import org.apache.hadoop.mapreduce.v2.app.job.Job;
+import org.apache.hadoop.mapreduce.v2.util.MRApps;
+import org.apache.hadoop.yarn.Clock;
+import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.event.EventHandler;
+import org.apache.hadoop.yarn.util.BuilderUtils;
+import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
+import org.apache.hadoop.yarn.webapp.WebApp;
+import org.apache.hadoop.yarn.webapp.WebServicesTestUtils;
+import org.codehaus.jettison.json.JSONArray;
+import org.codehaus.jettison.json.JSONException;
+import org.codehaus.jettison.json.JSONObject;
+import org.junit.Before;
+import org.junit.Test;
+import org.w3c.dom.Document;
+import org.w3c.dom.Element;
+import org.w3c.dom.NodeList;
+import org.xml.sax.InputSource;
+
+import com.google.inject.Guice;
+import com.google.inject.Injector;
+import com.google.inject.servlet.GuiceServletContextListener;
+import com.google.inject.servlet.ServletModule;
+import com.sun.jersey.api.client.ClientResponse;
+import com.sun.jersey.api.client.ClientResponse.Status;
+import com.sun.jersey.api.client.UniformInterfaceException;
+import com.sun.jersey.api.client.WebResource;
+import com.sun.jersey.guice.spi.container.servlet.GuiceContainer;
+import com.sun.jersey.test.framework.JerseyTest;
+import com.sun.jersey.test.framework.WebAppDescriptor;
+
+/**
+ * Test the history server REST API for getting jobs, a specific job, job
+ * counters, and job attempts.
+ *
+ * /ws/v1/history/mapreduce/jobs
+ * /ws/v1/history/mapreduce/jobs/{jobid}
+ * /ws/v1/history/mapreduce/jobs/{jobid}/counters
+ * /ws/v1/history/mapreduce/jobs/{jobid}/attempts
+ */
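+// Hedged sketch of the jobs-list JSON these tests consume (field values are
+// hypothetical; the "jobs"/"job" nesting and field names mirror the
+// assertions in verifyHsJobXML() and VerifyJobsUtils.verifyHsJob()):
+//   {"jobs": {"job": [{"id": "<jobid>", "user": "<user>", "name": "<name>",
+//     "state": "<state>", "queue": "<queue>", ...}]}}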
+public class TestHsWebServicesJobs extends JerseyTest {
+
+ private static Configuration conf = new Configuration();
+ private static TestAppContext appContext;
+ private static HsWebApp webApp;
+
+ static class TestAppContext implements AppContext {
+ final ApplicationAttemptId appAttemptID;
+ final ApplicationId appID;
+ final String user = MockJobs.newUserName();
+ final Map<JobId, Job> jobs;
+ final long startTime = System.currentTimeMillis();
+
+ TestAppContext(int appid, int numJobs, int numTasks, int numAttempts) {
+ appID = MockJobs.newAppID(appid);
+ appAttemptID = MockJobs.newAppAttemptID(appID, 0);
+ jobs = MockJobs.newJobs(appID, numJobs, numTasks, numAttempts);
+ }
+
+ TestAppContext() {
+ this(0, 1, 2, 1);
+ }
+
+ @Override
+ public ApplicationAttemptId getApplicationAttemptId() {
+ return appAttemptID;
+ }
+
+ @Override
+ public ApplicationId getApplicationID() {
+ return appID;
+ }
+
+ @Override
+ public CharSequence getUser() {
+ return user;
+ }
+
+ @Override
+ public Job getJob(JobId jobID) {
+ return jobs.get(jobID);
+ }
+
+ @Override
+ public Map<JobId, Job> getAllJobs() {
+ return jobs; // OK
+ }
+
+ @SuppressWarnings("rawtypes")
+ @Override
+ public EventHandler getEventHandler() {
+ return null;
+ }
+
+ @Override
+ public Clock getClock() {
+ return null;
+ }
+
+ @Override
+ public String getApplicationName() {
+ return "TestApp";
+ }
+
+ @Override
+ public long getStartTime() {
+ return startTime;
+ }
+ }
+
+ private Injector injector = Guice.createInjector(new ServletModule() {
+ @Override
+ protected void configureServlets() {
+
+ appContext = new TestAppContext();
+ webApp = mock(HsWebApp.class);
+ when(webApp.name()).thenReturn("hsmockwebapp");
+
+ bind(JAXBContextResolver.class);
+ bind(HsWebServices.class);
+ bind(GenericExceptionHandler.class);
+ bind(WebApp.class).toInstance(webApp);
+ bind(AppContext.class).toInstance(appContext);
+ bind(Configuration.class).toInstance(conf);
+
+ serve("/*").with(GuiceContainer.class);
+ }
+ });
+
+ public class GuiceServletConfig extends GuiceServletContextListener {
+
+ @Override
+ protected Injector getInjector() {
+ return injector;
+ }
+ }
+
+ @Before
+ @Override
+ public void setUp() throws Exception {
+ super.setUp();
+
+ }
+
+ public TestHsWebServicesJobs() {
+ super(new WebAppDescriptor.Builder(
+ "org.apache.hadoop.mapreduce.v2.hs.webapp")
+ .contextListenerClass(GuiceServletConfig.class)
+ .filterClass(com.google.inject.servlet.GuiceFilter.class)
+ .contextPath("jersey-guice-filter").servletPath("/").build());
+ }
+
+ @Test
+ public void testJobs() throws JSONException, Exception {
+ WebResource r = resource();
+ ClientResponse response = r.path("ws").path("v1").path("history")
+ .path("mapreduce").path("jobs").accept(MediaType.APPLICATION_JSON)
+ .get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ JSONObject jobs = json.getJSONObject("jobs");
+ JSONArray arr = jobs.getJSONArray("job");
+ assertEquals("incorrect number of elements", 1, arr.length());
+ JSONObject info = arr.getJSONObject(0);
+ Job job = appContext.getJob(MRApps.toJobID(info.getString("id")));
+ VerifyJobsUtils.verifyHsJob(info, job);
+
+ }
+
+ @Test
+ public void testJobsSlash() throws JSONException, Exception {
+ WebResource r = resource();
+ ClientResponse response = r.path("ws").path("v1").path("history")
+ .path("mapreduce").path("jobs/").accept(MediaType.APPLICATION_JSON)
+ .get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ JSONObject jobs = json.getJSONObject("jobs");
+ JSONArray arr = jobs.getJSONArray("job");
+ assertEquals("incorrect number of elements", 1, arr.length());
+ JSONObject info = arr.getJSONObject(0);
+ Job job = appContext.getJob(MRApps.toJobID(info.getString("id")));
+ VerifyJobsUtils.verifyHsJob(info, job);
+
+ }
+
+ @Test
+ public void testJobsDefault() throws JSONException, Exception {
+ WebResource r = resource();
+ ClientResponse response = r.path("ws").path("v1").path("history")
+ .path("mapreduce").path("jobs").get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ JSONObject jobs = json.getJSONObject("jobs");
+ JSONArray arr = jobs.getJSONArray("job");
+ assertEquals("incorrect number of elements", 1, arr.length());
+ JSONObject info = arr.getJSONObject(0);
+ Job job = appContext.getJob(MRApps.toJobID(info.getString("id")));
+ VerifyJobsUtils.verifyHsJob(info, job);
+
+ }
+
+ @Test
+ public void testJobsXML() throws Exception {
+ WebResource r = resource();
+ ClientResponse response = r.path("ws").path("v1").path("history")
+ .path("mapreduce").path("jobs").accept(MediaType.APPLICATION_XML)
+ .get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
+ String xml = response.getEntity(String.class);
+ DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
+ DocumentBuilder db = dbf.newDocumentBuilder();
+ InputSource is = new InputSource();
+ is.setCharacterStream(new StringReader(xml));
+ Document dom = db.parse(is);
+ NodeList jobs = dom.getElementsByTagName("jobs");
+ assertEquals("incorrect number of elements", 1, jobs.getLength());
+ NodeList job = dom.getElementsByTagName("job");
+ assertEquals("incorrect number of elements", 1, job.getLength());
+ verifyHsJobXML(job, appContext);
+
+ }
+
+ public void verifyHsJobXML(NodeList nodes, TestAppContext appContext) {
+
+ assertEquals("incorrect number of elements", 1, nodes.getLength());
+
+ for (int i = 0; i < nodes.getLength(); i++) {
+ Element element = (Element) nodes.item(i);
+
+ Job job = appContext.getJob(MRApps.toJobID(WebServicesTestUtils
+ .getXmlString(element, "id")));
+ assertNotNull("Job not found - output incorrect", job);
+
+ VerifyJobsUtils.verifyHsJobGeneric(job,
+ WebServicesTestUtils.getXmlString(element, "id"),
+ WebServicesTestUtils.getXmlString(element, "user"),
+ WebServicesTestUtils.getXmlString(element, "name"),
+ WebServicesTestUtils.getXmlString(element, "state"),
+ WebServicesTestUtils.getXmlString(element, "queue"),
+ WebServicesTestUtils.getXmlLong(element, "startTime"),
+ WebServicesTestUtils.getXmlLong(element, "finishTime"),
+ WebServicesTestUtils.getXmlInt(element, "mapsTotal"),
+ WebServicesTestUtils.getXmlInt(element, "mapsCompleted"),
+ WebServicesTestUtils.getXmlInt(element, "reducesTotal"),
+ WebServicesTestUtils.getXmlInt(element, "reducesCompleted"));
+
+ // restricted access fields - if security and acls set
+ VerifyJobsUtils.verifyHsJobGenericSecure(job,
+ WebServicesTestUtils.getXmlBoolean(element, "uberized"),
+ WebServicesTestUtils.getXmlString(element, "diagnostics"),
+ WebServicesTestUtils.getXmlLong(element, "avgMapTime"),
+ WebServicesTestUtils.getXmlLong(element, "avgReduceTime"),
+ WebServicesTestUtils.getXmlLong(element, "avgShuffleTime"),
+ WebServicesTestUtils.getXmlLong(element, "avgMergeTime"),
+ WebServicesTestUtils.getXmlInt(element, "failedReduceAttempts"),
+ WebServicesTestUtils.getXmlInt(element, "killedReduceAttempts"),
+ WebServicesTestUtils.getXmlInt(element, "successfulReduceAttempts"),
+ WebServicesTestUtils.getXmlInt(element, "failedMapAttempts"),
+ WebServicesTestUtils.getXmlInt(element, "killedMapAttempts"),
+ WebServicesTestUtils.getXmlInt(element, "successfulMapAttempts"));
+ }
+ }
+
+ @Test
+ public void testJobId() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+
+ ClientResponse response = r.path("ws").path("v1").path("history")
+ .path("mapreduce").path("jobs").path(jobId)
+ .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ JSONObject info = json.getJSONObject("job");
+ VerifyJobsUtils.verifyHsJob(info, jobsMap.get(id));
+ }
+
+ }
+
+ @Test
+ public void testJobIdSlash() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+
+ ClientResponse response = r.path("ws").path("v1").path("history")
+ .path("mapreduce").path("jobs").path(jobId + "/")
+ .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ JSONObject info = json.getJSONObject("job");
+ VerifyJobsUtils.verifyHsJob(info, jobsMap.get(id));
+ }
+ }
+
+ @Test
+ public void testJobIdDefault() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+
+ ClientResponse response = r.path("ws").path("v1").path("history")
+ .path("mapreduce").path("jobs").path(jobId).get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ JSONObject info = json.getJSONObject("job");
+ VerifyJobsUtils.verifyHsJob(info, jobsMap.get(id));
+ }
+
+ }
+
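+ // Hedged sketch of the error JSON checked by the negative tests below
+ // (values hypothetical; the three keys match the assertions):
+ //   {"RemoteException": {"message": "...", "exception": "...",
+ //     "javaClassName": "..."}}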
+ @Test
+ public void testJobIdNonExist() throws JSONException, Exception {
+ WebResource r = resource();
+
+ try {
+ r.path("ws").path("v1").path("history").path("mapreduce").path("jobs")
+ .path("job_1234_1_2").get(JSONObject.class);
+ fail("should have thrown exception on invalid uri");
+ } catch (UniformInterfaceException ue) {
+ ClientResponse response = ue.getResponse();
+ assertEquals(Status.NOT_FOUND, response.getClientResponseStatus());
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject msg = response.getEntity(JSONObject.class);
+ JSONObject exception = msg.getJSONObject("RemoteException");
+ assertEquals("incorrect number of elements", 3, exception.length());
+ String message = exception.getString("message");
+ String type = exception.getString("exception");
+ String classname = exception.getString("javaClassName");
+ WebServicesTestUtils.checkStringMatch("exception message",
+ "java.lang.Exception: job, job_1234_1_2, is not found", message);
+ WebServicesTestUtils.checkStringMatch("exception type",
+ "NotFoundException", type);
+ WebServicesTestUtils.checkStringMatch("exception classname",
+ "org.apache.hadoop.yarn.webapp.NotFoundException", classname);
+ }
+ }
+
+ @Test
+ public void testJobIdInvalid() throws JSONException, Exception {
+ WebResource r = resource();
+
+ try {
+ r.path("ws").path("v1").path("history").path("mapreduce").path("jobs")
+ .path("job_foo").get(JSONObject.class);
+ fail("should have thrown exception on invalid uri");
+ } catch (UniformInterfaceException ue) {
+ ClientResponse response = ue.getResponse();
+ assertEquals(Status.BAD_REQUEST, response.getClientResponseStatus());
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject msg = response.getEntity(JSONObject.class);
+ JSONObject exception = msg.getJSONObject("RemoteException");
+ assertEquals("incorrect number of elements", 3, exception.length());
+ String message = exception.getString("message");
+ String type = exception.getString("exception");
+ String classname = exception.getString("javaClassName");
+ WebServicesTestUtils.checkStringMatch("exception message",
+ "For input string: \"foo\"", message);
+ WebServicesTestUtils.checkStringMatch("exception type",
+ "NumberFormatException", type);
+ WebServicesTestUtils.checkStringMatch("exception classname",
+ "java.lang.NumberFormatException", classname);
+ }
+ }
+
+ @Test
+ public void testJobIdInvalidBogus() throws JSONException, Exception {
+ WebResource r = resource();
+
+ try {
+ r.path("ws").path("v1").path("history").path("mapreduce").path("jobs")
+ .path("bogusfoo").get(JSONObject.class);
+ fail("should have thrown exception on invalid uri");
+ } catch (UniformInterfaceException ue) {
+ ClientResponse response = ue.getResponse();
+ assertEquals(Status.NOT_FOUND, response.getClientResponseStatus());
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject msg = response.getEntity(JSONObject.class);
+ JSONObject exception = msg.getJSONObject("RemoteException");
+ assertEquals("incorrect number of elements", 3, exception.length());
+ String message = exception.getString("message");
+ String type = exception.getString("exception");
+ String classname = exception.getString("javaClassName");
+ WebServicesTestUtils.checkStringMatch("exception message",
+ "java.lang.Exception: Error parsing job ID: bogusfoo", message);
+ WebServicesTestUtils.checkStringMatch("exception type",
+ "NotFoundException", type);
+ WebServicesTestUtils.checkStringMatch("exception classname",
+ "org.apache.hadoop.yarn.webapp.NotFoundException", classname);
+ }
+ }
+
+ @Test
+ public void testJobIdXML() throws Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+
+ ClientResponse response = r.path("ws").path("v1").path("history")
+ .path("mapreduce").path("jobs").path(jobId)
+ .accept(MediaType.APPLICATION_XML).get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
+ String xml = response.getEntity(String.class);
+ DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
+ DocumentBuilder db = dbf.newDocumentBuilder();
+ InputSource is = new InputSource();
+ is.setCharacterStream(new StringReader(xml));
+ Document dom = db.parse(is);
+ NodeList job = dom.getElementsByTagName("job");
+ verifyHsJobXML(job, appContext);
+ }
+
+ }
+
+ @Test
+ public void testJobCounters() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+
+ ClientResponse response = r.path("ws").path("v1").path("history")
+ .path("mapreduce").path("jobs").path(jobId).path("counters")
+ .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ JSONObject info = json.getJSONObject("jobCounters");
+ verifyHsJobCounters(info, jobsMap.get(id));
+ }
+ }
+
+ @Test
+ public void testJobCountersSlash() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+
+ ClientResponse response = r.path("ws").path("v1").path("history")
+ .path("mapreduce").path("jobs").path(jobId).path("counters/")
+ .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ JSONObject info = json.getJSONObject("jobCounters");
+ verifyHsJobCounters(info, jobsMap.get(id));
+ }
+ }
+
+ @Test
+ public void testJobCountersDefault() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+
+ ClientResponse response = r.path("ws").path("v1").path("history")
+ .path("mapreduce").path("jobs").path(jobId).path("counters/")
+ .get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ JSONObject info = json.getJSONObject("jobCounters");
+ verifyHsJobCounters(info, jobsMap.get(id));
+ }
+ }
+
+ @Test
+ public void testJobCountersXML() throws Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+
+ ClientResponse response = r.path("ws").path("v1").path("history")
+ .path("mapreduce").path("jobs").path(jobId).path("counters")
+ .accept(MediaType.APPLICATION_XML).get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
+ String xml = response.getEntity(String.class);
+ DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
+ DocumentBuilder db = dbf.newDocumentBuilder();
+ InputSource is = new InputSource();
+ is.setCharacterStream(new StringReader(xml));
+ Document dom = db.parse(is);
+ NodeList info = dom.getElementsByTagName("jobCounters");
+ verifyHsJobCountersXML(info, jobsMap.get(id));
+ }
+ }
+
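+ // Hedged sketch of the job-counters JSON verified below (names and values
+ // are hypothetical; the nesting mirrors the JSON calls in this method):
+ //   {"jobCounters": {"id": "<jobid>",
+ //     "counterGroup": [{"counterGroupName": "<group>",
+ //       "counter": [{"name": "<counter>", "mapCounterValue": 0,
+ //         "reduceCounterValue": 0, "totalCounterValue": 0}]}]}}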
+ public void verifyHsJobCounters(JSONObject info, Job job)
+ throws JSONException {
+
+ assertEquals("incorrect number of elements", 2, info.length());
+
+ WebServicesTestUtils.checkStringMatch("id", MRApps.toString(job.getID()),
+ info.getString("id"));
+ // just do simple verification that the fields exist - not that the
+ // data in the fields is correct
+ JSONArray counterGroups = info.getJSONArray("counterGroup");
+ for (int i = 0; i < counterGroups.length(); i++) {
+ JSONObject counterGroup = counterGroups.getJSONObject(i);
+ String name = counterGroup.getString("counterGroupName");
+ assertTrue("name not set", (name != null && !name.isEmpty()));
+ JSONArray counters = counterGroup.getJSONArray("counter");
+ for (int j = 0; j < counters.length(); j++) {
+ JSONObject counter = counters.getJSONObject(j);
+ String counterName = counter.getString("name");
+ assertTrue("counter name not set",
+ (counterName != null && !counterName.isEmpty()));
+
+ long mapValue = counter.getLong("mapCounterValue");
+ assertTrue("mapCounterValue >= 0", mapValue >= 0);
+
+ long reduceValue = counter.getLong("reduceCounterValue");
+ assertTrue("reduceCounterValue >= 0", reduceValue >= 0);
+
+ long totalValue = counter.getLong("totalCounterValue");
+ assertTrue("totalCounterValue >= 0", totalValue >= 0);
+
+ }
+ }
+ }
+
+ public void verifyHsJobCountersXML(NodeList nodes, Job job) {
+
+ for (int i = 0; i < nodes.getLength(); i++) {
+ Element element = (Element) nodes.item(i);
+
+ assertNotNull("Job not found - output incorrect", job);
+
+ WebServicesTestUtils.checkStringMatch("id", MRApps.toString(job.getID()),
+ WebServicesTestUtils.getXmlString(element, "id"));
+ // just do simple verification that the fields exist - not that the
+ // data in the fields is correct
+ NodeList groups = element.getElementsByTagName("counterGroup");
+
+ for (int j = 0; j < groups.getLength(); j++) {
+ Element counters = (Element) groups.item(j);
+ assertNotNull("should have counters in the web service info", counters);
+ String name = WebServicesTestUtils.getXmlString(counters,
+ "counterGroupName");
+ assertTrue("name not set", (name != null && !name.isEmpty()));
+ NodeList counterArr = counters.getElementsByTagName("counter");
+ for (int z = 0; z < counterArr.getLength(); z++) {
+ Element counter = (Element) counterArr.item(z);
+ String counterName = WebServicesTestUtils.getXmlString(counter,
+ "name");
+ assertTrue("counter name not set",
+ (counterName != null && !counterName.isEmpty()));
+
+ long mapValue = WebServicesTestUtils.getXmlLong(counter,
+ "mapCounterValue");
+ assertTrue("mapCounterValue not >= 0", mapValue >= 0);
+
+ long reduceValue = WebServicesTestUtils.getXmlLong(counter,
+ "reduceCounterValue");
+ assertTrue("reduceCounterValue >= 0", reduceValue >= 0);
+
+ long totalValue = WebServicesTestUtils.getXmlLong(counter,
+ "totalCounterValue");
+ assertTrue("totalCounterValue >= 0", totalValue >= 0);
+ }
+ }
+ }
+ }
+
+ @Test
+ public void testJobAttempts() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+
+ ClientResponse response = r.path("ws").path("v1").path("history")
+ .path("mapreduce").path("jobs").path(jobId).path("attempts")
+ .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ JSONObject info = json.getJSONObject("attempts");
+ verifyHsJobAttempts(info, jobsMap.get(id));
+ }
+ }
+
+ @Test
+ public void testJobAttemptsSlash() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+
+ ClientResponse response = r.path("ws").path("v1").path("history")
+ .path("mapreduce").path("jobs").path(jobId).path("attempts/")
+ .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ JSONObject info = json.getJSONObject("attempts");
+ verifyHsJobAttempts(info, jobsMap.get(id));
+ }
+ }
+
+ @Test
+ public void testJobAttemptsDefault() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+
+ ClientResponse response = r.path("ws").path("v1").path("history")
+ .path("mapreduce").path("jobs").path(jobId).path("attempts")
+ .get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ JSONObject info = json.getJSONObject("attempts");
+ verifyHsJobAttempts(info, jobsMap.get(id));
+ }
+ }
+
+ @Test
+ public void testJobAttemptsXML() throws Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+
+ ClientResponse response = r.path("ws").path("v1").path("history")
+ .path("mapreduce").path("jobs").path(jobId).path("attempts")
+ .accept(MediaType.APPLICATION_XML).get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
+ String xml = response.getEntity(String.class);
+ DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
+ DocumentBuilder db = dbf.newDocumentBuilder();
+ InputSource is = new InputSource();
+ is.setCharacterStream(new StringReader(xml));
+ Document dom = db.parse(is);
+ NodeList attempts = dom.getElementsByTagName("attempts");
+ assertEquals("incorrect number of elements", 1, attempts.getLength());
+ NodeList info = dom.getElementsByTagName("attempt");
+ verifyHsJobAttemptsXML(info, jobsMap.get(id));
+ }
+ }
+
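+ // Hedged sketch of the job-attempts JSON verified below (values are
+ // hypothetical; the keys mirror the attempt.get*() calls in this method):
+ //   {"attempts": {"attempt": [{"nodeHttpAddress": "<host:port>",
+ //     "nodeId": "<nodeid>", "id": 1, "startTime": 0,
+ //     "containerId": "<containerid>", "logsLink": "<url>"}]}}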
+ public void verifyHsJobAttempts(JSONObject info, Job job)
+ throws JSONException {
+
+ JSONArray attempts = info.getJSONArray("attempt");
+ assertEquals("incorrect number of elements", 2, attempts.length());
+ for (int i = 0; i < attempts.length(); i++) {
+ JSONObject attempt = attempts.getJSONObject(i);
+ verifyHsJobAttemptsGeneric(job, attempt.getString("nodeHttpAddress"),
+ attempt.getString("nodeId"), attempt.getInt("id"),
+ attempt.getLong("startTime"), attempt.getString("containerId"),
+ attempt.getString("logsLink"));
+ }
+ }
+
+ public void verifyHsJobAttemptsXML(NodeList nodes, Job job) {
+
+ assertEquals("incorrect number of elements", 2, nodes.getLength());
+ for (int i = 0; i < nodes.getLength(); i++) {
+ Element element = (Element) nodes.item(i);
+ verifyHsJobAttemptsGeneric(job,
+ WebServicesTestUtils.getXmlString(element, "nodeHttpAddress"),
+ WebServicesTestUtils.getXmlString(element, "nodeId"),
+ WebServicesTestUtils.getXmlInt(element, "id"),
+ WebServicesTestUtils.getXmlLong(element, "startTime"),
+ WebServicesTestUtils.getXmlString(element, "containerId"),
+ WebServicesTestUtils.getXmlString(element, "logsLink"));
+ }
+ }
+
+ public void verifyHsJobAttemptsGeneric(Job job, String nodeHttpAddress,
+ String nodeId, int id, long startTime, String containerId, String logsLink) {
+ boolean attemptFound = false;
+ for (AMInfo amInfo : job.getAMInfos()) {
+ if (amInfo.getAppAttemptId().getAttemptId() == id) {
+ attemptFound = true;
+ String nmHost = amInfo.getNodeManagerHost();
+ int nmPort = amInfo.getNodeManagerHttpPort();
+ WebServicesTestUtils.checkStringMatch("nodeHttpAddress", nmHost + ":"
+ + nmPort, nodeHttpAddress);
+ WebServicesTestUtils.checkStringMatch("nodeId",
+ BuilderUtils.newNodeId(nmHost, nmPort).toString(), nodeId);
+ assertTrue("startime not greater than 0", startTime > 0);
+ WebServicesTestUtils.checkStringMatch("containerId", amInfo
+ .getContainerId().toString(), containerId);
+
+ String localLogsLink = join(
+ "hsmockwebapp",
+ ujoin("logs", nodeId, containerId, MRApps.toString(job.getID()),
+ job.getUserName()));
+
+ assertTrue("logsLink", logsLink.contains(localLogsLink));
+ }
+ }
+ assertTrue("attempt: " + id + " was not found", attemptFound);
+ }
+
+}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobsQuery.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobsQuery.java
new file mode 100644
index 00000000000..74af1f6d767
--- /dev/null
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesJobsQuery.java
@@ -0,0 +1,656 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.mapreduce.v2.hs.webapp;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Map;
+
+import javax.ws.rs.core.MediaType;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.mapreduce.v2.api.records.JobId;
+import org.apache.hadoop.mapreduce.v2.app.AppContext;
+import org.apache.hadoop.mapreduce.v2.app.MockJobs;
+import org.apache.hadoop.mapreduce.v2.app.job.Job;
+import org.apache.hadoop.mapreduce.v2.util.MRApps;
+import org.apache.hadoop.yarn.Clock;
+import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.event.EventHandler;
+import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
+import org.apache.hadoop.yarn.webapp.WebApp;
+import org.apache.hadoop.yarn.webapp.WebServicesTestUtils;
+import org.codehaus.jettison.json.JSONArray;
+import org.codehaus.jettison.json.JSONException;
+import org.codehaus.jettison.json.JSONObject;
+import org.junit.Before;
+import org.junit.Test;
+
+import com.google.inject.Guice;
+import com.google.inject.Injector;
+import com.google.inject.servlet.GuiceServletContextListener;
+import com.google.inject.servlet.ServletModule;
+import com.sun.jersey.api.client.ClientResponse;
+import com.sun.jersey.api.client.ClientResponse.Status;
+import com.sun.jersey.api.client.WebResource;
+import com.sun.jersey.guice.spi.container.servlet.GuiceContainer;
+import com.sun.jersey.test.framework.JerseyTest;
+import com.sun.jersey.test.framework.WebAppDescriptor;
+
+/**
+ * Test the history server REST API for getting jobs with various query
+ * parameters.
+ *
+ * /ws/v1/history/mapreduce/jobs?{query=value}
+ */
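+// Example query strings exercised by the tests below (illustrative only;
+// the parameter names match the queryParam() calls):
+//   /ws/v1/history/mapreduce/jobs?user=mock
+//   /ws/v1/history/mapreduce/jobs?limit=2
+//   /ws/v1/history/mapreduce/jobs?startedTimeBegin=40000&startedTimeEnd=<ms>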
+public class TestHsWebServicesJobsQuery extends JerseyTest {
+
+ private static Configuration conf = new Configuration();
+ private static TestAppContext appContext;
+ private static HsWebApp webApp;
+
+ static class TestAppContext implements AppContext {
+ final ApplicationAttemptId appAttemptID;
+ final ApplicationId appID;
+ final String user = MockJobs.newUserName();
+ final Map<JobId, Job> jobs;
+ final long startTime = System.currentTimeMillis();
+
+ TestAppContext(int appid, int numJobs, int numTasks, int numAttempts) {
+ appID = MockJobs.newAppID(appid);
+ appAttemptID = MockJobs.newAppAttemptID(appID, 0);
+ jobs = MockJobs.newJobs(appID, numJobs, numTasks, numAttempts);
+ }
+
+ TestAppContext() {
+ this(0, 3, 2, 1);
+ }
+
+ @Override
+ public ApplicationAttemptId getApplicationAttemptId() {
+ return appAttemptID;
+ }
+
+ @Override
+ public ApplicationId getApplicationID() {
+ return appID;
+ }
+
+ @Override
+ public CharSequence getUser() {
+ return user;
+ }
+
+ @Override
+ public Job getJob(JobId jobID) {
+ return jobs.get(jobID);
+ }
+
+ @Override
+ public Map<JobId, Job> getAllJobs() {
+ return jobs; // OK
+ }
+
+ @SuppressWarnings("rawtypes")
+ @Override
+ public EventHandler getEventHandler() {
+ return null;
+ }
+
+ @Override
+ public Clock getClock() {
+ return null;
+ }
+
+ @Override
+ public String getApplicationName() {
+ return "TestApp";
+ }
+
+ @Override
+ public long getStartTime() {
+ return startTime;
+ }
+ }
+
+ private Injector injector = Guice.createInjector(new ServletModule() {
+ @Override
+ protected void configureServlets() {
+
+ appContext = new TestAppContext();
+ webApp = mock(HsWebApp.class);
+ when(webApp.name()).thenReturn("hsmockwebapp");
+
+ bind(JAXBContextResolver.class);
+ bind(HsWebServices.class);
+ bind(GenericExceptionHandler.class);
+ bind(WebApp.class).toInstance(webApp);
+ bind(AppContext.class).toInstance(appContext);
+ bind(Configuration.class).toInstance(conf);
+
+ serve("/*").with(GuiceContainer.class);
+ }
+ });
+
+ public class GuiceServletConfig extends GuiceServletContextListener {
+
+ @Override
+ protected Injector getInjector() {
+ return injector;
+ }
+ }
+
+ @Before
+ @Override
+ public void setUp() throws Exception {
+ super.setUp();
+
+ }
+
+ public TestHsWebServicesJobsQuery() {
+ super(new WebAppDescriptor.Builder(
+ "org.apache.hadoop.mapreduce.v2.hs.webapp")
+ .contextListenerClass(GuiceServletConfig.class)
+ .filterClass(com.google.inject.servlet.GuiceFilter.class)
+ .contextPath("jersey-guice-filter").servletPath("/").build());
+ }
+
+ @Test
+ public void testJobsQueryUserNone() throws JSONException, Exception {
+ WebResource r = resource();
+ ClientResponse response = r.path("ws").path("v1").path("history")
+ .path("mapreduce").path("jobs").queryParam("user", "bogus")
+ .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ assertEquals("jobs is not null", JSONObject.NULL, json.get("jobs"));
+ }
+
+ @Test
+ public void testJobsQueryUser() throws JSONException, Exception {
+ WebResource r = resource();
+ ClientResponse response = r.path("ws").path("v1").path("history")
+ .path("mapreduce").path("jobs").queryParam("user", "mock")
+ .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ JSONObject jobs = json.getJSONObject("jobs");
+ JSONArray arr = jobs.getJSONArray("job");
+ assertEquals("incorrect number of elements", 3, arr.length());
+ // just verify one of them.
+ JSONObject info = arr.getJSONObject(0);
+ Job job = appContext.getJob(MRApps.toJobID(info.getString("id")));
+ VerifyJobsUtils.verifyHsJob(info, job);
+ }
+
+ @Test
+ public void testJobsQueryLimit() throws JSONException, Exception {
+ WebResource r = resource();
+ ClientResponse response = r.path("ws").path("v1").path("history")
+ .path("mapreduce").path("jobs").queryParam("limit", "2")
+ .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ JSONObject jobs = json.getJSONObject("jobs");
+ JSONArray arr = jobs.getJSONArray("job");
+ // make sure we get 2 back
+ assertEquals("incorrect number of elements", 2, arr.length());
+ }
+
+ @Test
+ public void testJobsQueryLimitInvalid() throws JSONException, Exception {
+ WebResource r = resource();
+
+ ClientResponse response = r.path("ws").path("v1").path("history")
+ .path("mapreduce").path("jobs").queryParam("limit", "-1")
+ .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+
+ assertEquals(Status.BAD_REQUEST, response.getClientResponseStatus());
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject msg = response.getEntity(JSONObject.class);
+ JSONObject exception = msg.getJSONObject("RemoteException");
+ assertEquals("incorrect number of elements", 3, exception.length());
+ String message = exception.getString("message");
+ String type = exception.getString("exception");
+ String classname = exception.getString("javaClassName");
+ WebServicesTestUtils.checkStringMatch("exception message",
+ "java.lang.Exception: limit value must be greater then 0", message);
+ WebServicesTestUtils.checkStringMatch("exception type",
+ "BadRequestException", type);
+ WebServicesTestUtils.checkStringMatch("exception classname",
+ "org.apache.hadoop.yarn.webapp.BadRequestException", classname);
+ }
+
+ @Test
+ public void testJobsQueryQueue() throws JSONException, Exception {
+ WebResource r = resource();
+ ClientResponse response = r.path("ws").path("v1").path("history")
+ .path("mapreduce").path("jobs").queryParam("queue", "mockqueue")
+ .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ JSONObject jobs = json.getJSONObject("jobs");
+ JSONArray arr = jobs.getJSONArray("job");
+ assertEquals("incorrect number of elements", 3, arr.length());
+ }
+
+ @Test
+ public void testJobsQueryQueueNonExist() throws JSONException, Exception {
+ WebResource r = resource();
+ ClientResponse response = r.path("ws").path("v1").path("history")
+ .path("mapreduce").path("jobs").queryParam("queue", "bogus")
+ .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ assertEquals("jobs is not null", JSONObject.NULL, json.get("jobs"));
+ }
+
+ @Test
+ public void testJobsQueryStartTimeEnd() throws JSONException, Exception {
+ WebResource r = resource();
+ // the mockJobs start time is the current time - some random amount
+ Long now = System.currentTimeMillis();
+ ClientResponse response = r.path("ws").path("v1").path("history")
+ .path("mapreduce").path("jobs")
+ .queryParam("startedTimeEnd", String.valueOf(now))
+ .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ JSONObject jobs = json.getJSONObject("jobs");
+ JSONArray arr = jobs.getJSONArray("job");
+ assertEquals("incorrect number of elements", 3, arr.length());
+ }
+
+ @Test
+ public void testJobsQueryStartTimeBegin() throws JSONException, Exception {
+ WebResource r = resource();
+ // the mockJobs start time is the current time - some random amount
+ Long now = System.currentTimeMillis();
+ ClientResponse response = r.path("ws").path("v1").path("history")
+ .path("mapreduce").path("jobs")
+ .queryParam("startedTimeBegin", String.valueOf(now))
+ .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ assertEquals("jobs is not null", JSONObject.NULL, json.get("jobs"));
+ }
+
+ @Test
+ public void testJobsQueryStartTimeBeginEnd() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ int size = jobsMap.size();
+ ArrayList<Long> startTime = new ArrayList<Long>(size);
+ // figure out the middle start time
+ for (Map.Entry<JobId, Job> entry : jobsMap.entrySet()) {
+ startTime.add(entry.getValue().getReport().getStartTime());
+ }
+ Collections.sort(startTime);
+
+ assertTrue("Error we must have atleast 3 jobs", size >= 3);
+ long midStartTime = startTime.get(size - 2);
+
+ ClientResponse response = r.path("ws").path("v1").path("history")
+ .path("mapreduce").path("jobs")
+ .queryParam("startedTimeBegin", String.valueOf(40000))
+ .queryParam("startedTimeEnd", String.valueOf(midStartTime))
+ .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ JSONObject jobs = json.getJSONObject("jobs");
+ JSONArray arr = jobs.getJSONArray("job");
+ assertEquals("incorrect number of elements", size - 1, arr.length());
+ }
+
+ @Test
+ public void testJobsQueryStartTimeBeginEndInvalid() throws JSONException,
+ Exception {
+ WebResource r = resource();
+ Long now = System.currentTimeMillis();
+ ClientResponse response = r.path("ws").path("v1").path("history")
+ .path("mapreduce").path("jobs")
+ .queryParam("startedTimeBegin", String.valueOf(now))
+ .queryParam("startedTimeEnd", String.valueOf(40000))
+ .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+ assertEquals(Status.BAD_REQUEST, response.getClientResponseStatus());
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject msg = response.getEntity(JSONObject.class);
+ JSONObject exception = msg.getJSONObject("RemoteException");
+ assertEquals("incorrect number of elements", 3, exception.length());
+ String message = exception.getString("message");
+ String type = exception.getString("exception");
+ String classname = exception.getString("javaClassName");
+ WebServicesTestUtils
+ .checkStringMatch(
+ "exception message",
+ "java.lang.Exception: startedTimeEnd must be greater than startTimeBegin",
+ message);
+ WebServicesTestUtils.checkStringMatch("exception type",
+ "BadRequestException", type);
+ WebServicesTestUtils.checkStringMatch("exception classname",
+ "org.apache.hadoop.yarn.webapp.BadRequestException", classname);
+ }
+
+ @Test
+ public void testJobsQueryStartTimeInvalidformat() throws JSONException,
+ Exception {
+ WebResource r = resource();
+ ClientResponse response = r.path("ws").path("v1").path("history")
+ .path("mapreduce").path("jobs").queryParam("startedTimeBegin", "efsd")
+ .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+ assertEquals(Status.BAD_REQUEST, response.getClientResponseStatus());
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject msg = response.getEntity(JSONObject.class);
+ JSONObject exception = msg.getJSONObject("RemoteException");
+ assertEquals("incorrect number of elements", 3, exception.length());
+ String message = exception.getString("message");
+ String type = exception.getString("exception");
+ String classname = exception.getString("javaClassName");
+ WebServicesTestUtils
+ .checkStringMatch(
+ "exception message",
+ "java.lang.Exception: Invalid number format: For input string: \"efsd\"",
+ message);
+ WebServicesTestUtils.checkStringMatch("exception type",
+ "BadRequestException", type);
+ WebServicesTestUtils.checkStringMatch("exception classname",
+ "org.apache.hadoop.yarn.webapp.BadRequestException", classname);
+ }
+
+ @Test
+ public void testJobsQueryStartTimeEndInvalidformat() throws JSONException,
+ Exception {
+ WebResource r = resource();
+ ClientResponse response = r.path("ws").path("v1").path("history")
+ .path("mapreduce").path("jobs").queryParam("startedTimeEnd", "efsd")
+ .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+ assertEquals(Status.BAD_REQUEST, response.getClientResponseStatus());
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject msg = response.getEntity(JSONObject.class);
+ JSONObject exception = msg.getJSONObject("RemoteException");
+ assertEquals("incorrect number of elements", 3, exception.length());
+ String message = exception.getString("message");
+ String type = exception.getString("exception");
+ String classname = exception.getString("javaClassName");
+ WebServicesTestUtils
+ .checkStringMatch(
+ "exception message",
+ "java.lang.Exception: Invalid number format: For input string: \"efsd\"",
+ message);
+ WebServicesTestUtils.checkStringMatch("exception type",
+ "BadRequestException", type);
+ WebServicesTestUtils.checkStringMatch("exception classname",
+ "org.apache.hadoop.yarn.webapp.BadRequestException", classname);
+ }
+
+ @Test
+ public void testJobsQueryStartTimeNegative() throws JSONException, Exception {
+ WebResource r = resource();
+ ClientResponse response = r.path("ws").path("v1").path("history")
+ .path("mapreduce").path("jobs")
+ .queryParam("startedTimeBegin", String.valueOf(-1000))
+ .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+ assertEquals(Status.BAD_REQUEST, response.getClientResponseStatus());
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject msg = response.getEntity(JSONObject.class);
+ JSONObject exception = msg.getJSONObject("RemoteException");
+ assertEquals("incorrect number of elements", 3, exception.length());
+ String message = exception.getString("message");
+ String type = exception.getString("exception");
+ String classname = exception.getString("javaClassName");
+ WebServicesTestUtils
+ .checkStringMatch("exception message",
+ "java.lang.Exception: startedTimeBegin must be greater than 0",
+ message);
+ WebServicesTestUtils.checkStringMatch("exception type",
+ "BadRequestException", type);
+ WebServicesTestUtils.checkStringMatch("exception classname",
+ "org.apache.hadoop.yarn.webapp.BadRequestException", classname);
+ }
+
+ @Test
+ public void testJobsQueryStartTimeEndNegative() throws JSONException,
+ Exception {
+ WebResource r = resource();
+ ClientResponse response = r.path("ws").path("v1").path("history")
+ .path("mapreduce").path("jobs")
+ .queryParam("startedTimeEnd", String.valueOf(-1000))
+ .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+ assertEquals(Status.BAD_REQUEST, response.getClientResponseStatus());
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject msg = response.getEntity(JSONObject.class);
+ JSONObject exception = msg.getJSONObject("RemoteException");
+ assertEquals("incorrect number of elements", 3, exception.length());
+ String message = exception.getString("message");
+ String type = exception.getString("exception");
+ String classname = exception.getString("javaClassName");
+ WebServicesTestUtils.checkStringMatch("exception message",
+ "java.lang.Exception: startedTimeEnd must be greater than 0", message);
+ WebServicesTestUtils.checkStringMatch("exception type",
+ "BadRequestException", type);
+ WebServicesTestUtils.checkStringMatch("exception classname",
+ "org.apache.hadoop.yarn.webapp.BadRequestException", classname);
+ }
+
+ @Test
+ public void testJobsQueryFinishTimeEndNegative() throws JSONException,
+ Exception {
+ WebResource r = resource();
+ ClientResponse response = r.path("ws").path("v1").path("history")
+ .path("mapreduce").path("jobs")
+ .queryParam("finishedTimeEnd", String.valueOf(-1000))
+ .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+ assertEquals(Status.BAD_REQUEST, response.getClientResponseStatus());
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject msg = response.getEntity(JSONObject.class);
+ JSONObject exception = msg.getJSONObject("RemoteException");
+ assertEquals("incorrect number of elements", 3, exception.length());
+ String message = exception.getString("message");
+ String type = exception.getString("exception");
+ String classname = exception.getString("javaClassName");
+ WebServicesTestUtils.checkStringMatch("exception message",
+ "java.lang.Exception: finishedTimeEnd must be greater than 0", message);
+ WebServicesTestUtils.checkStringMatch("exception type",
+ "BadRequestException", type);
+ WebServicesTestUtils.checkStringMatch("exception classname",
+ "org.apache.hadoop.yarn.webapp.BadRequestException", classname);
+ }
+
+ @Test
+ public void testJobsQueryFinishTimeBeginNegative() throws JSONException,
+ Exception {
+ WebResource r = resource();
+ ClientResponse response = r.path("ws").path("v1").path("history")
+ .path("mapreduce").path("jobs")
+ .queryParam("finishedTimeBegin", String.valueOf(-1000))
+ .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+ assertEquals(Status.BAD_REQUEST, response.getClientResponseStatus());
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject msg = response.getEntity(JSONObject.class);
+ JSONObject exception = msg.getJSONObject("RemoteException");
+ assertEquals("incorrect number of elements", 3, exception.length());
+ String message = exception.getString("message");
+ String type = exception.getString("exception");
+ String classname = exception.getString("javaClassName");
+ WebServicesTestUtils.checkStringMatch("exception message",
+ "java.lang.Exception: finishedTimeBegin must be greater than 0",
+ message);
+ WebServicesTestUtils.checkStringMatch("exception type",
+ "BadRequestException", type);
+ WebServicesTestUtils.checkStringMatch("exception classname",
+ "org.apache.hadoop.yarn.webapp.BadRequestException", classname);
+ }
+
+ @Test
+ public void testJobsQueryFinishTimeBeginEndInvalid() throws JSONException,
+ Exception {
+ WebResource r = resource();
+ Long now = System.currentTimeMillis();
+ ClientResponse response = r.path("ws").path("v1").path("history")
+ .path("mapreduce").path("jobs")
+ .queryParam("finishedTimeBegin", String.valueOf(now))
+ .queryParam("finishedTimeEnd", String.valueOf(40000))
+ .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+ assertEquals(Status.BAD_REQUEST, response.getClientResponseStatus());
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject msg = response.getEntity(JSONObject.class);
+ JSONObject exception = msg.getJSONObject("RemoteException");
+ assertEquals("incorrect number of elements", 3, exception.length());
+ String message = exception.getString("message");
+ String type = exception.getString("exception");
+ String classname = exception.getString("javaClassName");
+ WebServicesTestUtils
+ .checkStringMatch(
+ "exception message",
+ "java.lang.Exception: finishedTimeEnd must be greater than finishedTimeBegin",
+ message);
+ WebServicesTestUtils.checkStringMatch("exception type",
+ "BadRequestException", type);
+ WebServicesTestUtils.checkStringMatch("exception classname",
+ "org.apache.hadoop.yarn.webapp.BadRequestException", classname);
+ }
+
+ @Test
+ public void testJobsQueryFinishTimeInvalidformat() throws JSONException,
+ Exception {
+ WebResource r = resource();
+ ClientResponse response = r.path("ws").path("v1").path("history")
+ .path("mapreduce").path("jobs").queryParam("finishedTimeBegin", "efsd")
+ .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+ assertEquals(Status.BAD_REQUEST, response.getClientResponseStatus());
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject msg = response.getEntity(JSONObject.class);
+ JSONObject exception = msg.getJSONObject("RemoteException");
+ assertEquals("incorrect number of elements", 3, exception.length());
+ String message = exception.getString("message");
+ String type = exception.getString("exception");
+ String classname = exception.getString("javaClassName");
+ WebServicesTestUtils
+ .checkStringMatch(
+ "exception message",
+ "java.lang.Exception: Invalid number format: For input string: \"efsd\"",
+ message);
+ WebServicesTestUtils.checkStringMatch("exception type",
+ "BadRequestException", type);
+ WebServicesTestUtils.checkStringMatch("exception classname",
+ "org.apache.hadoop.yarn.webapp.BadRequestException", classname);
+ }
+
+ @Test
+ public void testJobsQueryFinishTimeEndInvalidformat() throws JSONException,
+ Exception {
+ WebResource r = resource();
+ ClientResponse response = r.path("ws").path("v1").path("history")
+ .path("mapreduce").path("jobs").queryParam("finishedTimeEnd", "efsd")
+ .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+ assertEquals(Status.BAD_REQUEST, response.getClientResponseStatus());
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject msg = response.getEntity(JSONObject.class);
+ JSONObject exception = msg.getJSONObject("RemoteException");
+ assertEquals("incorrect number of elements", 3, exception.length());
+ String message = exception.getString("message");
+ String type = exception.getString("exception");
+ String classname = exception.getString("javaClassName");
+ WebServicesTestUtils
+ .checkStringMatch(
+ "exception message",
+ "java.lang.Exception: Invalid number format: For input string: \"efsd\"",
+ message);
+ WebServicesTestUtils.checkStringMatch("exception type",
+ "BadRequestException", type);
+ WebServicesTestUtils.checkStringMatch("exception classname",
+ "org.apache.hadoop.yarn.webapp.BadRequestException", classname);
+ }
+
+ @Test
+ public void testJobsQueryFinishTimeBegin() throws JSONException, Exception {
+ WebResource r = resource();
+ // the mockJobs finish time is the current time + some random amount
+ Long now = System.currentTimeMillis();
+ ClientResponse response = r.path("ws").path("v1").path("history")
+ .path("mapreduce").path("jobs")
+ .queryParam("finishedTimeBegin", String.valueOf(now))
+ .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ JSONObject jobs = json.getJSONObject("jobs");
+ JSONArray arr = jobs.getJSONArray("job");
+ assertEquals("incorrect number of elements", 3, arr.length());
+ }
+
+ @Test
+ public void testJobsQueryFinishTimeEnd() throws JSONException, Exception {
+ WebResource r = resource();
+ // the mockJobs finish time is the current time + some random amount
+ Long now = System.currentTimeMillis();
+ ClientResponse response = r.path("ws").path("v1").path("history")
+ .path("mapreduce").path("jobs")
+ .queryParam("finishedTimeEnd", String.valueOf(now))
+ .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ assertEquals("jobs is not null", JSONObject.NULL, json.get("jobs"));
+ }
+
+ @Test
+ public void testJobsQueryFinishTimeBeginEnd() throws JSONException, Exception {
+ WebResource r = resource();
+
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ int size = jobsMap.size();
+ // figure out the mid finish time - we expect at least 3 jobs
+ ArrayList<Long> finishTime = new ArrayList<Long>(size);
+ for (Map.Entry<JobId, Job> entry : jobsMap.entrySet()) {
+ finishTime.add(entry.getValue().getReport().getFinishTime());
+ }
+ Collections.sort(finishTime);
+
+ assertTrue("Error we must have atleast 3 jobs", size >= 3);
+ long midFinishTime = finishTime.get(size - 2);
+
+ ClientResponse response = r.path("ws").path("v1").path("history")
+ .path("mapreduce").path("jobs")
+ .queryParam("finishedTimeBegin", String.valueOf(40000))
+ .queryParam("finishedTimeEnd", String.valueOf(midFinishTime))
+ .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ JSONObject jobs = json.getJSONObject("jobs");
+ JSONArray arr = jobs.getJSONArray("job");
+ assertEquals("incorrect number of elements", size - 1, arr.length());
+ }
+
+}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesTasks.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesTasks.java
new file mode 100644
index 00000000000..3dbe860c182
--- /dev/null
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesTasks.java
@@ -0,0 +1,835 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.mapreduce.v2.hs.webapp;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+import java.io.StringReader;
+import java.util.Map;
+
+import javax.ws.rs.core.MediaType;
+import javax.xml.parsers.DocumentBuilder;
+import javax.xml.parsers.DocumentBuilderFactory;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.mapreduce.v2.api.records.JobId;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskReport;
+import org.apache.hadoop.mapreduce.v2.app.AppContext;
+import org.apache.hadoop.mapreduce.v2.app.MockJobs;
+import org.apache.hadoop.mapreduce.v2.app.job.Job;
+import org.apache.hadoop.mapreduce.v2.app.job.Task;
+import org.apache.hadoop.mapreduce.v2.util.MRApps;
+import org.apache.hadoop.yarn.Clock;
+import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.event.EventHandler;
+import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
+import org.apache.hadoop.yarn.webapp.WebApp;
+import org.apache.hadoop.yarn.webapp.WebServicesTestUtils;
+import org.codehaus.jettison.json.JSONArray;
+import org.codehaus.jettison.json.JSONException;
+import org.codehaus.jettison.json.JSONObject;
+import org.junit.Before;
+import org.junit.Test;
+import org.w3c.dom.Document;
+import org.w3c.dom.Element;
+import org.w3c.dom.NodeList;
+import org.xml.sax.InputSource;
+
+import com.google.inject.Guice;
+import com.google.inject.Injector;
+import com.google.inject.servlet.GuiceServletContextListener;
+import com.google.inject.servlet.ServletModule;
+import com.sun.jersey.api.client.ClientResponse;
+import com.sun.jersey.api.client.ClientResponse.Status;
+import com.sun.jersey.api.client.UniformInterfaceException;
+import com.sun.jersey.api.client.WebResource;
+import com.sun.jersey.guice.spi.container.servlet.GuiceContainer;
+import com.sun.jersey.test.framework.JerseyTest;
+import com.sun.jersey.test.framework.WebAppDescriptor;
+
+/**
+ * Test the history server REST API for getting tasks, a specific task,
+ * and task counters.
+ *
+ * /ws/v1/history/mapreduce/jobs/{jobid}/tasks
+ * /ws/v1/history/mapreduce/jobs/{jobid}/tasks/{taskid}
+ * /ws/v1/history/mapreduce/jobs/{jobid}/tasks/{taskid}/counters
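+ *
+ * A GET on the tasks URI, for example, returns JSON of the form
+ * (illustrative): {"tasks": {"task": [{"id": "...", "type": "MAP", ...}]}},
+ * which is what the assertions below unpack.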
+ */
+public class TestHsWebServicesTasks extends JerseyTest {
+
+ private static Configuration conf = new Configuration();
+ private static TestAppContext appContext;
+ private static HsWebApp webApp;
+
+ static class TestAppContext implements AppContext {
+ final ApplicationAttemptId appAttemptID;
+ final ApplicationId appID;
+ final String user = MockJobs.newUserName();
+ final Map<JobId, Job> jobs;
+ final long startTime = System.currentTimeMillis();
+
+ TestAppContext(int appid, int numJobs, int numTasks, int numAttempts) {
+ appID = MockJobs.newAppID(appid);
+ appAttemptID = MockJobs.newAppAttemptID(appID, 0);
+ jobs = MockJobs.newJobs(appID, numJobs, numTasks, numAttempts);
+ }
+
+ TestAppContext() {
+ this(0, 1, 2, 1);
+ }
+
+ @Override
+ public ApplicationAttemptId getApplicationAttemptId() {
+ return appAttemptID;
+ }
+
+ @Override
+ public ApplicationId getApplicationID() {
+ return appID;
+ }
+
+ @Override
+ public CharSequence getUser() {
+ return user;
+ }
+
+ @Override
+ public Job getJob(JobId jobID) {
+ return jobs.get(jobID);
+ }
+
+ @Override
+ public Map<JobId, Job> getAllJobs() {
+ return jobs; // OK
+ }
+
+ @SuppressWarnings("rawtypes")
+ @Override
+ public EventHandler getEventHandler() {
+ return null;
+ }
+
+ @Override
+ public Clock getClock() {
+ return null;
+ }
+
+ @Override
+ public String getApplicationName() {
+ return "TestApp";
+ }
+
+ @Override
+ public long getStartTime() {
+ return startTime;
+ }
+ }
+
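+ // wires the real history server web services into an embedded Jersey
+ // test container, backed by the mocked AppContext above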
+ private Injector injector = Guice.createInjector(new ServletModule() {
+ @Override
+ protected void configureServlets() {
+
+ appContext = new TestAppContext();
+ webApp = mock(HsWebApp.class);
+ when(webApp.name()).thenReturn("hsmockwebapp");
+
+ bind(JAXBContextResolver.class);
+ bind(HsWebServices.class);
+ bind(GenericExceptionHandler.class);
+ bind(WebApp.class).toInstance(webApp);
+ bind(AppContext.class).toInstance(appContext);
+ bind(Configuration.class).toInstance(conf);
+
+ serve("/*").with(GuiceContainer.class);
+ }
+ });
+
+ public class GuiceServletConfig extends GuiceServletContextListener {
+
+ @Override
+ protected Injector getInjector() {
+ return injector;
+ }
+ }
+
+ @Before
+ @Override
+ public void setUp() throws Exception {
+ super.setUp();
+ }
+
+ public TestHsWebServicesTasks() {
+ super(new WebAppDescriptor.Builder(
+ "org.apache.hadoop.mapreduce.v2.hs.webapp")
+ .contextListenerClass(GuiceServletConfig.class)
+ .filterClass(com.google.inject.servlet.GuiceFilter.class)
+ .contextPath("jersey-guice-filter").servletPath("/").build());
+ }
+
+ @Test
+ public void testTasks() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+ ClientResponse response = r.path("ws").path("v1").path("history")
+ .path("mapreduce").path("jobs").path(jobId).path("tasks")
+ .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ JSONObject tasks = json.getJSONObject("tasks");
+ JSONArray arr = tasks.getJSONArray("task");
+ assertEquals("incorrect number of elements", 2, arr.length());
+
+ verifyHsTask(arr, jobsMap.get(id), null);
+ }
+ }
+
+ @Test
+ public void testTasksDefault() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+ ClientResponse response = r.path("ws").path("v1").path("history")
+ .path("mapreduce").path("jobs").path(jobId).path("tasks")
+ .get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ JSONObject tasks = json.getJSONObject("tasks");
+ JSONArray arr = tasks.getJSONArray("task");
+ assertEquals("incorrect number of elements", 2, arr.length());
+
+ verifyHsTask(arr, jobsMap.get(id), null);
+ }
+ }
+
+ @Test
+ public void testTasksSlash() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+ ClientResponse response = r.path("ws").path("v1").path("history")
+ .path("mapreduce").path("jobs").path(jobId).path("tasks/")
+ .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ JSONObject tasks = json.getJSONObject("tasks");
+ JSONArray arr = tasks.getJSONArray("task");
+ assertEquals("incorrect number of elements", 2, arr.length());
+
+ verifyHsTask(arr, jobsMap.get(id), null);
+ }
+ }
+
+ @Test
+ public void testTasksXML() throws JSONException, Exception {
+
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+ ClientResponse response = r.path("ws").path("v1").path("history")
+ .path("mapreduce").path("jobs").path(jobId).path("tasks")
+ .accept(MediaType.APPLICATION_XML).get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
+ String xml = response.getEntity(String.class);
+ DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
+ DocumentBuilder db = dbf.newDocumentBuilder();
+ InputSource is = new InputSource();
+ is.setCharacterStream(new StringReader(xml));
+ Document dom = db.parse(is);
+ NodeList tasks = dom.getElementsByTagName("tasks");
+ assertEquals("incorrect number of elements", 1, tasks.getLength());
+ NodeList task = dom.getElementsByTagName("task");
+ verifyHsTaskXML(task, jobsMap.get(id));
+ }
+ }
+
+ @Test
+ public void testTasksQueryMap() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+ String type = "m";
+ ClientResponse response = r.path("ws").path("v1").path("history")
+ .path("mapreduce").path("jobs").path(jobId).path("tasks")
+ .queryParam("type", type).accept(MediaType.APPLICATION_JSON)
+ .get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ JSONObject tasks = json.getJSONObject("tasks");
+ JSONArray arr = tasks.getJSONArray("task");
+ assertEquals("incorrect number of elements", 1, arr.length());
+ verifyHsTask(arr, jobsMap.get(id), type);
+ }
+ }
+
+ @Test
+ public void testTasksQueryReduce() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+ String type = "r";
+ ClientResponse response = r.path("ws").path("v1").path("history")
+ .path("mapreduce").path("jobs").path(jobId).path("tasks")
+ .queryParam("type", type).accept(MediaType.APPLICATION_JSON)
+ .get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ JSONObject tasks = json.getJSONObject("tasks");
+ JSONArray arr = tasks.getJSONArray("task");
+ assertEquals("incorrect number of elements", 1, arr.length());
+ verifyHsTask(arr, jobsMap.get(id), type);
+ }
+ }
+
+ @Test
+ public void testTasksQueryInvalid() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+ // tasktype must be exactly either "m" or "r"
+ String tasktype = "reduce";
+
+ try {
+ r.path("ws").path("v1").path("history").path("mapreduce").path("jobs")
+ .path(jobId).path("tasks").queryParam("type", tasktype)
+ .accept(MediaType.APPLICATION_JSON).get(JSONObject.class);
+ fail("should have thrown exception on invalid uri");
+ } catch (UniformInterfaceException ue) {
+ ClientResponse response = ue.getResponse();
+ assertEquals(Status.BAD_REQUEST, response.getClientResponseStatus());
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject msg = response.getEntity(JSONObject.class);
+ JSONObject exception = msg.getJSONObject("RemoteException");
+ assertEquals("incorrect number of elements", 3, exception.length());
+ String message = exception.getString("message");
+ String type = exception.getString("exception");
+ String classname = exception.getString("javaClassName");
+ WebServicesTestUtils.checkStringMatch("exception message",
+ "java.lang.Exception: tasktype must be either m or r", message);
+ WebServicesTestUtils.checkStringMatch("exception type",
+ "BadRequestException", type);
+ WebServicesTestUtils.checkStringMatch("exception classname",
+ "org.apache.hadoop.yarn.webapp.BadRequestException", classname);
+ }
+ }
+ }
+
+ @Test
+ public void testTaskId() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+ for (Task task : jobsMap.get(id).getTasks().values()) {
+
+ String tid = MRApps.toString(task.getID());
+ ClientResponse response = r.path("ws").path("v1").path("history")
+ .path("mapreduce").path("jobs").path(jobId).path("tasks").path(tid)
+ .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ JSONObject info = json.getJSONObject("task");
+ verifyHsSingleTask(info, task);
+ }
+ }
+ }
+
+ @Test
+ public void testTaskIdSlash() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+ for (Task task : jobsMap.get(id).getTasks().values()) {
+
+ String tid = MRApps.toString(task.getID());
+ ClientResponse response = r.path("ws").path("v1").path("history")
+ .path("mapreduce").path("jobs").path(jobId).path("tasks")
+ .path(tid + "/").accept(MediaType.APPLICATION_JSON)
+ .get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ JSONObject info = json.getJSONObject("task");
+ verifyHsSingleTask(info, task);
+ }
+ }
+ }
+
+ @Test
+ public void testTaskIdDefault() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+ for (Task task : jobsMap.get(id).getTasks().values()) {
+
+ String tid = MRApps.toString(task.getID());
+ ClientResponse response = r.path("ws").path("v1").path("history")
+ .path("mapreduce").path("jobs").path(jobId).path("tasks").path(tid)
+ .get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ JSONObject info = json.getJSONObject("task");
+ verifyHsSingleTask(info, task);
+ }
+ }
+ }
+
+ @Test
+ public void testTaskIdBogus() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+ String tid = "bogustaskid";
+ try {
+ r.path("ws").path("v1").path("history").path("mapreduce").path("jobs")
+ .path(jobId).path("tasks").path(tid).get(JSONObject.class);
+ fail("should have thrown exception on invalid uri");
+ } catch (UniformInterfaceException ue) {
+ ClientResponse response = ue.getResponse();
+ assertEquals(Status.NOT_FOUND, response.getClientResponseStatus());
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject msg = response.getEntity(JSONObject.class);
+ JSONObject exception = msg.getJSONObject("RemoteException");
+ assertEquals("incorrect number of elements", 3, exception.length());
+ String message = exception.getString("message");
+ String type = exception.getString("exception");
+ String classname = exception.getString("javaClassName");
+ WebServicesTestUtils.checkStringMatch("exception message",
+ "java.lang.Exception: Error parsing task ID: bogustaskid", message);
+ WebServicesTestUtils.checkStringMatch("exception type",
+ "NotFoundException", type);
+ WebServicesTestUtils.checkStringMatch("exception classname",
+ "org.apache.hadoop.yarn.webapp.NotFoundException", classname);
+ }
+ }
+ }
+
+ @Test
+ public void testTaskIdNonExist() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+ String tid = "task_1234_0_0_m_0";
+ try {
+ r.path("ws").path("v1").path("history").path("mapreduce").path("jobs")
+ .path(jobId).path("tasks").path(tid).get(JSONObject.class);
+ fail("should have thrown exception on invalid uri");
+ } catch (UniformInterfaceException ue) {
+ ClientResponse response = ue.getResponse();
+ assertEquals(Status.NOT_FOUND, response.getClientResponseStatus());
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject msg = response.getEntity(JSONObject.class);
+ JSONObject exception = msg.getJSONObject("RemoteException");
+ assertEquals("incorrect number of elements", 3, exception.length());
+ String message = exception.getString("message");
+ String type = exception.getString("exception");
+ String classname = exception.getString("javaClassName");
+ WebServicesTestUtils.checkStringMatch("exception message",
+ "java.lang.Exception: task not found with id task_1234_0_0_m_0",
+ message);
+ WebServicesTestUtils.checkStringMatch("exception type",
+ "NotFoundException", type);
+ WebServicesTestUtils.checkStringMatch("exception classname",
+ "org.apache.hadoop.yarn.webapp.NotFoundException", classname);
+ }
+ }
+ }
+
+ @Test
+ public void testTaskIdInvalid() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+ String tid = "task_1234_0_0_d_0";
+ try {
+ r.path("ws").path("v1").path("history").path("mapreduce").path("jobs")
+ .path(jobId).path("tasks").path(tid).get(JSONObject.class);
+ fail("should have thrown exception on invalid uri");
+ } catch (UniformInterfaceException ue) {
+ ClientResponse response = ue.getResponse();
+ assertEquals(Status.NOT_FOUND, response.getClientResponseStatus());
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject msg = response.getEntity(JSONObject.class);
+ JSONObject exception = msg.getJSONObject("RemoteException");
+ assertEquals("incorrect number of elements", 3, exception.length());
+ String message = exception.getString("message");
+ String type = exception.getString("exception");
+ String classname = exception.getString("javaClassName");
+ WebServicesTestUtils.checkStringMatch("exception message",
+ "java.lang.Exception: Unknown task symbol: d", message);
+ WebServicesTestUtils.checkStringMatch("exception type",
+ "NotFoundException", type);
+ WebServicesTestUtils.checkStringMatch("exception classname",
+ "org.apache.hadoop.yarn.webapp.NotFoundException", classname);
+ }
+ }
+ }
+
+ @Test
+ public void testTaskIdInvalid2() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+ String tid = "task_1234_0_m_0";
+ try {
+ r.path("ws").path("v1").path("history").path("mapreduce").path("jobs")
+ .path(jobId).path("tasks").path(tid).get(JSONObject.class);
+ fail("should have thrown exception on invalid uri");
+ } catch (UniformInterfaceException ue) {
+ ClientResponse response = ue.getResponse();
+ assertEquals(Status.NOT_FOUND, response.getClientResponseStatus());
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject msg = response.getEntity(JSONObject.class);
+ JSONObject exception = msg.getJSONObject("RemoteException");
+ assertEquals("incorrect number of elements", 3, exception.length());
+ String message = exception.getString("message");
+ String type = exception.getString("exception");
+ String classname = exception.getString("javaClassName");
+ WebServicesTestUtils.checkStringMatch("exception message",
+ "java.lang.Exception: For input string: \"m\"", message);
+ WebServicesTestUtils.checkStringMatch("exception type",
+ "NotFoundException", type);
+ WebServicesTestUtils.checkStringMatch("exception classname",
+ "org.apache.hadoop.yarn.webapp.NotFoundException", classname);
+ }
+ }
+ }
+
+ @Test
+ public void testTaskIdInvalid3() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+ String tid = "task_1234_0_0_m";
+ try {
+ r.path("ws").path("v1").path("history").path("mapreduce").path("jobs")
+ .path(jobId).path("tasks").path(tid).get(JSONObject.class);
+ fail("should have thrown exception on invalid uri");
+ } catch (UniformInterfaceException ue) {
+ ClientResponse response = ue.getResponse();
+ assertEquals(Status.NOT_FOUND, response.getClientResponseStatus());
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject msg = response.getEntity(JSONObject.class);
+ JSONObject exception = msg.getJSONObject("RemoteException");
+ assertEquals("incorrect number of elements", 3, exception.length());
+ String message = exception.getString("message");
+ String type = exception.getString("exception");
+ String classname = exception.getString("javaClassName");
+ WebServicesTestUtils.checkStringMatch("exception message",
+ "java.lang.Exception: Error parsing task ID: task_1234_0_0_m",
+ message);
+ WebServicesTestUtils.checkStringMatch("exception type",
+ "NotFoundException", type);
+ WebServicesTestUtils.checkStringMatch("exception classname",
+ "org.apache.hadoop.yarn.webapp.NotFoundException", classname);
+ }
+ }
+ }
+
+ @Test
+ public void testTaskIdXML() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+ for (Task task : jobsMap.get(id).getTasks().values()) {
+
+ String tid = MRApps.toString(task.getID());
+ ClientResponse response = r.path("ws").path("v1").path("history")
+ .path("mapreduce").path("jobs").path(jobId).path("tasks").path(tid)
+ .accept(MediaType.APPLICATION_XML).get(ClientResponse.class);
+
+ assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
+ String xml = response.getEntity(String.class);
+ DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
+ DocumentBuilder db = dbf.newDocumentBuilder();
+ InputSource is = new InputSource();
+ is.setCharacterStream(new StringReader(xml));
+ Document dom = db.parse(is);
+ NodeList nodes = dom.getElementsByTagName("task");
+ for (int i = 0; i < nodes.getLength(); i++) {
+ Element element = (Element) nodes.item(i);
+ verifyHsSingleTaskXML(element, task);
+ }
+ }
+ }
+ }
+
+ public void verifyHsSingleTask(JSONObject info, Task task)
+ throws JSONException {
+ assertEquals("incorrect number of elements", 8, info.length());
+
+ verifyTaskGeneric(task, info.getString("id"), info.getString("state"),
+ info.getString("type"), info.getString("successfulAttempt"),
+ info.getLong("startTime"), info.getLong("finishTime"),
+ info.getLong("elapsedTime"), (float) info.getDouble("progress"));
+ }
+
+ public void verifyHsTask(JSONArray arr, Job job, String type)
+ throws JSONException {
+ for (Task task : job.getTasks().values()) {
+ TaskId id = task.getID();
+ String tid = MRApps.toString(id);
+ boolean found = false;
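+ // when no type filter is given, every task must appear in the output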
+ if (type == null || task.getType() == MRApps.taskType(type)) {
+
+ for (int i = 0; i < arr.length(); i++) {
+ JSONObject info = arr.getJSONObject(i);
+ if (tid.matches(info.getString("id"))) {
+ found = true;
+ verifyHsSingleTask(info, task);
+ }
+ }
+ assertTrue("task with id: " + tid + " not in web service output", found);
+ }
+ }
+ }
+
+ public void verifyTaskGeneric(Task task, String id, String state,
+ String type, String successfulAttempt, long startTime, long finishTime,
+ long elapsedTime, float progress) {
+
+ TaskId taskid = task.getID();
+ String tid = MRApps.toString(taskid);
+ TaskReport report = task.getReport();
+
+ WebServicesTestUtils.checkStringMatch("id", tid, id);
+ WebServicesTestUtils.checkStringMatch("type", task.getType().toString(),
+ type);
+ WebServicesTestUtils.checkStringMatch("state", report.getTaskState()
+ .toString(), state);
+ // not easily checked without duplicating logic, just make sure it's here
+ assertNotNull("successfulAttempt null", successfulAttempt);
+ assertEquals("startTime wrong", report.getStartTime(), startTime);
+ assertEquals("finishTime wrong", report.getFinishTime(), finishTime);
+ assertEquals("elapsedTime wrong", finishTime - startTime, elapsedTime);
+ assertEquals("progress wrong", report.getProgress() * 100, progress, 1e-3f);
+ }
+
+ public void verifyHsSingleTaskXML(Element element, Task task) {
+ verifyTaskGeneric(task, WebServicesTestUtils.getXmlString(element, "id"),
+ WebServicesTestUtils.getXmlString(element, "state"),
+ WebServicesTestUtils.getXmlString(element, "type"),
+ WebServicesTestUtils.getXmlString(element, "successfulAttempt"),
+ WebServicesTestUtils.getXmlLong(element, "startTime"),
+ WebServicesTestUtils.getXmlLong(element, "finishTime"),
+ WebServicesTestUtils.getXmlLong(element, "elapsedTime"),
+ WebServicesTestUtils.getXmlFloat(element, "progress"));
+ }
+
+ public void verifyHsTaskXML(NodeList nodes, Job job) {
+
+ assertEquals("incorrect number of elements", 2, nodes.getLength());
+
+ for (Task task : job.getTasks().values()) {
+ TaskId id = task.getID();
+ String tid = MRApps.toString(id);
+ boolean found = false;
+ for (int i = 0; i < nodes.getLength(); i++) {
+ Element element = (Element) nodes.item(i);
+
+ if (tid.matches(WebServicesTestUtils.getXmlString(element, "id"))) {
+ found = true;
+ verifyHsSingleTaskXML(element, task);
+ }
+ }
+ assertTrue("task with id: " + tid + " not in web service output", found);
+ }
+ }
+
+ @Test
+ public void testTaskIdCounters() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+ for (Task task : jobsMap.get(id).getTasks().values()) {
+
+ String tid = MRApps.toString(task.getID());
+ ClientResponse response = r.path("ws").path("v1").path("history")
+ .path("mapreduce").path("jobs").path(jobId).path("tasks").path(tid)
+ .path("counters").accept(MediaType.APPLICATION_JSON)
+ .get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ JSONObject info = json.getJSONObject("jobTaskCounters");
+ verifyHsJobTaskCounters(info, task);
+ }
+ }
+ }
+
+ @Test
+ public void testTaskIdCountersSlash() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+ for (Task task : jobsMap.get(id).getTasks().values()) {
+
+ String tid = MRApps.toString(task.getID());
+ ClientResponse response = r.path("ws").path("v1").path("history")
+ .path("mapreduce").path("jobs").path(jobId).path("tasks").path(tid)
+ .path("counters/").accept(MediaType.APPLICATION_JSON)
+ .get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ JSONObject info = json.getJSONObject("jobTaskCounters");
+ verifyHsJobTaskCounters(info, task);
+ }
+ }
+ }
+
+ @Test
+ public void testTaskIdCountersDefault() throws JSONException, Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+ for (Task task : jobsMap.get(id).getTasks().values()) {
+
+ String tid = MRApps.toString(task.getID());
+ ClientResponse response = r.path("ws").path("v1").path("history")
+ .path("mapreduce").path("jobs").path(jobId).path("tasks").path(tid)
+ .path("counters").get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
+ JSONObject json = response.getEntity(JSONObject.class);
+ assertEquals("incorrect number of elements", 1, json.length());
+ JSONObject info = json.getJSONObject("jobTaskCounters");
+ verifyHsJobTaskCounters(info, task);
+ }
+ }
+ }
+
+ @Test
+ public void testJobTaskCountersXML() throws Exception {
+ WebResource r = resource();
+ Map<JobId, Job> jobsMap = appContext.getAllJobs();
+ for (JobId id : jobsMap.keySet()) {
+ String jobId = MRApps.toString(id);
+ for (Task task : jobsMap.get(id).getTasks().values()) {
+
+ String tid = MRApps.toString(task.getID());
+ ClientResponse response = r.path("ws").path("v1").path("history")
+ .path("mapreduce").path("jobs").path(jobId).path("tasks").path(tid)
+ .path("counters").accept(MediaType.APPLICATION_XML)
+ .get(ClientResponse.class);
+ assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
+ String xml = response.getEntity(String.class);
+ DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
+ DocumentBuilder db = dbf.newDocumentBuilder();
+ InputSource is = new InputSource();
+ is.setCharacterStream(new StringReader(xml));
+ Document dom = db.parse(is);
+ NodeList info = dom.getElementsByTagName("jobTaskCounters");
+ verifyHsTaskCountersXML(info, task);
+ }
+ }
+ }
+
+ public void verifyHsJobTaskCounters(JSONObject info, Task task)
+ throws JSONException {
+
+ assertEquals("incorrect number of elements", 2, info.length());
+
+ WebServicesTestUtils.checkStringMatch("id", MRApps.toString(task.getID()),
+ info.getString("id"));
+ // just do a simple verification that the fields are present - not that
+ // the data in them is correct
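+ // e.g. {"taskCounterGroup": [{"counterGroupName": "...",
+ // "counter": [{"name": "...", "value": 0}]}]} (illustrative)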
+ JSONArray counterGroups = info.getJSONArray("taskCounterGroup");
+ for (int i = 0; i < counterGroups.length(); i++) {
+ JSONObject counterGroup = counterGroups.getJSONObject(i);
+ String name = counterGroup.getString("counterGroupName");
+ assertTrue("name not set", (name != null && !name.isEmpty()));
+ JSONArray counters = counterGroup.getJSONArray("counter");
+ for (int j = 0; j < counters.length(); j++) {
+ JSONObject counter = counters.getJSONObject(j);
+ String counterName = counter.getString("name");
+ assertTrue("name not set",
+ (counterName != null && !counterName.isEmpty()));
+ long value = counter.getLong("value");
+ assertTrue("value >= 0", value >= 0);
+ }
+ }
+ }
+
+ public void verifyHsTaskCountersXML(NodeList nodes, Task task) {
+
+ for (int i = 0; i < nodes.getLength(); i++) {
+
+ Element element = (Element) nodes.item(i);
+ WebServicesTestUtils.checkStringMatch("id",
+ MRApps.toString(task.getID()),
+ WebServicesTestUtils.getXmlString(element, "id"));
+ // just do a simple verification that the fields are present - not that
+ // the data in them is correct
+ NodeList groups = element.getElementsByTagName("taskCounterGroup");
+
+ for (int j = 0; j < groups.getLength(); j++) {
+ Element counters = (Element) groups.item(j);
+ assertNotNull("should have counters in the web service info", counters);
+ String name = WebServicesTestUtils.getXmlString(counters,
+ "counterGroupName");
+ assertTrue("name not set", (name != null && !name.isEmpty()));
+ NodeList counterArr = counters.getElementsByTagName("counter");
+ for (int z = 0; z < counterArr.getLength(); z++) {
+ Element counter = (Element) counterArr.item(z);
+ String counterName = WebServicesTestUtils.getXmlString(counter,
+ "name");
+ assertTrue("counter name not set",
+ (counterName != null && !counterName.isEmpty()));
+
+ long value = WebServicesTestUtils.getXmlLong(counter, "value");
+ assertTrue("value not >= 0", value >= 0);
+
+ }
+ }
+ }
+ }
+
+}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/VerifyJobsUtils.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/VerifyJobsUtils.java
new file mode 100644
index 00000000000..1c99af93382
--- /dev/null
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/VerifyJobsUtils.java
@@ -0,0 +1,133 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.mapreduce.v2.hs.webapp;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import java.util.List;
+
+import org.apache.hadoop.mapreduce.v2.api.records.JobReport;
+import org.apache.hadoop.mapreduce.v2.app.job.Job;
+import org.apache.hadoop.mapreduce.v2.util.MRApps;
+import org.apache.hadoop.yarn.webapp.WebServicesTestUtils;
+import org.codehaus.jettison.json.JSONException;
+import org.codehaus.jettison.json.JSONObject;
+
+public class VerifyJobsUtils {
+
+ public static void verifyHsJob(JSONObject info, Job job) throws JSONException {
+
+ // this is 23 instead of 24 because acls are not checked, since we are
+ // using a mock job instead of CompletedJob
+ assertEquals("incorrect number of elements", 23, info.length());
+
+ // everyone access fields
+ verifyHsJobGeneric(job, info.getString("id"), info.getString("user"),
+ info.getString("name"), info.getString("state"),
+ info.getString("queue"), info.getLong("startTime"),
+ info.getLong("finishTime"), info.getInt("mapsTotal"),
+ info.getInt("mapsCompleted"), info.getInt("reducesTotal"),
+ info.getInt("reducesCompleted"));
+
+ String diagnostics = "";
+ if (info.has("diagnostics")) {
+ diagnostics = info.getString("diagnostics");
+ }
+
+ // restricted access fields - if security and acls set
+ verifyHsJobGenericSecure(job, info.getBoolean("uberized"), diagnostics,
+ info.getLong("avgMapTime"), info.getLong("avgReduceTime"),
+ info.getLong("avgShuffleTime"), info.getLong("avgMergeTime"),
+ info.getInt("failedReduceAttempts"),
+ info.getInt("killedReduceAttempts"),
+ info.getInt("successfulReduceAttempts"),
+ info.getInt("failedMapAttempts"), info.getInt("killedMapAttempts"),
+ info.getInt("successfulMapAttempts"));
+
+ // acls are not checked here, since we are using a mock job instead of
+ // CompletedJob
+ }
+
+ public static void verifyHsJobGeneric(Job job, String id, String user,
+ String name, String state, String queue, long startTime, long finishTime,
+ int mapsTotal, int mapsCompleted, int reducesTotal, int reducesCompleted) {
+ JobReport report = job.getReport();
+
+ WebServicesTestUtils.checkStringMatch("id", MRApps.toString(job.getID()),
+ id);
+ WebServicesTestUtils.checkStringMatch("user", job.getUserName().toString(),
+ user);
+ WebServicesTestUtils.checkStringMatch("name", job.getName(), name);
+ WebServicesTestUtils.checkStringMatch("state", job.getState().toString(),
+ state);
+ WebServicesTestUtils.checkStringMatch("queue", job.getQueueName(), queue);
+
+ assertEquals("startTime incorrect", report.getStartTime(), startTime);
+ assertEquals("finishTime incorrect", report.getFinishTime(), finishTime);
+
+ assertEquals("mapsTotal incorrect", job.getTotalMaps(), mapsTotal);
+ assertEquals("mapsCompleted incorrect", job.getCompletedMaps(),
+ mapsCompleted);
+ assertEquals("reducesTotal incorrect", job.getTotalReduces(), reducesTotal);
+ assertEquals("reducesCompleted incorrect", job.getCompletedReduces(),
+ reducesCompleted);
+ }
+
+ public static void verifyHsJobGenericSecure(Job job, Boolean uberized,
+ String diagnostics, long avgMapTime, long avgReduceTime,
+ long avgShuffleTime, long avgMergeTime, int failedReduceAttempts,
+ int killedReduceAttempts, int successfulReduceAttempts,
+ int failedMapAttempts, int killedMapAttempts, int successfulMapAttempts) {
+
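+ // the web service is expected to concatenate all diagnostics into one
+ // string, so rebuild the same concatenation from the job for comparison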
+ String diagString = "";
+ List<String> diagList = job.getDiagnostics();
+ if (diagList != null && !diagList.isEmpty()) {
+ StringBuffer b = new StringBuffer();
+ for (String diag : diagList) {
+ b.append(diag);
+ }
+ diagString = b.toString();
+ }
+ WebServicesTestUtils.checkStringMatch("diagnostics", diagString,
+ diagnostics);
+
+ assertEquals("isUber incorrect", job.isUber(), uberized);
+
+ // unfortunately the following fields are all calculated in JobInfo
+ // so not easily accessible without doing all the calculations again.
+ // For now just make sure they are present.
+
+ assertTrue("failedReduceAttempts not >= 0", failedReduceAttempts >= 0);
+ assertTrue("killedReduceAttempts not >= 0", killedReduceAttempts >= 0);
+ assertTrue("successfulReduceAttempts not >= 0",
+ successfulReduceAttempts >= 0);
+
+ assertTrue("failedMapAttempts not >= 0", failedMapAttempts >= 0);
+ assertTrue("killedMapAttempts not >= 0", killedMapAttempts >= 0);
+ assertTrue("successfulMapAttempts not >= 0", successfulMapAttempts >= 0);
+
+ assertTrue("avgMapTime not >= 0", avgMapTime >= 0);
+ assertTrue("avgReduceTime not >= 0", avgReduceTime >= 0);
+ assertTrue("avgShuffleTime not >= 0", avgShuffleTime >= 0);
+ assertTrue("avgMergeTime not >= 0", avgMergeTime >= 0);
+
+ }
+
+}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/pom.xml b/hadoop-mapreduce-project/hadoop-mapreduce-client/pom.xml
index 8eaef23c7e4..63113611b9e 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/pom.xml
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/pom.xml
@@ -30,121 +30,14 @@
-
-
-
-
-
- org.apache.hadoop
- hadoop-yarn
- ${project.version}
- pom
-
-
- org.apache.hadoop
- hadoop-yarn-api
- ${project.version}
-
-
- org.apache.hadoop
- hadoop-yarn-server
- ${project.version}
- pom
-
-
- org.apache.hadoop
- hadoop-yarn-server-web-proxy
- ${project.version}
-
-
- org.apache.hadoop
- hadoop-yarn-server-common
- ${project.version}
-
-
-
- org.apache.hadoop
-
- hadoop-hdfs
- ${project.version}
-
-
- org.apache.hadoop
- hadoop-yarn-common
- ${project.version}
-
-
- org.apache.hadoop
- hadoop-yarn-common
- ${project.version}
- test-jar
-
-
- org.apache.hadoop
- hadoop-yarn-server-tests
- ${project.version}
- test-jar
-
-
- org.apache.hadoop
- hadoop-yarn-server-nodemanager
- ${project.version}
-
-
- org.apache.hadoop
- hadoop-yarn-server-resourcemanager
- ${project.version}
-
-
- org.apache.hadoop
- hadoop-yarn-server-resourcemanager
- ${project.version}
- test-jar
-
-
- org.apache.hadoop
- hadoop-mapreduce-client-core
- ${project.version}
-
-
- org.apache.hadoop
- hadoop-mapreduce-client-common
- ${project.version}
-
-
- org.apache.hadoop
- hadoop-mapreduce-client-app
- ${project.version}
-
-
- org.apache.hadoop
- hadoop-mapreduce-client-app
- ${project.version}
- test-jar
-
-
- org.apache.hadoop
- hadoop-mapreduce-client-hs
- ${project.version}
-
-
- org.apache.hadoop
- hadoop-mapreduce-client-shuffle
- ${project.version}
-
-
-
-
com.google.protobuf
protobuf-java
- 2.4.0a
org.apache.avro
avro
- 1.5.3
org.mortbay.jetty
@@ -175,7 +68,6 @@
org.apache.hadoop
hadoop-common
- ${project.version}
provided
@@ -204,28 +96,23 @@
org.slf4j
slf4j-api
- 1.6.1
org.slf4j
slf4j-log4j12
- 1.6.1
org.apache.hadoop
hadoop-annotations
- ${project.version}
org.mockito
mockito-all
- 1.8.5
test
org.apache.hadoop
hadoop-common
- ${project.version}
test-jar
test
@@ -233,27 +120,22 @@
org.apache.hadoop
hadoop-hdfs
- ${project.version}
com.google.inject.extensions
guice-servlet
- 3.0
junit
junit
- 4.8.2
org.jboss.netty
netty
- 3.2.3.Final
com.cenqua.clover
clover
- 3.0.2
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/pom.xml b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/pom.xml
index a56d2b6a547..a21cd11ca46 100644
--- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/pom.xml
+++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/pom.xml
@@ -33,43 +33,36 @@
org.apache.hadoop
hadoop-yarn-api
- ${project.version}
org.apache.hadoop
hadoop-yarn-common
- ${project.version}
org.apache.hadoop
hadoop-yarn-server-nodemanager
test
- ${project.version}
org.apache.hadoop
hadoop-yarn-server-resourcemanager
test
- ${project.version}
org.apache.hadoop
hadoop-yarn-server-common
test
- ${project.version}
org.apache.hadoop
hadoop-mapreduce-client-core
test
- ${project.version}
org.apache.hadoop
hadoop-yarn-server-tests
test-jar
test
- ${project.version}
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/pom.xml b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/pom.xml
index 9376b3e918d..0e193896228 100644
--- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/pom.xml
+++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/pom.xml
@@ -33,18 +33,7 @@
log4j
log4j
- 1.2.12
-
-
- com.sun.jdmk
- jmxtools
-
-
- com.sun.jmx
- jmxri
-
-
-
+
org.apache.hadoop
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/ResponseInfo.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/ResponseInfo.java
index 144a392a929..84c3b650e21 100644
--- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/ResponseInfo.java
+++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/ResponseInfo.java
@@ -80,6 +80,10 @@ public class ResponseInfo implements Iterable<ResponseInfo.Item> {
return this;
}
+ public void clear() {
+ items.clear();
+ }
+
@Override
public Iterator<Item> iterator() {
return items.iterator();
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/webapp/WebServicesTestUtils.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/webapp/WebServicesTestUtils.java
index 2cb67724b99..abcca519132 100644
--- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/webapp/WebServicesTestUtils.java
+++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/webapp/WebServicesTestUtils.java
@@ -50,6 +50,9 @@ public class WebServicesTestUtils {
public static String getXmlString(Element element, String name) {
NodeList id = element.getElementsByTagName(name);
Element line = (Element) id.item(0);
+ if (line == null) {
+ return null;
+ }
Node first = line.getFirstChild();
// handle empty
if (first == null) {
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/resources/yarn-default.xml b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/resources/yarn-default.xml
index f15250a7386..fe6338feab7 100644
--- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/resources/yarn-default.xml
+++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/resources/yarn-default.xml
@@ -79,7 +79,6 @@
The Kerberos principal for the resource manager.
yarn.resourcemanager.principal
- rm/sightbusy-lx@LOCALHOST
@@ -430,7 +429,7 @@
The kerberos principal for the node manager.
yarn.nodemanager.principal
- nm/sightbusy-lx@LOCALHOST
+
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml
index 671e8670b51..859a1464c7c 100644
--- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml
+++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml
@@ -45,7 +45,6 @@
org.codehaus.mojo
make-maven-plugin
- 1.0-beta-1
compile
@@ -102,7 +101,6 @@
org.codehaus.mojo
exec-maven-plugin
- 1.2
compile
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/MockApp.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/MockApp.java
index 7a9dac69483..e69de29bb2d 100644
--- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/MockApp.java
+++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/MockApp.java
@@ -1,83 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.server.nodemanager;
-
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.when;
-
-import java.util.HashMap;
-import java.util.Map;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.api.records.ContainerId;
-import org.apache.hadoop.yarn.factories.RecordFactory;
-import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
-import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.Application;
-import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.ApplicationEvent;
-import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.ApplicationState;
-import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container;
-import org.apache.hadoop.yarn.util.BuilderUtils;
-
-public class MockApp implements Application {
-
- final String user;
- final ApplicationId appId;
-  Map<ContainerId, Container> containers = new HashMap<ContainerId, Container>();
- ApplicationState appState;
- Application app;
-
- public MockApp(int uniqId) {
- this("mockUser", 1234, uniqId);
- }
-
- public MockApp(String user, long clusterTimeStamp, int uniqId) {
- super();
- this.user = user;
- // Add an application and the corresponding containers
- RecordFactory recordFactory = RecordFactoryProvider
- .getRecordFactory(new Configuration());
- this.appId = BuilderUtils.newApplicationId(recordFactory, clusterTimeStamp,
- uniqId);
- appState = ApplicationState.NEW;
- }
-
- public void setState(ApplicationState state) {
- this.appState = state;
- }
-
- public String getUser() {
- return user;
- }
-
-  public Map<ContainerId, Container> getContainers() {
- return containers;
- }
-
- public ApplicationId getAppId() {
- return appId;
- }
-
- public ApplicationState getApplicationState() {
- return appState;
- }
-
- public void handle(ApplicationEvent event) {}
-
-}
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/MockContainer.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/MockContainer.java
index eacdb1c4241..e69de29bb2d 100644
--- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/MockContainer.java
+++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/MockContainer.java
@@ -1,120 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.yarn.server.nodemanager;
-
-import java.util.HashMap;
-import java.util.Map;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.security.Credentials;
-import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.api.records.ContainerId;
-import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
-import org.apache.hadoop.yarn.api.records.ContainerStatus;
-import org.apache.hadoop.yarn.event.Dispatcher;
-import org.apache.hadoop.yarn.factories.RecordFactory;
-import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
-import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container;
-import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerEvent;
-import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerState;
-import org.apache.hadoop.yarn.util.BuilderUtils;
-
-public class MockContainer implements Container {
-
- private ContainerId id;
- private ContainerState state;
- private String user;
- private ContainerLaunchContext launchContext;
-  private final Map<Path, String> resource = new HashMap<Path, String>();
- private RecordFactory recordFactory;
-
- public MockContainer(ApplicationAttemptId appAttemptId,
- Dispatcher dispatcher, Configuration conf, String user,
- ApplicationId appId, int uniqId) {
-
- this.user = user;
- this.recordFactory = RecordFactoryProvider.getRecordFactory(conf);
- this.id = BuilderUtils.newContainerId(recordFactory, appId, appAttemptId,
- uniqId);
- this.launchContext = recordFactory
- .newRecordInstance(ContainerLaunchContext.class);
- launchContext.setContainerId(id);
- launchContext.setUser(user);
- this.state = ContainerState.NEW;
-
- }
-
- public void setState(ContainerState state) {
- this.state = state;
- }
-
- @Override
- public ContainerId getContainerID() {
- return id;
- }
-
- @Override
- public String getUser() {
- return user;
- }
-
- @Override
- public ContainerState getContainerState() {
- return state;
- }
-
- @Override
- public ContainerLaunchContext getLaunchContext() {
- return launchContext;
- }
-
- @Override
- public Credentials getCredentials() {
- return null;
- }
-
- @Override
-  public Map<Path, String> getLocalizedResources() {
- return resource;
- }
-
- @Override
- public ContainerStatus cloneAndGetContainerStatus() {
- ContainerStatus containerStatus = recordFactory
- .newRecordInstance(ContainerStatus.class);
- containerStatus
- .setState(org.apache.hadoop.yarn.api.records.ContainerState.RUNNING);
- containerStatus.setContainerId(this.launchContext.getContainerId());
- containerStatus.setDiagnostics("testing");
- containerStatus.setExitStatus(0);
- return containerStatus;
- }
-
- @Override
- public String toString() {
- return "";
- }
-
- @Override
- public void handle(ContainerEvent event) {
- }
-
-}
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/MockApp.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/MockApp.java
new file mode 100644
index 00000000000..93fca8f5ff7
--- /dev/null
+++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/MockApp.java
@@ -0,0 +1,80 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.nodemanager.webapp;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.api.records.ContainerId;
+import org.apache.hadoop.yarn.factories.RecordFactory;
+import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
+import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.Application;
+import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.ApplicationEvent;
+import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.ApplicationState;
+import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container;
+import org.apache.hadoop.yarn.util.BuilderUtils;
+
+public class MockApp implements Application {
+
+ final String user;
+ final ApplicationId appId;
+  Map<ContainerId, Container> containers = new HashMap<ContainerId, Container>();
+ ApplicationState appState;
+ Application app;
+
+ public MockApp(int uniqId) {
+ this("mockUser", 1234, uniqId);
+ }
+
+ public MockApp(String user, long clusterTimeStamp, int uniqId) {
+ super();
+ this.user = user;
+ // Add an application and the corresponding containers
+ RecordFactory recordFactory = RecordFactoryProvider
+ .getRecordFactory(new Configuration());
+ this.appId = BuilderUtils.newApplicationId(recordFactory, clusterTimeStamp,
+ uniqId);
+ appState = ApplicationState.NEW;
+ }
+
+ public void setState(ApplicationState state) {
+ this.appState = state;
+ }
+
+ public String getUser() {
+ return user;
+ }
+
+  public Map<ContainerId, Container> getContainers() {
+ return containers;
+ }
+
+ public ApplicationId getAppId() {
+ return appId;
+ }
+
+ public ApplicationState getApplicationState() {
+ return appState;
+ }
+
+ public void handle(ApplicationEvent event) {}
+
+}
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/MockContainer.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/MockContainer.java
new file mode 100644
index 00000000000..1b2e0653d06
--- /dev/null
+++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/MockContainer.java
@@ -0,0 +1,120 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.nodemanager.webapp;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.security.Credentials;
+import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.api.records.ContainerId;
+import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
+import org.apache.hadoop.yarn.api.records.ContainerStatus;
+import org.apache.hadoop.yarn.event.Dispatcher;
+import org.apache.hadoop.yarn.factories.RecordFactory;
+import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
+import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container;
+import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerEvent;
+import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerState;
+import org.apache.hadoop.yarn.util.BuilderUtils;
+
+public class MockContainer implements Container {
+
+ private ContainerId id;
+ private ContainerState state;
+ private String user;
+ private ContainerLaunchContext launchContext;
+  private final Map<Path, String> resource = new HashMap<Path, String>();
+ private RecordFactory recordFactory;
+
+ public MockContainer(ApplicationAttemptId appAttemptId,
+ Dispatcher dispatcher, Configuration conf, String user,
+ ApplicationId appId, int uniqId) {
+
+ this.user = user;
+ this.recordFactory = RecordFactoryProvider.getRecordFactory(conf);
+ this.id = BuilderUtils.newContainerId(recordFactory, appId, appAttemptId,
+ uniqId);
+ this.launchContext = recordFactory
+ .newRecordInstance(ContainerLaunchContext.class);
+ launchContext.setContainerId(id);
+ launchContext.setUser(user);
+ this.state = ContainerState.NEW;
+
+ }
+
+ public void setState(ContainerState state) {
+ this.state = state;
+ }
+
+ @Override
+ public ContainerId getContainerID() {
+ return id;
+ }
+
+ @Override
+ public String getUser() {
+ return user;
+ }
+
+ @Override
+ public ContainerState getContainerState() {
+ return state;
+ }
+
+ @Override
+ public ContainerLaunchContext getLaunchContext() {
+ return launchContext;
+ }
+
+ @Override
+ public Credentials getCredentials() {
+ return null;
+ }
+
+ @Override
+  public Map<Path, String> getLocalizedResources() {
+ return resource;
+ }
+
+ @Override
+ public ContainerStatus cloneAndGetContainerStatus() {
+ ContainerStatus containerStatus = recordFactory
+ .newRecordInstance(ContainerStatus.class);
+ containerStatus
+ .setState(org.apache.hadoop.yarn.api.records.ContainerState.RUNNING);
+ containerStatus.setContainerId(this.launchContext.getContainerId());
+ containerStatus.setDiagnostics("testing");
+ containerStatus.setExitStatus(0);
+ return containerStatus;
+ }
+
+ @Override
+ public String toString() {
+ return "";
+ }
+
+ @Override
+ public void handle(ContainerEvent event) {
+ }
+
+}
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServicesApps.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServicesApps.java
index f61fdbd4761..fce38d2b58d 100644
--- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServicesApps.java
+++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServicesApps.java
@@ -38,8 +38,6 @@ import org.apache.hadoop.yarn.event.AsyncDispatcher;
import org.apache.hadoop.yarn.event.Dispatcher;
import org.apache.hadoop.yarn.server.nodemanager.Context;
import org.apache.hadoop.yarn.server.nodemanager.LocalDirsHandlerService;
-import org.apache.hadoop.yarn.server.nodemanager.MockApp;
-import org.apache.hadoop.yarn.server.nodemanager.MockContainer;
import org.apache.hadoop.yarn.server.nodemanager.NodeHealthCheckerService;
import org.apache.hadoop.yarn.server.nodemanager.NodeManager;
import org.apache.hadoop.yarn.server.nodemanager.ResourceView;
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServicesContainers.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServicesContainers.java
index 11ec3401015..a466fa97941 100644
--- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServicesContainers.java
+++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/webapp/TestNMWebServicesContainers.java
@@ -39,8 +39,6 @@ import org.apache.hadoop.yarn.event.AsyncDispatcher;
import org.apache.hadoop.yarn.event.Dispatcher;
import org.apache.hadoop.yarn.server.nodemanager.Context;
import org.apache.hadoop.yarn.server.nodemanager.LocalDirsHandlerService;
-import org.apache.hadoop.yarn.server.nodemanager.MockApp;
-import org.apache.hadoop.yarn.server.nodemanager.MockContainer;
import org.apache.hadoop.yarn.server.nodemanager.NodeHealthCheckerService;
import org.apache.hadoop.yarn.server.nodemanager.NodeManager;
import org.apache.hadoop.yarn.server.nodemanager.ResourceView;
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/pom.xml b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/pom.xml
index 657989796a1..106c0a32475 100644
--- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/pom.xml
+++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/pom.xml
@@ -129,7 +129,6 @@
         <groupId>org.codehaus.mojo</groupId>
         <artifactId>exec-maven-plugin</artifactId>
-        <version>1.2</version>
             <phase>compile</phase>
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/CapacitySchedulerPage.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/CapacitySchedulerPage.java
index 2230acd82b3..467b4d33de0 100644
--- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/CapacitySchedulerPage.java
+++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/CapacitySchedulerPage.java
@@ -20,18 +20,22 @@ package org.apache.hadoop.yarn.server.resourcemanager.webapp;
import static org.apache.hadoop.yarn.util.StringHelper.join;
+import java.util.ArrayList;
+
import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CSQueue;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacityScheduler;
-import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.ParentQueue;
import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.CapacitySchedulerInfo;
+import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.CapacitySchedulerLeafQueueInfo;
import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.CapacitySchedulerQueueInfo;
+import org.apache.hadoop.yarn.webapp.ResponseInfo;
import org.apache.hadoop.yarn.webapp.SubView;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.DIV;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.LI;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.UL;
import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
+import org.apache.hadoop.yarn.webapp.view.InfoBlock;
import com.google.inject.Inject;
import com.google.inject.servlet.RequestScoped;
@@ -45,23 +49,61 @@ class CapacitySchedulerPage extends RmView {
static final float EPSILON = 1e-8f;
@RequestScoped
- static class Parent {
- CSQueue queue;
+ static class CSQInfo {
+ CapacitySchedulerInfo csinfo;
+ CapacitySchedulerQueueInfo qinfo;
+ }
+
+ static class LeafQueueInfoBlock extends HtmlBlock {
+ final CapacitySchedulerLeafQueueInfo lqinfo;
+
+ @Inject LeafQueueInfoBlock(ViewContext ctx, CSQInfo info) {
+ super(ctx);
+ lqinfo = (CapacitySchedulerLeafQueueInfo) info.qinfo;
+ }
+
+ @Override
+ protected void render(Block html) {
+ ResponseInfo ri = info("\'" + lqinfo.getQueuePath().substring(5) + "\' Queue Status").
+ _("Queue State:", lqinfo.getQueueState()).
+ _("Capacity:", percent(lqinfo.getCapacity() / 100)).
+ _("Max Capacity:", percent(lqinfo.getMaxCapacity() / 100)).
+ _("Used Capacity:", percent(lqinfo.getUsedCapacity() / 100)).
+ _("Absolute Capacity:", percent(lqinfo.getAbsoluteCapacity() / 100)).
+ _("Absolute Max Capacity:", percent(lqinfo.getAbsoluteMaxCapacity() / 100)).
+ _("Utilization:", percent(lqinfo.getUtilization() / 100)).
+ _("Used Resources:", lqinfo.getUsedResources().toString()).
+ _("Num Active Applications:", Integer.toString(lqinfo.getNumActiveApplications())).
+ _("Num Pending Applications:", Integer.toString(lqinfo.getNumPendingApplications())).
+ _("Num Containers:", Integer.toString(lqinfo.getNumContainers())).
+ _("Max Applications:", Integer.toString(lqinfo.getMaxApplications())).
+ _("Max Applications Per User:", Integer.toString(lqinfo.getMaxApplicationsPerUser())).
+ _("Max Active Applications:", Integer.toString(lqinfo.getMaxActiveApplications())).
+ _("Max Active Applications Per User:", Integer.toString(lqinfo.getMaxActiveApplicationsPerUser())).
+ _("User Limit:", Integer.toString(lqinfo.getUserLimit()) + "%").
+ _("User Limit Factor:", String.format("%.1f", lqinfo.getUserLimitFactor()));
+
+ html._(InfoBlock.class);
+
+ // clear the info contents so this queue's info doesn't accumulate into another queue's info
+ ri.clear();
+ }
}
public static class QueueBlock extends HtmlBlock {
- final Parent parent;
- final CapacitySchedulerInfo sinfo;
+ final CSQInfo csqinfo;
- @Inject QueueBlock(Parent parent) {
- this.parent = parent;
- sinfo = new CapacitySchedulerInfo(parent.queue);
+ @Inject QueueBlock(CSQInfo info) {
+ csqinfo = info;
}
@Override
public void render(Block html) {
+      ArrayList<CapacitySchedulerQueueInfo> subQueues =
+ (csqinfo.qinfo == null) ? csqinfo.csinfo.getSubQueues()
+ : csqinfo.qinfo.getSubQueues();
UL ul = html.ul();
- for (CapacitySchedulerQueueInfo info : sinfo.getSubQueues()) {
+ for (CapacitySchedulerQueueInfo info : subQueues) {
float used = info.getUsedCapacity() / 100;
float set = info.getCapacity() / 100;
float delta = Math.abs(set - used) + 0.001f;
@@ -76,11 +118,12 @@ class CapacitySchedulerPage extends RmView {
used > set ? OVER : UNDER, ';',
used > set ? left(set/max) : left(used/max)))._('.')._().
span(".q", info.getQueuePath().substring(5))._();
- if (info.getQueue() instanceof ParentQueue) {
- // this could be optimized better
- parent.queue = info.getQueue();
- li.
- _(QueueBlock.class);
+
+ csqinfo.qinfo = info;
+ if (info.getSubQueues() == null) {
+ li.ul("#lq").li()._(LeafQueueInfoBlock.class)._()._();
+ } else {
+ li._(QueueBlock.class);
}
li._();
}
@@ -91,11 +134,11 @@ class CapacitySchedulerPage extends RmView {
static class QueuesBlock extends HtmlBlock {
final CapacityScheduler cs;
- final Parent parent;
+ final CSQInfo csqinfo;
- @Inject QueuesBlock(ResourceManager rm, Parent parent) {
+ @Inject QueuesBlock(ResourceManager rm, CSQInfo info) {
cs = (CapacityScheduler) rm.getResourceScheduler();
- this.parent = parent;
+ csqinfo = info;
}
@Override
@@ -115,8 +158,10 @@ class CapacitySchedulerPage extends RmView {
span(".q", "default")._()._();
} else {
CSQueue root = cs.getRootQueue();
- parent.queue = root;
- CapacitySchedulerInfo sinfo = new CapacitySchedulerInfo(parent.queue);
+ CapacitySchedulerInfo sinfo = new CapacitySchedulerInfo(root);
+ csqinfo.csinfo = sinfo;
+ csqinfo.qinfo = null;
+
float used = sinfo.getUsedCapacity() / 100;
float set = sinfo.getCapacity() / 100;
float delta = Math.abs(set - used) + 0.001f;
@@ -144,13 +189,16 @@ class CapacitySchedulerPage extends RmView {
"#cs ul { list-style: none }",
"#cs a { font-weight: normal; margin: 2px; position: relative }",
"#cs a span { font-weight: normal; font-size: 80% }",
- "#cs-wrapper .ui-widget-header { padding: 0.2em 0.5em }")._().
+ "#cs-wrapper .ui-widget-header { padding: 0.2em 0.5em }",
+ "table.info tr th {width: 50%}")._(). // to center info table
script("/static/jt/jquery.jstree.js").
script().$type("text/javascript").
_("$(function() {",
" $('#cs a span').addClass('ui-corner-all').css('position', 'absolute');",
" $('#cs').bind('loaded.jstree', function (e, data) {",
- " data.inst.open_all(); }).",
+ " data.inst.open_all();",
+ " data.inst.close_node('#lq', true);",
+ " }).",
" jstree({",
" core: { animation: 188, html_titles: true },",
" plugins: ['themeroller', 'html_data', 'ui'],",
@@ -160,8 +208,9 @@ class CapacitySchedulerPage extends RmView {
" });",
" $('#cs').bind('select_node.jstree', function(e, data) {",
" var q = $('.q', data.rslt.obj).first().text();",
- " if (q == 'root') q = '';",
- " $('#apps').dataTable().fnFilter(q, 3);",
+ " if (q == 'root') q = '';",
+ " else q = '^' + q.substr(q.lastIndexOf('.') + 1) + '$';",
+ " $('#apps').dataTable().fnFilter(q, 3, true);",
" });",
" $('#cs').show();",
"});")._();
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/CapacitySchedulerInfo.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/CapacitySchedulerInfo.java
index 921b5ea9e18..f0a34d405ad 100644
--- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/CapacitySchedulerInfo.java
+++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/CapacitySchedulerInfo.java
@@ -26,9 +26,8 @@ import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlTransient;
import javax.xml.bind.annotation.XmlType;
-import org.apache.hadoop.yarn.api.records.QueueState;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CSQueue;
-import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.ParentQueue;
+import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.LeafQueue;
@XmlRootElement(name = "capacityScheduler")
@XmlType(name = "capacityScheduler")
@@ -83,21 +82,11 @@ public class CapacitySchedulerInfo extends SchedulerInfo {
CSQueue parentQueue = parent;
     ArrayList<CapacitySchedulerQueueInfo> queuesInfo = new ArrayList<CapacitySchedulerQueueInfo>();
for (CSQueue queue : parentQueue.getChildQueues()) {
- float usedCapacity = queue.getUsedCapacity() * 100;
- float capacity = queue.getCapacity() * 100;
- String queueName = queue.getQueueName();
- String queuePath = queue.getQueuePath();
- float max = queue.getMaximumCapacity();
- if (max < EPSILON || max > 1f)
- max = 1f;
- float maxCapacity = max * 100;
- QueueState state = queue.getState();
- CapacitySchedulerQueueInfo info = new CapacitySchedulerQueueInfo(
- capacity, usedCapacity, maxCapacity, queueName, state, queuePath);
-
- if (queue instanceof ParentQueue) {
- info.isParent = true;
- info.queue = queue;
+ CapacitySchedulerQueueInfo info;
+ if (queue instanceof LeafQueue) {
+ info = new CapacitySchedulerLeafQueueInfo((LeafQueue)queue);
+ } else {
+ info = new CapacitySchedulerQueueInfo(queue);
info.subQueues = getQueues(queue);
}
queuesInfo.add(info);
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/CapacitySchedulerLeafQueueInfo.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/CapacitySchedulerLeafQueueInfo.java
new file mode 100644
index 00000000000..5b2624ee984
--- /dev/null
+++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/CapacitySchedulerLeafQueueInfo.java
@@ -0,0 +1,91 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.server.resourcemanager.webapp.dao;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlRootElement;
+
+import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.LeafQueue;
+
+@XmlRootElement
+@XmlAccessorType(XmlAccessType.FIELD)
+public class CapacitySchedulerLeafQueueInfo extends CapacitySchedulerQueueInfo {
+
+ protected int numActiveApplications;
+ protected int numPendingApplications;
+ protected int numContainers;
+ protected int maxApplications;
+ protected int maxApplicationsPerUser;
+ protected int maxActiveApplications;
+ protected int maxActiveApplicationsPerUser;
+ protected int userLimit;
+ protected float userLimitFactor;
+
+ CapacitySchedulerLeafQueueInfo() {
+ };
+
+ CapacitySchedulerLeafQueueInfo(LeafQueue q) {
+ super(q);
+ numActiveApplications = q.getNumActiveApplications();
+ numPendingApplications = q.getNumPendingApplications();
+ numContainers = q.getNumContainers();
+ maxApplications = q.getMaxApplications();
+ maxApplicationsPerUser = q.getMaxApplicationsPerUser();
+ maxActiveApplications = q.getMaximumActiveApplications();
+ maxActiveApplicationsPerUser = q.getMaximumActiveApplicationsPerUser();
+ userLimit = q.getUserLimit();
+ userLimitFactor = q.getUserLimitFactor();
+ }
+
+ public int getNumActiveApplications() {
+ return numActiveApplications;
+ }
+
+ public int getNumPendingApplications() {
+ return numPendingApplications;
+ }
+
+ public int getNumContainers() {
+ return numContainers;
+ }
+
+ public int getMaxApplications() {
+ return maxApplications;
+ }
+
+ public int getMaxApplicationsPerUser() {
+ return maxApplicationsPerUser;
+ }
+
+ public int getMaxActiveApplications() {
+ return maxActiveApplications;
+ }
+
+ public int getMaxActiveApplicationsPerUser() {
+ return maxActiveApplicationsPerUser;
+ }
+
+ public int getUserLimit() {
+ return userLimit;
+ }
+
+ public float getUserLimitFactor() {
+ return userLimitFactor;
+ }
+}
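
One detail worth flagging in this new subclass: JAXB only marshals CapacitySchedulerLeafQueueInfo's extra fields because the parent class registers it via @XmlSeeAlso (see the CapacitySchedulerQueueInfo hunk below); a context built from the base type alone would not know about the subclass. A minimal illustration of the mechanism (toy Base/Leaf names, not the Hadoop DAOs):

import javax.xml.bind.JAXBContext;
import javax.xml.bind.Marshaller;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlSeeAlso;

public class XmlSeeAlsoDemo {
  @XmlRootElement
  @XmlAccessorType(XmlAccessType.FIELD)
  @XmlSeeAlso({Leaf.class}) // registers the subclass with the JAXB context
  static class Base {
    protected String queueName = "root.a";
  }

  @XmlRootElement
  @XmlAccessorType(XmlAccessType.FIELD)
  static class Leaf extends Base {
    protected int userLimit = 100; // subclass-only field
  }

  public static void main(String[] args) throws Exception {
    // Context is built from Base only; @XmlSeeAlso makes Leaf marshal too.
    JAXBContext ctx = JAXBContext.newInstance(Base.class);
    Marshaller m = ctx.createMarshaller();
    m.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, true);
    m.marshal(new Leaf(), System.out); // includes <userLimit>100</userLimit>
  }
}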
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/CapacitySchedulerQueueInfo.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/CapacitySchedulerQueueInfo.java
index 14cffd8e5eb..dd9ab16b2d2 100644
--- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/CapacitySchedulerQueueInfo.java
+++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/CapacitySchedulerQueueInfo.java
@@ -22,50 +22,54 @@ import java.util.ArrayList;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlRootElement;
+import javax.xml.bind.annotation.XmlSeeAlso;
import javax.xml.bind.annotation.XmlTransient;
-import org.apache.hadoop.yarn.api.records.QueueState;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CSQueue;
@XmlRootElement
@XmlAccessorType(XmlAccessType.FIELD)
+@XmlSeeAlso({CapacitySchedulerLeafQueueInfo.class})
public class CapacitySchedulerQueueInfo {
@XmlTransient
- protected String queuePath;
- @XmlTransient
- protected Boolean isParent = false;
+ static final float EPSILON = 1e-8f;
- // bit odd to store this but makes html easier for now
@XmlTransient
- protected CSQueue queue;
+ protected String queuePath;
protected float capacity;
protected float usedCapacity;
protected float maxCapacity;
+ protected float absoluteCapacity;
+ protected float absoluteMaxCapacity;
+ protected float utilization;
+ protected int numApplications;
+ protected String usedResources;
protected String queueName;
- protected QueueState state;
+ protected String state;
   protected ArrayList<CapacitySchedulerQueueInfo> subQueues;
CapacitySchedulerQueueInfo() {
};
- CapacitySchedulerQueueInfo(float cap, float used, float max, String name,
- QueueState state, String path) {
- this.capacity = cap;
- this.usedCapacity = used;
- this.maxCapacity = max;
- this.queueName = name;
- this.state = state;
- this.queuePath = path;
- }
+ CapacitySchedulerQueueInfo(CSQueue q) {
+ queuePath = q.getQueuePath();
+ capacity = q.getCapacity() * 100;
+ usedCapacity = q.getUsedCapacity() * 100;
- public Boolean isParent() {
- return this.isParent;
- }
+ maxCapacity = q.getMaximumCapacity();
+ if (maxCapacity < EPSILON || maxCapacity > 1f)
+ maxCapacity = 1f;
+ maxCapacity *= 100;
- public CSQueue getQueue() {
- return this.queue;
+ absoluteCapacity = cap(q.getAbsoluteCapacity(), 0f, 1f) * 100;
+ absoluteMaxCapacity = cap(q.getAbsoluteMaximumCapacity(), 0f, 1f) * 100;
+ utilization = q.getUtilization() * 100;
+ numApplications = q.getNumApplications();
+ usedResources = q.getUsedResources().toString();
+ queueName = q.getQueueName();
+ state = q.getState().toString();
}
public float getCapacity() {
@@ -80,12 +84,32 @@ public class CapacitySchedulerQueueInfo {
return this.maxCapacity;
}
+ public float getAbsoluteCapacity() {
+ return absoluteCapacity;
+ }
+
+ public float getAbsoluteMaxCapacity() {
+ return absoluteMaxCapacity;
+ }
+
+ public float getUtilization() {
+ return utilization;
+ }
+
+ public int getNumApplications() {
+ return numApplications;
+ }
+
+ public String getUsedResources() {
+ return usedResources;
+ }
+
public String getQueueName() {
return this.queueName;
}
public String getQueueState() {
- return this.state.toString();
+ return this.state;
}
public String getQueuePath() {
@@ -96,4 +120,14 @@ public class CapacitySchedulerQueueInfo {
return this.subQueues;
}
+ /**
+ * Limit a value to a specified range.
+ * @param val the value to be capped
+ * @param low the lower bound of the range (inclusive)
+ * @param hi the upper bound of the range (inclusive)
+ * @return the capped value
+ */
+ static float cap(float val, float low, float hi) {
+ return Math.min(Math.max(val, low), hi);
+ }
}
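
The cap helper above exists because the scheduler-side getters can legitimately report fractions outside [0, 1] (for example an undefined maximum), while the web DAO wants clean percentages. A quick worked example of the clamp-then-scale convention used in the constructor above (input values are illustrative):

public class CapDemo {
  // Same clamping helper as CapacitySchedulerQueueInfo.cap()
  static float cap(float val, float low, float hi) {
    return Math.min(Math.max(val, low), hi);
  }

  public static void main(String[] args) {
    // Scheduler values are fractions; assume a queue at 25% of the cluster
    // and an out-of-range (-1, "undefined") absolute maximum.
    float absoluteCapacityFraction = 0.25f;
    float absoluteMaxCapacityFraction = -1f;

    // The DAO clamps to [0,1] and scales to percent, as in the constructor.
    float absoluteCapacity = cap(absoluteCapacityFraction, 0f, 1f) * 100;       // 25.0
    float absoluteMaxCapacity = cap(absoluteMaxCapacityFraction, 0f, 1f) * 100; // 0.0

    System.out.println("absoluteCapacity    = " + absoluteCapacity);
    System.out.println("absoluteMaxCapacity = " + absoluteMaxCapacity);
  }
}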
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesCapacitySched.java b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesCapacitySched.java
index dc89381b3df..3ee0dac1040 100644
--- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesCapacitySched.java
+++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesCapacitySched.java
@@ -210,17 +210,21 @@ public class TestRMWebServicesCapacitySched extends JerseyTest {
Element qElem = (Element) queues.item(j);
String qName = WebServicesTestUtils.getXmlString(qElem, "queueName");
String q = CapacitySchedulerConfiguration.ROOT + "." + qName;
- verifySubQueueXML(qElem, q);
+ verifySubQueueXML(qElem, q, 100);
}
}
}
- public void verifySubQueueXML(Element qElem, String q) throws Exception {
-
+ public void verifySubQueueXML(Element qElem, String q, float parentAbsCapacity)
+ throws Exception {
+ float absCapacity = WebServicesTestUtils.getXmlFloat(qElem, "absoluteCapacity");
verifySubQueueGeneric(q,
WebServicesTestUtils.getXmlFloat(qElem, "usedCapacity"),
WebServicesTestUtils.getXmlFloat(qElem, "capacity"),
WebServicesTestUtils.getXmlFloat(qElem, "maxCapacity"),
+ absCapacity,
+ WebServicesTestUtils.getXmlFloat(qElem, "absoluteMaxCapacity"),
+ parentAbsCapacity,
WebServicesTestUtils.getXmlString(qElem, "queueName"),
WebServicesTestUtils.getXmlString(qElem, "state"));
@@ -230,8 +234,12 @@ public class TestRMWebServicesCapacitySched extends JerseyTest {
Element subqElem = (Element) queues.item(j);
String qName = WebServicesTestUtils.getXmlString(subqElem, "queueName");
String q2 = q + "." + qName;
- verifySubQueueXML(subqElem, q2);
+ verifySubQueueXML(subqElem, q2, absCapacity);
}
+ } else {
+ verifyLeafQueueGeneric(q,
+ WebServicesTestUtils.getXmlInt(qElem, "userLimit"),
+ WebServicesTestUtils.getXmlFloat(qElem, "userLimitFactor"));
}
}
@@ -254,7 +262,7 @@ public class TestRMWebServicesCapacitySched extends JerseyTest {
for (int i = 0; i < arr.length(); i++) {
JSONObject obj = arr.getJSONObject(i);
String q = CapacitySchedulerConfiguration.ROOT + "." + obj.getString("queueName");
- verifySubQueue(obj, q);
+ verifySubQueue(obj, q, 100);
}
}
@@ -268,31 +276,46 @@ public class TestRMWebServicesCapacitySched extends JerseyTest {
assertTrue("queueName doesn't match", "root".matches(queueName));
}
- private void verifySubQueue(JSONObject info, String q) throws JSONException,
- Exception {
- if (info.has("subQueues")) {
- assertEquals("incorrect number of elements", 6, info.length());
- } else {
- assertEquals("incorrect number of elements", 5, info.length());
+ private void verifySubQueue(JSONObject info, String q, float parentAbsCapacity)
+ throws JSONException, Exception {
+ int numExpectedElements = 11;
+ boolean isParentQueue = true;
+ if (!info.has("subQueues")) {
+ numExpectedElements = 20;
+ isParentQueue = false;
}
+ assertEquals("incorrect number of elements", numExpectedElements, info.length());
+
+ float absCapacity = (float) info.getDouble("absoluteCapacity");
+
verifySubQueueGeneric(q, (float) info.getDouble("usedCapacity"),
(float) info.getDouble("capacity"),
- (float) info.getDouble("maxCapacity"), info.getString("queueName"),
+ (float) info.getDouble("maxCapacity"),
+ absCapacity,
+ (float) info.getDouble("absoluteMaxCapacity"),
+ parentAbsCapacity,
+ info.getString("queueName"),
info.getString("state"));
- if (info.has("subQueues")) {
+ if (isParentQueue) {
JSONArray arr = info.getJSONArray("subQueues");
// test subqueues
for (int i = 0; i < arr.length(); i++) {
JSONObject obj = arr.getJSONObject(i);
String q2 = q + "." + obj.getString("queueName");
- verifySubQueue(obj, q2);
+ verifySubQueue(obj, q2, absCapacity);
}
+ } else {
+ verifyLeafQueueGeneric(q, info.getInt("userLimit"),
+ (float) info.getDouble("userLimitFactor"));
}
}
private void verifySubQueueGeneric(String q, float usedCapacity,
- float capacity, float maxCapacity, String qname, String state)
+ float capacity, float maxCapacity,
+ float absCapacity, float absMaxCapacity,
+ float parentAbsCapacity,
+ String qname, String state)
throws Exception {
String[] qArr = q.split("\\.");
assertTrue("q name invalid: " + q, qArr.length > 1);
@@ -302,15 +325,28 @@ public class TestRMWebServicesCapacitySched extends JerseyTest {
assertEquals("capacity doesn't match", csConf.getCapacity(q), capacity,
1e-3f);
float expectCapacity = csConf.getMaximumCapacity(q);
+ float expectAbsMaxCapacity = parentAbsCapacity * (maxCapacity/100);
if (CapacitySchedulerConfiguration.UNDEFINED == expectCapacity) {
expectCapacity = 100;
+ expectAbsMaxCapacity = 100;
}
assertEquals("maxCapacity doesn't match", expectCapacity, maxCapacity,
1e-3f);
+ assertEquals("absoluteCapacity doesn't match",
+ parentAbsCapacity * (capacity/100), absCapacity, 1e-3f);
+ assertEquals("absoluteMaxCapacity doesn't match",
+ expectAbsMaxCapacity, absMaxCapacity, 1e-3f);
assertTrue("queueName doesn't match, got: " + qname + " expected: " + q,
qshortName.matches(qname));
assertTrue("state doesn't match",
(csConf.getState(q).toString()).matches(state));
}
+
+ private void verifyLeafQueueGeneric(String q, int userLimit,
+ float userLimitFactor) throws Exception {
+ assertEquals("userLimit doesn't match", csConf.getUserLimit(q), userLimit);
+ assertEquals("userLimitFactor doesn't match",
+ csConf.getUserLimitFactor(q), userLimitFactor, 1e-3f);
+ }
}
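
The test changes above thread parentAbsCapacity down the recursion to verify a simple invariant: a queue's absolute capacity is its parent's absolute capacity scaled by the queue's own relative capacity. A sanity check of the arithmetic the assertions rely on (values are illustrative, not from any test config):

public class AbsCapacityCheck {
  public static void main(String[] args) {
    float parentAbsCapacity = 100f; // root
    float capacity = 10.5f;         // queue at 10.5% of its parent
    float maxCapacity = 50f;        // queue's relative max, in percent

    // The identities asserted by verifySubQueueGeneric above:
    float absCapacity = parentAbsCapacity * (capacity / 100);       // 10.5
    float absMaxCapacity = parentAbsCapacity * (maxCapacity / 100); // 50.0

    // A child at 40% of this queue compounds multiplicatively.
    float childAbsCapacity = absCapacity * (40f / 100);             // 4.2

    System.out.printf("abs=%.1f absMax=%.1f childAbs=%.1f%n",
        absCapacity, absMaxCapacity, childAbsCapacity);
  }
}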
diff --git a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/pom.xml b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/pom.xml
index b1f00a058f2..6e657bd9abc 100644
--- a/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/pom.xml
+++ b/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-web-proxy/pom.xml
@@ -34,7 +34,6 @@
       <groupId>javax.servlet</groupId>
       <artifactId>servlet-api</artifactId>
       <scope>compile</scope>
-      <version>2.5</version>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
diff --git a/hadoop-mapreduce-project/hadoop-yarn/pom.xml b/hadoop-mapreduce-project/hadoop-yarn/pom.xml
index df178ed1978..6b5f6e17c18 100644
--- a/hadoop-mapreduce-project/hadoop-yarn/pom.xml
+++ b/hadoop-mapreduce-project/hadoop-yarn/pom.xml
@@ -32,49 +32,11 @@
     <test.timeout>600000</test.timeout>
     <yarn.basedir>${basedir}</yarn.basedir>
   </properties>
 
-  <distributionManagement>
-    <repository>
-      <id>apache.releases.https</id>
-      <name>Apache Release Distribution Repository</name>
-      <url>https://repository.apache.org/service/local/staging/deploy/maven2</url>
-    </repository>
-    <snapshotRepository>
-      <id>apache.snapshots.https</id>
-      <name>Apache Development Snapshot Repository</name>
-      <url>https://repository.apache.org/content/repositories/snapshots</url>
-    </snapshotRepository>
-  </distributionManagement>
-
-  <repositories>
-    <repository>
-      <id>repository.jboss.org</id>
-      <url>http://repository.jboss.org/nexus/content/groups/public/</url>
-      <snapshots>
-        <enabled>false</enabled>
-      </snapshots>
-    </repository>
-    <repository>
-      <id>apache.snapshots</id>
-      <url>http://repository.apache.org/snapshots</url>
-      <snapshots>
-        <enabled>true</enabled>
-      </snapshots>
-    </repository>
-  </repositories>
 
   <dependencies>
-    <dependency>
-      <groupId>com.google.protobuf</groupId>
-      <artifactId>protobuf-java</artifactId>
-      <version>2.4.0a</version>
-    </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-common</artifactId>
-      <version>${project.version}</version>
       <scope>provided</scope>
@@ -103,295 +65,80 @@
     <dependency>
       <groupId>org.slf4j</groupId>
       <artifactId>slf4j-api</artifactId>
-      <version>1.6.1</version>
     </dependency>
     <dependency>
       <groupId>org.slf4j</groupId>
       <artifactId>slf4j-log4j12</artifactId>
-      <version>1.6.1</version>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-annotations</artifactId>
-      <version>${project.version}</version>
     </dependency>
     <dependency>
       <groupId>org.mockito</groupId>
       <artifactId>mockito-all</artifactId>
-      <version>1.8.5</version>
       <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-common</artifactId>
-      <version>${project.version}</version>
       <type>test-jar</type>
       <scope>test</scope>
     </dependency>
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-
-      <artifactId>hadoop-hdfs</artifactId>
-      <version>${project.version}</version>
-    </dependency>
     <dependency>
       <groupId>com.google.inject.extensions</groupId>
       <artifactId>guice-servlet</artifactId>
-      <version>3.0</version>
     </dependency>
-    <dependency>
-      <groupId>junit</groupId>
-      <artifactId>junit</artifactId>
-      <version>4.8.2</version>
-    </dependency>
     <dependency>
       <groupId>org.jboss.netty</groupId>
       <artifactId>netty</artifactId>
-      <version>3.2.3.Final</version>
     </dependency>
     <dependency>
       <groupId>com.cenqua.clover</groupId>
       <artifactId>clover</artifactId>
-      <version>3.0.2</version>
     </dependency>
     <dependency>
       <groupId>com.google.protobuf</groupId>
       <artifactId>protobuf-java</artifactId>
-      <version>2.4.0a</version>
     </dependency>
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-common</artifactId>
-      <version>${project.version}</version>
-      <exclusions>
-        <exclusion>
-          <groupId>commons-el</groupId>
-          <artifactId>commons-el</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>tomcat</groupId>
-          <artifactId>jasper-runtime</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>tomcat</groupId>
-          <artifactId>jasper-compiler</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>org.mortbay.jetty</groupId>
-          <artifactId>jsp-2.1-jetty</artifactId>
-        </exclusion>
-        <exclusion>
-          <groupId>hsqldb</groupId>
-          <artifactId>hsqldb</artifactId>
-        </exclusion>
-      </exclusions>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-annotations</artifactId>
-      <version>${project.version}</version>
-    </dependency>
     <dependency>
       <groupId>junit</groupId>
       <artifactId>junit</artifactId>
-      <version>4.8.2</version>
       <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>commons-io</groupId>
       <artifactId>commons-io</artifactId>
-      <version>2.1</version>
     </dependency>
-    <dependency>
-      <groupId>org.mockito</groupId>
-      <artifactId>mockito-all</artifactId>
-      <version>1.8.5</version>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-common</artifactId>
-      <version>${project.version}</version>
-      <type>test-jar</type>
-      <scope>test</scope>
-    </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-hdfs</artifactId>
-      <version>${project.version}</version>
-      <scope>runtime</scope>
+      <scope>provided</scope>
     </dependency>
     <dependency>
       <groupId>com.google.inject</groupId>
       <artifactId>guice</artifactId>
-      <version>3.0</version>
     </dependency>
     <dependency>
       <groupId>com.sun.jersey.jersey-test-framework</groupId>
       <artifactId>jersey-test-framework-core</artifactId>
-      <version>1.8</version>
       <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>com.sun.jersey.jersey-test-framework</groupId>
       <artifactId>jersey-test-framework-grizzly2</artifactId>
-      <version>1.8</version>
     </dependency>
     <dependency>
       <groupId>com.sun.jersey</groupId>
       <artifactId>jersey-server</artifactId>
-      <version>1.8</version>
     </dependency>
     <dependency>
       <groupId>com.sun.jersey.contribs</groupId>
       <artifactId>jersey-guice</artifactId>
-      <version>1.8</version>
     </dependency>
-    <dependency>
-      <groupId>org.jboss.netty</groupId>
-      <artifactId>netty</artifactId>
-      <version>3.2.3.Final</version>
-    </dependency>
-    <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-api</artifactId>
-      <version>1.6.1</version>
-    </dependency>
-    <dependency>
-      <groupId>org.slf4j</groupId>
-      <artifactId>slf4j-log4j12</artifactId>
-      <version>1.6.1</version>
-    </dependency>
-    <dependency>
-      <groupId>junit</groupId>
-      <artifactId>junit</artifactId>
-      <version>4.8.2</version>
-    </dependency>
-  </dependencies>
-
-  <dependencyManagement>
-    <dependencies>
-      <dependency>
-        <groupId>org.apache.hadoop</groupId>
-        <artifactId>hadoop-yarn-api</artifactId>
-        <version>${project.version}</version>
-      </dependency>
-      <dependency>
-        <groupId>org.apache.hadoop</groupId>
-        <artifactId>hadoop-yarn-common</artifactId>
-        <version>${project.version}</version>
-      </dependency>
-      <dependency>
-        <groupId>org.apache.hadoop</groupId>
-        <artifactId>hadoop-yarn-common</artifactId>
-        <version>${project.version}</version>
-        <type>test-jar</type>
-      </dependency>
-      <dependency>
-        <groupId>org.apache.hadoop</groupId>
-        <artifactId>hadoop-yarn-server-common</artifactId>
-        <version>${project.version}</version>
-      </dependency>
-      <dependency>
-        <groupId>org.apache.hadoop</groupId>
-        <artifactId>hadoop-yarn-server-web-proxy</artifactId>
-        <version>${project.version}</version>
-      </dependency>
-      <dependency>
-        <groupId>org.apache.hadoop</groupId>
-        <artifactId>hadoop-yarn-server-resourcemanager</artifactId>
-        <version>${project.version}</version>
-      </dependency>
-      <dependency>
-        <groupId>org.apache.hadoop</groupId>
-        <artifactId>hadoop-yarn-server-nodemanager</artifactId>
-        <version>${project.version}</version>
-      </dependency>
-      <dependency>
-        <groupId>org.apache.zookeeper</groupId>
-        <artifactId>zookeeper</artifactId>
-        <version>3.4.2</version>
-        <exclusions>
-          <exclusion>
-            <groupId>junit</groupId>
-            <artifactId>junit</artifactId>
-          </exclusion>
-          <exclusion>
-            <groupId>com.sun.jdmk</groupId>
-            <artifactId>jmxtools</artifactId>
-          </exclusion>
-          <exclusion>
-            <groupId>com.sun.jmx</groupId>
-            <artifactId>jmxri</artifactId>
-          </exclusion>
-        </exclusions>
-      </dependency>
-    </dependencies>
-  </dependencyManagement>
-
   <build>
-    <pluginManagement>
-      <plugins>
-        <plugin>
-          <groupId>org.codehaus.mojo</groupId>
-          <artifactId>findbugs-maven-plugin</artifactId>
-          <version>2.3.2</version>
-        </plugin>
-        <plugin>
-          <groupId>org.apache.maven.plugins</groupId>
-          <artifactId>maven-compiler-plugin</artifactId>
-          <version>2.3.2</version>
-          <configuration>
-            <source>1.6</source>
-            <target>1.6</target>
-          </configuration>
-        </plugin>
-        <plugin>
-          <artifactId>maven-clean-plugin</artifactId>
-          <version>2.4.1</version>
-        </plugin>
-        <plugin>
-          <groupId>com.atlassian.maven.plugins</groupId>
-          <artifactId>maven-clover2-plugin</artifactId>
-          <version>3.0.2</version>
-          <configuration>
-            <licenseLocation>/home/y/conf/clover/clover.license</licenseLocation>
-          </configuration>
-        </plugin>
-        <plugin>
-          <groupId>org.apache.maven.plugins</groupId>
-          <artifactId>maven-antrun-plugin</artifactId>
-          <version>1.6</version>
-        </plugin>
-        <plugin>
-          <groupId>org.codehaus.mojo</groupId>
-          <artifactId>exec-maven-plugin</artifactId>
-          <version>1.2</version>
-        </plugin>
-        <plugin>
-          <groupId>org.codehaus.mojo</groupId>
-          <artifactId>build-helper-maven-plugin</artifactId>
-          <version>1.5</version>
-        </plugin>
-        <plugin>
-          <groupId>org.apache.maven.plugins</groupId>
-          <artifactId>maven-install-plugin</artifactId>
-          <version>2.3.1</version>
-        </plugin>
-        <plugin>
-          <groupId>org.apache.maven.plugins</groupId>
-          <artifactId>maven-jar-plugin</artifactId>
-          <version>2.3.1</version>
-        </plugin>
-        <plugin>
-          <groupId>org.apache.maven.plugins</groupId>
-          <artifactId>maven-source-plugin</artifactId>
-          <version>2.1.2</version>
-        </plugin>
-      </plugins>
-    </pluginManagement>
-
     <plugins>
       <plugin>
         <groupId>org.codehaus.mojo</groupId>
diff --git a/hadoop-mapreduce-project/pom.xml b/hadoop-mapreduce-project/pom.xml
index b9e64473cf6..a4f321679ca 100644
--- a/hadoop-mapreduce-project/pom.xml
+++ b/hadoop-mapreduce-project/pom.xml
@@ -48,12 +48,10 @@
     <dependency>
       <groupId>com.google.protobuf</groupId>
       <artifactId>protobuf-java</artifactId>
-      <version>2.4.0a</version>
     </dependency>
     <dependency>
       <groupId>org.apache.avro</groupId>
       <artifactId>avro</artifactId>
-      <version>1.5.3</version>
     </dependency>
     <dependency>
       <groupId>org.mortbay.jetty</groupId>
@@ -84,7 +82,6 @@
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-common</artifactId>
-      <version>${project.version}</version>
       <scope>provided</scope>
@@ -113,28 +110,23 @@
     <dependency>
       <groupId>org.slf4j</groupId>
       <artifactId>slf4j-api</artifactId>
-      <version>1.6.1</version>
     </dependency>
     <dependency>
       <groupId>org.slf4j</groupId>
       <artifactId>slf4j-log4j12</artifactId>
-      <version>1.6.1</version>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-annotations</artifactId>
-      <version>${project.version}</version>
     </dependency>
     <dependency>
       <groupId>org.mockito</groupId>
       <artifactId>mockito-all</artifactId>
-      <version>1.8.5</version>
       <scope>test</scope>
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-common</artifactId>
-      <version>${project.version}</version>
       <type>test-jar</type>
       <scope>test</scope>
@@ -142,118 +134,43 @@
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-hdfs</artifactId>
-      <version>${project.version}</version>
     </dependency>
     <dependency>
       <groupId>com.google.inject</groupId>
       <artifactId>guice</artifactId>
-      <version>3.0</version>
     </dependency>
     <dependency>
       <groupId>com.sun.jersey</groupId>
       <artifactId>jersey-server</artifactId>
-      <version>1.8</version>
     </dependency>
     <dependency>
       <groupId>com.sun.jersey.contribs</groupId>
       <artifactId>jersey-guice</artifactId>
-      <version>1.8</version>
-    </dependency>
+    </dependency>
     <dependency>
       <groupId>com.google.inject.extensions</groupId>
       <artifactId>guice-servlet</artifactId>
-      <version>3.0</version>
     </dependency>
     <dependency>
       <groupId>junit</groupId>
       <artifactId>junit</artifactId>
-      <version>4.8.2</version>
     </dependency>
     <dependency>
       <groupId>org.jboss.netty</groupId>
       <artifactId>netty</artifactId>
-      <version>3.2.3.Final</version>
     </dependency>
     <dependency>
       <groupId>commons-io</groupId>
       <artifactId>commons-io</artifactId>
-      <version>2.1</version>
     </dependency>
     <dependency>
       <groupId>com.cenqua.clover</groupId>
       <artifactId>clover</artifactId>
-      <version>3.0.2</version>
     </dependency>
   </dependencies>
 
   <build>
-    <pluginManagement>
-      <plugins>
-        <plugin>
-          <groupId>org.codehaus.mojo</groupId>
-          <artifactId>findbugs-maven-plugin</artifactId>
-          <version>2.3.2</version>
-        </plugin>
-        <plugin>
-          <artifactId>maven-clean-plugin</artifactId>
-          <version>2.4.1</version>
-        </plugin>
-        <plugin>
-          <groupId>com.atlassian.maven.plugins</groupId>
-          <artifactId>maven-clover2-plugin</artifactId>
-          <version>3.0.2</version>
-          <configuration>
-            <licenseLocation>/home/y/conf/clover/clover.license</licenseLocation>
-          </configuration>
-        </plugin>
-        <plugin>
-          <groupId>org.apache.maven.plugins</groupId>
-          <artifactId>maven-compiler-plugin</artifactId>
-          <version>2.3.2</version>
-          <configuration>
-            <source>1.6</source>
-            <target>1.6</target>
-          </configuration>
-        </plugin>
-        <plugin>
-          <groupId>org.apache.maven.plugins</groupId>
-          <artifactId>maven-assembly-plugin</artifactId>
-          <version>2.2.1</version>
-        </plugin>
-        <plugin>
-          <groupId>org.apache.maven.plugins</groupId>
-          <artifactId>maven-antrun-plugin</artifactId>
-          <version>1.6</version>
-        </plugin>
-        <plugin>
-          <groupId>org.codehaus.mojo</groupId>
-          <artifactId>exec-maven-plugin</artifactId>
-          <version>1.2</version>
-        </plugin>
-        <plugin>
-          <groupId>org.codehaus.mojo</groupId>
-          <artifactId>build-helper-maven-plugin</artifactId>
-          <version>1.5</version>
-        </plugin>
-        <plugin>
-          <groupId>org.apache.maven.plugins</groupId>
-          <artifactId>maven-install-plugin</artifactId>
-          <version>2.3.1</version>
-        </plugin>
-        <plugin>
-          <groupId>org.apache.maven.plugins</groupId>
-          <artifactId>maven-jar-plugin</artifactId>
-          <version>2.3.1</version>
-        </plugin>
-        <plugin>
-          <groupId>org.apache.maven.plugins</groupId>
-          <artifactId>maven-source-plugin</artifactId>
-          <version>2.1.2</version>
-        </plugin>
-      </plugins>
-    </pluginManagement>
-
     <plugins>
       <plugin>
         <artifactId>maven-antrun-plugin</artifactId>
diff --git a/hadoop-project-dist/pom.xml b/hadoop-project-dist/pom.xml
index b54af34be9f..f9aeee598e6 100644
--- a/hadoop-project-dist/pom.xml
+++ b/hadoop-project-dist/pom.xml
@@ -213,7 +213,7 @@
                     <groupId>org.apache.hadoop</groupId>
                     <artifactId>hadoop-annotations</artifactId>
-                    <version>${hadoop.annotations.version}</version>
+                    <version>${project.version}</version>
                     <overWrite>false</overWrite>
                     <outputDirectory>${project.build.directory}</outputDirectory>
                     <destFileName>hadoop-annotations.jar</destFileName>
diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml
index bd3d7eea9cb..533ccd06853 100644
--- a/hadoop-project/pom.xml
+++ b/hadoop-project/pom.xml
@@ -39,7 +39,6 @@
-    <hadoop.annotations.version>${project.version}</hadoop.annotations.version>
1.0.9
${project.version}
@@ -65,7 +64,7 @@
       <dependency>
         <groupId>org.apache.hadoop</groupId>
         <artifactId>hadoop-annotations</artifactId>
-        <version>${hadoop.annotations.version}</version>
+        <version>${project.version}</version>
       </dependency>
       <dependency>
         <groupId>org.apache.hadoop</groupId>
@@ -99,6 +98,17 @@
         <artifactId>hadoop-mapreduce-client-app</artifactId>
         <version>${project.version}</version>
       </dependency>
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-mapreduce-client-app</artifactId>
+        <version>${project.version}</version>
+        <type>test-jar</type>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-mapreduce-client-common</artifactId>
+        <version>${project.version}</version>
+      </dependency>
       <dependency>
         <groupId>org.apache.hadoop</groupId>
         <artifactId>hadoop-yarn-api</artifactId>
@@ -117,6 +127,37 @@
         <version>${project.version}</version>
       </dependency>
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-mapreduce-client-shuffle</artifactId>
+        <version>${project.version}</version>
+      </dependency>
+
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-yarn</artifactId>
+        <version>${project.version}</version>
+        <type>pom</type>
+      </dependency>
+
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-yarn-server</artifactId>
+        <version>${project.version}</version>
+      </dependency>
+
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-yarn-server-web-proxy</artifactId>
+        <version>${project.version}</version>
+      </dependency>
+
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-yarn-server-common</artifactId>
+        <version>${project.version}</version>
+      </dependency>
+
       <dependency>
         <groupId>org.apache.hadoop</groupId>
         <artifactId>hadoop-yarn-server-tests</artifactId>
@@ -124,6 +165,35 @@
         <type>test-jar</type>
       </dependency>
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-yarn-common</artifactId>
+        <version>${project.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-yarn-common</artifactId>
+        <version>${project.version}</version>
+        <type>test-jar</type>
+      </dependency>
+
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-yarn-server-nodemanager</artifactId>
+        <version>${project.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-yarn-server-resourcemanager</artifactId>
+        <version>${project.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-yarn-server-resourcemanager</artifactId>
+        <version>${project.version}</version>
+        <type>test-jar</type>
+      </dependency>
+
       <dependency>
         <groupId>org.apache.hadoop</groupId>
         <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
@@ -143,6 +213,17 @@
         <version>${project.version}</version>
       </dependency>
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-streaming</artifactId>
+        <version>${project.version}</version>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-archives</artifactId>
+        <version>${project.version}</version>
+      </dependency>
+
       <dependency>
         <groupId>com.google.guava</groupId>
         <artifactId>guava</artifactId>
@@ -214,6 +295,12 @@
         <groupId>com.sun.jersey</groupId>
         <artifactId>jersey-json</artifactId>
         <version>1.8</version>
+        <exclusions>
+          <exclusion>
+            <groupId>javax.xml.stream</groupId>
+            <artifactId>stax-api</artifactId>
+          </exclusion>
+        </exclusions>
       </dependency>
       <dependency>
         <groupId>com.sun.jersey</groupId>
@@ -221,6 +308,48 @@
         <version>1.8</version>
       </dependency>
+      <dependency>
+        <groupId>com.google.inject</groupId>
+        <artifactId>guice</artifactId>
+        <version>3.0</version>
+      </dependency>
+
+      <dependency>
+        <groupId>com.google.inject.extensions</groupId>
+        <artifactId>guice-servlet</artifactId>
+        <version>3.0</version>
+      </dependency>
+
+      <dependency>
+        <groupId>com.sun.jersey.contribs</groupId>
+        <artifactId>jersey-guice</artifactId>
+        <version>1.8</version>
+      </dependency>
+
+      <dependency>
+        <groupId>com.sun.jersey.jersey-test-framework</groupId>
+        <artifactId>jersey-test-framework-core</artifactId>
+        <version>1.8</version>
+        <scope>test</scope>
+      </dependency>
+      <dependency>
+        <groupId>com.sun.jersey.jersey-test-framework</groupId>
+        <artifactId>jersey-test-framework-grizzly2</artifactId>
+        <version>1.8</version>
+      </dependency>
+
+      <dependency>
+        <groupId>org.jboss.netty</groupId>
+        <artifactId>netty</artifactId>
+        <version>3.2.3.Final</version>
+      </dependency>
+
+      <dependency>
+        <groupId>commons-io</groupId>
+        <artifactId>commons-io</artifactId>
+        <version>2.1</version>
+      </dependency>
+
       <dependency>
         <groupId>org.mortbay.jetty</groupId>
         <artifactId>jetty-servlet-tester</artifactId>
@@ -335,7 +464,7 @@
       <dependency>
         <groupId>junit</groupId>
         <artifactId>junit</artifactId>
-        <version>4.8.1</version>
+        <version>4.8.2</version>
       </dependency>
       <dependency>
         <groupId>commons-lang</groupId>
@@ -360,12 +489,12 @@
       <dependency>
         <groupId>org.slf4j</groupId>
         <artifactId>slf4j-api</artifactId>
-        <version>1.5.11</version>
+        <version>1.6.1</version>
       </dependency>
       <dependency>
         <groupId>org.slf4j</groupId>
         <artifactId>slf4j-log4j12</artifactId>
-        <version>1.5.11</version>
+        <version>1.6.1</version>
       </dependency>
       <dependency>
         <groupId>org.eclipse.jdt</groupId>
@@ -437,16 +566,58 @@
         <artifactId>json-simple</artifactId>
         <version>1.1</version>
       </dependency>
+
+      <dependency>
+        <groupId>com.cenqua.clover</groupId>
+        <artifactId>clover</artifactId>
+        <version>3.0.2</version>
+      </dependency>
+
+      <dependency>
+        <groupId>org.apache.zookeeper</groupId>
+        <artifactId>zookeeper</artifactId>
+        <version>3.4.2</version>
+        <exclusions>
+          <exclusion>
+            <groupId>junit</groupId>
+            <artifactId>junit</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>com.sun.jdmk</groupId>
+            <artifactId>jmxtools</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>com.sun.jmx</groupId>
+            <artifactId>jmxri</artifactId>
+          </exclusion>
+        </exclusions>
+      </dependency>
+
+      <dependency>
+        <groupId>org.apache.bookkeeper</groupId>
+        <artifactId>bookkeeper-server</artifactId>
+        <version>4.0.0</version>
+        <scope>compile</scope>
+      </dependency>
     </dependencies>
   </dependencyManagement>
 
   <build>
     <pluginManagement>
       <plugins>
+        <plugin>
+          <artifactId>maven-clean-plugin</artifactId>
+          <version>2.4.1</version>
+        </plugin>
         <plugin>
           <groupId>org.apache.maven.plugins</groupId>
           <artifactId>maven-compiler-plugin</artifactId>
           <version>2.3.2</version>
+          <configuration>
+            <source>1.6</source>
+            <target>1.6</target>
+          </configuration>
@@ -463,6 +634,11 @@
         <plugin>
           <groupId>org.apache.maven.plugins</groupId>
           <artifactId>maven-surefire-plugin</artifactId>
           <version>2.10</version>
         </plugin>
+        <plugin>
+          <groupId>org.apache.maven.plugins</groupId>
+          <artifactId>maven-install-plugin</artifactId>
+          <version>2.3.1</version>
+        </plugin>
         <plugin>
           <groupId>org.apache.maven.plugins</groupId>
           <artifactId>maven-jar-plugin</artifactId>
@@ -471,18 +647,13 @@
         <plugin>
           <groupId>org.apache.maven.plugins</groupId>
           <artifactId>maven-assembly-plugin</artifactId>
-          <version>2.2-beta-3</version>
+          <version>2.2.1</version>
         </plugin>
         <plugin>
           <groupId>org.apache.maven.plugins</groupId>
           <artifactId>maven-javadoc-plugin</artifactId>
           <version>2.7</version>
         </plugin>
-        <plugin>
-          <groupId>org.apache.maven.plugins</groupId>
-          <artifactId>maven-antrun-plugin</artifactId>
-          <version>1.6</version>
-        </plugin>
         <plugin>
           <groupId>org.apache.maven.plugins</groupId>
           <artifactId>maven-war-plugin</artifactId>
@@ -528,11 +699,6 @@
           <artifactId>jspc-maven-plugin</artifactId>
           <version>2.0-alpha-3</version>
         </plugin>
-        <plugin>
-          <groupId>org.apache.maven.plugins</groupId>
-          <artifactId>maven-site-plugin</artifactId>
-          <version>3.0</version>
-        </plugin>
         <plugin>
           <groupId>org.apache.maven.plugins</groupId>
           <artifactId>maven-project-info-reports-plugin</artifactId>
@@ -566,14 +732,6 @@
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <configuration>
-          <source>1.6</source>
-          <target>1.6</target>
-        </configuration>
-      </plugin>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-surefire-plugin</artifactId>
diff --git a/hadoop-tools/hadoop-tools-dist/pom.xml b/hadoop-tools/hadoop-tools-dist/pom.xml
new file mode 100644
index 00000000000..afa16b54b67
--- /dev/null
+++ b/hadoop-tools/hadoop-tools-dist/pom.xml
@@ -0,0 +1,68 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License. See accompanying LICENSE file.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
+                             http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <groupId>org.apache.hadoop</groupId>
+    <artifactId>hadoop-project-dist</artifactId>
+    <version>0.24.0-SNAPSHOT</version>
+    <relativePath>../../hadoop-project-dist</relativePath>
+  </parent>
+  <groupId>org.apache.hadoop</groupId>
+  <artifactId>hadoop-tools-dist</artifactId>
+  <version>0.24.0-SNAPSHOT</version>
+  <description>Apache Hadoop Tools Dist</description>
+  <name>Apache Hadoop Tools Dist</name>
+  <packaging>jar</packaging>
+
+  <properties>
+    <hadoop.component>tools</hadoop.component>
+    <is.hadoop.component>false</is.hadoop.component>
+  </properties>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-streaming</artifactId>
+      <scope>compile</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-archives</artifactId>
+      <scope>compile</scope>
+    </dependency>
+  </dependencies>
+
+  <build>
+    <plugins>
+      <plugin>
+        <artifactId>maven-deploy-plugin</artifactId>
+        <configuration>
+          <skip>true</skip>
+        </configuration>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.rat</groupId>
+        <artifactId>apache-rat-plugin</artifactId>
+        <configuration>
+          <includes>
+            <include>pom.xml</include>
+          </includes>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+</project>
diff --git a/hadoop-tools/pom.xml b/hadoop-tools/pom.xml
index aa4e20fc560..2a8b5cf8100 100644
--- a/hadoop-tools/pom.xml
+++ b/hadoop-tools/pom.xml
@@ -30,6 +30,7 @@
     <module>hadoop-streaming</module>
     <module>hadoop-archives</module>
+    <module>hadoop-tools-dist</module>
diff --git a/pom.xml b/pom.xml
index c2bc7c25d1e..1ee0471de82 100644
--- a/pom.xml
+++ b/pom.xml
@@ -108,9 +108,6 @@
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-site-plugin</artifactId>
         <version>3.0</version>
-        <configuration>
-          <skipDeploy>true</skipDeploy>
-        </configuration>
@@ -164,7 +161,7 @@
-      <plugin>
+      <plugin>
         <artifactId>maven-site-plugin</artifactId>
         <version>3.0</version>
@@ -173,6 +170,9 @@
             <goal>attach-descriptor</goal>
+        <configuration>
+          <skipDeploy>true</skipDeploy>
+        </configuration>