Merge r1293501 through r1293896 from 0.23.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-0.23-PB@1293899 13f79535-47bb-0310-9956-ffa450edef68
Tsz-wo Sze 2012-02-26 17:58:14 +00:00
commit def75f7560
68 changed files with 450 additions and 247 deletions


@@ -20,12 +20,12 @@
   <parent>
     <groupId>org.apache.hadoop</groupId>
     <artifactId>hadoop-project</artifactId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
     <relativePath>../hadoop-project</relativePath>
   </parent>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-assemblies</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <name>Apache Hadoop Assemblies</name>
   <description>Apache Hadoop Assemblies</description>


@@ -18,12 +18,12 @@
   <parent>
     <groupId>org.apache.hadoop</groupId>
     <artifactId>hadoop-project</artifactId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
     <relativePath>../hadoop-project</relativePath>
   </parent>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-client</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <packaging>jar</packaging>
   <description>Apache Hadoop Client</description>


@@ -17,12 +17,12 @@
   <parent>
     <groupId>org.apache.hadoop</groupId>
     <artifactId>hadoop-project</artifactId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
     <relativePath>../../hadoop-project</relativePath>
   </parent>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-annotations</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <description>Apache Hadoop Annotations</description>
   <name>Apache Hadoop Annotations</name>
   <packaging>jar</packaging>


@@ -17,12 +17,12 @@
   <parent>
     <groupId>org.apache.hadoop</groupId>
     <artifactId>hadoop-project</artifactId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
     <relativePath>../../hadoop-project</relativePath>
   </parent>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-auth-examples</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <packaging>war</packaging>
   <name>Apache Hadoop Auth Examples</name>


@@ -17,12 +17,12 @@
   <parent>
     <groupId>org.apache.hadoop</groupId>
     <artifactId>hadoop-project</artifactId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
     <relativePath>../../hadoop-project</relativePath>
   </parent>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-auth</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <packaging>jar</packaging>
   <name>Apache Hadoop Auth</name>


@@ -71,11 +71,24 @@ Release 0.23-PB - Unreleased
     HADOOP-7931. o.a.h.ipc.WritableRpcEngine should have a way to force
     initialization (atm)
 
+Release 0.23.3 - UNRELEASED
+
+  INCOMPATIBLE CHANGES
+
+  NEW FEATURES
+
+  IMPROVEMENTS
+
+  OPTIMIZATIONS
+
+  BUG FIXES
+
 Release 0.23.2 - UNRELEASED
 
   NEW FEATURES
 
   IMPROVEMENTS
 
     HADOOP-8048. Allow merging of Credentials (Daryn Sharp via tgraves)
 
     HADOOP-8032. mvn site:stage-deploy should be able to use the scp protocol
@@ -85,6 +98,7 @@ Release 0.23.2 - UNRELEASED
     (szetszwo)
 
   OPTIMIZATIONS
+
     HADOOP-8071. Avoid an extra packet in client code when nagling is
     disabled. (todd)


@@ -17,12 +17,12 @@
   <parent>
     <groupId>org.apache.hadoop</groupId>
     <artifactId>hadoop-project-dist</artifactId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
     <relativePath>../../hadoop-project-dist</relativePath>
   </parent>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-common</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <description>Apache Hadoop Common</description>
   <name>Apache Hadoop Common</name>
   <packaging>jar</packaging>


@@ -17,12 +17,12 @@
   <parent>
     <groupId>org.apache.hadoop</groupId>
     <artifactId>hadoop-project</artifactId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
     <relativePath>../hadoop-project</relativePath>
   </parent>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-common-project</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <description>Apache Hadoop Common Project</description>
   <name>Apache Hadoop Common Project</name>
   <packaging>pom</packaging>


@@ -17,12 +17,12 @@
   <parent>
     <groupId>org.apache.hadoop</groupId>
     <artifactId>hadoop-project</artifactId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
     <relativePath>../hadoop-project</relativePath>
   </parent>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-dist</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <description>Apache Hadoop Distribution</description>
   <name>Apache Hadoop Distribution</name>
   <packaging>jar</packaging>


@@ -19,12 +19,12 @@
   <parent>
     <groupId>org.apache.hadoop</groupId>
     <artifactId>hadoop-project</artifactId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
     <relativePath>../../hadoop-project</relativePath>
   </parent>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-hdfs-httpfs</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <packaging>war</packaging>
   <name>Apache Hadoop HttpFS</name>


@@ -122,6 +122,20 @@ Release 0.23-PB - Unreleased
     HDFS-2968. Protocol translator for BlockRecoveryCommand broken when
     multiple blocks need recovery. (todd)
 
+Release 0.23.3 - UNRELEASED
+
+  INCOMPATIBLE CHANGES
+
+  NEW FEATURES
+
+    HDFS-2978. The NameNode should expose name dir statuses via JMX. (atm)
+
+  IMPROVEMENTS
+
+  OPTIMIZATIONS
+
+  BUG FIXES
+
 Release 0.23.2 - UNRELEASED
 
   INCOMPATIBLE CHANGES


@@ -17,12 +17,12 @@
   <parent>
     <groupId>org.apache.hadoop</groupId>
     <artifactId>hadoop-project-dist</artifactId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
     <relativePath>../../hadoop-project-dist</relativePath>
   </parent>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-hdfs</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <description>Apache Hadoop HDFS</description>
   <name>Apache Hadoop HDFS</name>
   <packaging>jar</packaging>


@@ -140,6 +140,8 @@ import org.apache.hadoop.hdfs.server.common.GenerationStamp;
 import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.BlockUCState;
 import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.NamenodeRole;
 import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.StartupOption;
+import org.apache.hadoop.hdfs.server.common.Storage.StorageDirType;
+import org.apache.hadoop.hdfs.server.common.Storage.StorageDirectory;
 import org.apache.hadoop.hdfs.server.common.Storage;
 import org.apache.hadoop.hdfs.server.common.UpgradeStatusReport;
 import org.apache.hadoop.hdfs.server.common.Util;
@@ -4423,6 +4425,30 @@ public class FSNamesystem implements Namesystem, FSClusterStats,
   public String getBlockPoolId() {
     return blockPoolId;
   }
 
+  @Override // NameNodeMXBean
+  public String getNameDirStatuses() {
+    Map<String, Map<File, StorageDirType>> statusMap =
+      new HashMap<String, Map<File, StorageDirType>>();
+
+    Map<File, StorageDirType> activeDirs = new HashMap<File, StorageDirType>();
+    for (Iterator<StorageDirectory> it
+        = getFSImage().getStorage().dirIterator(); it.hasNext();) {
+      StorageDirectory st = it.next();
+      activeDirs.put(st.getRoot(), st.getStorageDirType());
+    }
+    statusMap.put("active", activeDirs);
+
+    List<Storage.StorageDirectory> removedStorageDirs
+        = getFSImage().getStorage().getRemovedStorageDirs();
+    Map<File, StorageDirType> failedDirs = new HashMap<File, StorageDirType>();
+    for (StorageDirectory st : removedStorageDirs) {
+      failedDirs.put(st.getRoot(), st.getStorageDirType());
+    }
+    statusMap.put("failed", failedDirs);
+
+    return JSON.toString(statusMap);
+  }
+
   /** @return the block manager. */
   public BlockManager getBlockManager() {
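Note: the method above serializes a two-level map, keyed "active" and "failed", from each name dir path to its storage type. A minimal parsing sketch in the style of TestNameNodeMXBean further below; the paths and the IMAGE_AND_EDITS type in the payload are illustrative assumptions, not values fixed by this patch:

import java.util.Map;
import org.mortbay.util.ajax.JSON;

public class NameDirStatusesParseSketch {
  @SuppressWarnings("unchecked")
  public static void main(String[] args) {
    // Hypothetical payload shaped like the output of getNameDirStatuses().
    String payload = "{\"active\":{\"/data/1/dfs/name\":\"IMAGE_AND_EDITS\"},"
        + "\"failed\":{\"/data/2/dfs/name\":\"IMAGE_AND_EDITS\"}}";
    Map<String, Map<String, String>> statuses =
        (Map<String, Map<String, String>>) JSON.parse(payload);
    System.out.println("failed name dirs: " + statuses.get("failed").keySet());
  }
}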


@@ -166,4 +166,12 @@ public interface NameNodeMXBean {
    * @return the block pool id
    */
   public String getBlockPoolId();
+
+  /**
+   * Get status information about the directories storing image and edits logs
+   * of the NN.
+   *
+   * @return the name dir status information, as a JSON string.
+   */
+  public String getNameDirStatuses();
 }
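Note: a sketch of reading this attribute from a remote JMX client. The service URL, the port, and the "Hadoop:service=NameNode,name=NameNodeInfo" bean name are deployment-dependent assumptions (JMX remoting has to be enabled on the NameNode JVM):

import javax.management.MBeanServerConnection;
import javax.management.ObjectName;
import javax.management.remote.JMXConnector;
import javax.management.remote.JMXConnectorFactory;
import javax.management.remote.JMXServiceURL;

public class NameDirStatusesClient {
  public static void main(String[] args) throws Exception {
    JMXServiceURL url = new JMXServiceURL(
        "service:jmx:rmi:///jndi/rmi://localhost:8004/jmxrmi");
    JMXConnector connector = JMXConnectorFactory.connect(url);
    try {
      MBeanServerConnection mbs = connector.getMBeanServerConnection();
      // Attribute name follows the getter: getNameDirStatuses -> "NameDirStatuses".
      ObjectName name = new ObjectName("Hadoop:service=NameNode,name=NameNodeInfo");
      System.out.println(mbs.getAttribute(name, "NameDirStatuses"));
    } finally {
      connector.close();
    }
  }
}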


@@ -17,23 +17,33 @@
  */
 package org.apache.hadoop.hdfs.server.namenode;
 
+import static org.junit.Assert.*;
+
+import java.io.File;
 import java.lang.management.ManagementFactory;
+import java.net.URI;
+import java.util.Collection;
+import java.util.Map;
 
 import javax.management.MBeanServer;
 import javax.management.ObjectName;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
 import org.apache.hadoop.util.VersionInfo;
 
 import org.junit.Test;
+import org.mortbay.util.ajax.JSON;
+
 import junit.framework.Assert;
 
 /**
  * Class for testing {@link NameNodeMXBean} implementation
  */
 public class TestNameNodeMXBean {
+  @SuppressWarnings({ "unchecked", "deprecation" })
   @Test
   public void testNameNodeMXBeanInfo() throws Exception {
     Configuration conf = new Configuration();
@@ -88,8 +98,46 @@ public class TestNameNodeMXBean {
       String deadnodeinfo = (String) (mbs.getAttribute(mxbeanName,
           "DeadNodes"));
       Assert.assertEquals(fsn.getDeadNodes(), deadnodeinfo);
+      // get attribute NameDirStatuses
+      String nameDirStatuses = (String) (mbs.getAttribute(mxbeanName,
+          "NameDirStatuses"));
+      Assert.assertEquals(fsn.getNameDirStatuses(), nameDirStatuses);
+      Map<String, Map<String, String>> statusMap =
+        (Map<String, Map<String, String>>) JSON.parse(nameDirStatuses);
+      Collection<URI> nameDirUris = cluster.getNameDirs(0);
+      for (URI nameDirUri : nameDirUris) {
+        File nameDir = new File(nameDirUri);
+        System.out.println("Checking for the presence of " + nameDir +
+            " in active name dirs.");
+        assertTrue(statusMap.get("active").containsKey(nameDir.getAbsolutePath()));
+      }
+      assertEquals(2, statusMap.get("active").size());
+      assertEquals(0, statusMap.get("failed").size());
+
+      // This will cause the first dir to fail.
+      File failedNameDir = new File(nameDirUris.toArray(new URI[0])[0]);
+      assertEquals(0, FileUtil.chmod(failedNameDir.getAbsolutePath(), "000"));
+      cluster.getNameNodeRpc().rollEditLog();
+
+      nameDirStatuses = (String) (mbs.getAttribute(mxbeanName,
+          "NameDirStatuses"));
+      statusMap = (Map<String, Map<String, String>>) JSON.parse(nameDirStatuses);
+      for (URI nameDirUri : nameDirUris) {
+        File nameDir = new File(nameDirUri);
+        String expectedStatus =
+            nameDir.equals(failedNameDir) ? "failed" : "active";
+        System.out.println("Checking for the presence of " + nameDir +
+            " in " + expectedStatus + " name dirs.");
+        assertTrue(statusMap.get(expectedStatus).containsKey(
+            nameDir.getAbsolutePath()));
+      }
+      assertEquals(1, statusMap.get("active").size());
+      assertEquals(1, statusMap.get("failed").size());
     } finally {
       if (cluster != null) {
+        for (URI dir : cluster.getNameDirs(0)) {
+          FileUtil.chmod(new File(dir).toString(), "700");
+        }
         cluster.shutdown();
       }
     }


@@ -17,12 +17,12 @@
   <parent>
     <groupId>org.apache.hadoop</groupId>
     <artifactId>hadoop-project</artifactId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
     <relativePath>../hadoop-project</relativePath>
   </parent>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-hdfs-project</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <description>Apache Hadoop HDFS Project</description>
   <name>Apache Hadoop HDFS Project</name>
   <packaging>pom</packaging>


@@ -29,6 +29,18 @@ Release 0.23-PB - Unreleased
     MAPREDUCE-2942. TestNMAuditLogger.testNMAuditLoggerWithIP failing (Thomas Graves
     via mahadev)
 
+Release 0.23.3 - UNRELEASED
+
+  INCOMPATIBLE CHANGES
+
+  NEW FEATURES
+
+  IMPROVEMENTS
+
+  OPTIMIZATIONS
+
+  BUG FIXES
+
 Release 0.23.2 - UNRELEASED
 
   INCOMPATIBLE CHANGES
@@ -52,9 +64,13 @@ Release 0.23.2 - UNRELEASED
     MAPREDUCE-3730. Modified RM to allow restarted NMs to be able to join the
     cluster without waiting for expiry. (Jason Lowe via vinodkv)
 
+    MAPREDUCE-2793. Corrected AppIDs, JobIDs, TaskAttemptIDs to be of correct
+    format on the web pages. (Bikas Saha via vinodkv)
+
   OPTIMIZATIONS
 
   BUG FIXES
 
     MAPREDUCE-3918 proc_historyserver no longer in command line arguments for
     HistoryServer (Jon Eagles via bobby)
@@ -90,6 +106,13 @@ Release 0.23.2 - UNRELEASED
     MAPREDUCE-3904 Job history produced with mapreduce.cluster.acls.enabled
     false can not be viewed with mapreduce.cluster.acls.enabled true
     (Jonathon Eagles via tgraves)
 
+    MAPREDUCE-3910. Fixed a bug in CapacityScheduler LeafQueue which was causing
+    app-submission to fail. (John George via vinodkv)
+
+    MAPREDUCE-3686. Fixed two bugs in Counters because of which web app displays
+    zero counter values for framework counters. (Bhallamudi Venkata Siva Kamesh
+    via vinodkv)
+
 Release 0.23.1 - 2012-02-17


@@ -16,12 +16,12 @@
   <parent>
     <artifactId>hadoop-mapreduce-client</artifactId>
     <groupId>org.apache.hadoop</groupId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
   </parent>
   <modelVersion>4.0.0</modelVersion>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-mapreduce-client-app</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <name>hadoop-mapreduce-client-app</name>
 
   <properties>


@@ -99,6 +99,14 @@ public class AMWebServices {
     try {
       jobId = MRApps.toJobID(jid);
     } catch (YarnException e) {
+      // TODO: after MAPREDUCE-2793 YarnException is probably not expected here
+      // anymore but keeping it for now just in case other stuff starts failing.
+      // Also, the webservice should ideally return BadRequest (HTTP:400) when
+      // the id is malformed instead of NotFound (HTTP:404). The webserver on
+      // top of which AMWebServices is built seems to automatically do that for
+      // unhandled exceptions
+      throw new NotFoundException(e.getMessage());
+    } catch (IllegalArgumentException e) {
       throw new NotFoundException(e.getMessage());
     }
     if (jobId == null) {
@@ -121,10 +129,18 @@ public class AMWebServices {
     try {
       taskID = MRApps.toTaskID(tid);
     } catch (YarnException e) {
+      // TODO: after MAPREDUCE-2793 YarnException is probably not expected here
+      // anymore but keeping it for now just in case other stuff starts failing.
+      // Also, the webservice should ideally return BadRequest (HTTP:400) when
+      // the id is malformed instead of NotFound (HTTP:404). The webserver on
+      // top of which AMWebServices is built seems to automatically do that for
+      // unhandled exceptions
       throw new NotFoundException(e.getMessage());
     } catch (NumberFormatException ne) {
       throw new NotFoundException(ne.getMessage());
+    } catch (IllegalArgumentException e) {
+      throw new NotFoundException(e.getMessage());
     }
     if (taskID == null) {
       throw new NotFoundException("taskid " + tid + " not found or invalid");
     }
@@ -146,9 +162,17 @@ public class AMWebServices {
     try {
       attemptId = MRApps.toTaskAttemptID(attId);
     } catch (YarnException e) {
+      // TODO: after MAPREDUCE-2793 YarnException is probably not expected here
+      // anymore but keeping it for now just in case other stuff starts failing.
+      // Also, the webservice should ideally return BadRequest (HTTP:400) when
+      // the id is malformed instead of NotFound (HTTP:404). The webserver on
+      // top of which AMWebServices is built seems to automatically do that for
+      // unhandled exceptions
       throw new NotFoundException(e.getMessage());
     } catch (NumberFormatException ne) {
       throw new NotFoundException(ne.getMessage());
+    } catch (IllegalArgumentException e) {
+      throw new NotFoundException(e.getMessage());
     }
     if (attemptId == null) {
       throw new NotFoundException("task attempt id " + attId


@@ -106,6 +106,20 @@ public class MockJobs extends MockApps {
     return newAppName();
   }
 
+  /**
+   * Create numJobs in a map with jobs having appId==jobId
+   */
+  public static Map<JobId, Job> newJobs(int numJobs, int numTasksPerJob,
+      int numAttemptsPerTask) {
+    Map<JobId, Job> map = Maps.newHashMap();
+    for (int j = 0; j < numJobs; ++j) {
+      ApplicationId appID = MockJobs.newAppID(j);
+      Job job = newJob(appID, j, numTasksPerJob, numAttemptsPerTask);
+      map.put(job.getID(), job);
+    }
+    return map;
+  }
+
   public static Map<JobId, Job> newJobs(ApplicationId appID, int numJobsPerApp,
       int numTasksPerJob, int numAttemptsPerTask) {
     Map<JobId, Job> map = Maps.newHashMap();
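Note: a usage sketch for the new overload, assuming the test-tree imports (MockJobs lives alongside the MR AM tests; the package paths below are assumptions). Each generated job gets its own ApplicationId whose id matches the job ordinal:

import java.util.Map;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.app.MockJobs;
import org.apache.hadoop.mapreduce.v2.app.job.Job;

public class MockJobsSketch {
  public static void main(String[] args) {
    // Three mock jobs, two tasks per job, one attempt per task.
    Map<JobId, Job> jobs = MockJobs.newJobs(3, 2, 1);
    for (JobId id : jobs.keySet()) {
      System.out.println(id);
    }
  }
}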


@@ -396,36 +396,36 @@ public class TestAMWebServicesAttempts extends JerseyTest {
   public void testTaskAttemptIdBogus() throws JSONException, Exception {
 
     testTaskAttemptIdErrorGeneric("bogusid",
-        "java.lang.Exception: Error parsing attempt ID: bogusid");
+        "java.lang.Exception: TaskAttemptId string : bogusid is not properly formed");
   }
 
   @Test
   public void testTaskAttemptIdNonExist() throws JSONException, Exception {
 
     testTaskAttemptIdErrorGeneric(
-        "attempt_12345_0_0_r_1_0",
-        "java.lang.Exception: Error getting info on task attempt id attempt_12345_0_0_r_1_0");
+        "attempt_0_12345_m_000000_0",
+        "java.lang.Exception: Error getting info on task attempt id attempt_0_12345_m_000000_0");
   }
 
   @Test
   public void testTaskAttemptIdInvalid() throws JSONException, Exception {
 
-    testTaskAttemptIdErrorGeneric("attempt_12345_0_0_d_1_0",
-        "java.lang.Exception: Unknown task symbol: d");
+    testTaskAttemptIdErrorGeneric("attempt_0_12345_d_000000_0",
+        "java.lang.Exception: Bad TaskType identifier. TaskAttemptId string : attempt_0_12345_d_000000_0 is not properly formed.");
   }
 
   @Test
   public void testTaskAttemptIdInvalid2() throws JSONException, Exception {
 
-    testTaskAttemptIdErrorGeneric("attempt_12345_0_r_1_0",
-        "java.lang.Exception: For input string: \"r\"");
+    testTaskAttemptIdErrorGeneric("attempt_12345_m_000000_0",
+        "java.lang.Exception: TaskAttemptId string : attempt_12345_m_000000_0 is not properly formed");
   }
 
   @Test
   public void testTaskAttemptIdInvalid3() throws JSONException, Exception {
 
-    testTaskAttemptIdErrorGeneric("attempt_12345_0_0_r_1",
-        "java.lang.Exception: Error parsing attempt ID: attempt_12345_0_0_r_1");
+    testTaskAttemptIdErrorGeneric("attempt_0_12345_m_000000",
+        "java.lang.Exception: TaskAttemptId string : attempt_0_12345_m_000000 is not properly formed");
   }
 
   private void testTaskAttemptIdErrorGeneric(String attid, String error)


@@ -320,7 +320,7 @@ public class TestAMWebServicesJobs extends JerseyTest {
     try {
       r.path("ws").path("v1").path("mapreduce").path("jobs")
-          .path("job_1234_1_2").get(JSONObject.class);
+          .path("job_0_1234").get(JSONObject.class);
       fail("should have thrown exception on invalid uri");
     } catch (UniformInterfaceException ue) {
       ClientResponse response = ue.getResponse();
@@ -333,7 +333,7 @@ public class TestAMWebServicesJobs extends JerseyTest {
       String type = exception.getString("exception");
       String classname = exception.getString("javaClassName");
       WebServicesTestUtils.checkStringMatch("exception message",
-          "java.lang.Exception: job, job_1234_1_2, is not found", message);
+          "java.lang.Exception: job, job_0_1234, is not found", message);
       WebServicesTestUtils.checkStringMatch("exception type",
           "NotFoundException", type);
       WebServicesTestUtils.checkStringMatch("exception classname",
@@ -351,7 +351,7 @@ public class TestAMWebServicesJobs extends JerseyTest {
       fail("should have thrown exception on invalid uri");
     } catch (UniformInterfaceException ue) {
       ClientResponse response = ue.getResponse();
-      assertEquals(Status.BAD_REQUEST, response.getClientResponseStatus());
+      assertEquals(Status.NOT_FOUND, response.getClientResponseStatus());
       assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
       JSONObject msg = response.getEntity(JSONObject.class);
       JSONObject exception = msg.getJSONObject("RemoteException");
@@ -374,7 +374,7 @@ public class TestAMWebServicesJobs extends JerseyTest {
       fail("should have thrown exception on invalid uri");
     } catch (UniformInterfaceException ue) {
       ClientResponse response = ue.getResponse();
-      assertEquals(Status.BAD_REQUEST, response.getClientResponseStatus());
+      assertEquals(Status.NOT_FOUND, response.getClientResponseStatus());
       assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
       JSONObject msg = response.getEntity(JSONObject.class);
       JSONObject exception = msg.getJSONObject("RemoteException");
@@ -397,7 +397,7 @@ public class TestAMWebServicesJobs extends JerseyTest {
       fail("should have thrown exception on invalid uri");
     } catch (UniformInterfaceException ue) {
       ClientResponse response = ue.getResponse();
-      assertEquals(Status.BAD_REQUEST, response.getClientResponseStatus());
+      assertEquals(Status.NOT_FOUND, response.getClientResponseStatus());
       assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
       String msg = response.getEntity(String.class);
       System.out.println(msg);
@@ -418,11 +418,12 @@ public class TestAMWebServicesJobs extends JerseyTest {
   private void verifyJobIdInvalid(String message, String type, String classname) {
     WebServicesTestUtils.checkStringMatch("exception message",
-        "For input string: \"foo\"", message);
+        "java.lang.Exception: JobId string : job_foo is not properly formed",
+        message);
     WebServicesTestUtils.checkStringMatch("exception type",
-        "NumberFormatException", type);
+        "NotFoundException", type);
     WebServicesTestUtils.checkStringMatch("exception classname",
-        "java.lang.NumberFormatException", classname);
+        "org.apache.hadoop.yarn.webapp.NotFoundException", classname);
   }
 
   @Test
@@ -443,8 +444,11 @@ public class TestAMWebServicesJobs extends JerseyTest {
       String message = exception.getString("message");
       String type = exception.getString("exception");
       String classname = exception.getString("javaClassName");
-      WebServicesTestUtils.checkStringMatch("exception message",
-          "java.lang.Exception: Error parsing job ID: bogusfoo", message);
+      WebServicesTestUtils
+          .checkStringMatch(
+              "exception message",
+              "java.lang.Exception: JobId string : bogusfoo is not properly formed",
+              message);
       WebServicesTestUtils.checkStringMatch("exception type",
           "NotFoundException", type);
       WebServicesTestUtils.checkStringMatch("exception classname",


@@ -424,7 +424,8 @@ public class TestAMWebServicesTasks extends JerseyTest {
       String type = exception.getString("exception");
       String classname = exception.getString("javaClassName");
       WebServicesTestUtils.checkStringMatch("exception message",
-          "java.lang.Exception: Error parsing task ID: bogustaskid", message);
+          "java.lang.Exception: TaskId string : "
+          + "bogustaskid is not properly formed", message);
       WebServicesTestUtils.checkStringMatch("exception type",
           "NotFoundException", type);
       WebServicesTestUtils.checkStringMatch("exception classname",
@@ -439,7 +440,7 @@ public class TestAMWebServicesTasks extends JerseyTest {
     Map<JobId, Job> jobsMap = appContext.getAllJobs();
     for (JobId id : jobsMap.keySet()) {
       String jobId = MRApps.toString(id);
-      String tid = "task_1234_0_0_m_0";
+      String tid = "task_0_0000_m_000000";
       try {
         r.path("ws").path("v1").path("mapreduce").path("jobs").path(jobId)
             .path("tasks").path(tid).get(JSONObject.class);
@@ -455,7 +456,7 @@ public class TestAMWebServicesTasks extends JerseyTest {
         String type = exception.getString("exception");
         String classname = exception.getString("javaClassName");
         WebServicesTestUtils.checkStringMatch("exception message",
-            "java.lang.Exception: task not found with id task_1234_0_0_m_0",
+            "java.lang.Exception: task not found with id task_0_0000_m_000000",
             message);
         WebServicesTestUtils.checkStringMatch("exception type",
             "NotFoundException", type);
@@ -471,7 +472,7 @@ public class TestAMWebServicesTasks extends JerseyTest {
     Map<JobId, Job> jobsMap = appContext.getAllJobs();
     for (JobId id : jobsMap.keySet()) {
       String jobId = MRApps.toString(id);
-      String tid = "task_1234_0_0_d_0";
+      String tid = "task_0_0000_d_000000";
       try {
         r.path("ws").path("v1").path("mapreduce").path("jobs").path(jobId)
             .path("tasks").path(tid).get(JSONObject.class);
@@ -487,7 +488,8 @@ public class TestAMWebServicesTasks extends JerseyTest {
         String type = exception.getString("exception");
         String classname = exception.getString("javaClassName");
         WebServicesTestUtils.checkStringMatch("exception message",
-            "java.lang.Exception: Unknown task symbol: d", message);
+            "java.lang.Exception: Bad TaskType identifier. TaskId string : "
+            + "task_0_0000_d_000000 is not properly formed.", message);
         WebServicesTestUtils.checkStringMatch("exception type",
             "NotFoundException", type);
         WebServicesTestUtils.checkStringMatch("exception classname",
@@ -502,7 +504,7 @@ public class TestAMWebServicesTasks extends JerseyTest {
     Map<JobId, Job> jobsMap = appContext.getAllJobs();
     for (JobId id : jobsMap.keySet()) {
       String jobId = MRApps.toString(id);
-      String tid = "task_1234_0_m_0";
+      String tid = "task_0_m_000000";
       try {
         r.path("ws").path("v1").path("mapreduce").path("jobs").path(jobId)
             .path("tasks").path(tid).get(JSONObject.class);
@@ -518,7 +520,8 @@ public class TestAMWebServicesTasks extends JerseyTest {
         String type = exception.getString("exception");
         String classname = exception.getString("javaClassName");
         WebServicesTestUtils.checkStringMatch("exception message",
-            "java.lang.Exception: For input string: \"m\"", message);
+            "java.lang.Exception: TaskId string : "
+            + "task_0_m_000000 is not properly formed", message);
         WebServicesTestUtils.checkStringMatch("exception type",
             "NotFoundException", type);
         WebServicesTestUtils.checkStringMatch("exception classname",
@@ -533,7 +536,7 @@ public class TestAMWebServicesTasks extends JerseyTest {
     Map<JobId, Job> jobsMap = appContext.getAllJobs();
     for (JobId id : jobsMap.keySet()) {
       String jobId = MRApps.toString(id);
-      String tid = "task_1234_0_0_m";
+      String tid = "task_0_0000_m";
       try {
         r.path("ws").path("v1").path("mapreduce").path("jobs").path(jobId)
             .path("tasks").path(tid).get(JSONObject.class);
@@ -549,8 +552,8 @@ public class TestAMWebServicesTasks extends JerseyTest {
         String type = exception.getString("exception");
         String classname = exception.getString("javaClassName");
         WebServicesTestUtils.checkStringMatch("exception message",
-            "java.lang.Exception: Error parsing task ID: task_1234_0_0_m",
-            message);
+            "java.lang.Exception: TaskId string : "
+            + "task_0_0000_m is not properly formed", message);
         WebServicesTestUtils.checkStringMatch("exception type",
             "NotFoundException", type);
         WebServicesTestUtils.checkStringMatch("exception classname",


@@ -16,12 +16,12 @@
   <parent>
     <artifactId>hadoop-mapreduce-client</artifactId>
     <groupId>org.apache.hadoop</groupId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
   </parent>
   <modelVersion>4.0.0</modelVersion>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-mapreduce-client-common</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <name>hadoop-mapreduce-client-common</name>
 
   <properties>


@@ -506,11 +506,9 @@ public class JobHistoryUtils {
       sb.append(address.getHostName());
     }
     sb.append(":").append(address.getPort());
-    sb.append("/jobhistory/job/"); // TODO This will change when the history server
-    // understands apps.
-    // TOOD Use JobId toString once UI stops using _id_id
-    sb.append("job_").append(appId.getClusterTimestamp());
-    sb.append("_").append(appId.getId()).append("_").append(appId.getId());
+    sb.append("/jobhistory/job/");
+    JobID jobId = TypeConverter.fromYarn(appId);
+    sb.append(jobId.toString());
     return sb.toString();
   }
 }
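Note: the link suffix is now the canonical JobID string instead of the hand-rolled job_<timestamp>_<id>_<id> form. A sketch of what gets appended; the timestamp and sequence number are illustrative:

import org.apache.hadoop.mapreduce.JobID;

public class HistoryUrlSketch {
  public static void main(String[] args) {
    JobID jobId = new JobID("1330279000000", 1);
    // Prints /jobhistory/job/job_1330279000000_0001
    System.out.println("/jobhistory/job/" + jobId.toString());
  }
}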


@@ -18,9 +18,6 @@
 package org.apache.hadoop.mapreduce.v2.util;
 
-import static org.apache.hadoop.yarn.util.StringHelper._join;
-import static org.apache.hadoop.yarn.util.StringHelper._split;
-
 import java.io.BufferedReader;
 import java.io.File;
 import java.io.IOException;
@@ -30,7 +27,6 @@
 import java.net.URL;
 import java.util.Arrays;
 import java.util.HashMap;
-import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
@@ -39,7 +35,11 @@ import org.apache.hadoop.classification.InterfaceStability.Unstable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapreduce.JobID;
 import org.apache.hadoop.mapreduce.MRJobConfig;
+import org.apache.hadoop.mapreduce.TaskAttemptID;
+import org.apache.hadoop.mapreduce.TaskID;
+import org.apache.hadoop.mapreduce.TypeConverter;
 import org.apache.hadoop.mapreduce.filecache.DistributedCache;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
@@ -50,12 +50,10 @@ import org.apache.hadoop.yarn.ContainerLogAppender;
 import org.apache.hadoop.yarn.YarnException;
 import org.apache.hadoop.yarn.api.ApplicationConstants.Environment;
 import org.apache.hadoop.yarn.api.ApplicationConstants;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.LocalResource;
 import org.apache.hadoop.yarn.api.records.LocalResourceType;
 import org.apache.hadoop.yarn.api.records.LocalResourceVisibility;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
 import org.apache.hadoop.yarn.util.Apps;
 import org.apache.hadoop.yarn.util.BuilderUtils;
@@ -65,64 +63,28 @@ import org.apache.hadoop.yarn.util.BuilderUtils;
 @Private
 @Unstable
 public class MRApps extends Apps {
-  public static final String JOB = "job";
-  public static final String TASK = "task";
-  public static final String ATTEMPT = "attempt";
-
   public static String toString(JobId jid) {
-    return _join(JOB, jid.getAppId().getClusterTimestamp(), jid.getAppId().getId(), jid.getId());
+    return jid.toString();
   }
 
   public static JobId toJobID(String jid) {
-    Iterator<String> it = _split(jid).iterator();
-    return toJobID(JOB, jid, it);
-  }
-
-  // mostly useful for parsing task/attempt id like strings
-  public static JobId toJobID(String prefix, String s, Iterator<String> it) {
-    ApplicationId appId = toAppID(prefix, s, it);
-    shouldHaveNext(prefix, s, it);
-    JobId jobId = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(JobId.class);
-    jobId.setAppId(appId);
-    jobId.setId(Integer.parseInt(it.next()));
-    return jobId;
+    return TypeConverter.toYarn(JobID.forName(jid));
   }
 
   public static String toString(TaskId tid) {
-    return _join("task", tid.getJobId().getAppId().getClusterTimestamp(), tid.getJobId().getAppId().getId(),
-                 tid.getJobId().getId(), taskSymbol(tid.getTaskType()), tid.getId());
+    return tid.toString();
   }
 
   public static TaskId toTaskID(String tid) {
-    Iterator<String> it = _split(tid).iterator();
-    return toTaskID(TASK, tid, it);
-  }
-
-  public static TaskId toTaskID(String prefix, String s, Iterator<String> it) {
-    JobId jid = toJobID(prefix, s, it);
-    shouldHaveNext(prefix, s, it);
-    TaskId tid = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(TaskId.class);
-    tid.setJobId(jid);
-    tid.setTaskType(taskType(it.next()));
-    shouldHaveNext(prefix, s, it);
-    tid.setId(Integer.parseInt(it.next()));
-    return tid;
+    return TypeConverter.toYarn(TaskID.forName(tid));
   }
 
   public static String toString(TaskAttemptId taid) {
-    return _join("attempt", taid.getTaskId().getJobId().getAppId().getClusterTimestamp(),
-                 taid.getTaskId().getJobId().getAppId().getId(), taid.getTaskId().getJobId().getId(),
-                 taskSymbol(taid.getTaskId().getTaskType()), taid.getTaskId().getId(), taid.getId());
+    return taid.toString();
   }
 
   public static TaskAttemptId toTaskAttemptID(String taid) {
-    Iterator<String> it = _split(taid).iterator();
-    TaskId tid = toTaskID(ATTEMPT, taid, it);
-    shouldHaveNext(ATTEMPT, taid, it);
-    TaskAttemptId taId = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(TaskAttemptId.class);
-    taId.setTaskId(tid);
-    taId.setId(Integer.parseInt(it.next()));
-    return taId;
+    return TypeConverter.toYarn(TaskAttemptID.forName(taid));
   }
 
   public static String taskSymbol(TaskType type) {
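Note: MRApps now formats and parses ids by delegating to the stable JobID/TaskID/TaskAttemptID string forms (via TypeConverter), so malformed input fails with IllegalArgumentException instead of YarnException. A round-trip sketch consistent with the updated TestMRApps below:

import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
import org.apache.hadoop.mapreduce.v2.util.MRApps;

public class IdRoundTripSketch {
  public static void main(String[] args) {
    TaskAttemptId taid = MRApps.toTaskAttemptID("attempt_0_1_m_2_3");
    System.out.println(MRApps.toString(taid)); // attempt_0_0001_m_000002_3
    try {
      // Old-style id: the task-type slot holds "0", so parsing fails.
      MRApps.toTaskAttemptID("attempt_0_0_0_m_0");
    } catch (IllegalArgumentException expected) {
      System.out.println(expected.getMessage());
    }
  }
}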


@@ -43,18 +43,18 @@ public class TestMRApps {
   @Test public void testJobIDtoString() {
     JobId jid = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(JobId.class);
     jid.setAppId(RecordFactoryProvider.getRecordFactory(null).newRecordInstance(ApplicationId.class));
-    assertEquals("job_0_0_0", MRApps.toString(jid));
+    assertEquals("job_0_0000", MRApps.toString(jid));
   }
 
   @Test public void testToJobID() {
-    JobId jid = MRApps.toJobID("job_1_1_1");
+    JobId jid = MRApps.toJobID("job_1_1");
     assertEquals(1, jid.getAppId().getClusterTimestamp());
     assertEquals(1, jid.getAppId().getId());
-    assertEquals(1, jid.getId());
+    assertEquals(1, jid.getId()); // tests against some proto.id and not a job.id field
   }
 
-  @Test(expected=YarnException.class) public void testJobIDShort() {
-    MRApps.toJobID("job_0_0");
+  @Test(expected=IllegalArgumentException.class) public void testJobIDShort() {
+    MRApps.toJobID("job_0_0_0");
   }
 
   //TODO_get.set
@@ -68,29 +68,29 @@ public class TestMRApps {
     type = TaskType.REDUCE;
     System.err.println(type);
     System.err.println(tid.getTaskType());
-    assertEquals("task_0_0_0_m_0", MRApps.toString(tid));
+    assertEquals("task_0_0000_m_000000", MRApps.toString(tid));
     tid.setTaskType(TaskType.REDUCE);
-    assertEquals("task_0_0_0_r_0", MRApps.toString(tid));
+    assertEquals("task_0_0000_r_000000", MRApps.toString(tid));
   }
 
   @Test public void testToTaskID() {
-    TaskId tid = MRApps.toTaskID("task_1_2_3_r_4");
+    TaskId tid = MRApps.toTaskID("task_1_2_r_3");
     assertEquals(1, tid.getJobId().getAppId().getClusterTimestamp());
     assertEquals(2, tid.getJobId().getAppId().getId());
-    assertEquals(3, tid.getJobId().getId());
+    assertEquals(2, tid.getJobId().getId());
     assertEquals(TaskType.REDUCE, tid.getTaskType());
-    assertEquals(4, tid.getId());
-    tid = MRApps.toTaskID("task_1_2_3_m_4");
+    assertEquals(3, tid.getId());
+    tid = MRApps.toTaskID("task_1_2_m_3");
     assertEquals(TaskType.MAP, tid.getTaskType());
   }
 
-  @Test(expected=YarnException.class) public void testTaskIDShort() {
-    MRApps.toTaskID("task_0_0_0_m");
+  @Test(expected=IllegalArgumentException.class) public void testTaskIDShort() {
+    MRApps.toTaskID("task_0_0000_m");
   }
 
-  @Test(expected=YarnException.class) public void testTaskIDBadType() {
-    MRApps.toTaskID("task_0_0_0_x_0");
+  @Test(expected=IllegalArgumentException.class) public void testTaskIDBadType() {
+    MRApps.toTaskID("task_0_0000_x_000000");
   }
 
   //TODO_get.set
@@ -100,19 +100,19 @@ public class TestMRApps {
     taid.getTaskId().setTaskType(TaskType.MAP);
     taid.getTaskId().setJobId(RecordFactoryProvider.getRecordFactory(null).newRecordInstance(JobId.class));
     taid.getTaskId().getJobId().setAppId(RecordFactoryProvider.getRecordFactory(null).newRecordInstance(ApplicationId.class));
-    assertEquals("attempt_0_0_0_m_0_0", MRApps.toString(taid));
+    assertEquals("attempt_0_0000_m_000000_0", MRApps.toString(taid));
   }
 
   @Test public void testToTaskAttemptID() {
-    TaskAttemptId taid = MRApps.toTaskAttemptID("attempt_0_1_2_m_3_4");
+    TaskAttemptId taid = MRApps.toTaskAttemptID("attempt_0_1_m_2_3");
     assertEquals(0, taid.getTaskId().getJobId().getAppId().getClusterTimestamp());
     assertEquals(1, taid.getTaskId().getJobId().getAppId().getId());
-    assertEquals(2, taid.getTaskId().getJobId().getId());
-    assertEquals(3, taid.getTaskId().getId());
-    assertEquals(4, taid.getId());
+    assertEquals(1, taid.getTaskId().getJobId().getId());
+    assertEquals(2, taid.getTaskId().getId());
+    assertEquals(3, taid.getId());
   }
 
-  @Test(expected=YarnException.class) public void testTaskAttemptIDShort() {
+  @Test(expected=IllegalArgumentException.class) public void testTaskAttemptIDShort() {
     MRApps.toTaskAttemptID("attempt_0_0_0_m_0");
   }


@@ -16,12 +16,12 @@
   <parent>
     <artifactId>hadoop-mapreduce-client</artifactId>
     <groupId>org.apache.hadoop</groupId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
   </parent>
   <modelVersion>4.0.0</modelVersion>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-mapreduce-client-core</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <name>hadoop-mapreduce-client-core</name>
 
   <properties>


@@ -159,6 +159,7 @@ public class TaskAttemptID extends org.apache.hadoop.mapred.ID {
       ) throws IllegalArgumentException {
     if(str == null)
       return null;
+    String exceptionMsg = null;
     try {
       String[] parts = str.split(Character.toString(SEPARATOR));
       if(parts.length == 6) {
@@ -171,14 +172,19 @@ public class TaskAttemptID extends org.apache.hadoop.mapred.ID {
               Integer.parseInt(parts[2]),
               t, Integer.parseInt(parts[4]),
               Integer.parseInt(parts[5]));
-          } else throw new Exception();
+          } else
+            exceptionMsg = "Bad TaskType identifier. TaskAttemptId string : "
+                + str + " is not properly formed.";
         }
       }
     } catch (Exception ex) {
       //fall below
     }
-    throw new IllegalArgumentException("TaskAttemptId string : " + str
-        + " is not properly formed");
+    if (exceptionMsg == null) {
+      exceptionMsg = "TaskAttemptId string : " + str
+          + " is not properly formed";
+    }
+    throw new IllegalArgumentException(exceptionMsg);
   }
 }


@@ -184,6 +184,7 @@ public class TaskID extends org.apache.hadoop.mapred.ID {
       throws IllegalArgumentException {
     if(str == null)
       return null;
+    String exceptionMsg = null;
     try {
       String[] parts = str.split("_");
       if(parts.length == 5) {
@@ -196,13 +197,17 @@ public class TaskID extends org.apache.hadoop.mapred.ID {
               Integer.parseInt(parts[2]),
               t,
               Integer.parseInt(parts[4]));
-          } else throw new Exception();
+          } else
+            exceptionMsg = "Bad TaskType identifier. TaskId string : " + str
+                + " is not properly formed.";
         }
       }
     }catch (Exception ex) {//fall below
     }
-    throw new IllegalArgumentException("TaskId string : " + str
-        + " is not properly formed");
+    if (exceptionMsg == null) {
+      exceptionMsg = "TaskId string : " + str + " is not properly formed";
+    }
+    throw new IllegalArgumentException(exceptionMsg);
   }
 
   /**
    * Gets the character representing the {@link TaskType}
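Note: the effect of the exceptionMsg change is a more specific message when only the task-type character is bad. A sketch matching the messages asserted in the updated web-service tests:

import org.apache.hadoop.mapreduce.TaskID;

public class TaskIdForNameSketch {
  public static void main(String[] args) {
    try {
      TaskID.forName("task_0_0000_d_000000"); // 'd' is not a TaskType
    } catch (IllegalArgumentException e) {
      // Bad TaskType identifier. TaskId string : task_0_0000_d_000000 is not properly formed.
      System.out.println(e.getMessage());
    }
  }
}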


@@ -326,12 +326,10 @@ public abstract class AbstractCounters<C extends Counter,
    */
   public synchronized void incrAllCounters(AbstractCounters<C, G> other) {
     for(G right : other) {
-      G left = groups.get(right.getName());
+      String groupName = right.getName();
+      G left = (isFrameworkGroup(groupName) ? fgroups : groups).get(groupName);
       if (left == null) {
-        limits.checkGroups(groups.size() + 1);
-        left = groupFactory.newGroup(right.getName(), right.getDisplayName(),
-                                     limits);
-        groups.put(right.getName(), left);
+        left = addGroup(groupName, right.getDisplayName());
       }
       left.incrAllCounters(right);
     }


@@ -107,6 +107,8 @@ public abstract class CounterGroupFactory<C extends Counter,
     if (gf != null) return gf.newGroup(name);
     if (name.equals(FS_GROUP_NAME)) {
       return newFileSystemGroup();
+    } else if (s2i.get(name) != null) {
+      return newFrameworkGroup(s2i.get(name));
     }
     return newGenericGroup(name, displayName, limits);
   }
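Note: together these two hunks make counter merging framework-group aware (MAPREDUCE-3686): incrAllCounters now looks the group up in the framework map when appropriate, and newGroup reconstructs a real framework group instead of a generic one. A sketch of the kind of merge that previously displayed zeros:

import org.apache.hadoop.mapreduce.Counters;
import org.apache.hadoop.mapreduce.TaskCounter;

public class CounterMergeSketch {
  public static void main(String[] args) {
    Counters a = new Counters();
    Counters b = new Counters();
    b.findCounter(TaskCounter.MAP_INPUT_RECORDS).increment(5);
    a.incrAllCounters(b);
    // With the fix this prints 5; previously the merged values landed in a
    // duplicate generic group, so framework-counter reads here stayed 0.
    System.out.println(a.findCounter(TaskCounter.MAP_INPUT_RECORDS).getValue());
  }
}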


@@ -16,12 +16,12 @@
   <parent>
     <artifactId>hadoop-mapreduce-client</artifactId>
     <groupId>org.apache.hadoop</groupId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
   </parent>
   <modelVersion>4.0.0</modelVersion>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-mapreduce-client-hs</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <name>hadoop-mapreduce-client-hs</name>
 
   <properties>


@@ -408,36 +408,40 @@ public class TestHsWebServicesAttempts extends JerseyTest {
   public void testTaskAttemptIdBogus() throws JSONException, Exception {

     testTaskAttemptIdErrorGeneric("bogusid",
-        "java.lang.Exception: Error parsing attempt ID: bogusid");
+        "java.lang.Exception: TaskAttemptId string : "
+            + "bogusid is not properly formed");
   }

   @Test
   public void testTaskAttemptIdNonExist() throws JSONException, Exception {

     testTaskAttemptIdErrorGeneric(
-        "attempt_12345_0_0_r_1_0",
-        "java.lang.Exception: Error getting info on task attempt id attempt_12345_0_0_r_1_0");
+        "attempt_0_1234_m_000000_0",
+        "java.lang.Exception: Error getting info on task attempt id attempt_0_1234_m_000000_0");
   }

   @Test
   public void testTaskAttemptIdInvalid() throws JSONException, Exception {

-    testTaskAttemptIdErrorGeneric("attempt_12345_0_0_d_1_0",
-        "java.lang.Exception: Unknown task symbol: d");
+    testTaskAttemptIdErrorGeneric("attempt_0_1234_d_000000_0",
+        "java.lang.Exception: Bad TaskType identifier. TaskAttemptId string : "
+            + "attempt_0_1234_d_000000_0 is not properly formed.");
   }

   @Test
   public void testTaskAttemptIdInvalid2() throws JSONException, Exception {

-    testTaskAttemptIdErrorGeneric("attempt_12345_0_r_1_0",
-        "java.lang.Exception: For input string: \"r\"");
+    testTaskAttemptIdErrorGeneric("attempt_1234_m_000000_0",
+        "java.lang.Exception: TaskAttemptId string : "
+            + "attempt_1234_m_000000_0 is not properly formed");
   }

   @Test
   public void testTaskAttemptIdInvalid3() throws JSONException, Exception {

-    testTaskAttemptIdErrorGeneric("attempt_12345_0_0_r_1",
-        "java.lang.Exception: Error parsing attempt ID: attempt_12345_0_0_r_1");
+    testTaskAttemptIdErrorGeneric("attempt_0_1234_m_000000",
+        "java.lang.Exception: TaskAttemptId string : "
+            + "attempt_0_1234_m_000000 is not properly formed");
   }

   private void testTaskAttemptIdErrorGeneric(String attid, String error)
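
The expected messages above ("TaskAttemptId string : ... is not properly formed") match what TaskAttemptID.forName throws for malformed input, so the tests now assert the parser's own wording rather than a hand-rolled message. A hedged sketch of reproducing one of them:

    // Assumes org.apache.hadoop.mapreduce.TaskAttemptID; forName throws
    // IllegalArgumentException for strings missing the trailing attempt number.
    try {
      TaskAttemptID.forName("attempt_0_1234_m_000000");
    } catch (IllegalArgumentException e) {
      System.out.println(e.getMessage()); // "... is not properly formed"
    }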

View File

@ -367,7 +367,7 @@ public class TestHsWebServicesJobs extends JerseyTest {
try { try {
r.path("ws").path("v1").path("history").path("mapreduce").path("jobs") r.path("ws").path("v1").path("history").path("mapreduce").path("jobs")
.path("job_1234_1_2").get(JSONObject.class); .path("job_0_1234").get(JSONObject.class);
fail("should have thrown exception on invalid uri"); fail("should have thrown exception on invalid uri");
} catch (UniformInterfaceException ue) { } catch (UniformInterfaceException ue) {
ClientResponse response = ue.getResponse(); ClientResponse response = ue.getResponse();
@ -380,7 +380,7 @@ public class TestHsWebServicesJobs extends JerseyTest {
String type = exception.getString("exception"); String type = exception.getString("exception");
String classname = exception.getString("javaClassName"); String classname = exception.getString("javaClassName");
WebServicesTestUtils.checkStringMatch("exception message", WebServicesTestUtils.checkStringMatch("exception message",
"java.lang.Exception: job, job_1234_1_2, is not found", message); "java.lang.Exception: job, job_0_1234, is not found", message);
WebServicesTestUtils.checkStringMatch("exception type", WebServicesTestUtils.checkStringMatch("exception type",
"NotFoundException", type); "NotFoundException", type);
WebServicesTestUtils.checkStringMatch("exception classname", WebServicesTestUtils.checkStringMatch("exception classname",
@ -399,7 +399,7 @@ public class TestHsWebServicesJobs extends JerseyTest {
fail("should have thrown exception on invalid uri"); fail("should have thrown exception on invalid uri");
} catch (UniformInterfaceException ue) { } catch (UniformInterfaceException ue) {
ClientResponse response = ue.getResponse(); ClientResponse response = ue.getResponse();
assertEquals(Status.BAD_REQUEST, response.getClientResponseStatus()); assertEquals(Status.NOT_FOUND, response.getClientResponseStatus());
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject msg = response.getEntity(JSONObject.class); JSONObject msg = response.getEntity(JSONObject.class);
JSONObject exception = msg.getJSONObject("RemoteException"); JSONObject exception = msg.getJSONObject("RemoteException");
@ -423,7 +423,7 @@ public class TestHsWebServicesJobs extends JerseyTest {
fail("should have thrown exception on invalid uri"); fail("should have thrown exception on invalid uri");
} catch (UniformInterfaceException ue) { } catch (UniformInterfaceException ue) {
ClientResponse response = ue.getResponse(); ClientResponse response = ue.getResponse();
assertEquals(Status.BAD_REQUEST, response.getClientResponseStatus()); assertEquals(Status.NOT_FOUND, response.getClientResponseStatus());
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType()); assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject msg = response.getEntity(JSONObject.class); JSONObject msg = response.getEntity(JSONObject.class);
JSONObject exception = msg.getJSONObject("RemoteException"); JSONObject exception = msg.getJSONObject("RemoteException");
@ -447,7 +447,7 @@ public class TestHsWebServicesJobs extends JerseyTest {
fail("should have thrown exception on invalid uri"); fail("should have thrown exception on invalid uri");
} catch (UniformInterfaceException ue) { } catch (UniformInterfaceException ue) {
ClientResponse response = ue.getResponse(); ClientResponse response = ue.getResponse();
assertEquals(Status.BAD_REQUEST, response.getClientResponseStatus()); assertEquals(Status.NOT_FOUND, response.getClientResponseStatus());
assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType()); assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
String msg = response.getEntity(String.class); String msg = response.getEntity(String.class);
System.out.println(msg); System.out.println(msg);
@ -468,11 +468,12 @@ public class TestHsWebServicesJobs extends JerseyTest {
private void verifyJobIdInvalid(String message, String type, String classname) { private void verifyJobIdInvalid(String message, String type, String classname) {
WebServicesTestUtils.checkStringMatch("exception message", WebServicesTestUtils.checkStringMatch("exception message",
"For input string: \"foo\"", message); "java.lang.Exception: JobId string : job_foo is not properly formed",
message);
WebServicesTestUtils.checkStringMatch("exception type", WebServicesTestUtils.checkStringMatch("exception type",
"NumberFormatException", type); "NotFoundException", type);
WebServicesTestUtils.checkStringMatch("exception classname", WebServicesTestUtils.checkStringMatch("exception classname",
"java.lang.NumberFormatException", classname); "org.apache.hadoop.yarn.webapp.NotFoundException", classname);
} }
@Test @Test
@ -494,7 +495,8 @@ public class TestHsWebServicesJobs extends JerseyTest {
String type = exception.getString("exception"); String type = exception.getString("exception");
String classname = exception.getString("javaClassName"); String classname = exception.getString("javaClassName");
WebServicesTestUtils.checkStringMatch("exception message", WebServicesTestUtils.checkStringMatch("exception message",
"java.lang.Exception: Error parsing job ID: bogusfoo", message); "java.lang.Exception: JobId string : "
+ "bogusfoo is not properly formed", message);
WebServicesTestUtils.checkStringMatch("exception type", WebServicesTestUtils.checkStringMatch("exception type",
"NotFoundException", type); "NotFoundException", type);
WebServicesTestUtils.checkStringMatch("exception classname", WebServicesTestUtils.checkStringMatch("exception classname",

View File

@@ -72,30 +72,26 @@ public class TestHsWebServicesJobsQuery extends JerseyTest {
   private static HsWebApp webApp;

   static class TestAppContext implements AppContext {
-    final ApplicationAttemptId appAttemptID;
-    final ApplicationId appID;
     final String user = MockJobs.newUserName();
     final Map<JobId, Job> jobs;
     final long startTime = System.currentTimeMillis();

-    TestAppContext(int appid, int numJobs, int numTasks, int numAttempts) {
-      appID = MockJobs.newAppID(appid);
-      appAttemptID = MockJobs.newAppAttemptID(appID, 0);
-      jobs = MockJobs.newJobs(appID, numJobs, numTasks, numAttempts);
+    TestAppContext(int numJobs, int numTasks, int numAttempts) {
+      jobs = MockJobs.newJobs(numJobs, numTasks, numAttempts);
     }

     TestAppContext() {
-      this(0, 3, 2, 1);
+      this(3, 2, 1);
     }

     @Override
     public ApplicationAttemptId getApplicationAttemptId() {
-      return appAttemptID;
+      return null;
     }

     @Override
     public ApplicationId getApplicationID() {
-      return appID;
+      return null;
     }

     @Override

View File

@@ -435,7 +435,8 @@ public class TestHsWebServicesTasks extends JerseyTest {
       String type = exception.getString("exception");
       String classname = exception.getString("javaClassName");
       WebServicesTestUtils.checkStringMatch("exception message",
-          "java.lang.Exception: Error parsing task ID: bogustaskid", message);
+          "java.lang.Exception: TaskId string : "
+              + "bogustaskid is not properly formed", message);
       WebServicesTestUtils.checkStringMatch("exception type",
           "NotFoundException", type);
       WebServicesTestUtils.checkStringMatch("exception classname",
@@ -450,7 +451,7 @@ public class TestHsWebServicesTasks extends JerseyTest {
     Map<JobId, Job> jobsMap = appContext.getAllJobs();
     for (JobId id : jobsMap.keySet()) {
       String jobId = MRApps.toString(id);
-      String tid = "task_1234_0_0_m_0";
+      String tid = "task_0_0000_m_000000";
       try {
         r.path("ws").path("v1").path("history").path("mapreduce").path("jobs")
             .path(jobId).path("tasks").path(tid).get(JSONObject.class);
@@ -466,7 +467,7 @@ public class TestHsWebServicesTasks extends JerseyTest {
       String type = exception.getString("exception");
       String classname = exception.getString("javaClassName");
       WebServicesTestUtils.checkStringMatch("exception message",
-          "java.lang.Exception: task not found with id task_1234_0_0_m_0",
+          "java.lang.Exception: task not found with id task_0_0000_m_000000",
           message);
       WebServicesTestUtils.checkStringMatch("exception type",
           "NotFoundException", type);
@@ -482,7 +483,7 @@ public class TestHsWebServicesTasks extends JerseyTest {
     Map<JobId, Job> jobsMap = appContext.getAllJobs();
     for (JobId id : jobsMap.keySet()) {
       String jobId = MRApps.toString(id);
-      String tid = "task_1234_0_0_d_0";
+      String tid = "task_0_0000_d_000000";
       try {
         r.path("ws").path("v1").path("history").path("mapreduce").path("jobs")
             .path(jobId).path("tasks").path(tid).get(JSONObject.class);
@@ -498,7 +499,8 @@ public class TestHsWebServicesTasks extends JerseyTest {
       String type = exception.getString("exception");
       String classname = exception.getString("javaClassName");
       WebServicesTestUtils.checkStringMatch("exception message",
-          "java.lang.Exception: Unknown task symbol: d", message);
+          "java.lang.Exception: Bad TaskType identifier. TaskId string : "
+              + "task_0_0000_d_000000 is not properly formed.", message);
       WebServicesTestUtils.checkStringMatch("exception type",
           "NotFoundException", type);
       WebServicesTestUtils.checkStringMatch("exception classname",
@@ -513,7 +515,7 @@ public class TestHsWebServicesTasks extends JerseyTest {
     Map<JobId, Job> jobsMap = appContext.getAllJobs();
     for (JobId id : jobsMap.keySet()) {
       String jobId = MRApps.toString(id);
-      String tid = "task_1234_0_m_0";
+      String tid = "task_0000_m_000000";
       try {
         r.path("ws").path("v1").path("history").path("mapreduce").path("jobs")
             .path(jobId).path("tasks").path(tid).get(JSONObject.class);
@@ -529,7 +531,8 @@ public class TestHsWebServicesTasks extends JerseyTest {
       String type = exception.getString("exception");
       String classname = exception.getString("javaClassName");
       WebServicesTestUtils.checkStringMatch("exception message",
-          "java.lang.Exception: For input string: \"m\"", message);
+          "java.lang.Exception: TaskId string : "
+              + "task_0000_m_000000 is not properly formed", message);
       WebServicesTestUtils.checkStringMatch("exception type",
           "NotFoundException", type);
       WebServicesTestUtils.checkStringMatch("exception classname",
@@ -544,7 +547,7 @@ public class TestHsWebServicesTasks extends JerseyTest {
     Map<JobId, Job> jobsMap = appContext.getAllJobs();
     for (JobId id : jobsMap.keySet()) {
       String jobId = MRApps.toString(id);
-      String tid = "task_1234_0_0_m";
+      String tid = "task_0_0000_m";
       try {
         r.path("ws").path("v1").path("history").path("mapreduce").path("jobs")
             .path(jobId).path("tasks").path(tid).get(JSONObject.class);
@@ -560,8 +563,8 @@ public class TestHsWebServicesTasks extends JerseyTest {
       String type = exception.getString("exception");
       String classname = exception.getString("javaClassName");
       WebServicesTestUtils.checkStringMatch("exception message",
-          "java.lang.Exception: Error parsing task ID: task_1234_0_0_m",
-          message);
+          "java.lang.Exception: TaskId string : "
+              + "task_0_0000_m is not properly formed", message);
       WebServicesTestUtils.checkStringMatch("exception type",
           "NotFoundException", type);
       WebServicesTestUtils.checkStringMatch("exception classname",

View File

@@ -16,12 +16,12 @@
   <parent>
     <artifactId>hadoop-mapreduce-client</artifactId>
     <groupId>org.apache.hadoop</groupId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
   </parent>
   <modelVersion>4.0.0</modelVersion>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <name>hadoop-mapreduce-client-jobclient</name>

   <properties>

View File

@@ -70,6 +70,29 @@ public class TestCounters {
       testMaxGroups(new Counters());
     }
   }

+  @Test
+  public void testCountersIncrement() {
+    Counters fCounters = new Counters();
+    Counter fCounter = fCounters.findCounter(FRAMEWORK_COUNTER);
+    fCounter.setValue(100);
+    Counter gCounter = fCounters.findCounter("test", "foo");
+    gCounter.setValue(200);
+
+    Counters counters = new Counters();
+    counters.incrAllCounters(fCounters);
+    Counter counter;
+    for (CounterGroup cg : fCounters) {
+      CounterGroup group = counters.getGroup(cg.getName());
+      if (group.getName().equals("test")) {
+        counter = counters.findCounter("test", "foo");
+        assertEquals(200, counter.getValue());
+      } else {
+        counter = counters.findCounter(FRAMEWORK_COUNTER);
+        assertEquals(100, counter.getValue());
+      }
+    }
+  }
+
   static final Enum<?> FRAMEWORK_COUNTER = TaskCounter.CPU_MILLISECONDS;
   static final long FRAMEWORK_COUNTER_VALUE = 8;

View File

@@ -16,12 +16,12 @@
   <parent>
     <artifactId>hadoop-mapreduce-client</artifactId>
     <groupId>org.apache.hadoop</groupId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
   </parent>
   <modelVersion>4.0.0</modelVersion>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-mapreduce-client-shuffle</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <name>hadoop-mapreduce-client-shuffle</name>

   <properties>

View File

@@ -17,12 +17,12 @@
   <parent>
     <groupId>org.apache.hadoop</groupId>
     <artifactId>hadoop-project</artifactId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
     <relativePath>../../hadoop-project</relativePath>
   </parent>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-mapreduce-client</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <name>hadoop-mapreduce-client</name>
   <packaging>pom</packaging>

View File

@@ -17,12 +17,12 @@
   <parent>
     <groupId>org.apache.hadoop</groupId>
     <artifactId>hadoop-project</artifactId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
     <relativePath>../../hadoop-project</relativePath>
   </parent>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-mapreduce-examples</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <description>Apache Hadoop MapReduce Examples</description>
   <name>Apache Hadoop MapReduce Examples</name>
   <packaging>jar</packaging>

View File

@@ -16,12 +16,12 @@
   <parent>
     <artifactId>hadoop-yarn</artifactId>
     <groupId>org.apache.hadoop</groupId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
   </parent>
   <modelVersion>4.0.0</modelVersion>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-yarn-api</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <name>hadoop-yarn-api</name>

   <properties>

View File

@@ -16,12 +16,12 @@
   <parent>
     <artifactId>hadoop-yarn-applications</artifactId>
     <groupId>org.apache.hadoop</groupId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
   </parent>
   <modelVersion>4.0.0</modelVersion>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-yarn-applications-distributedshell</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <name>hadoop-yarn-applications-distributedshell</name>

   <properties>

View File

@@ -16,12 +16,12 @@
   <parent>
     <artifactId>hadoop-yarn</artifactId>
     <groupId>org.apache.hadoop</groupId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
   </parent>
   <modelVersion>4.0.0</modelVersion>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-yarn-applications</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <name>hadoop-yarn-applications</name>
   <packaging>pom</packaging>

View File

@@ -16,12 +16,12 @@
   <parent>
     <artifactId>hadoop-yarn</artifactId>
     <groupId>org.apache.hadoop</groupId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
   </parent>
   <modelVersion>4.0.0</modelVersion>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-yarn-common</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <name>hadoop-yarn-common</name>

   <properties>

View File

@@ -16,12 +16,12 @@
   <parent>
     <artifactId>hadoop-yarn-server</artifactId>
     <groupId>org.apache.hadoop</groupId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
   </parent>
   <modelVersion>4.0.0</modelVersion>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-yarn-server-common</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <name>hadoop-yarn-server-common</name>

   <properties>

View File

@@ -16,12 +16,12 @@
   <parent>
     <artifactId>hadoop-yarn-server</artifactId>
     <groupId>org.apache.hadoop</groupId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
   </parent>
   <modelVersion>4.0.0</modelVersion>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-yarn-server-nodemanager</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <name>hadoop-yarn-server-nodemanager</name>

   <properties>

View File

@@ -16,12 +16,12 @@
   <parent>
     <artifactId>hadoop-yarn-server</artifactId>
     <groupId>org.apache.hadoop</groupId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
   </parent>
   <modelVersion>4.0.0</modelVersion>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-yarn-server-resourcemanager</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <name>hadoop-yarn-server-resourcemanager</name>

   <properties>

View File

@@ -572,12 +572,7 @@ public class LeafQueue implements CSQueue {
     // Careful! Locking order is important!

     // Check queue ACLs
-    UserGroupInformation userUgi;
-    try {
-      userUgi = UserGroupInformation.getCurrentUser();
-    } catch (IOException ioe) {
-      throw new AccessControlException(ioe);
-    }
+    UserGroupInformation userUgi = UserGroupInformation.createRemoteUser(userName);
     if (!hasAccess(QueueACL.SUBMIT_APPLICATIONS, userUgi)) {
       throw new AccessControlException("User " + userName + " cannot submit" +
           " applications to queue " + getQueuePath());

View File

@@ -119,10 +119,11 @@ public class TestLeafQueue {
   private static final String B = "b";
   private static final String C = "c";
   private static final String C1 = "c1";
+  private static final String D = "d";

   private void setupQueueConfiguration(CapacitySchedulerConfiguration conf) {
     // Define top-level queues
-    conf.setQueues(CapacitySchedulerConfiguration.ROOT, new String[] {A, B, C});
+    conf.setQueues(CapacitySchedulerConfiguration.ROOT, new String[] {A, B, C, D});
     conf.setCapacity(CapacitySchedulerConfiguration.ROOT, 100);
     conf.setMaximumCapacity(CapacitySchedulerConfiguration.ROOT, 100);
     conf.setAcl(CapacitySchedulerConfiguration.ROOT, QueueACL.SUBMIT_APPLICATIONS, " ");
@@ -133,7 +134,7 @@ public class TestLeafQueue {
     conf.setAcl(Q_A, QueueACL.SUBMIT_APPLICATIONS, "*");

     final String Q_B = CapacitySchedulerConfiguration.ROOT + "." + B;
-    conf.setCapacity(Q_B, 90);
+    conf.setCapacity(Q_B, 80);
     conf.setMaximumCapacity(Q_B, 99);
     conf.setAcl(Q_B, QueueACL.SUBMIT_APPLICATIONS, "*");
@@ -146,6 +147,11 @@ public class TestLeafQueue {
     final String Q_C1 = Q_C + "." + C1;
     conf.setCapacity(Q_C1, 100);
+
+    final String Q_D = CapacitySchedulerConfiguration.ROOT + "." + D;
+    conf.setCapacity(Q_D, 10);
+    conf.setMaximumCapacity(Q_D, 11);
+    conf.setAcl(Q_D, QueueACL.SUBMIT_APPLICATIONS, "user_d");
   }
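
The capacity arithmetic behind these hunks: sibling capacities under ROOT must sum to 100, so adding queue d at capacity 10 requires queue b to drop from 90 to 80, which the updated assertions in the next hunk verify.

    // ROOT capacity budget after this change (values from the hunks):
    // b: 90 -> 80, d: 0 -> 10; a and c unchanged, siblings still total 100.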
@@ -202,8 +208,8 @@ public class TestLeafQueue {
     assertEquals(0.2, a.getAbsoluteMaximumCapacity(), epsilon);

     LeafQueue b = stubLeafQueue((LeafQueue)queues.get(B));
-    assertEquals(0.9, b.getCapacity(), epsilon);
-    assertEquals(0.9, b.getAbsoluteCapacity(), epsilon);
+    assertEquals(0.80, b.getCapacity(), epsilon);
+    assertEquals(0.80, b.getAbsoluteCapacity(), epsilon);
     assertEquals(0.99, b.getMaximumCapacity(), epsilon);
     assertEquals(0.99, b.getAbsoluteMaximumCapacity(), epsilon);
@@ -257,9 +263,34 @@ public class TestLeafQueue {
     // Only 1 container
     a.assignContainers(clusterResource, node_0);
-    assertEquals(7*GB, a.getMetrics().getAvailableMB());
+    assertEquals(6*GB, a.getMetrics().getAvailableMB());
   }

+  @Test
+  public void testUserQueueAcl() throws Exception {
+
+    // Manipulate queue 'd'
+    LeafQueue d = stubLeafQueue((LeafQueue) queues.get(D));
+
+    // Users
+    final String user_d = "user_d";
+
+    // Submit applications
+    final ApplicationAttemptId appAttemptId_0 = TestUtils
+        .getMockApplicationAttemptId(0, 1);
+    SchedulerApp app_0 = new SchedulerApp(appAttemptId_0, user_d, d, null,
+        rmContext, null);
+    d.submitApplication(app_0, user_d, D);
+
+    // Attempt the same application again
+    final ApplicationAttemptId appAttemptId_1 = TestUtils
+        .getMockApplicationAttemptId(0, 2);
+    SchedulerApp app_1 = new SchedulerApp(appAttemptId_1, user_d, d, null,
+        rmContext, null);
+    d.submitApplication(app_1, user_d, D); // same user
+  }
+
   @Test
   public void testAppAttemptMetrics() throws Exception {

View File

@@ -16,11 +16,11 @@
   <parent>
     <artifactId>hadoop-yarn-server</artifactId>
     <groupId>org.apache.hadoop</groupId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
   </parent>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-yarn-server-tests</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <name>hadoop-yarn-server-tests</name>

   <properties>

View File

@@ -16,7 +16,7 @@
   <parent>
     <artifactId>hadoop-yarn-server</artifactId>
     <groupId>org.apache.hadoop</groupId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
   </parent>
   <modelVersion>4.0.0</modelVersion>
   <groupId>org.apache.hadoop</groupId>

View File

@@ -16,12 +16,12 @@
   <parent>
     <artifactId>hadoop-yarn</artifactId>
     <groupId>org.apache.hadoop</groupId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
   </parent>
   <modelVersion>4.0.0</modelVersion>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-yarn-server</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <name>hadoop-yarn-server</name>
   <packaging>pom</packaging>

View File

@@ -16,12 +16,12 @@
   <parent>
     <artifactId>hadoop-yarn</artifactId>
     <groupId>org.apache.hadoop</groupId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
   </parent>
   <modelVersion>4.0.0</modelVersion>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-yarn-site</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <name>hadoop-yarn-site</name>

   <properties>

View File

@@ -17,12 +17,12 @@
   <parent>
     <groupId>org.apache.hadoop</groupId>
     <artifactId>hadoop-project</artifactId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
     <relativePath>../../hadoop-project</relativePath>
   </parent>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-yarn</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <packaging>pom</packaging>
   <name>hadoop-yarn</name>

View File

@@ -18,12 +18,12 @@
   <parent>
     <groupId>org.apache.hadoop</groupId>
     <artifactId>hadoop-project</artifactId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
     <relativePath>../hadoop-project</relativePath>
   </parent>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-mapreduce</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <packaging>pom</packaging>
   <name>hadoop-mapreduce</name>
   <url>http://hadoop.apache.org/mapreduce/</url>

View File

@@ -18,12 +18,12 @@
   <parent>
     <groupId>org.apache.hadoop</groupId>
     <artifactId>hadoop-project</artifactId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
     <relativePath>../hadoop-project</relativePath>
   </parent>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-minicluster</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <packaging>jar</packaging>

   <description>Apache Hadoop Mini-Cluster</description>

View File

@@ -17,12 +17,12 @@
   <parent>
     <groupId>org.apache.hadoop</groupId>
     <artifactId>hadoop-project</artifactId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
     <relativePath>../hadoop-project</relativePath>
   </parent>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-project-dist</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <description>Apache Hadoop Project Dist POM</description>
   <name>Apache Hadoop Project Dist POM</name>
   <packaging>pom</packaging>

View File

@@ -17,11 +17,11 @@
   <parent>
     <groupId>org.apache.hadoop</groupId>
     <artifactId>hadoop-main</artifactId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
   </parent>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-project</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <description>Apache Hadoop Project POM</description>
   <name>Apache Hadoop Project POM</name>
   <packaging>pom</packaging>

View File

@@ -17,12 +17,12 @@
   <parent>
     <groupId>org.apache.hadoop</groupId>
     <artifactId>hadoop-project</artifactId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
     <relativePath>../../hadoop-project</relativePath>
   </parent>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-archives</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <description>Apache Hadoop Archives</description>
   <name>Apache Hadoop Archives</name>
   <packaging>jar</packaging>

View File

@@ -17,12 +17,12 @@
   <parent>
     <groupId>org.apache.hadoop</groupId>
     <artifactId>hadoop-project</artifactId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
     <relativePath>../../hadoop-project</relativePath>
   </parent>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-distcp</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <description>Apache Hadoop Distributed Copy</description>
   <name>Apache Hadoop Distributed Copy</name>
   <packaging>jar</packaging>

View File

@@ -17,12 +17,12 @@
   <parent>
     <groupId>org.apache.hadoop</groupId>
     <artifactId>hadoop-project</artifactId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
     <relativePath>../../hadoop-project</relativePath>
   </parent>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-extras</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <description>Apache Hadoop Extras</description>
   <name>Apache Hadoop Extras</name>
   <packaging>jar</packaging>

View File

@@ -17,12 +17,12 @@
   <parent>
     <groupId>org.apache.hadoop</groupId>
     <artifactId>hadoop-project</artifactId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
     <relativePath>../../hadoop-project</relativePath>
   </parent>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-rumen</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <description>Apache Hadoop Rumen</description>
   <name>Apache Hadoop Rumen</name>
   <packaging>jar</packaging>

View File

@@ -17,12 +17,12 @@
   <parent>
     <groupId>org.apache.hadoop</groupId>
     <artifactId>hadoop-project</artifactId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
     <relativePath>../../hadoop-project</relativePath>
   </parent>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-streaming</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <description>Apache Hadoop MapReduce Streaming</description>
   <name>Apache Hadoop MapReduce Streaming</name>
   <packaging>jar</packaging>

View File

@@ -17,12 +17,12 @@
   <parent>
     <groupId>org.apache.hadoop</groupId>
     <artifactId>hadoop-project-dist</artifactId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
     <relativePath>../../hadoop-project-dist</relativePath>
   </parent>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-tools-dist</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <description>Apache Hadoop Tools Dist</description>
   <name>Apache Hadoop Tools Dist</name>
   <packaging>jar</packaging>

View File

@@ -17,12 +17,12 @@
   <parent>
     <groupId>org.apache.hadoop</groupId>
     <artifactId>hadoop-project</artifactId>
-    <version>0.23.2-SNAPSHOT</version>
+    <version>0.23.3-SNAPSHOT</version>
     <relativePath>../hadoop-project</relativePath>
   </parent>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-tools</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <description>Apache Hadoop Tools</description>
   <name>Apache Hadoop Tools</name>
   <packaging>pom</packaging>

View File

@@ -16,7 +16,7 @@
   <modelVersion>4.0.0</modelVersion>
   <groupId>org.apache.hadoop</groupId>
   <artifactId>hadoop-main</artifactId>
-  <version>0.23.2-SNAPSHOT</version>
+  <version>0.23.3-SNAPSHOT</version>
   <description>Apache Hadoop Main</description>
   <name>Apache Hadoop Main</name>
   <packaging>pom</packaging>