Merge -r 1207754:1207755 from trunk to branch-0.23. Fixes: MAPREDUCE-3433

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-0.23@1207756 13f79535-47bb-0310-9956-ffa450edef68
Thomas White 2011-11-29 05:12:07 +00:00
parent 180b0177bb
commit c4b13d74e5
4 changed files with 36 additions and 8 deletions

hadoop-mapreduce-project/CHANGES.txt

@@ -143,6 +143,9 @@ Release 0.23.1 - Unreleased
     MAPREDUCE-3468. Changed ant based infrastructure to use 0.23.1 version.
     (sseth via acmurthy)
 
+    MAPREDUCE-3433. Finding counters by legacy group name returns empty
+    counters. (tomwhite)
+
 Release 0.23.0 - 2011-11-01
 
   INCOMPATIBLE CHANGES
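
The underlying bug: Counters.findCounter(group, name) came back empty when given a pre-0.23 group name such as "org.apache.hadoop.mapred.Task$Counter", because the group caches are keyed by the new mapreduce names. A minimal standalone sketch of the legacy-to-new translation that filterGroupName performs (hypothetical class, not the real implementation; only the two mappings exercised by the new test below are shown):

    import java.util.HashMap;
    import java.util.Map;

    class LegacyGroupNames {
      // Assumed mapping; mirrors the legacy group names asserted in
      // TestCounters#testLegacyNames below.
      private static final Map<String, String> LEGACY =
          new HashMap<String, String>();
      static {
        LEGACY.put("org.apache.hadoop.mapred.Task$Counter",
                   "org.apache.hadoop.mapreduce.TaskCounter");
        LEGACY.put("org.apache.hadoop.mapred.JobInProgress$Counter",
                   "org.apache.hadoop.mapreduce.JobCounter");
      }

      /** Translate a legacy group name to its current equivalent, if any. */
      static String filterGroupName(String groupName) {
        String newName = LEGACY.get(groupName);
        return newName == null ? groupName : newName;
      }
    }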

org/apache/hadoop/mapreduce/counters/AbstractCounters.java

@@ -194,15 +194,16 @@ public abstract class AbstractCounters<C extends Counter,
    * @return the group
    */
   public synchronized G getGroup(String groupName) {
-    boolean isFGroup = isFrameworkGroup(groupName);
-    G group = isFGroup ? fgroups.get(groupName) : groups.get(groupName);
+    String newGroupName = filterGroupName(groupName);
+    boolean isFGroup = isFrameworkGroup(newGroupName);
+    G group = isFGroup ? fgroups.get(newGroupName) : groups.get(newGroupName);
     if (group == null) {
-      group = groupFactory.newGroup(filterGroupName(groupName), limits);
+      group = groupFactory.newGroup(newGroupName, limits);
       if (isFGroup) {
-        fgroups.put(groupName, group);
+        fgroups.put(newGroupName, group);
       } else {
         limits.checkGroups(groups.size() + 1);
-        groups.put(groupName, group);
+        groups.put(newGroupName, group);
       }
     }
     return group;
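
Note on the change above: getGroup previously consulted fgroups/groups with the raw caller-supplied name and only filtered it when creating a new group, so a lookup by a legacy name missed the cached group stored under the new name and silently produced a fresh, empty one. A sketch of the observable symptom, assuming the Counters API used in the test below:

    // Sketch: the increment is recorded under the new group name.
    Counters counters = new Counters();
    counters.incrCounter(TaskCounter.MAP_INPUT_RECORDS, 1);
    // Lookup by the legacy group name. Before this patch the unfiltered
    // name missed the cache and a fresh empty group was returned, so the
    // value read back as 0; with the fix it reads 1.
    long value = counters.findCounter("org.apache.hadoop.mapred.Task$Counter",
                                      "MAP_INPUT_RECORDS").getValue();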

org/apache/hadoop/mapred/TestCounters.java

@@ -17,16 +17,19 @@
  */
 package org.apache.hadoop.mapred;
 
-import junit.framework.TestCase;
+import static org.junit.Assert.assertEquals;
 
 import java.io.IOException;
 import java.text.ParseException;
 
+import org.apache.hadoop.mapreduce.JobCounter;
 import org.apache.hadoop.mapreduce.TaskCounter;
+
+import org.junit.Test;
 
 /**
  * TestCounters checks the sanity and recoverability of {@code Counters}
  */
-public class TestCounters extends TestCase {
+public class TestCounters {
   enum myCounters {TEST1, TEST2};
   private static final long MAX_VALUE = 10;
@@ -69,6 +72,7 @@ public class TestCounters extends TestCase {
                  counter.hashCode(), recoveredCounter.hashCode());
   }
 
+  @Test
   public void testCounters() throws IOException {
     Enum[] keysWithResource = {TaskCounter.MAP_INPUT_RECORDS,
                                TaskCounter.MAP_OUTPUT_BYTES};
@@ -92,6 +96,26 @@ public class TestCounters extends TestCase {
     }
   }
 
+  @SuppressWarnings("deprecation")
+  @Test
+  public void testLegacyNames() {
+    Counters counters = new Counters();
+    counters.incrCounter(TaskCounter.MAP_INPUT_RECORDS, 1);
+    counters.incrCounter(JobCounter.DATA_LOCAL_MAPS, 1);
+
+    assertEquals("New name", 1, counters.findCounter(
+        TaskCounter.class.getName(), "MAP_INPUT_RECORDS").getValue());
+    assertEquals("Legacy name", 1, counters.findCounter(
+        "org.apache.hadoop.mapred.Task$Counter",
+        "MAP_INPUT_RECORDS").getValue());
+
+    assertEquals("New name", 1, counters.findCounter(
+        JobCounter.class.getName(), "DATA_LOCAL_MAPS").getValue());
+    assertEquals("Legacy name", 1, counters.findCounter(
+        "org.apache.hadoop.mapred.JobInProgress$Counter",
+        "DATA_LOCAL_MAPS").getValue());
+  }
+
   public static void main(String[] args) throws IOException {
     new TestCounters().testCounters();
   }
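
The test class also moves from junit.framework.TestCase to JUnit 4, which is why each test method now carries @Test and why assertEquals is statically imported; @SuppressWarnings("deprecation") covers the old mapred Counters API that testLegacyNames deliberately exercises. To run just this test with Surefire (invoked from the module containing it, assumed to be hadoop-mapreduce-client-core):

    mvn test -Dtest=TestCounters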

pom.xml (hadoop-streaming)

@@ -29,7 +29,7 @@
 
   <properties>
     <hadoop.log.dir>${project.build.directory}/log</hadoop.log.dir>
-    <test.exclude.pattern>%regex[.*(TestStreamingBadRecords|TestStreamingCombiner|TestStreamingStatus|TestUlimit).*]</test.exclude.pattern>
+    <test.exclude.pattern>%regex[.*(TestStreamingBadRecords|TestStreamingStatus|TestUlimit).*]</test.exclude.pattern>
   </properties>
 
   <dependencies>
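
The property above is a Maven Surefire %regex exclusion: any test class whose path matches the alternation is skipped. Dropping TestStreamingCombiner from the list re-enables that streaming test, evidently because it verifies combiner counters through a legacy group name and only passes once this fix is in place.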