MAPREDUCE-3433. Finding counters by legacy group name returns empty counters.
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1207755 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
parent
784a4a3084
commit
be9c46a447
@@ -189,6 +189,9 @@ Release 0.23.1 - Unreleased

    MAPREDUCE-3265. Removed debug logs during job submission to LOG.debug to
    cut down noise. (acmurthy)

    MAPREDUCE-3433. Finding counters by legacy group name returns empty
    counters. (tomwhite)

Release 0.23.0 - 2011-11-01

  INCOMPATIBLE CHANGES

@ -194,15 +194,16 @@ public Iterator<G> iterator() {
|
||||
* @return the group
|
||||
*/
|
||||
public synchronized G getGroup(String groupName) {
|
||||
boolean isFGroup = isFrameworkGroup(groupName);
|
||||
G group = isFGroup ? fgroups.get(groupName) : groups.get(groupName);
|
||||
String newGroupName = filterGroupName(groupName);
|
||||
boolean isFGroup = isFrameworkGroup(newGroupName);
|
||||
G group = isFGroup ? fgroups.get(newGroupName) : groups.get(newGroupName);
|
||||
if (group == null) {
|
||||
group = groupFactory.newGroup(filterGroupName(groupName), limits);
|
||||
group = groupFactory.newGroup(newGroupName, limits);
|
||||
if (isFGroup) {
|
||||
fgroups.put(groupName, group);
|
||||
fgroups.put(newGroupName, group);
|
||||
} else {
|
||||
limits.checkGroups(groups.size() + 1);
|
||||
groups.put(groupName, group);
|
||||
groups.put(newGroupName, group);
|
||||
}
|
||||
}
|
||||
return group;
|
||||
|
@ -17,16 +17,19 @@
|
||||
*/
|
||||
package org.apache.hadoop.mapred;
|
||||
|
||||
import junit.framework.TestCase;
|
||||
import static org.junit.Assert.assertEquals;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.text.ParseException;
|
||||
|
||||
import org.apache.hadoop.mapreduce.JobCounter;
|
||||
import org.apache.hadoop.mapreduce.TaskCounter;
|
||||
import org.junit.Test;
|
||||
|
||||
/**
|
||||
* TestCounters checks the sanity and recoverability of {@code Counters}
|
||||
*/
|
||||
public class TestCounters extends TestCase {
|
||||
public class TestCounters {
|
||||
enum myCounters {TEST1, TEST2};
|
||||
private static final long MAX_VALUE = 10;
|
||||
|
||||
@ -69,6 +72,7 @@ private void testCounter(Counters counter) throws ParseException {
|
||||
counter.hashCode(), recoveredCounter.hashCode());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testCounters() throws IOException {
|
||||
Enum[] keysWithResource = {TaskCounter.MAP_INPUT_RECORDS,
|
||||
TaskCounter.MAP_OUTPUT_BYTES};
|
||||
@ -92,6 +96,26 @@ public void testCounters() throws IOException {
|
||||
}
|
||||
}
|
||||
|
||||
@SuppressWarnings("deprecation")
|
||||
@Test
|
||||
public void testLegacyNames() {
|
||||
Counters counters = new Counters();
|
||||
counters.incrCounter(TaskCounter.MAP_INPUT_RECORDS, 1);
|
||||
counters.incrCounter(JobCounter.DATA_LOCAL_MAPS, 1);
|
||||
|
||||
assertEquals("New name", 1, counters.findCounter(
|
||||
TaskCounter.class.getName(), "MAP_INPUT_RECORDS").getValue());
|
||||
assertEquals("Legacy name", 1, counters.findCounter(
|
||||
"org.apache.hadoop.mapred.Task$Counter",
|
||||
"MAP_INPUT_RECORDS").getValue());
|
||||
|
||||
assertEquals("New name", 1, counters.findCounter(
|
||||
JobCounter.class.getName(), "DATA_LOCAL_MAPS").getValue());
|
||||
assertEquals("Legacy name", 1, counters.findCounter(
|
||||
"org.apache.hadoop.mapred.JobInProgress$Counter",
|
||||
"DATA_LOCAL_MAPS").getValue());
|
||||
}
|
||||
|
||||
public static void main(String[] args) throws IOException {
|
||||
new TestCounters().testCounters();
|
||||
}
|
@ -29,7 +29,7 @@
|
||||
|
||||
<properties>
  <hadoop.log.dir>${project.build.directory}/log</hadoop.log.dir>
  <!-- TestStreamingCombiner removed from the exclusion list: it passes again
       once legacy counter group names resolve correctly (MAPREDUCE-3433). -->
  <test.exclude.pattern>%regex[.*(TestStreamingBadRecords|TestStreamingStatus|TestUlimit).*]</test.exclude.pattern>
</properties>
|
||||
|
||||
<dependencies>
|
||||
|
Loading…
Reference in New Issue
Block a user