svn merge -c 1401467/1401473 FIXES: MAPREDUCE-4229. Intern counter names in the JT (bobby via daryn)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1401483 13f79535-47bb-0310-9956-ffa450edef68
Daryn Sharp 2012-10-23 21:10:17 +00:00
parent 243966374b
commit 7b36e659c1
6 changed files with 190 additions and 29 deletions
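In short, the change routes strings that recur heavily in job-history and counter handling (counter and group names, display names, hostnames, task statuses) through a new StringInterner utility, so equal values share one canonical instance instead of each parsed record holding its own copy. The pattern applied across the hunks below is roughly the following sketch (illustrative only; record and getCounterName() are hypothetical stand-ins for the real event accessors):

    // Before: every parsed record keeps its own copy of an identical string.
    counter.name = record.getCounterName();

    // After: equal strings resolve to one weakly held canonical instance,
    // so a counter name repeated across thousands of tasks is stored once.
    counter.name = StringInterner.weakIntern(record.getCounterName());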

StringInterner.java (new file)

@@ -0,0 +1,78 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.util;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import com.google.common.collect.Interner;
import com.google.common.collect.Interners;
/**
* Provides equivalent behavior to String.intern() to optimize performance,
* without consuming memory in the permanent generation.
*/
@InterfaceAudience.Public
@InterfaceStability.Stable
public class StringInterner {
/**
* Retains a strong reference to each string instance it has interned.
*/
private final static Interner<String> strongInterner;
/**
* Retains a weak reference to each string instance it has interned.
*/
private final static Interner<String> weakInterner;
static {
strongInterner = Interners.newStrongInterner();
weakInterner = Interners.newWeakInterner();
}
/**
* Interns and returns a reference to the representative instance
* for any of a collection of string instances that are equal to each other.
* Retains a strong reference to the instance,
* thus preventing it from being garbage-collected.
*
* @param sample string instance to be interned
* @return strong reference to interned string instance
*/
public static String strongIntern(String sample) {
return strongInterner.intern(sample);
}
/**
* Interns and returns a reference to the representative instance
* for any of a collection of string instances that are equal to each other.
* Retains a weak reference to the instance,
* and so does not prevent it from being garbage-collected.
*
* @param sample string instance to be interned
* @return weak reference to interned string instance
*/
public static String weakIntern(String sample) {
return weakInterner.intern(sample);
}
}
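The two entry points differ only in how long the canonical copy is retained: strongIntern() pins every distinct value for the life of the JVM, while weakIntern() lets a canonical string be garbage-collected once nothing else references it, which is why the parser changes further down use the weak variant. A minimal usage sketch, assuming Guava's Interner semantics:

    // Equal inputs map to the same canonical instance.
    String a = StringInterner.weakIntern(new String("MAP_INPUT_RECORDS"));
    String b = StringInterner.weakIntern("MAP_INPUT_RECORDS");
    assert a == b;  // same reference, not merely equals()

    // weakIntern: the canonical instance becomes collectable once unreferenced.
    // strongIntern: the canonical instance stays reachable until the JVM exits.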

TestStringInterner.java (new file)

@@ -0,0 +1,76 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.util;
import static org.junit.Assert.*;
import static org.apache.hadoop.util.StringInterner.*;
import org.junit.Test;
/**
* Tests string interning via {@link StringInterner}.
*/
public class TestStringInterner {
/**
* Test that different references are returned for string
* instances that are equal to each other but not interned.
*/
@Test
public void testNoIntern() {
String literalABC = "ABC";
String substringABC = "ABCDE".substring(0,3);
String heapABC = new String("ABC");
assertNotSame(literalABC, substringABC);
assertNotSame(literalABC, heapABC);
assertNotSame(substringABC, heapABC);
}
/**
* Test that the same strongly interned reference is returned
* for string instances that are equal to each other.
*/
@Test
public void testStrongIntern() {
String strongInternLiteralABC = strongIntern("ABC");
String strongInternSubstringABC = strongIntern("ABCDE".substring(0,3));
String strongInternHeapABC = strongIntern(new String("ABC"));
assertSame(strongInternLiteralABC, strongInternSubstringABC);
assertSame(strongInternLiteralABC, strongInternHeapABC);
assertSame(strongInternSubstringABC, strongInternHeapABC);
}
/**
* Test that the same weakly interned reference is returned
* for string instances that are equal to each other.
*/
@Test
public void testWeakIntern() {
String weakInternLiteralABC = weakIntern("ABC");
String weakInternSubstringABC = weakIntern("ABCDE".substring(0,3));
String weakInternHeapABC = weakIntern(new String("ABC"));
assertSame(weakInternLiteralABC, weakInternSubstringABC);
assertSame(weakInternLiteralABC, weakInternHeapABC);
assertSame(weakInternSubstringABC, weakInternHeapABC);
}
}

CHANGES.txt

@@ -458,6 +458,8 @@ Release 0.23.5 - UNRELEASED
     MAPREDUCE-4740. only .jars can be added to the Distributed Cache
     classpath. (Robert Joseph Evans via jlowe)
+    MAPREDUCE-4229. Intern counter names in the JT (bobby via daryn)
 Release 0.23.4 - UNRELEASED
   INCOMPATIBLE CHANGES

EventReader.java

@@ -29,6 +29,7 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapreduce.CounterGroup;
 import org.apache.hadoop.mapreduce.Counters;
+import org.apache.hadoop.util.StringInterner;
 import org.apache.avro.Schema;
 import org.apache.avro.io.Decoder;
@@ -170,9 +171,11 @@ public class EventReader implements Closeable {
     Counters result = new Counters();
     for (JhCounterGroup g : counters.groups) {
       CounterGroup group =
-          result.addGroup(g.name.toString(), g.displayName.toString());
+          result.addGroup(StringInterner.weakIntern(g.name.toString()),
+              StringInterner.weakIntern(g.displayName.toString()));
       for (JhCounter c : g.counts) {
-        group.addCounter(c.name.toString(), c.displayName.toString(), c.value);
+        group.addCounter(StringInterner.weakIntern(c.name.toString()),
+            StringInterner.weakIntern(c.displayName.toString()), c.value);
       }
     }
     return result;

JobHistoryParser.java

@@ -42,6 +42,7 @@ import org.apache.hadoop.mapreduce.TaskID;
 import org.apache.hadoop.mapred.TaskStatus;
 import org.apache.hadoop.mapreduce.TaskType;
 import org.apache.hadoop.security.authorize.AccessControlList;
+import org.apache.hadoop.util.StringInterner;
 import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
 import org.apache.hadoop.yarn.api.records.ContainerId;
@@ -226,10 +227,10 @@ public class JobHistoryParser {
     TaskAttemptInfo attemptInfo =
         taskInfo.attemptsMap.get(event.getAttemptId());
     attemptInfo.finishTime = event.getFinishTime();
-    attemptInfo.status = event.getTaskStatus();
-    attemptInfo.state = event.getState();
+    attemptInfo.status = StringInterner.weakIntern(event.getTaskStatus());
+    attemptInfo.state = StringInterner.weakIntern(event.getState());
     attemptInfo.counters = event.getCounters();
-    attemptInfo.hostname = event.getHostname();
+    attemptInfo.hostname = StringInterner.weakIntern(event.getHostname());
   }
   private void handleReduceAttemptFinishedEvent
@@ -238,14 +239,14 @@
     TaskAttemptInfo attemptInfo =
         taskInfo.attemptsMap.get(event.getAttemptId());
     attemptInfo.finishTime = event.getFinishTime();
-    attemptInfo.status = event.getTaskStatus();
-    attemptInfo.state = event.getState();
+    attemptInfo.status = StringInterner.weakIntern(event.getTaskStatus());
+    attemptInfo.state = StringInterner.weakIntern(event.getState());
     attemptInfo.shuffleFinishTime = event.getShuffleFinishTime();
     attemptInfo.sortFinishTime = event.getSortFinishTime();
     attemptInfo.counters = event.getCounters();
-    attemptInfo.hostname = event.getHostname();
+    attemptInfo.hostname = StringInterner.weakIntern(event.getHostname());
     attemptInfo.port = event.getPort();
-    attemptInfo.rackname = event.getRackName();
+    attemptInfo.rackname = StringInterner.weakIntern(event.getRackName());
   }
   private void handleMapAttemptFinishedEvent(MapAttemptFinishedEvent event) {
@@ -253,13 +254,13 @@
     TaskAttemptInfo attemptInfo =
         taskInfo.attemptsMap.get(event.getAttemptId());
     attemptInfo.finishTime = event.getFinishTime();
-    attemptInfo.status = event.getTaskStatus();
-    attemptInfo.state = event.getState();
+    attemptInfo.status = StringInterner.weakIntern(event.getTaskStatus());
+    attemptInfo.state = StringInterner.weakIntern(event.getState());
     attemptInfo.mapFinishTime = event.getMapFinishTime();
     attemptInfo.counters = event.getCounters();
-    attemptInfo.hostname = event.getHostname();
+    attemptInfo.hostname = StringInterner.weakIntern(event.getHostname());
     attemptInfo.port = event.getPort();
-    attemptInfo.rackname = event.getRackName();
+    attemptInfo.rackname = StringInterner.weakIntern(event.getRackName());
   }
   private void handleTaskAttemptFailedEvent(
@@ -269,10 +270,10 @@
         taskInfo.attemptsMap.get(event.getTaskAttemptId());
     attemptInfo.finishTime = event.getFinishTime();
     attemptInfo.error = event.getError();
-    attemptInfo.status = event.getTaskStatus();
-    attemptInfo.hostname = event.getHostname();
+    attemptInfo.status = StringInterner.weakIntern(event.getTaskStatus());
+    attemptInfo.hostname = StringInterner.weakIntern(event.getHostname());
     attemptInfo.port = event.getPort();
-    attemptInfo.rackname = event.getRackName();
+    attemptInfo.rackname = StringInterner.weakIntern(event.getRackName());
     attemptInfo.shuffleFinishTime = event.getFinishTime();
     attemptInfo.sortFinishTime = event.getFinishTime();
     attemptInfo.mapFinishTime = event.getFinishTime();
@@ -300,7 +301,7 @@
     attemptInfo.startTime = event.getStartTime();
     attemptInfo.attemptId = event.getTaskAttemptId();
     attemptInfo.httpPort = event.getHttpPort();
-    attemptInfo.trackerName = event.getTrackerName();
+    attemptInfo.trackerName = StringInterner.weakIntern(event.getTrackerName());
     attemptInfo.taskType = event.getTaskType();
     attemptInfo.shufflePort = event.getShufflePort();
     attemptInfo.containerId = event.getContainerId();
@@ -344,7 +345,7 @@
     info.finishTime = event.getFinishTime();
     info.finishedMaps = event.getFinishedMaps();
     info.finishedReduces = event.getFinishedReduces();
-    info.jobStatus = event.getStatus();
+    info.jobStatus = StringInterner.weakIntern(event.getStatus());
   }
   private void handleJobFinishedEvent(JobFinishedEvent event) {
@@ -375,7 +376,7 @@
     amInfo.appAttemptId = event.getAppAttemptId();
     amInfo.startTime = event.getStartTime();
     amInfo.containerId = event.getContainerId();
-    amInfo.nodeManagerHost = event.getNodeManagerHost();
+    amInfo.nodeManagerHost = StringInterner.weakIntern(event.getNodeManagerHost());
     amInfo.nodeManagerPort = event.getNodeManagerPort();
     amInfo.nodeManagerHttpPort = event.getNodeManagerHttpPort();
     if (info.amInfos == null) {
@@ -393,11 +394,11 @@
   private void handleJobSubmittedEvent(JobSubmittedEvent event) {
     info.jobid = event.getJobId();
     info.jobname = event.getJobName();
-    info.username = event.getUserName();
+    info.username = StringInterner.weakIntern(event.getUserName());
     info.submitTime = event.getSubmitTime();
     info.jobConfPath = event.getJobConfPath();
     info.jobACLs = event.getJobAcls();
-    info.jobQueueName = event.getJobQueueName();
+    info.jobQueueName = StringInterner.weakIntern(event.getJobQueueName());
   }
   /**

CountersStrings.java

@@ -28,6 +28,7 @@ import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.mapreduce.counters.AbstractCounters;
 import org.apache.hadoop.mapreduce.Counter;
 import org.apache.hadoop.mapreduce.counters.CounterGroupBase;
+import org.apache.hadoop.util.StringInterner;
 import org.apache.hadoop.util.StringUtils;
 /**
@@ -235,13 +236,13 @@ public class CountersStrings {
       // Get the actual name
       String groupName =
-          getBlock(groupString, UNIT_OPEN, UNIT_CLOSE, groupIndex);
-      groupName = unescape(groupName);
+          StringInterner.weakIntern(getBlock(groupString, UNIT_OPEN, UNIT_CLOSE, groupIndex));
+      groupName = StringInterner.weakIntern(unescape(groupName));
       // Get the display name
       String groupDisplayName =
-          getBlock(groupString, UNIT_OPEN, UNIT_CLOSE, groupIndex);
-      groupDisplayName = unescape(groupDisplayName);
+          StringInterner.weakIntern(getBlock(groupString, UNIT_OPEN, UNIT_CLOSE, groupIndex));
+      groupDisplayName = StringInterner.weakIntern(unescape(groupDisplayName));
       // Get the counters
       G group = counters.getGroup(groupName);
@@ -255,13 +256,13 @@
       // Get the actual name
       String counterName =
-          getBlock(counterString, UNIT_OPEN, UNIT_CLOSE, counterIndex);
-      counterName = unescape(counterName);
+          StringInterner.weakIntern(getBlock(counterString, UNIT_OPEN, UNIT_CLOSE, counterIndex));
+      counterName = StringInterner.weakIntern(unescape(counterName));
       // Get the display name
       String counterDisplayName =
-          getBlock(counterString, UNIT_OPEN, UNIT_CLOSE, counterIndex);
-      counterDisplayName = unescape(counterDisplayName);
+          StringInterner.weakIntern(getBlock(counterString, UNIT_OPEN, UNIT_CLOSE, counterIndex));
+      counterDisplayName = StringInterner.weakIntern(unescape(counterDisplayName));
       // Get the value
       long value =