HDFS-8792. BlockManager#postponedMisreplicatedBlocks should use a LightWeightHashSet to save memory (Yi Liu via Colin P. McCabe)

(cherry picked from commit c77bd6af16)
Author: Colin Patrick Mccabe
Date:   2015-08-17 12:00:45 -07:00
Parent: 55f7097900
Commit: 08508ca1c8
4 changed files with 50 additions and 8 deletions

File: CHANGES.txt

@@ -464,6 +464,9 @@ Release 2.8.0 - UNRELEASED
     HDFS-7433. Optimize performance of DatanodeManager's node map.
     (daryn via kihwal)
 
+    HDFS-8792. BlockManager#postponedMisreplicatedBlocks should use a
+    LightWeightHashSet to save memory (Yi Liu via Colin P. McCabe)
+
   BUG FIXES
 
     HDFS-8091: ACLStatus and XAttributes should be presented to

File: BlockManager.java

@@ -84,6 +84,7 @@ import org.apache.hadoop.hdfs.server.protocol.DatanodeStorage.State;
 import org.apache.hadoop.hdfs.server.protocol.KeyUpdateCommand;
 import org.apache.hadoop.hdfs.server.protocol.ReceivedDeletedBlockInfo;
 import org.apache.hadoop.hdfs.server.protocol.StorageReceivedDeletedBlocks;
+import org.apache.hadoop.hdfs.util.LightWeightHashSet;
 import org.apache.hadoop.hdfs.util.LightWeightLinkedSet;
 import org.apache.hadoop.metrics2.util.MBeans;
 import org.apache.hadoop.net.Node;
@ -94,7 +95,6 @@ import org.apache.hadoop.util.Time;
import com.google.common.annotations.VisibleForTesting; import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions; import com.google.common.base.Preconditions;
import com.google.common.collect.Sets;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
@@ -195,7 +195,8 @@ public class BlockManager implements BlockStatsMXBean {
    * notified of all block deletions that might have been pending
    * when the failover happened.
    */
-  private final Set<Block> postponedMisreplicatedBlocks = Sets.newHashSet();
+  private final LightWeightHashSet<Block> postponedMisreplicatedBlocks =
+      new LightWeightHashSet<>();
 
   /**
    * Maps a StorageID to the set of blocks that are "extra" for this

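For context on the memory claim in the summary: java.util.HashSet (what Sets.newHashSet() returns) is backed by a HashMap, so every postponed block pays for a full map entry (key reference, dummy value reference, cached hash, next pointer) plus the wrapping map, whereas LightWeightHashSet keeps one small linked node per element. The sketch below is not part of the patch; it is a minimal illustration, with arbitrary example Block values and a hypothetical class name, that the new field remains a drop-in replacement for the old Set usage.

// Illustrative sketch only (not from the patch): exercising the replacement
// collection the way BlockManager exercises a Set<Block>.
import java.util.Iterator;

import org.apache.hadoop.hdfs.protocol.Block;
import org.apache.hadoop.hdfs.util.LightWeightHashSet;

public class PostponedBlocksSketch {
  public static void main(String[] args) {
    // Same declaration shape as the new BlockManager field.
    LightWeightHashSet<Block> postponed = new LightWeightHashSet<>();

    // Block(blockId, numBytes, generationStamp) -- arbitrary example values.
    postponed.add(new Block(1L, 0L, 1001L));
    postponed.add(new Block(2L, 0L, 1001L));

    // LightWeightHashSet implements java.util.Collection, so the usual
    // contains/iterate patterns keep working; removal during iteration
    // relies on the iterator remove() added by this patch.
    for (Iterator<Block> it = postponed.iterator(); it.hasNext(); ) {
      Block b = it.next();
      if (b.getBlockId() == 1L) {
        it.remove();
      }
    }
    System.out.println("postponed blocks remaining: " + postponed.size());
  }
}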
File: LightWeightHashSet.java

@@ -528,12 +528,13 @@ public class LightWeightHashSet<T> implements Collection<T> {
   }
 
   private class LinkedSetIterator implements Iterator<T> {
-    /** The starting modification for fail-fast. */
-    private final int startModification = modification;
+    /** The current modification epoch. */
+    private int expectedModification = modification;
     /** The current index of the entry array. */
     private int index = -1;
     /** The next element to return. */
     private LinkedElement<T> next = nextNonemptyEntry();
+    private LinkedElement<T> current;
 
     private LinkedElement<T> nextNonemptyEntry() {
       for (index++; index < entries.length && entries[index] == null; index++);
@@ -547,13 +548,14 @@ public class LightWeightHashSet<T> implements Collection<T> {
     @Override
     public T next() {
-      if (modification != startModification) {
+      if (modification != expectedModification) {
         throw new ConcurrentModificationException("modification="
-            + modification + " != startModification = " + startModification);
+            + modification + " != expectedModification = " + expectedModification);
       }
       if (next == null) {
         throw new NoSuchElementException();
       }
+      current = next;
       final T e = next.element;
       // find the next element
       final LinkedElement<T> n = next.next;
@@ -563,7 +565,16 @@ public class LightWeightHashSet<T> implements Collection<T> {
     @Override
     public void remove() {
-      throw new UnsupportedOperationException("Remove is not supported.");
+      if (current == null) {
+        throw new NoSuchElementException();
+      }
+      if (modification != expectedModification) {
+        throw new ConcurrentModificationException("modification="
+            + modification + " != expectedModification = " + expectedModification);
+      }
+      LightWeightHashSet.this.removeElem(current.element);
+      current = null;
+      expectedModification = modification;
     }
   }
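A note on the bookkeeping in this hunk: startModification is renamed to expectedModification and made non-final because remove() delegates to the enclosing set's removeElem(), which advances the set's modification counter; the iterator then re-syncs expectedModification so its own removal does not trip the fail-fast check in next(), while modifications made behind the iterator's back still do. The demo below is hypothetical and not part of the patch; it assumes that LightWeightHashSet.remove(Object) advances the same modification counter (which the re-sync in remove() implies), and the class name is invented.

// Hypothetical demo (not from the patch) of the fail-fast behaviour.
import java.util.ConcurrentModificationException;
import java.util.Iterator;

import org.apache.hadoop.hdfs.util.LightWeightHashSet;

public class FailFastDemo {
  public static void main(String[] args) {
    LightWeightHashSet<Integer> set = new LightWeightHashSet<>();
    for (int i = 0; i < 10; i++) {
      set.add(i);
    }

    // Removing through the iterator is fine: expectedModification is re-synced.
    for (Iterator<Integer> it = set.iterator(); it.hasNext(); ) {
      if (it.next() % 2 == 0) {
        it.remove();
      }
    }

    // Removing behind the iterator's back still fails fast on the next next().
    Iterator<Integer> it = set.iterator();
    it.next();
    set.remove(5);             // assumed to advance the modification counter
    try {
      it.next();               // expected to throw
    } catch (ConcurrentModificationException expected) {
      System.out.println("fail-fast still enforced: " + expected.getMessage());
    }
  }
}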

File: TestLightWeightHashSet.java

@@ -190,6 +190,33 @@ public class TestLightWeightHashSet{
     LOG.info("Test remove all - DONE");
   }
 
+  @Test
+  public void testRemoveAllViaIterator() {
+    LOG.info("Test remove all via iterator");
+    for (Integer i : list) {
+      assertTrue(set.add(i));
+    }
+    for (Iterator<Integer> iter = set.iterator(); iter.hasNext(); ) {
+      int e = iter.next();
+      // element should be there before removing
+      assertTrue(set.contains(e));
+      iter.remove();
+      // element should not be there now
+      assertFalse(set.contains(e));
+    }
+
+    // the deleted elements should not be there
+    for (int i = 0; i < NUM; i++) {
+      assertFalse(set.contains(list.get(i)));
+    }
+
+    // iterator should not have next
+    Iterator<Integer> iter = set.iterator();
+    assertFalse(iter.hasNext());
+    assertTrue(set.isEmpty());
+    LOG.info("Test remove all via iterator - DONE");
+  }
+
   @Test
   public void testPollAll() {
     LOG.info("Test poll all");