HDFS-3940. Merging change r1465851 from trunk
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1466101 13f79535-47bb-0310-9956-ffa450edef68
parent 335e02124b
commit 97b95fb3a0
hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -41,6 +41,9 @@ Release 2.0.5-beta - UNRELEASED
     HDFS-4525. Provide an API for knowing that whether file is closed or not.
     (SreeHari via umamahesh)
 
+    HDFS-3940. Add Gset#clear method and clear the block map when namenode is
+    shutdown. (suresh)
+
   OPTIMIZATIONS
 
   BUG FIXES
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java
@@ -3192,4 +3192,7 @@ assert storedBlock.findDatanode(dn) < 0 : "Block " + block
     OK
   }
 
+  public void shutdown() {
+    blocksMap.close();
+  }
 }
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlocksMap.java
@@ -67,7 +67,7 @@ class BlocksMap
 
 
   void close() {
-    // Empty blocks once GSet#clear is implemented (HDFS-3940)
+    blocks.clear();
   }
 
   BlockCollection getBlockCollection(Block b) {
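Together with the BlockManager hunk above, this wires up a simple delegation chain: NameNode shutdown calls BlockManager#shutdown, which calls BlocksMap#close, which empties the underlying map. A minimal self-contained sketch of that chain, using toy stand-in classes rather than the real HDFS types:

    import java.util.HashMap;
    import java.util.Map;

    // Toy stand-ins for BlockManager/BlocksMap; illustrative only.
    class ToyBlocksMap {
      private final Map<Long, String> blocks = new HashMap<Long, String>();

      void put(long blockId, String owner) { blocks.put(blockId, owner); }

      void close() {
        blocks.clear(); // drop every entry so the block map can be reclaimed promptly
      }
    }

    class ToyBlockManager {
      private final ToyBlocksMap blocksMap = new ToyBlocksMap();

      public void shutdown() {
        blocksMap.close(); // mirrors BlockManager#shutdown in this commit
      }
    }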
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java
@@ -2173,4 +2173,8 @@ public class FSDirectory implements Closeable {
       inode.setLocalName(name.getBytes());
     }
   }
+
+  void shutdown() {
+    nameCache.reset();
+  }
 }
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
@@ -4751,8 +4751,15 @@ public class FSNamesystem implements Namesystem, FSClusterStats,
    * shutdown FSNamesystem
    */
   void shutdown() {
-    if (mbeanName != null)
-      MBeans.unregister(mbeanName);
+    if (mbeanName != null) {
+      MBeans.unregister(mbeanName);
+    }
+    if (dir != null) {
+      dir.shutdown();
+    }
+    if (blockManager != null) {
+      blockManager.shutdown();
+    }
   }
 
 
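The rewritten shutdown() null-checks each subsystem before touching it, so a NameNode that failed partway through startup (leaving dir or blockManager unset) can still shut down without a NullPointerException. A sketch of the same guard pattern, with invented stand-in types rather than the actual FSNamesystem fields:

    // Illustrative only; the field and type names are stand-ins.
    class ToyNamesystem {
      interface Shutdownable { void shutdown(); }

      private Object mbeanName;          // null if the MBean was never registered
      private Shutdownable dir;          // null if startup failed before init
      private Shutdownable blockManager; // null if startup failed before init

      void shutdown() {
        if (mbeanName != null) {
          mbeanName = null;              // stands in for MBeans.unregister(mbeanName)
        }
        if (dir != null) {
          dir.shutdown();
        }
        if (blockManager != null) {
          blockManager.shutdown();
        }
      }
    }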
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/GSet.java
@@ -81,4 +81,6 @@ public interface GSet<K, E extends K> extends Iterable<E> {
    * @throws NullPointerException if key == null.
    */
   E remove(K key);
+
+  void clear();
 }
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/GSetByHashMap.java
@@ -65,4 +65,9 @@ public class GSetByHashMap<K, E extends K> implements GSet<K, E> {
   public Iterator<E> iterator() {
     return m.values().iterator();
   }
+
+  @Override
+  public void clear() {
+    m.clear();
+  }
 }
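The new GSet#clear contract is easiest to see through the HashMap-backed implementation. A usage sketch (the two-argument constructor is assumed from the existing GSetByHashMap code; the assertions need the JVM's -ea flag):

    // After clear(): size() == 0 and contains(k) == false for every former key.
    GSet<Integer, Integer> set = new GSetByHashMap<Integer, Integer>(16, 0.75f);
    set.put(1);
    set.put(2);
    set.clear();
    assert set.size() == 0;
    assert !set.contains(1);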
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/LightWeightGSet.java
@@ -85,7 +85,6 @@ public class LightWeightGSet<K, E extends K> implements GSet<K, E> {
     if (LOG.isDebugEnabled()) {
       LOG.debug("recommended=" + recommended_length + ", actual=" + actual);
     }
-
     entries = new LinkedElement[actual];
     hash_mask = entries.length - 1;
   }
@@ -329,13 +328,18 @@ public class LightWeightGSet<K, E extends K> implements GSet<K, E> {
     final int exponent = e2 < 0? 0: e2 > 30? 30: e2;
     final int c = 1 << exponent;
 
-    if (LightWeightGSet.LOG.isDebugEnabled()) {
-      LOG.debug("Computing capacity for map " + mapName);
-      LOG.debug("VM type       = " + vmBit + "-bit");
-      LOG.debug(percentage + "% max memory = "
-          + StringUtils.TraditionalBinaryPrefix.long2String(maxMemory, "B", 1));
-      LOG.debug("capacity      = 2^" + exponent + " = " + c + " entries");
-    }
+    LOG.info("Computing capacity for map " + mapName);
+    LOG.info("VM type       = " + vmBit + "-bit");
+    LOG.info(percentage + "% max memory = "
+        + StringUtils.TraditionalBinaryPrefix.long2String(maxMemory, "B", 1));
+    LOG.info("capacity      = 2^" + exponent + " = " + c + " entries");
     return c;
   }
+
+  public void clear() {
+    for (int i = 0; i < entries.length; i++) {
+      entries[i] = null;
+    }
+    size = 0;
+  }
 }
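For the array-of-chains layout used by LightWeightGSet, clear() only has to null each bucket head and reset the count; every LinkedElement chain becomes unreachable at once. A standalone sketch of the same pattern with a toy node type:

    // Toy version of clearing a chained, array-backed set; illustrative only.
    class ToyChainedSet {
      static final class Node { int key; Node next; }

      private final Node[] entries = new Node[1 << 4]; // power-of-two table
      private int size;

      void clear() {
        for (int i = 0; i < entries.length; i++) {
          entries[i] = null; // unlinking the head frees the whole chain for GC
        }
        size = 0;
      }
    }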
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/util/TestGSet.java
@@ -388,6 +388,11 @@ public class TestGSet {
       return String.format(" iterate=%5d, contain=%5d, time elapsed=%5d.%03ds",
           iterate_count, contain_count, t/1000, t%1000);
     }
+
+    @Override
+    public void clear() {
+      gset.clear();
+    }
   }
 
   /** Test data set */
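A test along these lines (hypothetical, not part of this commit) would pin down the clear() contract directly:

    // Hypothetical JUnit 4 check; names are illustrative.
    @Test
    public void testClearEmptiesTheSet() {
      GSet<Integer, Integer> set = new GSetByHashMap<Integer, Integer>(16, 0.75f);
      for (int i = 0; i < 100; i++) {
        set.put(i);
      }
      set.clear();
      Assert.assertEquals(0, set.size());
      Assert.assertFalse(set.iterator().hasNext());
    }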