From f680865d994b8b75c11fa85f3241b1b9c6851187 Mon Sep 17 00:00:00 2001
From: Suresh Srinivas
Date: Tue, 9 Apr 2013 01:21:48 +0000
Subject: [PATCH] HDFS-3940. Add Gset#clear method and clear the block map
 when namenode is shutdown. Contributed by Suresh Srinivas.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1465851 13f79535-47bb-0310-9956-ffa450edef68
---
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt   |  3 +++
 .../server/blockmanagement/BlockManager.java  |  3 +++
 .../server/blockmanagement/BlocksMap.java     |  2 +-
 .../hdfs/server/namenode/FSDirectory.java     |  4 ++++
 .../hdfs/server/namenode/FSNamesystem.java    |  9 ++++++++-
 .../org/apache/hadoop/hdfs/util/GSet.java     |  2 ++
 .../hadoop/hdfs/util/GSetByHashMap.java       |  5 +++++
 .../hadoop/hdfs/util/LightWeightGSet.java     | 20 +++++++++++--------
 .../org/apache/hadoop/hdfs/util/TestGSet.java |  5 +++++
 9 files changed, 43 insertions(+), 10 deletions(-)

diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 24d192be7eb..c682143a0af 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -393,6 +393,9 @@ Release 2.0.5-beta - UNRELEASED
     HDFS-4525. Provide an API for knowing that whether file is closed or not.
     (SreeHari via umamahesh)

+    HDFS-3940. Add Gset#clear method and clear the block map when namenode is
+    shutdown. (suresh)
+
   OPTIMIZATIONS

   BUG FIXES
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java
index 1caf18a3cbe..51eced86dda 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockManager.java
@@ -3180,4 +3180,7 @@ assert storedBlock.findDatanode(dn) < 0 : "Block " + block
     OK
   }

+  public void shutdown() {
+    blocksMap.close();
+  }
 }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlocksMap.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlocksMap.java
index dbfcaa70138..2f1c06bed6e 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlocksMap.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlocksMap.java
@@ -67,7 +67,7 @@ class BlocksMap {
   }

   void close() {
-    // Empty blocks once GSet#clear is implemented (HDFS-3940)
+    blocks.clear();
   }

   BlockCollection getBlockCollection(Block b) {
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java
index b11059a4bb4..07679b2eb7d 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java
@@ -2098,4 +2098,8 @@ public class FSDirectory implements Closeable {
       inode.setLocalName(name.getBytes());
     }
   }
+
+  void shutdown() {
+    nameCache.reset();
+  }
 }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
index ad528f0ad0a..a9b16494f17 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java
@@ -4820,8 +4820,15 @@ public class FSNamesystem implements Namesystem, FSClusterStats,
    * shutdown FSNamesystem
    */
   void shutdown() {
-    if (mbeanName != null)
+    if (mbeanName != null) {
       MBeans.unregister(mbeanName);
+    }
+    if (dir != null) {
+      dir.shutdown();
+    }
+    if (blockManager != null) {
+      blockManager.shutdown();
+    }
   }

diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/GSet.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/GSet.java
index b3de3aac223..f409f5e50f1 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/GSet.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/GSet.java
@@ -81,4 +81,6 @@ public interface GSet<K, E extends K> extends Iterable<E> {
    * @throws NullPointerException if key == null.
    */
   E remove(K key);
+
+  void clear();
 }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/GSetByHashMap.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/GSetByHashMap.java
index 7d2c61a7f12..92e49cfc1d5 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/GSetByHashMap.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/GSetByHashMap.java
@@ -65,4 +65,9 @@ public class GSetByHashMap<K, E extends K> implements GSet<K, E> {
   public Iterator<E> iterator() {
     return m.values().iterator();
   }
+
+  @Override
+  public void clear() {
+    m.clear();
+  }
 }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/LightWeightGSet.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/LightWeightGSet.java
index 5ab9a8cb355..9919175c72a 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/LightWeightGSet.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/util/LightWeightGSet.java
@@ -85,7 +85,6 @@ public class LightWeightGSet<K, E extends K> implements GSet<K, E> {
     if (LOG.isDebugEnabled()) {
       LOG.debug("recommended=" + recommended_length + ", actual=" + actual);
     }
-
     entries = new LinkedElement[actual];
     hash_mask = entries.length - 1;
   }
@@ -329,13 +328,18 @@
     final int exponent = e2 < 0? 0: e2 > 30? 30: e2;
     final int c = 1 << exponent;

-    if (LightWeightGSet.LOG.isDebugEnabled()) {
-      LOG.debug("Computing capacity for map " + mapName);
-      LOG.debug("VM type = " + vmBit + "-bit");
-      LOG.debug(percentage + "% max memory = "
-          + StringUtils.TraditionalBinaryPrefix.long2String(maxMemory, "B", 1));
-      LOG.debug("capacity = 2^" + exponent + " = " + c + " entries");
-    }
+    LOG.info("Computing capacity for map " + mapName);
+    LOG.info("VM type = " + vmBit + "-bit");
+    LOG.info(percentage + "% max memory = "
+        + StringUtils.TraditionalBinaryPrefix.long2String(maxMemory, "B", 1));
+    LOG.info("capacity = 2^" + exponent + " = " + c + " entries");
     return c;
   }
+
+  public void clear() {
+    for (int i = 0; i < entries.length; i++) {
+      entries[i] = null;
+    }
+    size = 0;
+  }
 }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/util/TestGSet.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/util/TestGSet.java
index 971d538b272..5f43cb53f93 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/util/TestGSet.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/util/TestGSet.java
@@ -388,6 +388,11 @@ public class TestGSet {
       return String.format(" iterate=%5d, contain=%5d, time elapsed=%5d.%03ds",
           iterate_count, contain_count, t/1000, t%1000);
     }
+
+    @Override
+    public void clear() {
+      gset.clear();
+    }
   }

   /** Test data set */