HDFS-6114. Block Scan log rolling will never happen if blocks written continuously leading to huge size of dncp_block_verification.log.curr (vinayakumarb via cmccabe)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1612944 13f79535-47bb-0310-9956-ffa450edef68
Colin McCabe 2014-07-23 21:00:07 +00:00
parent 288247d08c
commit ecc0a2879e
2 changed files with 46 additions and 12 deletions
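In short, the patch parks blocks that are added while a scan pass is running in a separate newBlockInfoSet and only folds them into blockInfoSet once the pass finishes (right where the verification logs roll), so a pass can always complete even when blocks are written continuously. Below is a minimal, self-contained Java sketch of that pattern, not the Hadoop class itself; ScanEntry and its comparator are simplified stand-ins for BlockScanInfo and BlockScanInfo.LAST_SCAN_TIME_COMPARATOR.

import java.util.Comparator;
import java.util.SortedSet;
import java.util.TreeSet;

// Simplified stand-in for BlockScanInfo: only the fields the ordering needs.
class ScanEntry {
  final long blockId;
  long lastScanTime;
  ScanEntry(long blockId, long lastScanTime) {
    this.blockId = blockId;
    this.lastScanTime = lastScanTime;
  }
}

class ScannerSketch {
  // Stand-in for BlockScanInfo.LAST_SCAN_TIME_COMPARATOR: oldest scan first,
  // block id as a tie-breaker so distinct blocks never collapse in the set.
  private static final Comparator<ScanEntry> LAST_SCAN_TIME_COMPARATOR =
      Comparator.<ScanEntry>comparingLong(e -> e.lastScanTime)
          .thenComparingLong(e -> e.blockId);

  // Blocks the current scan pass works through.
  private final SortedSet<ScanEntry> blockInfoSet =
      new TreeSet<ScanEntry>(LAST_SCAN_TIME_COMPARATOR);
  // Blocks that arrive while a pass is running; kept aside so the pass can end.
  private final SortedSet<ScanEntry> newBlockInfoSet =
      new TreeSet<ScanEntry>(LAST_SCAN_TIME_COMPARATOR);

  synchronized void addBlockInfo(ScanEntry info, boolean isNewBlock) {
    if (isNewBlock) {
      // A genuinely new block waits in the side set until the pass completes.
      if (!blockInfoSet.contains(info)) {
        newBlockInfoSet.add(info);
      }
    } else {
      // Rescheduling an already-tracked block goes straight into the main set.
      blockInfoSet.add(info);
    }
  }

  // Called once per finished pass, alongside log rolling: fold the new blocks
  // in so the next pass picks them up.
  synchronized void rollNewBlocksInfo() {
    blockInfoSet.addAll(newBlockInfoSet);
    newBlockInfoSet.clear();
  }
}

The committed code does the merge with an explicit loop over newBlockInfoSet rather than addAll, as the last hunk below shows; the effect is the same. The diff itself first adds the changelog entry and then the scanner changes.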


@@ -105,6 +105,10 @@ Release 2.6.0 - UNRELEASED
     HDFS-6731. Run "hdfs zkfc-formatZK" on a server in a non-namenode will cause
     a null pointer exception. (Masatake Iwasaki via brandonli)
 
+    HDFS-6114. Block Scan log rolling will never happen if blocks written
+    continuously leading to huge size of dncp_block_verification.log.curr
+    (vinayakumarb via cmccabe)
+
 Release 2.5.0 - UNRELEASED
 
   INCOMPATIBLE CHANGES


@@ -84,6 +84,10 @@ class BlockPoolSliceScanner {
   private final SortedSet<BlockScanInfo> blockInfoSet
       = new TreeSet<BlockScanInfo>(BlockScanInfo.LAST_SCAN_TIME_COMPARATOR);
+  private final SortedSet<BlockScanInfo> newBlockInfoSet =
+      new TreeSet<BlockScanInfo>(BlockScanInfo.LAST_SCAN_TIME_COMPARATOR);
+
   private final GSet<Block, BlockScanInfo> blockMap
       = new LightWeightGSet<Block, BlockScanInfo>(
           LightWeightGSet.computeCapacity(0.5, "BlockMap"));
@@ -195,7 +199,7 @@ class BlockPoolSliceScanner {
       BlockScanInfo info = new BlockScanInfo( block );
       info.lastScanTime = scanTime--;
       //still keep 'info.lastScanType' to NONE.
-      addBlockInfo(info);
+      addBlockInfo(info, false);
     }
 
     RollingLogs rollingLogs = null;
@@ -222,8 +226,22 @@ class BlockPoolSliceScanner {
     // not really required.
   }
 
-  private synchronized void addBlockInfo(BlockScanInfo info) {
-    boolean added = blockInfoSet.add(info);
+  /**
+   * Add the BlockScanInfo to sorted set of blockScanInfo
+   * @param info BlockScanInfo to be added
+   * @param isNewBlock true if the block is the new Block, false if
+   *          BlockScanInfo is being updated with new scanTime
+   */
+  private synchronized void addBlockInfo(BlockScanInfo info,
+      boolean isNewBlock) {
+    boolean added = false;
+    if (isNewBlock) {
+      // check whether the block already present
+      boolean exists = blockInfoSet.contains(info);
+      added = !exists && newBlockInfoSet.add(info);
+    } else {
+      added = blockInfoSet.add(info);
+    }
     blockMap.put(info);
     if (added) {
@@ -233,6 +251,9 @@ class BlockPoolSliceScanner {
   private synchronized void delBlockInfo(BlockScanInfo info) {
     boolean exists = blockInfoSet.remove(info);
+    if (!exists) {
+      exists = newBlockInfoSet.remove(info);
+    }
     blockMap.remove(info);
     if (exists) {
@@ -249,7 +270,7 @@ class BlockPoolSliceScanner {
         delBlockInfo(info);
         info.lastScanTime = e.verificationTime;
         info.lastScanType = ScanType.VERIFICATION_SCAN;
-        addBlockInfo(info);
+        addBlockInfo(info, false);
       }
     }
@@ -275,14 +296,14 @@ class BlockPoolSliceScanner {
     info = new BlockScanInfo(block.getLocalBlock());
     info.lastScanTime = getNewBlockScanTime();
-    addBlockInfo(info);
+    addBlockInfo(info, true);
     adjustThrottler();
   }
 
   /** Deletes the block from internal structures */
   synchronized void deleteBlock(Block block) {
     BlockScanInfo info = blockMap.get(block);
-    if ( info != null ) {
+    if (info != null) {
       delBlockInfo(info);
     }
   }
@@ -319,7 +340,7 @@ class BlockPoolSliceScanner {
     info.lastScanType = type;
     info.lastScanTime = now;
     info.lastScanOk = scanOk;
-    addBlockInfo(info);
+    addBlockInfo(info, false);
 
     // Don't update meta data if the verification failed.
     if (!scanOk) {
@@ -578,7 +599,7 @@ class BlockPoolSliceScanner {
         delBlockInfo(info);
         info.lastScanTime = lastScanTime;
         lastScanTime += verifyInterval;
-        addBlockInfo(info);
+        addBlockInfo(info, false);
       }
     }
   }
@@ -674,12 +695,21 @@ class BlockPoolSliceScanner {
       throw e;
     } finally {
       rollVerificationLogs();
+      rollNewBlocksInfo();
       if (LOG.isDebugEnabled()) {
        LOG.debug("Done scanning block pool: " + blockPoolId);
       }
     }
   }
 
+  // add new blocks to scan in next iteration
+  private synchronized void rollNewBlocksInfo() {
+    for (BlockScanInfo newBlock : newBlockInfoSet) {
+      blockInfoSet.add(newBlock);
+    }
+    newBlockInfoSet.clear();
+  }
+
   private synchronized void rollVerificationLogs() {
     if (verificationLog != null) {
       try {
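To make the effect of deferring new blocks concrete, here is a small, self-contained toy demo (an illustration written for this page, not code from the commit): if newly written blocks went straight into the working set, a pass that keeps finding "one more" entry could run indefinitely and the per-pass log roll would never happen; parking them in a pending set and merging after the pass keeps every pass bounded.

import java.util.TreeSet;

public class RollDemo {
  public static void main(String[] args) {
    TreeSet<Long> working = new TreeSet<Long>();
    TreeSet<Long> pending = new TreeSet<Long>();
    for (long i = 0; i < 5; i++) {
      working.add(i);
    }

    long nextNewBlock = 100;
    // One scan pass: only blocks present when the pass started get processed.
    while (!working.isEmpty()) {
      long scanned = working.pollFirst();
      System.out.println("scanned block " + scanned);
      // A block written during the pass is parked in the side set instead of
      // being fed back into the working set.
      pending.add(nextNewBlock++);
    }

    // Pass finished -> verification logs can roll; now fold in the new blocks
    // so the next pass sees them.
    working.addAll(pending);
    pending.clear();
    System.out.println("next pass starts with " + working.size() + " blocks");
  }
}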