HDFS-6114. Block Scan log rolling will never happen if blocks written continuously leading to huge size of dncp_block_verification.log.curr (vinayakumarb via cmccabe)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1612944 13f79535-47bb-0310-9956-ffa450edef68
Colin McCabe 2014-07-23 21:00:07 +00:00
parent 288247d08c
commit ecc0a2879e
2 changed files with 46 additions and 12 deletions
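As the diff below shows, the fix keeps newly added blocks out of blockInfoSet (the set, ordered by last scan time, that the running scan iteration drains) and parks them in a separate newBlockInfoSet; rollNewBlocksInfo() merges them back only after rollVerificationLogs() has run at the end of an iteration. The following is a minimal, self-contained sketch of that defer-and-merge pattern, not the Hadoop class itself; every name in it (DeferredAdditionSketch, Entry, scanSet, newEntrySet) is invented for illustration.

import java.util.Comparator;
import java.util.SortedSet;
import java.util.TreeSet;

/**
 * Minimal sketch (not the Hadoop class) of the pattern this patch introduces:
 * entries that show up while a scan iteration is in progress are parked in a
 * side set and merged into the scan-ordered set only after the iteration has
 * finished and the verification log has rolled.
 */
public class DeferredAdditionSketch {

  /** Hypothetical stand-in for BlockScanInfo: ordered by last scan time. */
  static final class Entry {
    final long id;
    long lastScanTime;

    Entry(long id, long lastScanTime) {
      this.id = id;
      this.lastScanTime = lastScanTime;
    }
  }

  private static final Comparator<Entry> LAST_SCAN_TIME_ORDER =
      Comparator.<Entry>comparingLong(e -> e.lastScanTime)
                .thenComparingLong(e -> e.id);

  // Entries the current iteration scans, oldest scan time first.
  private final SortedSet<Entry> scanSet = new TreeSet<>(LAST_SCAN_TIME_ORDER);
  // Entries added mid-iteration; deferred until the next iteration.
  private final SortedSet<Entry> newEntrySet = new TreeSet<>(LAST_SCAN_TIME_ORDER);

  /** Mirrors addBlockInfo(info, isNewBlock): brand-new entries do not join the live scan set. */
  synchronized void add(Entry e, boolean isNew) {
    if (isNew) {
      if (!scanSet.contains(e)) {
        newEntrySet.add(e);
      }
    } else {
      scanSet.add(e);
    }
  }

  /** Mirrors rollNewBlocksInfo(): called after the per-iteration log roll. */
  synchronized void rollNewEntries() {
    scanSet.addAll(newEntrySet);
    newEntrySet.clear();
  }

  public static void main(String[] args) {
    DeferredAdditionSketch s = new DeferredAdditionSketch();
    s.add(new Entry(1, 100L), false);      // already-known entry, joins the scan set
    s.add(new Entry(2, 0L), true);         // arrives mid-iteration, parked aside
    System.out.println(s.scanSet.size());  // 1 -> the running iteration can still drain
    s.rollNewEntries();                    // after the logs roll
    System.out.println(s.scanSet.size());  // 2 -> picked up in the next iteration
  }
}

Compiling and running the sketch prints 1 and then 2: an entry added mid-iteration only becomes visible to the scanner after the roll, which is the property that lets an iteration finish, and the verification log roll over, even while blocks keep being written.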

hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt

@@ -105,6 +105,10 @@ Release 2.6.0 - UNRELEASED
    HDFS-6731. Run "hdfs zkfc -formatZK" on a server in a non-namenode will cause
    a null pointer exception. (Masatake Iwasaki via brandonli)
+    HDFS-6114. Block Scan log rolling will never happen if blocks written
+    continuously leading to huge size of dncp_block_verification.log.curr
+    (vinayakumarb via cmccabe)
Release 2.5.0 - UNRELEASED
  INCOMPATIBLE CHANGES

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BlockPoolSliceScanner.java

@@ -84,6 +84,10 @@ class BlockPoolSliceScanner {
  private final SortedSet<BlockScanInfo> blockInfoSet
      = new TreeSet<BlockScanInfo>(BlockScanInfo.LAST_SCAN_TIME_COMPARATOR);
+  private final SortedSet<BlockScanInfo> newBlockInfoSet =
+      new TreeSet<BlockScanInfo>(BlockScanInfo.LAST_SCAN_TIME_COMPARATOR);
  private final GSet<Block, BlockScanInfo> blockMap
      = new LightWeightGSet<Block, BlockScanInfo>(
          LightWeightGSet.computeCapacity(0.5, "BlockMap"));
@@ -195,7 +199,7 @@ class BlockPoolSliceScanner {
      BlockScanInfo info = new BlockScanInfo( block );
      info.lastScanTime = scanTime--;
      //still keep 'info.lastScanType' to NONE.
-      addBlockInfo(info);
+      addBlockInfo(info, false);
    }
    RollingLogs rollingLogs = null;
@@ -222,8 +226,22 @@
    // not really required.
  }
-  private synchronized void addBlockInfo(BlockScanInfo info) {
-    boolean added = blockInfoSet.add(info);
+  /**
+   * Add the BlockScanInfo to sorted set of blockScanInfo
+   * @param info BlockScanInfo to be added
+   * @param isNewBlock true if the block is the new Block, false if
+   *          BlockScanInfo is being updated with new scanTime
+   */
+  private synchronized void addBlockInfo(BlockScanInfo info,
+      boolean isNewBlock) {
+    boolean added = false;
+    if (isNewBlock) {
+      // check whether the block already present
+      boolean exists = blockInfoSet.contains(info);
+      added = !exists && newBlockInfoSet.add(info);
+    } else {
+      added = blockInfoSet.add(info);
+    }
    blockMap.put(info);
    if (added) {
@@ -233,6 +251,9 @@
  private synchronized void delBlockInfo(BlockScanInfo info) {
    boolean exists = blockInfoSet.remove(info);
+    if (!exists){
+      exists = newBlockInfoSet.remove(info);
+    }
    blockMap.remove(info);
    if (exists) {
@@ -249,7 +270,7 @@
      delBlockInfo(info);
      info.lastScanTime = e.verificationTime;
      info.lastScanType = ScanType.VERIFICATION_SCAN;
-      addBlockInfo(info);
+      addBlockInfo(info, false);
    }
  }
@@ -275,7 +296,7 @@
    info = new BlockScanInfo(block.getLocalBlock());
    info.lastScanTime = getNewBlockScanTime();
-    addBlockInfo(info);
+    addBlockInfo(info, true);
    adjustThrottler();
  }
@@ -319,7 +340,7 @@
    info.lastScanType = type;
    info.lastScanTime = now;
    info.lastScanOk = scanOk;
-    addBlockInfo(info);
+    addBlockInfo(info, false);
    // Don't update meta data if the verification failed.
    if (!scanOk) {
@@ -578,7 +599,7 @@
          delBlockInfo(info);
          info.lastScanTime = lastScanTime;
          lastScanTime += verifyInterval;
-          addBlockInfo(info);
+          addBlockInfo(info, false);
        }
      }
    }
@@ -674,12 +695,21 @@
      throw e;
    } finally {
      rollVerificationLogs();
+      rollNewBlocksInfo();
      if (LOG.isDebugEnabled()) {
        LOG.debug("Done scanning block pool: " + blockPoolId);
      }
    }
  }
+  // add new blocks to scan in next iteration
+  private synchronized void rollNewBlocksInfo() {
+    for (BlockScanInfo newBlock : newBlockInfoSet) {
+      blockInfoSet.add(newBlock);
+    }
+    newBlockInfoSet.clear();
+  }
  private synchronized void rollVerificationLogs() {
    if (verificationLog != null) {
      try {