HDFS-13350. Negative legacy block ID will confuse Erasure Coding to be considered as striped block. (Contributed by Lei (Eddy) Xu).

(cherry picked from commit 1cbf23df145af01692b8aaa438642b64e330cd05)
This commit is contained in:
Lei Xu 2018-04-04 15:56:17 -07:00
parent fa464c12ad
commit 2543a9dd77
8 changed files with 136 additions and 57 deletions

View File

@ -239,6 +239,23 @@ public class BlockIdManager {
legacyGenerationStampLimit = HdfsConstants.GRANDFATHER_GENERATION_STAMP; legacyGenerationStampLimit = HdfsConstants.GRANDFATHER_GENERATION_STAMP;
} }
/**
 * Return true if the block is a striped (erasure-coded) block.
 *
 * Before HDFS-4645, block IDs were randomly generated (legacy), so it is
 * possible for a legacy block ID to be negative, and such an ID must not
 * be mistaken for a striped-block ID. Hence the extra legacy check on top
 * of the raw ID-range test.
 *
 * @param block the block to classify.
 * @return true iff the block ID falls in the striped range and the block
 *         is not a legacy (pre-HDFS-4645) block.
 * @see #isLegacyBlock(Block) detecting legacy block IDs.
 */
public boolean isStripedBlock(Block block) {
return isStripedBlockID(block.getBlockId()) && !isLegacyBlock(block);
}
/**
 * See {@link #isStripedBlock(Block)}; this function should not be used on
 * its own to determine whether a block is a striped block, because legacy
 * (randomly generated) block IDs may fall in the striped ID range.
 */
public static boolean isStripedBlockID(long id) { public static boolean isStripedBlockID(long id) {
return BlockType.fromBlockId(id) == STRIPED; return BlockType.fromBlockId(id) == STRIPED;
} }

View File

@ -448,7 +448,8 @@ public class BlockManager implements BlockStatsMXBean {
DFSConfigKeys.DFS_NAMENODE_STARTUP_DELAY_BLOCK_DELETION_SEC_DEFAULT) * 1000L; DFSConfigKeys.DFS_NAMENODE_STARTUP_DELAY_BLOCK_DELETION_SEC_DEFAULT) * 1000L;
invalidateBlocks = new InvalidateBlocks( invalidateBlocks = new InvalidateBlocks(
datanodeManager.getBlockInvalidateLimit(), datanodeManager.getBlockInvalidateLimit(),
startupDelayBlockDeletionInMs); startupDelayBlockDeletionInMs,
blockIdManager);
// Compute the map capacity by allocating 2% of total memory // Compute the map capacity by allocating 2% of total memory
blocksMap = new BlocksMap( blocksMap = new BlocksMap(
@ -1677,7 +1678,7 @@ public class BlockManager implements BlockStatsMXBean {
corrupted.setBlockId(b.getStored().getBlockId()); corrupted.setBlockId(b.getStored().getBlockId());
} }
corruptReplicas.addToCorruptReplicasMap(corrupted, node, b.getReason(), corruptReplicas.addToCorruptReplicasMap(corrupted, node, b.getReason(),
b.getReasonCode()); b.getReasonCode(), b.getStored().isStriped());
NumberReplicas numberOfReplicas = countNodes(b.getStored()); NumberReplicas numberOfReplicas = countNodes(b.getStored());
boolean hasEnoughLiveReplicas = numberOfReplicas.liveReplicas() >= boolean hasEnoughLiveReplicas = numberOfReplicas.liveReplicas() >=

View File

@ -486,7 +486,7 @@ class BlockManagerSafeMode {
if (!blockManager.getShouldPostponeBlocksFromFuture() && if (!blockManager.getShouldPostponeBlocksFromFuture() &&
!inRollBack && blockManager.isGenStampInFuture(brr)) { !inRollBack && blockManager.isGenStampInFuture(brr)) {
if (BlockIdManager.isStripedBlockID(brr.getBlockId())) { if (blockManager.getBlockIdManager().isStripedBlock(brr)) {
bytesInFutureECBlockGroups.add(brr.getBytesOnDisk()); bytesInFutureECBlockGroups.add(brr.getBytesOnDisk());
} else { } else {
bytesInFutureBlocks.add(brr.getBytesOnDisk()); bytesInFutureBlocks.add(brr.getBytesOnDisk());

View File

@ -93,7 +93,7 @@ class BlocksMap {
* remove it from all data-node lists it belongs to; * remove it from all data-node lists it belongs to;
* and remove all data-node locations associated with the block. * and remove all data-node locations associated with the block.
*/ */
void removeBlock(Block block) { void removeBlock(BlockInfo block) {
BlockInfo blockInfo = blocks.remove(block); BlockInfo blockInfo = blocks.remove(block);
if (blockInfo == null) { if (blockInfo == null) {
return; return;
@ -175,7 +175,7 @@ class BlocksMap {
if (info.hasNoStorage() // no datanodes left if (info.hasNoStorage() // no datanodes left
&& info.isDeleted()) { // does not belong to a file && info.isDeleted()) { // does not belong to a file
blocks.remove(b); // remove block from the map blocks.remove(b); // remove block from the map
decrementBlockStat(b); decrementBlockStat(info);
} }
return removed; return removed;
} }
@ -207,16 +207,16 @@ class BlocksMap {
return capacity; return capacity;
} }
private void incrementBlockStat(Block block) { private void incrementBlockStat(BlockInfo block) {
if (BlockIdManager.isStripedBlockID(block.getBlockId())) { if (block.isStriped()) {
totalECBlockGroups.increment(); totalECBlockGroups.increment();
} else { } else {
totalReplicatedBlocks.increment(); totalReplicatedBlocks.increment();
} }
} }
private void decrementBlockStat(Block block) { private void decrementBlockStat(BlockInfo block) {
if (BlockIdManager.isStripedBlockID(block.getBlockId())) { if (block.isStriped()) {
totalECBlockGroups.decrement(); totalECBlockGroups.decrement();
assert totalECBlockGroups.longValue() >= 0 : assert totalECBlockGroups.longValue() >= 0 :
"Total number of ec block groups should be non-negative"; "Total number of ec block groups should be non-negative";

View File

@ -69,12 +69,12 @@ public class CorruptReplicasMap{
* @param reasonCode the enum representation of the reason * @param reasonCode the enum representation of the reason
*/ */
void addToCorruptReplicasMap(Block blk, DatanodeDescriptor dn, void addToCorruptReplicasMap(Block blk, DatanodeDescriptor dn,
String reason, Reason reasonCode) { String reason, Reason reasonCode, boolean isStriped) {
Map <DatanodeDescriptor, Reason> nodes = corruptReplicasMap.get(blk); Map <DatanodeDescriptor, Reason> nodes = corruptReplicasMap.get(blk);
if (nodes == null) { if (nodes == null) {
nodes = new HashMap<DatanodeDescriptor, Reason>(); nodes = new HashMap<DatanodeDescriptor, Reason>();
corruptReplicasMap.put(blk, nodes); corruptReplicasMap.put(blk, nodes);
incrementBlockStat(blk); incrementBlockStat(isStriped);
} }
String reasonText; String reasonText;
@ -103,11 +103,11 @@ public class CorruptReplicasMap{
* Remove Block from CorruptBlocksMap. * Remove Block from CorruptBlocksMap.
* @param blk Block to be removed * @param blk Block to be removed
*/ */
void removeFromCorruptReplicasMap(Block blk) { void removeFromCorruptReplicasMap(BlockInfo blk) {
if (corruptReplicasMap != null) { if (corruptReplicasMap != null) {
Map<DatanodeDescriptor, Reason> value = corruptReplicasMap.remove(blk); Map<DatanodeDescriptor, Reason> value = corruptReplicasMap.remove(blk);
if (value != null) { if (value != null) {
decrementBlockStat(blk); decrementBlockStat(blk.isStriped());
} }
} }
} }
@ -119,12 +119,13 @@ public class CorruptReplicasMap{
* @return true if the removal is successful; * @return true if the removal is successful;
false if the replica is not in the map false if the replica is not in the map
*/ */
boolean removeFromCorruptReplicasMap(Block blk, DatanodeDescriptor datanode) { boolean removeFromCorruptReplicasMap(
BlockInfo blk, DatanodeDescriptor datanode) {
return removeFromCorruptReplicasMap(blk, datanode, Reason.ANY); return removeFromCorruptReplicasMap(blk, datanode, Reason.ANY);
} }
boolean removeFromCorruptReplicasMap(Block blk, DatanodeDescriptor datanode, boolean removeFromCorruptReplicasMap(
Reason reason) { BlockInfo blk, DatanodeDescriptor datanode, Reason reason) {
Map <DatanodeDescriptor, Reason> datanodes = corruptReplicasMap.get(blk); Map <DatanodeDescriptor, Reason> datanodes = corruptReplicasMap.get(blk);
if (datanodes == null) { if (datanodes == null) {
return false; return false;
@ -141,23 +142,23 @@ public class CorruptReplicasMap{
if (datanodes.isEmpty()) { if (datanodes.isEmpty()) {
// remove the block if there is no more corrupted replicas // remove the block if there is no more corrupted replicas
corruptReplicasMap.remove(blk); corruptReplicasMap.remove(blk);
decrementBlockStat(blk); decrementBlockStat(blk.isStriped());
} }
return true; return true;
} }
return false; return false;
} }
private void incrementBlockStat(Block block) { private void incrementBlockStat(boolean isStriped) {
if (BlockIdManager.isStripedBlockID(block.getBlockId())) { if (isStriped) {
totalCorruptECBlockGroups.increment(); totalCorruptECBlockGroups.increment();
} else { } else {
totalCorruptBlocks.increment(); totalCorruptBlocks.increment();
} }
} }
private void decrementBlockStat(Block block) { private void decrementBlockStat(boolean isStriped) {
if (BlockIdManager.isStripedBlockID(block.getBlockId())) { if (isStriped) {
totalCorruptECBlockGroups.decrement(); totalCorruptECBlockGroups.decrement();
} else { } else {
totalCorruptBlocks.decrement(); totalCorruptBlocks.decrement();
@ -205,6 +206,8 @@ public class CorruptReplicasMap{
* is null, up to numExpectedBlocks blocks are returned from the beginning. * is null, up to numExpectedBlocks blocks are returned from the beginning.
* If startingBlockId cannot be found, null is returned. * If startingBlockId cannot be found, null is returned.
* *
* @param bim BlockIdManager to determine the block type.
* @param blockType desired block type to return.
* @param numExpectedBlocks Number of block ids to return. * @param numExpectedBlocks Number of block ids to return.
* 0 <= numExpectedBlocks <= 100 * 0 <= numExpectedBlocks <= 100
* @param startingBlockId Block id from which to start. If null, start at * @param startingBlockId Block id from which to start. If null, start at
@ -212,7 +215,7 @@ public class CorruptReplicasMap{
* @return Up to numExpectedBlocks blocks from startingBlockId if it exists * @return Up to numExpectedBlocks blocks from startingBlockId if it exists
*/ */
@VisibleForTesting @VisibleForTesting
long[] getCorruptBlockIdsForTesting(BlockType blockType, long[] getCorruptBlockIdsForTesting(BlockIdManager bim, BlockType blockType,
int numExpectedBlocks, Long startingBlockId) { int numExpectedBlocks, Long startingBlockId) {
if (numExpectedBlocks < 0 || numExpectedBlocks > 100) { if (numExpectedBlocks < 0 || numExpectedBlocks > 100) {
return null; return null;
@ -223,11 +226,9 @@ public class CorruptReplicasMap{
.stream() .stream()
.filter(r -> { .filter(r -> {
if (blockType == BlockType.STRIPED) { if (blockType == BlockType.STRIPED) {
return BlockIdManager.isStripedBlockID(r.getBlockId()) && return bim.isStripedBlock(r) && r.getBlockId() >= cursorBlockId;
r.getBlockId() >= cursorBlockId;
} else { } else {
return !BlockIdManager.isStripedBlockID(r.getBlockId()) && return !bim.isStripedBlock(r) && r.getBlockId() >= cursorBlockId;
r.getBlockId() >= cursorBlockId;
} }
}) })
.sorted() .sorted()

View File

@ -57,6 +57,7 @@ class InvalidateBlocks {
private final LongAdder numBlocks = new LongAdder(); private final LongAdder numBlocks = new LongAdder();
private final LongAdder numECBlocks = new LongAdder(); private final LongAdder numECBlocks = new LongAdder();
private final int blockInvalidateLimit; private final int blockInvalidateLimit;
private final BlockIdManager blockIdManager;
/** /**
* The period of pending time for block invalidation since the NameNode * The period of pending time for block invalidation since the NameNode
@ -66,9 +67,11 @@ class InvalidateBlocks {
/** the startup time */ /** the startup time */
private final long startupTime = Time.monotonicNow(); private final long startupTime = Time.monotonicNow();
InvalidateBlocks(final int blockInvalidateLimit, long pendingPeriodInMs) { InvalidateBlocks(final int blockInvalidateLimit, long pendingPeriodInMs,
final BlockIdManager blockIdManager) {
this.blockInvalidateLimit = blockInvalidateLimit; this.blockInvalidateLimit = blockInvalidateLimit;
this.pendingPeriodInMs = pendingPeriodInMs; this.pendingPeriodInMs = pendingPeriodInMs;
this.blockIdManager = blockIdManager;
printBlockDeletionTime(BlockManager.LOG); printBlockDeletionTime(BlockManager.LOG);
} }
@ -124,7 +127,7 @@ class InvalidateBlocks {
private LightWeightHashSet<Block> getBlocksSet(final DatanodeInfo dn, private LightWeightHashSet<Block> getBlocksSet(final DatanodeInfo dn,
final Block block) { final Block block) {
if (BlockIdManager.isStripedBlockID(block.getBlockId())) { if (blockIdManager.isStripedBlock(block)) {
return getECBlocksSet(dn); return getECBlocksSet(dn);
} else { } else {
return getBlocksSet(dn); return getBlocksSet(dn);
@ -133,7 +136,7 @@ class InvalidateBlocks {
private void putBlocksSet(final DatanodeInfo dn, final Block block, private void putBlocksSet(final DatanodeInfo dn, final Block block,
final LightWeightHashSet set) { final LightWeightHashSet set) {
if (BlockIdManager.isStripedBlockID(block.getBlockId())) { if (blockIdManager.isStripedBlock(block)) {
assert getECBlocksSet(dn) == null; assert getECBlocksSet(dn) == null;
nodeToECBlocks.put(dn, set); nodeToECBlocks.put(dn, set);
} else { } else {
@ -178,7 +181,7 @@ class InvalidateBlocks {
putBlocksSet(datanode, block, set); putBlocksSet(datanode, block, set);
} }
if (set.add(block)) { if (set.add(block)) {
if (BlockIdManager.isStripedBlockID(block.getBlockId())) { if (blockIdManager.isStripedBlock(block)) {
numECBlocks.increment(); numECBlocks.increment();
} else { } else {
numBlocks.increment(); numBlocks.increment();
@ -206,7 +209,7 @@ class InvalidateBlocks {
synchronized void remove(final DatanodeInfo dn, final Block block) { synchronized void remove(final DatanodeInfo dn, final Block block) {
final LightWeightHashSet<Block> v = getBlocksSet(dn, block); final LightWeightHashSet<Block> v = getBlocksSet(dn, block);
if (v != null && v.remove(block)) { if (v != null && v.remove(block)) {
if (BlockIdManager.isStripedBlockID(block.getBlockId())) { if (blockIdManager.isStripedBlock(block)) {
numECBlocks.decrement(); numECBlocks.decrement();
} else { } else {
numBlocks.decrement(); numBlocks.decrement();

View File

@ -39,6 +39,8 @@ import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.StripedFileTestUtil; import org.apache.hadoop.hdfs.StripedFileTestUtil;
import org.apache.hadoop.hdfs.protocol.Block; import org.apache.hadoop.hdfs.protocol.Block;
import org.apache.hadoop.hdfs.protocol.BlockListAsLongs; import org.apache.hadoop.hdfs.protocol.BlockListAsLongs;
import org.apache.hadoop.hdfs.protocol.BlockType;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.hdfs.protocol.ExtendedBlock; import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
import org.apache.hadoop.hdfs.protocol.LocatedBlock; import org.apache.hadoop.hdfs.protocol.LocatedBlock;
import org.apache.hadoop.hdfs.protocol.LocatedBlocks; import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
@ -114,10 +116,13 @@ import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull; import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue; import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.reset; import static org.mockito.Mockito.reset;
import static org.mockito.Mockito.spy; import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
public class TestBlockManager { public class TestBlockManager {
private DatanodeStorageInfo[] storages; private DatanodeStorageInfo[] storages;
@ -1343,14 +1348,14 @@ public class TestBlockManager {
spyBM.createLocatedBlocks(new BlockInfo[]{blockInfo}, 3L, false, 0L, 3L, spyBM.createLocatedBlocks(new BlockInfo[]{blockInfo}, 3L, false, 0L, 3L,
false, false, null, null); false, false, null, null);
verify(spyBM, Mockito.atLeast(0)). verify(spyBM, Mockito.atLeast(0)).
isReplicaCorrupt(Mockito.any(BlockInfo.class), isReplicaCorrupt(any(BlockInfo.class),
Mockito.any(DatanodeDescriptor.class)); any(DatanodeDescriptor.class));
addCorruptBlockOnNodes(0, origNodes); addCorruptBlockOnNodes(0, origNodes);
spyBM.createLocatedBlocks(new BlockInfo[]{blockInfo}, 3L, false, 0L, 3L, spyBM.createLocatedBlocks(new BlockInfo[]{blockInfo}, 3L, false, 0L, 3L,
false, false, null, null); false, false, null, null);
verify(spyBM, Mockito.atLeast(1)). verify(spyBM, Mockito.atLeast(1)).
isReplicaCorrupt(Mockito.any(BlockInfo.class), isReplicaCorrupt(any(BlockInfo.class),
Mockito.any(DatanodeDescriptor.class)); any(DatanodeDescriptor.class));
} }
@Test (timeout = 300000) @Test (timeout = 300000)
@ -1506,8 +1511,8 @@ public class TestBlockManager {
blockInfo.getGenerationStamp() + 1, blockInfo.getGenerationStamp() + 1,
blockInfo.getNumBytes(), blockInfo.getNumBytes(),
new DatanodeStorageInfo[]{}); new DatanodeStorageInfo[]{});
BlockCollection mockedBc = Mockito.mock(BlockCollection.class); BlockCollection mockedBc = mock(BlockCollection.class);
Mockito.when(mockedBc.getBlocks()).thenReturn(new BlockInfo[]{blockInfo}); when(mockedBc.getBlocks()).thenReturn(new BlockInfo[]{blockInfo});
bm.checkRedundancy(mockedBc); bm.checkRedundancy(mockedBc);
return blockInfo; return blockInfo;
} }
@ -1524,8 +1529,8 @@ public class TestBlockManager {
Mockito.doReturn(bc).when(fsn).getBlockCollection(inodeId); Mockito.doReturn(bc).when(fsn).getBlockCollection(inodeId);
bm.blocksMap.addBlockCollection(blockInfo, bc); bm.blocksMap.addBlockCollection(blockInfo, bc);
nodesList.get(0).setInMaintenance(); nodesList.get(0).setInMaintenance();
BlockCollection mockedBc = Mockito.mock(BlockCollection.class); BlockCollection mockedBc = mock(BlockCollection.class);
Mockito.when(mockedBc.getBlocks()).thenReturn(new BlockInfo[]{blockInfo}); when(mockedBc.getBlocks()).thenReturn(new BlockInfo[]{blockInfo});
bm.checkRedundancy(mockedBc); bm.checkRedundancy(mockedBc);
return blockInfo; return blockInfo;
} }
@ -1580,8 +1585,8 @@ public class TestBlockManager {
Mockito.doReturn(bc).when(fsn).getBlockCollection(inodeId); Mockito.doReturn(bc).when(fsn).getBlockCollection(inodeId);
bm.blocksMap.addBlockCollection(blockInfo, bc); bm.blocksMap.addBlockCollection(blockInfo, bc);
nodesList.get(0).startDecommission(); nodesList.get(0).startDecommission();
BlockCollection mockedBc = Mockito.mock(BlockCollection.class); BlockCollection mockedBc = mock(BlockCollection.class);
Mockito.when(mockedBc.getBlocks()).thenReturn(new BlockInfo[]{blockInfo}); when(mockedBc.getBlocks()).thenReturn(new BlockInfo[]{blockInfo});
bm.checkRedundancy(mockedBc); bm.checkRedundancy(mockedBc);
return blockInfo; return blockInfo;
} }
@ -1623,4 +1628,40 @@ public class TestBlockManager {
} }
} }
@Test
public void testLegacyBlockInInvalidateBlocks() {
  // Generation stamps below this limit mark a block as "legacy"
  // (pre-HDFS-4645, randomly generated block ID).
  final long legacyGenerationStampLimit = 10000;
  BlockIdManager bim = Mockito.mock(BlockIdManager.class);
  when(bim.getLegacyGenerationStampLimit())
      .thenReturn(legacyGenerationStampLimit);
  // Use the real classification logic on top of the mocked stamp limit.
  when(bim.isStripedBlock(any(Block.class))).thenCallRealMethod();
  when(bim.isLegacyBlock(any(Block.class))).thenCallRealMethod();

  InvalidateBlocks ibs = new InvalidateBlocks(100, 30000, bim);

  // Negative ID with a legacy generation stamp: must be counted as a
  // replicated block, not an EC block group (the HDFS-13350 bug).
  Block legacy = new Block(-1, 10, legacyGenerationStampLimit / 10);
  Block striped = new Block(
      bm.nextBlockId(BlockType.STRIPED), 10,
      legacyGenerationStampLimit + 10);

  DatanodeInfo legacyDnInfo = DFSTestUtil.getLocalDatanodeInfo();
  DatanodeInfo stripedDnInfo = DFSTestUtil.getLocalDatanodeInfo();

  ibs.add(legacy, legacyDnInfo, false);
  assertEquals(1, ibs.getBlocks());
  assertEquals(0, ibs.getECBlocks());

  ibs.add(striped, stripedDnInfo, false);
  assertEquals(1, ibs.getBlocks());
  assertEquals(1, ibs.getECBlocks());

  ibs.remove(legacyDnInfo);
  assertEquals(0, ibs.getBlocks());
  assertEquals(1, ibs.getECBlocks());

  ibs.remove(stripedDnInfo);
  assertEquals(0, ibs.getBlocks());
  assertEquals(0, ibs.getECBlocks());
}
} }

View File

@ -21,6 +21,8 @@ import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull; import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue; import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.when;
import java.io.IOException; import java.io.IOException;
import java.util.Arrays; import java.util.Arrays;
@ -30,10 +32,12 @@ import java.util.Map;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hdfs.DFSTestUtil; import org.apache.hadoop.hdfs.DFSTestUtil;
import org.apache.hadoop.hdfs.StripedFileTestUtil;
import org.apache.hadoop.hdfs.protocol.Block; import org.apache.hadoop.hdfs.protocol.Block;
import org.apache.hadoop.hdfs.protocol.BlockType; import org.apache.hadoop.hdfs.protocol.BlockType;
import org.apache.hadoop.hdfs.server.blockmanagement.CorruptReplicasMap.Reason; import org.apache.hadoop.hdfs.server.blockmanagement.CorruptReplicasMap.Reason;
import org.junit.Test; import org.junit.Test;
import org.mockito.Mockito;
/** /**
@ -46,27 +50,31 @@ public class TestCorruptReplicaInfo {
private static final Log LOG = LogFactory.getLog( private static final Log LOG = LogFactory.getLog(
TestCorruptReplicaInfo.class); TestCorruptReplicaInfo.class);
private final Map<Long, Block> replicaMap = new HashMap<>(); private final Map<Long, BlockInfo> replicaMap = new HashMap<>();
private final Map<Long, Block> stripedBlocksMap = new HashMap<>(); private final Map<Long, BlockInfo> stripedBlocksMap = new HashMap<>();
// Allow easy block creation by block id. Return existing // Allow easy block creation by block id. Return existing
// replica block if one with same block id already exists. // replica block if one with same block id already exists.
private Block getReplica(Long blockId) { private BlockInfo getReplica(Long blockId) {
if (!replicaMap.containsKey(blockId)) { if (!replicaMap.containsKey(blockId)) {
replicaMap.put(blockId, new Block(blockId, 0, 0)); short replFactor = 3;
replicaMap.put(blockId,
new BlockInfoContiguous(new Block(blockId, 0, 0), replFactor));
} }
return replicaMap.get(blockId); return replicaMap.get(blockId);
} }
private Block getReplica(int blkId) { private BlockInfo getReplica(int blkId) {
return getReplica(Long.valueOf(blkId)); return getReplica(Long.valueOf(blkId));
} }
private Block getStripedBlock(int blkId) { private BlockInfo getStripedBlock(int blkId) {
Long stripedBlockId = (1L << 63) + blkId; Long stripedBlockId = (1L << 63) + blkId;
assertTrue(BlockIdManager.isStripedBlockID(stripedBlockId)); assertTrue(BlockIdManager.isStripedBlockID(stripedBlockId));
if (!stripedBlocksMap.containsKey(stripedBlockId)) { if (!stripedBlocksMap.containsKey(stripedBlockId)) {
stripedBlocksMap.put(stripedBlockId, new Block(stripedBlockId, 1024, 0)); stripedBlocksMap.put(stripedBlockId,
new BlockInfoStriped(new Block(stripedBlockId, 1024, 0),
StripedFileTestUtil.getDefaultECPolicy()));
} }
return stripedBlocksMap.get(stripedBlockId); return stripedBlocksMap.get(stripedBlockId);
} }
@ -88,6 +96,10 @@ public class TestCorruptReplicaInfo {
public void testCorruptReplicaInfo() public void testCorruptReplicaInfo()
throws IOException, InterruptedException { throws IOException, InterruptedException {
CorruptReplicasMap crm = new CorruptReplicasMap(); CorruptReplicasMap crm = new CorruptReplicasMap();
BlockIdManager bim = Mockito.mock(BlockIdManager.class);
when(bim.isLegacyBlock(any(Block.class))).thenReturn(false);
when(bim.isStripedBlock(any(Block.class))).thenCallRealMethod();
assertTrue(!bim.isLegacyBlock(new Block(-1)));
// Make sure initial values are returned correctly // Make sure initial values are returned correctly
assertEquals("Total number of corrupt blocks must initially be 0!", assertEquals("Total number of corrupt blocks must initially be 0!",
@ -97,10 +109,11 @@ public class TestCorruptReplicaInfo {
assertEquals("Number of corrupt striped block groups must initially be 0!", assertEquals("Number of corrupt striped block groups must initially be 0!",
0, crm.getCorruptECBlockGroups()); 0, crm.getCorruptECBlockGroups());
assertNull("Param n cannot be less than 0", assertNull("Param n cannot be less than 0",
crm.getCorruptBlockIdsForTesting(BlockType.CONTIGUOUS, -1, null)); crm.getCorruptBlockIdsForTesting(bim, BlockType.CONTIGUOUS, -1, null));
assertNull("Param n cannot be greater than 100", assertNull("Param n cannot be greater than 100",
crm.getCorruptBlockIdsForTesting(BlockType.CONTIGUOUS, 101, null)); crm.getCorruptBlockIdsForTesting(bim, BlockType.CONTIGUOUS, 101, null));
long[] l = crm.getCorruptBlockIdsForTesting(BlockType.CONTIGUOUS, 0, null); long[] l = crm.getCorruptBlockIdsForTesting(
bim, BlockType.CONTIGUOUS, 0, null);
assertNotNull("n = 0 must return non-null", l); assertNotNull("n = 0 must return non-null", l);
assertEquals("n = 0 must return an empty list", 0, l.length); assertEquals("n = 0 must return an empty list", 0, l.length);
@ -156,22 +169,25 @@ public class TestCorruptReplicaInfo {
2 * blockCount, crm.size()); 2 * blockCount, crm.size());
assertTrue("First five corrupt replica blocks ids are not right!", assertTrue("First five corrupt replica blocks ids are not right!",
Arrays.equals(Arrays.copyOfRange(replicaIds, 0, 5), Arrays.equals(Arrays.copyOfRange(replicaIds, 0, 5),
crm.getCorruptBlockIdsForTesting(BlockType.CONTIGUOUS, 5, null))); crm.getCorruptBlockIdsForTesting(
bim, BlockType.CONTIGUOUS, 5, null)));
assertTrue("First five corrupt striped blocks ids are not right!", assertTrue("First five corrupt striped blocks ids are not right!",
Arrays.equals(Arrays.copyOfRange(stripedIds, 0, 5), Arrays.equals(Arrays.copyOfRange(stripedIds, 0, 5),
crm.getCorruptBlockIdsForTesting(BlockType.STRIPED, 5, null))); crm.getCorruptBlockIdsForTesting(
bim, BlockType.STRIPED, 5, null)));
assertTrue("10 replica blocks after 7 not returned correctly!", assertTrue("10 replica blocks after 7 not returned correctly!",
Arrays.equals(Arrays.copyOfRange(replicaIds, 7, 17), Arrays.equals(Arrays.copyOfRange(replicaIds, 7, 17),
crm.getCorruptBlockIdsForTesting(BlockType.CONTIGUOUS, 10, 7L))); crm.getCorruptBlockIdsForTesting(
bim, BlockType.CONTIGUOUS, 10, 7L)));
assertTrue("10 striped blocks after 7 not returned correctly!", assertTrue("10 striped blocks after 7 not returned correctly!",
Arrays.equals(Arrays.copyOfRange(stripedIds, 7, 17), Arrays.equals(Arrays.copyOfRange(stripedIds, 7, 17),
crm.getCorruptBlockIdsForTesting(BlockType.STRIPED, crm.getCorruptBlockIdsForTesting(bim, BlockType.STRIPED,
10, getStripedBlock(7).getBlockId()))); 10, getStripedBlock(7).getBlockId())));
} }
private static void addToCorruptReplicasMap(CorruptReplicasMap crm, private static void addToCorruptReplicasMap(CorruptReplicasMap crm,
Block blk, DatanodeDescriptor dn) { BlockInfo blk, DatanodeDescriptor dn) {
crm.addToCorruptReplicasMap(blk, dn, "TEST", Reason.NONE); crm.addToCorruptReplicasMap(blk, dn, "TEST", Reason.NONE, blk.isStriped());
} }
} }