HDFS-11121. Add assertions to BlockInfo#addStorage to protect from breaking reportedBlock-blockGroup mapping. Contributed by Takanobu Asanuma.
parent d79c645b2e
commit 355b907c09
BlockInfoContiguous.java

@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hdfs.server.blockmanagement;
 
+import com.google.common.base.Preconditions;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hdfs.protocol.Block;
 import org.apache.hadoop.hdfs.protocol.BlockType;
@@ -55,6 +56,9 @@ public class BlockInfoContiguous extends BlockInfo {
 
   @Override
   boolean addStorage(DatanodeStorageInfo storage, Block reportedBlock) {
+    Preconditions.checkArgument(this.getBlockId() == reportedBlock.getBlockId(),
+        "reported blk_%s is different from stored blk_%s",
+        reportedBlock.getBlockId(), this.getBlockId());
     // find the last null node
     int lastNode = ensureCapacity(1);
     setStorageInfo(lastNode, storage);
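For readers unfamiliar with Guava's Preconditions, the following is a minimal, standalone sketch (not part of the patch) of what the added check in the contiguous case does: when the reported block's ID differs from the stored block's ID, checkArgument throws an IllegalArgumentException with the formatted message, which is what the new TestBlockInfo#testAddStorageWithDifferentBlock expects. The block IDs below are made-up values for illustration.

import com.google.common.base.Preconditions;

public class ContiguousAddStorageCheckSketch {
  public static void main(String[] args) {
    // Made-up IDs standing in for BlockInfo#getBlockId() and
    // reportedBlock.getBlockId(); real values come from the NameNode.
    long storedBlockId = 1000L;
    long reportedBlockId = 1001L;

    // Same Guava call the patch adds to BlockInfoContiguous#addStorage:
    // the condition is false here, so this throws
    // IllegalArgumentException("reported blk_1001 is different from stored blk_1000").
    Preconditions.checkArgument(storedBlockId == reportedBlockId,
        "reported blk_%s is different from stored blk_%s",
        reportedBlockId, storedBlockId);
  }
}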
BlockInfoStriped.java

@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hdfs.server.blockmanagement;
 
+import com.google.common.base.Preconditions;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hdfs.protocol.Block;
 import org.apache.hadoop.hdfs.protocol.BlockType;
@@ -114,6 +115,12 @@ public class BlockInfoStriped extends BlockInfo {
 
   @Override
   boolean addStorage(DatanodeStorageInfo storage, Block reportedBlock) {
+    Preconditions.checkArgument(BlockIdManager.isStripedBlockID(
+        reportedBlock.getBlockId()), "reportedBlock is not striped");
+    Preconditions.checkArgument(BlockIdManager.convertToStripedID(
+        reportedBlock.getBlockId()) == this.getBlockId(),
+        "reported blk_%s does not belong to the group of stored blk_%s",
+        reportedBlock.getBlockId(), this.getBlockId());
     int blockIndex = BlockIdManager.getBlockIndex(reportedBlock);
     int index = blockIndex;
     DatanodeStorageInfo old = getStorageInfo(index);
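The striped-case assertion relies on the relationship between an internal block's ID and its block group's ID. As a rough, standalone illustration only (the real constants and conversions live in BlockIdManager and are not reproduced by this patch): striped block IDs come from the negative range, the low bits of an internal block's ID encode its index within the group, and masking those bits off recovers the group ID that the convertToStripedID comparison checks. The 4-bit mask and the IDs below are assumptions made for this sketch.

public class StripedBlockIdSketch {
  // Assumed layout for illustration only: the low 4 bits of a striped block ID
  // hold the block's index within its group (the actual mask is defined in HDFS,
  // not here).
  static final long GROUP_INDEX_MASK = 0xF;

  // Striped (erasure-coded) block IDs are allocated from the negative range.
  static boolean isStripedBlockId(long id) {
    return id < 0;
  }

  // Clearing the index bits recovers the block group ID, analogous to what the
  // BlockIdManager.convertToStripedID call is used for in the new assertion.
  static long toGroupId(long id) {
    return id & ~GROUP_INDEX_MASK;
  }

  public static void main(String[] args) {
    long groupId = -4416L;                // hypothetical block group ID (low bits zero)
    long internalBlock = groupId + 3;     // index 3 within this group
    long foreignBlock = groupId + 100;    // lands in a different group

    System.out.println(isStripedBlockId(internalBlock));      // true
    System.out.println(toGroupId(internalBlock) == groupId);  // true  -> check passes
    System.out.println(toGroupId(foreignBlock) == groupId);   // false -> assertion fires
  }
}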
TestBlockInfo.java

@@ -23,6 +23,7 @@ import static org.hamcrest.core.Is.is;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hdfs.DFSTestUtil;
+import org.apache.hadoop.hdfs.protocol.Block;
 import org.apache.hadoop.hdfs.server.blockmanagement.DatanodeStorageInfo.AddBlockResult;
 import org.apache.hadoop.hdfs.server.protocol.DatanodeStorage;
 import org.junit.Assert;
@@ -54,7 +55,8 @@ public class TestBlockInfo {
   public void testAddStorage() throws Exception {
     BlockInfo blockInfo = new BlockInfoContiguous((short) 3);
 
-    final DatanodeStorageInfo storage = DFSTestUtil.createDatanodeStorageInfo("storageID", "127.0.0.1");
+    final DatanodeStorageInfo storage = DFSTestUtil.createDatanodeStorageInfo(
+        "storageID", "127.0.0.1");
 
     boolean added = blockInfo.addStorage(storage, blockInfo);
 
@@ -66,8 +68,10 @@ public class TestBlockInfo {
   public void testReplaceStorage() throws Exception {
 
     // Create two dummy storages.
-    final DatanodeStorageInfo storage1 = DFSTestUtil.createDatanodeStorageInfo("storageID1", "127.0.0.1");
-    final DatanodeStorageInfo storage2 = new DatanodeStorageInfo(storage1.getDatanodeDescriptor(), new DatanodeStorage("storageID2"));
+    final DatanodeStorageInfo storage1 = DFSTestUtil.createDatanodeStorageInfo(
+        "storageID1", "127.0.0.1");
+    final DatanodeStorageInfo storage2 = new DatanodeStorageInfo(
+        storage1.getDatanodeDescriptor(), new DatanodeStorage("storageID2"));
     final int NUM_BLOCKS = 10;
     BlockInfo[] blockInfos = new BlockInfo[NUM_BLOCKS];
 
@@ -83,4 +87,14 @@ public class TestBlockInfo {
     Assert.assertThat(added, is(false));
     Assert.assertThat(blockInfos[NUM_BLOCKS/2].getStorageInfo(0), is(storage2));
   }
+
+  @Test(expected=IllegalArgumentException.class)
+  public void testAddStorageWithDifferentBlock() throws Exception {
+    BlockInfo blockInfo1 = new BlockInfoContiguous(new Block(1000L), (short) 3);
+    BlockInfo blockInfo2 = new BlockInfoContiguous(new Block(1001L), (short) 3);
+
+    final DatanodeStorageInfo storage = DFSTestUtil.createDatanodeStorageInfo(
+        "storageID", "127.0.0.1");
+    blockInfo1.addStorage(storage, blockInfo2);
+  }
 }
TestBlockInfoStriped.java

@@ -220,4 +220,21 @@ public class TestBlockInfoStriped {
     assertEquals(byteBuffer.array().length, byteStream.toByteArray().length);
     assertArrayEquals(byteBuffer.array(), byteStream.toByteArray());
   }
+
+  @Test(expected=IllegalArgumentException.class)
+  public void testAddStorageWithReplicatedBlock() {
+    DatanodeStorageInfo storage = DFSTestUtil.createDatanodeStorageInfo(
+        "storageID", "127.0.0.1");
+    BlockInfo replica = new BlockInfoContiguous(new Block(1000L), (short) 3);
+    info.addStorage(storage, replica);
+  }
+
+  @Test(expected=IllegalArgumentException.class)
+  public void testAddStorageWithDifferentBlockGroup() {
+    DatanodeStorageInfo storage = DFSTestUtil.createDatanodeStorageInfo(
+        "storageID", "127.0.0.1");
+    BlockInfo diffGroup = new BlockInfoStriped(new Block(BASE_ID + 100),
+        testECPolicy);
+    info.addStorage(storage, diffGroup);
+  }
 }