HDFS-6812. Remove addBlock and replaceBlock from DatanodeDescriptor.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1616426 13f79535-47bb-0310-9956-ffa450edef68
commit 0ed8732fee (parent 83b9933db3)
Author: Tsz-wo Sze
Date: 2014-08-07 07:30:53 +00:00

7 changed files with 35 additions and 68 deletions

hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt

@@ -364,6 +364,9 @@ Release 2.6.0 - UNRELEASED
     standalone classes and separates KeyManager from NameNodeConnector.
     (szetszwo)
+
+    HDFS-6812. Remove addBlock and replaceBlock from DatanodeDescriptor.
+    (szetszwo)
 
   OPTIMIZATIONS
 
     HDFS-6690. Deduplicate xattr names in memory. (wang)

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockInfo.java

@@ -21,7 +21,6 @@ import java.util.LinkedList;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hdfs.protocol.Block;
-import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
 import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.BlockUCState;
 import org.apache.hadoop.util.LightWeightGSet;
@@ -254,18 +253,18 @@ public class BlockInfo extends Block implements LightWeightGSet.LinkedElement {
   }
 
   /**
    * Find specified DatanodeStorageInfo.
-   * @return index or -1 if not found.
+   * @return DatanodeStorageInfo or null if not found.
    */
-  int findStorageInfo(DatanodeInfo dn) {
+  DatanodeStorageInfo findStorageInfo(DatanodeDescriptor dn) {
     int len = getCapacity();
     for(int idx = 0; idx < len; idx++) {
       DatanodeStorageInfo cur = getStorageInfo(idx);
       if(cur == null)
         break;
       if(cur.getDatanodeDescriptor() == dn)
-        return idx;
+        return cur;
     }
-    return -1;
+    return null;
   }
 
   /**
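
Note: the new findStorageInfo collapses the old index-plus-sentinel idiom into a single object lookup, so callers no longer need a second getStorageInfo(index) step. A minimal before/after sketch of a hypothetical caller:

  // Before: an index sentinel, then a second lookup.
  int index = blockInfo.findStorageInfo(node);
  DatanodeStorageInfo storage =
      index >= 0 ? blockInfo.getStorageInfo(index) : null;

  // After: one call; null signals "not found".
  DatanodeStorageInfo storage = blockInfo.findStorageInfo(node);
  if (storage != null) {
    // operate on the storage directly
  }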

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlocksMap.java

@@ -23,8 +23,8 @@ import org.apache.hadoop.hdfs.protocol.Block;
 import org.apache.hadoop.hdfs.server.protocol.DatanodeStorage;
 import org.apache.hadoop.util.GSet;
 import org.apache.hadoop.util.LightWeightGSet;
-import org.apache.hadoop.util.LightWeightGSet.SetIterator;
 
+import com.google.common.base.Preconditions;
 import com.google.common.base.Predicate;
 import com.google.common.collect.Iterables;
@@ -217,9 +217,14 @@ class BlocksMap {
     BlockInfo currentBlock = blocks.get(newBlock);
     assert currentBlock != null : "the block if not in blocksMap";
     // replace block in data-node lists
-    for(int idx = currentBlock.numNodes()-1; idx >= 0; idx--) {
-      DatanodeDescriptor dn = currentBlock.getDatanode(idx);
-      dn.replaceBlock(currentBlock, newBlock);
+    for (int i = currentBlock.numNodes() - 1; i >= 0; i--) {
+      final DatanodeDescriptor dn = currentBlock.getDatanode(i);
+      final DatanodeStorageInfo storage = currentBlock.findStorageInfo(dn);
+
+      final boolean removed = storage.removeBlock(currentBlock);
+      Preconditions.checkState(removed, "currentBlock not found.");
+      final boolean added = storage.addBlock(newBlock);
+      Preconditions.checkState(added, "newBlock already exists.");
     }
     // replace block in the map itself
     blocks.put(newBlock);
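
Note: inlining the old DatanodeDescriptor.replaceBlock logic here also upgrades its consistency checks from Java assert statements, which are no-ops unless the JVM runs with -ea, to Guava's Preconditions.checkState, which is always enforced. A minimal sketch of the difference:

  final boolean removed = storage.removeBlock(currentBlock);
  // Skipped in production unless assertions are enabled:
  assert removed : "currentBlock not found.";
  // Always checked; throws IllegalStateException with this message on failure:
  Preconditions.checkState(removed, "currentBlock not found.");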

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/CorruptReplicasMap.java

@@ -49,18 +49,6 @@ public class CorruptReplicasMap{
   private final SortedMap<Block, Map<DatanodeDescriptor, Reason>> corruptReplicasMap =
     new TreeMap<Block, Map<DatanodeDescriptor, Reason>>();
 
-  /**
-   * Mark the block belonging to datanode as corrupt.
-   *
-   * @param blk Block to be added to CorruptReplicasMap
-   * @param dn DatanodeDescriptor which holds the corrupt replica
-   * @param reason a textual reason (for logging purposes)
-   */
-  public void addToCorruptReplicasMap(Block blk, DatanodeDescriptor dn,
-      String reason) {
-    addToCorruptReplicasMap(blk, dn, reason, Reason.NONE);
-  }
-
   /**
    * Mark the block belonging to datanode as corrupt.
    *
@@ -69,7 +57,7 @@ public class CorruptReplicasMap{
    * @param reason a textual reason (for logging purposes)
    * @param reasonCode the enum representation of the reason
    */
-  public void addToCorruptReplicasMap(Block blk, DatanodeDescriptor dn,
+  void addToCorruptReplicasMap(Block blk, DatanodeDescriptor dn,
       String reason, Reason reasonCode) {
     Map <DatanodeDescriptor, Reason> nodes = corruptReplicasMap.get(blk);
     if (nodes == null) {
@@ -127,7 +115,6 @@ public class CorruptReplicasMap{
   boolean removeFromCorruptReplicasMap(Block blk, DatanodeDescriptor datanode,
       Reason reason) {
     Map <DatanodeDescriptor, Reason> datanodes = corruptReplicasMap.get(blk);
-    boolean removed = false;
     if (datanodes==null)
       return false;
@@ -174,12 +161,12 @@ public class CorruptReplicasMap{
     return ((nodes != null) && (nodes.contains(node)));
   }
 
-  public int numCorruptReplicas(Block blk) {
+  int numCorruptReplicas(Block blk) {
     Collection<DatanodeDescriptor> nodes = getNodes(blk);
     return (nodes == null) ? 0 : nodes.size();
   }
 
-  public int size() {
+  int size() {
     return corruptReplicasMap.size();
   }
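
Note: with the three-argument convenience overload removed and the remaining methods narrowed to package-private, callers inside the blockmanagement package now pass the reason code explicitly (the test below adds its own helper for this). A sketch of the call-site migration:

  // Before: the overload defaulted the reason code to Reason.NONE.
  crm.addToCorruptReplicasMap(blk, dn, "some reason");

  // After: the single remaining method takes the Reason explicitly.
  crm.addToCorruptReplicasMap(blk, dn, "some reason", Reason.NONE);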

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/DatanodeDescriptor.java

@@ -234,18 +234,6 @@ public class DatanodeDescriptor extends DatanodeInfo {
     updateHeartbeat(StorageReport.EMPTY_ARRAY, 0L, 0L, 0, 0);
   }
 
-  /**
-   * Add data-node to the block. Add block to the head of the list of blocks
-   * belonging to the data-node.
-   */
-  public boolean addBlock(String storageID, BlockInfo b) {
-    DatanodeStorageInfo s = getStorageInfo(storageID);
-    if (s != null) {
-      return s.addBlock(b);
-    }
-    return false;
-  }
-
   @VisibleForTesting
   public DatanodeStorageInfo getStorageInfo(String storageID) {
     synchronized (storageMap) {
@@ -284,14 +272,11 @@ public class DatanodeDescriptor extends DatanodeInfo {
    * data-node from the block.
    */
   boolean removeBlock(BlockInfo b) {
-    int index = b.findStorageInfo(this);
+    final DatanodeStorageInfo s = b.findStorageInfo(this);
     // if block exists on this datanode
-    if (index >= 0) {
-      DatanodeStorageInfo s = b.getStorageInfo(index);
     if (s != null) {
       return s.removeBlock(b);
     }
-    }
     return false;
   }
@@ -307,24 +292,6 @@ public class DatanodeDescriptor extends DatanodeInfo {
     return false;
   }
 
-  /**
-   * Replace specified old block with a new one in the DataNodeDescriptor.
-   *
-   * @param oldBlock - block to be replaced
-   * @param newBlock - a replacement block
-   * @return the new block
-   */
-  public BlockInfo replaceBlock(BlockInfo oldBlock, BlockInfo newBlock) {
-    int index = oldBlock.findStorageInfo(this);
-    DatanodeStorageInfo s = oldBlock.getStorageInfo(index);
-    boolean done = s.removeBlock(oldBlock);
-    assert done : "Old block should belong to the data-node when replacing";
-    done = s.addBlock(newBlock);
-    assert done : "New block should not belong to the data-node when replacing";
-    return newBlock;
-  }
-
   public void resetBlocks() {
     setCapacity(0);
     setRemaining(0);
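
Note: with addBlock and replaceBlock gone, DatanodeStorageInfo is the single place where a datanode's block lists are mutated. A hypothetical caller of the removed addBlock(String, BlockInfo) would migrate roughly like this (variable names are illustrative):

  // Before: routed through the DatanodeDescriptor, keyed by storage ID.
  dn.addBlock(storageID, blockInfo);

  // After: resolve the DatanodeStorageInfo once, then operate on it directly.
  DatanodeStorageInfo storage = dn.getStorageInfo(storageID);
  if (storage != null) {
    storage.addBlock(blockInfo);
  }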

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/PendingDataNodeMessages.java

@@ -23,9 +23,9 @@ import java.util.Queue;
 import org.apache.hadoop.hdfs.protocol.Block;
 import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.ReplicaState;
+import org.apache.hadoop.hdfs.server.protocol.DatanodeStorage;
 
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
-import org.apache.hadoop.hdfs.server.protocol.DatanodeStorage;
 
 /**
  * In the Standby Node, we can receive messages about blocks
@@ -123,7 +123,7 @@ class PendingDataNodeMessages {
     return queue;
   }
 
-  public int count() {
+  int count() {
     return count ;
   }
@@ -140,7 +140,7 @@ class PendingDataNodeMessages {
     return sb.toString();
   }
 
-  public Iterable<ReportedBlockInfo> takeAll() {
+  Iterable<ReportedBlockInfo> takeAll() {
     List<ReportedBlockInfo> rbis = Lists.newArrayListWithCapacity(
         count);
     for (Queue<ReportedBlockInfo> q : queueByBlockId.values()) {

hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestCorruptReplicaInfo.java

@@ -33,6 +33,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hdfs.DFSTestUtil;
 import org.apache.hadoop.hdfs.protocol.Block;
+import org.apache.hadoop.hdfs.server.blockmanagement.CorruptReplicasMap.Reason;
 import org.junit.Test;
@@ -89,14 +90,14 @@ public class TestCorruptReplicaInfo {
       DatanodeDescriptor dn1 = DFSTestUtil.getLocalDatanodeDescriptor();
       DatanodeDescriptor dn2 = DFSTestUtil.getLocalDatanodeDescriptor();
 
-      crm.addToCorruptReplicasMap(getBlock(0), dn1, "TEST");
+      addToCorruptReplicasMap(crm, getBlock(0), dn1);
       assertEquals("Number of corrupt blocks not returning correctly",
                    1, crm.size());
-      crm.addToCorruptReplicasMap(getBlock(1), dn1, "TEST");
+      addToCorruptReplicasMap(crm, getBlock(1), dn1);
       assertEquals("Number of corrupt blocks not returning correctly",
                    2, crm.size());
-      crm.addToCorruptReplicasMap(getBlock(1), dn2, "TEST");
+      addToCorruptReplicasMap(crm, getBlock(1), dn2);
       assertEquals("Number of corrupt blocks not returning correctly",
                    2, crm.size());
@@ -109,7 +110,7 @@ public class TestCorruptReplicaInfo {
                    0, crm.size());
 
       for (Long block_id: block_ids) {
-        crm.addToCorruptReplicasMap(getBlock(block_id), dn1, "TEST");
+        addToCorruptReplicasMap(crm, getBlock(block_id), dn1);
       }
 
       assertEquals("Number of corrupt blocks not returning correctly",
@@ -127,4 +128,9 @@ public class TestCorruptReplicaInfo {
                       crm.getCorruptReplicaBlockIds(10, 7L)));
   }
 
+  private static void addToCorruptReplicasMap(CorruptReplicasMap crm,
+      Block blk, DatanodeDescriptor dn) {
+    crm.addToCorruptReplicasMap(blk, dn, "TEST", Reason.NONE);
+  }
 }