HDFS-2857. Merge r1238747 from trunk

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1492890 13f79535-47bb-0310-9956-ffa450edef68
commit e277c2a521
parent 85543c3683
Author: Suresh Srinivas
Date: 2013-06-13 22:39:49 +00:00
2 changed files with 39 additions and 43 deletions

diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt

@@ -30,6 +30,8 @@ Release 2.1.0-beta - UNRELEASED
INCOMPATIBLE CHANGES
+ HDFS-4053. Increase the default block size. (eli)
HDFS-4305. Add a configurable limit on number of blocks per file, and min
block size. (Andrew Wang via atm)
@@ -154,6 +156,10 @@ Release 2.1.0-beta - UNRELEASED
HDFS-4698. Provide client-side metrics for remote reads, local reads, and
short-circuit reads. (Colin Patrick McCabe via atm)
+ HDFS-3498. Support replica removal in BlockPlacementPolicy and make
+ BlockPlacementPolicyDefault extensible for reusing code in subclasses.
+ (Junping Du via szetszwo)
HDFS-4234. Use generic code for choosing datanode in Balancer. (szetszwo)
HDFS-4880. Print the image and edits file loaded by the namenode in the
@@ -161,6 +167,8 @@ Release 2.1.0-beta - UNRELEASED
HDFS-2572. Remove unnecessary double-check in DN#getHostName. (harsh)
+ HDFS-2857. Cleanup BlockInfo class. (suresh)
OPTIMIZATIONS
BUG FIXES
@@ -1110,14 +1118,8 @@ Release 2.0.3-alpha - 2013-02-06
HDFS-4456. Add concat to HttpFS and WebHDFS REST API docs. (plamenj2003 via tucu)
- HDFS-4053. Increase the default block size. (eli)
HDFS-3131. Improve TestStorageRestore. (Brandon Li via atm)
- HDFS-3498. Support replica removal in BlockPlacementPolicy and make
- BlockPlacementPolicyDefault extensible for reusing code in subclasses.
- (Junping Du via szetszwo)
OPTIMIZATIONS
HDFS-3429. DataNode reads checksums even if client does not need them (todd)

diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockInfo.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/blockmanagement/BlockInfo.java

@@ -17,13 +17,17 @@
*/
package org.apache.hadoop.hdfs.server.blockmanagement;
+ import java.util.LinkedList;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hdfs.protocol.Block;
import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.BlockUCState;
import org.apache.hadoop.hdfs.util.LightWeightGSet;
/**
* Internal class for block metadata.
- * BlockInfo class maintains for a given block
- * the {@link INodeFile} it is part of and datanodes where the replicas of
- * the block are stored.
+ * BlockInfo class maintains for a given block
+ * the {@link BlockCollection} it is part of and datanodes where the replicas of
+ * the block are stored.
@@ -38,12 +42,16 @@ public class BlockInfo extends Block implements LightWeightGSet.LinkedElement {
private LightWeightGSet.LinkedElement nextLinkedElement;
/**
- * This array contains triplets of references.
- * For each i-th datanode the block belongs to
- * triplets[3*i] is the reference to the DatanodeDescriptor
- * and triplets[3*i+1] and triplets[3*i+2] are references
- * to the previous and the next blocks, respectively, in the
- * list of blocks belonging to this data-node.
+ * This array contains triplets of references. For each i-th datanode the
+ * block belongs to triplets[3*i] is the reference to the DatanodeDescriptor
+ * and triplets[3*i+1] and triplets[3*i+2] are references to the previous and
+ * the next blocks, respectively, in the list of blocks belonging to this
+ * data-node.
+ *
+ * Using previous and next in Object triplets is done instead of a
+ * {@link LinkedList} list to efficiently use memory. With LinkedList the cost
+ * per replica is 42 bytes (LinkedList#Entry object per replica) versus 16
+ * bytes using the triplets.
*/
private Object[] triplets;
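For reviewers who want the triplets layout in runnable form: a minimal standalone sketch of the scheme the new comment describes (the class and field names below are illustrative stand-ins, not the actual Hadoop types):

```java
// Sketch of the triplets layout described in the patch's comment.
// DatanodeRef stands in for DatanodeDescriptor; Object stands in for
// BlockInfo. Illustrative only, not Hadoop code.
class TripletsSketch {
  static class DatanodeRef {}

  // 3 slots per replica: [3*i] = datanode, [3*i+1] = previous block,
  // [3*i+2] = next block in that datanode's list of blocks.
  private final Object[] triplets;

  TripletsSketch(int replication) {
    triplets = new Object[3 * replication];
  }

  DatanodeRef getDatanode(int i) { return (DatanodeRef) triplets[3 * i]; }
  Object getPrevious(int i) { return triplets[3 * i + 1]; }
  Object getNext(int i) { return triplets[3 * i + 2]; }

  void setDatanode(int i, DatanodeRef dn, Object prev, Object next) {
    int base = 3 * i;
    triplets[base] = dn;
    triplets[base + 1] = prev;
    triplets[base + 2] = next;
  }
}
```

The memory claim in the comment amounts to: a LinkedList would add one LinkedList.Entry object (an object header plus three references) per replica, while the triplets approach pays only three array slots; the concrete 42-versus-16-byte figures come from the patch itself, not from this sketch.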
@@ -86,7 +94,7 @@ DatanodeDescriptor getDatanode(int index) {
return (DatanodeDescriptor)triplets[index*3];
}
- BlockInfo getPrevious(int index) {
+ private BlockInfo getPrevious(int index) {
assert this.triplets != null : "BlockInfo is not initialized";
assert index >= 0 && index*3+1 < triplets.length : "Index is out of bound";
BlockInfo info = (BlockInfo)triplets[index*3+1];
@@ -106,22 +114,14 @@ BlockInfo getNext(int index) {
return info;
}
- void setDatanode(int index, DatanodeDescriptor node) {
+ private void setDatanode(int index, DatanodeDescriptor node, BlockInfo previous,
+     BlockInfo next) {
assert this.triplets != null : "BlockInfo is not initialized";
- assert index >= 0 && index*3 < triplets.length : "Index is out of bound";
- triplets[index*3] = node;
- }
- void setPrevious(int index, BlockInfo to) {
- assert this.triplets != null : "BlockInfo is not initialized";
- assert index >= 0 && index*3+1 < triplets.length : "Index is out of bound";
- triplets[index*3+1] = to;
- }
- void setNext(int index, BlockInfo to) {
- assert this.triplets != null : "BlockInfo is not initialized";
- assert index >= 0 && index*3+2 < triplets.length : "Index is out of bound";
- triplets[index*3+2] = to;
+ int i = index * 3;
+ assert index >= 0 && i+2 < triplets.length : "Index is out of bound";
+ triplets[i] = node;
+ triplets[i+1] = previous;
+ triplets[i+2] = next;
}
/**
@@ -132,7 +132,7 @@ void setNext(int index, BlockInfo to) {
* @param to - block to be set to previous on the list of blocks
* @return current previous block on the list of blocks
*/
- BlockInfo getSetPrevious(int index, BlockInfo to) {
+ private BlockInfo setPrevious(int index, BlockInfo to) {
assert this.triplets != null : "BlockInfo is not initialized";
assert index >= 0 && index*3+1 < triplets.length : "Index is out of bound";
BlockInfo info = (BlockInfo)triplets[index*3+1];
@@ -148,7 +148,7 @@ BlockInfo getSetPrevious(int index, BlockInfo to) {
* @param to - block to be set to next on the list of blocks
* @return current next block on the list of blocks
*/
- BlockInfo getSetNext(int index, BlockInfo to) {
+ private BlockInfo setNext(int index, BlockInfo to) {
assert this.triplets != null : "BlockInfo is not initialized";
assert index >= 0 && index*3+2 < triplets.length : "Index is out of bound";
BlockInfo info = (BlockInfo)triplets[index*3+2];
@@ -200,9 +200,7 @@ public boolean addNode(DatanodeDescriptor node) {
return false;
// find the last null node
int lastNode = ensureCapacity(1);
- setDatanode(lastNode, node);
- setNext(lastNode, null);
- setPrevious(lastNode, null);
+ setDatanode(lastNode, node, null, null);
return true;
}
@@ -218,13 +216,10 @@ assert getPrevious(dnIndex) == null && getNext(dnIndex) == null :
// find the last not null node
int lastNode = numNodes()-1;
// replace current node triplet by the lastNode one
- setDatanode(dnIndex, getDatanode(lastNode));
- setNext(dnIndex, getNext(lastNode));
- setPrevious(dnIndex, getPrevious(lastNode));
+ setDatanode(dnIndex, getDatanode(lastNode), getPrevious(lastNode),
+     getNext(lastNode));
// set the last triplet to null
- setDatanode(lastNode, null);
- setNext(lastNode, null);
- setPrevious(lastNode, null);
+ setDatanode(lastNode, null, null, null);
return true;
}
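The removeNode() hunk above keeps the existing swap-with-last idiom, now expressed as a single setDatanode call. As a generic illustration of that idiom (a hypothetical helper, not Hadoop code):

```java
// Swap-with-last removal from a compact array: O(1) and shift-free,
// at the cost of not preserving element order.
static <T> int removeUnordered(T[] slots, int count, int removeIdx) {
  int last = count - 1;
  slots[removeIdx] = slots[last]; // move the tail entry into the hole
  slots[last] = null;             // clear the now-duplicated tail slot
  return last;                    // new element count
}
```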
@@ -302,8 +297,8 @@ public BlockInfo moveBlockToHead(BlockInfo head, DatanodeDescriptor dn,
if (head == this) {
return this;
}
- BlockInfo next = this.getSetNext(curIndex, head);
- BlockInfo prev = this.getSetPrevious(curIndex, null);
+ BlockInfo next = this.setNext(curIndex, head);
+ BlockInfo prev = this.setPrevious(curIndex, null);
head.setPrevious(headIndex, this);
prev.setNext(prev.findDatanode(dn), next);
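For anyone tracing moveBlockToHead(): the calls above perform a standard doubly-linked-list move-to-head, except that each block's prev/next pointers live in its own triplets array at the slot for the given datanode, which is why curIndex and headIndex are threaded through. A plain-pointer sketch of the same relinking (hypothetical Node class, not Hadoop code):

```java
class MoveToHeadSketch {
  static class Node { Node prev, next; }

  // Detach n from its current position and make it the new list head.
  static Node moveToHead(Node head, Node n) {
    if (n == head) {
      return head;
    }
    if (n.prev != null) { n.prev.next = n.next; } // unlink n from its
    if (n.next != null) { n.next.prev = n.prev; } // current position
    n.next = head;  // splice n in front of the old head
    n.prev = null;
    head.prev = n;
    return n;
  }
}
```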
@@ -333,7 +328,6 @@ public boolean isComplete() {
/**
* Convert a complete block to an under construction block.
- *
* @return BlockInfoUnderConstruction - an under construction block.
*/
public BlockInfoUnderConstruction convertToBlockUnderConstruction(