HDFS-2857. Cleanup BlockInfo class. Contributed by Suresh Srinivas.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1238747 13f79535-47bb-0310-9956-ffa450edef68
Suresh Srinivas 2012-01-31 18:59:58 +00:00
parent 05efddf28e
commit 319021d6fb
2 changed files with 40 additions and 41 deletions

CHANGES.txt

@@ -48,7 +48,7 @@ Trunk (unreleased changes)
   IMPROVEMENTS
     HADOOP-7524 Change RPC to allow multiple protocols including multuple
-                versions of the same protocol (sanjay Radia)
+                versions of the same protocol (Sanjay Radia)
     HDFS-1620. Rename HdfsConstants -> HdfsServerConstants, FSConstants ->
     HdfsConstants. (Harsh J Chouraria via atm)
@@ -100,9 +100,9 @@ Trunk (unreleased changes)
     HDFS-2651 ClientNameNodeProtocol Translators for Protocol Buffers (sanjay)
-    HDFS-2650. Replace @inheritDoc with @Override. (Hari Mankude via suresh).
-    HDFS-2669 Enable protobuf rpc for ClientNamenodeProtocol
+    HDFS-2650. Replace @inheritDoc with @Override. (Hari Mankude via suresh)
+    HDFS-2669. Enable protobuf rpc for ClientNamenodeProtocol. (Sanjay Radia)
     HDFS-2801. Provide a method in client side translators to check for a
     methods supported in underlying protocol. (jitendra)
@@ -284,6 +284,8 @@ Release 0.23.1 - UNRELEASED
     HDFS-2397. Undeprecate SecondaryNameNode (eli)
+    HDFS-2857. Cleanup BlockInfo class. (suresh)
     OPTIMIZATIONS
     HDFS-2130. Switch default checksum to CRC32C. (todd)

org/apache/hadoop/hdfs/server/blockmanagement/BlockInfo.java

@@ -17,27 +17,38 @@
  */
 package org.apache.hadoop.hdfs.server.blockmanagement;

+import java.util.LinkedList;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hdfs.protocol.Block;
 import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.BlockUCState;
 import org.apache.hadoop.hdfs.server.namenode.INodeFile;
 import org.apache.hadoop.hdfs.util.LightWeightGSet;

 /**
- * Internal class for block metadata.
+ * BlockInfo class maintains for a given block
+ * the {@link INodeFile} it is part of and datanodes where the replicas of
+ * the block are stored.
  */
-public class BlockInfo extends Block implements LightWeightGSet.LinkedElement {
+@InterfaceAudience.Private
+public class BlockInfo extends Block implements
+    LightWeightGSet.LinkedElement {
   private INodeFile inode;

   /** For implementing {@link LightWeightGSet.LinkedElement} interface */
   private LightWeightGSet.LinkedElement nextLinkedElement;

   /**
-   * This array contains triplets of references.
-   * For each i-th datanode the block belongs to
-   * triplets[3*i] is the reference to the DatanodeDescriptor
-   * and triplets[3*i+1] and triplets[3*i+2] are references
-   * to the previous and the next blocks, respectively, in the
-   * list of blocks belonging to this data-node.
+   * This array contains triplets of references. For each i-th datanode the
+   * block belongs to triplets[3*i] is the reference to the DatanodeDescriptor
+   * and triplets[3*i+1] and triplets[3*i+2] are references to the previous and
+   * the next blocks, respectively, in the list of blocks belonging to this
+   * data-node.
+   *
+   * Using previous and next in Object triplets is done instead of a
+   * {@link LinkedList} list to efficiently use memory. With LinkedList the cost
+   * per replica is 42 bytes (LinkedList#Entry object per replica) versus 16
+   * bytes using the triplets.
   */
   private Object[] triplets;
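As an aside on the triplets layout documented in the new Javadoc above: the stand-alone sketch below (a hypothetical TripletStore class, not part of this patch) illustrates how a single Object[] can hold, for each replica i, the datanode reference at 3*i and the previous/next links of that datanode's doubly linked block list at 3*i+1 and 3*i+2.

// Illustrative only: TripletStore and the String stand-ins for datanodes are
// hypothetical; the real BlockInfo stores DatanodeDescriptor and BlockInfo
// references in the same slots.
public class TripletStore {
  // For replica i: triplets[3*i]   = datanode the replica lives on,
  //                triplets[3*i+1] = previous block on that datanode's list,
  //                triplets[3*i+2] = next block on that datanode's list.
  private final Object[] triplets;

  public TripletStore(int replication) {
    this.triplets = new Object[3 * replication];
  }

  void set(int i, Object datanode, Object previous, Object next) {
    int base = 3 * i;
    triplets[base] = datanode;
    triplets[base + 1] = previous;
    triplets[base + 2] = next;
  }

  Object getDatanode(int i) { return triplets[3 * i]; }
  Object getPrevious(int i) { return triplets[3 * i + 1]; }
  Object getNext(int i)     { return triplets[3 * i + 2]; }

  public static void main(String[] args) {
    TripletStore block = new TripletStore(3);    // replication factor 3
    block.set(0, "dn-1", null, null);            // first replica, no list links yet
    System.out.println(block.getDatanode(0));    // prints dn-1
  }
}

The 42-byte versus 16-byte comparison quoted in the comment reflects the per-replica overhead of a LinkedList#Entry object (its own object header plus item/next/prev references) against three slots in an Object[] that is allocated once per block.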
@@ -84,7 +95,7 @@ public class BlockInfo extends Block implements LightWeightGSet.LinkedElement {
     return node;
   }

-  BlockInfo getPrevious(int index) {
+  private BlockInfo getPrevious(int index) {
     assert this.triplets != null : "BlockInfo is not initialized";
     assert index >= 0 && index*3+1 < triplets.length : "Index is out of bound";
     BlockInfo info = (BlockInfo)triplets[index*3+1];
@@ -104,22 +115,14 @@ public class BlockInfo extends Block implements LightWeightGSet.LinkedElement {
     return info;
   }

-  void setDatanode(int index, DatanodeDescriptor node) {
+  private void setDatanode(int index, DatanodeDescriptor node, BlockInfo previous,
+      BlockInfo next) {
     assert this.triplets != null : "BlockInfo is not initialized";
-    assert index >= 0 && index*3 < triplets.length : "Index is out of bound";
-    triplets[index*3] = node;
-  }
-
-  void setPrevious(int index, BlockInfo to) {
-    assert this.triplets != null : "BlockInfo is not initialized";
-    assert index >= 0 && index*3+1 < triplets.length : "Index is out of bound";
-    triplets[index*3+1] = to;
-  }
-
-  void setNext(int index, BlockInfo to) {
-    assert this.triplets != null : "BlockInfo is not initialized";
-    assert index >= 0 && index*3+2 < triplets.length : "Index is out of bound";
-    triplets[index*3+2] = to;
+    int i = index * 3;
+    assert index >= 0 && i+2 < triplets.length : "Index is out of bound";
+    triplets[i] = node;
+    triplets[i+1] = previous;
+    triplets[i+2] = next;
   }

   /**
@@ -130,7 +133,7 @@ public class BlockInfo extends Block implements LightWeightGSet.LinkedElement {
    * @param to - block to be set to previous on the list of blocks
    * @return current previous block on the list of blocks
    */
-  BlockInfo getSetPrevious(int index, BlockInfo to) {
+  private BlockInfo setPrevious(int index, BlockInfo to) {
     assert this.triplets != null : "BlockInfo is not initialized";
     assert index >= 0 && index*3+1 < triplets.length : "Index is out of bound";
     BlockInfo info = (BlockInfo)triplets[index*3+1];
@@ -146,7 +149,7 @@ public class BlockInfo extends Block implements LightWeightGSet.LinkedElement {
    * @param to - block to be set to next on the list of blocks
    * * @return current next block on the list of blocks
    */
-  BlockInfo getSetNext(int index, BlockInfo to) {
+  private BlockInfo setNext(int index, BlockInfo to) {
     assert this.triplets != null : "BlockInfo is not initialized";
     assert index >= 0 && index*3+2 < triplets.length : "Index is out of bound";
     BlockInfo info = (BlockInfo)triplets[index*3+2];
@@ -198,9 +201,7 @@ public class BlockInfo extends Block implements LightWeightGSet.LinkedElement {
       return false;
     // find the last null node
     int lastNode = ensureCapacity(1);
-    setDatanode(lastNode, node);
-    setNext(lastNode, null);
-    setPrevious(lastNode, null);
+    setDatanode(lastNode, node, null, null);
     return true;
   }

@@ -216,13 +217,10 @@ public class BlockInfo extends Block implements LightWeightGSet.LinkedElement {
     // find the last not null node
     int lastNode = numNodes()-1;
     // replace current node triplet by the lastNode one
-    setDatanode(dnIndex, getDatanode(lastNode));
-    setNext(dnIndex, getNext(lastNode));
-    setPrevious(dnIndex, getPrevious(lastNode));
+    setDatanode(dnIndex, getDatanode(lastNode), getPrevious(lastNode),
+        getNext(lastNode));
     // set the last triplet to null
-    setDatanode(lastNode, null);
-    setNext(lastNode, null);
-    setPrevious(lastNode, null);
+    setDatanode(lastNode, null, null, null);
     return true;
   }
@@ -300,8 +298,8 @@ public class BlockInfo extends Block implements LightWeightGSet.LinkedElement {
     if (head == this) {
       return this;
     }
-    BlockInfo next = this.getSetNext(curIndex, head);
-    BlockInfo prev = this.getSetPrevious(curIndex, null);
+    BlockInfo next = this.setNext(curIndex, head);
+    BlockInfo prev = this.setPrevious(curIndex, null);
     head.setPrevious(headIndex, this);
     prev.setNext(prev.findDatanode(dn), next);
@@ -331,7 +329,6 @@ public class BlockInfo extends Block implements LightWeightGSet.LinkedElement {
   /**
    * Convert a complete block to an under construction block.
-   *
    * @return BlockInfoUnderConstruction - an under construction block.
    */
   public BlockInfoUnderConstruction convertToBlockUnderConstruction(