HDFS-3418. svn merge -c 1338830 from trunk
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1338833 13f79535-47bb-0310-9956-ffa450edef68
parent 9a8cf2b5a9
commit b24cc33566
@@ -334,6 +334,9 @@ Release 2.0.0 - UNRELEASED
 
     HDFS-3419. Cleanup LocatedBlock. (eli)
 
+    HDFS-3418. Rename BlockWithLocationsProto datanodeIDs field to storageIDs.
+    (eli)
+
   OPTIMIZATIONS
 
     HDFS-2477. Optimize computing the diff between a block report and the
@@ -254,11 +254,11 @@ public class PBHelper {
   public static BlockWithLocationsProto convert(BlockWithLocations blk) {
     return BlockWithLocationsProto.newBuilder()
         .setBlock(convert(blk.getBlock()))
-        .addAllDatanodeIDs(Arrays.asList(blk.getDatanodes())).build();
+        .addAllStorageIDs(Arrays.asList(blk.getStorageIDs())).build();
   }
 
   public static BlockWithLocations convert(BlockWithLocationsProto b) {
-    return new BlockWithLocations(convert(b.getBlock()), b.getDatanodeIDsList()
+    return new BlockWithLocations(convert(b.getBlock()), b.getStorageIDsList()
         .toArray(new String[0]));
   }
 
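Note: the two convert() methods above are inverses, which the compare() helper in TestPBHelper (last hunk) verifies. A minimal round-trip sketch follows; the import path of the generated HdfsProtos class and the sample block/storage-ID values are assumptions for illustration, not part of this patch:

    import java.util.Arrays;

    import org.apache.hadoop.hdfs.protocol.Block;
    import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProto;
    import org.apache.hadoop.hdfs.protocolPB.PBHelper;
    import org.apache.hadoop.hdfs.server.protocol.BlocksWithLocations.BlockWithLocations;

    public class StorageIdRoundTripSketch {
      public static void main(String[] args) {
        // A made-up block (id, length, generation stamp) with two storage IDs.
        BlockWithLocations blk = new BlockWithLocations(
            new Block(1L, 100L, 1001L), new String[] {"storage-1", "storage-2"});

        // Java object -> protobuf message; uses the renamed addAllStorageIDs().
        BlockWithLocationsProto proto = PBHelper.convert(blk);

        // Protobuf message -> Java object; reads getStorageIDsList().
        BlockWithLocations back = PBHelper.convert(proto);

        // The storage IDs survive the round trip.
        System.out.println(Arrays.equals(blk.getStorageIDs(), back.getStorageIDs()));
      }
    }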
@@ -205,6 +205,7 @@ public class Balancer {
   private Map<Block, BalancerBlock> globalBlockList
                  = new HashMap<Block, BalancerBlock>();
   private MovedBlocks movedBlocks = new MovedBlocks();
+  // Map storage IDs to BalancerDatanodes
   private Map<String, BalancerDatanode> datanodes
                  = new HashMap<String, BalancerDatanode>();
 
@@ -621,8 +622,8 @@ public class Balancer {
 
       synchronized (block) {
         // update locations
-        for ( String location : blk.getDatanodes() ) {
-          BalancerDatanode datanode = datanodes.get(location);
+        for ( String storageID : blk.getStorageIDs() ) {
+          BalancerDatanode datanode = datanodes.get(storageID);
           if (datanode != null) { // not an unknown datanode
             block.addLocation(datanode);
           }
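Note: Balancer keys its datanodes map by storage ID, so each entry returned by BlockWithLocations.getStorageIDs() resolves directly to a known node, and IDs of unknown nodes are skipped. A stripped-down sketch of that lookup pattern, using plain strings in place of BalancerDatanode and made-up IDs:

    import java.util.HashMap;
    import java.util.Map;

    public class StorageIdLookupSketch {
      public static void main(String[] args) {
        // Index nodes by storage ID, like Balancer's 'datanodes' map
        // (hostnames stand in for BalancerDatanode objects here).
        Map<String, String> datanodes = new HashMap<String, String>();
        datanodes.put("storage-1", "host-a");
        datanodes.put("storage-2", "host-b");

        // Storage IDs reported for one block, i.e. what getStorageIDs() returns.
        String[] storageIDs = {"storage-1", "storage-3"};

        for (String storageID : storageIDs) {
          String datanode = datanodes.get(storageID);
          if (datanode != null) { // not an unknown datanode
            System.out.println(storageID + " -> " + datanode);
          }
        }
      }
    }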
@@ -21,9 +21,8 @@ import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hdfs.protocol.Block;
 
-/** A class to implement an array of BlockLocations
- * It provide efficient customized serialization/deserialization methods
- * in stead of using the default array (de)serialization provided by RPC
+/**
+ * Maintains an array of blocks and their corresponding storage IDs.
  */
 @InterfaceAudience.Private
 @InterfaceStability.Evolving
@@ -36,12 +35,12 @@ public class BlocksWithLocations {
   @InterfaceStability.Evolving
   public static class BlockWithLocations {
     Block block;
-    String datanodeIDs[];
+    String storageIDs[];
 
     /** constructor */
-    public BlockWithLocations(Block b, String[] datanodes) {
-      block = b;
-      datanodeIDs = datanodes;
+    public BlockWithLocations(Block block, String[] storageIDs) {
+      this.block = block;
+      this.storageIDs = storageIDs;
     }
 
     /** get the block */
@@ -50,15 +49,15 @@ public class BlocksWithLocations {
     }
 
     /** get the block's locations */
-    public String[] getDatanodes() {
-      return datanodeIDs;
+    public String[] getStorageIDs() {
+      return storageIDs;
     }
   }
 
   private BlockWithLocations[] blocks;
 
   /** Constructor with one parameter */
-  public BlocksWithLocations( BlockWithLocations[] blocks ) {
+  public BlocksWithLocations(BlockWithLocations[] blocks) {
     this.blocks = blocks;
   }
 
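Note: with the constructor parameters renamed to match the fields (block, storageIDs), the assignments must be qualified with 'this.'; the old 'block = b' style would otherwise become a self-assignment of the parameter. A tiny standalone illustration of that shadowing rule (plain Java, no HDFS types):

    public class ShadowingSketch {
      static class Holder {
        String[] storageIDs;

        Holder(String[] storageIDs) {
          // Without 'this.', the statement 'storageIDs = storageIDs' would only
          // reassign the parameter to itself and leave the field null.
          this.storageIDs = storageIDs;
        }
      }

      public static void main(String[] args) {
        Holder h = new Holder(new String[] {"storage-1"});
        System.out.println(h.storageIDs.length); // prints 1
      }
    }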
@@ -274,7 +274,7 @@ message BlockProto {
  */
 message BlockWithLocationsProto {
   required BlockProto block = 1; // Block
-  repeated string datanodeIDs = 2; // Datanodes with replicas of the block
+  repeated string storageIDs = 2; // Datanodes with replicas of the block
 }
 
 /**
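Note: protocol buffers identify fields on the wire by tag number, not by name, so renaming datanodeIDs to storageIDs while keeping tag 2 leaves the serialized format unchanged; only the generated accessor names (addAllStorageIDs, getStorageIDsList) change, which is why the protobuf-facing Java updates are confined to PBHelper and its tests.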
@@ -101,18 +101,18 @@ public class TestGetBlocks extends TestCase {
     BlockWithLocations[] locs;
     locs = namenode.getBlocks(dataNodes[0], fileLen).getBlocks();
     assertEquals(locs.length, 2);
-    assertEquals(locs[0].getDatanodes().length, 2);
-    assertEquals(locs[1].getDatanodes().length, 2);
+    assertEquals(locs[0].getStorageIDs().length, 2);
+    assertEquals(locs[1].getStorageIDs().length, 2);
 
     // get blocks of size BlockSize from dataNodes[0]
     locs = namenode.getBlocks(dataNodes[0], DEFAULT_BLOCK_SIZE).getBlocks();
     assertEquals(locs.length, 1);
-    assertEquals(locs[0].getDatanodes().length, 2);
+    assertEquals(locs[0].getStorageIDs().length, 2);
 
     // get blocks of size 1 from dataNodes[0]
     locs = namenode.getBlocks(dataNodes[0], 1).getBlocks();
     assertEquals(locs.length, 1);
-    assertEquals(locs[0].getDatanodes().length, 2);
+    assertEquals(locs[0].getStorageIDs().length, 2);
 
     // get blocks of size 0 from dataNodes[0]
     getBlocksWithException(namenode, dataNodes[0], 0);
@@ -161,7 +161,7 @@ public class TestPBHelper {
 
   private void compare(BlockWithLocations locs1, BlockWithLocations locs2) {
     assertEquals(locs1.getBlock(), locs2.getBlock());
-    assertTrue(Arrays.equals(locs1.getDatanodes(), locs2.getDatanodes()));
+    assertTrue(Arrays.equals(locs1.getStorageIDs(), locs2.getStorageIDs()));
   }
 
   @Test