HDFS-2968. Protocol translator for BlockRecoveryCommand broken when multiple blocks need recovery. Contributed by Todd Lipcon.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1245832 13f79535-47bb-0310-9956-ffa450edef68
Author: Todd Lipcon  2012-02-18 01:18:01 +00:00
parent a8e7f745cd
commit 9ae4fac8dd
4 changed files with 53 additions and 4 deletions
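
The failure mode is easy to see in isolation: the old translator loop advanced an index i but always converted element 0, so a BlockRecoveryCommand carrying one block round-tripped correctly while a command carrying several came back as N copies of the first block. Below is a minimal standalone sketch of that pattern (plain strings stand in for the real RecoveringBlockProto/RecoveringBlock types, and the class name is made up for the example; this is an illustration, not code from this patch):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class Hdfs2968Sketch {
  // Stand-in for PBHelper.convert(RecoveringBlockProto); here it is just identity.
  static String convert(String proto) {
    return proto;
  }

  public static void main(String[] args) {
    List<String> list = Arrays.asList("blk_1", "blk_2", "blk_3");

    // Buggy shape (pre-patch): the index advances, but element 0 is converted every time.
    List<String> buggy = new ArrayList<String>(list.size());
    for (int i = 0; i < list.size(); i++) {
      buggy.add(convert(list.get(0)));
    }

    // Fixed shape (post-patch): convert each element of the list.
    List<String> fixed = new ArrayList<String>(list.size());
    for (String rbp : list) {
      fixed.add(convert(rbp));
    }

    System.out.println("buggy: " + buggy); // [blk_1, blk_1, blk_1]
    System.out.println("fixed: " + fixed); // [blk_1, blk_2, blk_3]
  }
}

With a single element the two loops produce identical output, which is why the bug only surfaced when multiple blocks needed recovery at once.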

CHANGES.txt

@@ -213,6 +213,9 @@ Trunk (unreleased changes)
     dfs.client.block.write.replace-datanode-on-failure.enable to be mistakenly
     disabled. (atm)
 
+    HDFS-2968. Protocol translator for BlockRecoveryCommand broken when
+    multiple blocks need recovery. (todd)
+
 Release 0.23.2 - UNRELEASED
 
   INCOMPATIBLE CHANGES

PBHelper.java

@@ -767,8 +767,9 @@ public class PBHelper {
     List<RecoveringBlockProto> list = recoveryCmd.getBlocksList();
     List<RecoveringBlock> recoveringBlocks = new ArrayList<RecoveringBlock>(
         list.size());
-    for (int i = 0; i < list.size(); i++) {
-      recoveringBlocks.add(PBHelper.convert(list.get(0)));
+
+    for (RecoveringBlockProto rbp : list) {
+      recoveringBlocks.add(PBHelper.convert(rbp));
     }
     return new BlockRecoveryCommand(recoveringBlocks);
   }

BlockRecoveryCommand.java

@@ -32,6 +32,8 @@ import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableFactories;
 import org.apache.hadoop.io.WritableFactory;
 
+import com.google.common.base.Joiner;
+
 /**
  * BlockRecoveryCommand is an instruction to a data-node to recover
  * the specified blocks.
@@ -139,6 +141,15 @@ public class BlockRecoveryCommand extends DatanodeCommand {
     recoveringBlocks.add(block);
   }
 
+  @Override
+  public String toString() {
+    StringBuilder sb = new StringBuilder();
+    sb.append("BlockRecoveryCommand(\n ");
+    Joiner.on("\n ").appendTo(sb, recoveringBlocks);
+    sb.append("\n)");
+    return sb.toString();
+  }
+
   ///////////////////////////////////////////
   // Writable
   ///////////////////////////////////////////
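
For reference, the new toString() uses Guava's Joiner to print one recovering block per line inside BlockRecoveryCommand( ... ). A self-contained sketch of the same pattern, assuming only that Guava is on the classpath (the strings stand in for RecoveringBlock instances and the class name is made up):

import java.util.Arrays;
import java.util.List;

import com.google.common.base.Joiner;

public class JoinerToStringSketch {
  public static void main(String[] args) {
    List<String> recoveringBlocks = Arrays.asList("blk_1_gs10", "blk_2_gs11");

    // Same shape as the toString() added above: header, one element per line, footer.
    StringBuilder sb = new StringBuilder();
    sb.append("BlockRecoveryCommand(\n ");
    Joiner.on("\n ").appendTo(sb, recoveringBlocks); // joins the elements with the separator
    sb.append("\n)");

    // Prints:
    // BlockRecoveryCommand(
    //  blk_1_gs10
    //  blk_2_gs11
    // )
    System.out.println(sb.toString());
  }
}

The test below relies on this rendering when it compares cmd.toString() with cmd2.toString() after a round trip through protobuf.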

TestPBHelper.java

@@ -30,6 +30,7 @@ import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
 import org.apache.hadoop.hdfs.protocol.LocatedBlock;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo.AdminStates;
 import org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockCommandProto;
+import org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockRecoveryCommandProto;
 import org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.DatanodeRegistrationProto;
 import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto;
 import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto;
@@ -58,6 +59,7 @@ import org.apache.hadoop.hdfs.server.protocol.BlockCommand;
 import org.apache.hadoop.hdfs.server.protocol.BlockRecoveryCommand.RecoveringBlock;
 import org.apache.hadoop.hdfs.server.protocol.BlocksWithLocations;
 import org.apache.hadoop.hdfs.server.protocol.BlocksWithLocations.BlockWithLocations;
+import org.apache.hadoop.hdfs.server.protocol.BlockRecoveryCommand;
 import org.apache.hadoop.hdfs.server.protocol.DatanodeProtocol;
 import org.apache.hadoop.hdfs.server.protocol.DatanodeRegistration;
 import org.apache.hadoop.hdfs.server.protocol.NamenodeRegistration;
@@ -68,6 +70,10 @@ import org.apache.hadoop.io.Text;
 import org.apache.hadoop.security.token.Token;
 import org.junit.Test;
 
+import com.google.common.base.Joiner;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.Lists;
+
 /**
  * Tests for {@link PBHelper}
  */
@@ -265,9 +271,12 @@ public class TestPBHelper {
       compare(logs.get(i), logs1.get(i));
     }
   }
 
   public ExtendedBlock getExtendedBlock() {
-    return new ExtendedBlock("bpid", 1, 100, 2);
+    return getExtendedBlock(1);
+  }
+
+  public ExtendedBlock getExtendedBlock(long blkid) {
+    return new ExtendedBlock("bpid", blkid, 100, 2);
   }
 
   public DatanodeInfo getDNInfo() {
@@ -318,6 +327,31 @@ public class TestPBHelper {
     }
   }
 
+  @Test
+  public void testConvertBlockRecoveryCommand() {
+    DatanodeInfo[] dnInfo = new DatanodeInfo[] { getDNInfo(), getDNInfo() };
+
+    List<RecoveringBlock> blks = ImmutableList.of(
+        new RecoveringBlock(getExtendedBlock(1), dnInfo, 3),
+        new RecoveringBlock(getExtendedBlock(2), dnInfo, 3)
+    );
+
+    BlockRecoveryCommand cmd = new BlockRecoveryCommand(blks);
+    BlockRecoveryCommandProto proto = PBHelper.convert(cmd);
+    assertEquals(1, proto.getBlocks(0).getBlock().getB().getBlockId());
+    assertEquals(2, proto.getBlocks(1).getBlock().getB().getBlockId());
+
+    BlockRecoveryCommand cmd2 = PBHelper.convert(proto);
+
+    List<RecoveringBlock> cmd2Blks = Lists.newArrayList(
+        cmd2.getRecoveringBlocks());
+    assertEquals(blks.get(0).getBlock(), cmd2Blks.get(0).getBlock());
+    assertEquals(blks.get(1).getBlock(), cmd2Blks.get(1).getBlock());
+    assertEquals(Joiner.on(",").join(blks), Joiner.on(",").join(cmd2Blks));
+    assertEquals(cmd.toString(), cmd2.toString());
+  }
+
   @Test
   public void testConvertText() {
     Text t = new Text("abc".getBytes());