MAPREDUCE-2588. Change raid to the new DataTransferProtocol API.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1135209 13f79535-47bb-0310-9956-ffa450edef68
Author: Tsz-wo Sze
Date: 2011-06-13 18:16:45 +00:00
parent adfa76a8a1
commit 87fee9cb2a
3 changed files with 5 additions and 4 deletions

@@ -290,6 +290,8 @@ Trunk (unreleased changes)
     MAPREDUCE-2581. Spelling errors in log messages. (Tim Sell via eli)
+    MAPREDUCE-2588. Change raid to the new DataTransferProtocol API. (szetszwo)
 Release 0.22.0 - Unreleased
   INCOMPATIBLE CHANGES

@@ -32,7 +32,7 @@ import org.apache.commons.logging.Log;
 import org.apache.hadoop.fs.ChecksumException;
 import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
 import org.apache.hadoop.hdfs.protocol.FSConstants;
-import org.apache.hadoop.hdfs.protocol.DataTransferProtocol.PacketHeader;
+import org.apache.hadoop.hdfs.protocol.datatransfer.PacketHeader;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.net.SocketOutputStream;
 import org.apache.hadoop.util.DataChecksum;

@@ -43,7 +43,7 @@ import java.lang.reflect.InvocationTargetException;
 import org.apache.hadoop.util.DataChecksum;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
-import org.apache.hadoop.hdfs.protocol.DataTransferProtocol;
+import org.apache.hadoop.hdfs.protocol.datatransfer.*;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
 import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
 import org.apache.hadoop.hdfs.protocol.FSConstants;
@@ -780,8 +780,7 @@ public abstract class BlockFixer extends Configured implements Runnable {
       });
       DatanodeInfo[] nodes = new DatanodeInfo[]{datanode};
-      DataTransferProtocol.Sender.opWriteBlock(out, block.getBlock(), 1,
-          DataTransferProtocol.
+      Sender.opWriteBlock(out, block.getBlock(), 1,
           BlockConstructionStage.
           PIPELINE_SETUP_CREATE,
           0, blockSize, 0, "", null,