svn merge -c 1311959 from trunk for MAPREDUCE-4057.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1311960 13f79535-47bb-0310-9956-ffa450edef68
Tsz-wo Sze 2012-04-10 19:50:09 +00:00
parent 07321a677c
commit b46380e463
4 changed files with 15 additions and 9 deletions

View File

@@ -125,6 +125,9 @@ Release 2.0.0 - UNRELEASED
     MAPREDUCE-3869. Fix classpath for DistributedShell application. (Devaraj K
     via sseth)
 
+    MAPREDUCE-4057. Update RAID for the HA and fsdataset changes. (Devaraj K
+    via szetszwo)
+
 Release 0.23.3 - UNRELEASED
 
   INCOMPATIBLE CHANGES

View File

@@ -33,6 +33,7 @@
 import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants;
 import org.apache.hadoop.hdfs.protocol.datatransfer.PacketHeader;
+import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsDatasetSpi;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.net.SocketOutputStream;
 import org.apache.hadoop.util.DataChecksum;
@@ -441,9 +442,9 @@ public static interface InputStreamFactory {
   private static class BlockInputStreamFactory implements InputStreamFactory {
     private final ExtendedBlock block;
-    private final FSDatasetInterface data;
+    private final FsDatasetSpi<?> data;
 
-    private BlockInputStreamFactory(ExtendedBlock block, FSDatasetInterface data) {
+    private BlockInputStreamFactory(ExtendedBlock block, FsDatasetSpi<?> data) {
       this.block = block;
       this.data = data;
     }
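The hunks above switch the RAID block sender from the removed FSDatasetInterface to the FsDatasetSpi<?> abstraction; the wildcard keeps the factory independent of any concrete dataset implementation. A minimal sketch of how such a factory can open a replica stream, assuming FsDatasetSpi exposes getBlockInputStream(ExtendedBlock, long) in this branch; the class and method below are illustrative, not code from this commit:

import java.io.IOException;
import java.io.InputStream;

import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsDatasetSpi;

// Illustrative sketch only: shows the post-MAPREDUCE-4057 pattern of holding
// an FsDatasetSpi<?> reference and asking it for a block input stream.
class BlockStreamSketch {
  static InputStream openBlock(FsDatasetSpi<?> data, ExtendedBlock block,
      long seekOffset) throws IOException {
    // Assumed accessor: returns a stream positioned at seekOffset within
    // the replica that stores the given block.
    return data.getBlockInputStream(block, seekOffset);
  }
}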

View File

@@ -22,6 +22,7 @@
 import org.apache.hadoop.classification.*;
 import org.apache.hadoop.fs.*;
 import org.apache.hadoop.hdfs.protocol.*;
+import org.apache.hadoop.ipc.StandbyException;
 import org.apache.hadoop.security.AccessControlException;
 
 /** Utilities used by RAID for accessing NameNode. */
@@ -35,10 +36,11 @@ public static HdfsFileStatus getFileInfo(final FSDirectory dir,
     return dir.getFileInfo(src, resolveLink);
   }
 
-  /** Accessing FSNamesystem.getFileInfo(..) */
+  /** Accessing FSNamesystem.getFileInfo(..)
+   * @throws StandbyException
+   */
   public static HdfsFileStatus getFileInfo(final FSNamesystem namesystem,
       final String src, final boolean resolveLink
-      ) throws AccessControlException, UnresolvedLinkException {
+      ) throws AccessControlException, UnresolvedLinkException, StandbyException {
    return namesystem.getFileInfo(src, resolveLink);
  }
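The added StandbyException is what makes this helper HA-aware: a standby NameNode rejects the call instead of serving possibly stale metadata, and callers can retry once a failover makes the other node active. A minimal sketch of such a caller, assuming the helper lives in a RAID utility class (called NameNodeRaidUtil here for illustration); the retry policy is an assumption, not part of this commit:

import org.apache.hadoop.fs.UnresolvedLinkException;
import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
import org.apache.hadoop.ipc.StandbyException;
import org.apache.hadoop.security.AccessControlException;

// Illustrative sketch only: treats StandbyException as retriable, backing
// off until one NameNode becomes active again.
class HaAwareLookupSketch {
  static HdfsFileStatus getFileInfoWithRetry(FSNamesystem namesystem,
      String src, int maxAttempts)
      throws AccessControlException, UnresolvedLinkException,
      StandbyException, InterruptedException {
    for (int attempt = 1; ; attempt++) {
      try {
        return NameNodeRaidUtil.getFileInfo(namesystem, src, false);
      } catch (StandbyException e) {
        if (attempt >= maxAttempts) {
          throw e; // still standby after all attempts; give up
        }
        Thread.sleep(1000L * attempt); // linear backoff, illustrative
      }
    }
  }
}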

View File

@@ -622,8 +622,8 @@ private DatanodeInfo chooseDatanode(DatanodeInfo[] locationsToAvoid)
       int idx = rand.nextInt(live.length);
       chosen = live[idx];
       for (DatanodeInfo avoid: locationsToAvoid) {
-        if (chosen.name.equals(avoid.name)) {
-          LOG.info("Avoiding " + avoid.name);
+        if (chosen.getName().equals(avoid.getName())) {
+          LOG.info("Avoiding " + avoid.getName());
           chosen = null;
           break;
         }
@@ -632,7 +632,7 @@
     if (chosen == null) {
       throw new IOException("Could not choose datanode");
     }
-    LOG.info("Choosing datanode " + chosen.name);
+    LOG.info("Choosing datanode " + chosen.getName());
     return chosen;
   }
@@ -736,7 +736,7 @@ private void sendFixedBlock(DatanodeInfo datanode,
                               DataInputStream metadataIn,
                               LocatedBlock block, long blockSize)
     throws IOException {
-    InetSocketAddress target = NetUtils.createSocketAddr(datanode.name);
+    InetSocketAddress target = NetUtils.createSocketAddr(datanode.getName());
     Socket sock = SocketChannel.open().socket();
 
     int readTimeout =
@@ -785,7 +785,7 @@ private void sendFixedBlock(DatanodeInfo datanode,
         1, 0L, blockSize, 0L, DataChecksum.newDataChecksum(metadataIn));
       blockSender.sendBlock(out, baseStream);
-      LOG.info("Sent block " + block.getBlock() + " to " + datanode.name);
+      LOG.info("Sent block " + block.getBlock() + " to " + datanode.getName());
     } finally {
       out.close();
     }
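All four hunks in this file are the same mechanical migration: the public DatanodeInfo.name field (a "host:port" string inherited from DatanodeID) is replaced by the getName() accessor, both for logging and for building socket addresses. A minimal sketch of the before/after pattern; the wrapper class is illustrative, not code from this commit:

import java.net.InetSocketAddress;

import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.net.NetUtils;

// Illustrative sketch only: getName() returns the same "host:port" string
// the old public field held, so behavior at the call sites is unchanged.
class DatanodeNameSketch {
  static InetSocketAddress toAddress(DatanodeInfo datanode) {
    // before MAPREDUCE-4057: NetUtils.createSocketAddr(datanode.name)
    return NetUtils.createSocketAddr(datanode.getName());
  }
}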