svn merge -c 1311959 from trunk for MAPREDUCE-4057.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1311960 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Tsz-wo Sze 2012-04-10 19:50:09 +00:00
parent 07321a677c
commit b46380e463
4 changed files with 15 additions and 9 deletions

View File

@ -125,6 +125,9 @@ Release 2.0.0 - UNRELEASED
MAPREDUCE-3869. Fix classpath for DistributedShell application. (Devaraj K
via sseth)
MAPREDUCE-4057. Update RAID for the HA and fsdataset changes. (Devaraj K
via szetszwo)
Release 0.23.3 - UNRELEASED
INCOMPATIBLE CHANGES

View File

@ -33,6 +33,7 @@ import org.apache.hadoop.fs.ChecksumException;
import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
import org.apache.hadoop.hdfs.protocol.HdfsConstants;
import org.apache.hadoop.hdfs.protocol.datatransfer.PacketHeader;
import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsDatasetSpi;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.net.SocketOutputStream;
import org.apache.hadoop.util.DataChecksum;
@ -441,9 +442,9 @@ public class RaidBlockSender implements java.io.Closeable {
private static class BlockInputStreamFactory implements InputStreamFactory {
private final ExtendedBlock block;
-    private final FSDatasetInterface data;
+    private final FsDatasetSpi<?> data;
-    private BlockInputStreamFactory(ExtendedBlock block, FSDatasetInterface data) {
+    private BlockInputStreamFactory(ExtendedBlock block, FsDatasetSpi<?> data) {
this.block = block;
this.data = data;
}

View File

@ -22,6 +22,7 @@ import java.io.*;
import org.apache.hadoop.classification.*;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.hdfs.protocol.*;
import org.apache.hadoop.ipc.StandbyException;
import org.apache.hadoop.security.AccessControlException;
/** Utilities used by RAID for accessing NameNode. */
@ -35,10 +36,11 @@ public class NameNodeRaidUtil {
return dir.getFileInfo(src, resolveLink);
}
-  /** Accessing FSNamesystem.getFileInfo(..) */
+  /** Accessing FSNamesystem.getFileInfo(..)
+   * @throws StandbyException */
public static HdfsFileStatus getFileInfo(final FSNamesystem namesystem,
final String src, final boolean resolveLink
-      ) throws AccessControlException, UnresolvedLinkException {
+      ) throws AccessControlException, UnresolvedLinkException, StandbyException {
return namesystem.getFileInfo(src, resolveLink);
}

View File

@ -622,8 +622,8 @@ public abstract class BlockFixer extends Configured implements Runnable {
int idx = rand.nextInt(live.length);
chosen = live[idx];
for (DatanodeInfo avoid: locationsToAvoid) {
-        if (chosen.name.equals(avoid.name)) {
-          LOG.info("Avoiding " + avoid.name);
+        if (chosen.getName().equals(avoid.getName())) {
+          LOG.info("Avoiding " + avoid.getName());
chosen = null;
break;
}
@ -632,7 +632,7 @@ public abstract class BlockFixer extends Configured implements Runnable {
if (chosen == null) {
throw new IOException("Could not choose datanode");
}
-    LOG.info("Choosing datanode " + chosen.name);
+    LOG.info("Choosing datanode " + chosen.getName());
return chosen;
}
@ -736,7 +736,7 @@ public abstract class BlockFixer extends Configured implements Runnable {
DataInputStream metadataIn,
LocatedBlock block, long blockSize)
throws IOException {
-    InetSocketAddress target = NetUtils.createSocketAddr(datanode.name);
+    InetSocketAddress target = NetUtils.createSocketAddr(datanode.getName());
Socket sock = SocketChannel.open().socket();
int readTimeout =
@ -785,7 +785,7 @@ public abstract class BlockFixer extends Configured implements Runnable {
1, 0L, blockSize, 0L, DataChecksum.newDataChecksum(metadataIn));
blockSender.sendBlock(out, baseStream);
-    LOG.info("Sent block " + block.getBlock() + " to " + datanode.name);
+    LOG.info("Sent block " + block.getBlock() + " to " + datanode.getName());
} finally {
out.close();
}