From d9b740cb1413de7a6631275c2ecb19d11ef1f0c5 Mon Sep 17 00:00:00 2001
From: Tsz-wo Sze
Date: Tue, 10 Apr 2012 19:46:44 +0000
Subject: [PATCH] MAPREDUCE-4057. Update RAID for the HA and fsdataset
 changes. Contributed by Devaraj K

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1311959 13f79535-47bb-0310-9956-ffa450edef68
---
 hadoop-mapreduce-project/CHANGES.txt                   |  3 +++
 .../hadoop/hdfs/server/datanode/RaidBlockSender.java   |  5 +++--
 .../hadoop/hdfs/server/namenode/NameNodeRaidUtil.java  |  6 ++++--
 .../src/java/org/apache/hadoop/raid/BlockFixer.java    | 10 +++++-----
 4 files changed, 15 insertions(+), 9 deletions(-)

diff --git a/hadoop-mapreduce-project/CHANGES.txt b/hadoop-mapreduce-project/CHANGES.txt
index 4b539aaf1c4..55778b5cf51 100644
--- a/hadoop-mapreduce-project/CHANGES.txt
+++ b/hadoop-mapreduce-project/CHANGES.txt
@@ -224,6 +224,9 @@ Release 2.0.0 - UNRELEASED
     MAPREDUCE-3869. Fix classpath for DistributedShell application. (Devaraj K
     via sseth)
 
+    MAPREDUCE-4057. Update RAID for the HA and fsdataset changes. (Devaraj K
+    via szetszwo)
+
 Release 0.23.3 - UNRELEASED
 
   INCOMPATIBLE CHANGES
diff --git a/hadoop-mapreduce-project/src/contrib/raid/src/java/org/apache/hadoop/hdfs/server/datanode/RaidBlockSender.java b/hadoop-mapreduce-project/src/contrib/raid/src/java/org/apache/hadoop/hdfs/server/datanode/RaidBlockSender.java
index c1fc998471d..a29a3ca1b12 100644
--- a/hadoop-mapreduce-project/src/contrib/raid/src/java/org/apache/hadoop/hdfs/server/datanode/RaidBlockSender.java
+++ b/hadoop-mapreduce-project/src/contrib/raid/src/java/org/apache/hadoop/hdfs/server/datanode/RaidBlockSender.java
@@ -33,6 +33,7 @@ import org.apache.hadoop.fs.ChecksumException;
 import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants;
 import org.apache.hadoop.hdfs.protocol.datatransfer.PacketHeader;
+import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsDatasetSpi;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.net.SocketOutputStream;
 import org.apache.hadoop.util.DataChecksum;
@@ -441,9 +442,9 @@ public class RaidBlockSender implements java.io.Closeable {
 
   private static class BlockInputStreamFactory implements InputStreamFactory {
     private final ExtendedBlock block;
-    private final FSDatasetInterface data;
+    private final FsDatasetSpi data;
 
-    private BlockInputStreamFactory(ExtendedBlock block, FSDatasetInterface data) {
+    private BlockInputStreamFactory(ExtendedBlock block, FsDatasetSpi data) {
       this.block = block;
       this.data = data;
     }
diff --git a/hadoop-mapreduce-project/src/contrib/raid/src/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRaidUtil.java b/hadoop-mapreduce-project/src/contrib/raid/src/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRaidUtil.java
index 6b2c32da9df..531a0f238e9 100644
--- a/hadoop-mapreduce-project/src/contrib/raid/src/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRaidUtil.java
+++ b/hadoop-mapreduce-project/src/contrib/raid/src/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRaidUtil.java
@@ -22,6 +22,7 @@ import java.io.*;
 import org.apache.hadoop.classification.*;
 import org.apache.hadoop.fs.*;
 import org.apache.hadoop.hdfs.protocol.*;
+import org.apache.hadoop.ipc.StandbyException;
 import org.apache.hadoop.security.AccessControlException;
 
 /** Utilities used by RAID for accessing NameNode. */
@@ -35,10 +36,11 @@ public class NameNodeRaidUtil {
     return dir.getFileInfo(src, resolveLink);
   }
 
-  /** Accessing FSNamesystem.getFileInfo(..) */
+  /** Accessing FSNamesystem.getFileInfo(..)
+   * @throws StandbyException */
   public static HdfsFileStatus getFileInfo(final FSNamesystem namesystem,
       final String src, final boolean resolveLink
-      ) throws AccessControlException, UnresolvedLinkException {
+      ) throws AccessControlException, UnresolvedLinkException, StandbyException {
     return namesystem.getFileInfo(src, resolveLink);
   }
 
diff --git a/hadoop-mapreduce-project/src/contrib/raid/src/java/org/apache/hadoop/raid/BlockFixer.java b/hadoop-mapreduce-project/src/contrib/raid/src/java/org/apache/hadoop/raid/BlockFixer.java
index dabb73564a3..6e1d7f79175 100644
--- a/hadoop-mapreduce-project/src/contrib/raid/src/java/org/apache/hadoop/raid/BlockFixer.java
+++ b/hadoop-mapreduce-project/src/contrib/raid/src/java/org/apache/hadoop/raid/BlockFixer.java
@@ -622,8 +622,8 @@ public abstract class BlockFixer extends Configured implements Runnable {
       int idx = rand.nextInt(live.length);
       chosen = live[idx];
       for (DatanodeInfo avoid: locationsToAvoid) {
-        if (chosen.name.equals(avoid.name)) {
-          LOG.info("Avoiding " + avoid.name);
+        if (chosen.getName().equals(avoid.getName())) {
+          LOG.info("Avoiding " + avoid.getName());
           chosen = null;
           break;
         }
@@ -632,7 +632,7 @@ public abstract class BlockFixer extends Configured implements Runnable {
     if (chosen == null) {
       throw new IOException("Could not choose datanode");
     }
-    LOG.info("Choosing datanode " + chosen.name);
+    LOG.info("Choosing datanode " + chosen.getName());
     return chosen;
   }
 
@@ -736,7 +736,7 @@ public abstract class BlockFixer extends Configured implements Runnable {
                          DataInputStream metadataIn,
                          LocatedBlock block, long blockSize)
     throws IOException {
-    InetSocketAddress target = NetUtils.createSocketAddr(datanode.name);
+    InetSocketAddress target = NetUtils.createSocketAddr(datanode.getName());
     Socket sock = SocketChannel.open().socket();
 
     int readTimeout =
@@ -785,7 +785,7 @@ public abstract class BlockFixer extends Configured implements Runnable {
                                      1, 0L, blockSize, 0L,
                                      DataChecksum.newDataChecksum(metadataIn));
       blockSender.sendBlock(out, baseStream);
-      LOG.info("Sent block " + block.getBlock() + " to " + datanode.name);
+      LOG.info("Sent block " + block.getBlock() + " to " + datanode.getName());
     } finally {
       out.close();
     }