From 1a8139e6addf7df6459b397c813ba3bf2c74e3c2 Mon Sep 17 00:00:00 2001
From: Jing Zhao
Date: Wed, 29 Apr 2015 11:35:58 -0700
Subject: [PATCH] Fix merge conflicts.

---
 .../org/apache/hadoop/hdfs/DFSInputStream.java     |  7 +++----
 .../hadoop/hdfs/DFSStripedOutputStream.java        | 15 ++++-----------
 .../apache/hadoop/hdfs/StripedDataStreamer.java    |  7 ++++---
 3 files changed, 11 insertions(+), 18 deletions(-)

diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSInputStream.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSInputStream.java
index 6649f4c87ca..6e58cd60019 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSInputStream.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSInputStream.java
@@ -1119,7 +1119,7 @@ implements ByteBufferReadable, CanSetDropBehind, CanSetReadahead,
   /**
    * Read data from one DataNode.
    * @param datanode the datanode from which to read data
-   * @param block the block to read
+   * @param blockStartOffset starting offset in the file
    * @param startInBlk the startInBlk offset of the block
    * @param endInBlk the endInBlk offset of the block
    * @param buf the given byte array into which the data is read
@@ -1149,7 +1149,7 @@ implements ByteBufferReadable, CanSetDropBehind, CanSetReadahead,
     BlockReader reader = null;
     try {
       DFSClientFaultInjector.get().fetchFromDatanodeException();
-      reader = getBlockReader(block, start, len, datanode.addr,
+      reader = getBlockReader(block, startInBlk, len, datanode.addr,
           datanode.storageType, datanode.info);
       for (int i = 0; i < offsets.length; i++) {
         int nread = reader.readAll(buf, offsets[i], lengths[i]);
@@ -1206,8 +1206,7 @@ implements ByteBufferReadable, CanSetDropBehind, CanSetReadahead,
    * with each other.
    */
   private void checkReadPortions(int[] offsets, int[] lengths, int totalLen) {
-    Preconditions.checkArgument(offsets.length == lengths.length &&
-        offsets.length > 0);
+    Preconditions.checkArgument(offsets.length == lengths.length && offsets.length > 0);
     int sum = 0;
     for (int i = 0; i < lengths.length; i++) {
       if (i > 0) {
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSStripedOutputStream.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSStripedOutputStream.java
index 68422675a1e..c9301877b8d 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSStripedOutputStream.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSStripedOutputStream.java
@@ -124,10 +124,7 @@ public class DFSStripedOutputStream extends DFSOutputStream {
     for (short i = 0; i < numAllBlocks; i++) {
       StripedDataStreamer streamer = new StripedDataStreamer(stat, null,
           dfsClient, src, progress, checksum, cachingStrategy, byteArrayManager,
-          i, stripeBlocks);
-      if (favoredNodes != null && favoredNodes.length != 0) {
-        streamer.setFavoredNodes(favoredNodes);
-      }
+          i, stripeBlocks, favoredNodes);
       s.add(streamer);
     }
     streamers = Collections.unmodifiableList(s);
@@ -316,7 +313,7 @@ public class DFSStripedOutputStream extends DFSOutputStream {
       return;
     }
     for (StripedDataStreamer streamer : streamers) {
-      streamer.setLastException(new IOException("Lease timeout of "
+      streamer.getLastException().set(new IOException("Lease timeout of "
          + (dfsClient.getConf().getHdfsTimeout()/1000) + " seconds expired."));
     }
@@ -414,12 +411,8 @@ public class DFSStripedOutputStream extends DFSOutputStream {
   @Override
   protected synchronized void closeImpl() throws IOException {
     if (isClosed()) {
-      IOException e = getLeadingStreamer().getLastException().getAndSet(null);
-      if (e != null) {
-        throw e;
-      } else {
-        return;
-      }
+      getLeadingStreamer().getLastException().check();
+      return;
     }
 
     try {
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/StripedDataStreamer.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/StripedDataStreamer.java
index 19c205e27b8..ef7e2a687a6 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/StripedDataStreamer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/StripedDataStreamer.java
@@ -58,9 +58,10 @@ public class StripedDataStreamer extends DataStreamer {
                       Progressable progress, DataChecksum checksum,
                       AtomicReference<CachingStrategy> cachingStrategy,
                       ByteArrayManager byteArrayManage, short index,
-                      List<BlockingQueue<LocatedBlock>> stripedBlocks) {
-    super(stat,block, dfsClient, src, progress, checksum, cachingStrategy,
-        byteArrayManage);
+                      List<BlockingQueue<LocatedBlock>> stripedBlocks,
+                      String[] favoredNodes) {
+    super(stat, block, dfsClient, src, progress, checksum, cachingStrategy,
+        byteArrayManage, favoredNodes);
     this.index = index;
     this.stripedBlocks = stripedBlocks;
   }