diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/StatefulStripeReader.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/StatefulStripeReader.java
index b37501d2e2b..98a6ba13b02 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/StatefulStripeReader.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/StatefulStripeReader.java
@@ -52,7 +52,9 @@ class StatefulStripeReader extends StripeReader {
       cur = dfsStripedInputStream.getCurStripeBuf().duplicate();
     }
 
-    this.decodeInputs = new ECChunk[dataBlkNum + parityBlkNum];
+    if (this.decodeInputs == null) {
+      this.decodeInputs = new ECChunk[dataBlkNum + parityBlkNum];
+    }
     int bufLen = (int) alignedStripe.getSpanInBlock();
     int bufOff = (int) alignedStripe.getOffsetInBlock();
     for (int i = 0; i < dataBlkNum; i++) {
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestReadStripedFileWithDecoding.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestReadStripedFileWithDecoding.java
index 2fb9212f354..99ea6b687eb 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestReadStripedFileWithDecoding.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestReadStripedFileWithDecoding.java
@@ -43,6 +43,7 @@ import java.io.FileOutputStream;
 import java.io.IOException;
 import java.nio.ByteBuffer;
 
+import static org.apache.hadoop.hdfs.ReadStripedFileWithDecodingHelper.BLOCK_SIZE;
 import static org.apache.hadoop.hdfs.ReadStripedFileWithDecodingHelper.CELL_SIZE;
 import static org.apache.hadoop.hdfs.ReadStripedFileWithDecodingHelper.NUM_DATA_UNITS;
 import static org.apache.hadoop.hdfs.ReadStripedFileWithDecodingHelper.NUM_PARITY_UNITS;
@@ -162,4 +163,21 @@ public class TestReadStripedFileWithDecoding {
       DataNodeTestUtils.setHeartbeatsDisabledForTests(dn, false);
     }
   }
+
+  @Test
+  public void testMoreThanOneCorruptedBlock() throws IOException {
+    final Path file = new Path("/corrupted");
+    final int length = BLOCK_SIZE * NUM_DATA_UNITS;
+    final byte[] bytes = StripedFileTestUtil.generateBytes(length);
+    DFSTestUtil.writeFile(dfs, file, bytes);
+
+    // read the file with more than one corrupted data block
+    byte[] buffer = new byte[length + 100];
+    for (int count = 2; count < NUM_PARITY_UNITS; ++count) {
+      ReadStripedFileWithDecodingHelper.corruptBlocks(cluster, dfs, file, count, 0,
+          false);
+      StripedFileTestUtil.verifyStatefulRead(dfs, file, length, bytes,
+          buffer);
+    }
+  }
 }