HDFS-16538. EC decoding failed due to not enough valid inputs (#4167)

Co-authored-by: liubingxing <liubingxing@bigo.sg>
(cherry picked from commit 52e152f8b0)
qinyuren 2022-04-19 12:37:28 +08:00 committed by Takanobu Asanuma
parent cfe2d8aa79
commit c913dc3072
2 changed files with 21 additions and 1 deletion


@@ -52,7 +52,9 @@ void prepareDecodeInputs() {
       cur = dfsStripedInputStream.getCurStripeBuf().duplicate();
     }
 
-    this.decodeInputs = new ECChunk[dataBlkNum + parityBlkNum];
+    if (this.decodeInputs == null) {
+      this.decodeInputs = new ECChunk[dataBlkNum + parityBlkNum];
+    }
     int bufLen = (int) alignedStripe.getSpanInBlock();
     int bufOff = (int) alignedStripe.getOffsetInBlock();
     for (int i = 0; i < dataBlkNum; i++) {
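The hunk above makes the allocation of decodeInputs idempotent: if prepareDecodeInputs() runs again for the same stripe, inputs staged by an earlier pass are no longer discarded; per the issue title, that unconditional reallocation is what could leave the decoder without enough valid inputs. A minimal sketch of the guard, assuming a stripped-down holder class (every name other than decodeInputs, dataBlkNum, parityBlkNum and ECChunk is illustrative, not taken from this commit):

    import org.apache.hadoop.io.erasurecode.ECChunk;

    // Illustrative stand-in for the stripe reader that owns decodeInputs.
    class DecodeInputsHolder {
      private final int dataBlkNum;    // e.g. 6 for an RS-6-3 policy (assumption)
      private final int parityBlkNum;  // e.g. 3 for an RS-6-3 policy (assumption)
      private ECChunk[] decodeInputs;  // must survive repeated prepare calls

      DecodeInputsHolder(int dataBlkNum, int parityBlkNum) {
        this.dataBlkNum = dataBlkNum;
        this.parityBlkNum = parityBlkNum;
      }

      void prepareDecodeInputs() {
        // Allocate lazily and only once: a second prepare pass must not wipe
        // out chunks that an earlier pass already filled in.
        if (decodeInputs == null) {
          decodeInputs = new ECChunk[dataBlkNum + parityBlkNum];
        }
        // ... populate decodeInputs[i] for the blocks covered by this read ...
      }
    }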


@@ -44,6 +44,7 @@
 import java.io.IOException;
 import java.nio.ByteBuffer;
 
+import static org.apache.hadoop.hdfs.ReadStripedFileWithDecodingHelper.BLOCK_SIZE;
 import static org.apache.hadoop.hdfs.ReadStripedFileWithDecodingHelper.CELL_SIZE;
 import static org.apache.hadoop.hdfs.ReadStripedFileWithDecodingHelper.NUM_DATA_UNITS;
 import static org.apache.hadoop.hdfs.ReadStripedFileWithDecodingHelper.NUM_PARITY_UNITS;
@@ -165,4 +166,21 @@ public void testInvalidateBlock() throws IOException, InterruptedException {
       DataNodeTestUtils.setHeartbeatsDisabledForTests(dn, false);
     }
   }
+
+  @Test
+  public void testMoreThanOneCorruptedBlock() throws IOException {
+    final Path file = new Path("/corrupted");
+    final int length = BLOCK_SIZE * NUM_DATA_UNITS;
+    final byte[] bytes = StripedFileTestUtil.generateBytes(length);
+    DFSTestUtil.writeFile(dfs, file, bytes);
+
+    // read the file with more than one corrupted data block
+    byte[] buffer = new byte[length + 100];
+    for (int count = 2; count < NUM_PARITY_UNITS; ++count) {
+      ReadStripedFileWithDecodingHelper.corruptBlocks(cluster, dfs, file, count, 0,
+          false);
+      StripedFileTestUtil.verifyStatefulRead(dfs, file, length, bytes,
+          buffer);
+    }
+  }
 }
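The new test writes a file of BLOCK_SIZE * NUM_DATA_UNITS bytes, corrupts more than one data block (count runs from 2 up to NUM_PARITY_UNITS - 1), and verifies that a stateful read still returns the original bytes. For reference, a plain-client view of the read path it exercises might look like the sketch below (the path, buffer size and default Configuration are assumptions for illustration, not taken from the commit):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataInputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class StatefulStripedRead {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        try (FileSystem fs = FileSystem.get(conf);
             FSDataInputStream in = fs.open(new Path("/corrupted"))) {
          byte[] buf = new byte[64 * 1024];
          long total = 0;
          int n;
          // A stateful read advances an internal cursor stripe by stripe; with
          // this fix the striped input stream can still decode a stripe when
          // more than one data block is unreadable, provided enough parity
          // blocks survive.
          while ((n = in.read(buf)) > 0) {
            total += n;
          }
          System.out.println("read " + total + " bytes");
        }
      }
    }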