HDFS-16538. EC decoding failed due to not enough valid inputs (#4167)
Co-authored-by: liubingxing <liubingxing@bigo.sg>
(cherry picked from commit 52e152f8b0)
parent ed83a278c2
commit d993d22038
@@ -52,7 +52,9 @@ class StatefulStripeReader extends StripeReader {
       cur = dfsStripedInputStream.getCurStripeBuf().duplicate();
     }

-    this.decodeInputs = new ECChunk[dataBlkNum + parityBlkNum];
+    if (this.decodeInputs == null) {
+      this.decodeInputs = new ECChunk[dataBlkNum + parityBlkNum];
+    }
    int bufLen = (int) alignedStripe.getSpanInBlock();
    int bufOff = (int) alignedStripe.getOffsetInBlock();
    for (int i = 0; i < dataBlkNum; i++) {
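The guard introduced above makes the decodeInputs allocation lazy. Before the patch, prepareDecodeInputs() re-allocated the array on every call, discarding chunks that earlier reads of the same stripe had already decoded; a later decode could then see fewer valid inputs than it needs, which is the "not enough valid inputs" failure named in the commit title. A minimal, self-contained Java sketch of the pattern; the names (StripeDecoderSketch, slots, prepareBuggy, prepareFixed) are hypothetical stand-ins for StatefulStripeReader, its ECChunk[] decodeInputs field, and prepareDecodeInputs():

// Sketch only: illustrates the lazy-initialization guard, not the real
// HDFS types. "slots" stands in for the ECChunk[] decodeInputs array.
public class StripeDecoderSketch {
  private byte[][] slots;
  private final int units = 9; // e.g. 6 data + 3 parity units

  // Pre-patch behavior: every call wipes out previously decoded inputs.
  void prepareBuggy() {
    slots = new byte[units][];
  }

  // Post-patch behavior: allocate once, keep recovered inputs across calls.
  void prepareFixed() {
    if (slots == null) {
      slots = new byte[units][];
    }
  }

  public static void main(String[] args) {
    StripeDecoderSketch reader = new StripeDecoderSketch();
    reader.prepareFixed();
    reader.slots[0] = new byte[]{1, 2, 3}; // pretend this chunk was decoded
    reader.prepareFixed();                 // a second prepare keeps it
    System.out.println(reader.slots[0] != null); // prints: true
  }
}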
@@ -43,6 +43,7 @@ import java.io.FileOutputStream;
 import java.io.IOException;
 import java.nio.ByteBuffer;

+import static org.apache.hadoop.hdfs.ReadStripedFileWithDecodingHelper.BLOCK_SIZE;
 import static org.apache.hadoop.hdfs.ReadStripedFileWithDecodingHelper.CELL_SIZE;
 import static org.apache.hadoop.hdfs.ReadStripedFileWithDecodingHelper.NUM_DATA_UNITS;
 import static org.apache.hadoop.hdfs.ReadStripedFileWithDecodingHelper.NUM_PARITY_UNITS;
@@ -162,4 +163,21 @@ public class TestReadStripedFileWithDecoding {
       DataNodeTestUtils.setHeartbeatsDisabledForTests(dn, false);
     }
   }
+
+  @Test
+  public void testMoreThanOneCorruptedBlock() throws IOException {
+    final Path file = new Path("/corrupted");
+    final int length = BLOCK_SIZE * NUM_DATA_UNITS;
+    final byte[] bytes = StripedFileTestUtil.generateBytes(length);
+    DFSTestUtil.writeFile(dfs, file, bytes);
+
+    // read the file with more than one corrupted data block
+    byte[] buffer = new byte[length + 100];
+    for (int count = 2; count < NUM_PARITY_UNITS; ++count) {
+      ReadStripedFileWithDecodingHelper.corruptBlocks(cluster, dfs, file, count, 0,
+          false);
+      StripedFileTestUtil.verifyStatefulRead(dfs, file, length, bytes,
+          buffer);
+    }
+  }
 }
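The added test writes a file spanning a full stripe, corrupts count data blocks (the 0 and false arguments at the call site appear to leave parity blocks untouched and to corrupt the block files rather than delete them), then asserts that a stateful read still returns the original bytes. A rough sketch of the invariant it exercises, assuming a Reed-Solomon layout with NUM_DATA_UNITS = 6 and NUM_PARITY_UNITS = 3; those values are assumptions here, the real ones come from ReadStripedFileWithDecodingHelper:

// Hypothetical sketch: with k data units and m parity units, Reed-Solomon
// decoding can rebuild a stripe as long as at most m units are lost, so
// every count the test loop visits must remain readable.
public class ErasureBudgetSketch {
  static boolean recoverable(int parityUnits, int lostUnits) {
    return lostUnits <= parityUnits;
  }

  public static void main(String[] args) {
    final int k = 6; // assumed NUM_DATA_UNITS
    final int m = 3; // assumed NUM_PARITY_UNITS
    // Mirrors the test's loop bounds: 2 <= count < NUM_PARITY_UNITS.
    for (int count = 2; count < m; ++count) {
      System.out.printf("corrupt %d of %d data blocks -> recoverable: %b%n",
          count, k, recoverable(m, count));
    }
  }
}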