diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSStripedOutputStream.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSStripedOutputStream.java
index 66eec7a360f..3eb7e678f99 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSStripedOutputStream.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSStripedOutputStream.java
@@ -198,7 +198,7 @@ public class DFSStripedOutputStream extends DFSOutputStream
     private final ByteBuffer[] buffers;
     private final byte[][] checksumArrays;
 
-    CellBuffers(int numParityBlocks) throws InterruptedException{
+    CellBuffers(int numParityBlocks) {
       if (cellSize % bytesPerChecksum != 0) {
         throw new HadoopIllegalArgumentException("Invalid values: "
             + HdfsClientConfigKeys.DFS_BYTES_PER_CHECKSUM_KEY + " (="
@@ -304,12 +304,7 @@ public class DFSStripedOutputStream extends DFSOutputStream
         ecPolicy.getCodecName(), coderOptions);
 
     coordinator = new Coordinator(numAllBlocks);
-    try {
-      cellBuffers = new CellBuffers(numParityBlocks);
-    } catch (InterruptedException ie) {
-      throw DFSUtilClient.toInterruptedIOException(
-          "Failed to create cell buffers", ie);
-    }
+    cellBuffers = new CellBuffers(numParityBlocks);
 
     streamers = new ArrayList<>(numAllBlocks);
     for (short i = 0; i < numAllBlocks; i++) {
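
The patch drops a checked InterruptedException that the CellBuffers constructor declared but never actually threw, which in turn lets the DFSStripedOutputStream constructor replace the try/catch-and-wrap (via DFSUtilClient.toInterruptedIOException) with a plain assignment. The sketch below is not the Hadoop code; it is a minimal, self-contained Java illustration of the same pattern, using hypothetical names (UnthrownExceptionSketch, BuffersBefore, BuffersAfter) to show why an unthrown checked exception forces dead handling code onto every caller.

    // Minimal sketch, assuming hypothetical classes; illustrates the pattern
    // the diff removes, not the actual Hadoop implementation.
    import java.io.InterruptedIOException;

    public class UnthrownExceptionSketch {

      // Before: the declared InterruptedException can never be thrown here.
      static class BuffersBefore {
        BuffersBefore(int numParityBlocks) throws InterruptedException {
          if (numParityBlocks <= 0) {
            throw new IllegalArgumentException("numParityBlocks must be positive");
          }
        }
      }

      // After: same validation, no spurious checked exception in the signature.
      static class BuffersAfter {
        BuffersAfter(int numParityBlocks) {
          if (numParityBlocks <= 0) {
            throw new IllegalArgumentException("numParityBlocks must be positive");
          }
        }
      }

      public static void main(String[] args) throws InterruptedIOException {
        // Caller before the change: obliged to catch and translate an
        // exception that cannot occur.
        BuffersBefore before;
        try {
          before = new BuffersBefore(3);
        } catch (InterruptedException ie) {
          InterruptedIOException iioe =
              new InterruptedIOException("Failed to create cell buffers");
          iioe.initCause(ie);
          throw iioe;
        }

        // Caller after the change: a plain assignment, mirroring the
        // simplified call site in the patched constructor.
        BuffersAfter after = new BuffersAfter(3);

        System.out.println("Constructed: " + before + ", " + after);
      }
    }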