HADOOP-15869. BlockDecompressorStream#decompress should not return -1 in case of IOException. Contributed by Surendra Singh Lilhore

This commit is contained in:
Surendra Singh Lilhore 2018-11-13 20:22:58 +05:30
parent e7b63baca1
commit 75291e6d53
2 changed files with 31 additions and 2 deletions

View File

@@ -71,8 +71,8 @@ public class BlockDecompressorStream extends DecompressorStream {
if (noUncompressedBytes == originalBlockSize) {
// Get original data size
try {
originalBlockSize = rawReadInt();
} catch (IOException ioe) {
originalBlockSize = rawReadInt();
} catch (EOFException e) {
return -1;
}
noUncompressedBytes = 0;

View File

@@ -18,11 +18,15 @@
package org.apache.hadoop.io.compress;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import org.junit.Test;
@@ -74,4 +78,29 @@ public class TestBlockDecompressorStream {
fail("unexpected IOException : " + e);
}
}
/**
 * Regression test for HADOOP-15869: a genuine read failure in the
 * underlying stream must propagate as an {@link IOException} from
 * {@code BlockDecompressorStream#read()}, not be swallowed and reported
 * as end-of-stream ({@code -1}).
 *
 * @throws IOException if creating the scratch file fails
 */
@Test
public void testReadWhenIoExceptionOccure() throws IOException {
  File file = new File("testReadWhenIOException");
  try {
    file.createNewFile();
    // Stub input stream whose read() always fails, simulating missing
    // file blocks. Declared in the try-with-resources list so it is
    // closed even when the BlockDecompressorStream constructor throws
    // (the original opened it outside the try and could leak it).
    try (InputStream io = new FileInputStream(file) {
          @Override
          public int read() throws IOException {
            throw new IOException("File blocks missing");
          }
        };
        BlockDecompressorStream blockDecompressorStream =
            new BlockDecompressorStream(io, new FakeDecompressor(), 1024)) {
      int byteRead = blockDecompressorStream.read();
      // Reaching here means the IOException was masked as EOF — fail.
      fail("Should not return -1 in case of IOException. Byte read "
          + byteRead);
    } catch (IOException e) {
      // The simulated failure must surface with its original message.
      assertTrue(e.getMessage().contains("File blocks missing"));
    }
  } finally {
    // Best-effort cleanup of the scratch file.
    file.delete();
  }
}
}