HADOOP-6925. BZip2Codec incorrectly implements read(). Contributed by Todd Lipcon.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@988748 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Eli Collins 2010-08-24 22:40:05 +00:00
parent a7887e1851
commit 19e46e358e
3 changed files with 24 additions and 5 deletions

View File

@ -211,6 +211,9 @@ Trunk (unreleased changes)
HADOOP-6453. Hadoop wrapper script shouldn't ignore an existing
JAVA_LIBRARY_PATH. (Chad Metcalf via jghoman)
HADOOP-6925. BZip2Codec incorrectly implements read().
(Todd Lipcon via Eli Collins)
Release 0.21.0 - Unreleased
INCOMPATIBLE CHANGES

View File

@ -443,7 +443,7 @@ public class BZip2Codec implements SplittableCompressionCodec {
  public int read() throws IOException {
    byte b[] = new byte[1];
    int result = this.read(b, 0, 1);
-   return (result < 0) ? result : b[0];
+   return (result < 0) ? result : (b[0] & 0xff);
  }
  private void internalReset() throws IOException {

View File

@ -131,10 +131,6 @@ public class TestCodec {
  key.write(data);
  value.write(data);
}
DataInputBuffer originalData = new DataInputBuffer();
DataInputStream originalIn = new DataInputStream(new BufferedInputStream(originalData));
originalData.reset(data.getData(), 0, data.getLength());
LOG.info("Generated " + count + " records");
// Compress data
@ -158,6 +154,9 @@ public class TestCodec {
  new DataInputStream(new BufferedInputStream(inflateFilter));
// Check
DataInputBuffer originalData = new DataInputBuffer();
originalData.reset(data.getData(), 0, data.getLength());
DataInputStream originalIn = new DataInputStream(new BufferedInputStream(originalData));
for(int i=0; i < count; ++i) {
  RandomDatum k1 = new RandomDatum();
  RandomDatum v1 = new RandomDatum();
@ -171,6 +170,23 @@ public class TestCodec {
  assertTrue("original and compressed-then-decompressed-output not equal",
             k1.equals(k2) && v1.equals(v2));
}
// De-compress data byte-at-a-time
originalData.reset(data.getData(), 0, data.getLength());
deCompressedDataBuffer.reset(compressedDataBuffer.getData(), 0,
compressedDataBuffer.getLength());
inflateFilter =
codec.createInputStream(deCompressedDataBuffer);
// Check
originalIn = new DataInputStream(new BufferedInputStream(originalData));
int expected;
do {
expected = originalIn.read();
assertEquals("Inflated stream read by byte does not match",
expected, inflateFilter.read());
} while (expected != -1);
LOG.info("SUCCESS! Completed checking " + count + " records");
}