diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index 984f7acac7a..1d9febccf2e 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -148,6 +148,8 @@ Trunk (Unreleased)
 
   BUG FIXES
 
+    HADOOP-8419. Fixed GzipCodec NPE on reset with the IBM JDK. (Yu Li via eyang)
+
     HADOOP-9041. FsUrlStreamHandlerFactory could cause an infinite loop in
     FileSystem initialization. (Yanbo Liang and Radim Kolar via llu)
 
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/GzipCodec.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/GzipCodec.java
index 520205e1660..6ac692c14e7 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/GzipCodec.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/GzipCodec.java
@@ -40,14 +40,74 @@ public class GzipCodec extends DefaultCodec {
   protected static class GzipOutputStream extends CompressorStream {
 
     private static class ResetableGZIPOutputStream extends GZIPOutputStream {
-      
+      private static final int TRAILER_SIZE = 8;
+      public static final String JVMVendor = System.getProperty("java.vendor");
+      public static final String JVMVersion = System.getProperty("java.version");
+      private static final boolean HAS_BROKEN_FINISH =
+          (JVMVendor.contains("IBM") && JVMVersion.contains("1.6.0"));
+
       public ResetableGZIPOutputStream(OutputStream out) throws IOException {
         super(out);
       }
-      
+
       public void resetState() throws IOException {
         def.reset();
       }
+
+      /**
+       * Override finish() for HADOOP-8419.
+       * The IBM JDK implementation calls def.end(), which leaves the
+       * Deflater unusable when the stream is reset for reuse.
+       *
+       */
+      @Override
+      public void finish() throws IOException {
+        if (HAS_BROKEN_FINISH) {
+          if (!def.finished()) {
+            def.finish();
+            while (!def.finished()) {
+              int i = def.deflate(this.buf, 0, this.buf.length);
+              if ((def.finished()) && (i <= this.buf.length - TRAILER_SIZE)) {
+                writeTrailer(this.buf, i);
+                i += TRAILER_SIZE;
+                out.write(this.buf, 0, i);
+
+                return;
+              }
+              if (i > 0) {
+                out.write(this.buf, 0, i);
+              }
+            }
+
+            byte[] arrayOfByte = new byte[TRAILER_SIZE];
+            writeTrailer(arrayOfByte, 0);
+            out.write(arrayOfByte);
+          }
+        } else {
+          super.finish();
+        }
+      }
+
+      /** Re-implemented for HADOOP-8419 because the corresponding JDK method is not visible. */
+      private void writeTrailer(byte[] paramArrayOfByte, int paramInt)
+          throws IOException {
+        writeInt((int)this.crc.getValue(), paramArrayOfByte, paramInt);
+        writeInt(this.def.getTotalIn(), paramArrayOfByte, paramInt + 4);
+      }
+
+      /** Re-implemented for HADOOP-8419 because the corresponding JDK method is not visible. */
+      private void writeInt(int paramInt1, byte[] paramArrayOfByte, int paramInt2)
+          throws IOException {
+        writeShort(paramInt1 & 0xFFFF, paramArrayOfByte, paramInt2);
+        writeShort(paramInt1 >> 16 & 0xFFFF, paramArrayOfByte, paramInt2 + 2);
+      }
+
+      /** Re-implemented for HADOOP-8419 because the corresponding JDK method is not visible. */
+      private void writeShort(int paramInt1, byte[] paramArrayOfByte, int paramInt2)
+          throws IOException {
+        paramArrayOfByte[paramInt2] = (byte)(paramInt1 & 0xFF);
+        paramArrayOfByte[(paramInt2 + 1)] = (byte)(paramInt1 >> 8 & 0xFF);
+      }
     }
 
     public GzipOutputStream(OutputStream out) throws IOException {
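
For context on the code path this patch hardens: GzipCodec output streams are commonly finished and then reset for reuse, and on the affected IBM Java 6 runtimes GZIPOutputStream.finish() ends the underlying Deflater, so the subsequent reuse fails (HADOOP-8419 reports an NPE). The sketch below is not part of the patch; it is a minimal, hypothetical driver (the class name GzipReuseSketch is made up) that exercises the finish()/resetState() cycle through Hadoop's public compression API, assuming hadoop-common is on the classpath.

import java.io.ByteArrayOutputStream;
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.compress.CompressionOutputStream;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.util.ReflectionUtils;

public class GzipReuseSketch {
  public static void main(String[] args) throws IOException {
    // Instantiate the codec the way Hadoop normally does, with a Configuration.
    GzipCodec codec = ReflectionUtils.newInstance(GzipCodec.class, new Configuration());

    ByteArrayOutputStream sink = new ByteArrayOutputStream();
    CompressionOutputStream out = codec.createOutputStream(sink);

    // First gzip member: write some data, then finish the stream.
    out.write("first member".getBytes("UTF-8"));
    out.finish();

    // Reuse the same stream for a second member. Without the finish()
    // override in the patch, the broken IBM JDK 6 finish() has already
    // ended the Deflater, and this reuse is what HADOOP-8419 reports failing.
    out.resetState();
    out.write("second member".getBytes("UTF-8"));
    out.finish();
    out.close();

    System.out.println("compressed size: " + sink.size() + " bytes");
  }
}

The example deliberately uses only CompressionOutputStream methods (write, finish, resetState, close) so it goes through the same GzipOutputStream/ResetableGZIPOutputStream path patched above when the native zlib codec is not loaded.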