HADOOP-8419. Fixed GzipCode NPE reset for IBM JDK. (Yu Li via eyang)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1431739 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Eric Yang 2013-01-10 23:58:11 +00:00
parent be5509c537
commit a8d60f4190
2 changed files with 64 additions and 2 deletions

View File

@ -148,6 +148,8 @@ Trunk (Unreleased)
BUG FIXES
HADOOP-8419. Fixed GzipCode NPE reset for IBM JDK. (Yu Li via eyang)
HADOOP-9041. FsUrlStreamHandlerFactory could cause an infinite loop in
FileSystem initialization. (Yanbo Liang and Radim Kolar via llu)

View File

@ -40,6 +40,11 @@ public class GzipCodec extends DefaultCodec {
protected static class GzipOutputStream extends CompressorStream { protected static class GzipOutputStream extends CompressorStream {
private static class ResetableGZIPOutputStream extends GZIPOutputStream { private static class ResetableGZIPOutputStream extends GZIPOutputStream {
// GZIP trailer: 4-byte CRC-32 followed by 4-byte ISIZE (uncompressed length).
private static final int TRAILER_SIZE = 8;
// Use a "" default so an undefined system property cannot itself cause an
// NPE in the contains() checks below (this code exists to fix an NPE).
public static final String JVMVendor= System.getProperty("java.vendor", "");
public static final String JVMVersion= System.getProperty("java.version", "");
// IBM JDK 1.6.0's GZIPOutputStream.finish() calls def.end(), which breaks
// resetState() reuse; detect that JVM so finish() can work around it
// (HADOOP-8419).
private static final boolean HAS_BROKEN_FINISH =
(JVMVendor.contains("IBM") && JVMVersion.contains("1.6.0"));
public ResetableGZIPOutputStream(OutputStream out) throws IOException { public ResetableGZIPOutputStream(OutputStream out) throws IOException {
super(out); super(out);
@ -48,6 +53,61 @@ public class GzipCodec extends DefaultCodec {
public void resetState() throws IOException { public void resetState() throws IOException {
def.reset(); def.reset();
} }
/**
 * Override finish() for HADOOP-8419.
 * The IBM JDK 1.6.0 implementation of GZIPOutputStream.finish() calls
 * def.end(), which destroys the Deflater and causes an NPE when the
 * stream is reset (resetState()) for reuse. On affected JVMs we drain
 * the deflater and emit the 8-byte GZIP trailer ourselves, without
 * ending the deflater; otherwise we delegate to the JDK implementation.
 */
@Override
public void finish() throws IOException {
if (HAS_BROKEN_FINISH) {
// Only act if there is still compressed data pending in the deflater.
if (!def.finished()) {
def.finish();
// Drain remaining compressed output into the inherited buffer.
while (!def.finished()) {
int i = def.deflate(this.buf, 0, this.buf.length);
// If this is the final chunk and the trailer fits in the remaining
// buffer space, append the trailer and write everything in one call.
if ((def.finished()) && (i <= this.buf.length - TRAILER_SIZE)) {
writeTrailer(this.buf, i);
i += TRAILER_SIZE;
out.write(this.buf, 0, i);
return;
}
if (i > 0) {
out.write(this.buf, 0, i);
}
}
// Trailer did not fit alongside the last chunk: write it separately.
byte[] arrayOfByte = new byte[TRAILER_SIZE];
writeTrailer(arrayOfByte, 0);
out.write(arrayOfByte);
}
} else {
// This JVM's finish() is safe (does not end the deflater); use it.
super.finish();
}
}
/**
 * Writes the 8-byte GZIP trailer — the CRC-32 of the uncompressed data
 * followed by ISIZE (the uncompressed byte count) — into {@code b}
 * starting at {@code off}. Re-implemented for HADOOP-8419 because the
 * corresponding JDK method is not visible to subclasses.
 */
private void writeTrailer(byte[] b, int off)
throws IOException {
writeInt((int) this.crc.getValue(), b, off);
writeInt(this.def.getTotalIn(), b, off + 4);
}
/**
 * Stores the int {@code v} into {@code b} at {@code off} in little-endian
 * byte order, as required by the GZIP trailer. Re-implemented for
 * HADOOP-8419 because the corresponding JDK method is not visible.
 */
private void writeInt(int v, byte[] b, int off)
throws IOException {
writeShort(v & 0xFFFF, b, off);
writeShort((v >> 16) & 0xFFFF, b, off + 2);
}
/**
 * Stores the low 16 bits of {@code v} into {@code b} at {@code off} in
 * little-endian byte order. Re-implemented for HADOOP-8419 because the
 * corresponding JDK method is not visible. (The narrowing cast to byte
 * already truncates to the low 8 bits, so no explicit mask is needed.)
 */
private void writeShort(int v, byte[] b, int off)
throws IOException {
b[off] = (byte) v;
b[off + 1] = (byte) (v >> 8);
}
} }
public GzipOutputStream(OutputStream out) throws IOException { public GzipOutputStream(OutputStream out) throws IOException {