diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index cdaac9ce0be..2d41e57399d 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -89,10 +89,21 @@ Release 2.0.3-alpha - Unreleased
     HADOOP-8881. FileBasedKeyStoresFactory initialization logging should
     be debug not info. (tucu)
 
+    HADOOP-8913. hadoop-metrics2.properties should give units in comment
+    for sampling period. (Sandy Ryza via suresh)
+
+    HADOOP-8878. Uppercase namenode hostname causes hadoop dfs calls with
+    webhdfs filesystem and fsck to fail when security is on.
+    (Arpit Gupta via suresh)
+
     HADOOP-8901. GZip and Snappy support may not work without unversioned
     libraries (Colin Patrick McCabe via todd)
 
-    HADOOP-8883. Anonymous fallback in KerberosAuthenticator is broken. (rkanter via tucu)
+    HADOOP-8883. Anonymous fallback in KerberosAuthenticator is broken.
+    (rkanter via tucu)
+
+    HADOOP-8900. BuiltInGzipDecompressor throws IOException - stored gzip size
+    doesn't match decompressed size. (Slavik Krassovsky via suresh)
 
 Release 2.0.2-alpha - 2012-09-07
 
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInGzipDecompressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInGzipDecompressor.java
index 41f8036fda4..38b8895c6d0 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInGzipDecompressor.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/BuiltInGzipDecompressor.java
@@ -387,7 +387,7 @@ public class BuiltInGzipDecompressor implements Decompressor {
         copyBytesToLocal(n);  // modifies userBufLen, etc.
         if (localBufOff >= 4) {  // should be strictly ==
           long inputSize = readUIntLE(localBuf, 0);
-          if (inputSize != (inflater.getBytesWritten() & 0xffffffff)) {
+          if (inputSize != (inflater.getBytesWritten() & 0xffffffffL)) {
             throw new IOException(
                 "stored gzip size doesn't match decompressed size");
           }
@@ -571,7 +571,7 @@ public class BuiltInGzipDecompressor implements Decompressor {
     return ((((long)(b[off+3] & 0xff) << 24) |
              ((long)(b[off+2] & 0xff) << 16) |
              ((long)(b[off+1] & 0xff) <<  8) |
-             ((long)(b[off]   & 0xff)      )) & 0xffffffff);
+             ((long)(b[off]   & 0xff)      )) & 0xffffffffL);
   }
 }
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java
index 474bfe1d6c2..a5c6c617283 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java
@@ -719,6 +719,55 @@ public class TestCodec {
     }
   }
 
+  @Test
+  public void testGzipLongOverflow() throws IOException {
+    LOG.info("testGzipLongOverflow");
+
+    // Don't use native libs for this test.
+    Configuration conf = new Configuration();
+    conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, false);
+    assertFalse("ZlibFactory is using native libs against request",
+        ZlibFactory.isNativeZlibLoaded(conf));
+
+    // Ensure that the CodecPool has a BuiltInZlibInflater in it.
+    Decompressor zlibDecompressor = ZlibFactory.getZlibDecompressor(conf);
+    assertNotNull("zlibDecompressor is null!", zlibDecompressor);
+    assertTrue("ZlibFactory returned unexpected inflator",
+        zlibDecompressor instanceof BuiltInZlibInflater);
+    CodecPool.returnDecompressor(zlibDecompressor);
+
+    // Now create a GZip text file.
+    String tmpDir = System.getProperty("test.build.data", "/tmp/");
+    Path f = new Path(new Path(tmpDir), "testGzipLongOverflow.bin.gz");
+    BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(
+        new GZIPOutputStream(new FileOutputStream(f.toString()))));
+
+    final int NBUF = 1024 * 4 + 1;  // 4097 MiB of output: just past 2^32 bytes
+    final char[] buf = new char[1024 * 1024];
+    for (int i = 0; i < buf.length; i++) buf[i] = '\0';
+    for (int i = 0; i < NBUF; i++) {
+      bw.write(buf);
+    }
+    bw.close();
+
+    // Now read it back, using the CodecPool to establish the
+    // decompressor to use.
+    CompressionCodecFactory ccf = new CompressionCodecFactory(conf);
+    CompressionCodec codec = ccf.getCodec(f);
+    Decompressor decompressor = CodecPool.getDecompressor(codec);
+    FileSystem fs = FileSystem.getLocal(conf);
+    InputStream is = fs.open(f);
+    is = codec.createInputStream(is, decompressor);
+    BufferedReader br = new BufferedReader(new InputStreamReader(is));
+    for (int j = 0; j < NBUF; j++) {
+      int n = br.read(buf);
+      assertEquals("got wrong read length!", buf.length, n);
+      for (int i = 0; i < buf.length; i++)
+        assertEquals("got wrong byte!", '\0', buf[i]);
+    }
+    br.close();
+  }
+
   public void testGzipCodecWrite(boolean useNative) throws IOException {
     // Create a gzipped file using a compressor from the CodecPool,
     // and try to read it back via the regular GZIPInputStream.
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestVLong.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestVLong.java
index b71adac0565..34dcaa16d07 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestVLong.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestVLong.java
@@ -141,7 +141,7 @@ public class TestVLong extends TestCase {
       int shift = rng.nextInt(Long.SIZE) + 1;
       long mask = (1L << shift) - 1;
       long a = ((long) rng.nextInt()) << 32;
-      long b = ((long) rng.nextInt()) & 0xffffffff;
+      long b = ((long) rng.nextInt()) & 0xffffffffL;
       data[i] = (a + b) & mask;
     }
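
Note on the fix: all three hunks correct the same Java pitfall. The literal 0xffffffff has type int and value -1; when it is the right operand of '&' against a long (such as the return value of Inflater.getBytesWritten()), numeric promotion sign-extends it to all ones, so the mask removes nothing. The gzip trailer's ISIZE field stores the uncompressed length modulo 2^32, so for streams of 4 GiB or more the unmasked 64-bit counter can never equal it, and BuiltInGzipDecompressor threw the spurious "stored gzip size doesn't match decompressed size" error. The L suffix makes the literal a long with only the low 32 bits set, truncating correctly. A standalone sketch, separate from the patch (class name hypothetical), showing the difference:

    public class UIntMaskDemo {
      public static void main(String[] args) {
        // Simulate Inflater.getBytesWritten() for a stream just over 4 GiB.
        long bytesWritten = (1L << 32) + 0x1234;

        // int literal 0xffffffff == -1; promoted to long for '&', it
        // sign-extends to all ones, so nothing is masked off.
        long buggy = bytesWritten & 0xffffffff;    // still 0x100001234

        // long literal keeps only the low 32 bits, matching gzip's
        // ISIZE semantics (uncompressed length mod 2^32).
        long fixed = bytesWritten & 0xffffffffL;   // 0x1234

        System.out.printf("buggy = 0x%x%n", buggy);
        System.out.printf("fixed = 0x%x%n", fixed);
      }
    }

The TestVLong hunk is the same bug in another guise: casting a negative int to long sign-extends into the high 32 bits, and only the L-suffixed mask clears them so that b is a genuine unsigned 32-bit value.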