HADOOP-8900. Merging change r1399377 from trunk.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1399393 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Suresh Srinivas 2012-10-17 19:01:30 +00:00
parent 32b27ebec1
commit 0f49f79ceb
4 changed files with 64 additions and 4 deletions

View File

@ -89,10 +89,21 @@ Release 2.0.3-alpha - Unreleased
HADOOP-8881. FileBasedKeyStoresFactory initialization logging should be debug not info. (tucu)
HADOOP-8913. hadoop-metrics2.properties should give units in comment
for sampling period. (Sandy Ryza via suresh)
HADOOP-8878. Uppercase namenode hostname causes hadoop dfs calls with
webhdfs filesystem and fsck to fail when security is on.
(Arpit Gupta via suresh)
HADOOP-8901. GZip and Snappy support may not work without unversioned
libraries (Colin Patrick McCabe via todd)
HADOOP-8883. Anonymous fallback in KerberosAuthenticator is broken.
(rkanter via tucu)
HADOOP-8900. BuiltInGzipDecompressor throws IOException - stored gzip size
doesn't match decompressed size. (Slavik Krassovsky via suresh)
Release 2.0.2-alpha - 2012-09-07

View File

@ -387,7 +387,7 @@ public class BuiltInGzipDecompressor implements Decompressor {
copyBytesToLocal(n); // modifies userBufLen, etc. copyBytesToLocal(n); // modifies userBufLen, etc.
if (localBufOff >= 4) { // should be strictly == if (localBufOff >= 4) { // should be strictly ==
long inputSize = readUIntLE(localBuf, 0); long inputSize = readUIntLE(localBuf, 0);
if (inputSize != (inflater.getBytesWritten() & 0xffffffff)) { if (inputSize != (inflater.getBytesWritten() & 0xffffffffL)) {
throw new IOException( throw new IOException(
"stored gzip size doesn't match decompressed size"); "stored gzip size doesn't match decompressed size");
} }
@ -571,7 +571,7 @@ public class BuiltInGzipDecompressor implements Decompressor {
return ((((long)(b[off+3] & 0xff) << 24) | return ((((long)(b[off+3] & 0xff) << 24) |
((long)(b[off+2] & 0xff) << 16) | ((long)(b[off+2] & 0xff) << 16) |
((long)(b[off+1] & 0xff) << 8) | ((long)(b[off+1] & 0xff) << 8) |
((long)(b[off] & 0xff) )) & 0xffffffff); ((long)(b[off] & 0xff) )) & 0xffffffffL);
} }
} }

View File

@ -719,6 +719,55 @@ public class TestCodec {
} }
} }
@Test
public void testGzipLongOverflow() throws IOException {
LOG.info("testGzipLongOverflow");
// Don't use native libs for this test.
Configuration conf = new Configuration();
conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, false);
assertFalse("ZlibFactory is using native libs against request",
ZlibFactory.isNativeZlibLoaded(conf));
// Ensure that the CodecPool has a BuiltInZlibInflater in it.
Decompressor zlibDecompressor = ZlibFactory.getZlibDecompressor(conf);
assertNotNull("zlibDecompressor is null!", zlibDecompressor);
assertTrue("ZlibFactory returned unexpected inflator",
zlibDecompressor instanceof BuiltInZlibInflater);
CodecPool.returnDecompressor(zlibDecompressor);
// Now create a GZip text file.
String tmpDir = System.getProperty("test.build.data", "/tmp/");
Path f = new Path(new Path(tmpDir), "testGzipLongOverflow.bin.gz");
BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(
new GZIPOutputStream(new FileOutputStream(f.toString()))));
final int NBUF = 1024 * 4 + 1;
final char[] buf = new char[1024 * 1024];
for (int i = 0; i < buf.length; i++) buf[i] = '\0';
for (int i = 0; i < NBUF; i++) {
bw.write(buf);
}
bw.close();
// Now read it back, using the CodecPool to establish the
// decompressor to use.
CompressionCodecFactory ccf = new CompressionCodecFactory(conf);
CompressionCodec codec = ccf.getCodec(f);
Decompressor decompressor = CodecPool.getDecompressor(codec);
FileSystem fs = FileSystem.getLocal(conf);
InputStream is = fs.open(f);
is = codec.createInputStream(is, decompressor);
BufferedReader br = new BufferedReader(new InputStreamReader(is));
for (int j = 0; j < NBUF; j++) {
int n = br.read(buf);
assertEquals("got wrong read length!", n, buf.length);
for (int i = 0; i < buf.length; i++)
assertEquals("got wrong byte!", buf[i], '\0');
}
br.close();
}
public void testGzipCodecWrite(boolean useNative) throws IOException { public void testGzipCodecWrite(boolean useNative) throws IOException {
// Create a gzipped file using a compressor from the CodecPool, // Create a gzipped file using a compressor from the CodecPool,
// and try to read it back via the regular GZIPInputStream. // and try to read it back via the regular GZIPInputStream.

View File

@ -141,7 +141,7 @@ public class TestVLong extends TestCase {
int shift = rng.nextInt(Long.SIZE) + 1; int shift = rng.nextInt(Long.SIZE) + 1;
long mask = (1L << shift) - 1; long mask = (1L << shift) - 1;
long a = ((long) rng.nextInt()) << 32; long a = ((long) rng.nextInt()) << 32;
long b = ((long) rng.nextInt()) & 0xffffffff; long b = ((long) rng.nextInt()) & 0xffffffffL;
data[i] = (a + b) & mask; data[i] = (a + b) & mask;
} }