HADOOP-6669. Respect compression configuration when creating DefaultCodec instances. Contributed by Koji Noguchi.


git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@940989 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Christopher Douglas 2010-05-04 18:21:48 +00:00
parent 322f9ac7b1
commit 06a13750a5
3 changed files with 62 additions and 1 deletions

View File

@ -7,6 +7,9 @@ Trunk (unreleased changes)
HADOOP-6730. Bug in FileContext#copy and provide base class for FileContext
tests. (Ravi Phulari via jghoman)
HADOOP-6669. Respect compression configuration when creating DefaultCodec
instances. (Koji Noguchi via cdouglas)
Release 0.21.0 - Unreleased
INCOMPATIBLE CHANGES

View File

@ -86,7 +86,8 @@ public class ZlibFactory {
*/
public static Compressor getZlibCompressor(Configuration conf) {
return (isNativeZlibLoaded(conf)) ?
new ZlibCompressor() : new BuiltInZlibDeflater();
new ZlibCompressor(conf) :
new BuiltInZlibDeflater(ZlibFactory.getCompressionLevel(conf).compressionLevel());
}
/**

View File

@ -166,6 +166,8 @@ public class TestCodec {
RandomDatum v2 = new RandomDatum();
k2.readFields(inflateIn);
v2.readFields(inflateIn);
assertTrue("original and compressed-then-decompressed-output not equal",
k1.equals(k2) && v1.equals(v2));
}
LOG.info("SUCCESS! Completed checking " + count + " records");
}
@ -322,6 +324,61 @@ public class TestCodec {
outbytes.length >= b.length);
}
/**
 * Creates a compressor for {@code codecClass} with NO_COMPRESSION set in
 * {@code conf} and verifies that the output is not compressed by comparing
 * its size against the original input.
 *
 * @param conf configuration to carry the NO_COMPRESSION level
 * @param codecClass fully-qualified codec class name to instantiate
 * @throws IOException if the codec class cannot be found or a stream fails
 */
private static void codecTestWithNOCompression (Configuration conf,
    String codecClass) throws IOException {
  // Create a compressor with NO_COMPRESSION and make sure that
  // output is not compressed by comparing the size with the
  // original input
  CompressionCodec codec = null;
  ZlibFactory.setCompressionLevel(conf, CompressionLevel.NO_COMPRESSION);
  try {
    codec = (CompressionCodec)
      ReflectionUtils.newInstance(conf.getClassByName(codecClass), conf);
  } catch (ClassNotFoundException cnfe) {
    // Preserve the cause so the offending class name and original
    // stack trace are not lost (previously the cause was dropped).
    throw new IOException("Illegal codec!", cnfe);
  }
  Compressor c = codec.createCompressor();
  // ensure same compressor placed earlier
  ByteArrayOutputStream bos = new ByteArrayOutputStream();
  CompressionOutputStream cos = null;
  // write trivially compressable data: 32 KB of a single repeated byte
  byte[] b = new byte[1 << 15];
  Arrays.fill(b, (byte) 43);
  try {
    cos = codec.createOutputStream(bos, c);
    cos.write(b);
  } finally {
    if (cos != null) {
      cos.close();
    }
  }
  byte[] outbytes = bos.toByteArray();
  // verify data were not compressed: with NO_COMPRESSION a deflate
  // stream is at least as large as its input
  assertTrue("Compressed bytes contrary to configuration(NO_COMPRESSION)",
    outbytes.length >= b.length);
}
@Test
public void testCodecInitWithCompressionLevel() throws Exception {
  // First pass: exercise the native-backed codecs, but only when the
  // native zlib library is actually loadable in this environment.
  Configuration conf = new Configuration();
  conf.setBoolean("io.native.lib.available", true);
  if (!ZlibFactory.isNativeZlibLoaded(conf)) {
    LOG.warn("testCodecInitWithCompressionLevel for native skipped"
        + ": native libs not loaded");
  } else {
    LOG.info("testCodecInitWithCompressionLevel with native");
    codecTestWithNOCompression(conf,
        "org.apache.hadoop.io.compress.GzipCodec");
    codecTestWithNOCompression(conf,
        "org.apache.hadoop.io.compress.DefaultCodec");
  }
  // Second pass: the pure-Java DefaultCodec must also honor the
  // configured compression level.
  conf = new Configuration();
  conf.setBoolean("io.native.lib.available", false);
  codecTestWithNOCompression(conf,
      "org.apache.hadoop.io.compress.DefaultCodec");
}
@Test
public void testCodecPoolCompressorReinit() throws Exception {
Configuration conf = new Configuration();