HADOOP-11627. Remove io.native.lib.available. Contributed by Brahma Reddy Battula.

This commit is contained in:
Akira Ajisaka 2015-04-24 08:08:55 +09:00
parent ef4e9963b2
commit ac281e3fc8
12 changed files with 86 additions and 115 deletions

View File

@ -23,6 +23,9 @@ Trunk (Unreleased)
HADOOP-11731. Rework the changelog and releasenotes (aw)
HADOOP-11627. Remove io.native.lib.available.
(Brahma Reddy Battula via aajisaka)
NEW FEATURES
HADOOP-6590. Add a username check for hadoop sub-commands (John Smith via

View File

@ -447,8 +447,6 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
CommonConfigurationKeys.NET_TOPOLOGY_NODE_SWITCH_MAPPING_IMPL_KEY),
new DeprecationDelta("dfs.df.interval",
CommonConfigurationKeys.FS_DF_INTERVAL_KEY),
new DeprecationDelta("hadoop.native.lib",
CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY),
new DeprecationDelta("fs.default.name",
CommonConfigurationKeys.FS_DEFAULT_NAME_KEY),
new DeprecationDelta("dfs.umaskmode",

View File

@ -38,11 +38,6 @@ public class CommonConfigurationKeysPublic {
// The Keys
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String IO_NATIVE_LIB_AVAILABLE_KEY =
"io.native.lib.available";
/** Default value for IO_NATIVE_LIB_AVAILABLE_KEY */
public static final boolean IO_NATIVE_LIB_AVAILABLE_DEFAULT = true;
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String NET_TOPOLOGY_SCRIPT_NUMBER_ARGS_KEY =
"net.topology.script.number.args";
/** Default value for NET_TOPOLOGY_SCRIPT_NUMBER_ARGS_KEY */

View File

@ -21,15 +21,9 @@ package org.apache.hadoop.io.compress.bzip2;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.util.NativeCodeLoader;
import org.apache.hadoop.io.compress.Compressor;
import org.apache.hadoop.io.compress.Decompressor;
import org.apache.hadoop.io.compress.bzip2.Bzip2Compressor;
import org.apache.hadoop.util.NativeCodeLoader;
import org.apache.hadoop.io.compress.bzip2.Bzip2Decompressor;
import org.apache.hadoop.io.compress.bzip2.BZip2DummyCompressor;
import org.apache.hadoop.io.compress.bzip2.BZip2DummyDecompressor;
/**
 * A collection of factories to create the right
@ -58,10 +52,7 @@ public class Bzip2Factory {
bzip2LibraryName = libname;
if (libname.equals("java-builtin")) {
LOG.info("Using pure-Java version of bzip2 library");
} else if (conf.getBoolean(
} else if (NativeCodeLoader.isNativeCodeLoaded()) {
CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY,
CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_DEFAULT) &&
NativeCodeLoader.isNativeCodeLoaded()) {
try {
// Initialize the native library.
Bzip2Compressor.initSymbols(libname);

View File

@ -27,7 +27,8 @@ import org.apache.hadoop.io.compress.DirectDecompressor;
import org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionLevel;
import org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionStrategy;
import org.apache.hadoop.util.NativeCodeLoader;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import com.google.common.annotations.VisibleForTesting;
/**
 * A collection of factories to create the right
@ -41,6 +42,15 @@ public class ZlibFactory {
private static boolean nativeZlibLoaded = false;
static {
loadNativeZLib();
}
/**
 * Load native library and set the flag whether to use native library. The
 * method is also used for reset the flag modified by setNativeZlibLoaded
 */
@VisibleForTesting
public static void loadNativeZLib() {
if (NativeCodeLoader.isNativeCodeLoaded()) {
nativeZlibLoaded = ZlibCompressor.isNativeZlibLoaded() &&
ZlibDecompressor.isNativeZlibLoaded();
@ -53,6 +63,15 @@ public class ZlibFactory {
}
}
/**
* Set the flag whether to use native library. Used for testing non-native
* libraries
*
*/
@VisibleForTesting
public static void setNativeZlibLoaded(final boolean isLoaded) {
ZlibFactory.nativeZlibLoaded = isLoaded;
}
/**
 * Check if native-zlib code is loaded & initialized correctly and
 * can be loaded for this job.
@ -62,9 +81,7 @@ public class ZlibFactory {
 * and can be loaded for this job, else <code>false</code>
 */
public static boolean isNativeZlibLoaded(Configuration conf) {
return nativeZlibLoaded && conf.getBoolean(
return nativeZlibLoaded;
CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY,
CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_DEFAULT);
}
public static String getLibraryName() {

View File

@ -22,8 +22,6 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
/**
 * A helper to load the native hadoop code i.e. libhadoop.so.
@ -86,28 +84,4 @@ public class NativeCodeLoader {
public static native String getLibraryName();
/**
* Return if native hadoop libraries, if present, can be used for this job.
* @param conf configuration
*
* @return <code>true</code> if native hadoop libraries, if present, can be
* used for this job; <code>false</code> otherwise.
*/
public boolean getLoadNativeLibraries(Configuration conf) {
return conf.getBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY,
CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_DEFAULT);
}
/**
* Set if native hadoop libraries, if present, can be used for this job.
*
* @param conf configuration
* @param loadNativeLibraries can native hadoop libraries be loaded
*/
public void setLoadNativeLibraries(Configuration conf,
boolean loadNativeLibraries) {
conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY,
loadNativeLibraries);
}
}

View File

@ -38,15 +38,6 @@
<description>A base for other temporary directories.</description>
</property>
<property>
<name>io.native.lib.available</name>
<value>true</value>
<description>Controls whether to use native libraries for bz2 and zlib
compression codecs or not. The property does not control any other native
libraries.
</description>
</property>
<property>
<name>hadoop.http.filter.initializers</name>
<value>org.apache.hadoop.http.lib.StaticUserWebFilter</value>

View File

@ -283,5 +283,6 @@ The following table lists additional changes to some configuration properties:
|:---- |:---- |
| mapred.create.symlink | NONE - symlinking is always on |
| mapreduce.job.cache.symlink.create | NONE - symlinking is always on |
| io.native.lib.available | NONE - Always use native libraries if available. |

View File

@ -17,6 +17,14 @@
*/
package org.apache.hadoop.io.compress;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.junit.Assume.assumeTrue;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
@ -40,6 +48,9 @@ import java.util.Random;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.fs.FileStatus;
@ -51,9 +62,10 @@ import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.MapFile;
import org.apache.hadoop.io.RandomDatum;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.SequenceFile.CompressionType;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.SequenceFile.CompressionType;
import org.apache.hadoop.io.compress.bzip2.Bzip2Factory;
import org.apache.hadoop.io.compress.zlib.BuiltInGzipDecompressor;
import org.apache.hadoop.io.compress.zlib.BuiltInZlibDeflater;
import org.apache.hadoop.io.compress.zlib.BuiltInZlibInflater;
@ -61,20 +73,13 @@ import org.apache.hadoop.io.compress.zlib.ZlibCompressor;
import org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionLevel;
import org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionStrategy;
import org.apache.hadoop.io.compress.zlib.ZlibFactory;
import org.apache.hadoop.io.compress.bzip2.Bzip2Factory;
import org.apache.hadoop.util.LineReader;
import org.apache.hadoop.util.NativeCodeLoader;
import org.apache.hadoop.util.ReflectionUtils;
import org.junit.After;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.junit.Assert;
import org.junit.Assume;
import org.junit.Test;
import static org.junit.Assert.*;
import static org.junit.Assume.*;
public class TestCodec {
@ -84,6 +89,10 @@ public class TestCodec {
private int count = 10000;
private int seed = new Random().nextInt();
@After
public void after() {
ZlibFactory.loadNativeZLib();
}
@Test
public void testDefaultCodec() throws IOException {
codecTest(conf, seed, 0, "org.apache.hadoop.io.compress.DefaultCodec");
@ -364,7 +373,6 @@ public class TestCodec {
@Test
public void testCodecPoolGzipReuse() throws Exception {
Configuration conf = new Configuration();
conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, true);
assumeTrue(ZlibFactory.isNativeZlibLoaded(conf));
GzipCodec gzc = ReflectionUtils.newInstance(GzipCodec.class, conf);
DefaultCodec dfc = ReflectionUtils.newInstance(DefaultCodec.class, conf);
@ -446,7 +454,6 @@ public class TestCodec {
@Test
public void testCodecInitWithCompressionLevel() throws Exception {
Configuration conf = new Configuration();
conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, true);
if (ZlibFactory.isNativeZlibLoaded(conf)) {
LOG.info("testCodecInitWithCompressionLevel with native");
codecTestWithNOCompression(conf,
@ -458,7 +465,8 @@ public class TestCodec {
+ ": native libs not loaded");
}
conf = new Configuration();
conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, false);
// don't use native libs
ZlibFactory.setNativeZlibLoaded(false);
codecTestWithNOCompression( conf,
"org.apache.hadoop.io.compress.DefaultCodec");
}
@ -466,14 +474,14 @@ public class TestCodec {
@Test
public void testCodecPoolCompressorReinit() throws Exception {
Configuration conf = new Configuration();
conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, true);
if (ZlibFactory.isNativeZlibLoaded(conf)) {
GzipCodec gzc = ReflectionUtils.newInstance(GzipCodec.class, conf);
gzipReinitTest(conf, gzc);
} else {
LOG.warn("testCodecPoolCompressorReinit skipped: native libs not loaded");
}
conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, false);
// don't use native libs
ZlibFactory.setNativeZlibLoaded(false);
DefaultCodec dfc = ReflectionUtils.newInstance(DefaultCodec.class, conf);
gzipReinitTest(conf, dfc);
}
@ -660,7 +668,8 @@ public class TestCodec {
gzbuf.reset(dflbuf.getData(), dflbuf.getLength());
Configuration conf = new Configuration();
conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, false);
// don't use native libs
ZlibFactory.setNativeZlibLoaded(false);
CompressionCodec codec = ReflectionUtils.newInstance(GzipCodec.class, conf);
Decompressor decom = codec.createDecompressor();
assertNotNull(decom);
@ -713,14 +722,14 @@ public class TestCodec {
@Test
public void testBuiltInGzipConcat() throws IOException {
Configuration conf = new Configuration();
conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, false);
// don't use native libs
ZlibFactory.setNativeZlibLoaded(false);
GzipConcatTest(conf, BuiltInGzipDecompressor.class);
}
@Test
public void testNativeGzipConcat() throws IOException {
Configuration conf = new Configuration();
conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, true);
assumeTrue(ZlibFactory.isNativeZlibLoaded(conf));
GzipConcatTest(conf, GzipCodec.GzipZlibDecompressor.class);
}
@ -732,10 +741,7 @@ public class TestCodec {
// Don't use native libs for this test.
Configuration conf = new Configuration();
conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, false);
ZlibFactory.setNativeZlibLoaded(false);
assertFalse("ZlibFactory is using native libs against request",
ZlibFactory.isNativeZlibLoaded(conf));
// Ensure that the CodecPool has a BuiltInZlibInflater in it.
Decompressor zlibDecompressor = ZlibFactory.getZlibDecompressor(conf);
assertNotNull("zlibDecompressor is null!", zlibDecompressor);
@ -784,7 +790,7 @@ public class TestCodec {
// Don't use native libs for this test.
Configuration conf = new Configuration();
conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, false);
ZlibFactory.setNativeZlibLoaded(false);
assertFalse("ZlibFactory is using native libs against request",
ZlibFactory.isNativeZlibLoaded(conf));
@ -833,7 +839,6 @@ public class TestCodec {
// Use native libs per the parameter
Configuration conf = new Configuration();
conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, useNative);
if (useNative) {
assumeTrue(ZlibFactory.isNativeZlibLoaded(conf));
} else {
@ -887,6 +892,8 @@ public class TestCodec {
@Test
public void testGzipCodecWriteJava() throws IOException {
// don't use native libs
ZlibFactory.setNativeZlibLoaded(false);
testGzipCodecWrite(false);
}
@ -901,8 +908,7 @@ public class TestCodec {
// Don't use native libs for this test.
Configuration conf = new Configuration();
conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY,
false);
ZlibFactory.setNativeZlibLoaded(false);
assertFalse("ZlibFactory is using native libs against request",
ZlibFactory.isNativeZlibLoaded(conf));

View File

@ -100,7 +100,6 @@ public class TestZlibCompressorDecompressor {
@Test
public void testZlibCompressorDecompressorWithConfiguration() {
Configuration conf = new Configuration();
conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, true);
if (ZlibFactory.isNativeZlibLoaded(conf)) {
byte[] rawData;
int tryNumber = 5;
@ -214,7 +213,6 @@ public class TestZlibCompressorDecompressor {
@Test
public void testZlibCompressorDecompressorSetDictionary() {
Configuration conf = new Configuration();
conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, true);
if (ZlibFactory.isNativeZlibLoaded(conf)) {
Compressor zlibCompressor = ZlibFactory.getZlibCompressor(conf);
Decompressor zlibDecompressor = ZlibFactory.getZlibDecompressor(conf);

View File

@ -247,8 +247,6 @@ public class TestTFileSeqFileComparison extends TestCase {
public SeqFileAppendable(FileSystem fs, Path path, int osBufferSize,
String compress, int minBlkSize) throws IOException {
Configuration conf = new Configuration();
conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY,
true);
CompressionCodec codec = null;
if ("lzo".equals(compress)) {

View File

@ -18,30 +18,34 @@
package org.apache.hadoop.mapred;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.io.ByteArrayInputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.zip.Inflater;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.compress.*;
import org.apache.hadoop.util.LineReader;
import org.apache.hadoop.util.ReflectionUtils;
import org.junit.Ignore;
import org.junit.Test;
import static org.junit.Assert.*;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.compress.BZip2Codec;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionInputStream;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.io.compress.zlib.ZlibFactory;
import org.apache.hadoop.util.LineReader;
import org.apache.hadoop.util.ReflectionUtils;
import org.junit.After;
import org.junit.Ignore;
import org.junit.Test;
@Ignore
public class TestConcatenatedCompressedInput {
private static final Log LOG =
@ -76,6 +80,10 @@ public class TestConcatenatedCompressedInput {
}
}
@After
public void after() {
ZlibFactory.loadNativeZLib();
}
private static Path workDir =
new Path(new Path(System.getProperty("test.build.data", "/tmp")),
"TestConcatenatedCompressedInput").makeQualified(localFs);
@ -302,12 +310,12 @@ public class TestConcatenatedCompressedInput {
@Test
public void testBuiltInGzipDecompressor() throws IOException {
JobConf jobConf = new JobConf(defaultConf);
jobConf.setBoolean("io.native.lib.available", false);
CompressionCodec gzip = new GzipCodec();
ReflectionUtils.setConf(gzip, jobConf);
localFs.delete(workDir, true);
// Don't use native libs for this test
ZlibFactory.setNativeZlibLoaded(false);
assertEquals("[non-native (Java) codec]",
org.apache.hadoop.io.compress.zlib.BuiltInGzipDecompressor.class,
gzip.getDecompressorType());
@ -351,9 +359,7 @@ public class TestConcatenatedCompressedInput {
assertEquals("total uncompressed lines in concatenated test file",
84, lineNum);
// test BuiltInGzipDecompressor with lots of different input-buffer sizes
ZlibFactory.loadNativeZLib();
doMultipleGzipBufferSizes(jobConf, false);
// test GzipZlibDecompressor (native), just to be sure
// (FIXME? could move this call to testGzip(), but would need filename
// setup above) (alternatively, maybe just nuke testGzip() and extend this?)
@ -370,7 +376,6 @@ public class TestConcatenatedCompressedInput {
(useNative? "GzipZlibDecompressor" : "BuiltInGzipDecompressor") +
COLOR_NORMAL);
jConf.setBoolean("io.native.lib.available", useNative);
int bufferSize;
@ -575,23 +580,17 @@ public class TestConcatenatedCompressedInput {
*/ */
// test CBZip2InputStream with lots of different input-buffer sizes
doMultipleBzip2BufferSizes(jobConf, false);
doMultipleBzip2BufferSizes(jobConf);
// no native version of bzip2 codec (yet?)
//doMultipleBzip2BufferSizes(jobConf, true);
}
// this tests either the native or the non-native gzip decoder with more than
// this tests native bzip2 decoder with more than
// three dozen input-buffer sizes in order to try to catch any parser/state-
// machine errors at buffer boundaries
private static void doMultipleBzip2BufferSizes(JobConf jConf,
boolean useNative)
private static void doMultipleBzip2BufferSizes(JobConf jConf)
throws IOException {
System.out.println(COLOR_MAGENTA + "doMultipleBzip2BufferSizes() using " +
"default bzip2 decompressor" + COLOR_NORMAL);
jConf.setBoolean("io.native.lib.available", useNative);
int bufferSize;
// ideally would add some offsets/shifts in here (e.g., via extra header