HADOOP-11627. Remove io.native.lib.available. Contributed by Brahma Reddy Battula.

Akira Ajisaka 2015-04-24 08:08:55 +09:00
parent ef4e9963b2
commit ac281e3fc8
12 changed files with 86 additions and 115 deletions

View File

@ -23,6 +23,9 @@ Trunk (Unreleased)
HADOOP-11731. Rework the changelog and releasenotes (aw)
HADOOP-11627. Remove io.native.lib.available.
(Brahma Reddy Battula via aajisaka)
NEW FEATURES
HADOOP-6590. Add a username check for hadoop sub-commands (John Smith via

View File

@ -447,8 +447,6 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
CommonConfigurationKeys.NET_TOPOLOGY_NODE_SWITCH_MAPPING_IMPL_KEY),
new DeprecationDelta("dfs.df.interval",
CommonConfigurationKeys.FS_DF_INTERVAL_KEY),
new DeprecationDelta("hadoop.native.lib",
CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY),
new DeprecationDelta("fs.default.name",
CommonConfigurationKeys.FS_DEFAULT_NAME_KEY),
new DeprecationDelta("dfs.umaskmode",

View File

@ -38,11 +38,6 @@ public class CommonConfigurationKeysPublic {
// The Keys
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String IO_NATIVE_LIB_AVAILABLE_KEY =
"io.native.lib.available";
/** Default value for IO_NATIVE_LIB_AVAILABLE_KEY */
public static final boolean IO_NATIVE_LIB_AVAILABLE_DEFAULT = true;
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String NET_TOPOLOGY_SCRIPT_NUMBER_ARGS_KEY =
"net.topology.script.number.args";
/** Default value for NET_TOPOLOGY_SCRIPT_NUMBER_ARGS_KEY */

View File

@ -21,15 +21,9 @@ package org.apache.hadoop.io.compress.bzip2;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.util.NativeCodeLoader;
import org.apache.hadoop.io.compress.Compressor;
import org.apache.hadoop.io.compress.Decompressor;
import org.apache.hadoop.io.compress.bzip2.Bzip2Compressor;
import org.apache.hadoop.io.compress.bzip2.Bzip2Decompressor;
import org.apache.hadoop.io.compress.bzip2.BZip2DummyCompressor;
import org.apache.hadoop.io.compress.bzip2.BZip2DummyDecompressor;
import org.apache.hadoop.util.NativeCodeLoader;
/**
* A collection of factories to create the right
@ -58,10 +52,7 @@ public class Bzip2Factory {
bzip2LibraryName = libname;
if (libname.equals("java-builtin")) {
LOG.info("Using pure-Java version of bzip2 library");
} else if (conf.getBoolean(
CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY,
CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_DEFAULT) &&
NativeCodeLoader.isNativeCodeLoaded()) {
} else if (NativeCodeLoader.isNativeCodeLoaded()) {
try {
// Initialize the native library.
Bzip2Compressor.initSymbols(libname);
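After this change the only switches left for bzip2 are the configured library name and whether libhadoop is actually loadable. A minimal sketch, assuming the existing "io.compression.codec.bzip2.library" key read by Bzip2Factory keeps its current meaning (the class name below is illustrative):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.compress.bzip2.Bzip2Factory;

public class Bzip2SelectionSketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // Assumed key: "java-builtin" selects the pure-Java bzip2 implementation,
    // bypassing libhadoop entirely.
    conf.set("io.compression.codec.bzip2.library", "java-builtin");
    // With io.native.lib.available gone, this now depends only on the library
    // name above and on NativeCodeLoader.isNativeCodeLoaded().
    System.out.println("native bzip2: " + Bzip2Factory.isNativeBzip2Loaded(conf));
  }
}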

View File

@ -27,7 +27,8 @@ import org.apache.hadoop.io.compress.DirectDecompressor;
import org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionLevel;
import org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionStrategy;
import org.apache.hadoop.util.NativeCodeLoader;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import com.google.common.annotations.VisibleForTesting;
/**
* A collection of factories to create the right
@ -41,6 +42,15 @@ public class ZlibFactory {
private static boolean nativeZlibLoaded = false;
static {
loadNativeZLib();
}
/**
* Load the native library and set the flag that records whether it can be
* used. Also used to reset the flag after it has been modified by
* setNativeZlibLoaded.
*/
@VisibleForTesting
public static void loadNativeZLib() {
if (NativeCodeLoader.isNativeCodeLoaded()) {
nativeZlibLoaded = ZlibCompressor.isNativeZlibLoaded() &&
ZlibDecompressor.isNativeZlibLoaded();
@ -53,6 +63,15 @@ public class ZlibFactory {
}
}
/**
* Set the flag that records whether the native library is to be used. Used
* for testing the non-native code paths.
*/
@VisibleForTesting
public static void setNativeZlibLoaded(final boolean isLoaded) {
ZlibFactory.nativeZlibLoaded = isLoaded;
}
/**
* Check if native-zlib code is loaded & initialized correctly and
* can be loaded for this job.
@ -62,9 +81,7 @@ public class ZlibFactory {
* and can be loaded for this job, else <code>false</code>
*/
public static boolean isNativeZlibLoaded(Configuration conf) {
return nativeZlibLoaded && conf.getBoolean(
CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY,
CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_DEFAULT);
return nativeZlibLoaded;
}
public static String getLibraryName() {
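The two @VisibleForTesting hooks above replace the old per-job configuration flag in tests. A minimal JUnit sketch of the intended usage, grounded in the test changes shown later in this commit (class and method names are illustrative):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.compress.zlib.ZlibFactory;
import org.junit.After;
import org.junit.Test;
import static org.junit.Assert.assertFalse;

public class ZlibTestToggleSketch {
  @Test
  public void exercisesPureJavaZlib() {
    Configuration conf = new Configuration();
    // Pretend libhadoop's zlib is unavailable for this test only.
    ZlibFactory.setNativeZlibLoaded(false);
    assertFalse(ZlibFactory.isNativeZlibLoaded(conf));
    // ... run the codec under test against the built-in Java zlib ...
  }

  @After
  public void restoreNativeState() {
    // Re-detect the real native state so later tests are unaffected.
    ZlibFactory.loadNativeZLib();
  }
}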

View File

@ -22,8 +22,6 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
/**
* A helper to load the native hadoop code i.e. libhadoop.so.
@ -86,28 +84,4 @@ public class NativeCodeLoader {
public static native String getLibraryName();
/**
* Return if native hadoop libraries, if present, can be used for this job.
* @param conf configuration
*
* @return <code>true</code> if native hadoop libraries, if present, can be
* used for this job; <code>false</code> otherwise.
*/
public boolean getLoadNativeLibraries(Configuration conf) {
return conf.getBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY,
CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_DEFAULT);
}
/**
* Set if native hadoop libraries, if present, can be used for this job.
*
* @param conf configuration
* @param loadNativeLibraries can native hadoop libraries be loaded
*/
public void setLoadNativeLibraries(Configuration conf,
boolean loadNativeLibraries) {
conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY,
loadNativeLibraries);
}
}
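With the getter/setter pair removed, callers that used to consult io.native.lib.available now ask the loader itself. A short sketch (the wrapper class name is illustrative):

import org.apache.hadoop.util.NativeCodeLoader;

public class NativeCheckSketch {
  public static void main(String[] args) {
    // The per-job switch is gone; the only remaining question is whether
    // libhadoop was found on java.library.path at class-load time.
    if (NativeCodeLoader.isNativeCodeLoaded()) {
      System.out.println("libhadoop loaded: " + NativeCodeLoader.getLibraryName());
    } else {
      System.out.println("using pure-Java implementations");
    }
  }
}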

View File

@ -38,15 +38,6 @@
<description>A base for other temporary directories.</description>
</property>
<property>
<name>io.native.lib.available</name>
<value>true</value>
<description>Controls whether to use native libraries for bz2 and zlib
compression codecs or not. The property does not control any other native
libraries.
</description>
</property>
<property>
<name>hadoop.http.filter.initializers</name>
<value>org.apache.hadoop.http.lib.StaticUserWebFilter</value>

View File

@ -283,5 +283,6 @@ The following table lists additional changes to some configuration properties:
|:---- |:---- |
| mapred.create.symlink | NONE - symlinking is always on |
| mapreduce.job.cache.symlink.create | NONE - symlinking is always on |
| io.native.lib.available | NONE - Always use native libraries if available. |
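A hedged before/after sketch of what this row means for job code; the commented-out line is the old, now-ignored knob, and everything else is illustrative:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.compress.zlib.ZlibFactory;

public class IoNativeLibMigrationSketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // Before: conf.setBoolean("io.native.lib.available", false);
    // After: there is nothing to set; native zlib/bzip2 is used whenever
    // libhadoop is present, with the pure-Java codecs as the fallback.
    System.out.println("native zlib in use: " + ZlibFactory.isNativeZlibLoaded(conf));
  }
}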

View File

@ -17,6 +17,14 @@
*/
package org.apache.hadoop.io.compress;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.junit.Assume.assumeTrue;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
@ -40,6 +48,9 @@ import java.util.Random;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.fs.FileStatus;
@ -51,9 +62,10 @@ import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.MapFile;
import org.apache.hadoop.io.RandomDatum;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.SequenceFile.CompressionType;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.SequenceFile.CompressionType;
import org.apache.hadoop.io.compress.bzip2.Bzip2Factory;
import org.apache.hadoop.io.compress.zlib.BuiltInGzipDecompressor;
import org.apache.hadoop.io.compress.zlib.BuiltInZlibDeflater;
import org.apache.hadoop.io.compress.zlib.BuiltInZlibInflater;
@ -61,20 +73,13 @@ import org.apache.hadoop.io.compress.zlib.ZlibCompressor;
import org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionLevel;
import org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionStrategy;
import org.apache.hadoop.io.compress.zlib.ZlibFactory;
import org.apache.hadoop.io.compress.bzip2.Bzip2Factory;
import org.apache.hadoop.util.LineReader;
import org.apache.hadoop.util.NativeCodeLoader;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.junit.After;
import org.junit.Assert;
import org.junit.Assume;
import org.junit.Test;
import static org.junit.Assert.*;
import static org.junit.Assume.*;
public class TestCodec {
@ -84,6 +89,10 @@ public class TestCodec {
private int count = 10000;
private int seed = new Random().nextInt();
@After
public void after() {
ZlibFactory.loadNativeZLib();
}
@Test
public void testDefaultCodec() throws IOException {
codecTest(conf, seed, 0, "org.apache.hadoop.io.compress.DefaultCodec");
@ -364,7 +373,6 @@ public class TestCodec {
@Test
public void testCodecPoolGzipReuse() throws Exception {
Configuration conf = new Configuration();
conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, true);
assumeTrue(ZlibFactory.isNativeZlibLoaded(conf));
GzipCodec gzc = ReflectionUtils.newInstance(GzipCodec.class, conf);
DefaultCodec dfc = ReflectionUtils.newInstance(DefaultCodec.class, conf);
@ -446,7 +454,6 @@ public class TestCodec {
@Test
public void testCodecInitWithCompressionLevel() throws Exception {
Configuration conf = new Configuration();
conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, true);
if (ZlibFactory.isNativeZlibLoaded(conf)) {
LOG.info("testCodecInitWithCompressionLevel with native");
codecTestWithNOCompression(conf,
@ -458,7 +465,8 @@ public class TestCodec {
+ ": native libs not loaded");
}
conf = new Configuration();
conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, false);
// don't use native libs
ZlibFactory.setNativeZlibLoaded(false);
codecTestWithNOCompression( conf,
"org.apache.hadoop.io.compress.DefaultCodec");
}
@ -466,14 +474,14 @@ public class TestCodec {
@Test
public void testCodecPoolCompressorReinit() throws Exception {
Configuration conf = new Configuration();
conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, true);
if (ZlibFactory.isNativeZlibLoaded(conf)) {
GzipCodec gzc = ReflectionUtils.newInstance(GzipCodec.class, conf);
gzipReinitTest(conf, gzc);
} else {
LOG.warn("testCodecPoolCompressorReinit skipped: native libs not loaded");
}
conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, false);
// don't use native libs
ZlibFactory.setNativeZlibLoaded(false);
DefaultCodec dfc = ReflectionUtils.newInstance(DefaultCodec.class, conf);
gzipReinitTest(conf, dfc);
}
@ -660,7 +668,8 @@ public class TestCodec {
gzbuf.reset(dflbuf.getData(), dflbuf.getLength());
Configuration conf = new Configuration();
conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, false);
// don't use native libs
ZlibFactory.setNativeZlibLoaded(false);
CompressionCodec codec = ReflectionUtils.newInstance(GzipCodec.class, conf);
Decompressor decom = codec.createDecompressor();
assertNotNull(decom);
@ -713,14 +722,14 @@ public class TestCodec {
@Test
public void testBuiltInGzipConcat() throws IOException {
Configuration conf = new Configuration();
conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, false);
// don't use native libs
ZlibFactory.setNativeZlibLoaded(false);
GzipConcatTest(conf, BuiltInGzipDecompressor.class);
}
@Test
public void testNativeGzipConcat() throws IOException {
Configuration conf = new Configuration();
conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, true);
assumeTrue(ZlibFactory.isNativeZlibLoaded(conf));
GzipConcatTest(conf, GzipCodec.GzipZlibDecompressor.class);
}
@ -732,10 +741,7 @@ public class TestCodec {
// Don't use native libs for this test.
Configuration conf = new Configuration();
conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, false);
assertFalse("ZlibFactory is using native libs against request",
ZlibFactory.isNativeZlibLoaded(conf));
ZlibFactory.setNativeZlibLoaded(false);
// Ensure that the CodecPool has a BuiltInZlibInflater in it.
Decompressor zlibDecompressor = ZlibFactory.getZlibDecompressor(conf);
assertNotNull("zlibDecompressor is null!", zlibDecompressor);
@ -784,7 +790,7 @@ public class TestCodec {
// Don't use native libs for this test.
Configuration conf = new Configuration();
conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, false);
ZlibFactory.setNativeZlibLoaded(false);
assertFalse("ZlibFactory is using native libs against request",
ZlibFactory.isNativeZlibLoaded(conf));
@ -833,7 +839,6 @@ public class TestCodec {
// Use native libs per the parameter
Configuration conf = new Configuration();
conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, useNative);
if (useNative) {
assumeTrue(ZlibFactory.isNativeZlibLoaded(conf));
} else {
@ -887,6 +892,8 @@ public class TestCodec {
@Test
public void testGzipCodecWriteJava() throws IOException {
// don't use native libs
ZlibFactory.setNativeZlibLoaded(false);
testGzipCodecWrite(false);
}
@ -901,8 +908,7 @@ public class TestCodec {
// Don't use native libs for this test.
Configuration conf = new Configuration();
conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY,
false);
ZlibFactory.setNativeZlibLoaded(false);
assertFalse("ZlibFactory is using native libs against request",
ZlibFactory.isNativeZlibLoaded(conf));

View File

@ -100,7 +100,6 @@ public class TestZlibCompressorDecompressor {
@Test
public void testZlibCompressorDecompressorWithConfiguration() {
Configuration conf = new Configuration();
conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, true);
if (ZlibFactory.isNativeZlibLoaded(conf)) {
byte[] rawData;
int tryNumber = 5;
@ -214,7 +213,6 @@ public class TestZlibCompressorDecompressor {
@Test
public void testZlibCompressorDecompressorSetDictionary() {
Configuration conf = new Configuration();
conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, true);
if (ZlibFactory.isNativeZlibLoaded(conf)) {
Compressor zlibCompressor = ZlibFactory.getZlibCompressor(conf);
Decompressor zlibDecompressor = ZlibFactory.getZlibDecompressor(conf);

View File

@ -247,8 +247,6 @@ public class TestTFileSeqFileComparison extends TestCase {
public SeqFileAppendable(FileSystem fs, Path path, int osBufferSize,
String compress, int minBlkSize) throws IOException {
Configuration conf = new Configuration();
conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY,
true);
CompressionCodec codec = null;
if ("lzo".equals(compress)) {

View File

@ -18,30 +18,34 @@
package org.apache.hadoop.mapred;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.io.ByteArrayInputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.zip.Inflater;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.compress.*;
import org.apache.hadoop.util.LineReader;
import org.apache.hadoop.util.ReflectionUtils;
import org.junit.Ignore;
import org.junit.Test;
import static org.junit.Assert.*;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.compress.BZip2Codec;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionInputStream;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.io.compress.zlib.ZlibFactory;
import org.apache.hadoop.util.LineReader;
import org.apache.hadoop.util.ReflectionUtils;
import org.junit.After;
import org.junit.Ignore;
import org.junit.Test;
@Ignore
public class TestConcatenatedCompressedInput {
private static final Log LOG =
@ -76,6 +80,10 @@ public class TestConcatenatedCompressedInput {
}
}
@After
public void after() {
ZlibFactory.loadNativeZLib();
}
private static Path workDir =
new Path(new Path(System.getProperty("test.build.data", "/tmp")),
"TestConcatenatedCompressedInput").makeQualified(localFs);
@ -302,12 +310,12 @@ public class TestConcatenatedCompressedInput {
@Test
public void testBuiltInGzipDecompressor() throws IOException {
JobConf jobConf = new JobConf(defaultConf);
jobConf.setBoolean("io.native.lib.available", false);
CompressionCodec gzip = new GzipCodec();
ReflectionUtils.setConf(gzip, jobConf);
localFs.delete(workDir, true);
// Don't use native libs for this test
ZlibFactory.setNativeZlibLoaded(false);
assertEquals("[non-native (Java) codec]",
org.apache.hadoop.io.compress.zlib.BuiltInGzipDecompressor.class,
gzip.getDecompressorType());
@ -351,9 +359,7 @@ public class TestConcatenatedCompressedInput {
assertEquals("total uncompressed lines in concatenated test file",
84, lineNum);
// test BuiltInGzipDecompressor with lots of different input-buffer sizes
doMultipleGzipBufferSizes(jobConf, false);
ZlibFactory.loadNativeZLib();
// test GzipZlibDecompressor (native), just to be sure
// (FIXME? could move this call to testGzip(), but would need filename
// setup above) (alternatively, maybe just nuke testGzip() and extend this?)
@ -370,7 +376,6 @@ public class TestConcatenatedCompressedInput {
(useNative? "GzipZlibDecompressor" : "BuiltInGzipDecompressor") +
COLOR_NORMAL);
jConf.setBoolean("io.native.lib.available", useNative);
int bufferSize;
@ -575,23 +580,17 @@ public class TestConcatenatedCompressedInput {
*/
// test CBZip2InputStream with lots of different input-buffer sizes
doMultipleBzip2BufferSizes(jobConf, false);
// no native version of bzip2 codec (yet?)
//doMultipleBzip2BufferSizes(jobConf, true);
doMultipleBzip2BufferSizes(jobConf);
}
// this tests either the native or the non-native gzip decoder with more than
// this tests native bzip2 decoder with more than
// three dozen input-buffer sizes in order to try to catch any parser/state-
// machine errors at buffer boundaries
private static void doMultipleBzip2BufferSizes(JobConf jConf,
boolean useNative)
private static void doMultipleBzip2BufferSizes(JobConf jConf)
throws IOException {
System.out.println(COLOR_MAGENTA + "doMultipleBzip2BufferSizes() using " +
"default bzip2 decompressor" + COLOR_NORMAL);
jConf.setBoolean("io.native.lib.available", useNative);
int bufferSize;
// ideally would add some offsets/shifts in here (e.g., via extra header