HADOOP-11658. Externalize io.compression.codecs property. Contributed by Kai Zheng.
commit ca1c00bf81
parent cbb492578e

CHANGES.txt
@@ -642,6 +642,9 @@ Release 2.7.0 - UNRELEASED
     HADOOP-10976. moving the source code of hadoop-tools docs to the
     directory under hadoop-tools (Masatake Iwasaki via aw)
 
+    HADOOP-11658. Externalize io.compression.codecs property.
+    (Kai Zheng via aajisaka)
+
   OPTIMIZATIONS
 
     HADOOP-11323. WritableComparator#compare keeps reference to byte array.

CommonConfigurationKeys.java
@@ -91,17 +91,24 @@ public class CommonConfigurationKeys extends CommonConfigurationKeysPublic {
   public static final String IPC_CALLQUEUE_IMPL_KEY = "callqueue.impl";
   public static final String IPC_CALLQUEUE_IDENTITY_PROVIDER_KEY = "identity-provider.impl";
 
-  /** Internal buffer size for Lzo compressor/decompressors */
-  public static final String IO_COMPRESSION_CODEC_LZO_BUFFERSIZE_KEY =
-      "io.compression.codec.lzo.buffersize";
-  /** Default value for IO_COMPRESSION_CODEC_LZO_BUFFERSIZE_KEY */
-  public static final int IO_COMPRESSION_CODEC_LZO_BUFFERSIZE_DEFAULT =
-      64*1024;
   /** This is for specifying the implementation for the mappings from
    * hostnames to the racks they belong to
    */
   public static final String NET_TOPOLOGY_CONFIGURED_NODE_MAPPING_KEY =
-    "net.topology.configured.node.mapping";
+      "net.topology.configured.node.mapping";
+
+  /**
+   * Supported compression codec classes
+   */
+  public static final String IO_COMPRESSION_CODECS_KEY = "io.compression.codecs";
+
+  /** Internal buffer size for Lzo compressor/decompressors */
+  public static final String IO_COMPRESSION_CODEC_LZO_BUFFERSIZE_KEY =
+      "io.compression.codec.lzo.buffersize";
+
+  /** Default value for IO_COMPRESSION_CODEC_LZO_BUFFERSIZE_KEY */
+  public static final int IO_COMPRESSION_CODEC_LZO_BUFFERSIZE_DEFAULT =
+      64*1024;
 
   /** Internal buffer size for Snappy compressor/decompressors */
   public static final String IO_COMPRESSION_CODEC_SNAPPY_BUFFERSIZE_KEY =
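With the key now exposed as CommonConfigurationKeys.IO_COMPRESSION_CODECS_KEY, callers no longer need to repeat the "io.compression.codecs" literal. A minimal sketch of how client code might use the constant (the example class name and codec list are illustrative, not part of this change):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.CommonConfigurationKeys;
    import org.apache.hadoop.io.compress.CompressionCodec;
    import org.apache.hadoop.io.compress.CompressionCodecFactory;

    public class CodecKeyExample {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Same property as before ("io.compression.codecs"), now referenced
        // through the constant instead of a repeated string literal.
        conf.set(CommonConfigurationKeys.IO_COMPRESSION_CODECS_KEY,
            "org.apache.hadoop.io.compress.GzipCodec,"
                + "org.apache.hadoop.io.compress.DefaultCodec");
        CompressionCodecFactory factory = new CompressionCodecFactory(conf);
        CompressionCodec gzip = factory.getCodecByClassName(
            "org.apache.hadoop.io.compress.GzipCodec");
        System.out.println(gzip == null ? "not registered" : gzip.getDefaultExtension());
      }
    }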

CompressionCodecFactory.java
@@ -24,6 +24,7 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.util.ReflectionUtils;
 
@@ -106,7 +107,8 @@ public class CompressionCodecFactory {
    * @param conf the configuration to look in
    * @return a list of the {@link CompressionCodec} classes
    */
-  public static List<Class<? extends CompressionCodec>> getCodecClasses(Configuration conf) {
+  public static List<Class<? extends CompressionCodec>> getCodecClasses(
+      Configuration conf) {
     List<Class<? extends CompressionCodec>> result
       = new ArrayList<Class<? extends CompressionCodec>>();
     // Add codec classes discovered via service loading
@@ -118,7 +120,8 @@ public class CompressionCodecFactory {
       }
     }
     // Add codec classes from configuration
-    String codecsString = conf.get("io.compression.codecs");
+    String codecsString = conf.get(
+        CommonConfigurationKeys.IO_COMPRESSION_CODECS_KEY);
     if (codecsString != null) {
       StringTokenizer codecSplit = new StringTokenizer(codecsString, ",");
       while (codecSplit.hasMoreElements()) {
@@ -161,7 +164,7 @@ public class CompressionCodecFactory {
         buf.append(itr.next().getName());
       }
     }
-    conf.set("io.compression.codecs", buf.toString());
+    conf.set(CommonConfigurationKeys.IO_COMPRESSION_CODECS_KEY, buf.toString());
   }
 
   /**
@@ -172,7 +175,8 @@ public class CompressionCodecFactory {
     codecs = new TreeMap<String, CompressionCodec>();
     codecsByClassName = new HashMap<String, CompressionCodec>();
     codecsByName = new HashMap<String, CompressionCodec>();
-    List<Class<? extends CompressionCodec>> codecClasses = getCodecClasses(conf);
+    List<Class<? extends CompressionCodec>> codecClasses =
+        getCodecClasses(conf);
     if (codecClasses == null || codecClasses.isEmpty()) {
       addCodec(new GzipCodec());
       addCodec(new DefaultCodec());
@@ -193,7 +197,8 @@ public class CompressionCodecFactory {
     CompressionCodec result = null;
     if (codecs != null) {
       String filename = file.getName();
-      String reversedFilename = new StringBuilder(filename).reverse().toString();
+      String reversedFilename =
+          new StringBuilder(filename).reverse().toString();
       SortedMap<String, CompressionCodec> subMap =
         codecs.headMap(reversedFilename);
       if (!subMap.isEmpty()) {
@ -239,7 +244,8 @@ public class CompressionCodecFactory {
|
|||
}
|
||||
CompressionCodec codec = getCodecByClassName(codecName);
|
||||
if (codec == null) {
|
||||
// trying to get the codec by name in case the name was specified instead a class
|
||||
// trying to get the codec by name in case the name was specified
|
||||
// instead a class
|
||||
codec = codecsByName.get(codecName.toLowerCase());
|
||||
}
|
||||
return codec;
|
||||
|
@ -260,7 +266,8 @@ public class CompressionCodecFactory {
|
|||
* @param codecName the canonical class name of the codec
|
||||
* @return the codec class
|
||||
*/
|
||||
public Class<? extends CompressionCodec> getCodecClassByName(String codecName) {
|
||||
public Class<? extends CompressionCodec> getCodecClassByName(
|
||||
String codecName) {
|
||||
CompressionCodec codec = getCodecByName(codecName);
|
||||
if (codec == null) {
|
||||
return null;
|
||||
|
|
|
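The CompressionCodecFactory changes above are mechanical: the "io.compression.codecs" literal becomes the new constant and a few long lines and comments are re-wrapped, with no change in behavior. For context, the reversed-filename map that getCodec(Path) consults is what resolves a file suffix to a registered codec; a minimal usage sketch under a default Configuration (the path and class name here are made up for illustration):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.compress.CompressionCodec;
    import org.apache.hadoop.io.compress.CompressionCodecFactory;

    public class CodecBySuffixExample {
      public static void main(String[] args) {
        CompressionCodecFactory factory =
            new CompressionCodecFactory(new Configuration());
        // The ".gz" suffix resolves to GzipCodec through the reversed-filename
        // headMap lookup shown in the diff above.
        CompressionCodec codec = factory.getCodec(new Path("/tmp/events.log.gz"));
        System.out.println(codec == null ? "no codec" : codec.getClass().getName());
      }
    }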

TestCodecFactory.java
@@ -24,6 +24,7 @@ import java.io.OutputStream;
 import java.util.*;
 
 import junit.framework.TestCase;
+import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.conf.Configuration;
 
@@ -258,7 +259,7 @@ public class TestCodecFactory extends TestCase {
     checkCodec("overridden factory for gzip codec", NewGzipCodec.class, codec);
 
     Configuration conf = new Configuration();
-    conf.set("io.compression.codecs",
+    conf.set(CommonConfigurationKeys.IO_COMPRESSION_CODECS_KEY,
         " org.apache.hadoop.io.compress.GzipCodec , " +
         " org.apache.hadoop.io.compress.DefaultCodec , " +
         " org.apache.hadoop.io.compress.BZip2Codec ");
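The whitespace-padded class names in the test exercise the parsing in getCodecClasses, which splits the property value on commas and trims each entry before resolving the class. A minimal sketch of the same behavior outside the test (the example class name is made up; the returned list also includes codecs discovered via the Java ServiceLoader):

    import java.util.List;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.CommonConfigurationKeys;
    import org.apache.hadoop.io.compress.CompressionCodec;
    import org.apache.hadoop.io.compress.CompressionCodecFactory;

    public class PaddedCodecListExample {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Whitespace around the comma-separated entries is trimmed when parsed.
        conf.set(CommonConfigurationKeys.IO_COMPRESSION_CODECS_KEY,
            "  org.apache.hadoop.io.compress.GzipCodec , "
                + " org.apache.hadoop.io.compress.BZip2Codec  ");
        List<Class<? extends CompressionCodec>> codecClasses =
            CompressionCodecFactory.getCodecClasses(conf);
        for (Class<? extends CompressionCodec> codecClass : codecClasses) {
          System.out.println(codecClass.getName());
        }
      }
    }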