HBASE-1392 change how we build/configure lzocodec

git-svn-id: https://svn.apache.org/repos/asf/hadoop/hbase/trunk@773033 13f79535-47bb-0310-9956-ffa450edef68
Michael Stack 2009-05-08 15:40:11 +00:00
parent f7add789cd
commit 90dffef122
2 changed files with 13 additions and 13 deletions


@@ -212,6 +212,7 @@ Release 0.20.0 - Unreleased
    HBASE-1383 hbase shell needs to warn on deleting multi-region table
    HBASE-1286 Thrift should support next(nbRow) like functionality
               (Alex Newman via Stack)
+   HBASE-1392 change how we build/configure lzocodec (Ryan Rawson via Stack)

    OPTIMIZATIONS


@@ -26,6 +26,7 @@ import java.io.OutputStream;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.io.compress.CodecPool;
 import org.apache.hadoop.io.compress.CompressionCodec;
 import org.apache.hadoop.io.compress.CompressionInputStream;
@@ -34,6 +35,7 @@ import org.apache.hadoop.io.compress.Compressor;
 import org.apache.hadoop.io.compress.Decompressor;
 import org.apache.hadoop.io.compress.GzipCodec;
 import org.apache.hadoop.io.compress.DefaultCodec;
+import org.apache.hadoop.util.ReflectionUtils;

 /**
  * Compression related stuff.
@@ -74,24 +76,19 @@ public final class Compression {
   public static enum Algorithm {
     LZO("lzo") {
       // Use base type to avoid compile-time dependencies.
-      private DefaultCodec lzoCodec;
+      private CompressionCodec lzoCodec;

       @Override
-      DefaultCodec getCodec() {
+      CompressionCodec getCodec() {
         if (lzoCodec == null) {
           Configuration conf = new Configuration();
           conf.setBoolean("hadoop.native.lib", true);
           try {
             Class externalCodec =
                 ClassLoader.getSystemClassLoader().loadClass("com.hadoop.compression.lzo.LzoCodec");
-            lzoCodec = (DefaultCodec) externalCodec.newInstance();
-            lzoCodec.setConf(conf);
+            lzoCodec = (CompressionCodec) ReflectionUtils.newInstance(externalCodec, conf);
           } catch (ClassNotFoundException e) {
             throw new RuntimeException(e);
-          } catch (InstantiationException e) {
-            throw new RuntimeException(e);
-          } catch (IllegalAccessException e) {
-            throw new RuntimeException(e);
           }
         }
         return lzoCodec;
@@ -156,15 +153,16 @@ public final class Compression {
       this.compressName = name;
     }

-    abstract DefaultCodec getCodec();
+    abstract CompressionCodec getCodec();

     public InputStream createDecompressionStream(
         InputStream downStream, Decompressor decompressor,
         int downStreamBufferSize) throws IOException {
-      DefaultCodec codec = getCodec();
+      CompressionCodec codec = getCodec();
       // Set the internal buffer size to read from down stream.
       if (downStreamBufferSize > 0) {
-        codec.getConf().setInt("io.file.buffer.size", downStreamBufferSize);
+        Configurable c = (Configurable) codec;
+        c.getConf().setInt("io.file.buffer.size", downStreamBufferSize);
       }
       CompressionInputStream cis =
           codec.createInputStream(downStream, decompressor);
@@ -176,7 +174,7 @@ public final class Compression {
     public OutputStream createCompressionStream(
         OutputStream downStream, Compressor compressor, int downStreamBufferSize)
         throws IOException {
-      DefaultCodec codec = getCodec();
+      CompressionCodec codec = getCodec();
       OutputStream bos1 = null;
       if (downStreamBufferSize > 0) {
         bos1 = new BufferedOutputStream(downStream, downStreamBufferSize);
@@ -184,7 +182,8 @@ public final class Compression {
       else {
         bos1 = downStream;
       }
-      codec.getConf().setInt("io.file.buffer.size", 32 * 1024);
+      Configurable c = (Configurable) codec;
+      c.getConf().setInt("io.file.buffer.size", 32 * 1024);
       CompressionOutputStream cos =
           codec.createOutputStream(bos1, compressor);
       BufferedOutputStream bos2 =
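
For reference, a minimal standalone sketch of the pattern this patch moves to: resolve the codec class by name, instantiate it through ReflectionUtils so a Configurable implementation gets its Configuration injected, and tune buffer sizes through the Configurable interface instead of a DefaultCodec-typed field. Class and key names are taken from the diff above; the LzoCodecLoaderSketch helper and its method names are made up for illustration and are not part of the commit.

// Illustrative sketch only; mirrors the patched getCodec() and stream setup.
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.util.ReflectionUtils;

public final class LzoCodecLoaderSketch {
  private LzoCodecLoaderSketch() {}

  static CompressionCodec loadCodec(String className) {
    Configuration conf = new Configuration();
    conf.setBoolean("hadoop.native.lib", true);
    try {
      // Resolve the codec class at runtime, avoiding a compile-time dependency.
      Class<?> externalCodec =
          ClassLoader.getSystemClassLoader().loadClass(className);
      // ReflectionUtils.newInstance calls setConf() on Configurable classes,
      // so the concrete DefaultCodec type no longer needs to be referenced.
      return (CompressionCodec) ReflectionUtils.newInstance(externalCodec, conf);
    } catch (ClassNotFoundException e) {
      throw new RuntimeException(e);
    }
  }

  static void setBufferSize(CompressionCodec codec, int bufferSize) {
    // Hadoop codecs are Configurable; buffer tuning goes through their conf.
    ((Configurable) codec).getConf().setInt("io.file.buffer.size", bufferSize);
  }
}

With this arrangement, Compression.Algorithm depends only on the CompressionCodec and Configurable interfaces at compile time; com.hadoop.compression.lzo.LzoCodec has to be on the classpath only when LZO is actually selected.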