HBASE-7178 Compression tests
git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1410496 13f79535-47bb-0310-9956-ffa450edef68
parent c5b57b4558
commit 91b304eefe
@@ -51,7 +51,10 @@ import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.MediumTests;
 import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.io.compress.Compression;
+import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
+import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.monitoring.MonitoredTask;
 import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
 import org.apache.hadoop.hbase.regionserver.wal.HLog;
@@ -151,6 +154,35 @@ public class TestStore extends TestCase {
     store = new HStore(basedir, region, hcd, fs, conf);
   }
 
+  /**
+   * Verify that compression and data block encoding are respected by the
+   * Store.createWriterInTmp() method, used on store flush.
+   */
+  public void testCreateWriter() throws Exception {
+    Configuration conf = HBaseConfiguration.create();
+    FileSystem fs = FileSystem.get(conf);
+
+    HColumnDescriptor hcd = new HColumnDescriptor(family);
+    hcd.setCompressionType(Compression.Algorithm.GZ);
+    hcd.setDataBlockEncoding(DataBlockEncoding.DIFF);
+    init(getName(), conf, hcd);
+
+    // Test createWriterInTmp()
+    StoreFile.Writer writer = store.createWriterInTmp(4, hcd.getCompression(), false);
+    Path path = writer.getPath();
+    writer.append(new KeyValue(row, family, qf1, Bytes.toBytes(1)));
+    writer.append(new KeyValue(row, family, qf2, Bytes.toBytes(2)));
+    writer.append(new KeyValue(row2, family, qf1, Bytes.toBytes(3)));
+    writer.append(new KeyValue(row2, family, qf2, Bytes.toBytes(4)));
+    writer.close();
+
+    // Verify that compression and encoding settings are respected
+    HFile.Reader reader = HFile.createReader(fs, path, new CacheConfig(conf));
+    assertEquals(hcd.getCompressionType(), reader.getCompressionAlgorithm());
+    assertEquals(hcd.getDataBlockEncoding(), reader.getEncodingOnDisk());
+    reader.close();
+  }
+
   public void testDeleteExpiredStoreFiles() throws Exception {
     int storeFileNum = 4;
     int ttl = 4;
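For context on what the new testCreateWriter() hunk above protects: the GZ compression and DIFF data block encoding it sets on the HColumnDescriptor are the same per-family settings a client would request at table-creation time, and a flushed store file is expected to carry them. Below is a minimal sketch of that client-side usage against the era-appropriate HBaseAdmin API; the class, table, and family names are made up for illustration and are not part of this commit.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;

public class CreateGzDiffTable {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    HBaseAdmin admin = new HBaseAdmin(conf);
    try {
      // Same per-family settings the test applies: GZ-compressed blocks
      // written with the DIFF data block encoding.
      HColumnDescriptor family = new HColumnDescriptor("cf");   // hypothetical family name
      family.setCompressionType(Compression.Algorithm.GZ);
      family.setDataBlockEncoding(DataBlockEncoding.DIFF);

      HTableDescriptor table = new HTableDescriptor("demo");    // hypothetical table name
      table.addFamily(family);
      admin.createTable(table);
    } finally {
      admin.close();
    }
  }
}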
@@ -19,6 +19,8 @@
 
 package org.apache.hadoop.hbase.util;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.SmallTests;
 import org.apache.hadoop.hbase.io.compress.Compression;
@@ -38,10 +40,10 @@ import static org.junit.Assert.*;
 
 @Category(SmallTests.class)
 public class TestCompressionTest {
+  static final Log LOG = LogFactory.getLog(TestCompressionTest.class);
 
   @Test
-  public void testTestCompression() {
-
+  public void testExceptionCaching() {
     // This test will fail if you run the tests with LZO compression available.
     try {
       CompressionTest.testCompression(Compression.Algorithm.LZO);
@@ -60,59 +62,23 @@ public class TestCompressionTest {
       assertNull(e.getCause());
     }
 
     assertFalse(CompressionTest.testCompression("LZO"));
+  }
+
+  @Test
+  public void testTestCompression() {
     assertTrue(CompressionTest.testCompression("NONE"));
     assertTrue(CompressionTest.testCompression("GZ"));
 
-    if (isCompressionAvailable("org.apache.hadoop.io.compress.SnappyCodec")) {
-      if (NativeCodeLoader.isNativeCodeLoaded()) {
-        try {
-          System.loadLibrary("snappy");
-
-          try {
-            Configuration conf = new Configuration();
-            CompressionCodec codec = (CompressionCodec)
-              ReflectionUtils.newInstance(
-                conf.getClassByName("org.apache.hadoop.io.compress.SnappyCodec"), conf);
-
-            DataOutputBuffer compressedDataBuffer = new DataOutputBuffer();
-            CompressionOutputStream deflateFilter =
-              codec.createOutputStream(compressedDataBuffer);
-
-            byte[] data = new byte[1024];
-            DataOutputStream deflateOut = new DataOutputStream(
-              new BufferedOutputStream(deflateFilter));
-            deflateOut.write(data, 0, data.length);
-            deflateOut.flush();
-            deflateFilter.finish();
-
-            // Snappy Codec class, Snappy nativelib and Hadoop nativelib with
-            // Snappy JNIs are present
-            assertTrue(CompressionTest.testCompression("SNAPPY"));
-          }
-          catch (UnsatisfiedLinkError ex) {
-            // Hadoop nativelib does not have Snappy JNIs
-
-            // cannot assert the codec here because the current logic of
-            // CompressionTest checks only classloading, not the codec
-            // usage.
-          }
-          catch (Exception ex) {
-          }
-        }
-        catch (UnsatisfiedLinkError ex) {
-          // Snappy nativelib is not available
-          assertFalse(CompressionTest.testCompression("SNAPPY"));
-        }
-      }
-      else {
-        // Hadoop nativelib is not available
-        assertFalse(CompressionTest.testCompression("SNAPPY"));
-      }
-    }
-    else {
-      // Snappy Codec class is not available
-      assertFalse(CompressionTest.testCompression("SNAPPY"));
-    }
-  }
+    if (NativeCodeLoader.isNativeCodeLoaded()) {
+      nativeCodecTest("LZO", "lzo2", "com.hadoop.compression.lzo.LzoCodec");
+      nativeCodecTest("LZ4", null, "org.apache.hadoop.io.compress.Lz4Codec");
+      nativeCodecTest("SNAPPY", "snappy", "org.apache.hadoop.io.compress.SnappyCodec");
+    } else {
+      // Hadoop nativelib is not available
+      LOG.debug("Native code not loaded");
+      assertFalse(CompressionTest.testCompression("LZO"));
+      assertFalse(CompressionTest.testCompression("LZ4"));
+      assertFalse(CompressionTest.testCompression("SNAPPY"));
+    }
+  }
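The NativeCodeLoader gate introduced in the new testTestCompression() above mirrors a check callers can make themselves before relying on native codecs. A small standalone sketch of that pattern follows; it assumes only the boolean return of CompressionTest.testCompression(String) shown in this diff, and the class name is hypothetical.

import org.apache.hadoop.hbase.util.CompressionTest;
import org.apache.hadoop.util.NativeCodeLoader;

public class CodecPreflight {
  public static void main(String[] args) {
    // Only expect native codecs (SNAPPY, LZ4, LZO) to work when the Hadoop
    // native library has been loaded into this JVM; NONE and GZ are pure Java
    // and are asserted unconditionally in the test above.
    if (NativeCodeLoader.isNativeCodeLoaded()) {
      System.out.println("SNAPPY usable: " + CompressionTest.testCompression("SNAPPY"));
      System.out.println("LZ4 usable:    " + CompressionTest.testCompression("LZ4"));
    } else {
      System.out.println("Hadoop native code not loaded; skipping native codec checks");
    }
    System.out.println("GZ usable:     " + CompressionTest.testCompression("GZ"));
  }
}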
@@ -121,12 +87,56 @@ public class TestCompressionTest {
     try {
       Thread.currentThread().getContextClassLoader().loadClass(codecClassName);
       return true;
-    }
-    catch (Exception ex) {
+    } catch (Exception ex) {
       return false;
     }
   }
+
+  /**
+   * Verify CompressionTest.testCompression() on a native codec.
+   */
+  private void nativeCodecTest(String codecName, String libName, String codecClassName) {
+    if (isCompressionAvailable(codecClassName)) {
+      try {
+        if (libName != null) {
+          System.loadLibrary(libName);
+        }
+
+        try {
+          Configuration conf = new Configuration();
+          CompressionCodec codec = (CompressionCodec)
+            ReflectionUtils.newInstance(conf.getClassByName(codecClassName), conf);
+
+          DataOutputBuffer compressedDataBuffer = new DataOutputBuffer();
+          CompressionOutputStream deflateFilter = codec.createOutputStream(compressedDataBuffer);
+
+          byte[] data = new byte[1024];
+          DataOutputStream deflateOut = new DataOutputStream(new BufferedOutputStream(deflateFilter));
+          deflateOut.write(data, 0, data.length);
+          deflateOut.flush();
+          deflateFilter.finish();
+
+          // Codec class, codec nativelib and Hadoop nativelib with codec JNIs are present
+          assertTrue(CompressionTest.testCompression(codecName));
+        } catch (UnsatisfiedLinkError e) {
+          // Hadoop nativelib does not have codec JNIs.
+          // cannot assert the codec here because the current logic of
+          // CompressionTest checks only classloading, not the codec
+          // usage.
+          LOG.debug("No JNI for codec '" + codecName + "' " + e.getMessage());
+        } catch (Exception e) {
+          LOG.error(codecName, e);
+        }
+      } catch (UnsatisfiedLinkError e) {
+        // nativelib is not available
+        LOG.debug("Native lib not available: " + codecName);
+        assertFalse(CompressionTest.testCompression(codecName));
+      }
+    } else {
+      // Compression Codec class is not available
+      LOG.debug("Codec class not available: " + codecName);
+      assertFalse(CompressionTest.testCompression(codecName));
+    }
+  }
 }
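For readers who want to reproduce outside of JUnit the probe that nativeCodecTest() performs, the same round trip can be written as a plain program: load the codec class reflectively, let its native library resolve, and push a small buffer through a compression stream. The sketch below follows the code in this diff under those assumptions; the class name is hypothetical and Snappy is used only as the example codec.

import java.io.BufferedOutputStream;
import java.io.DataOutputStream;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionOutputStream;
import org.apache.hadoop.util.ReflectionUtils;

public class SnappyRoundTrip {
  public static void main(String[] args) throws Exception {
    // Fails with ClassNotFoundException / UnsatisfiedLinkError when the codec
    // class or its native library is missing, which is exactly the signal
    // nativeCodecTest() converts into assertions.
    Configuration conf = new Configuration();
    CompressionCodec codec = (CompressionCodec) ReflectionUtils.newInstance(
        conf.getClassByName("org.apache.hadoop.io.compress.SnappyCodec"), conf);

    DataOutputBuffer compressed = new DataOutputBuffer();
    CompressionOutputStream out = codec.createOutputStream(compressed);
    DataOutputStream dataOut = new DataOutputStream(new BufferedOutputStream(out));

    dataOut.write(new byte[1024]);   // 1 KB of zeros, same buffer size the test uses
    dataOut.flush();
    out.finish();

    System.out.println("compressed 1024 bytes down to " + compressed.getLength());
  }
}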