HBASE-7178 Compression tests

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1410496 13f79535-47bb-0310-9956-ffa450edef68
Author: Michael Stack 2012-11-16 17:29:19 +00:00
parent c5b57b4558
commit 91b304eefe
2 changed files with 95 additions and 53 deletions

hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java

@@ -51,7 +51,10 @@ import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.MediumTests;
 import org.apache.hadoop.hbase.client.Get;
+import org.apache.hadoop.hbase.io.compress.Compression;
+import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
+import org.apache.hadoop.hbase.io.hfile.CacheConfig;
+import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.monitoring.MonitoredTask;
 import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
 import org.apache.hadoop.hbase.regionserver.wal.HLog;
@@ -151,6 +154,35 @@ public class TestStore extends TestCase {
     store = new HStore(basedir, region, hcd, fs, conf);
   }
 
+  /**
+   * Verify that compression and data block encoding are respected by the
+   * Store.createWriterInTmp() method, used on store flush.
+   */
+  public void testCreateWriter() throws Exception {
+    Configuration conf = HBaseConfiguration.create();
+    FileSystem fs = FileSystem.get(conf);
+
+    HColumnDescriptor hcd = new HColumnDescriptor(family);
+    hcd.setCompressionType(Compression.Algorithm.GZ);
+    hcd.setDataBlockEncoding(DataBlockEncoding.DIFF);
+    init(getName(), conf, hcd);
+
+    // Test createWriterInTmp()
+    StoreFile.Writer writer = store.createWriterInTmp(4, hcd.getCompression(), false);
+    Path path = writer.getPath();
+    writer.append(new KeyValue(row, family, qf1, Bytes.toBytes(1)));
+    writer.append(new KeyValue(row, family, qf2, Bytes.toBytes(2)));
+    writer.append(new KeyValue(row2, family, qf1, Bytes.toBytes(3)));
+    writer.append(new KeyValue(row2, family, qf2, Bytes.toBytes(4)));
+    writer.close();
+
+    // Verify that compression and encoding settings are respected
+    HFile.Reader reader = HFile.createReader(fs, path, new CacheConfig(conf));
+    assertEquals(hcd.getCompressionType(), reader.getCompressionAlgorithm());
+    assertEquals(hcd.getDataBlockEncoding(), reader.getEncodingOnDisk());
+    reader.close();
+  }
+
   public void testDeleteExpiredStoreFiles() throws Exception {
     int storeFileNum = 4;
     int ttl = 4;
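
For context, the two settings asserted by the new testCreateWriter() above are the same ones a client enables per column family. Below is a minimal sketch, not part of this commit, of creating a table with GZ compression and DIFF data block encoding through the trunk-era admin API; the table name "t1" and family "cf" are invented for illustration.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;

public class CreateCompressedTable {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    HBaseAdmin admin = new HBaseAdmin(conf);

    // The same two per-family settings testCreateWriter() verifies on flush:
    // block compression and on-disk data block encoding.
    HColumnDescriptor hcd = new HColumnDescriptor("cf"); // hypothetical family
    hcd.setCompressionType(Compression.Algorithm.GZ);
    hcd.setDataBlockEncoding(DataBlockEncoding.DIFF);

    HTableDescriptor htd = new HTableDescriptor("t1");   // hypothetical table
    htd.addFamily(hcd);
    admin.createTable(htd);
    admin.close();
  }
}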

hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestCompressionTest.java

@@ -19,6 +19,8 @@
 package org.apache.hadoop.hbase.util;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.SmallTests;
 import org.apache.hadoop.hbase.io.compress.Compression;
@@ -38,10 +40,10 @@ import static org.junit.Assert.*;
 
 @Category(SmallTests.class)
 public class TestCompressionTest {
+  static final Log LOG = LogFactory.getLog(TestCompressionTest.class);
 
   @Test
-  public void testTestCompression() {
+  public void testExceptionCaching() {
     // This test will fail if you run the tests with LZO compression available.
     try {
       CompressionTest.testCompression(Compression.Algorithm.LZO);
@@ -60,59 +62,23 @@ public class TestCompressionTest {
       assertNull(e.getCause());
     }
 
     assertFalse(CompressionTest.testCompression("LZO"));
+  }
+
+  @Test
+  public void testTestCompression() {
     assertTrue(CompressionTest.testCompression("NONE"));
     assertTrue(CompressionTest.testCompression("GZ"));
 
-    if (isCompressionAvailable("org.apache.hadoop.io.compress.SnappyCodec")) {
-      if (NativeCodeLoader.isNativeCodeLoaded()) {
-        try {
-          System.loadLibrary("snappy");
-
-          try {
-            Configuration conf = new Configuration();
-            CompressionCodec codec = (CompressionCodec)
-                ReflectionUtils.newInstance(
-                    conf.getClassByName("org.apache.hadoop.io.compress.SnappyCodec"), conf);
-
-            DataOutputBuffer compressedDataBuffer = new DataOutputBuffer();
-            CompressionOutputStream deflateFilter =
-                codec.createOutputStream(compressedDataBuffer);
-
-            byte[] data = new byte[1024];
-            DataOutputStream deflateOut = new DataOutputStream(
-                new BufferedOutputStream(deflateFilter));
-            deflateOut.write(data, 0, data.length);
-            deflateOut.flush();
-            deflateFilter.finish();
-
-            // Snappy Codec class, Snappy nativelib and Hadoop nativelib with
-            // Snappy JNIs are present
-            assertTrue(CompressionTest.testCompression("SNAPPY"));
-          }
-          catch (UnsatisfiedLinkError ex) {
-            // Hadoop nativelib does not have Snappy JNIs
-            // cannot assert the codec here because the current logic of
-            // CompressionTest checks only classloading, not the codec
-            // usage.
-          }
-          catch (Exception ex) {
-          }
-        }
-        catch (UnsatisfiedLinkError ex) {
-          // Snappy nativelib is not available
-          assertFalse(CompressionTest.testCompression("SNAPPY"));
-        }
-      }
-      else {
-        // Hadoop nativelib is not available
-        assertFalse(CompressionTest.testCompression("SNAPPY"));
-      }
-    }
-    else {
-      // Snappy Codec class is not available
+    if (NativeCodeLoader.isNativeCodeLoaded()) {
+      nativeCodecTest("LZO", "lzo2", "com.hadoop.compression.lzo.LzoCodec");
+      nativeCodecTest("LZ4", null, "org.apache.hadoop.io.compress.Lz4Codec");
+      nativeCodecTest("SNAPPY", "snappy", "org.apache.hadoop.io.compress.SnappyCodec");
+    } else {
+      // Hadoop nativelib is not available
+      LOG.debug("Native code not loaded");
       assertFalse(CompressionTest.testCompression("LZO"));
+      assertFalse(CompressionTest.testCompression("LZ4"));
       assertFalse(CompressionTest.testCompression("SNAPPY"));
     }
   }
@@ -121,12 +87,56 @@ public class TestCompressionTest {
     try {
       Thread.currentThread().getContextClassLoader().loadClass(codecClassName);
       return true;
-    }
-    catch (Exception ex) {
+    } catch (Exception ex) {
       return false;
     }
   }
+
+  /**
+   * Verify CompressionTest.testCompression() on a native codec.
+   */
+  private void nativeCodecTest(String codecName, String libName, String codecClassName) {
+    if (isCompressionAvailable(codecClassName)) {
+      try {
+        if (libName != null) {
+          System.loadLibrary(libName);
+        }
+
+        try {
+          Configuration conf = new Configuration();
+          CompressionCodec codec = (CompressionCodec)
+            ReflectionUtils.newInstance(conf.getClassByName(codecClassName), conf);
+
+          DataOutputBuffer compressedDataBuffer = new DataOutputBuffer();
+          CompressionOutputStream deflateFilter = codec.createOutputStream(compressedDataBuffer);
+
+          byte[] data = new byte[1024];
+          DataOutputStream deflateOut = new DataOutputStream(new BufferedOutputStream(deflateFilter));
+          deflateOut.write(data, 0, data.length);
+          deflateOut.flush();
+          deflateFilter.finish();
+
+          // Codec class, codec nativelib and Hadoop nativelib with codec JNIs are present
+          assertTrue(CompressionTest.testCompression(codecName));
+        } catch (UnsatisfiedLinkError e) {
+          // Hadoop nativelib does not have codec JNIs.
+          // cannot assert the codec here because the current logic of
+          // CompressionTest checks only classloading, not the codec
+          // usage.
+          LOG.debug("No JNI for codec '" + codecName + "' " + e.getMessage());
+        } catch (Exception e) {
+          LOG.error(codecName, e);
+        }
+      } catch (UnsatisfiedLinkError e) {
+        // nativelib is not available
+        LOG.debug("Native lib not available: " + codecName);
+        assertFalse(CompressionTest.testCompression(codecName));
+      }
+    } else {
+      // Compression Codec class is not available
+      LOG.debug("Codec class not available: " + codecName);
+      assertFalse(CompressionTest.testCompression(codecName));
+    }
+  }
 }
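
As a usage note, CompressionTest is the utility these tests exercise. The sketch below, not part of this commit, shows how it is typically consumed: the boolean, classload-level probe the assertions above rely on, plus the equivalent command-line check. The class name CodecProbe is invented for illustration.

import org.apache.hadoop.hbase.util.CompressionTest;

public class CodecProbe {
  public static void main(String[] args) {
    // Boolean form used by the tests above: true only if the codec class can
    // be loaded and set up. Results are cached, so a failed codec keeps
    // failing for the life of the JVM (what testExceptionCaching() verifies).
    if (!CompressionTest.testCompression("SNAPPY")) {
      System.err.println("SNAPPY is not usable on this node");
    }
    // Equivalent shell probe, which writes and re-reads a small test file
    // with the requested codec:
    //   hbase org.apache.hadoop.hbase.util.CompressionTest file:///tmp/probe snappy
  }
}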