HBASE-10058 Test for HBASE-9915 (avoid reading index blocks)
git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1546658 13f79535-47bb-0310-9956-ffa450edef68
parent 3f68e316c1
commit 26b32fa593
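The test below measures block reads indirectly through the block cache: every data or index block the scan touches is counted by CacheStats as either a hit or a miss, so the growth of getHitCount() + getMissCount() across the scan equals the number of blocks read. The sketch that follows isolates just that accounting. It is illustrative only (the BlockReadCounter class and its method names are not part of this commit), but it uses only calls that appear in the diff: CacheConfig, its block cache, and CacheStats.

// Illustrative sketch, not part of the commit: the block-read accounting the test relies on.
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.CacheStats;

public class BlockReadCounter {
  private final CacheStats stats;
  private long mark;

  public BlockReadCounter(Configuration conf) {
    // Same lookup the test performs: the stats of the block cache for this configuration.
    this.stats = new CacheConfig(conf).getBlockCache().getStats();
  }

  // Snapshot the current hit+miss total before running a scan.
  public void mark() {
    this.mark = stats.getHitCount() + stats.getMissCount();
  }

  // Blocks (data and index) read from HFiles since mark(); each shows up as a cache hit or a miss.
  public long blocksReadSinceMark() {
    return stats.getHitCount() + stats.getMissCount() - mark;
  }
}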
TestBlocksScanned.java

@@ -18,19 +18,20 @@
 package org.apache.hadoop.hbase.regionserver;
 
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.HBaseTestCase;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.SmallTests;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.io.compress.Compression;
+import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
 import org.apache.hadoop.hbase.io.hfile.CacheStats;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.junit.Assert;
 import org.junit.Test;
@@ -39,7 +40,6 @@ import org.junit.experimental.categories.Category;
 @SuppressWarnings("deprecation")
 @Category(SmallTests.class)
 public class TestBlocksScanned extends HBaseTestCase {
-  private static byte [] TABLE = Bytes.toBytes("TestBlocksScanned");
   private static byte [] FAMILY = Bytes.toBytes("family");
   private static byte [] COL = Bytes.toBytes("col");
   private static byte [] START_KEY = Bytes.toBytes("aaa");
@@ -47,31 +47,53 @@ public class TestBlocksScanned extends HBaseTestCase {
   private static int BLOCK_SIZE = 70;
 
   private static HBaseTestingUtility TEST_UTIL = null;
-  private static HTableDescriptor TESTTABLEDESC = null;
-
-  @Override
-  public void setUp() throws Exception {
-    super.setUp();
-    TEST_UTIL = new HBaseTestingUtility();
-    TESTTABLEDESC = new HTableDescriptor(TableName.valueOf(TABLE));
-
-    TESTTABLEDESC.addFamily(
-        new HColumnDescriptor(FAMILY)
-          .setMaxVersions(10)
-          .setBlockCacheEnabled(true)
-          .setBlocksize(BLOCK_SIZE)
-          .setCompressionType(Compression.Algorithm.NONE)
-    );
-  }
-
-  @Test
-  public void testBlocksScanned() throws Exception {
-    HRegion r = createNewHRegion(TESTTABLEDESC, START_KEY, END_KEY,
+
+  @Override
+  public void setUp() throws Exception {
+    super.setUp();
+
+    TEST_UTIL = new HBaseTestingUtility();
+  }
+
+  @Test
+  public void testBlocksScanned() throws Exception {
+    byte [] tableName = Bytes.toBytes("TestBlocksScanned");
+    HTableDescriptor table = new HTableDescriptor(TableName.valueOf(tableName));
+
+    table.addFamily(
+        new HColumnDescriptor(FAMILY)
+          .setMaxVersions(10)
+          .setBlockCacheEnabled(true)
+          .setBlocksize(BLOCK_SIZE)
+          .setCompressionType(Compression.Algorithm.NONE)
+    );
+    _testBlocksScanned(table);
+  }
+
+  @Test
+  public void testBlocksScannedWithEncoding() throws Exception {
+    byte [] tableName = Bytes.toBytes("TestBlocksScannedWithEncoding");
+    HTableDescriptor table = new HTableDescriptor(TableName.valueOf(tableName));
+
+    table.addFamily(
+        new HColumnDescriptor(FAMILY)
+          .setMaxVersions(10)
+          .setBlockCacheEnabled(true)
+          .setDataBlockEncoding(DataBlockEncoding.FAST_DIFF)
+          .setBlocksize(BLOCK_SIZE)
+          .setCompressionType(Compression.Algorithm.NONE)
+    );
+    _testBlocksScanned(table);
+  }
+
+  private void _testBlocksScanned(HTableDescriptor table) throws Exception {
+    HRegion r = createNewHRegion(table, START_KEY, END_KEY,
         TEST_UTIL.getConfiguration());
     addContent(r, FAMILY, COL);
     r.flushcache();
 
     CacheStats stats = new CacheConfig(TEST_UTIL.getConfiguration()).getBlockCache().getStats();
     long before = stats.getHitCount() + stats.getMissCount();
     // Do simple test of getting one row only first.
     Scan scan = new Scan(Bytes.toBytes("aaa"), Bytes.toBytes("aaz"));
     scan.addColumn(FAMILY, COL);
@@ -83,11 +105,15 @@ public class TestBlocksScanned extends HBaseTestCase {
     s.close();
 
     int expectResultSize = 'z' - 'a';
-    Assert.assertEquals(expectResultSize, results.size());
+    assertEquals(expectResultSize, results.size());
+
+    int kvPerBlock = (int) Math.ceil(BLOCK_SIZE /
+        (double) KeyValueUtil.ensureKeyValue(results.get(0)).getLength());
+    Assert.assertEquals(2, kvPerBlock);
+
+    long expectDataBlockRead = (long) Math.ceil(expectResultSize / (double) kvPerBlock);
+    long expectIndexBlockRead = expectDataBlockRead;
+
+    assertEquals(expectIndexBlockRead+expectDataBlockRead, stats.getHitCount() + stats.getMissCount() - before);
   }
 }
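For reference, the arithmetic behind the new assertion, with the constants taken from the diff (the size of the KeyValues written by addContent is pinned down only indirectly, through the kvPerBlock check). The ExpectedBlockReads class name is illustrative, not part of the commit.

// Worked numbers for the assertion above; the values follow from the diff's constants.
public class ExpectedBlockReads {
  public static void main(String[] args) {
    int expectResultSize = 'z' - 'a';   // 25 cells expected from the scan over [aaa, aaz)
    int kvPerBlock = 2;                 // asserted in the test: Assert.assertEquals(2, kvPerBlock)
    long expectDataBlockRead =
        (long) Math.ceil(expectResultSize / (double) kvPerBlock);  // ceil(25 / 2.0) = 13 data blocks
    long expectIndexBlockRead = expectDataBlockRead;               // one index block access per data block read
    // The test expects block-cache hits + misses to grow by 13 + 13 = 26 across the scan.
    System.out.println(expectDataBlockRead + expectIndexBlockRead); // prints 26
  }
}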