HBASE-21498 Master OOM when SplitTableRegionProcedure new CacheConfig and instantiate a new BlockCache

Guanghao Zhang 2018-11-19 22:10:43 +08:00
parent 8339e44361
commit 27a0f205c5
19 changed files with 94 additions and 108 deletions
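
In short: the CacheConfig constructors no longer instantiate the global BlockCache as a side effect, so a CacheConfig created on the master (as SplitTableRegionProcedure does) can no longer allocate a cache the master never uses. Callers that actually want the process-wide cache must call CacheConfig.instantiateBlockCache(conf) first; the constructors only pick up GLOBAL_BLOCK_CACHE_INSTANCE. A minimal sketch of the resulting call order (CacheConfig, instantiateBlockCache and getBlockCache are taken from the patch below; the surrounding snippet is illustrative only):

    Configuration conf = HBaseConfiguration.create();

    // Create the process-wide BlockCache exactly once, and only on servers
    // that serve data; a master carrying no tables skips this entirely.
    CacheConfig.instantiateBlockCache(conf);

    // Constructing CacheConfig is now side-effect free: it reuses the
    // existing GLOBAL_BLOCK_CACHE_INSTANCE instead of allocating a cache.
    CacheConfig cacheConf = new CacheConfig(conf);
    BlockCache cache = cacheConf.getBlockCache(); // null if never instantiated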

CacheConfig.java

@@ -215,7 +215,7 @@ public class CacheConfig {
    * @param family column family configuration
    */
   public CacheConfig(Configuration conf, ColumnFamilyDescriptor family) {
-    this(CacheConfig.instantiateBlockCache(conf),
+    this(GLOBAL_BLOCK_CACHE_INSTANCE,
         conf.getBoolean(CACHE_DATA_ON_READ_KEY, DEFAULT_CACHE_DATA_ON_READ)
            && family.isBlockCacheEnabled(),
         family.isInMemory(),
@@ -245,14 +245,10 @@ public class CacheConfig {
    * @param conf hbase configuration
    */
   public CacheConfig(Configuration conf) {
-    this(conf, true);
-  }
-
-  public CacheConfig(Configuration conf, boolean enableBlockCache) {
-    this(conf, enableBlockCache,
+    this(GLOBAL_BLOCK_CACHE_INSTANCE,
         conf.getBoolean(CACHE_DATA_ON_READ_KEY, DEFAULT_CACHE_DATA_ON_READ),
         DEFAULT_IN_MEMORY, // This is a family-level setting so can't be set
                            // strictly from conf
         conf.getBoolean(CACHE_BLOCKS_ON_WRITE_KEY, DEFAULT_CACHE_DATA_ON_WRITE),
         conf.getBoolean(CACHE_INDEX_BLOCKS_ON_WRITE_KEY, DEFAULT_CACHE_INDEXES_ON_WRITE),
         conf.getBoolean(CACHE_BLOOM_BLOCKS_ON_WRITE_KEY, DEFAULT_CACHE_BLOOMS_ON_WRITE),
@@ -263,28 +259,6 @@ public class CacheConfig {
     LOG.info("Created cacheConfig: " + this);
   }
 
-  private CacheConfig(Configuration conf, boolean enableBlockCache,
-      final boolean cacheDataOnRead, final boolean inMemory,
-      final boolean cacheDataOnWrite, final boolean cacheIndexesOnWrite,
-      final boolean cacheBloomsOnWrite, final boolean evictOnClose,
-      final boolean cacheDataCompressed, final boolean prefetchOnOpen,
-      final boolean dropBehindCompaction) {
-    if (enableBlockCache) {
-      this.blockCache = CacheConfig.instantiateBlockCache(conf);
-    } else {
-      this.blockCache = null;
-    }
-    this.cacheDataOnRead = cacheDataOnRead;
-    this.inMemory = inMemory;
-    this.cacheDataOnWrite = cacheDataOnWrite;
-    this.cacheIndexesOnWrite = cacheIndexesOnWrite;
-    this.cacheBloomsOnWrite = cacheBloomsOnWrite;
-    this.evictOnClose = evictOnClose;
-    this.cacheDataCompressed = cacheDataCompressed;
-    this.prefetchOnOpen = prefetchOnOpen;
-    this.dropBehindCompaction = dropBehindCompaction;
-  }
-
   /**
    * Create a block cache configuration with the specified cache and configuration parameters.
    * @param blockCache reference to block cache, null if completely disabled
@@ -669,12 +643,18 @@ public class CacheConfig {
    * @return The block cache or <code>null</code>.
    */
   public static synchronized BlockCache instantiateBlockCache(Configuration conf) {
-    if (GLOBAL_BLOCK_CACHE_INSTANCE != null) return GLOBAL_BLOCK_CACHE_INSTANCE;
-    if (blockCacheDisabled) return null;
+    if (GLOBAL_BLOCK_CACHE_INSTANCE != null) {
+      return GLOBAL_BLOCK_CACHE_INSTANCE;
+    }
+    if (blockCacheDisabled) {
+      return null;
+    }
     LruBlockCache onHeapCache = getOnHeapCacheInternal(conf);
     // blockCacheDisabled is set as a side-effect of getL1Internal(), so check it again after the
     // call.
-    if (blockCacheDisabled) return null;
+    if (blockCacheDisabled) {
+      return null;
+    }
     boolean useExternal = conf.getBoolean(EXTERNAL_BLOCKCACHE_KEY, EXTERNAL_BLOCKCACHE_DEFAULT);
     if (useExternal) {
       L2_CACHE_INSTANCE = getExternalBlockcache(conf);

MobCacheConfig.java

@@ -42,11 +42,6 @@ public class MobCacheConfig extends CacheConfig {
     instantiateMobFileCache(conf);
   }
 
-  public MobCacheConfig(Configuration conf, boolean needBlockCache) {
-    super(conf, needBlockCache);
-    instantiateMobFileCache(conf);
-  }
-
   /**
    * Instantiates the MobFileCache.
    * @param conf The current configuration.

HRegionServer.java

@@ -585,12 +585,17 @@ public class HRegionServer extends HasThread implements
     // init superusers and add the server principal (if using security)
     // or process owner as default super user.
     Superusers.initialize(conf);
     regionServerAccounting = new RegionServerAccounting(conf);
     boolean isMasterNotCarryTable =
         this instanceof HMaster && !LoadBalancer.isTablesOnMaster(conf);
-    cacheConfig = new CacheConfig(conf, !isMasterNotCarryTable);
-    mobCacheConfig = new MobCacheConfig(conf, !isMasterNotCarryTable);
+    // no need to instantiate global block cache when master not carry table
+    if (!isMasterNotCarryTable) {
+      CacheConfig.instantiateBlockCache(conf);
+    }
+    cacheConfig = new CacheConfig(conf);
+    mobCacheConfig = new MobCacheConfig(conf);
 
     uncaughtExceptionHandler = new UncaughtExceptionHandler() {
       @Override
       public void uncaughtException(Thread t, Throwable e) {

TestEncodedSeekers.java

@@ -112,6 +112,7 @@ public class TestEncodedSeekers {
     if(includeTags) {
       testUtil.getConfiguration().setInt(HFile.FORMAT_VERSION_KEY, 3);
     }
+    CacheConfig.instantiateBlockCache(testUtil.getConfiguration());
     LruBlockCache cache =
         (LruBlockCache)new CacheConfig(testUtil.getConfiguration()).getBlockCache();
     cache.clearCache();

TestBlockCacheReporting.java

@@ -19,8 +19,6 @@ package org.apache.hadoop.hbase.io.hfile;
 import static org.junit.Assert.*;
 
-import com.fasterxml.jackson.core.JsonGenerationException;
-import com.fasterxml.jackson.databind.JsonMappingException;
 import java.io.IOException;
 import java.util.Map;
 import java.util.NavigableSet;
@@ -84,9 +82,10 @@ public class TestBlockCacheReporting {
   }
 
   @Test
-  public void testBucketCache() throws JsonGenerationException, JsonMappingException, IOException {
+  public void testBucketCache() throws IOException {
     this.conf.set(HConstants.BUCKET_CACHE_IOENGINE_KEY, "offheap");
     this.conf.setInt(HConstants.BUCKET_CACHE_SIZE_KEY, 100);
+    CacheConfig.instantiateBlockCache(this.conf);
     CacheConfig cc = new CacheConfig(this.conf);
     assertTrue(cc.getBlockCache() instanceof CombinedBlockCache);
     logPerBlock(cc.getBlockCache());
@@ -102,7 +101,8 @@ public class TestBlockCacheReporting {
   }
 
   @Test
-  public void testLruBlockCache() throws JsonGenerationException, JsonMappingException, IOException {
+  public void testLruBlockCache() throws IOException {
+    CacheConfig.instantiateBlockCache(this.conf);
     CacheConfig cc = new CacheConfig(this.conf);
     assertTrue(cc.isBlockCacheEnabled());
     assertTrue(CacheConfig.DEFAULT_IN_MEMORY == cc.isInMemory());
@@ -131,8 +131,7 @@ public class TestBlockCacheReporting {
     }
   }
 
-  private void logPerFile(final BlockCacheUtil.CachedBlocksByFile cbsbf)
-      throws JsonGenerationException, JsonMappingException, IOException {
+  private void logPerFile(final BlockCacheUtil.CachedBlocksByFile cbsbf) throws IOException {
     for (Map.Entry<String, NavigableSet<CachedBlock>> e:
         cbsbf.getCachedBlockStatsByFile().entrySet()) {
       int count = 0;
@@ -154,10 +153,9 @@ public class TestBlockCacheReporting {
     }
   }
 
-  private BlockCacheUtil.CachedBlocksByFile logPerBlock(final BlockCache bc)
-      throws JsonGenerationException, JsonMappingException, IOException {
+  private BlockCacheUtil.CachedBlocksByFile logPerBlock(final BlockCache bc) throws IOException {
     BlockCacheUtil.CachedBlocksByFile cbsbf = new BlockCacheUtil.CachedBlocksByFile();
-    for (CachedBlock cb: bc) {
+    for (CachedBlock cb : bc) {
       LOG.info(cb.toString());
       LOG.info(BlockCacheUtil.toJSON(bc));
       cbsbf.update(cb);

TestCacheConfig.java

@@ -209,6 +209,7 @@ public class TestCacheConfig {
   @Test
   public void testDisableCacheDataBlock() throws IOException {
     Configuration conf = HBaseConfiguration.create();
+    CacheConfig.instantiateBlockCache(conf);
     CacheConfig cacheConfig = new CacheConfig(conf);
     assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.DATA));
     assertFalse(cacheConfig.shouldCacheCompressed(BlockCategory.DATA));
@@ -274,6 +275,7 @@ public class TestCacheConfig {
   @Test
   public void testCacheConfigDefaultLRUBlockCache() {
+    CacheConfig.instantiateBlockCache(this.conf);
     CacheConfig cc = new CacheConfig(this.conf);
     assertTrue(cc.isBlockCacheEnabled());
     assertTrue(CacheConfig.DEFAULT_IN_MEMORY == cc.isInMemory());
@@ -307,6 +309,7 @@ public class TestCacheConfig {
   private void doBucketCacheConfigTest() {
     final int bcSize = 100;
     this.conf.setInt(HConstants.BUCKET_CACHE_SIZE_KEY, bcSize);
+    CacheConfig.instantiateBlockCache(this.conf);
     CacheConfig cc = new CacheConfig(this.conf);
     basicBlockCacheOps(cc, false, false);
     assertTrue(cc.getBlockCache() instanceof CombinedBlockCache);
@@ -338,6 +341,7 @@ public class TestCacheConfig {
     long bcExpectedSize = 100 * 1024 * 1024; // MB.
     assertTrue(lruExpectedSize < bcExpectedSize);
     this.conf.setInt(HConstants.BUCKET_CACHE_SIZE_KEY, bcSize);
+    CacheConfig.instantiateBlockCache(this.conf);
     CacheConfig cc = new CacheConfig(this.conf);
     basicBlockCacheOps(cc, false, false);
     assertTrue(cc.getBlockCache() instanceof CombinedBlockCache);

TestCacheOnWrite.java

@@ -160,6 +160,7 @@ public class TestCacheOnWrite {
     Configuration conf = TEST_UTIL.getConfiguration();
     List<BlockCache> blockcaches = new ArrayList<>();
     // default
+    CacheConfig.instantiateBlockCache(conf);
     blockcaches.add(new CacheConfig(conf).getBlockCache());
 
     //set LruBlockCache.LRU_HARD_CAPACITY_LIMIT_FACTOR_CONFIG_NAME to 2.0f due to HBASE-16287
@@ -228,7 +229,6 @@ public class TestCacheOnWrite {
     conf.setBoolean(CacheConfig.CACHE_DATA_BLOCKS_COMPRESSED_KEY, cacheCompressedData);
     cowType.modifyConf(conf);
     fs = HFileSystem.get(conf);
-    CacheConfig.GLOBAL_BLOCK_CACHE_INSTANCE = blockCache;
     cacheConf =
         new CacheConfig(blockCache, true, true, cowType.shouldBeCached(BlockType.DATA),
             cowType.shouldBeCached(BlockType.LEAF_INDEX),

TestForceCacheImportantBlocks.java

@@ -106,6 +106,7 @@ public class TestForceCacheImportantBlocks {
     // Make sure we make a new one each time.
     CacheConfig.clearGlobalInstances();
     HFile.DATABLOCK_READ_COUNT.reset();
+    CacheConfig.instantiateBlockCache(TEST_UTIL.getConfiguration());
   }
 
   @Test

TestHFileBlockIndex.java

@@ -524,67 +524,59 @@ public class TestHFileBlockIndex {
    * @throws IOException
    */
   @Test
   public void testMidKeyOnLeafIndexBlockBoundary() throws IOException {
-    Path hfilePath = new Path(TEST_UTIL.getDataTestDir(),
-        "hfile_for_midkey");
-    int maxChunkSize = 512;
-    conf.setInt(HFileBlockIndex.MAX_CHUNK_SIZE_KEY, maxChunkSize);
-    // should open hfile.block.index.cacheonwrite
-    conf.setBoolean(CacheConfig.CACHE_INDEX_BLOCKS_ON_WRITE_KEY, true);
-
-    CacheConfig cacheConf = new CacheConfig(conf);
-    BlockCache blockCache = cacheConf.getBlockCache();
-    // Evict all blocks that were cached-on-write by the previous invocation.
-    blockCache.evictBlocksByHfileName(hfilePath.getName());
-    // Write the HFile
-    {
-      HFileContext meta = new HFileContextBuilder()
-                          .withBlockSize(SMALL_BLOCK_SIZE)
-                          .withCompression(Algorithm.NONE)
-                          .withDataBlockEncoding(DataBlockEncoding.NONE)
-                          .build();
-      HFile.Writer writer =
-          HFile.getWriterFactory(conf, cacheConf)
-              .withPath(fs, hfilePath)
-              .withFileContext(meta)
-              .create();
-      Random rand = new Random(19231737);
-      byte[] family = Bytes.toBytes("f");
-      byte[] qualifier = Bytes.toBytes("q");
-      int kvNumberToBeWritten = 16;
-      // the new generated hfile will contain 2 leaf-index blocks and 16 data blocks,
-      // midkey is just on the boundary of the first leaf-index block
-      for (int i = 0; i < kvNumberToBeWritten; ++i) {
-        byte[] row = RandomKeyValueUtil.randomOrderedFixedLengthKey(rand, i, 30);
-
-        // Key will be interpreted by KeyValue.KEY_COMPARATOR
-        KeyValue kv =
-            new KeyValue(row, family, qualifier, EnvironmentEdgeManager.currentTime(),
-                RandomKeyValueUtil.randomFixedLengthValue(rand, SMALL_BLOCK_SIZE));
-        writer.append(kv);
-      }
-      writer.close();
-    }
-
-    // close hfile.block.index.cacheonwrite
-    conf.setBoolean(CacheConfig.CACHE_INDEX_BLOCKS_ON_WRITE_KEY, false);
-
-    // Read the HFile
-    HFile.Reader reader = HFile.createReader(fs, hfilePath, cacheConf, true, conf);
-
-    boolean hasArrayIndexOutOfBoundsException = false;
-    try {
-      // get the mid-key.
-      reader.midKey();
-    } catch (ArrayIndexOutOfBoundsException e) {
-      hasArrayIndexOutOfBoundsException = true;
-    } finally {
-      reader.close();
-    }
-
-    // to check if ArrayIndexOutOfBoundsException occurred
-    assertFalse(hasArrayIndexOutOfBoundsException);
+    Path hfilePath = new Path(TEST_UTIL.getDataTestDir(), "hfile_for_midkey");
+    int maxChunkSize = 512;
+    conf.setInt(HFileBlockIndex.MAX_CHUNK_SIZE_KEY, maxChunkSize);
+    // should open hfile.block.index.cacheonwrite
+    conf.setBoolean(CacheConfig.CACHE_INDEX_BLOCKS_ON_WRITE_KEY, true);
+    CacheConfig.instantiateBlockCache(conf);
+    CacheConfig cacheConf = new CacheConfig(conf);
+    BlockCache blockCache = cacheConf.getBlockCache();
+    // Evict all blocks that were cached-on-write by the previous invocation.
+    blockCache.evictBlocksByHfileName(hfilePath.getName());
+    // Write the HFile
+    HFileContext meta =
+        new HFileContextBuilder().withBlockSize(SMALL_BLOCK_SIZE).withCompression(Algorithm.NONE)
+            .withDataBlockEncoding(DataBlockEncoding.NONE).build();
+    HFile.Writer writer =
+        HFile.getWriterFactory(conf, cacheConf).withPath(fs, hfilePath).withFileContext(meta)
+            .create();
+    Random rand = new Random(19231737);
+    byte[] family = Bytes.toBytes("f");
+    byte[] qualifier = Bytes.toBytes("q");
+    int kvNumberToBeWritten = 16;
+    // the new generated hfile will contain 2 leaf-index blocks and 16 data blocks,
+    // midkey is just on the boundary of the first leaf-index block
+    for (int i = 0; i < kvNumberToBeWritten; ++i) {
+      byte[] row = RandomKeyValueUtil.randomOrderedFixedLengthKey(rand, i, 30);
+
+      // Key will be interpreted by KeyValue.KEY_COMPARATOR
+      KeyValue kv = new KeyValue(row, family, qualifier, EnvironmentEdgeManager.currentTime(),
+          RandomKeyValueUtil.randomFixedLengthValue(rand, SMALL_BLOCK_SIZE));
+      writer.append(kv);
+    }
+    writer.close();
+
+    // close hfile.block.index.cacheonwrite
+    conf.setBoolean(CacheConfig.CACHE_INDEX_BLOCKS_ON_WRITE_KEY, false);
+
+    // Read the HFile
+    HFile.Reader reader = HFile.createReader(fs, hfilePath, cacheConf, true, conf);
+
+    boolean hasArrayIndexOutOfBoundsException = false;
+    try {
+      // get the mid-key.
+      reader.midKey();
+    } catch (ArrayIndexOutOfBoundsException e) {
+      hasArrayIndexOutOfBoundsException = true;
+    } finally {
+      reader.close();
+    }
+
+    // to check if ArrayIndexOutOfBoundsException occurred
+    assertFalse(hasArrayIndexOutOfBoundsException);
   }
 
   /**
    * Testing block index through the HFile writer/reader APIs. Allows to test
@@ -597,6 +589,7 @@ public class TestHFileBlockIndex {
   public void testHFileWriterAndReader() throws IOException {
     Path hfilePath = new Path(TEST_UTIL.getDataTestDir(),
         "hfile_for_block_index");
+    CacheConfig.instantiateBlockCache(conf);
     CacheConfig cacheConf = new CacheConfig(conf);
     BlockCache blockCache = cacheConf.getBlockCache();

TestPrefetch.java

@@ -64,6 +64,7 @@ public class TestPrefetch {
     conf.setBoolean(CacheConfig.PREFETCH_BLOCKS_ON_OPEN_KEY, true);
     fs = HFileSystem.get(conf);
     CacheConfig.blockCacheDisabled = false;
+    CacheConfig.instantiateBlockCache(conf);
     cacheConf = new CacheConfig(conf);
   }

TestScannerFromBucketCache.java

@@ -88,6 +88,7 @@ public class TestScannerFromBucketCache {
       conf.setFloat("hbase.regionserver.global.memstore.size", 0.1f);
     }
     tableName = TableName.valueOf(name.getMethodName());
+    CacheConfig.instantiateBlockCache(conf);
   }
 
   @After

TestScannerSelectionUsingKeyRange.java

@@ -124,6 +124,7 @@ public class TestScannerSelectionUsingKeyRange {
     Scan scan = new Scan(Bytes.toBytes("aaa"), Bytes.toBytes("aaz"));
     CacheConfig.blockCacheDisabled = false;
+    CacheConfig.instantiateBlockCache(conf);
     CacheConfig cacheConf = new CacheConfig(conf);
     LruBlockCache cache = (LruBlockCache) cacheConf.getBlockCache();
     cache.clearCache();

TestScannerSelectionUsingTTL.java

@@ -104,6 +104,7 @@ public class TestScannerSelectionUsingTTL {
   @Test
   public void testScannerSelection() throws IOException {
     Configuration conf = TEST_UTIL.getConfiguration();
+    CacheConfig.instantiateBlockCache(conf);
     conf.setBoolean("hbase.store.delete.expired.storefile", false);
     HColumnDescriptor hcd =
         new HColumnDescriptor(FAMILY_BYTES)

TestBlocksRead.java

@@ -76,6 +76,7 @@ public class TestBlocksRead {
   public static void setUp() throws Exception {
     // disable compactions in this test.
     TEST_UTIL.getConfiguration().setInt("hbase.hstore.compactionThreshold", 10000);
+    CacheConfig.instantiateBlockCache(TEST_UTIL.getConfiguration());
   }
 
   @AfterClass

TestBlocksScanned.java

@@ -61,8 +61,8 @@ public class TestBlocksScanned extends HBaseTestCase {
   @Before
   public void setUp() throws Exception {
     super.setUp();
-
     TEST_UTIL = new HBaseTestingUtility();
+    CacheConfig.instantiateBlockCache(TEST_UTIL.getConfiguration());
   }
 
   @Test

TestCacheOnWriteInSchema.java

@@ -161,6 +161,7 @@ public class TestCacheOnWriteInSchema {
     conf.setBoolean(CacheConfig.CACHE_BLOCKS_ON_WRITE_KEY, false);
     conf.setBoolean(CacheConfig.CACHE_INDEX_BLOCKS_ON_WRITE_KEY, false);
     conf.setBoolean(CacheConfig.CACHE_BLOOM_BLOCKS_ON_WRITE_KEY, false);
+    CacheConfig.instantiateBlockCache(conf);
     fs = HFileSystem.get(conf);

TestCompoundBloomFilter.java

@@ -139,6 +139,7 @@ public class TestCompoundBloomFilter {
     fs = FileSystem.get(conf);
+    CacheConfig.instantiateBlockCache(conf);
     cacheConf = new CacheConfig(conf);
     blockCache = cacheConf.getBlockCache();
     assertNotNull(blockCache);

TestHStoreFile.java

@@ -924,7 +924,6 @@ public class TestHStoreFile extends HBaseTestCase {
     scan.setTimeRange(27, 50);
     scan.setColumnFamilyTimeRange(family, 7, 50);
     assertTrue(scanner.shouldUseScanner(scan, store, Long.MIN_VALUE));
-
   }
 
   @Test
@@ -935,6 +934,7 @@ public class TestHStoreFile extends HBaseTestCase {
     Path baseDir = new Path(new Path(testDir, "7e0102"),"twoCOWEOC");
 
     // Grab the block cache and get the initial hit/miss counts
+    CacheConfig.instantiateBlockCache(conf);
     BlockCache bc = new CacheConfig(conf).getBlockCache();
     assertNotNull(bc);
     CacheStats cs = bc.getStats();

TestRecoveredEdits.java

@@ -40,6 +40,7 @@ import org.apache.hadoop.hbase.PrivateCellUtil;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.io.hfile.CacheConfig;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.FSUtils;
@@ -80,6 +81,7 @@ public class TestRecoveredEdits {
   @Test
   public void testReplayWorksThoughLotsOfFlushing() throws
       IOException {
+    CacheConfig.instantiateBlockCache(TEST_UTIL.getConfiguration());
     for(MemoryCompactionPolicy policy : MemoryCompactionPolicy.values()) {
       testReplayWorksWithMemoryCompactionPolicy(policy);
     }