diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
index 147568e1957..a0225524219 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
@@ -215,7 +215,7 @@ public class CacheConfig {
    * @param family column family configuration
    */
   public CacheConfig(Configuration conf, ColumnFamilyDescriptor family) {
-    this(CacheConfig.instantiateBlockCache(conf),
+    this(GLOBAL_BLOCK_CACHE_INSTANCE,
         conf.getBoolean(CACHE_DATA_ON_READ_KEY, DEFAULT_CACHE_DATA_ON_READ)
            && family.isBlockCacheEnabled(),
         family.isInMemory(),
@@ -245,14 +245,10 @@ public class CacheConfig {
    * @param conf hbase configuration
    */
   public CacheConfig(Configuration conf) {
-    this(conf, true);
-  }
-
-  public CacheConfig(Configuration conf, boolean enableBlockCache) {
-    this(conf, enableBlockCache,
+    this(GLOBAL_BLOCK_CACHE_INSTANCE,
         conf.getBoolean(CACHE_DATA_ON_READ_KEY, DEFAULT_CACHE_DATA_ON_READ),
         DEFAULT_IN_MEMORY, // This is a family-level setting so can't be set
-        // strictly from conf
+            // strictly from conf
         conf.getBoolean(CACHE_BLOCKS_ON_WRITE_KEY, DEFAULT_CACHE_DATA_ON_WRITE),
         conf.getBoolean(CACHE_INDEX_BLOCKS_ON_WRITE_KEY, DEFAULT_CACHE_INDEXES_ON_WRITE),
         conf.getBoolean(CACHE_BLOOM_BLOCKS_ON_WRITE_KEY, DEFAULT_CACHE_BLOOMS_ON_WRITE),
@@ -263,28 +259,6 @@ public class CacheConfig {
     LOG.info("Created cacheConfig: " + this);
   }
 
-  private CacheConfig(Configuration conf, boolean enableBlockCache,
-      final boolean cacheDataOnRead, final boolean inMemory,
-      final boolean cacheDataOnWrite, final boolean cacheIndexesOnWrite,
-      final boolean cacheBloomsOnWrite, final boolean evictOnClose,
-      final boolean cacheDataCompressed, final boolean prefetchOnOpen,
-      final boolean dropBehindCompaction) {
-    if (enableBlockCache) {
-      this.blockCache = CacheConfig.instantiateBlockCache(conf);
-    } else {
-      this.blockCache = null;
-    }
-    this.cacheDataOnRead = cacheDataOnRead;
-    this.inMemory = inMemory;
-    this.cacheDataOnWrite = cacheDataOnWrite;
-    this.cacheIndexesOnWrite = cacheIndexesOnWrite;
-    this.cacheBloomsOnWrite = cacheBloomsOnWrite;
-    this.evictOnClose = evictOnClose;
-    this.cacheDataCompressed = cacheDataCompressed;
-    this.prefetchOnOpen = prefetchOnOpen;
-    this.dropBehindCompaction = dropBehindCompaction;
-  }
-
   /**
    * Create a block cache configuration with the specified cache and configuration parameters.
    * @param blockCache reference to block cache, null if completely disabled
@@ -669,12 +643,18 @@ public class CacheConfig {
    * @return The block cache or null.
    */
   public static synchronized BlockCache instantiateBlockCache(Configuration conf) {
-    if (GLOBAL_BLOCK_CACHE_INSTANCE != null) return GLOBAL_BLOCK_CACHE_INSTANCE;
-    if (blockCacheDisabled) return null;
+    if (GLOBAL_BLOCK_CACHE_INSTANCE != null) {
+      return GLOBAL_BLOCK_CACHE_INSTANCE;
+    }
+    if (blockCacheDisabled) {
+      return null;
+    }
     LruBlockCache onHeapCache = getOnHeapCacheInternal(conf);
     // blockCacheDisabled is set as a side-effect of getL1Internal(), so check it again after the
     // call.
-    if (blockCacheDisabled) return null;
+    if (blockCacheDisabled) {
+      return null;
+    }
     boolean useExternal = conf.getBoolean(EXTERNAL_BLOCKCACHE_KEY, EXTERNAL_BLOCKCACHE_DEFAULT);
     if (useExternal) {
       L2_CACHE_INSTANCE = getExternalBlockcache(conf);
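Note: with the change above, the CacheConfig constructors no longer create the global block cache as a side effect; they only read GLOBAL_BLOCK_CACHE_INSTANCE, which instantiateBlockCache(conf) creates on first call and returns unchanged afterwards. A minimal sketch of the new call order (the surrounding setup is illustrative, not part of the patch):

    Configuration conf = HBaseConfiguration.create();
    // Create the process-wide block cache exactly once; repeated calls return the same instance.
    BlockCache cache = CacheConfig.instantiateBlockCache(conf);
    // Safe now: the constructor reads GLOBAL_BLOCK_CACHE_INSTANCE instead of creating a cache.
    CacheConfig cacheConf = new CacheConfig(conf);
    assert cacheConf.getBlockCache() == cache;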
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobCacheConfig.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobCacheConfig.java
index 971bb929630..2305ebac96f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobCacheConfig.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mob/MobCacheConfig.java
@@ -42,11 +42,6 @@ public class MobCacheConfig extends CacheConfig {
     instantiateMobFileCache(conf);
   }
 
-  public MobCacheConfig(Configuration conf, boolean needBlockCache) {
-    super(conf, needBlockCache);
-    instantiateMobFileCache(conf);
-  }
-
   /**
    * Instantiates the MobFileCache.
    * @param conf The current configuration.
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
index c6e3eee9904..b9d606d74b9 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
@@ -595,12 +595,17 @@ public class HRegionServer extends HasThread implements
     // init superusers and add the server principal (if using security)
     // or process owner as default super user.
     Superusers.initialize(conf);
-
     regionServerAccounting = new RegionServerAccounting(conf);
+
     boolean isMasterNotCarryTable =
         this instanceof HMaster && !LoadBalancer.isTablesOnMaster(conf);
-    cacheConfig = new CacheConfig(conf, !isMasterNotCarryTable);
-    mobCacheConfig = new MobCacheConfig(conf, !isMasterNotCarryTable);
+    // no need to instantiate the global block cache when the master does not carry tables
+    if (!isMasterNotCarryTable) {
+      CacheConfig.instantiateBlockCache(conf);
+    }
+    cacheConfig = new CacheConfig(conf);
+    mobCacheConfig = new MobCacheConfig(conf);
+
     uncaughtExceptionHandler = new UncaughtExceptionHandler() {
       @Override
       public void uncaughtException(Thread t, Throwable e) {
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestEncodedSeekers.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestEncodedSeekers.java
index b757e1cdf90..11d7bb4eb5a 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestEncodedSeekers.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/encoding/TestEncodedSeekers.java
@@ -112,6 +112,7 @@ public class TestEncodedSeekers {
     if(includeTags) {
       testUtil.getConfiguration().setInt(HFile.FORMAT_VERSION_KEY, 3);
     }
+    CacheConfig.instantiateBlockCache(testUtil.getConfiguration());
     LruBlockCache cache =
         (LruBlockCache)new CacheConfig(testUtil.getConfiguration()).getBlockCache();
     cache.clearCache();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestBlockCacheReporting.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestBlockCacheReporting.java
index b8b5e883d26..19919e0d8fd 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestBlockCacheReporting.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestBlockCacheReporting.java
@@ -19,8 +19,6 @@ package org.apache.hadoop.hbase.io.hfile;
 
 import static org.junit.Assert.*;
 
-import com.fasterxml.jackson.core.JsonGenerationException;
-import com.fasterxml.jackson.databind.JsonMappingException;
 import java.io.IOException;
 import java.util.Map;
 import java.util.NavigableSet;
@@ -84,9 +82,10 @@ public class TestBlockCacheReporting {
   }
 
   @Test
-  public void testBucketCache() throws JsonGenerationException, JsonMappingException, IOException {
+  public void testBucketCache() throws IOException {
     this.conf.set(HConstants.BUCKET_CACHE_IOENGINE_KEY, "offheap");
     this.conf.setInt(HConstants.BUCKET_CACHE_SIZE_KEY, 100);
+    CacheConfig.instantiateBlockCache(this.conf);
     CacheConfig cc = new CacheConfig(this.conf);
     assertTrue(cc.getBlockCache() instanceof CombinedBlockCache);
     logPerBlock(cc.getBlockCache());
@@ -102,7 +101,8 @@ public class TestBlockCacheReporting {
   }
 
   @Test
-  public void testLruBlockCache() throws JsonGenerationException, JsonMappingException, IOException {
+  public void testLruBlockCache() throws IOException {
+    CacheConfig.instantiateBlockCache(this.conf);
     CacheConfig cc = new CacheConfig(this.conf);
     assertTrue(cc.isBlockCacheEnabled());
     assertTrue(CacheConfig.DEFAULT_IN_MEMORY == cc.isInMemory());
@@ -131,8 +131,7 @@ public class TestBlockCacheReporting {
     }
   }
 
-  private void logPerFile(final BlockCacheUtil.CachedBlocksByFile cbsbf)
-      throws JsonGenerationException, JsonMappingException, IOException {
+  private void logPerFile(final BlockCacheUtil.CachedBlocksByFile cbsbf) throws IOException {
     for (Map.Entry<String, NavigableSet<CachedBlock>> e:
         cbsbf.getCachedBlockStatsByFile().entrySet()) {
       int count = 0;
@@ -154,10 +153,9 @@ public class TestBlockCacheReporting {
     }
   }
 
-  private BlockCacheUtil.CachedBlocksByFile logPerBlock(final BlockCache bc)
-      throws JsonGenerationException, JsonMappingException, IOException {
+  private BlockCacheUtil.CachedBlocksByFile logPerBlock(final BlockCache bc) throws IOException {
     BlockCacheUtil.CachedBlocksByFile cbsbf = new BlockCacheUtil.CachedBlocksByFile();
-    for (CachedBlock cb: bc) {
+    for (CachedBlock cb : bc) {
       LOG.info(cb.toString());
       LOG.info(BlockCacheUtil.toJSON(bc));
       cbsbf.update(cb);
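Note: because nothing instantiates the cache implicitly anymore, a CacheConfig built before instantiateBlockCache(conf) runs (for example on a master that does not carry tables, per the HRegionServer hunk above) reports no block cache. Callers should treat getBlockCache() as nullable; a hedged sketch of the defensive pattern (the print statement is illustrative only):

    CacheConfig cacheConf = new CacheConfig(conf);
    BlockCache bc = cacheConf.getBlockCache();
    if (bc != null) {
      // Only touch the cache when the global instance was created at startup.
      System.out.println("Block cache stats: " + bc.getStats());
    }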
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.java
index f84a3194398..7b6bbb34b1c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheConfig.java
@@ -209,6 +209,7 @@ public class TestCacheConfig {
   @Test
   public void testDisableCacheDataBlock() throws IOException {
     Configuration conf = HBaseConfiguration.create();
+    CacheConfig.instantiateBlockCache(conf);
     CacheConfig cacheConfig = new CacheConfig(conf);
     assertTrue(cacheConfig.shouldCacheBlockOnRead(BlockCategory.DATA));
     assertFalse(cacheConfig.shouldCacheCompressed(BlockCategory.DATA));
@@ -274,6 +275,7 @@ public class TestCacheConfig {
 
   @Test
   public void testCacheConfigDefaultLRUBlockCache() {
+    CacheConfig.instantiateBlockCache(this.conf);
     CacheConfig cc = new CacheConfig(this.conf);
     assertTrue(cc.isBlockCacheEnabled());
     assertTrue(CacheConfig.DEFAULT_IN_MEMORY == cc.isInMemory());
@@ -307,6 +309,7 @@ public class TestCacheConfig {
   private void doBucketCacheConfigTest() {
     final int bcSize = 100;
     this.conf.setInt(HConstants.BUCKET_CACHE_SIZE_KEY, bcSize);
+    CacheConfig.instantiateBlockCache(this.conf);
     CacheConfig cc = new CacheConfig(this.conf);
     basicBlockCacheOps(cc, false, false);
     assertTrue(cc.getBlockCache() instanceof CombinedBlockCache);
@@ -338,6 +341,7 @@ public class TestCacheConfig {
     long bcExpectedSize = 100 * 1024 * 1024; // MB.
     assertTrue(lruExpectedSize < bcExpectedSize);
     this.conf.setInt(HConstants.BUCKET_CACHE_SIZE_KEY, bcSize);
+    CacheConfig.instantiateBlockCache(this.conf);
     CacheConfig cc = new CacheConfig(this.conf);
     basicBlockCacheOps(cc, false, false);
     assertTrue(cc.getBlockCache() instanceof CombinedBlockCache);
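Note: the bucket-cache tests above all follow the same recipe, which doubles as a usage example of the new API: configure the L2 cache, instantiate the global cache once, then build a CacheConfig. A condensed sketch (keys and values taken from the tests above):

    Configuration conf = HBaseConfiguration.create();
    conf.set(HConstants.BUCKET_CACHE_IOENGINE_KEY, "offheap");
    conf.setInt(HConstants.BUCKET_CACHE_SIZE_KEY, 100); // bucket cache size, in MB
    CacheConfig.instantiateBlockCache(conf);
    CacheConfig cc = new CacheConfig(conf);
    // With a bucket cache configured, the global cache is the combined L1+L2 implementation.
    assert cc.getBlockCache() instanceof CombinedBlockCache;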
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
index 19d845ccf7c..9c2f6df332b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestCacheOnWrite.java
@@ -160,6 +160,7 @@ public class TestCacheOnWrite {
     Configuration conf = TEST_UTIL.getConfiguration();
     List<BlockCache> blockcaches = new ArrayList<>();
     // default
+    CacheConfig.instantiateBlockCache(conf);
     blockcaches.add(new CacheConfig(conf).getBlockCache());
 
     //set LruBlockCache.LRU_HARD_CAPACITY_LIMIT_FACTOR_CONFIG_NAME to 2.0f due to HBASE-16287
@@ -228,7 +229,6 @@ public class TestCacheOnWrite {
     conf.setBoolean(CacheConfig.CACHE_DATA_BLOCKS_COMPRESSED_KEY, cacheCompressedData);
     cowType.modifyConf(conf);
     fs = HFileSystem.get(conf);
-    CacheConfig.GLOBAL_BLOCK_CACHE_INSTANCE = blockCache;
     cacheConf =
         new CacheConfig(blockCache, true, true, cowType.shouldBeCached(BlockType.DATA),
             cowType.shouldBeCached(BlockType.LEAF_INDEX),
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestForceCacheImportantBlocks.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestForceCacheImportantBlocks.java
index a90b572e38e..5612c1b5d63 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestForceCacheImportantBlocks.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestForceCacheImportantBlocks.java
@@ -106,6 +106,7 @@ public class TestForceCacheImportantBlocks {
     // Make sure we make a new one each time.
     CacheConfig.clearGlobalInstances();
     HFile.DATABLOCK_READ_COUNT.reset();
+    CacheConfig.instantiateBlockCache(TEST_UTIL.getConfiguration());
   }
 
   @Test
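Note: TestCacheOnWrite no longer pokes its cache into CacheConfig.GLOBAL_BLOCK_CACHE_INSTANCE; it hands the per-run cache to the CacheConfig constructor that takes an explicit BlockCache, which keeps test-local caches out of the global singleton. A sketch of that style, assuming the public constructor keeps the parameter order of the removed private constructor (all flags true here for brevity):

    CacheConfig cacheConf = new CacheConfig(blockCache,
        true, true, true, true, true, // cacheDataOnRead, inMemory, data/index/bloom on write
        true, true, true, true);      // evictOnClose, cacheDataCompressed, prefetchOnOpen, dropBehindCompaction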
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java
index 890ea72d2c1..efe76aafe70 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestHFileBlockIndex.java
@@ -524,67 +524,59 @@ public class TestHFileBlockIndex {
    * @throws IOException
    */
   @Test
-  public void testMidKeyOnLeafIndexBlockBoundary() throws IOException {
-    Path hfilePath = new Path(TEST_UTIL.getDataTestDir(),
-        "hfile_for_midkey");
-    int maxChunkSize = 512;
-    conf.setInt(HFileBlockIndex.MAX_CHUNK_SIZE_KEY, maxChunkSize);
-    // should open hfile.block.index.cacheonwrite
-    conf.setBoolean(CacheConfig.CACHE_INDEX_BLOCKS_ON_WRITE_KEY, true);
+  public void testMidKeyOnLeafIndexBlockBoundary() throws IOException {
+    Path hfilePath = new Path(TEST_UTIL.getDataTestDir(), "hfile_for_midkey");
+    int maxChunkSize = 512;
+    conf.setInt(HFileBlockIndex.MAX_CHUNK_SIZE_KEY, maxChunkSize);
+    // should open hfile.block.index.cacheonwrite
+    conf.setBoolean(CacheConfig.CACHE_INDEX_BLOCKS_ON_WRITE_KEY, true);
+    CacheConfig.instantiateBlockCache(conf);
+    CacheConfig cacheConf = new CacheConfig(conf);
+    BlockCache blockCache = cacheConf.getBlockCache();
+    // Evict all blocks that were cached-on-write by the previous invocation.
+    blockCache.evictBlocksByHfileName(hfilePath.getName());
+    // Write the HFile
+    HFileContext meta =
+        new HFileContextBuilder().withBlockSize(SMALL_BLOCK_SIZE).withCompression(Algorithm.NONE)
+            .withDataBlockEncoding(DataBlockEncoding.NONE).build();
+    HFile.Writer writer =
+        HFile.getWriterFactory(conf, cacheConf).withPath(fs, hfilePath).withFileContext(meta)
+            .create();
+    Random rand = new Random(19231737);
+    byte[] family = Bytes.toBytes("f");
+    byte[] qualifier = Bytes.toBytes("q");
+    int kvNumberToBeWritten = 16;
+    // the new generated hfile will contain 2 leaf-index blocks and 16 data blocks,
+    // midkey is just on the boundary of the first leaf-index block
+    for (int i = 0; i < kvNumberToBeWritten; ++i) {
+      byte[] row = RandomKeyValueUtil.randomOrderedFixedLengthKey(rand, i, 30);
 
-    CacheConfig cacheConf = new CacheConfig(conf);
-    BlockCache blockCache = cacheConf.getBlockCache();
-    // Evict all blocks that were cached-on-write by the previous invocation.
-    blockCache.evictBlocksByHfileName(hfilePath.getName());
-    // Write the HFile
-    {
-      HFileContext meta = new HFileContextBuilder()
-          .withBlockSize(SMALL_BLOCK_SIZE)
-          .withCompression(Algorithm.NONE)
-          .withDataBlockEncoding(DataBlockEncoding.NONE)
-          .build();
-      HFile.Writer writer =
-          HFile.getWriterFactory(conf, cacheConf)
-              .withPath(fs, hfilePath)
-              .withFileContext(meta)
-              .create();
-      Random rand = new Random(19231737);
-      byte[] family = Bytes.toBytes("f");
-      byte[] qualifier = Bytes.toBytes("q");
-      int kvNumberToBeWritten = 16;
-      // the new generated hfile will contain 2 leaf-index blocks and 16 data blocks,
-      // midkey is just on the boundary of the first leaf-index block
-      for (int i = 0; i < kvNumberToBeWritten; ++i) {
-        byte[] row = RandomKeyValueUtil.randomOrderedFixedLengthKey(rand, i, 30);
+      // Key will be interpreted by KeyValue.KEY_COMPARATOR
+      KeyValue kv = new KeyValue(row, family, qualifier, EnvironmentEdgeManager.currentTime(),
+          RandomKeyValueUtil.randomFixedLengthValue(rand, SMALL_BLOCK_SIZE));
+      writer.append(kv);
+    }
+    writer.close();
 
-        // Key will be interpreted by KeyValue.KEY_COMPARATOR
-        KeyValue kv =
-            new KeyValue(row, family, qualifier, EnvironmentEdgeManager.currentTime(),
-                RandomKeyValueUtil.randomFixedLengthValue(rand, SMALL_BLOCK_SIZE));
-        writer.append(kv);
-      }
-      writer.close();
-    }
+    // close hfile.block.index.cacheonwrite
+    conf.setBoolean(CacheConfig.CACHE_INDEX_BLOCKS_ON_WRITE_KEY, false);
 
-    // close hfile.block.index.cacheonwrite
-    conf.setBoolean(CacheConfig.CACHE_INDEX_BLOCKS_ON_WRITE_KEY, false);
+    // Read the HFile
+    HFile.Reader reader = HFile.createReader(fs, hfilePath, cacheConf, true, conf);
 
-    // Read the HFile
-    HFile.Reader reader = HFile.createReader(fs, hfilePath, cacheConf, true, conf);
+    boolean hasArrayIndexOutOfBoundsException = false;
+    try {
+      // get the mid-key.
+      reader.midKey();
+    } catch (ArrayIndexOutOfBoundsException e) {
+      hasArrayIndexOutOfBoundsException = true;
+    } finally {
+      reader.close();
+    }
 
-    boolean hasArrayIndexOutOfBoundsException = false;
-    try {
-      // get the mid-key.
-      reader.midKey();
-    } catch (ArrayIndexOutOfBoundsException e) {
-      hasArrayIndexOutOfBoundsException = true;
-    } finally {
-      reader.close();
-    }
-
-    // to check if ArrayIndexOutOfBoundsException occurred
-    assertFalse(hasArrayIndexOutOfBoundsException);
-  }
+    // to check if ArrayIndexOutOfBoundsException occurred
+    assertFalse(hasArrayIndexOutOfBoundsException);
+  }
 
   /**
    * Testing block index through the HFile writer/reader APIs. Allows to test
@@ -597,6 +589,7 @@ public class TestHFileBlockIndex {
   public void testHFileWriterAndReader() throws IOException {
     Path hfilePath = new Path(TEST_UTIL.getDataTestDir(),
         "hfile_for_block_index");
+    CacheConfig.instantiateBlockCache(conf);
     CacheConfig cacheConf = new CacheConfig(conf);
     BlockCache blockCache = cacheConf.getBlockCache();
 
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestPrefetch.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestPrefetch.java
index 91a9238a504..811df144f5e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestPrefetch.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestPrefetch.java
@@ -64,6 +64,7 @@ public class TestPrefetch {
     conf.setBoolean(CacheConfig.PREFETCH_BLOCKS_ON_OPEN_KEY, true);
     fs = HFileSystem.get(conf);
     CacheConfig.blockCacheDisabled = false;
+    CacheConfig.instantiateBlockCache(conf);
     cacheConf = new CacheConfig(conf);
   }
 
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerFromBucketCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerFromBucketCache.java
index 06d617aad4e..18e8e701ff2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerFromBucketCache.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerFromBucketCache.java
@@ -88,6 +88,7 @@ public class TestScannerFromBucketCache {
       conf.setFloat("hbase.regionserver.global.memstore.size", 0.1f);
     }
     tableName = TableName.valueOf(name.getMethodName());
+    CacheConfig.instantiateBlockCache(conf);
   }
 
   @After
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingKeyRange.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingKeyRange.java
index c576329c11a..d27b0412658 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingKeyRange.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingKeyRange.java
@@ -124,6 +124,7 @@ public class TestScannerSelectionUsingKeyRange {
 
     Scan scan = new Scan(Bytes.toBytes("aaa"), Bytes.toBytes("aaz"));
     CacheConfig.blockCacheDisabled = false;
+    CacheConfig.instantiateBlockCache(conf);
     CacheConfig cacheConf = new CacheConfig(conf);
     LruBlockCache cache = (LruBlockCache) cacheConf.getBlockCache();
     cache.clearCache();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java
index 08a7be2cf2b..444102dc56c 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/TestScannerSelectionUsingTTL.java
@@ -104,6 +104,7 @@ public class TestScannerSelectionUsingTTL {
   @Test
   public void testScannerSelection() throws IOException {
     Configuration conf = TEST_UTIL.getConfiguration();
+    CacheConfig.instantiateBlockCache(conf);
     conf.setBoolean("hbase.store.delete.expired.storefile", false);
     HColumnDescriptor hcd =
         new HColumnDescriptor(FAMILY_BYTES)
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksRead.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksRead.java
index 73b596ad579..2cf3f8cf073 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksRead.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksRead.java
@@ -76,6 +76,7 @@ public class TestBlocksRead {
   public static void setUp() throws Exception {
     // disable compactions in this test.
     TEST_UTIL.getConfiguration().setInt("hbase.hstore.compactionThreshold", 10000);
+    CacheConfig.instantiateBlockCache(TEST_UTIL.getConfiguration());
   }
 
   @AfterClass
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksScanned.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksScanned.java
index 7db34ac2cbb..0ba4e978478 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksScanned.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestBlocksScanned.java
@@ -61,8 +61,8 @@ public class TestBlocksScanned extends HBaseTestCase {
   @Before
   public void setUp() throws Exception {
     super.setUp();
-
     TEST_UTIL = new HBaseTestingUtility();
+    CacheConfig.instantiateBlockCache(TEST_UTIL.getConfiguration());
   }
 
   @Test
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.java
index 5c73a6fa0c7..dc51dae9a12 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.java
@@ -161,6 +161,7 @@ public class TestCacheOnWriteInSchema {
     conf.setBoolean(CacheConfig.CACHE_BLOCKS_ON_WRITE_KEY, false);
     conf.setBoolean(CacheConfig.CACHE_INDEX_BLOCKS_ON_WRITE_KEY, false);
     conf.setBoolean(CacheConfig.CACHE_BLOOM_BLOCKS_ON_WRITE_KEY, false);
+    CacheConfig.instantiateBlockCache(conf);
 
     fs = HFileSystem.get(conf);
 
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java
index 424a78807ea..4c2d64535fc 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java
@@ -139,6 +139,7 @@ public class TestCompoundBloomFilter {
 
     fs = FileSystem.get(conf);
 
+    CacheConfig.instantiateBlockCache(conf);
     cacheConf = new CacheConfig(conf);
     blockCache = cacheConf.getBlockCache();
     assertNotNull(blockCache);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java
index 4b8f5f200fb..30ee3b2178d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java
@@ -924,7 +924,6 @@ public class TestHStoreFile extends HBaseTestCase {
     scan.setTimeRange(27, 50);
     scan.setColumnFamilyTimeRange(family, 7, 50);
     assertTrue(scanner.shouldUseScanner(scan, store, Long.MIN_VALUE));
-
   }
 
   @Test
@@ -935,6 +934,7 @@ public class TestHStoreFile extends HBaseTestCase {
     Path baseDir = new Path(new Path(testDir, "7e0102"),"twoCOWEOC");
 
     // Grab the block cache and get the initial hit/miss counts
+    CacheConfig.instantiateBlockCache(conf);
     BlockCache bc = new CacheConfig(conf).getBlockCache();
     assertNotNull(bc);
     CacheStats cs = bc.getStats();
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRecoveredEdits.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRecoveredEdits.java
index 34f6ca1ad7f..543126e5b55 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRecoveredEdits.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestRecoveredEdits.java
@@ -40,6 +40,7 @@ import org.apache.hadoop.hbase.PrivateCellUtil;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.Result;
+import org.apache.hadoop.hbase.io.hfile.CacheConfig;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.FSUtils;
@@ -80,6 +81,7 @@ public class TestRecoveredEdits {
   @Test
   public void testReplayWorksThoughLotsOfFlushing() throws IOException {
+    CacheConfig.instantiateBlockCache(TEST_UTIL.getConfiguration());
     for(MemoryCompactionPolicy policy : MemoryCompactionPolicy.values()) {
       testReplayWorksWithMemoryCompactionPolicy(policy);
     }
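Note: taken together, the test changes follow a single migration pattern: any test that expects new CacheConfig(conf).getBlockCache() to return a cache must now create the global instance in its setup. A minimal JUnit sketch of that pattern (class and field names are illustrative, not from the patch):

    @Before
    public void setUp() throws Exception {
      Configuration conf = TEST_UTIL.getConfiguration();
      // Explicitly create the process-wide block cache before any CacheConfig is built.
      CacheConfig.instantiateBlockCache(conf);
      cacheConf = new CacheConfig(conf);
    }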