From 5f3672b7cfdda5ebc56903fbfd3deb74ba5c62d1 Mon Sep 17 00:00:00 2001
From: sershe
Date: Thu, 19 Dec 2013 23:27:56 +0000
Subject: [PATCH] HBASE-10138 incorrect or confusing test value is used in
 block caches

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1552454 13f79535-47bb-0310-9956-ffa450edef68
---
 .../org/apache/hadoop/hbase/io/hfile/CacheConfig.java |  8 ++++----
 .../hadoop/hbase/io/hfile/bucket/BucketCache.java     | 10 +++++-----
 .../apache/hadoop/hbase/regionserver/StoreFile.java   |  4 ----
 .../hadoop/hbase/io/hfile/bucket/TestBucketCache.java |  2 +-
 .../apache/hadoop/hbase/regionserver/TestStore.java   |  3 ++-
 .../hadoop/hbase/regionserver/TestStoreFile.java      | 10 ++++++----
 6 files changed, 18 insertions(+), 19 deletions(-)

diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
index 17988508c09..22765438bec 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java
@@ -397,7 +397,7 @@ public class CacheConfig {
         "hbase.bucketcache.ioengine.errors.tolerated.duration",
         BucketCache.DEFAULT_ERROR_TOLERATION_DURATION);
       bucketCache = new BucketCache(bucketCacheIOEngineName,
-        bucketCacheSize, writerThreads, writerQueueLen, persistentPath,
+        bucketCacheSize, blockSize, writerThreads, writerQueueLen, persistentPath,
         ioErrorsTolerationDuration);
     } catch (IOException ioex) {
       LOG.error("Can't instantiate bucket cache", ioex);
@@ -406,7 +406,7 @@ public class CacheConfig {
     }
     LOG.info("Allocating LruBlockCache with maximum size " +
       StringUtils.humanReadableInt(lruCacheSize));
-    LruBlockCache lruCache = new LruBlockCache(lruCacheSize, StoreFile.DEFAULT_BLOCKSIZE_SMALL);
+    LruBlockCache lruCache = new LruBlockCache(lruCacheSize, blockSize);
     lruCache.setVictimCache(bucketCache);
     if (bucketCache != null && combinedWithLru) {
       globalBlockCache = new CombinedBlockCache(lruCache, bucketCache);
@@ -414,8 +414,8 @@ public class CacheConfig {
       globalBlockCache = lruCache;
     }
   } else {
-    globalBlockCache = new DoubleBlockCache(lruCacheSize, offHeapCacheSize,
-      StoreFile.DEFAULT_BLOCKSIZE_SMALL, blockSize, conf);
+    globalBlockCache = new DoubleBlockCache(
+      lruCacheSize, offHeapCacheSize, blockSize, blockSize, conf);
   }
   return globalBlockCache;
 }
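
For orientation between the file diffs: after this change, the block size that
CacheConfig already computes flows into every cache it builds, replacing the
8 KB StoreFile test constant. A rough sketch of the resulting wiring, assuming
CacheConfig-style local names; the config key and default shown here are
illustrative assumptions, not quoted from the patch:

    // Hypothetical caller-side view of the new wiring (local names assumed).
    Configuration conf = HBaseConfiguration.create();
    int blockSize = conf.getInt("hbase.offheapcache.minblocksize",
        HConstants.DEFAULT_BLOCKSIZE);  // 64 KiB HBase default
    // The on-heap LRU tier is now created with the real block size.
    LruBlockCache lruCache = new LruBlockCache(lruCacheSize, blockSize);
    // BucketCache's signature grows a blockSize parameter after capacity.
    BucketCache bucketCache = new BucketCache(bucketCacheIOEngineName,
        bucketCacheSize, blockSize, writerThreads, writerQueueLen,
        persistentPath, ioErrorsTolerationDuration);
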
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
index cea808b3e83..1527c128966 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java
@@ -197,20 +197,20 @@ public class BucketCache implements BlockCache, HeapSize {
   // Allocate or free space for the block
   private BucketAllocator bucketAllocator;
 
-  public BucketCache(String ioEngineName, long capacity, int writerThreadNum,
+  public BucketCache(String ioEngineName, long capacity, int blockSize, int writerThreadNum,
       int writerQLen, String persistencePath) throws FileNotFoundException,
       IOException {
-    this(ioEngineName, capacity, writerThreadNum, writerQLen, persistencePath,
+    this(ioEngineName, capacity, blockSize, writerThreadNum, writerQLen, persistencePath,
       DEFAULT_ERROR_TOLERATION_DURATION);
   }
 
-  public BucketCache(String ioEngineName, long capacity, int writerThreadNum,
+  public BucketCache(String ioEngineName, long capacity, int blockSize, int writerThreadNum,
       int writerQLen, String persistencePath, int ioErrorsTolerationDuration)
       throws FileNotFoundException, IOException {
     this.ioEngine = getIOEngineFromName(ioEngineName, capacity);
     this.writerThreads = new WriterThread[writerThreadNum];
     this.cacheWaitSignals = new Object[writerThreadNum];
-    long blockNumCapacity = capacity / 16384;
+    long blockNumCapacity = capacity / blockSize;
     if (blockNumCapacity >= Integer.MAX_VALUE) {
       // Enough for about 32TB of cache!
       throw new IllegalArgumentException("Cache capacity is too large, only support 32TB now");
@@ -218,7 +218,7 @@ public class BucketCache implements BlockCache, HeapSize {
 
     this.cacheCapacity = capacity;
     this.persistencePath = persistencePath;
-    this.blockSize = StoreFile.DEFAULT_BLOCKSIZE_SMALL;
+    this.blockSize = blockSize;
     this.ioErrorsTolerationDuration = ioErrorsTolerationDuration;
 
     bucketAllocator = new BucketAllocator(capacity);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java
index 6d2b327747e..66e522ce2b1 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java
@@ -111,10 +111,6 @@ public class StoreFile {
   /** Key for timestamp of earliest-put in metadata*/
   public static final byte[] EARLIEST_PUT_TS = Bytes.toBytes("EARLIEST_PUT_TS");
 
-  // Make default block size for StoreFiles 8k while testing. TODO: FIX!
-  // Need to make it 8k for testing.
-  public static final int DEFAULT_BLOCKSIZE_SMALL = 8 * 1024;
-
   private final StoreFileInfo fileInfo;
   private final FileSystem fs;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCache.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCache.java
index de83b3747e5..63efb1e791d 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCache.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/bucket/TestBucketCache.java
@@ -65,7 +65,7 @@ public class TestBucketCache {
         int writerThreads, int writerQLen, String persistencePath)
         throws FileNotFoundException, IOException {
-      super(ioEngineName, capacity, writerThreads, writerQLen, persistencePath);
+      super(ioEngineName, capacity, 8192, writerThreads, writerQLen, persistencePath);
       super.wait_when_cache = true;
     }
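
A quick sanity check on the blockNumCapacity guard above: with the old
hard-coded divisor of 16384 bytes, the Integer.MAX_VALUE test fires just short
of 2^45 bytes, which is where the "32TB" comment comes from; with blockSize as
a parameter, the ceiling now scales with the configured block size. A
self-contained illustration (class and method names are made up for the
example):

    public final class BucketCapacityMath {
      // The constructor rejects capacity / blockSize >= Integer.MAX_VALUE,
      // so usable capacity tops out just under Integer.MAX_VALUE * blockSize.
      static long capacityCeiling(long blockSize) {
        return (long) Integer.MAX_VALUE * blockSize;
      }

      public static void main(String[] args) {
        System.out.println(capacityCeiling(16384)); // 35184372072448 bytes, ~32 TiB
        System.out.println(capacityCeiling(65536)); // ~128 TiB with 64 KiB blocks
      }
    }
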
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java
index 07d094ef4f1..793b839d898 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java
@@ -306,6 +306,7 @@ public class TestStore extends TestCase {
   // Get tests
   //////////////////////////////////////////////////////////////////////////////
 
+  private static final int BLOCKSIZE_SMALL = 8192;
   /**
    * Test for hbase-1686.
    * @throws IOException
@@ -323,7 +324,7 @@ public class TestStore extends TestCase {
     long seqid = f.getMaxSequenceId();
     Configuration c = HBaseConfiguration.create();
     FileSystem fs = FileSystem.get(c);
-    HFileContext meta = new HFileContextBuilder().withBlockSize(StoreFile.DEFAULT_BLOCKSIZE_SMALL).build();
+    HFileContext meta = new HFileContextBuilder().withBlockSize(BLOCKSIZE_SMALL).build();
     StoreFile.Writer w = new StoreFile.WriterBuilder(c, new CacheConfig(c), fs)
       .withOutputDir(storedir)
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java
index b70d064aedb..00340cfee29 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java
@@ -488,6 +488,8 @@ public class TestStoreFile extends HBaseTestCase {
         + ", expected no more than " + maxFalsePos + ")", falsePos <= maxFalsePos);
   }
+
+  private static final int BLOCKSIZE_SMALL = 8192;
 
   public void testBloomFilter() throws Exception {
     FileSystem fs = FileSystem.getLocal(conf);
 
@@ -496,7 +498,7 @@ public class TestStoreFile extends HBaseTestCase {
     // write the file
     Path f = new Path(ROOT_DIR, getName());
 
-    HFileContext meta = new HFileContextBuilder().withBlockSize(StoreFile.DEFAULT_BLOCKSIZE_SMALL)
+    HFileContext meta = new HFileContextBuilder().withBlockSize(BLOCKSIZE_SMALL)
         .withChecksumType(CKTYPE)
         .withBytesPerCheckSum(CKBYTES).build();
     // Make a store file and write data to it.
@@ -519,7 +521,7 @@ public class TestStoreFile extends HBaseTestCase {
     Path f = new Path(ROOT_DIR, getName());
     HFileContext meta = new HFileContextBuilder()
-        .withBlockSize(StoreFile.DEFAULT_BLOCKSIZE_SMALL)
+        .withBlockSize(BLOCKSIZE_SMALL)
         .withChecksumType(CKTYPE)
         .withBytesPerCheckSum(CKBYTES).build();
     // Make a store file and write data to it.
@@ -617,7 +619,7 @@ public class TestStoreFile extends HBaseTestCase {
     for (int x : new int[]{0,1}) {
       // write the file
       Path f = new Path(ROOT_DIR, getName() + x);
-      HFileContext meta = new HFileContextBuilder().withBlockSize(StoreFile.DEFAULT_BLOCKSIZE_SMALL)
+      HFileContext meta = new HFileContextBuilder().withBlockSize(BLOCKSIZE_SMALL)
          .withChecksumType(CKTYPE)
          .withBytesPerCheckSum(CKBYTES).build();
       // Make a store file and write data to it.
@@ -992,7 +994,7 @@ public class TestStoreFile extends HBaseTestCase {
           new HFileDataBlockEncoderImpl(
               dataBlockEncoderAlgo);
       cacheConf = new CacheConfig(conf);
-      HFileContext meta = new HFileContextBuilder().withBlockSize(StoreFile.DEFAULT_BLOCKSIZE_SMALL)
+      HFileContext meta = new HFileContextBuilder().withBlockSize(BLOCKSIZE_SMALL)
          .withChecksumType(CKTYPE)
          .withBytesPerCheckSum(CKBYTES)
          .withDataBlockEncoding(dataBlockEncoderAlgo)
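
For reference, the test-side idiom after this patch: each test class keeps its
own private 8 KB constant instead of borrowing a "testing only" value from
production code. A minimal sketch of that idiom, assuming the same
HFileContextBuilder usage as the hunks above (CKTYPE and CKBYTES stand in for
the test class's existing checksum fields):

    // 8 KiB blocks keep test store files multi-block without being large.
    private static final int BLOCKSIZE_SMALL = 8192;

    HFileContext meta = new HFileContextBuilder()
        .withBlockSize(BLOCKSIZE_SMALL)
        .withChecksumType(CKTYPE)
        .withBytesPerCheckSum(CKBYTES)
        .build();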