HBASE-10138 incorrect or confusing test value is used in block caches

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1552454 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
sershe 2013-12-19 23:27:56 +00:00
parent 5ef080e8a7
commit 5f3672b7cf
6 changed files with 18 additions and 19 deletions

View File

@@ -397,7 +397,7 @@ public class CacheConfig {
           "hbase.bucketcache.ioengine.errors.tolerated.duration",
           BucketCache.DEFAULT_ERROR_TOLERATION_DURATION);
         bucketCache = new BucketCache(bucketCacheIOEngineName,
-          bucketCacheSize, writerThreads, writerQueueLen, persistentPath,
+          bucketCacheSize, blockSize, writerThreads, writerQueueLen, persistentPath,
           ioErrorsTolerationDuration);
       } catch (IOException ioex) {
         LOG.error("Can't instantiate bucket cache", ioex);
@@ -406,7 +406,7 @@ public class CacheConfig {
       }
       LOG.info("Allocating LruBlockCache with maximum size " +
         StringUtils.humanReadableInt(lruCacheSize));
-      LruBlockCache lruCache = new LruBlockCache(lruCacheSize, StoreFile.DEFAULT_BLOCKSIZE_SMALL);
+      LruBlockCache lruCache = new LruBlockCache(lruCacheSize, blockSize);
      lruCache.setVictimCache(bucketCache);
      if (bucketCache != null && combinedWithLru) {
        globalBlockCache = new CombinedBlockCache(lruCache, bucketCache);
@@ -414,8 +414,8 @@ public class CacheConfig {
         globalBlockCache = lruCache;
       }
     } else {
-      globalBlockCache = new DoubleBlockCache(lruCacheSize, offHeapCacheSize,
-        StoreFile.DEFAULT_BLOCKSIZE_SMALL, blockSize, conf);
+      globalBlockCache = new DoubleBlockCache(
+        lruCacheSize, offHeapCacheSize, blockSize, blockSize, conf);
     }
     return globalBlockCache;
   }

View File

@@ -197,20 +197,20 @@ public class BucketCache implements BlockCache, HeapSize {
   // Allocate or free space for the block
   private BucketAllocator bucketAllocator;

-  public BucketCache(String ioEngineName, long capacity, int writerThreadNum,
+  public BucketCache(String ioEngineName, long capacity, int blockSize, int writerThreadNum,
       int writerQLen, String persistencePath) throws FileNotFoundException,
       IOException {
-    this(ioEngineName, capacity, writerThreadNum, writerQLen, persistencePath,
+    this(ioEngineName, capacity, blockSize, writerThreadNum, writerQLen, persistencePath,
       DEFAULT_ERROR_TOLERATION_DURATION);
   }

-  public BucketCache(String ioEngineName, long capacity, int writerThreadNum,
+  public BucketCache(String ioEngineName, long capacity, int blockSize, int writerThreadNum,
       int writerQLen, String persistencePath, int ioErrorsTolerationDuration)
       throws FileNotFoundException, IOException {
     this.ioEngine = getIOEngineFromName(ioEngineName, capacity);
     this.writerThreads = new WriterThread[writerThreadNum];
     this.cacheWaitSignals = new Object[writerThreadNum];
-    long blockNumCapacity = capacity / 16384;
+    long blockNumCapacity = capacity / blockSize;
     if (blockNumCapacity >= Integer.MAX_VALUE) {
       // Enough for about 32TB of cache!
       throw new IllegalArgumentException("Cache capacity is too large, only support 32TB now");
@@ -218,7 +218,7 @@ public class BucketCache implements BlockCache, HeapSize {
     this.cacheCapacity = capacity;
     this.persistencePath = persistencePath;
-    this.blockSize = StoreFile.DEFAULT_BLOCKSIZE_SMALL;
+    this.blockSize = blockSize;
     this.ioErrorsTolerationDuration = ioErrorsTolerationDuration;

     bucketAllocator = new BucketAllocator(capacity);

View File

@@ -111,10 +111,6 @@ public class StoreFile {
   /** Key for timestamp of earliest-put in metadata*/
   public static final byte[] EARLIEST_PUT_TS = Bytes.toBytes("EARLIEST_PUT_TS");

-  // Make default block size for StoreFiles 8k while testing.  TODO: FIX!
-  // Need to make it 8k for testing.
-  public static final int DEFAULT_BLOCKSIZE_SMALL = 8 * 1024;
-
   private final StoreFileInfo fileInfo;
   private final FileSystem fs;

View File

@@ -65,7 +65,7 @@ public class TestBucketCache {
         int writerThreads,
         int writerQLen, String persistencePath) throws FileNotFoundException,
         IOException {
-      super(ioEngineName, capacity, writerThreads, writerQLen, persistencePath);
+      super(ioEngineName, capacity, 8192, writerThreads, writerQLen, persistencePath);
       super.wait_when_cache = true;
     }

View File

@@ -306,6 +306,7 @@ public class TestStore extends TestCase {
   // Get tests
   //////////////////////////////////////////////////////////////////////////////

+  private static final int BLOCKSIZE_SMALL = 8192;
   /**
    * Test for hbase-1686.
    * @throws IOException
@@ -323,7 +324,7 @@ public class TestStore extends TestCase {
     long seqid = f.getMaxSequenceId();
     Configuration c = HBaseConfiguration.create();
     FileSystem fs = FileSystem.get(c);
-    HFileContext meta = new HFileContextBuilder().withBlockSize(StoreFile.DEFAULT_BLOCKSIZE_SMALL).build();
+    HFileContext meta = new HFileContextBuilder().withBlockSize(BLOCKSIZE_SMALL).build();
     StoreFile.Writer w = new StoreFile.WriterBuilder(c, new CacheConfig(c),
         fs)
             .withOutputDir(storedir)

View File

@@ -488,6 +488,8 @@ public class TestStoreFile extends HBaseTestCase {
         + ", expected no more than " + maxFalsePos + ")",
         falsePos <= maxFalsePos);
   }

+  private static final int BLOCKSIZE_SMALL = 8192;
+
   public void testBloomFilter() throws Exception {
     FileSystem fs = FileSystem.getLocal(conf);
@@ -496,7 +498,7 @@ public class TestStoreFile extends HBaseTestCase {

     // write the file
     Path f = new Path(ROOT_DIR, getName());
-    HFileContext meta = new HFileContextBuilder().withBlockSize(StoreFile.DEFAULT_BLOCKSIZE_SMALL)
+    HFileContext meta = new HFileContextBuilder().withBlockSize(BLOCKSIZE_SMALL)
         .withChecksumType(CKTYPE)
         .withBytesPerCheckSum(CKBYTES).build();
     // Make a store file and write data to it.
@@ -519,7 +521,7 @@ public class TestStoreFile extends HBaseTestCase {
     Path f = new Path(ROOT_DIR, getName());
     HFileContext meta = new HFileContextBuilder()
-        .withBlockSize(StoreFile.DEFAULT_BLOCKSIZE_SMALL)
+        .withBlockSize(BLOCKSIZE_SMALL)
         .withChecksumType(CKTYPE)
         .withBytesPerCheckSum(CKBYTES).build();
     // Make a store file and write data to it.
@@ -617,7 +619,7 @@ public class TestStoreFile extends HBaseTestCase {
     for (int x : new int[]{0,1}) {
       // write the file
       Path f = new Path(ROOT_DIR, getName() + x);
-      HFileContext meta = new HFileContextBuilder().withBlockSize(StoreFile.DEFAULT_BLOCKSIZE_SMALL)
+      HFileContext meta = new HFileContextBuilder().withBlockSize(BLOCKSIZE_SMALL)
           .withChecksumType(CKTYPE)
           .withBytesPerCheckSum(CKBYTES).build();
       // Make a store file and write data to it.
@@ -992,7 +994,7 @@ public class TestStoreFile extends HBaseTestCase {
         new HFileDataBlockEncoderImpl(
             dataBlockEncoderAlgo);
     cacheConf = new CacheConfig(conf);
-    HFileContext meta = new HFileContextBuilder().withBlockSize(StoreFile.DEFAULT_BLOCKSIZE_SMALL)
+    HFileContext meta = new HFileContextBuilder().withBlockSize(BLOCKSIZE_SMALL)
         .withChecksumType(CKTYPE)
         .withBytesPerCheckSum(CKBYTES)
         .withDataBlockEncoding(dataBlockEncoderAlgo)