HBASE-10138 incorrect or confusing test value is used in block caches

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1552454 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
sershe 2013-12-19 23:27:56 +00:00
parent 5ef080e8a7
commit 5f3672b7cf
6 changed files with 18 additions and 19 deletions

View File

@@ -397,7 +397,7 @@ public class CacheConfig {
"hbase.bucketcache.ioengine.errors.tolerated.duration",
BucketCache.DEFAULT_ERROR_TOLERATION_DURATION);
bucketCache = new BucketCache(bucketCacheIOEngineName,
bucketCacheSize, writerThreads, writerQueueLen, persistentPath,
bucketCacheSize, blockSize, writerThreads, writerQueueLen, persistentPath,
ioErrorsTolerationDuration);
} catch (IOException ioex) {
LOG.error("Can't instantiate bucket cache", ioex);
@@ -406,7 +406,7 @@ public class CacheConfig {
}
LOG.info("Allocating LruBlockCache with maximum size " +
StringUtils.humanReadableInt(lruCacheSize));
LruBlockCache lruCache = new LruBlockCache(lruCacheSize, StoreFile.DEFAULT_BLOCKSIZE_SMALL);
LruBlockCache lruCache = new LruBlockCache(lruCacheSize, blockSize);
lruCache.setVictimCache(bucketCache);
if (bucketCache != null && combinedWithLru) {
globalBlockCache = new CombinedBlockCache(lruCache, bucketCache);
@@ -414,8 +414,8 @@ public class CacheConfig {
globalBlockCache = lruCache;
}
} else {
globalBlockCache = new DoubleBlockCache(lruCacheSize, offHeapCacheSize,
StoreFile.DEFAULT_BLOCKSIZE_SMALL, blockSize, conf);
globalBlockCache = new DoubleBlockCache(
lruCacheSize, offHeapCacheSize, blockSize, blockSize, conf);
}
return globalBlockCache;
}

View File

@@ -197,20 +197,20 @@ public class BucketCache implements BlockCache, HeapSize {
// Allocate or free space for the block
private BucketAllocator bucketAllocator;
public BucketCache(String ioEngineName, long capacity, int writerThreadNum,
public BucketCache(String ioEngineName, long capacity, int blockSize, int writerThreadNum,
int writerQLen, String persistencePath) throws FileNotFoundException,
IOException {
this(ioEngineName, capacity, writerThreadNum, writerQLen, persistencePath,
this(ioEngineName, capacity, blockSize, writerThreadNum, writerQLen, persistencePath,
DEFAULT_ERROR_TOLERATION_DURATION);
}
public BucketCache(String ioEngineName, long capacity, int writerThreadNum,
public BucketCache(String ioEngineName, long capacity, int blockSize, int writerThreadNum,
int writerQLen, String persistencePath, int ioErrorsTolerationDuration)
throws FileNotFoundException, IOException {
this.ioEngine = getIOEngineFromName(ioEngineName, capacity);
this.writerThreads = new WriterThread[writerThreadNum];
this.cacheWaitSignals = new Object[writerThreadNum];
long blockNumCapacity = capacity / 16384;
long blockNumCapacity = capacity / blockSize;
if (blockNumCapacity >= Integer.MAX_VALUE) {
// Enough for about 32TB of cache!
throw new IllegalArgumentException("Cache capacity is too large, only support 32TB now");
@@ -218,7 +218,7 @@ public class BucketCache implements BlockCache, HeapSize {
this.cacheCapacity = capacity;
this.persistencePath = persistencePath;
this.blockSize = StoreFile.DEFAULT_BLOCKSIZE_SMALL;
this.blockSize = blockSize;
this.ioErrorsTolerationDuration = ioErrorsTolerationDuration;
bucketAllocator = new BucketAllocator(capacity);

View File

@@ -111,10 +111,6 @@ public class StoreFile {
/** Key for timestamp of earliest-put in metadata*/
public static final byte[] EARLIEST_PUT_TS = Bytes.toBytes("EARLIEST_PUT_TS");
// Make default block size for StoreFiles 8k while testing. TODO: FIX!
// Need to make it 8k for testing.
public static final int DEFAULT_BLOCKSIZE_SMALL = 8 * 1024;
private final StoreFileInfo fileInfo;
private final FileSystem fs;

View File

@@ -65,7 +65,7 @@ public class TestBucketCache {
int writerThreads,
int writerQLen, String persistencePath) throws FileNotFoundException,
IOException {
super(ioEngineName, capacity, writerThreads, writerQLen, persistencePath);
super(ioEngineName, capacity, 8192, writerThreads, writerQLen, persistencePath);
super.wait_when_cache = true;
}

View File

@@ -306,6 +306,7 @@ public class TestStore extends TestCase {
// Get tests
//////////////////////////////////////////////////////////////////////////////
private static final int BLOCKSIZE_SMALL = 8192;
/**
* Test for hbase-1686.
* @throws IOException
@@ -323,7 +324,7 @@ public class TestStore extends TestCase {
long seqid = f.getMaxSequenceId();
Configuration c = HBaseConfiguration.create();
FileSystem fs = FileSystem.get(c);
HFileContext meta = new HFileContextBuilder().withBlockSize(StoreFile.DEFAULT_BLOCKSIZE_SMALL).build();
HFileContext meta = new HFileContextBuilder().withBlockSize(BLOCKSIZE_SMALL).build();
StoreFile.Writer w = new StoreFile.WriterBuilder(c, new CacheConfig(c),
fs)
.withOutputDir(storedir)

View File

@@ -488,6 +488,8 @@ public class TestStoreFile extends HBaseTestCase {
+ ", expected no more than " + maxFalsePos + ")",
falsePos <= maxFalsePos);
}
private static final int BLOCKSIZE_SMALL = 8192;
public void testBloomFilter() throws Exception {
FileSystem fs = FileSystem.getLocal(conf);
@@ -496,7 +498,7 @@ public class TestStoreFile extends HBaseTestCase {
// write the file
Path f = new Path(ROOT_DIR, getName());
HFileContext meta = new HFileContextBuilder().withBlockSize(StoreFile.DEFAULT_BLOCKSIZE_SMALL)
HFileContext meta = new HFileContextBuilder().withBlockSize(BLOCKSIZE_SMALL)
.withChecksumType(CKTYPE)
.withBytesPerCheckSum(CKBYTES).build();
// Make a store file and write data to it.
@@ -519,7 +521,7 @@
Path f = new Path(ROOT_DIR, getName());
HFileContext meta = new HFileContextBuilder()
.withBlockSize(StoreFile.DEFAULT_BLOCKSIZE_SMALL)
.withBlockSize(BLOCKSIZE_SMALL)
.withChecksumType(CKTYPE)
.withBytesPerCheckSum(CKBYTES).build();
// Make a store file and write data to it.
@@ -617,7 +619,7 @@
for (int x : new int[]{0,1}) {
// write the file
Path f = new Path(ROOT_DIR, getName() + x);
HFileContext meta = new HFileContextBuilder().withBlockSize(StoreFile.DEFAULT_BLOCKSIZE_SMALL)
HFileContext meta = new HFileContextBuilder().withBlockSize(BLOCKSIZE_SMALL)
.withChecksumType(CKTYPE)
.withBytesPerCheckSum(CKBYTES).build();
// Make a store file and write data to it.
@@ -992,7 +994,7 @@
new HFileDataBlockEncoderImpl(
dataBlockEncoderAlgo);
cacheConf = new CacheConfig(conf);
HFileContext meta = new HFileContextBuilder().withBlockSize(StoreFile.DEFAULT_BLOCKSIZE_SMALL)
HFileContext meta = new HFileContextBuilder().withBlockSize(BLOCKSIZE_SMALL)
.withChecksumType(CKTYPE)
.withBytesPerCheckSum(CKBYTES)
.withDataBlockEncoding(dataBlockEncoderAlgo)