HBASE-11550 Custom value for BUCKET_CACHE_BUCKETS_KEY should be sorted (Gustavo Anatoly)

Conflicts:
	hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.java

Author: Nick Dimiduk
Date:   2014-08-13 21:07:31 -07:00
parent 65375f8258
commit cd59a023c4

3 changed files with 11 additions and 14 deletions
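
The comma-separated sizes configured via BUCKET_CACHE_BUCKETS_KEY are parsed in CacheConfig and handed to BucketAllocator, which scans the size list in order when picking a bucket for a block, so it effectively assumes the sizes are ascending. This patch trims each configured token before parsing it and sorts the resulting array. A standalone sketch of that parse-trim-sort flow, with an illustrative class name and sample value (not code from the patch):

    import java.util.Arrays;

    public class BucketSizesSketch {
      // Hypothetical value a user might configure, with stray spaces after commas.
      static final String SAMPLE = "66560, 9216, 17408, 5120";

      static int[] parseBucketSizes(String configured) {
        String[] tokens = configured.split(",");
        int[] sizes = new int[tokens.length];
        for (int i = 0; i < tokens.length; i++) {
          // trim() guards against whitespace around each token (the CacheConfig change);
          // Integer.parseInt(" 9216") would otherwise throw NumberFormatException.
          sizes[i] = Integer.parseInt(tokens[i].trim());
        }
        // Ascending order is what the allocator expects (the BucketAllocator change).
        Arrays.sort(sizes);
        return sizes;
      }

      public static void main(String[] args) {
        System.out.println(Arrays.toString(parseBucketSizes(SAMPLE)));
        // [5120, 9216, 17408, 66560]
      }
    }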

hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheConfig.java

@@ -484,7 +484,7 @@ public class CacheConfig {
     if (configuredBucketSizes != null) {
       bucketSizes = new int[configuredBucketSizes.length];
       for (int i = 0; i < configuredBucketSizes.length; i++) {
-        bucketSizes[i] = Integer.parseInt(configuredBucketSizes[i]);
+        bucketSizes[i] = Integer.parseInt(configuredBucketSizes[i].trim());
       }
     }
     BucketCache bucketCache = null;

hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketAllocator.java

@@ -21,13 +21,11 @@
 package org.apache.hadoop.hbase.io.hfile.bucket;
 
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.atomic.AtomicLong;
 
-import com.google.common.base.Objects;
-import com.google.common.base.Preconditions;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -36,6 +34,10 @@ import org.apache.hadoop.hbase.io.hfile.CacheConfig;
 import org.apache.hadoop.hbase.io.hfile.bucket.BucketCache.BucketEntry;
 import org.codehaus.jackson.annotate.JsonIgnoreProperties;
 
+import com.google.common.base.Objects;
+import com.google.common.base.Preconditions;
+import com.google.common.primitives.Ints;
+
 /**
  * This class is used to allocate a block with specified size and free the block
  * when evicting. It manages an array of buckets, each bucket is associated with
@@ -299,11 +301,8 @@ public final class BucketAllocator {
   BucketAllocator(long availableSpace, int[] bucketSizes)
       throws BucketAllocatorException {
     this.bucketSizes = bucketSizes == null ? DEFAULT_BUCKET_SIZES : bucketSizes;
-    int largestBucket = this.bucketSizes[0];
-    for (int i : this.bucketSizes) {
-      largestBucket = Math.max(largestBucket, i);
-    }
-    this.bigItemSize = largestBucket;
+    Arrays.sort(this.bucketSizes);
+    this.bigItemSize = Ints.max(this.bucketSizes);
     this.bucketCapacity = FEWEST_ITEMS_IN_BUCKET * bigItemSize;
     buckets = new Bucket[(int) (availableSpace / bucketCapacity)];
     if (buckets.length < this.bucketSizes.length)
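
The manual max-scan is replaced by sorting the array and taking Guava's Ints.max. A minimal standalone illustration (assumes Guava on the classpath; the class name is made up): note that Arrays.sort reorders the array in place, which is presumably what lets later bucket lookups treat the list as ascending.

    import java.util.Arrays;
    import com.google.common.primitives.Ints;

    public class SortedBucketSizesSketch {
      public static void main(String[] args) {
        int[] bucketSizes = { 9 * 1024, 5 * 1024, 65 * 1024, 17 * 1024 };
        Arrays.sort(bucketSizes);                // in place: 5120, 9216, 17408, 66560
        int bigItemSize = Ints.max(bucketSizes); // 66560; equivalently the last element once sorted
        System.out.println(Arrays.toString(bucketSizes) + " bigItemSize=" + bigItemSize);
      }
    }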

hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/bucket/BucketCache.java

@@ -52,6 +52,7 @@ import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.io.HeapSize;
 import org.apache.hadoop.hbase.io.hfile.BlockCache;
 import org.apache.hadoop.hbase.io.hfile.BlockCacheKey;
+import org.apache.hadoop.hbase.io.hfile.BlockCacheUtil;
 import org.apache.hadoop.hbase.io.hfile.BlockPriority;
 import org.apache.hadoop.hbase.io.hfile.BlockType;
 import org.apache.hadoop.hbase.io.hfile.CacheStats;
@@ -59,7 +60,6 @@ import org.apache.hadoop.hbase.io.hfile.Cacheable;
 import org.apache.hadoop.hbase.io.hfile.CacheableDeserializer;
 import org.apache.hadoop.hbase.io.hfile.CacheableDeserializerIdManager;
 import org.apache.hadoop.hbase.io.hfile.CachedBlock;
-import org.apache.hadoop.hbase.io.hfile.BlockCacheUtil;
 import org.apache.hadoop.hbase.io.hfile.CombinedBlockCache;
 import org.apache.hadoop.hbase.io.hfile.HFileBlock;
 import org.apache.hadoop.hbase.util.ConcurrentIndex;
@@ -166,7 +166,6 @@ public class BucketCache implements BlockCache, HeapSize {
   private long cacheCapacity;
   /** Approximate block size */
   private final long blockSize;
-  private final int[] bucketSizes;
 
   /** Duration of IO errors tolerated before we disable cache, 1 min as default */
   private final int ioErrorsTolerationDuration;
@@ -228,7 +227,6 @@ public class BucketCache implements BlockCache, HeapSize {
     this.cacheCapacity = capacity;
     this.persistencePath = persistencePath;
     this.blockSize = blockSize;
-    this.bucketSizes = bucketSizes;
     this.ioErrorsTolerationDuration = ioErrorsTolerationDuration;
 
     bucketAllocator = new BucketAllocator(capacity, bucketSizes);
@@ -244,7 +242,7 @@ public class BucketCache implements BlockCache, HeapSize {
     if (ioEngine.isPersistent() && persistencePath != null) {
       try {
-        retrieveFromFile();
+        retrieveFromFile(bucketSizes);
       } catch (IOException ioex) {
         LOG.error("Can't restore from file because of", ioex);
       } catch (ClassNotFoundException cnfe) {
@@ -866,7 +864,7 @@
   }
 
   @SuppressWarnings("unchecked")
-  private void retrieveFromFile() throws IOException, BucketAllocatorException,
+  private void retrieveFromFile(int[] bucketSizes) throws IOException, BucketAllocatorException,
       ClassNotFoundException {
     File persistenceFile = new File(persistencePath);
     if (!persistenceFile.exists()) {
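
With the bucketSizes field gone, the configured sizes are simply threaded through to retrieveFromFile, which presumably needs them only while it rebuilds the allocator from the persisted state. A generic sketch of that pattern (names here are illustrative, not the actual BucketCache members):

    // Sketch: a value used only during construction/restore is passed as a parameter
    // instead of being kept in a field for the lifetime of the cache object.
    class RestorableCache {
      private final Allocator allocator;

      RestorableCache(long capacity, int[] bucketSizes, boolean restoreFromDisk) {
        this.allocator = restoreFromDisk
            ? restore(capacity, bucketSizes)        // sizes forwarded where needed...
            : new Allocator(capacity, bucketSizes);
        // ...so no bucketSizes field has to outlive construction.
      }

      private Allocator restore(long capacity, int[] bucketSizes) {
        // rebuild allocator state from persisted metadata plus the configured sizes
        return new Allocator(capacity, bucketSizes);
      }

      static class Allocator {
        Allocator(long capacity, int[] bucketSizes) { /* ... */ }
      }
    }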