Use a 1024 byte minimum weight for filter cache entries
This changes the weighing function for the filter cache to use a configurable minimum weight for each cached filter. The minimum defaults to 1 KB and can be configured with the `indices.cache.filter.minimum_entry_weight` setting. This also fixes an issue where the filter cache's concurrency level was exposed as a setting but never actually used when constructing the cache.

Relates to #8268
parent 4ac7b02ce7
commit 42b6e01a37
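For reference, the new setting and the now-honored concurrency level could be set in `elasticsearch.yml` along these lines (a sketch with illustrative values; the defaults are 1024 bytes and 16 respectively):

    # illustrative values, not recommendations
    indices.cache.filter.minimum_entry_weight: 2048
    indices.cache.filter.concurrency_level: 32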
WeightedFilterCache.java

@@ -209,12 +209,19 @@ public class WeightedFilterCache extends AbstractIndexComponent implements Filte
         }
     }
 
+    /** A weigher for the Guava filter cache that uses a minimum entry size */
     public static class FilterCacheValueWeigher implements Weigher<WeightedFilterCache.FilterCacheKey, DocIdSet> {
 
+        private final int minimumEntrySize;
+
+        public FilterCacheValueWeigher(int minimumEntrySize) {
+            this.minimumEntrySize = minimumEntrySize;
+        }
+
         @Override
         public int weigh(FilterCacheKey key, DocIdSet value) {
             int weight = (int) Math.min(DocIdSets.sizeInBytes(value), Integer.MAX_VALUE);
-            return weight == 0 ? 1 : weight;
+            return Math.max(weight, this.minimumEntrySize);
         }
     }
 
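To make the weigher's behaviour concrete, here is a minimal standalone sketch of the same idea (assumed class name and key/value types; it uses Guava's `Weigher` interface directly and an array length as a stand-in for `DocIdSets.sizeInBytes`):

    import com.google.common.cache.Weigher;

    // Sketch: report each entry's size in bytes, but never less than a configured
    // floor, so even near-empty values consume budget from CacheBuilder.maximumWeight(...).
    public class MinimumEntryWeigher implements Weigher<String, long[]> {

        private final int minimumEntrySize;

        public MinimumEntryWeigher(int minimumEntrySize) {
            this.minimumEntrySize = minimumEntrySize;
        }

        @Override
        public int weigh(String key, long[] value) {
            // stand-in for DocIdSets.sizeInBytes(value)
            int weight = (int) Math.min(value.length * 8L, Integer.MAX_VALUE);
            return Math.max(weight, minimumEntrySize);
        }
    }

The effect is that the number of cached filters is bounded by the cache's byte budget divided by the minimum weight, rather than growing almost without limit when many filters produce tiny or empty DocIdSets.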
IndicesFilterCache.java

@@ -55,15 +55,17 @@ public class IndicesFilterCache extends AbstractComponent implements RemovalList
     private volatile int concurrencyLevel;
 
     private final TimeValue cleanInterval;
+    private final int minimumEntryWeight;
 
     private final Set<Object> readersKeysToClean = ConcurrentCollections.newConcurrentSet();
 
     private volatile boolean closed;
 
     public static final String INDICES_CACHE_FILTER_SIZE = "indices.cache.filter.size";
     public static final String INDICES_CACHE_FILTER_EXPIRE = "indices.cache.filter.expire";
     public static final String INDICES_CACHE_FILTER_CONCURRENCY_LEVEL = "indices.cache.filter.concurrency_level";
+    public static final String INDICES_CACHE_FILTER_CLEAN_INTERVAL = "indices.cache.filter.clean_interval";
+    public static final String INDICES_CACHE_FILTER_MINIMUM_ENTRY_WEIGHT = "indices.cache.filter.minimum_entry_weight";
 
     class ApplySettings implements NodeSettingsService.Listener {
         @Override
@@ -71,13 +73,15 @@ public class IndicesFilterCache extends AbstractComponent implements RemovalList
             boolean replace = false;
             String size = settings.get(INDICES_CACHE_FILTER_SIZE, IndicesFilterCache.this.size);
             if (!size.equals(IndicesFilterCache.this.size)) {
-                logger.info("updating [indices.cache.filter.size] from [{}] to [{}]", IndicesFilterCache.this.size, size);
+                logger.info("updating [{}] from [{}] to [{}]",
+                        INDICES_CACHE_FILTER_SIZE, IndicesFilterCache.this.size, size);
                 IndicesFilterCache.this.size = size;
                 replace = true;
             }
             TimeValue expire = settings.getAsTime(INDICES_CACHE_FILTER_EXPIRE, IndicesFilterCache.this.expire);
             if (!Objects.equal(expire, IndicesFilterCache.this.expire)) {
-                logger.info("updating [indices.cache.filter.expire] from [{}] to [{}]", IndicesFilterCache.this.expire, expire);
+                logger.info("updating [{}] from [{}] to [{}]",
+                        INDICES_CACHE_FILTER_EXPIRE, IndicesFilterCache.this.expire, expire);
                 IndicesFilterCache.this.expire = expire;
                 replace = true;
             }
@@ -86,7 +90,8 @@ public class IndicesFilterCache extends AbstractComponent implements RemovalList
                 throw new ElasticsearchIllegalArgumentException("concurrency_level must be > 0 but was: " + concurrencyLevel);
             }
             if (!Objects.equal(concurrencyLevel, IndicesFilterCache.this.concurrencyLevel)) {
-                logger.info("updating [indices.cache.filter.concurrency_level] from [{}] to [{}]", IndicesFilterCache.this.concurrencyLevel, concurrencyLevel);
+                logger.info("updating [{}] from [{}] to [{}]",
+                        INDICES_CACHE_FILTER_CONCURRENCY_LEVEL, IndicesFilterCache.this.concurrencyLevel, concurrencyLevel);
                 IndicesFilterCache.this.concurrencyLevel = concurrencyLevel;
                 replace = true;
             }
@@ -103,9 +108,14 @@ public class IndicesFilterCache extends AbstractComponent implements RemovalList
     public IndicesFilterCache(Settings settings, ThreadPool threadPool, NodeSettingsService nodeSettingsService) {
         super(settings);
         this.threadPool = threadPool;
-        this.size = componentSettings.get("size", "10%");
-        this.expire = componentSettings.getAsTime("expire", null);
-        this.cleanInterval = componentSettings.getAsTime("clean_interval", TimeValue.timeValueSeconds(60));
+        this.size = settings.get(INDICES_CACHE_FILTER_SIZE, "10%");
+        this.expire = settings.getAsTime(INDICES_CACHE_FILTER_EXPIRE, null);
+        this.minimumEntryWeight = settings.getAsInt(INDICES_CACHE_FILTER_MINIMUM_ENTRY_WEIGHT, 1024); // 1k per entry minimum
+        if (minimumEntryWeight <= 0) {
+            throw new ElasticsearchIllegalArgumentException("minimum_entry_weight must be > 0 but was: " + minimumEntryWeight);
+        }
+        this.cleanInterval = settings.getAsTime(INDICES_CACHE_FILTER_CLEAN_INTERVAL, TimeValue.timeValueSeconds(60));
+        // defaults to 4, but this is a busy map for all indices, increase it a bit
         this.concurrencyLevel = settings.getAsInt(INDICES_CACHE_FILTER_CONCURRENCY_LEVEL, 16);
         if (concurrencyLevel <= 0) {
             throw new ElasticsearchIllegalArgumentException("concurrency_level must be > 0 but was: " + concurrencyLevel);
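A rough worked example of what the 1024-byte floor buys (the heap size here is illustrative): with the default cache size of 10% of the heap and, say, a 1 GB heap, the weight budget is about 100 MB. Under the old weigher an empty DocIdSet weighed 1, so on the order of a hundred million entries could in principle accumulate before weight-based eviction kicked in; with the 1024-byte minimum, the same budget holds at most about 104,857,600 / 1024 ≈ 102,400 entries.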
@@ -122,10 +132,9 @@ public class IndicesFilterCache extends AbstractComponent implements RemovalList
     private void buildCache() {
         CacheBuilder<WeightedFilterCache.FilterCacheKey, DocIdSet> cacheBuilder = CacheBuilder.newBuilder()
                 .removalListener(this)
-                .maximumWeight(sizeInBytes).weigher(new WeightedFilterCache.FilterCacheValueWeigher());
+                .maximumWeight(sizeInBytes).weigher(new WeightedFilterCache.FilterCacheValueWeigher(minimumEntryWeight));
 
-        // defaults to 4, but this is a busy map for all indices, increase it a bit
-        cacheBuilder.concurrencyLevel(16);
+        cacheBuilder.concurrencyLevel(this.concurrencyLevel);
 
         if (expire != null) {
             cacheBuilder.expireAfterAccess(expire.millis(), TimeUnit.MILLISECONDS);
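For context, Guava's `CacheBuilder.concurrencyLevel(...)` guides how the cache is internally segmented for concurrent updates and defaults to 4; because the builder previously received a hard-coded 16, the `indices.cache.filter.concurrency_level` setting was silently ignored. A self-contained sketch of the corrected wiring, with illustrative names and values standing in for the resolved settings:

    import com.google.common.cache.Cache;
    import com.google.common.cache.CacheBuilder;
    import com.google.common.cache.Weigher;

    public class FilterCacheWiringSketch {

        public static void main(String[] args) {
            // illustrative stand-ins for the resolved settings
            final int minimumEntryWeight = 1024;      // indices.cache.filter.minimum_entry_weight
            final int concurrencyLevel = 16;          // indices.cache.filter.concurrency_level
            final long maxWeightBytes = 100L << 20;   // e.g. resolved from indices.cache.filter.size

            Cache<String, long[]> cache = CacheBuilder.newBuilder()
                    .maximumWeight(maxWeightBytes)
                    .weigher(new Weigher<String, long[]>() {
                        @Override
                        public int weigh(String key, long[] value) {
                            int weight = (int) Math.min(value.length * 8L, Integer.MAX_VALUE);
                            return Math.max(weight, minimumEntryWeight);
                        }
                    })
                    // use the configured value instead of a hard-coded constant
                    .concurrencyLevel(concurrencyLevel)
                    .build();

            cache.put("some-filter-key", new long[0]); // a tiny value still weighs at least 1024
            System.out.println("entries: " + cache.size());
        }
    }

The two lines that matter mirror the hunk above: the weigher is constructed with the minimum entry weight, and the concurrency level comes from configuration.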
@@ -145,6 +154,7 @@ public class IndicesFilterCache extends AbstractComponent implements RemovalList
     public void close() {
         closed = true;
         cache.invalidateAll();
+        cache.cleanUp();
     }
 
     public Cache<WeightedFilterCache.FilterCacheKey, DocIdSet> cache() {
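The `cache.cleanUp()` added to `close()` asks Guava to run any pending maintenance work immediately; the cache otherwise performs such maintenance lazily during later reads and writes, which will never happen once this node-level cache has been closed.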