Extend stats API to include the filter cache's entry count and eviction count, closes #802.

kimchy 2011-03-22 13:30:14 +02:00
parent e759b4c971
commit 8c9000c54c
9 changed files with 102 additions and 5 deletions
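
The counters added here surface through CacheStats, which IndexCache.stats() builds further down in this diff. As a rough, editor-added usage sketch (not part of the commit), assuming an IndexCache has already been obtained from the index module and that both classes live under org.elasticsearch.index.cache:

// Hypothetical helper, for illustration only: print the two counters this
// commit adds to CacheStats. The fully qualified names are assumptions.
static void printFilterCacheStats(org.elasticsearch.index.cache.IndexCache indexCache) {
    org.elasticsearch.index.cache.CacheStats stats = indexCache.stats();
    System.out.println("filter_count=" + stats.filterCount()
            + " filter_evictions=" + stats.filterEvictions());
}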

View File

@@ -35,6 +35,8 @@ import java.io.IOException;
public class CacheStats implements Streamable, ToXContent {
long fieldEvictions;
long filterEvictions;
long filterCount;
long fieldSize = 0;
long filterSize = 0;
long bloomSize = 0;
@@ -42,17 +44,21 @@ public class CacheStats implements Streamable, ToXContent {
public CacheStats() {
}
public CacheStats(long fieldEvictions, long fieldSize, long filterSize, long bloomSize) {
public CacheStats(long fieldEvictions, long filterEvictions, long fieldSize, long filterSize, long filterCount, long bloomSize) {
this.fieldEvictions = fieldEvictions;
this.filterEvictions = filterEvictions;
this.fieldSize = fieldSize;
this.filterSize = filterSize;
this.filterCount = filterCount;
this.bloomSize = bloomSize;
}
public void add(CacheStats stats) {
this.fieldEvictions += stats.fieldEvictions;
this.filterEvictions += stats.filterEvictions;
this.fieldSize += stats.fieldSize;
this.filterSize += stats.filterSize;
this.filterCount += stats.filterCount;
this.bloomSize += stats.bloomSize;
}
@@ -64,6 +70,22 @@ public class CacheStats implements Streamable, ToXContent {
return this.fieldEvictions();
}
public long filterEvictions() {
return this.filterEvictions;
}
public long getFilterEvictions() {
return this.filterEvictions;
}
public long filterCount() {
return this.filterCount;
}
public long getFilterCount() {
return filterCount;
}
public long fieldSizeInBytes() {
return this.fieldSize;
}
@@ -117,6 +139,8 @@ public class CacheStats implements Streamable, ToXContent {
builder.field(Fields.FIELD_EVICTIONS, fieldEvictions);
builder.field(Fields.FIELD_SIZE, fieldSize().toString());
builder.field(Fields.FIELD_SIZE_IN_BYTES, fieldSize);
builder.field(Fields.FILTER_COUNT, filterCount);
builder.field(Fields.FILTER_EVICTIONS, filterEvictions);
builder.field(Fields.FILTER_SIZE, filterSize().toString());
builder.field(Fields.FILTER_SIZE_IN_BYTES, filterSize);
builder.endObject();
@@ -128,6 +152,8 @@ public class CacheStats implements Streamable, ToXContent {
static final XContentBuilderString FIELD_SIZE = new XContentBuilderString("field_size");
static final XContentBuilderString FIELD_SIZE_IN_BYTES = new XContentBuilderString("field_size_in_bytes");
static final XContentBuilderString FIELD_EVICTIONS = new XContentBuilderString("field_evictions");
static final XContentBuilderString FILTER_EVICTIONS = new XContentBuilderString("filter_evictions");
static final XContentBuilderString FILTER_COUNT = new XContentBuilderString("filter_count");
static final XContentBuilderString FILTER_SIZE = new XContentBuilderString("filter_size");
static final XContentBuilderString FILTER_SIZE_IN_BYTES = new XContentBuilderString("filter_size_in_bytes");
}
@@ -140,15 +166,19 @@ public class CacheStats implements Streamable, ToXContent {
@Override public void readFrom(StreamInput in) throws IOException {
fieldEvictions = in.readVLong();
filterEvictions = in.readVLong();
fieldSize = in.readVLong();
filterSize = in.readVLong();
filterCount = in.readVLong();
bloomSize = in.readVLong();
}
@Override public void writeTo(StreamOutput out) throws IOException {
out.writeVLong(fieldEvictions);
out.writeVLong(filterEvictions);
out.writeVLong(fieldSize);
out.writeVLong(filterSize);
out.writeVLong(filterCount);
out.writeVLong(bloomSize);
}
}
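
Because CacheStats.add() accumulates every field, the new filterEvictions and filterCount roll up across indices just like the existing numbers. A small self-contained sketch of that aggregation (editor's illustration, not part of the commit; the values are invented and the package is assumed):

import org.elasticsearch.index.cache.CacheStats; // assumed package

public class CacheStatsRollupSketch {
    public static void main(String[] args) {
        // Argument order follows the new constructor:
        // (fieldEvictions, filterEvictions, fieldSize, filterSize, filterCount, bloomSize)
        CacheStats index1 = new CacheStats(3, 12, 1024, 2048, 40, 0);
        CacheStats index2 = new CacheStats(1, 5, 512, 4096, 25, 0);

        CacheStats total = new CacheStats();
        total.add(index1);
        total.add(index2);

        System.out.println("filter_count = " + total.filterCount());         // 65
        System.out.println("filter_evictions = " + total.filterEvictions()); // 17
    }
}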

View File

@@ -73,7 +73,7 @@ public class IndexCache extends AbstractIndexComponent implements CloseableCompo
}
public CacheStats stats() {
return new CacheStats(fieldDataCache.evictions(), fieldDataCache.sizeInBytes(), filterCache.sizeInBytes(), bloomCache.sizeInBytes());
return new CacheStats(fieldDataCache.evictions(), filterCache.evictions(), fieldDataCache.sizeInBytes(), filterCache.sizeInBytes(), filterCache.count(), bloomCache.sizeInBytes());
}
public FilterCache filter() {

View File

@@ -46,5 +46,9 @@ public interface FilterCache extends IndexComponent, CloseableComponent {
*/
void clearUnreferenced();
long count();
long sizeInBytes();
long evictions();
}

View File

@@ -70,7 +70,15 @@ public class NoneFilterCache extends AbstractIndexComponent implements FilterCac
// nothing to do here
}
@Override public long count() {
return 0;
}
@Override public long sizeInBytes() {
return 0;
}
@Override public long evictions() {
return 0;
}
}

View File

@@ -20,6 +20,7 @@
package org.elasticsearch.index.cache.filter.resident;
import org.apache.lucene.search.Filter;
import org.elasticsearch.common.collect.MapEvictionListener;
import org.elasticsearch.common.collect.MapMaker;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.docset.DocSet;
@@ -31,18 +32,21 @@ import org.elasticsearch.index.settings.IndexSettings;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
/**
* A resident reference based filter cache that has soft keys on the <tt>IndexReader</tt>.
*
* @author kimchy (shay.banon)
*/
public class ResidentFilterCache extends AbstractDoubleConcurrentMapFilterCache {
public class ResidentFilterCache extends AbstractDoubleConcurrentMapFilterCache implements MapEvictionListener<Filter, DocSet> {
private final int maxSize;
private final TimeValue expire;
private final AtomicLong evictions = new AtomicLong();
@Inject public ResidentFilterCache(Index index, @IndexSettings Settings indexSettings) {
super(index, indexSettings);
this.maxSize = componentSettings.getAsInt("max_size", 1000);
@@ -70,10 +74,19 @@ public class ResidentFilterCache extends AbstractDoubleConcurrentMapFilterCache
if (expire != null) {
mapMaker.expireAfterAccess(expire.nanos(), TimeUnit.NANOSECONDS);
}
mapMaker.evictionListener(this);
return mapMaker.makeMap();
}
@Override public String type() {
return "resident";
}
@Override public long evictions() {
return evictions.get();
}
@Override public void onEviction(Filter filter, DocSet docSet) {
evictions.incrementAndGet();
}
}
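
The eviction counting added to ResidentFilterCache (and repeated in SoftFilterCache and WeakFilterCache below) is simply an AtomicLong bumped from the map's eviction callback. A stripped-down, self-contained sketch of that pattern (editor's illustration; the listener interface here is a stand-in for MapEvictionListener):

import java.util.concurrent.atomic.AtomicLong;

public class EvictionCounterSketch {

    // Stand-in for org.elasticsearch.common.collect.MapEvictionListener.
    interface EvictionListener<K, V> {
        void onEviction(K key, V value);
    }

    static class CountingCache implements EvictionListener<String, String> {
        private final AtomicLong evictions = new AtomicLong();

        // Invoked by the backing map whenever it drops an entry.
        @Override public void onEviction(String key, String value) {
            evictions.incrementAndGet();
        }

        // Exposed the same way FilterCache#evictions() is above.
        long evictions() {
            return evictions.get();
        }
    }

    public static void main(String[] args) {
        CountingCache cache = new CountingCache();
        cache.onEviction("filter-a", "docset-a"); // simulate two evictions
        cache.onEviction("filter-b", "docset-b");
        System.out.println("evictions = " + cache.evictions()); // prints 2
    }
}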

View File

@@ -20,6 +20,7 @@
package org.elasticsearch.index.cache.filter.soft;
import org.apache.lucene.search.Filter;
import org.elasticsearch.common.collect.MapEvictionListener;
import org.elasticsearch.common.collect.MapMaker;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.docset.DocSet;
@@ -31,18 +32,21 @@ import org.elasticsearch.index.settings.IndexSettings;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
/**
* A soft reference based filter cache that has soft keys on the <tt>IndexReader</tt>.
*
* @author kimchy (shay.banon)
*/
public class SoftFilterCache extends AbstractDoubleConcurrentMapFilterCache {
public class SoftFilterCache extends AbstractDoubleConcurrentMapFilterCache implements MapEvictionListener<Filter, DocSet> {
private final int maxSize;
private final TimeValue expire;
private final AtomicLong evictions = new AtomicLong();
@Inject public SoftFilterCache(Index index, @IndexSettings Settings indexSettings) {
super(index, indexSettings);
this.maxSize = componentSettings.getAsInt("max_size", -1);
@@ -72,10 +76,19 @@ public class SoftFilterCache extends AbstractDoubleConcurrentMapFilterCache {
if (expire != null) {
mapMaker.expireAfterAccess(expire.nanos(), TimeUnit.NANOSECONDS);
}
mapMaker.evictionListener(this);
return mapMaker.makeMap();
}
@Override public String type() {
return "soft";
}
@Override public long evictions() {
return evictions.get();
}
@Override public void onEviction(Filter filter, DocSet docSet) {
evictions.incrementAndGet();
}
}

View File

@@ -98,6 +98,14 @@ public abstract class AbstractConcurrentMapFilterCache extends AbstractIndexComp
return sizeInBytes;
}
@Override public long count() {
long entries = 0;
for (ConcurrentMap<Filter, DocSet> map : cache.values()) {
entries += map.size();
}
return entries;
}
@Override public Filter cache(Filter filterToCache) {
if (isCached(filterToCache)) {
return filterToCache;
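
Both abstract base caches keep a two-level structure: an outer map keyed by reader, each value being that reader's Filter-to-DocSet map, so count() sums the per-reader map sizes and a filter cached under several readers is counted once per reader. A toy illustration of that summation (editor's sketch; plain strings stand in for readers, filters, and doc sets):

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

public class FilterCountSketch {
    public static void main(String[] args) {
        ConcurrentMap<String, ConcurrentMap<String, String>> cache =
                new ConcurrentHashMap<String, ConcurrentMap<String, String>>();

        ConcurrentMap<String, String> reader1 = new ConcurrentHashMap<String, String>();
        reader1.put("term:user=kimchy", "docset-1");
        ConcurrentMap<String, String> reader2 = new ConcurrentHashMap<String, String>();
        reader2.put("term:user=kimchy", "docset-2"); // same filter, different reader

        cache.put("reader-1", reader1);
        cache.put("reader-2", reader2);

        // Mirrors the count() loop above: sum the sizes of the per-reader maps.
        long entries = 0;
        for (ConcurrentMap<String, String> map : cache.values()) {
            entries += map.size();
        }
        System.out.println("filter_count = " + entries); // prints 2, one per reader
    }
}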

View File

@@ -79,6 +79,14 @@ public abstract class AbstractDoubleConcurrentMapFilterCache extends AbstractInd
@Override public void clearUnreferenced() {
}
@Override public long count() {
long entries = 0;
for (ConcurrentMap<Filter, DocSet> map : cache.values()) {
entries += map.size();
}
return entries;
}
@Override public long sizeInBytes() {
long sizeInBytes = 0;
for (ConcurrentMap<Filter, DocSet> map : cache.values()) {

View File

@@ -20,6 +20,7 @@
package org.elasticsearch.index.cache.filter.weak;
import org.apache.lucene.search.Filter;
import org.elasticsearch.common.collect.MapEvictionListener;
import org.elasticsearch.common.collect.MapMaker;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.docset.DocSet;
@@ -31,18 +32,21 @@ import org.elasticsearch.index.settings.IndexSettings;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
/**
* A weak reference based filter cache that has weak keys on the <tt>IndexReader</tt>.
*
* @author kimchy (shay.banon)
*/
public class WeakFilterCache extends AbstractConcurrentMapFilterCache {
public class WeakFilterCache extends AbstractConcurrentMapFilterCache implements MapEvictionListener<Filter, DocSet> {
private final int maxSize;
private final TimeValue expire;
private final AtomicLong evictions = new AtomicLong();
@Inject public WeakFilterCache(Index index, @IndexSettings Settings indexSettings) {
super(index, indexSettings);
this.maxSize = componentSettings.getAsInt("max_size", -1);
@@ -59,10 +63,19 @@ public class WeakFilterCache extends AbstractConcurrentMapFilterCache {
if (expire != null) {
mapMaker.expireAfterAccess(expire.nanos(), TimeUnit.NANOSECONDS);
}
mapMaker.evictionListener(this);
return mapMaker.makeMap();
}
@Override public String type() {
return "weak";
}
@Override public long evictions() {
return evictions.get();
}
@Override public void onEviction(Filter filter, DocSet docSet) {
evictions.incrementAndGet();
}
}