Filter Cache: `soft` filter cache can cause bad memory behavior, closes #946.

This commit is contained in:
kimchy 2011-05-19 13:12:47 +03:00
parent ebd95b7eb8
commit 3b5c133c71
8 changed files with 26 additions and 77 deletions

View File

@ -36,7 +36,6 @@ public class CacheStats implements Streamable, ToXContent {
long fieldEvictions;
long filterEvictions;
long filterMemEvictions;
long filterCount;
long fieldSize = 0;
long filterSize = 0;
@ -45,10 +44,9 @@ public class CacheStats implements Streamable, ToXContent {
// No-arg constructor for stream deserialization; fields are populated via readFrom.
public CacheStats() {
}
public CacheStats(long fieldEvictions, long filterEvictions, long filterMemEvictions, long fieldSize, long filterSize, long filterCount, long bloomSize) {
public CacheStats(long fieldEvictions, long filterEvictions, long fieldSize, long filterSize, long filterCount, long bloomSize) {
this.fieldEvictions = fieldEvictions;
this.filterEvictions = filterEvictions;
this.filterMemEvictions = filterMemEvictions;
this.fieldSize = fieldSize;
this.filterSize = filterSize;
this.filterCount = filterCount;
@ -58,7 +56,6 @@ public class CacheStats implements Streamable, ToXContent {
public void add(CacheStats stats) {
this.fieldEvictions += stats.fieldEvictions;
this.filterEvictions += stats.filterEvictions;
this.filterMemEvictions += stats.filterMemEvictions;
this.fieldSize += stats.fieldSize;
this.filterSize += stats.filterSize;
this.filterCount += stats.filterCount;
@ -152,7 +149,6 @@ public class CacheStats implements Streamable, ToXContent {
builder.field(Fields.FIELD_SIZE_IN_BYTES, fieldSize);
builder.field(Fields.FILTER_COUNT, filterCount);
builder.field(Fields.FILTER_EVICTIONS, filterEvictions);
builder.field(Fields.FILTER_MEM_EVICTIONS, filterMemEvictions);
builder.field(Fields.FILTER_SIZE, filterSize().toString());
builder.field(Fields.FILTER_SIZE_IN_BYTES, filterSize);
builder.endObject();
@ -165,7 +161,6 @@ public class CacheStats implements Streamable, ToXContent {
static final XContentBuilderString FIELD_SIZE_IN_BYTES = new XContentBuilderString("field_size_in_bytes");
static final XContentBuilderString FIELD_EVICTIONS = new XContentBuilderString("field_evictions");
static final XContentBuilderString FILTER_EVICTIONS = new XContentBuilderString("filter_evictions");
static final XContentBuilderString FILTER_MEM_EVICTIONS = new XContentBuilderString("filter_mem_evictions");
static final XContentBuilderString FILTER_COUNT = new XContentBuilderString("filter_count");
static final XContentBuilderString FILTER_SIZE = new XContentBuilderString("filter_size");
static final XContentBuilderString FILTER_SIZE_IN_BYTES = new XContentBuilderString("filter_size_in_bytes");
@ -180,7 +175,6 @@ public class CacheStats implements Streamable, ToXContent {
@Override public void readFrom(StreamInput in) throws IOException {
fieldEvictions = in.readVLong();
filterEvictions = in.readVLong();
filterMemEvictions = in.readVLong();
fieldSize = in.readVLong();
filterSize = in.readVLong();
filterCount = in.readVLong();
@ -190,7 +184,6 @@ public class CacheStats implements Streamable, ToXContent {
@Override public void writeTo(StreamOutput out) throws IOException {
out.writeVLong(fieldEvictions);
out.writeVLong(filterEvictions);
out.writeVLong(filterMemEvictions);
out.writeVLong(fieldSize);
out.writeVLong(filterSize);
out.writeVLong(filterCount);

View File

@ -73,7 +73,8 @@ public class IndexCache extends AbstractIndexComponent implements CloseableCompo
}
public CacheStats stats() {
return new CacheStats(fieldDataCache.evictions(), filterCache.evictions(), filterCache.memEvictions(), fieldDataCache.sizeInBytes(), filterCache.sizeInBytes(), filterCache.count(), bloomCache.sizeInBytes());
FilterCache.EntriesStats filterEntriesStats = filterCache.entriesStats();
return new CacheStats(fieldDataCache.evictions(), filterCache.evictions(), fieldDataCache.sizeInBytes(), filterEntriesStats.sizeInBytes, filterEntriesStats.count, bloomCache.sizeInBytes());
}
public FilterCache filter() {

View File

@ -29,6 +29,16 @@ import org.elasticsearch.index.IndexComponent;
*/
public interface FilterCache extends IndexComponent, CloseableComponent {
// Immutable holder for filter-cache entry statistics: the total size in
// bytes of the cached doc sets, and the number of cached entries.
static class EntriesStats {
public final long sizeInBytes;
public final long count;
public EntriesStats(long sizeInBytes, long count) {
this.sizeInBytes = sizeInBytes;
this.count = count;
}
}
String type();
Filter cache(Filter filterToCache);
@ -39,11 +49,7 @@ public interface FilterCache extends IndexComponent, CloseableComponent {
void clear();
long count();
long sizeInBytes();
EntriesStats entriesStats();
long evictions();
long memEvictions();
}

View File

@ -62,19 +62,11 @@ public class NoneFilterCache extends AbstractIndexComponent implements FilterCac
// nothing to do here
}
// Always zero: the "none" cache implementation stores no entries.
@Override public long count() {
return 0;
}
@Override public long sizeInBytes() {
return 0;
@Override public EntriesStats entriesStats() {
return new EntriesStats(0, 0);
}
// Nothing is ever cached, so nothing can ever be evicted.
@Override public long evictions() {
return 0;
}
// No memory-pressure evictions either: this no-op cache holds no state.
@Override public long memEvictions() {
return 0;
}
}

View File

@ -87,10 +87,6 @@ public class ResidentFilterCache extends AbstractConcurrentMapFilterCache implem
return evictions.get();
}
// Always zero: the resident cache holds entries with strong references,
// so the backing map never drops them under memory pressure.
@Override public long memEvictions() {
return 0;
}
// Eviction callback for the filter map: just tracks the eviction count.
@Override public void onEviction(Filter filter, DocSet docSet) {
evictions.incrementAndGet();
}

View File

@ -49,7 +49,6 @@ public class SoftFilterCache extends AbstractConcurrentMapFilterCache implements
private volatile TimeValue expire;
private final AtomicLong evictions = new AtomicLong();
private AtomicLong memEvictions;
private final ApplySettings applySettings = new ApplySettings();
@ -69,17 +68,14 @@ public class SoftFilterCache extends AbstractConcurrentMapFilterCache implements
}
@Override protected ConcurrentMap<Object, ReaderValue> buildCache() {
memEvictions = new AtomicLong(); // we need to init it here, since its called from the super constructor
// better to have soft on the whole ReaderValue, simpler on the GC to clean it
MapMaker mapMaker = new MapMaker().weakKeys().softValues();
mapMaker.evictionListener(new CacheMapEvictionListener(memEvictions));
MapMaker mapMaker = new MapMaker().weakKeys();
return mapMaker.makeMap();
}
@Override protected ConcurrentMap<Filter, DocSet> buildFilterMap() {
// DocSet are not really stored with strong reference only when searching on them...
// Filter might be stored in query cache
MapMaker mapMaker = new MapMaker();
MapMaker mapMaker = new MapMaker().softValues();
if (maxSize != -1) {
mapMaker.maximumSize(maxSize);
}
@ -98,10 +94,6 @@ public class SoftFilterCache extends AbstractConcurrentMapFilterCache implements
return evictions.get();
}
// Evictions triggered by the reference-based map itself (GC memory
// pressure), as opposed to explicit size/expiry evictions.
@Override public long memEvictions() {
return memEvictions.get();
}
// Eviction callback for the filter map: just tracks the eviction count.
@Override public void onEviction(Filter filter, DocSet docSet) {
evictions.incrementAndGet();
}

View File

@ -25,7 +25,6 @@ import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Filter;
import org.apache.lucene.util.OpenBitSet;
import org.elasticsearch.common.RamUsage;
import org.elasticsearch.common.collect.MapEvictionListener;
import org.elasticsearch.common.collect.MapMaker;
import org.elasticsearch.common.lab.LongsLAB;
import org.elasticsearch.common.lucene.docset.DocSet;
@ -42,7 +41,6 @@ import org.elasticsearch.index.settings.IndexSettings;
import java.io.IOException;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicLong;
import static org.elasticsearch.common.util.concurrent.ConcurrentCollections.*;
@ -107,22 +105,18 @@ public abstract class AbstractConcurrentMapFilterCache extends AbstractIndexComp
}
}
@Override public long sizeInBytes() {
@Override public EntriesStats entriesStats() {
long sizeInBytes = 0;
long totalCount = 0;
int segmentsCount = 0;
for (ReaderValue readerValue : cache.values()) {
segmentsCount++;
for (DocSet docSet : readerValue.filters().values()) {
sizeInBytes += docSet.sizeInBytes();
totalCount++;
}
}
return sizeInBytes;
}
@Override public long count() {
long entries = 0;
for (ReaderValue readerValue : cache.values()) {
entries += readerValue.filters().size();
}
return entries;
return new EntriesStats(sizeInBytes, totalCount / segmentsCount);
}
@Override public Filter cache(Filter filterToCache) {
@ -250,21 +244,4 @@ public abstract class AbstractConcurrentMapFilterCache extends AbstractIndexComp
return longsLAB;
}
}
// Listener attached to the reader-value cache map. On eviction it bumps the
// supplied counter and eagerly clears the evicted reader's filter map so the
// cached doc sets become collectible sooner.
public static class CacheMapEvictionListener implements MapEvictionListener<Object, ReaderValue> {
// Counter shared with the owning cache; incremented once per eviction.
private final AtomicLong evictions;
public CacheMapEvictionListener(AtomicLong evictions) {
this.evictions = evictions;
}
@Override public void onEviction(Object o, ReaderValue readerValue) {
evictions.incrementAndGet();
if (readerValue != null) {
// extra clean the map: drop cached filters so their DocSets can be GC'ed
readerValue.filters().clear();
}
}
}
}

View File

@ -49,7 +49,6 @@ public class WeakFilterCache extends AbstractConcurrentMapFilterCache implements
private volatile TimeValue expire;
private final AtomicLong evictions = new AtomicLong();
private AtomicLong memEvictions;
private final ApplySettings applySettings = new ApplySettings();
@ -69,15 +68,12 @@ public class WeakFilterCache extends AbstractConcurrentMapFilterCache implements
}
@Override protected ConcurrentMap<Object, ReaderValue> buildCache() {
memEvictions = new AtomicLong(); // we need to init it here, since its called from the super constructor
// better to have weak on the whole ReaderValue, simpler on the GC to clean it
MapMaker mapMaker = new MapMaker().weakKeys().softValues();
mapMaker.evictionListener(new CacheMapEvictionListener(memEvictions));
MapMaker mapMaker = new MapMaker().weakKeys();
return mapMaker.makeMap();
}
@Override protected ConcurrentMap<Filter, DocSet> buildFilterMap() {
MapMaker mapMaker = new MapMaker();
MapMaker mapMaker = new MapMaker().weakValues();
if (maxSize != -1) {
mapMaker.maximumSize(maxSize);
}
@ -96,10 +92,6 @@ public class WeakFilterCache extends AbstractConcurrentMapFilterCache implements
return evictions.get();
}
// Evictions triggered by the reference-based map itself (GC memory
// pressure), as opposed to explicit size/expiry evictions.
@Override public long memEvictions() {
return memEvictions.get();
}
// Eviction callback for the filter map: just tracks the eviction count.
@Override public void onEviction(Filter filter, DocSet docSet) {
evictions.incrementAndGet();
}