Change how the filter cache works: use weak/soft references on the IndexReader cache key, so the IndexReader can be released under memory pressure.
This commit is contained in:
parent
5fa3e40a4e
commit
5e4343b48b
|
@ -19,6 +19,7 @@
|
|||
|
||||
package org.elasticsearch.index.cache.filter.soft;
|
||||
|
||||
import org.apache.lucene.index.IndexReader;
|
||||
import org.apache.lucene.search.DocIdSet;
|
||||
import org.apache.lucene.search.Filter;
|
||||
import org.elasticsearch.index.Index;
|
||||
|
@ -32,19 +33,17 @@ import org.elasticsearch.util.settings.Settings;
|
|||
import java.util.concurrent.ConcurrentMap;
|
||||
|
||||
/**
|
||||
* @author kimchy (Shay Banon)
|
||||
* A soft reference based filter cache that has soft keys on the <tt>IndexReader</tt>.
|
||||
*
|
||||
* @author kimchy (shay.banon)
|
||||
*/
|
||||
public class SoftFilterCache extends AbstractConcurrentMapFilterCache {
|
||||
|
||||
@Inject public SoftFilterCache(Index index, @IndexSettings Settings indexSettings, ThreadPool threadPool) {
|
||||
super(index, indexSettings, threadPool);
|
||||
super(index, indexSettings, threadPool, new MapMaker().softKeys().<IndexReader, ConcurrentMap<Filter, DocIdSet>>makeMap());
|
||||
}
|
||||
|
||||
@Override public String type() {
|
||||
return "soft";
|
||||
}
|
||||
|
||||
@Override protected ConcurrentMap<Filter, DocIdSet> buildMap() {
|
||||
return new MapMaker().softValues().makeMap();
|
||||
}
|
||||
}
|
||||
|
|
|
@ -39,7 +39,9 @@ import static org.elasticsearch.util.concurrent.ConcurrentCollections.*;
|
|||
import static org.elasticsearch.util.lucene.docidset.DocIdSets.*;
|
||||
|
||||
/**
|
||||
* @author kimchy (Shay Banon)
|
||||
* A base concurrent filter cache that accepts the actual cache to use.
|
||||
*
|
||||
* @author kimchy (shay.banon)
|
||||
*/
|
||||
public abstract class AbstractConcurrentMapFilterCache extends AbstractIndexComponent implements FilterCache {
|
||||
|
||||
|
@ -49,14 +51,15 @@ public abstract class AbstractConcurrentMapFilterCache extends AbstractIndexComp
|
|||
|
||||
private final Future scheduleFuture;
|
||||
|
||||
protected AbstractConcurrentMapFilterCache(Index index, @IndexSettings Settings indexSettings, ThreadPool threadPool) {
|
||||
protected AbstractConcurrentMapFilterCache(Index index, @IndexSettings Settings indexSettings, ThreadPool threadPool,
|
||||
ConcurrentMap<IndexReader, ConcurrentMap<Filter, DocIdSet>> cache) {
|
||||
super(index, indexSettings);
|
||||
this.cache = cache;
|
||||
|
||||
this.readerCleanerSchedule = componentSettings.getAsTime("reader_cleaner_schedule", TimeValue.timeValueMinutes(1));
|
||||
|
||||
logger.debug("Using [" + type() + "] filter cache with reader_cleaner_schedule[{}]", readerCleanerSchedule);
|
||||
logger.debug("Using [" + type() + "] filter cache with reader_cleaner_schedule [{}]", readerCleanerSchedule);
|
||||
|
||||
this.cache = newConcurrentMap();
|
||||
this.scheduleFuture = threadPool.scheduleWithFixedDelay(new IndexReaderCleaner(), readerCleanerSchedule);
|
||||
}
|
||||
|
||||
|
@ -88,7 +91,9 @@ public abstract class AbstractConcurrentMapFilterCache extends AbstractIndexComp
|
|||
}
|
||||
}
|
||||
|
||||
protected abstract ConcurrentMap<Filter, DocIdSet> buildMap();
|
||||
protected ConcurrentMap<Filter, DocIdSet> buildFilterMap() {
|
||||
return newConcurrentMap();
|
||||
}
|
||||
|
||||
private class FilterCacheFilterWrapper extends Filter {
|
||||
|
||||
|
@ -101,7 +106,7 @@ public abstract class AbstractConcurrentMapFilterCache extends AbstractIndexComp
|
|||
@Override public DocIdSet getDocIdSet(IndexReader reader) throws IOException {
|
||||
ConcurrentMap<Filter, DocIdSet> cachedFilters = cache.get(reader);
|
||||
if (cachedFilters == null) {
|
||||
cachedFilters = buildMap();
|
||||
cachedFilters = buildFilterMap();
|
||||
cache.putIfAbsent(reader, cachedFilters);
|
||||
}
|
||||
DocIdSet docIdSet = cachedFilters.get(filter);
|
||||
|
|
|
@ -19,6 +19,7 @@
|
|||
|
||||
package org.elasticsearch.index.cache.filter.weak;
|
||||
|
||||
import org.apache.lucene.index.IndexReader;
|
||||
import org.apache.lucene.search.DocIdSet;
|
||||
import org.apache.lucene.search.Filter;
|
||||
import org.elasticsearch.index.Index;
|
||||
|
@ -32,19 +33,17 @@ import org.elasticsearch.util.settings.Settings;
|
|||
import java.util.concurrent.ConcurrentMap;
|
||||
|
||||
/**
|
||||
* @author kimchy (Shay Banon)
|
||||
* A weak reference based filter cache that has weak keys on the <tt>IndexReader</tt>.
|
||||
*
|
||||
* @author kimchy (shay.banon)
|
||||
*/
|
||||
public class WeakFilterCache extends AbstractConcurrentMapFilterCache {
|
||||
|
||||
@Inject public WeakFilterCache(Index index, @IndexSettings Settings indexSettings, ThreadPool threadPool) {
|
||||
super(index, indexSettings, threadPool);
|
||||
super(index, indexSettings, threadPool, new MapMaker().weakKeys().<IndexReader, ConcurrentMap<Filter, DocIdSet>>makeMap());
|
||||
}
|
||||
|
||||
@Override public String type() {
|
||||
return "weak";
|
||||
}
|
||||
|
||||
@Override protected ConcurrentMap<Filter, DocIdSet> buildMap() {
|
||||
return new MapMaker().weakValues().makeMap();
|
||||
}
|
||||
}
|
Loading…
Reference in New Issue