Filter Cache: Improved Caching, closes #182.

This commit is contained in:
kimchy 2010-05-19 21:05:43 +03:00
parent bd6b89f7ca
commit d1acef1e09
6 changed files with 43 additions and 35 deletions

View File

@@ -19,7 +19,7 @@
package org.elasticsearch.index.cache.filter;
import org.elasticsearch.index.cache.filter.soft.SoftFilterCache;
import org.elasticsearch.index.cache.filter.weak.WeakFilterCache;
import org.elasticsearch.util.inject.AbstractModule;
import org.elasticsearch.util.inject.Scopes;
import org.elasticsearch.util.settings.Settings;
@@ -41,7 +41,7 @@ public class FilterCacheModule extends AbstractModule {
@Override protected void configure() {
    // Bind the FilterCache implementation selected via the
    // "index.cache.filter.type" setting (FilterCacheSettings.FILTER_CACHE_TYPE).
    // As of this commit the default implementation is WeakFilterCache
    // (previously SoftFilterCache); the diff view had left both the removed
    // and the added .to(...) lines in place, which is not valid binder usage.
    bind(FilterCache.class)
            .to(settings.getAsClass(FilterCacheSettings.FILTER_CACHE_TYPE, WeakFilterCache.class, "org.elasticsearch.index.cache.filter.", "FilterCache"))
            .in(Scopes.SINGLETON);
}
}

View File

@@ -19,7 +19,6 @@
package org.elasticsearch.index.cache.filter.soft;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.Filter;
import org.elasticsearch.index.Index;
@@ -39,7 +38,7 @@ import java.util.concurrent.ConcurrentMap;
public class SoftFilterCache extends AbstractConcurrentMapFilterCache {
@Inject public SoftFilterCache(Index index, @IndexSettings Settings indexSettings) {
    // Backing map uses soft keys and is keyed by Object — the reader's cache key
    // (IndexReader.getFieldCacheKey()) rather than the IndexReader itself — so
    // cached filters are shared across reader reopens that keep the same key.
    // (The diff view had left both the old IndexReader-keyed and the new
    // Object-keyed super(...) calls in place; only the post-commit call remains.)
    super(index, indexSettings, new MapMaker().softKeys().<Object, ConcurrentMap<Filter, DocIdSet>>makeMap());
}
@Override public String type() {

View File

@@ -29,7 +29,6 @@ import org.elasticsearch.index.settings.IndexSettings;
import org.elasticsearch.util.settings.Settings;
import java.io.IOException;
import java.util.Iterator;
import java.util.concurrent.ConcurrentMap;
import static org.elasticsearch.util.concurrent.ConcurrentCollections.*;
@@ -42,10 +41,10 @@ import static org.elasticsearch.util.lucene.docidset.DocIdSets.*;
*/
public abstract class AbstractConcurrentMapFilterCache extends AbstractIndexComponent implements FilterCache {
// Outer map: reader cache key (IndexReader.getFieldCacheKey(), an opaque Object)
// -> per-reader map of Filter -> cached DocIdSet. Keying by the cache key instead
// of the IndexReader lets entries be reused across reader reopens sharing that key.
// (The diff view duplicated the old IndexReader-keyed and new Object-keyed
// declarations; only the post-commit Object-keyed versions remain here.)
private final ConcurrentMap<Object, ConcurrentMap<Filter, DocIdSet>> cache;

protected AbstractConcurrentMapFilterCache(Index index, @IndexSettings Settings indexSettings,
                                           ConcurrentMap<Object, ConcurrentMap<Filter, DocIdSet>> cache) {
    super(index, indexSettings);
    this.cache = cache;  // concrete map (soft/weak keyed) supplied by subclass
}
@@ -59,22 +58,23 @@ public abstract class AbstractConcurrentMapFilterCache extends AbstractIndexComp
}
@Override public void clearUnreferenced() {
    // Intentionally a no-op: the cache is now keyed by the reader cache key
    // (an opaque Object), so we can no longer walk the key set as IndexReaders
    // and drop entries whose refCount has reached zero. Stale entries are
    // reclaimed through the soft/weak key semantics of the backing map instead.
    // (The diff view had left the removed live implementation above its
    // commented-out replacement; only the post-commit state remains here.)
    // can't do this, since we cache on cacheKey...
//        int totalCount = cache.size();
//        int cleaned = 0;
//        for (Iterator<IndexReader> readerIt = cache.keySet().iterator(); readerIt.hasNext();) {
//            IndexReader reader = readerIt.next();
//            if (reader.getRefCount() <= 0) {
//                readerIt.remove();
//                cleaned++;
//            }
//        }
//        if (logger.isDebugEnabled()) {
//            if (cleaned > 0) {
//                logger.debug("Cleaned [{}] out of estimated total [{}]", cleaned, totalCount);
//            }
//        } else if (logger.isTraceEnabled()) {
//            logger.trace("Cleaned [{}] out of estimated total [{}]", cleaned, totalCount);
//        }
}
@Override public Filter cache(Filter filterToCache) {
@@ -98,10 +98,10 @@ public abstract class AbstractConcurrentMapFilterCache extends AbstractIndexComp
}
@Override public DocIdSet getDocIdSet(IndexReader reader) throws IOException {
ConcurrentMap<Filter, DocIdSet> cachedFilters = cache.get(reader);
ConcurrentMap<Filter, DocIdSet> cachedFilters = cache.get(reader.getFieldCacheKey());
if (cachedFilters == null) {
cachedFilters = buildFilterMap();
cache.putIfAbsent(reader, cachedFilters);
cache.putIfAbsent(reader.getFieldCacheKey(), cachedFilters);
}
DocIdSet docIdSet = cachedFilters.get(filter);
if (docIdSet != null) {

View File

@@ -19,7 +19,6 @@
package org.elasticsearch.index.cache.filter.weak;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.Filter;
import org.elasticsearch.index.Index;
@@ -39,7 +38,7 @@ import java.util.concurrent.ConcurrentMap;
public class WeakFilterCache extends AbstractConcurrentMapFilterCache {
@Inject public WeakFilterCache(Index index, @IndexSettings Settings indexSettings) {
    // Backing map uses weak keys and is keyed by Object — the reader's cache key
    // (IndexReader.getFieldCacheKey()) rather than the IndexReader itself — so
    // cached filters survive reader reopens that keep the same key.
    // (The diff view had left both the old IndexReader-keyed and the new
    // Object-keyed super(...) calls in place; only the post-commit call remains.)
    super(index, indexSettings, new MapMaker().weakKeys().<Object, ConcurrentMap<Filter, DocIdSet>>makeMap());
}
@Override public String type() {

View File

@@ -20,6 +20,7 @@
package org.elasticsearch.index.query.xcontent;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.DeletionAwareConstantScoreQuery;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.Query;
import org.elasticsearch.index.AbstractIndexComponent;
@@ -77,11 +78,18 @@ public class ConstantScoreQueryParser extends AbstractIndexComponent implements
}
// cache the filter if possible
Query query;
if (cache) {
Filter nonCachedFilter = filter;
filter = parseContext.cacheFilterIfPossible(filter);
if (parseContext.indexEngine().readerClonedOnDeletion() && (filter != nonCachedFilter)) {
query = new DeletionAwareConstantScoreQuery(filter, true);
} else {
query = new ConstantScoreQuery(filter);
}
} else {
query = new ConstantScoreQuery(filter);
}
ConstantScoreQuery query = new ConstantScoreQuery(filter);
query.setBoost(boost);
return query;
}

View File

@@ -22,10 +22,7 @@ package org.elasticsearch.index.cache.filter;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.FilteredQuery;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.*;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.elasticsearch.index.Index;
@@ -80,8 +77,13 @@ public class FilterCacheTests {
indexWriter.deleteDocuments(new Term("id", "1"));
reader = refreshReader(reader);
searcher = new IndexSearcher(reader);
assertThat(Lucene.count(searcher, new ConstantScoreQuery(filterCache.cache(new TermFilter(new Term("id", "1")))), -1), equalTo(0l));
assertThat(Lucene.count(searcher, new FilteredQuery(new MatchAllDocsQuery(), filterCache.cache(new TermFilter(new Term("id", "1")))), -1), equalTo(0l));
TermFilter filter = new TermFilter(new Term("id", "1"));
Filter cachedFilter = filterCache.cache(filter);
long constantScoreCount = filter == cachedFilter ? 0 : 1;
// sadly, when caching based on cacheKey with NRT, this fails, that's why we have DeletionAware one
assertThat(Lucene.count(searcher, new ConstantScoreQuery(cachedFilter), -1), equalTo(constantScoreCount));
assertThat(Lucene.count(searcher, new DeletionAwareConstantScoreQuery(cachedFilter, true), -1), equalTo(0l));
assertThat(Lucene.count(searcher, new FilteredQuery(new MatchAllDocsQuery(), cachedFilter), -1), equalTo(0l));
indexWriter.close();
}