Randomly use a cached filter.

Wrap the ChildrenQuery in an XFilteredQuery so that other code paths (Scorer#advance) are also exercised; a short sketch of that code path follows the commit metadata below.
Martijn van Groningen 2013-11-28 17:22:51 +01:00
parent 2ca5fd64fe
commit 9fe2b8e074
3 changed files with 44 additions and 7 deletions
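
The point of the XFilteredQuery wrapper is that a filtered query drives the wrapped query's scorer with advance(target), leapfrogging against the filter's iterator, rather than only stepping it with nextDoc(). Below is a minimal, self-contained sketch of that leapfrog pattern. DocIterator, ArrayIterator and LeapFrogSketch are simplified stand-ins for illustration only, not the actual Lucene Scorer/DocIdSetIterator contract (which, for example, defines extra rules for advance when the target is behind the current doc).

public class LeapFrogSketch {

    // Simplified iterator contract: both a scorer and a filter expose their matching
    // doc ids in increasing order.
    interface DocIterator {
        int NO_MORE_DOCS = Integer.MAX_VALUE;
        int nextDoc();           // move to the next matching doc, or NO_MORE_DOCS
        int advance(int target); // move to the first matching doc >= target
    }

    // Sorted-array implementation, just for the demo.
    static final class ArrayIterator implements DocIterator {
        private final int[] docs;
        private int pos = -1;

        ArrayIterator(int... docs) { this.docs = docs; }

        public int nextDoc() {
            return ++pos < docs.length ? docs[pos] : NO_MORE_DOCS;
        }

        public int advance(int target) {
            int doc;
            while ((doc = nextDoc()) < target) {
                // keep skipping forward until we reach or pass the target
            }
            return doc;
        }
    }

    // Leapfrog intersection: whichever iterator is behind gets advance()d to the other's
    // current doc -- the Scorer#advance code path the commit wants ChildrenQuery to hit.
    static void intersect(DocIterator scorer, DocIterator filter) {
        int filterDoc = filter.nextDoc();
        int scorerDoc = scorer.nextDoc();
        while (filterDoc != DocIterator.NO_MORE_DOCS && scorerDoc != DocIterator.NO_MORE_DOCS) {
            if (scorerDoc < filterDoc) {
                scorerDoc = scorer.advance(filterDoc);
            } else if (filterDoc < scorerDoc) {
                filterDoc = filter.advance(scorerDoc);
            } else {
                System.out.println("hit: " + filterDoc); // both match: a filtered hit
                filterDoc = filter.nextDoc();
                scorerDoc = scorer.nextDoc();
            }
        }
    }

    public static void main(String[] args) {
        // scorer matches 1,2,5,7,9; filter keeps 2,3,7 -> prints hits 2 and 7
        intersect(new ArrayIterator(1, 2, 5, 7, 9), new ArrayIterator(2, 3, 7));
    }
}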

View File

@@ -35,9 +35,11 @@ import org.elasticsearch.cache.recycler.CacheRecycler;
import org.elasticsearch.common.compress.CompressedString;
import org.elasticsearch.common.lucene.search.XConstantScoreQuery;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.cache.filter.weighted.WeightedFilterCache;
import org.elasticsearch.index.cache.id.IdCache;
import org.elasticsearch.index.cache.id.SimpleIdCacheTests;
import org.elasticsearch.index.cache.id.simple.SimpleIdCache;
@@ -48,9 +50,12 @@ import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.index.service.IndexService;
import org.elasticsearch.indices.cache.filter.IndicesFilterCache;
import org.elasticsearch.node.settings.NodeSettingsService;
import org.elasticsearch.search.internal.ContextIndexSearcher;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.ElasticsearchLuceneTestCase;
import org.elasticsearch.threadpool.ThreadPool;
import org.hamcrest.Description;
import org.hamcrest.StringDescription;
import org.junit.AfterClass;
@@ -267,15 +272,21 @@ public class ChildrenConstantScoreQueryTests extends ElasticsearchLuceneTestCase
final Index index = new Index(indexName);
final IdCache idCache = new SimpleIdCache(index, ImmutableSettings.EMPTY);
final CacheRecycler cacheRecycler = new CacheRecycler(ImmutableSettings.EMPTY);
Settings settings = ImmutableSettings.EMPTY;
MapperService mapperService = new MapperService(
index, ImmutableSettings.EMPTY, new Environment(), new AnalysisService(index), null, null, null
index, settings, new Environment(), new AnalysisService(index), null, null, null
);
mapperService.merge(
childType, new CompressedString(PutMappingRequest.buildFromSimplifiedDef(childType, "_parent", "type=" + parentType).string()), true
);
final IndexService indexService = new SimpleIdCacheTests.StubIndexService(mapperService);
idCache.setIndexService(indexService);
return new TestSearchContext(cacheRecycler, idCache, indexService);
ThreadPool threadPool = new ThreadPool();
NodeSettingsService nodeSettingsService = new NodeSettingsService(settings);
IndicesFilterCache indicesFilterCache = new IndicesFilterCache(settings, threadPool, cacheRecycler, nodeSettingsService);
WeightedFilterCache filterCache = new WeightedFilterCache(index, settings, indicesFilterCache);
return new TestSearchContext(cacheRecycler, idCache, indexService, filterCache);
}
}
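
The change above wires a real WeightedFilterCache (backed by an IndicesFilterCache, which in turn needs a ThreadPool and a NodeSettingsService) into the test's SearchContext, so that filterCache().cache(...) in the query tests returns a genuinely cached filter. The toy classes below illustrate the behaviour the in-diff comment in ChildrenQueryTests alludes to, namely that cached filter entries are computed per segment without taking deletes into account, so the consumer has to apply live docs "at the top level". They are hypothetical and not the Elasticsearch/Lucene API.

import java.util.BitSet;
import java.util.HashMap;
import java.util.Map;

public class ToyFilterCache {

    /** A filter simply produces the set of matching doc ids for one segment. */
    interface ToyFilter {
        BitSet matches(String segmentName);
    }

    private final Map<String, BitSet> perSegment = new HashMap<>();

    /** Wrap a filter so its per-segment result is computed once and reused; deletes are ignored here. */
    ToyFilter cache(ToyFilter raw) {
        return segmentName -> perSegment.computeIfAbsent(segmentName, raw::matches);
    }

    /** Consumer side: the cached set may still contain deleted docs, so live docs are applied on top. */
    static BitSet applyDeletes(BitSet cachedMatches, BitSet liveDocs) {
        BitSet result = (BitSet) cachedMatches.clone();
        if (liveDocs != null) { // by convention in this sketch, null means the segment has no deletes
            result.and(liveDocs);
        }
        return result;
    }
}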

View File

@@ -28,6 +28,8 @@ import org.apache.lucene.queries.TermFilter;
import org.apache.lucene.search.*;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.FixedBitSet;
import org.elasticsearch.common.lucene.search.NotFilter;
import org.elasticsearch.common.lucene.search.XFilteredQuery;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
@@ -77,6 +79,7 @@ public class ChildrenQueryTests extends ElasticsearchLuceneTestCase {
ObjectObjectOpenHashMap<String, NavigableMap<String, FloatArrayList>> childValueToParentIds = new ObjectObjectOpenHashMap<String, NavigableMap<String, FloatArrayList>>();
for (int parentDocId = 0; parentDocId < numParentDocs; parentDocId++) {
boolean markParentAsDeleted = rarely();
boolean filterMe = rarely();
String parent = Integer.toString(parentDocId);
Document document = new Document();
document.add(new StringField(UidFieldMapper.NAME, Uid.createUid("parent", parent), Field.Store.YES));
@@ -84,6 +87,9 @@ public class ChildrenQueryTests extends ElasticsearchLuceneTestCase {
if (markParentAsDeleted) {
document.add(new StringField("delete", "me", Field.Store.NO));
}
if (filterMe) {
document.add(new StringField("filter", "me", Field.Store.NO));
}
indexWriter.addDocument(document);
int numChildDocs;
@@ -113,7 +119,7 @@ public class ChildrenQueryTests extends ElasticsearchLuceneTestCase {
} else {
childValueToParentIds.put(childValue, parentIdToChildScores = new TreeMap<String, FloatArrayList>());
}
if (!markParentAsDeleted) {
if (!markParentAsDeleted && !filterMe) {
FloatArrayList childScores = parentIdToChildScores.get(parent);
if (childScores == null) {
parentIdToChildScores.put(parent, childScores = new FloatArrayList());
@@ -134,15 +140,33 @@ public class ChildrenQueryTests extends ElasticsearchLuceneTestCase {
ChildrenQueryTests.class.getSimpleName(), searcher
);
((TestSearchContext) SearchContext.current()).setSearcher(new ContextIndexSearcher(SearchContext.current(), engineSearcher));
TermFilter parentFilter = new TermFilter(new Term(TypeFieldMapper.NAME, "parent"));
Filter rawParentFilter = new TermFilter(new Term(TypeFieldMapper.NAME, "parent"));
Filter rawFilterMe = new NotFilter(new TermFilter(new Term("filter", "me")));
int max = numUniqueChildValues / 4;
for (int i = 0; i < max; i++) {
// Randomly pick a cached version: there is specific logic inside ChildrenQuery that deals with the fact
// that deletes are applied at the top level when filters are cached.
Filter parentFilter;
if (random().nextBoolean()) {
parentFilter = SearchContext.current().filterCache().cache(rawParentFilter);
} else {
parentFilter = rawParentFilter;
}
// Using this filter in the XFilteredQuery will invoke / test Scorer#advance(..)
Filter filterMe;
if (random().nextBoolean()) {
filterMe = SearchContext.current().filterCache().cache(rawFilterMe);
} else {
filterMe = rawFilterMe;
}
String childValue = childValues[random().nextInt(numUniqueChildValues)];
Query childQuery = new ConstantScoreQuery(new TermQuery(new Term("field1", childValue)));
int shortCircuitParentDocSet = random().nextInt(numParentDocs);
ScoreType scoreType = ScoreType.values()[random().nextInt(ScoreType.values().length)];
Query query = new ChildrenQuery("parent", "child", parentFilter, childQuery, scoreType, shortCircuitParentDocSet);
query = new XFilteredQuery(query, filterMe);
BitSetCollector collector = new BitSetCollector(indexReader.maxDoc());
int numHits = 1 + random().nextInt(25);
TopScoreDocCollector actualTopDocsCollector = TopScoreDocCollector.create(numHits, false);
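
The two random cached-or-raw blocks above (for parentFilter and filterMe) follow the same pattern; a small helper along the lines below could fold them into one place. This is only a sketch, not part of the commit, and it assumes it sits inside the test class (extending ElasticsearchLuceneTestCase) so that random(), SearchContext.current() and Filter are in scope.

// Hypothetical helper, not part of the commit: randomly exercise the cached or the
// uncached code path for a filter, mirroring the two blocks in the test above.
private static Filter maybeCache(Filter raw) {
    return random().nextBoolean()
            ? SearchContext.current().filterCache().cache(raw)
            : raw;
}

// Usage inside the loop:
// Filter parentFilter = maybeCache(rawParentFilter);
// Filter filterMe = maybeCache(rawFilterMe);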

View File

@@ -66,14 +66,16 @@ class TestSearchContext extends SearchContext {
final CacheRecycler cacheRecycler;
final IdCache idCache;
final IndexService indexService;
final FilterCache filterCache;
ContextIndexSearcher searcher;
int size;
TestSearchContext(CacheRecycler cacheRecycler, IdCache idCache, IndexService indexService) {
TestSearchContext(CacheRecycler cacheRecycler, IdCache idCache, IndexService indexService, FilterCache filterCache) {
this.cacheRecycler = cacheRecycler;
this.idCache = idCache;
this.indexService = indexService;
this.filterCache = filterCache;
}
@Override
@@ -293,7 +295,7 @@ class TestSearchContext extends SearchContext {
@Override
public FilterCache filterCache() {
return null;
return filterCache;
}
@Override