Core: In the bitset cache only eagerly load bitsets for parent nested object fields.

Don't eagerly cache parent type filters or leaf nested object field filters in the bitset cache.
Also make parent/child queries rely on a regular Filter instead of FixedBitSetFilter.

Closes #8440
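To make the second point concrete: the parsers in the diff below stop asking the bitset cache for an eagerly materialized per-segment bit set and use the regular filter cache instead. The following standalone sketch uses hypothetical mini-types, not the Lucene or Elasticsearch API, purely to illustrate the trade-off the commit is about: an eager bitset costs roughly maxDoc bits of resident memory per segment, while a plain filter can be evaluated or cached lazily.

    // Illustrative only: hypothetical helper types, not the real Lucene/Elasticsearch classes.
    import java.util.BitSet;
    import java.util.HashMap;
    import java.util.Map;
    import java.util.function.IntPredicate;

    final class EagerBitsetVsPlainFilter {

        // "Bitset cache" style: one eagerly built BitSet per segment, kept resident.
        static final Map<String, BitSet> BITSET_CACHE = new HashMap<>();

        static BitSet eagerBitset(String segmentKey, int maxDoc, IntPredicate matches) {
            return BITSET_CACHE.computeIfAbsent(segmentKey, k -> {
                BitSet bits = new BitSet(maxDoc); // ~maxDoc bits stay in memory per segment
                for (int doc = 0; doc < maxDoc; doc++) {
                    if (matches.test(doc)) {
                        bits.set(doc);
                    }
                }
                return bits;
            });
        }

        // "Regular filter" style: nothing is materialized up front.
        static IntPredicate plainFilter(IntPredicate matches) {
            return matches;
        }

        public static void main(String[] args) {
            IntPredicate isParent = doc -> doc % 3 == 0; // stand-in for a parent type filter
            BitSet eager = eagerBitset("segment_0", 1_000, isParent);
            IntPredicate lazy = plainFilter(isParent);
            System.out.println(eager.get(9) + " " + lazy.test(9)); // true true
        }
    }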
Martijn van Groningen 2014-11-14 21:00:14 +01:00
parent 31fa4dc58b
commit 284491d874
16 changed files with 52 additions and 192 deletions

File: BitsetFilterCache.java

@@ -43,7 +43,6 @@ import org.elasticsearch.index.AbstractIndexComponent;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.index.mapper.DocumentMapper;
 import org.elasticsearch.index.mapper.MapperService;
-import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
 import org.elasticsearch.index.mapper.object.ObjectMapper;
 import org.elasticsearch.index.search.nested.NonNestedDocsFilter;
 import org.elasticsearch.index.service.IndexService;
@@ -246,20 +245,14 @@ public class BitsetFilterCache extends AbstractIndexComponent implements LeafRea
         final Set<Filter> warmUp = new HashSet<>();
         final MapperService mapperService = indexShard.mapperService();
         for (DocumentMapper docMapper : mapperService.docMappers(false)) {
-            ParentFieldMapper parentFieldMapper = docMapper.parentFieldMapper();
-            if (parentFieldMapper.active()) {
-                warmUp.add(docMapper.typeFilter());
-                DocumentMapper parentDocumentMapper = mapperService.documentMapper(parentFieldMapper.type());
-                if (parentDocumentMapper != null) {
-                    warmUp.add(parentDocumentMapper.typeFilter());
-                }
-            }
             if (docMapper.hasNestedObjects()) {
                 hasNested = true;
                 for (ObjectMapper objectMapper : docMapper.objectMappers().values()) {
                     if (objectMapper.nested().isNested()) {
-                        warmUp.add(objectMapper.nestedTypeFilter());
+                        ObjectMapper parentObjectMapper = docMapper.findParentObjectMapper(objectMapper);
+                        if (parentObjectMapper != null && parentObjectMapper.nested().isNested()) {
+                            warmUp.add(parentObjectMapper.nestedTypeFilter());
+                        }
                     }
                 }
             }
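For readability, here is the post-change state of the warm-up selection, reconstructed from the hunk above; the enclosing warmer listener and the warmUp, hasNested and mapperService declarations are taken from the surrounding context.

    // Reconstructed new state of the loop shown above; enclosing warmer code omitted.
    for (DocumentMapper docMapper : mapperService.docMappers(false)) {
        if (docMapper.hasNestedObjects()) {
            hasNested = true;
            for (ObjectMapper objectMapper : docMapper.objectMappers().values()) {
                if (objectMapper.nested().isNested()) {
                    // Only the enclosing (parent) nested object's filter is pre-built: the
                    // block join needs random access on the parent side, while leaf nested
                    // filters and parent/child type filters are no longer warmed eagerly.
                    ObjectMapper parentObjectMapper = docMapper.findParentObjectMapper(objectMapper);
                    if (parentObjectMapper != null && parentObjectMapper.nested().isNested()) {
                        warmUp.add(parentObjectMapper.nestedTypeFilter());
                    }
                }
            }
        }
    }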

File: HasChildFilterParser.java

@@ -154,7 +154,7 @@ public class HasChildFilterParser implements FilterParser {
             nonNestedDocsFilter = parseContext.bitsetFilter(NonNestedDocsFilter.INSTANCE);
         }
-        BitDocIdSetFilter parentFilter = parseContext.bitsetFilter(parentDocMapper.typeFilter());
+        Filter parentFilter = parseContext.cacheFilter(parentDocMapper.typeFilter(), null);
         ParentChildIndexFieldData parentChildIndexFieldData = parseContext.getForField(parentFieldMapper);
         Query childrenQuery;

File: HasChildQueryParser.java

@@ -20,6 +20,7 @@
 package org.elasticsearch.index.query;
 
 import org.apache.lucene.search.FilteredQuery;
+import org.apache.lucene.search.Filter;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.join.BitDocIdSetFilter;
 import org.elasticsearch.common.Strings;
@@ -155,7 +156,7 @@ public class HasChildQueryParser implements QueryParser {
         innerQuery = new FilteredQuery(innerQuery, parseContext.cacheFilter(childDocMapper.typeFilter(), null));
         Query query;
-        BitDocIdSetFilter parentFilter = parseContext.bitsetFilter(parentDocMapper.typeFilter());
+        Filter parentFilter = parseContext.cacheFilter(parentDocMapper.typeFilter(), null);
         ParentChildIndexFieldData parentChildIndexFieldData = parseContext.getForField(parentFieldMapper);
         if (minChildren > 1 || maxChildren > 0 || scoreType != ScoreType.NONE) {
             query = new ChildrenQuery(parentChildIndexFieldData, parentType, childType, parentFilter, innerQuery, scoreType, minChildren,
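Since the ChildrenQuery and ChildrenConstantScoreQuery constructors (changed further down) now take a plain Filter for the parent side, the parser's construction path ends up roughly as sketched below. This is a hedged reconstruction: innerQuery, the type names and the numeric arguments are assumed from the surrounding parser code, and the else branch is an assumption about how the constant-score variant is wired.

    // Hedged sketch; setup of parseContext, innerQuery, mappers and the numeric arguments is assumed.
    Filter parentFilter = parseContext.cacheFilter(parentDocMapper.typeFilter(), null);
    ParentChildIndexFieldData ifd = parseContext.getForField(parentFieldMapper);
    Query query;
    if (minChildren > 1 || maxChildren > 0 || scoreType != ScoreType.NONE) {
        query = new ChildrenQuery(ifd, parentType, childType, parentFilter, innerQuery,
                scoreType, minChildren, maxChildren, shortCircuitParentDocSet, nonNestedDocsFilter);
    } else {
        // Assumed wiring of the constant-score variant (argument order matches its constructor below).
        query = new ChildrenConstantScoreQuery(ifd, innerQuery, parentType, childType,
                parentFilter, shortCircuitParentDocSet, nonNestedDocsFilter);
    }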

File: HasParentQueryParser.java

@@ -22,7 +22,6 @@ import org.apache.lucene.search.BooleanClause;
 import org.apache.lucene.search.Filter;
 import org.apache.lucene.search.FilteredQuery;
 import org.apache.lucene.search.Query;
-import org.apache.lucene.search.join.BitDocIdSetFilter;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.lucene.search.NotFilter;
@@ -182,7 +181,7 @@ public class HasParentQueryParser implements QueryParser {
         // wrap the query with type query
         innerQuery = new FilteredQuery(innerQuery, parseContext.cacheFilter(parentDocMapper.typeFilter(), null));
-        BitDocIdSetFilter childrenFilter = parseContext.bitsetFilter(new NotFilter(parentFilter));
+        Filter childrenFilter = parseContext.cacheFilter(new NotFilter(parentFilter), null);
         if (score) {
             return new ParentQuery(parentChildIndexFieldData, innerQuery, parentDocMapper.type(), childrenFilter);
         } else {

File: ChildrenConstantScoreQuery.java

@@ -57,14 +57,14 @@ public class ChildrenConstantScoreQuery extends Query {
     private Query originalChildQuery;
     private final String parentType;
     private final String childType;
-    private final BitDocIdSetFilter parentFilter;
+    private final Filter parentFilter;
     private final int shortCircuitParentDocSet;
     private final BitDocIdSetFilter nonNestedDocsFilter;
     private Query rewrittenChildQuery;
     private IndexReader rewriteIndexReader;
-    public ChildrenConstantScoreQuery(IndexParentChildFieldData parentChildIndexFieldData, Query childQuery, String parentType, String childType, BitDocIdSetFilter parentFilter, int shortCircuitParentDocSet, BitDocIdSetFilter nonNestedDocsFilter) {
+    public ChildrenConstantScoreQuery(IndexParentChildFieldData parentChildIndexFieldData, Query childQuery, String parentType, String childType, Filter parentFilter, int shortCircuitParentDocSet, BitDocIdSetFilter nonNestedDocsFilter) {
         this.parentChildIndexFieldData = parentChildIndexFieldData;
         this.parentFilter = parentFilter;
         this.parentType = parentType;

File: ChildrenQuery.java

@@ -68,7 +68,7 @@ public class ChildrenQuery extends Query {
     protected final ParentChildIndexFieldData ifd;
     protected final String parentType;
     protected final String childType;
-    protected final BitDocIdSetFilter parentFilter;
+    protected final Filter parentFilter;
     protected final ScoreType scoreType;
     protected Query originalChildQuery;
     protected final int minChildren;
@@ -79,7 +79,7 @@ public class ChildrenQuery extends Query {
     protected Query rewrittenChildQuery;
     protected IndexReader rewriteIndexReader;
-    public ChildrenQuery(ParentChildIndexFieldData ifd, String parentType, String childType, BitDocIdSetFilter parentFilter, Query childQuery, ScoreType scoreType, int minChildren, int maxChildren, int shortCircuitParentDocSet, BitDocIdSetFilter nonNestedDocsFilter) {
+    public ChildrenQuery(ParentChildIndexFieldData ifd, String parentType, String childType, Filter parentFilter, Query childQuery, ScoreType scoreType, int minChildren, int maxChildren, int shortCircuitParentDocSet, BitDocIdSetFilter nonNestedDocsFilter) {
         this.ifd = ifd;
         this.parentType = parentType;
         this.childType = childType;

File: ParentConstantScoreQuery.java

@@ -22,16 +22,7 @@ import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.SortedDocValues;
 import org.apache.lucene.index.Term;
-import org.apache.lucene.search.DocIdSet;
-import org.apache.lucene.search.DocIdSetIterator;
-import org.apache.lucene.search.Explanation;
-import org.apache.lucene.search.Filter;
-import org.apache.lucene.search.FilteredDocIdSetIterator;
-import org.apache.lucene.search.IndexSearcher;
-import org.apache.lucene.search.Query;
-import org.apache.lucene.search.Scorer;
-import org.apache.lucene.search.Weight;
-import org.apache.lucene.search.join.BitDocIdSetFilter;
+import org.apache.lucene.search.*;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.LongBitSet;
 import org.elasticsearch.common.lucene.docset.DocIdSets;
@@ -53,12 +44,12 @@ public class ParentConstantScoreQuery extends Query {
     private final ParentChildIndexFieldData parentChildIndexFieldData;
     private Query originalParentQuery;
     private final String parentType;
-    private final BitDocIdSetFilter childrenFilter;
+    private final Filter childrenFilter;
     private Query rewrittenParentQuery;
     private IndexReader rewriteIndexReader;
-    public ParentConstantScoreQuery(ParentChildIndexFieldData parentChildIndexFieldData, Query parentQuery, String parentType, BitDocIdSetFilter childrenFilter) {
+    public ParentConstantScoreQuery(ParentChildIndexFieldData parentChildIndexFieldData, Query parentQuery, String parentType, Filter childrenFilter) {
         this.parentChildIndexFieldData = parentChildIndexFieldData;
         this.originalParentQuery = parentQuery;
         this.parentType = parentType;

File: ParentQuery.java

@@ -18,20 +18,8 @@
  */
 package org.elasticsearch.index.search.child;
 
-import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.LeafReaderContext;
-import org.apache.lucene.index.SortedDocValues;
-import org.apache.lucene.index.SortedSetDocValues;
-import org.apache.lucene.index.Term;
-import org.apache.lucene.search.DocIdSet;
-import org.apache.lucene.search.DocIdSetIterator;
-import org.apache.lucene.search.Explanation;
-import org.apache.lucene.search.Filter;
-import org.apache.lucene.search.IndexSearcher;
-import org.apache.lucene.search.Query;
-import org.apache.lucene.search.Scorer;
-import org.apache.lucene.search.Weight;
-import org.apache.lucene.search.join.BitDocIdSetFilter;
+import org.apache.lucene.index.*;
+import org.apache.lucene.search.*;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.ToStringUtils;
 import org.elasticsearch.ElasticsearchException;
@@ -61,12 +49,12 @@ public class ParentQuery extends Query {
     private final ParentChildIndexFieldData parentChildIndexFieldData;
     private Query originalParentQuery;
     private final String parentType;
-    private final BitDocIdSetFilter childrenFilter;
+    private final Filter childrenFilter;
     private Query rewrittenParentQuery;
     private IndexReader rewriteIndexReader;
-    public ParentQuery(ParentChildIndexFieldData parentChildIndexFieldData, Query parentQuery, String parentType, BitDocIdSetFilter childrenFilter) {
+    public ParentQuery(ParentChildIndexFieldData parentChildIndexFieldData, Query parentQuery, String parentType, Filter childrenFilter) {
         this.parentChildIndexFieldData = parentChildIndexFieldData;
         this.originalParentQuery = parentQuery;
         this.parentType = parentType;

File: ParentToChildrenAggregator.java

@@ -23,7 +23,6 @@ import org.apache.lucene.index.SortedDocValues;
 import org.apache.lucene.search.DocIdSet;
 import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.Filter;
-import org.apache.lucene.search.join.BitDocIdSetFilter;
 import org.apache.lucene.util.Bits;
 import org.elasticsearch.ElasticsearchIllegalStateException;
 import org.elasticsearch.ExceptionsHelper;
@@ -54,7 +53,7 @@ public class ParentToChildrenAggregator extends SingleBucketAggregator implement
     private final String parentType;
     private final Filter childFilter;
-    private final BitDocIdSetFilter parentFilter;
+    private final Filter parentFilter;
     private final ValuesSource.Bytes.WithOrdinals.ParentChild valuesSource;
     // Maybe use PagedGrowableWriter? This will be less wasteful than LongArray, but then we don't have the reuse feature of BigArrays.
@@ -80,7 +79,7 @@ public class ParentToChildrenAggregator extends SingleBucketAggregator implement
         // so use the filter cache instead. When the filter cache is smarter with what filter impl to pick we can benefit
         // from it here
         this.childFilter = aggregationContext.searchContext().filterCache().cache(childFilter);
-        this.parentFilter = aggregationContext.searchContext().bitsetFilterCache().getBitDocIdSetFilter(parentFilter);
+        this.parentFilter = aggregationContext.searchContext().filterCache().cache(parentFilter);
         this.parentOrdToBuckets = aggregationContext.bigArrays().newLongArray(maxOrd, false);
         this.parentOrdToBuckets.fill(0, maxOrd, -1);
         this.parentOrdToOtherBuckets = new LongObjectPagedHashMap<>(aggregationContext.bigArrays());
@@ -99,7 +98,7 @@ public class ParentToChildrenAggregator extends SingleBucketAggregator implement
     @Override
     public void collect(int docId, long bucketOrdinal) throws IOException {
-        if (parentDocs != null && parentDocs.get(docId)) {
+        if (parentDocs.get(docId)) {
             long globalOrdinal = globalOrdinals.getOrd(docId);
             if (globalOrdinal != -1) {
                 if (parentOrdToBuckets.get(globalOrdinal) == -1) {
@@ -129,11 +128,10 @@ public class ParentToChildrenAggregator extends SingleBucketAggregator implement
         assert globalOrdinals != null;
         try {
             DocIdSet parentDocIdSet = parentFilter.getDocIdSet(reader, null);
-            if (parentDocIdSet != null) {
-                parentDocs = parentDocIdSet.bits();
-            } else {
-                parentDocs = null;
-            }
+            // The DocIdSets.toSafeBits(...) can convert to FixedBitSet, but this
+            // will only happen if the none filter cache is used. (which only happens in tests)
+            // Otherwise the filter cache will produce a bitset based filter.
+            parentDocs = DocIdSets.toSafeBits(reader.reader(), parentDocIdSet);
             DocIdSet childDocIdSet = childFilter.getDocIdSet(reader, null);
             if (globalOrdinals != null && !DocIdSets.isEmpty(childDocIdSet)) {
                 replay.add(reader);
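The new line delegates to DocIdSets.toSafeBits, the Elasticsearch helper referenced in the added comments. As a rough illustration only, a helper with that behavior could look like the sketch below. This is a hedged reconstruction, not the actual implementation in org.elasticsearch.common.lucene.docset.DocIdSets: it turns a possibly-null DocIdSet into random-access Bits, falling back to materializing a FixedBitSet when the set does not expose bits() directly.

    import java.io.IOException;

    import org.apache.lucene.index.LeafReader;
    import org.apache.lucene.search.DocIdSet;
    import org.apache.lucene.search.DocIdSetIterator;
    import org.apache.lucene.util.Bits;
    import org.apache.lucene.util.FixedBitSet;

    final class SafeBitsSketch {

        // Hypothetical stand-in for DocIdSets.toSafeBits(...); the exact behavior is an assumption.
        static Bits toSafeBits(LeafReader reader, DocIdSet set) throws IOException {
            if (set == null) {
                return new Bits.MatchNoBits(reader.maxDoc()); // nothing matches in this segment
            }
            Bits bits = set.bits();
            if (bits != null) {
                return bits; // already random access, e.g. a bitset-backed set from the cache
            }
            DocIdSetIterator iterator = set.iterator();
            if (iterator == null) {
                return new Bits.MatchNoBits(reader.maxDoc());
            }
            FixedBitSet fixedBitSet = new FixedBitSet(reader.maxDoc()); // materialize as a last resort
            fixedBitSet.or(iterator);
            return fixedBitSet;
        }
    }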

File: AbstractChildTests.java

@@ -120,7 +120,11 @@ public abstract class AbstractChildTests extends ElasticsearchSingleNodeLuceneTe
         }
     }
-    static BitDocIdSetFilter wrap(Filter filter) {
+    static Filter wrap(Filter filter) {
+        return SearchContext.current().filterCache().cache(filter);
+    }
+
+    static BitDocIdSetFilter wrapWithFixedBitSetFilter(Filter filter) {
         return SearchContext.current().bitsetFilterCache().getBitDocIdSetFilter(filter);
     }
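In the test changes that follow, this split matters: wrap(...) now returns a plain cached Filter and is used for the parent/children type filters, while wrapWithFixedBitSetFilter(...) keeps returning a BitDocIdSetFilter and is only used for NonNestedDocsFilter. A hedged usage sketch mirroring the test hunks below; the field data, child query and Term/TermFilter setup are assumed from the test fixtures.

    Filter parentFilter = wrap(new TermFilter(new Term(TypeFieldMapper.NAME, "parent")));
    BitDocIdSetFilter nonNestedDocsFilter = wrapWithFixedBitSetFilter(NonNestedDocsFilter.INSTANCE);
    Query query = new ChildrenConstantScoreQuery(parentChildIndexFieldData, childQuery,
            "parent", "child", parentFilter, 12, nonNestedDocsFilter);
    QueryUtils.check(query);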

File: ChildrenConstantScoreQueryTests.java

@@ -89,8 +89,8 @@ public class ChildrenConstantScoreQueryTests extends AbstractChildTests {
         Query childQuery = new TermQuery(new Term("field", "value"));
         ParentFieldMapper parentFieldMapper = SearchContext.current().mapperService().documentMapper("child").parentFieldMapper();
         ParentChildIndexFieldData parentChildIndexFieldData = SearchContext.current().fieldData().getForField(parentFieldMapper);
-        BitDocIdSetFilter parentFilter = wrap(new TermFilter(new Term(TypeFieldMapper.NAME, "parent")));
-        Query query = new ChildrenConstantScoreQuery(parentChildIndexFieldData, childQuery, "parent", "child", parentFilter, 12, wrap(NonNestedDocsFilter.INSTANCE));
+        Filter parentFilter = wrap(new TermFilter(new Term(TypeFieldMapper.NAME, "parent")));
+        Query query = new ChildrenConstantScoreQuery(parentChildIndexFieldData, childQuery, "parent", "child", parentFilter, 12, wrapWithFixedBitSetFilter(NonNestedDocsFilter.INSTANCE));
         QueryUtils.check(query);
     }
@@ -122,7 +122,7 @@ public class ChildrenConstantScoreQueryTests extends AbstractChildTests {
         ));
         TermQuery childQuery = new TermQuery(new Term("field1", "value" + (1 + random().nextInt(3))));
-        BitDocIdSetFilter parentFilter = wrap(new TermFilter(new Term(TypeFieldMapper.NAME, "parent")));
+        Filter parentFilter = wrap(new TermFilter(new Term(TypeFieldMapper.NAME, "parent")));
         int shortCircuitParentDocSet = random().nextInt(5);
         ParentFieldMapper parentFieldMapper = SearchContext.current().mapperService().documentMapper("child").parentFieldMapper();
         ParentChildIndexFieldData parentChildIndexFieldData = SearchContext.current().fieldData().getForField(parentFieldMapper);
@@ -216,7 +216,7 @@ public class ChildrenConstantScoreQueryTests extends AbstractChildTests {
         ParentFieldMapper parentFieldMapper = SearchContext.current().mapperService().documentMapper("child").parentFieldMapper();
         ParentChildIndexFieldData parentChildIndexFieldData = SearchContext.current().fieldData().getForField(parentFieldMapper);
-        BitDocIdSetFilter parentFilter = wrap(new TermFilter(new Term(TypeFieldMapper.NAME, "parent")));
+        Filter parentFilter = wrap(new TermFilter(new Term(TypeFieldMapper.NAME, "parent")));
         Filter rawFilterMe = new NotFilter(new TermFilter(new Term("filter", "me")));
         int max = numUniqueChildValues / 4;
         for (int i = 0; i < max; i++) {
@@ -259,7 +259,7 @@ public class ChildrenConstantScoreQueryTests extends AbstractChildTests {
             String childValue = childValues[random().nextInt(numUniqueChildValues)];
             TermQuery childQuery = new TermQuery(new Term("field1", childValue));
             int shortCircuitParentDocSet = random().nextInt(numParentDocs);
-            BitDocIdSetFilter nonNestedDocsFilter = random().nextBoolean() ? wrap(NonNestedDocsFilter.INSTANCE) : null;
+            BitDocIdSetFilter nonNestedDocsFilter = random().nextBoolean() ? wrapWithFixedBitSetFilter(NonNestedDocsFilter.INSTANCE) : null;
             Query query;
             if (random().nextBoolean()) {
                 // Usage in HasChildQueryParser

File: ChildrenQueryTests.java

@@ -27,29 +27,9 @@ import org.apache.lucene.document.Document;
 import org.apache.lucene.document.DoubleField;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.StringField;
-import org.apache.lucene.index.DirectoryReader;
-import org.apache.lucene.index.DocsEnum;
-import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.IndexWriterConfig;
-import org.apache.lucene.index.LeafReader;
-import org.apache.lucene.index.RandomIndexWriter;
-import org.apache.lucene.index.SlowCompositeReaderWrapper;
-import org.apache.lucene.index.Term;
-import org.apache.lucene.index.Terms;
-import org.apache.lucene.index.TermsEnum;
+import org.apache.lucene.index.*;
 import org.apache.lucene.queries.TermFilter;
-import org.apache.lucene.search.ConstantScoreQuery;
-import org.apache.lucene.search.Filter;
-import org.apache.lucene.search.FilteredQuery;
-import org.apache.lucene.search.IndexSearcher;
-import org.apache.lucene.search.MultiCollector;
-import org.apache.lucene.search.Query;
-import org.apache.lucene.search.QueryUtils;
-import org.apache.lucene.search.ScoreDoc;
-import org.apache.lucene.search.TermQuery;
-import org.apache.lucene.search.TopDocs;
-import org.apache.lucene.search.TopScoreDocCollector;
+import org.apache.lucene.search.*;
 import org.apache.lucene.search.join.BitDocIdSetFilter;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.FixedBitSet;
@@ -106,11 +86,11 @@ public class ChildrenQueryTests extends AbstractChildTests {
         ScoreType scoreType = ScoreType.values()[random().nextInt(ScoreType.values().length)];
         ParentFieldMapper parentFieldMapper = SearchContext.current().mapperService().documentMapper("child").parentFieldMapper();
         ParentChildIndexFieldData parentChildIndexFieldData = SearchContext.current().fieldData().getForField(parentFieldMapper);
-        BitDocIdSetFilter parentFilter = wrap(new TermFilter(new Term(TypeFieldMapper.NAME, "parent")));
+        Filter parentFilter = wrap(new TermFilter(new Term(TypeFieldMapper.NAME, "parent")));
         int minChildren = random().nextInt(10);
         int maxChildren = scaledRandomIntBetween(minChildren, 10);
         Query query = new ChildrenQuery(parentChildIndexFieldData, "parent", "child", parentFilter, childQuery, scoreType, minChildren,
-                maxChildren, 12, wrap(NonNestedDocsFilter.INSTANCE));
+                maxChildren, 12, wrapWithFixedBitSetFilter(NonNestedDocsFilter.INSTANCE));
         QueryUtils.check(query);
     }
@@ -196,7 +176,7 @@ public class ChildrenQueryTests extends AbstractChildTests {
         ParentFieldMapper parentFieldMapper = SearchContext.current().mapperService().documentMapper("child").parentFieldMapper();
         ParentChildIndexFieldData parentChildIndexFieldData = SearchContext.current().fieldData().getForField(parentFieldMapper);
-        BitDocIdSetFilter parentFilter = wrap(new TermFilter(new Term(TypeFieldMapper.NAME, "parent")));
+        Filter parentFilter = wrap(new TermFilter(new Term(TypeFieldMapper.NAME, "parent")));
         Filter rawFilterMe = new NotFilter(new TermFilter(new Term("filter", "me")));
         int max = numUniqueChildValues / 4;
         for (int i = 0; i < max; i++) {
@@ -240,7 +220,7 @@ public class ChildrenQueryTests extends AbstractChildTests {
             Query childQuery = new ConstantScoreQuery(new TermQuery(new Term("field1", childValue)));
             int shortCircuitParentDocSet = random().nextInt(numParentDocs);
             ScoreType scoreType = ScoreType.values()[random().nextInt(ScoreType.values().length)];
-            BitDocIdSetFilter nonNestedDocsFilter = random().nextBoolean() ? wrap(NonNestedDocsFilter.INSTANCE) : null;
+            BitDocIdSetFilter nonNestedDocsFilter = random().nextBoolean() ? wrapWithFixedBitSetFilter(NonNestedDocsFilter.INSTANCE) : null;
             // leave min/max set to 0 half the time
             int minChildren = random().nextInt(2) * scaledRandomIntBetween(0, 110);
@@ -390,7 +370,7 @@ public class ChildrenQueryTests extends AbstractChildTests {
         ((TestSearchContext)context).setSearcher(new ContextIndexSearcher(context, engineSearcher));
         ParentFieldMapper parentFieldMapper = context.mapperService().documentMapper("child").parentFieldMapper();
         ParentChildIndexFieldData parentChildIndexFieldData = context.fieldData().getForField(parentFieldMapper);
-        BitDocIdSetFilter parentFilter = wrap(new TermFilter(new Term(TypeFieldMapper.NAME, "parent")));
+        Filter parentFilter = wrap(new TermFilter(new Term(TypeFieldMapper.NAME, "parent")));
         // child query that returns the score as the value of "childScore" for each child document,
         // with the parent's score determined by the score type

File: ParentConstantScoreQueryTests.java

@@ -24,25 +24,9 @@ import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.StringField;
-import org.apache.lucene.index.DirectoryReader;
-import org.apache.lucene.index.DocsEnum;
-import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.IndexWriterConfig;
-import org.apache.lucene.index.LeafReader;
-import org.apache.lucene.index.RandomIndexWriter;
-import org.apache.lucene.index.SlowCompositeReaderWrapper;
-import org.apache.lucene.index.Term;
-import org.apache.lucene.index.Terms;
-import org.apache.lucene.index.TermsEnum;
+import org.apache.lucene.index.*;
 import org.apache.lucene.queries.TermFilter;
-import org.apache.lucene.search.ConstantScoreQuery;
-import org.apache.lucene.search.Filter;
-import org.apache.lucene.search.FilteredQuery;
-import org.apache.lucene.search.IndexSearcher;
-import org.apache.lucene.search.Query;
-import org.apache.lucene.search.QueryUtils;
-import org.apache.lucene.search.TermQuery;
-import org.apache.lucene.search.join.BitDocIdSetFilter;
+import org.apache.lucene.search.*;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.FixedBitSet;
 import org.apache.lucene.util.LuceneTestCase;
@@ -88,7 +72,7 @@ public class ParentConstantScoreQueryTests extends AbstractChildTests {
         Query parentQuery = new TermQuery(new Term("field", "value"));
         ParentFieldMapper parentFieldMapper = SearchContext.current().mapperService().documentMapper("child").parentFieldMapper();
         ParentChildIndexFieldData parentChildIndexFieldData = SearchContext.current().fieldData().getForField(parentFieldMapper);
-        BitDocIdSetFilter childrenFilter = wrap(new TermFilter(new Term(TypeFieldMapper.NAME, "child")));
+        Filter childrenFilter = wrap(new TermFilter(new Term(TypeFieldMapper.NAME, "child")));
         Query query = new ParentConstantScoreQuery(parentChildIndexFieldData, parentQuery, "parent", childrenFilter);
         QueryUtils.check(query);
     }
@@ -174,7 +158,7 @@ public class ParentConstantScoreQueryTests extends AbstractChildTests {
         ParentFieldMapper parentFieldMapper = SearchContext.current().mapperService().documentMapper("child").parentFieldMapper();
         ParentChildIndexFieldData parentChildIndexFieldData = SearchContext.current().fieldData().getForField(parentFieldMapper);
-        BitDocIdSetFilter childrenFilter = wrap(new TermFilter(new Term(TypeFieldMapper.NAME, "child")));
+        Filter childrenFilter = wrap(new TermFilter(new Term(TypeFieldMapper.NAME, "child")));
         Filter rawFilterMe = new NotFilter(new TermFilter(new Term("filter", "me")));
         int max = numUniqueParentValues / 4;
         for (int i = 0; i < max; i++) {

File: ParentQueryTests.java

@@ -25,27 +25,9 @@ import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.StringField;
-import org.apache.lucene.index.DirectoryReader;
-import org.apache.lucene.index.DocsEnum;
-import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.IndexWriterConfig;
-import org.apache.lucene.index.LeafReader;
-import org.apache.lucene.index.RandomIndexWriter;
-import org.apache.lucene.index.SlowCompositeReaderWrapper;
-import org.apache.lucene.index.Term;
-import org.apache.lucene.index.Terms;
-import org.apache.lucene.index.TermsEnum;
+import org.apache.lucene.index.*;
 import org.apache.lucene.queries.TermFilter;
-import org.apache.lucene.search.ConstantScoreQuery;
-import org.apache.lucene.search.Filter;
-import org.apache.lucene.search.FilteredQuery;
-import org.apache.lucene.search.IndexSearcher;
-import org.apache.lucene.search.MultiCollector;
-import org.apache.lucene.search.Query;
-import org.apache.lucene.search.QueryUtils;
-import org.apache.lucene.search.TermQuery;
-import org.apache.lucene.search.TopScoreDocCollector;
-import org.apache.lucene.search.join.BitDocIdSetFilter;
+import org.apache.lucene.search.*;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.FixedBitSet;
 import org.apache.lucene.util.LuceneTestCase;
@@ -90,7 +72,7 @@ public class ParentQueryTests extends AbstractChildTests {
         Query parentQuery = new TermQuery(new Term("field", "value"));
         ParentFieldMapper parentFieldMapper = SearchContext.current().mapperService().documentMapper("child").parentFieldMapper();
         ParentChildIndexFieldData parentChildIndexFieldData = SearchContext.current().fieldData().getForField(parentFieldMapper);
-        BitDocIdSetFilter childrenFilter = wrap(new TermFilter(new Term(TypeFieldMapper.NAME, "child")));
+        Filter childrenFilter = wrap(new TermFilter(new Term(TypeFieldMapper.NAME, "child")));
         Query query = new ParentQuery(parentChildIndexFieldData, parentQuery, "parent", childrenFilter);
         QueryUtils.check(query);
     }
@@ -176,7 +158,7 @@ public class ParentQueryTests extends AbstractChildTests {
         ParentFieldMapper parentFieldMapper = SearchContext.current().mapperService().documentMapper("child").parentFieldMapper();
         ParentChildIndexFieldData parentChildIndexFieldData = SearchContext.current().fieldData().getForField(parentFieldMapper);
-        BitDocIdSetFilter childrenFilter = wrap(new TermFilter(new Term(TypeFieldMapper.NAME, "child")));
+        Filter childrenFilter = wrap(new TermFilter(new Term(TypeFieldMapper.NAME, "child")));
         Filter rawFilterMe = new NotFilter(new TermFilter(new Term("filter", "me")));
         int max = numUniqueParentValues / 4;
         for (int i = 0; i < max; i++) {

File: TopChildrenQueryTests.java

@@ -57,7 +57,7 @@ public class TopChildrenQueryTests extends AbstractChildTests {
         ScoreType scoreType = ScoreType.values()[random().nextInt(ScoreType.values().length)];
         ParentFieldMapper parentFieldMapper = SearchContext.current().mapperService().documentMapper("child").parentFieldMapper();
         ParentChildIndexFieldData parentChildIndexFieldData = SearchContext.current().fieldData().getForField(parentFieldMapper);
-        Query query = new TopChildrenQuery(parentChildIndexFieldData, childQuery, "child", "parent", scoreType, 1, 1, wrap(NonNestedDocsFilter.INSTANCE));
+        Query query = new TopChildrenQuery(parentChildIndexFieldData, childQuery, "child", "parent", scoreType, 1, 1, wrapWithFixedBitSetFilter(NonNestedDocsFilter.INSTANCE));
         QueryUtils.check(query);
     }

File: SimpleChildQuerySearchTests.java

@@ -20,7 +20,6 @@ package org.elasticsearch.search.child;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.ElasticsearchIllegalArgumentException;
-import org.elasticsearch.action.admin.cluster.stats.ClusterStatsResponse;
 import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheResponse;
 import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse;
 import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse;
@@ -66,11 +65,9 @@ import static org.elasticsearch.common.settings.ImmutableSettings.settingsBuilde
 import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
 import static org.elasticsearch.index.query.FilterBuilders.*;
 import static org.elasticsearch.index.query.QueryBuilders.*;
-import static org.elasticsearch.index.query.QueryBuilders.constantScoreQuery;
 import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.factorFunction;
 import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.scriptFunction;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
 import static org.hamcrest.Matchers.*;
 
 /**
@@ -350,63 +347,6 @@ public class SimpleChildQuerySearchTests extends ElasticsearchIntegrationTest {
         assertThat(indicesStatsResponse.getTotal().getFieldData().getMemorySizeInBytes(), equalTo(0l));
     }
-    @Test
-    public void testCheckFixedBitSetCache() throws Exception {
-        boolean loadFixedBitSetLazily = randomBoolean();
-        ImmutableSettings.Builder settingsBuilder = ImmutableSettings.builder().put(indexSettings())
-                .put("index.refresh_interval", -1);
-        if (loadFixedBitSetLazily) {
-            settingsBuilder.put("index.load_fixed_bitset_filters_eagerly", false);
-        }
-        // enforce lazy loading to make sure that p/c stats are not counted as part of field data
-        assertAcked(prepareCreate("test")
-                .setSettings(settingsBuilder)
-                .addMapping("parent")
-        );
-        client().prepareIndex("test", "parent", "p0").setSource("p_field", "p_value0").get();
-        client().prepareIndex("test", "parent", "p1").setSource("p_field", "p_value1").get();
-        refresh();
-        ensureSearchable("test");
-        // No _parent field yet, there shouldn't be anything in the parent id cache
-        ClusterStatsResponse clusterStatsResponse = client().admin().cluster().prepareClusterStats().get();
-        assertThat(clusterStatsResponse.getIndicesStats().getSegments().getBitsetMemoryInBytes(), equalTo(0l));
-        // Now add mapping + children
-        assertAcked(
-                client().admin().indices().preparePutMapping("test").setType("child").setSource("_parent", "type=parent")
-        );
-        // index simple data
-        client().prepareIndex("test", "child", "c1").setSource("c_field", "red").setParent("p1").get();
-        client().prepareIndex("test", "child", "c2").setSource("c_field", "yellow").setParent("p1").get();
-        client().prepareIndex("test", "parent", "p2").setSource("p_field", "p_value2").get();
-        client().prepareIndex("test", "child", "c3").setSource("c_field", "blue").setParent("p2").get();
-        client().prepareIndex("test", "child", "c4").setSource("c_field", "red").setParent("p2").get();
-        refresh();
-        ensureSearchable("test");
-        if (loadFixedBitSetLazily) {
-            clusterStatsResponse = client().admin().cluster().prepareClusterStats().get();
-            assertThat(clusterStatsResponse.getIndicesStats().getSegments().getBitsetMemoryInBytes(), equalTo(0l));
-            // only when querying with has_child the fixed bitsets are loaded
-            SearchResponse searchResponse = client().prepareSearch("test")
-                    // Use setShortCircuitCutoff(0), otherwise the parent filter isn't used.
-                    .setQuery(hasChildQuery("child", termQuery("c_field", "blue")).setShortCircuitCutoff(0))
-                    .get();
-            assertNoFailures(searchResponse);
-            assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
-        }
-        clusterStatsResponse = client().admin().cluster().prepareClusterStats().get();
-        assertThat(clusterStatsResponse.getIndicesStats().getSegments().getBitsetMemoryInBytes(), greaterThan(0l));
-        assertAcked(client().admin().indices().prepareDelete("test"));
-        clusterStatsResponse = client().admin().cluster().prepareClusterStats().get();
-        assertThat(clusterStatsResponse.getIndicesStats().getSegments().getBitsetMemoryInBytes(), equalTo(0l));
-    }
     @Test
     // See: https://github.com/elasticsearch/elasticsearch/issues/3290
     public void testCachingBug_withFqueryFilter() throws Exception {