Fixes related to accepted docs not being taken into account:

* Removed the applyAcceptedDocs option from ChildrenConstantScoreQuery; accepted docs need to be applied at all times because of the short-circuit mechanism.
* Changed ParentDocSet to extend FilteredDocIdSetIterator instead of MatchDocIdSet, which fits better (see the sketch after this list).
* Made similar changes to ParentConstantScoreQuery for consistency between the two queries; the accepted docs bug itself didn't occur in ParentConstantScoreQuery.
* Updated the randomized parent/child tests to randomly update parent or child docs during the test run.
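
For reference, a minimal sketch of the iterator pattern the two queries now share, assuming the Lucene 4.x FilteredDocIdSetIterator API; the Set<String> uid set and the uidOf lookup are hypothetical stand-ins for ObjectOpenHashSet<HashedBytesArray> and IdReaderTypeCache:

import java.io.IOException;
import java.util.Set;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.FilteredDocIdSetIterator;

// Sketch only: the inner iterator already honors accepted docs (deletes),
// so the subclass just filters by the collected parent uids.
final class ParentDocIdIteratorSketch extends FilteredDocIdSetIterator {

    private final Set<String> collectedParentUids;
    private int remaining;

    ParentDocIdIteratorSketch(DocIdSetIterator inner, Set<String> collectedParentUids) {
        super(inner);
        this.collectedParentUids = collectedParentUids;
        this.remaining = collectedParentUids.size();
    }

    @Override
    protected boolean match(int doc) {
        if (remaining == 0) {
            // Every collected parent has been emitted; exhaust the iterator.
            // This replaces the shortCircuit() hook that MatchDocIdSet offered.
            try {
                advance(DocIdSetIterator.NO_MORE_DOCS);
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
            return false;
        }
        boolean matched = collectedParentUids.contains(uidOf(doc));
        if (matched) {
            remaining--;
        }
        return matched;
    }

    // Hypothetical: the real code resolves doc -> uid via IdReaderTypeCache.
    private String uidOf(int doc) {
        return "parent#" + doc;
    }
}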

Closes #4306
Martijn van Groningen, 2013-11-30 20:05:34 +01:00
commit ac1e985670, parent 5d2c334bbd
11 changed files with 216 additions and 84 deletions


@@ -135,7 +135,7 @@ public class HasChildFilterParser implements FilterParser {
 }
 Filter parentFilter = parseContext.cacheFilter(parentDocMapper.typeFilter(), null);
-Query childrenConstantScoreQuery = new ChildrenConstantScoreQuery(query, parentType, childType, parentFilter, shortCircuitParentDocSet, false);
+Query childrenConstantScoreQuery = new ChildrenConstantScoreQuery(query, parentType, childType, parentFilter, shortCircuitParentDocSet);
 if (filterName != null) {
 parseContext.addNamedQuery(filterName, childrenConstantScoreQuery);


@@ -142,7 +142,7 @@ public class HasChildQueryParser implements QueryParser {
 if (!deleteByQuery && scoreType != null) {
 query = new ChildrenQuery(parentType, childType, parentFilter, innerQuery, scoreType, shortCircuitParentDocSet);
 } else {
-query = new ChildrenConstantScoreQuery(innerQuery, parentType, childType, parentFilter, shortCircuitParentDocSet, true);
+query = new ChildrenConstantScoreQuery(innerQuery, parentType, childType, parentFilter, shortCircuitParentDocSet);
 if (deleteByQuery) {
 query = new XConstantScoreQuery(new DeleteByQueryWrappingFilter(query));
 }


@@ -158,7 +158,7 @@ public class HasParentFilterParser implements FilterParser {
 parentFilter = parentsFilter;
 }
 Filter childrenFilter = parseContext.cacheFilter(new NotFilter(parentFilter), null);
-Query parentConstantScoreQuery = new ParentConstantScoreQuery(query, parentType, childrenFilter, false);
+Query parentConstantScoreQuery = new ParentConstantScoreQuery(query, parentType, childrenFilter);
 if (filterName != null) {
 parseContext.addNamedQuery(filterName, parentConstantScoreQuery);


@@ -163,7 +163,7 @@ public class HasParentQueryParser implements QueryParser {
 if (!deleteByQuery && score) {
 query = new ParentQuery(innerQuery, parentType, childrenFilter);
 } else {
-query = new ParentConstantScoreQuery(innerQuery, parentType, childrenFilter, true);
+query = new ParentConstantScoreQuery(innerQuery, parentType, childrenFilter);
 if (deleteByQuery) {
 query = new XConstantScoreQuery(new DeleteByQueryWrappingFilter(query));
 }


@@ -31,7 +31,6 @@ import org.elasticsearch.ElasticSearchException;
 import org.elasticsearch.common.bytes.HashedBytesArray;
 import org.elasticsearch.common.lease.Releasable;
 import org.elasticsearch.common.lucene.docset.DocIdSets;
-import org.elasticsearch.common.lucene.docset.MatchDocIdSet;
 import org.elasticsearch.common.lucene.search.ApplyAcceptedDocsFilter;
 import org.elasticsearch.common.lucene.search.Queries;
 import org.elasticsearch.common.recycler.Recycler;
@@ -54,18 +53,16 @@ public class ChildrenConstantScoreQuery extends Query {
 private final String childType;
 private final Filter parentFilter;
 private final int shortCircuitParentDocSet;
-private final boolean applyAcceptedDocs;
 private Query rewrittenChildQuery;
 private IndexReader rewriteIndexReader;
-public ChildrenConstantScoreQuery(Query childQuery, String parentType, String childType, Filter parentFilter, int shortCircuitParentDocSet, boolean applyAcceptedDocs) {
+public ChildrenConstantScoreQuery(Query childQuery, String parentType, String childType, Filter parentFilter, int shortCircuitParentDocSet) {
 this.parentFilter = parentFilter;
 this.parentType = parentType;
 this.childType = childType;
 this.originalChildQuery = childQuery;
 this.shortCircuitParentDocSet = shortCircuitParentDocSet;
-this.applyAcceptedDocs = applyAcceptedDocs;
 }
 @Override
@@ -129,15 +126,8 @@ public class ChildrenConstantScoreQuery extends Query {
 private float queryWeight;
 public ParentWeight(Filter parentFilter, Filter shortCircuitFilter, SearchContext searchContext, Recycler.V<ObjectOpenHashSet<HashedBytesArray>> collectedUids) {
-if (applyAcceptedDocs) {
-// In case filters are cached, we need to apply deletes, since filters from filter cache didn't apply deletes
-this.parentFilter = new ApplyAcceptedDocsFilter(parentFilter);
-this.shortCircuitFilter = shortCircuitFilter != null ? new ApplyAcceptedDocsFilter(shortCircuitFilter) : null;
-} else {
-this.parentFilter = parentFilter;
-this.shortCircuitFilter = shortCircuitFilter;
-}
+this.parentFilter = new ApplyAcceptedDocsFilter(parentFilter);
+this.shortCircuitFilter = shortCircuitFilter;
 this.searchContext = searchContext;
 this.collectedUids = collectedUids;
 this.remaining = collectedUids.v().size();
@@ -170,12 +160,16 @@ public class ChildrenConstantScoreQuery extends Query {
 if (remaining == 0) {
 return null;
 }
-if (!applyAcceptedDocs) {
-acceptDocs = null;
-}
 if (shortCircuitFilter != null) {
-return ConstantScorer.create(shortCircuitFilter.getDocIdSet(context, acceptDocs), this, queryWeight);
+DocIdSet docIdSet = shortCircuitFilter.getDocIdSet(context, acceptDocs);
+if (!DocIdSets.isEmpty(docIdSet)) {
+DocIdSetIterator iterator = docIdSet.iterator();
+if (iterator != null) {
+return ConstantScorer.create(iterator, this, queryWeight);
+}
+}
+return null;
 }
 DocIdSet parentDocIdSet = this.parentFilter.getDocIdSet(context, acceptDocs);
@@ -183,14 +177,15 @@ public class ChildrenConstantScoreQuery extends Query {
 return null;
 }
-Bits parentsBits = DocIdSets.toSafeBits(context.reader(), parentDocIdSet);
 IdReaderTypeCache idReaderTypeCache = searchContext.idCache().reader(context.reader()).type(parentType);
 if (idReaderTypeCache != null) {
-DocIdSet docIdSet = new ParentDocSet(context.reader(), parentsBits, collectedUids.v(), idReaderTypeCache);
-return ConstantScorer.create(docIdSet, this, queryWeight);
-} else {
-return null;
+DocIdSetIterator innerIterator = parentDocIdSet.iterator();
+if (innerIterator != null) {
+ParentDocIdIterator parentDocIdIterator = new ParentDocIdIterator(innerIterator, collectedUids.v(), idReaderTypeCache);
+return ConstantScorer.create(parentDocIdIterator, this, queryWeight);
+}
 }
+return null;
 }
 @Override
@@ -199,21 +194,25 @@ public class ChildrenConstantScoreQuery extends Query {
 return true;
 }
-private final class ParentDocSet extends MatchDocIdSet {
+private final class ParentDocIdIterator extends FilteredDocIdSetIterator {
 private final ObjectOpenHashSet<HashedBytesArray> parents;
 private final IdReaderTypeCache typeCache;
-ParentDocSet(IndexReader reader, Bits acceptDocs, ObjectOpenHashSet<HashedBytesArray> parents, IdReaderTypeCache typeCache) {
-super(reader.maxDoc(), acceptDocs);
+private ParentDocIdIterator(DocIdSetIterator innerIterator, ObjectOpenHashSet<HashedBytesArray> parents, IdReaderTypeCache typeCache) {
+super(innerIterator);
 this.parents = parents;
 this.typeCache = typeCache;
 }
 @Override
-protected boolean matchDoc(int doc) {
+protected boolean match(int doc) {
 if (remaining == 0) {
-shortCircuit();
+try {
+advance(DocIdSetIterator.NO_MORE_DOCS);
+} catch (IOException e) {
+throw new RuntimeException(e);
+}
 return false;
 }


@@ -19,11 +19,9 @@
 package org.elasticsearch.index.search.child;
-import org.apache.lucene.search.DocIdSet;
 import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.Scorer;
 import org.apache.lucene.search.Weight;
-import org.elasticsearch.common.lucene.docset.DocIdSets;
 import java.io.IOException;
@@ -33,11 +31,8 @@ import java.io.IOException;
 // Borrowed from ConstantScoreQuery
 class ConstantScorer extends Scorer {
-static ConstantScorer create(DocIdSet docIdSet, Weight weight, float constantScore) throws IOException {
-if (DocIdSets.isEmpty(docIdSet)) {
-return null;
-}
-return new ConstantScorer(docIdSet.iterator(), weight, constantScore);
+static ConstantScorer create(DocIdSetIterator iterator, Weight weight, float constantScore) throws IOException {
+return new ConstantScorer(iterator, weight, constantScore);
 }
 private final DocIdSetIterator docIdSetIterator;
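
With the DocIdSet overload removed from ConstantScorer.create, each scorer() implementation now performs the emptiness and null-iterator checks itself before creating the scorer. A minimal sketch of that caller-side contract, assuming Lucene 4.x types and that the sketch lives in the same package as the package-private ConstantScorer (scorerFor is a hypothetical helper, not part of this patch):

import java.io.IOException;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;
import org.apache.lucene.util.Bits;
import org.elasticsearch.common.lucene.docset.DocIdSets;

final class ScorerSketch {
    // Hypothetical helper mirroring the pattern used in both queries above.
    static Scorer scorerFor(Filter filter, AtomicReaderContext context, Bits acceptDocs,
                            Weight weight, float constantScore) throws IOException {
        DocIdSet docIdSet = filter.getDocIdSet(context, acceptDocs);
        if (DocIdSets.isEmpty(docIdSet)) {
            return null; // the filter matches nothing in this segment
        }
        DocIdSetIterator iterator = docIdSet.iterator();
        if (iterator == null) {
            return null; // a DocIdSet without an iterator behaves as empty
        }
        return ConstantScorer.create(iterator, weight, constantScore);
    }
}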


@@ -29,7 +29,6 @@ import org.elasticsearch.ElasticSearchException;
 import org.elasticsearch.common.bytes.HashedBytesArray;
 import org.elasticsearch.common.lease.Releasable;
 import org.elasticsearch.common.lucene.docset.DocIdSets;
-import org.elasticsearch.common.lucene.docset.MatchDocIdSet;
 import org.elasticsearch.common.lucene.search.ApplyAcceptedDocsFilter;
 import org.elasticsearch.common.lucene.search.NoopCollector;
 import org.elasticsearch.common.lucene.search.Queries;
@@ -49,21 +48,14 @@ public class ParentConstantScoreQuery extends Query {
 private final Query originalParentQuery;
 private final String parentType;
 private final Filter childrenFilter;
-private final boolean applyAcceptedDocs;
 private Query rewrittenParentQuery;
 private IndexReader rewriteIndexReader;
-public ParentConstantScoreQuery(Query parentQuery, String parentType, Filter childrenFilter, boolean applyAcceptedDocs) {
+public ParentConstantScoreQuery(Query parentQuery, String parentType, Filter childrenFilter) {
 this.originalParentQuery = parentQuery;
 this.parentType = parentType;
-// In case the childrenFilter is cached.
-if (applyAcceptedDocs) {
-this.childrenFilter = new ApplyAcceptedDocsFilter(childrenFilter);
-} else {
-this.childrenFilter = childrenFilter;
-}
-this.applyAcceptedDocs = applyAcceptedDocs;
+this.childrenFilter = childrenFilter;
 }
 @Override
@@ -102,20 +94,22 @@ public class ParentConstantScoreQuery extends Query {
 return Queries.newMatchNoDocsQuery().createWeight(searcher);
 }
-ChildrenWeight childrenWeight = new ChildrenWeight(searchContext, parents);
+ChildrenWeight childrenWeight = new ChildrenWeight(childrenFilter, searchContext, parents);
 searchContext.addReleasable(childrenWeight);
 return childrenWeight;
 }
 private final class ChildrenWeight extends Weight implements Releasable {
+private final Filter childrenFilter;
 private final SearchContext searchContext;
 private final Recycler.V<ObjectOpenHashSet<HashedBytesArray>> parents;
 private float queryNorm;
 private float queryWeight;
-private ChildrenWeight(SearchContext searchContext, Recycler.V<ObjectOpenHashSet<HashedBytesArray>> parents) {
+private ChildrenWeight(Filter childrenFilter, SearchContext searchContext, Recycler.V<ObjectOpenHashSet<HashedBytesArray>> parents) {
+this.childrenFilter = new ApplyAcceptedDocsFilter(childrenFilter);
 this.searchContext = searchContext;
 this.parents = parents;
 }
@@ -144,23 +138,20 @@ public class ParentConstantScoreQuery extends Query {
 @Override
 public Scorer scorer(AtomicReaderContext context, boolean scoreDocsInOrder, boolean topScorer, Bits acceptDocs) throws IOException {
-if (!applyAcceptedDocs) {
-acceptDocs = null;
-}
 DocIdSet childrenDocIdSet = childrenFilter.getDocIdSet(context, acceptDocs);
 if (DocIdSets.isEmpty(childrenDocIdSet)) {
 return null;
 }
-Bits childrenBits = DocIdSets.toSafeBits(context.reader(), childrenDocIdSet);
 IdReaderTypeCache idReaderTypeCache = searchContext.idCache().reader(context.reader()).type(parentType);
 if (idReaderTypeCache != null) {
-DocIdSet docIdSet = new ChildrenDocSet(context.reader(), childrenBits, parents.v(), idReaderTypeCache);
-return ConstantScorer.create(docIdSet, this, queryWeight);
-} else {
-return null;
+DocIdSetIterator innerIterator = childrenDocIdSet.iterator();
+if (innerIterator != null) {
+ChildrenDocIdIterator childrenDocIdIterator = new ChildrenDocIdIterator(innerIterator, parents.v(), idReaderTypeCache);
+return ConstantScorer.create(childrenDocIdIterator, this, queryWeight);
+}
 }
+return null;
 }
 @Override
@@ -169,19 +160,19 @@ public class ParentConstantScoreQuery extends Query {
 return true;
 }
-private final class ChildrenDocSet extends MatchDocIdSet {
+private final class ChildrenDocIdIterator extends FilteredDocIdSetIterator {
 private final ObjectOpenHashSet<HashedBytesArray> parents;
 private final IdReaderTypeCache idReaderTypeCache;
-ChildrenDocSet(IndexReader reader, Bits acceptDocs, ObjectOpenHashSet<HashedBytesArray> parents, IdReaderTypeCache idReaderTypeCache) {
-super(reader.maxDoc(), acceptDocs);
+ChildrenDocIdIterator(DocIdSetIterator innerIterator, ObjectOpenHashSet<HashedBytesArray> parents, IdReaderTypeCache idReaderTypeCache) {
+super(innerIterator);
 this.parents = parents;
 this.idReaderTypeCache = idReaderTypeCache;
 }
 @Override
-protected boolean matchDoc(int doc) {
+protected boolean match(int doc) {
 return parents.contains(idReaderTypeCache.parentIdByDoc(doc));
 }


@@ -18,16 +18,14 @@
 package org.elasticsearch.index.search.child;
+import com.carrotsearch.hppc.IntOpenHashSet;
 import com.carrotsearch.hppc.ObjectObjectOpenHashMap;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.StringField;
 import org.apache.lucene.index.*;
 import org.apache.lucene.queries.TermFilter;
-import org.apache.lucene.search.DocIdSetIterator;
-import org.apache.lucene.search.IndexSearcher;
-import org.apache.lucene.search.Query;
-import org.apache.lucene.search.TermQuery;
+import org.apache.lucene.search.*;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.FixedBitSet;
 import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
@@ -108,7 +106,7 @@ public class ChildrenConstantScoreQueryTests extends ElasticsearchLuceneTestCase
 TermQuery childQuery = new TermQuery(new Term("field1", "value" + (1 + random().nextInt(3))));
 TermFilter parentFilter = new TermFilter(new Term(TypeFieldMapper.NAME, "parent"));
 int shortCircuitParentDocSet = random().nextInt(5);
-ChildrenConstantScoreQuery query = new ChildrenConstantScoreQuery(childQuery, "parent", "child", parentFilter, shortCircuitParentDocSet, true);
+ChildrenConstantScoreQuery query = new ChildrenConstantScoreQuery(childQuery, "parent", "child", parentFilter, shortCircuitParentDocSet);
 BitSetCollector collector = new BitSetCollector(indexReader.maxDoc());
 searcher.search(query, collector);
@@ -131,6 +129,7 @@ public class ChildrenConstantScoreQueryTests extends ElasticsearchLuceneTestCase
 childValues[i] = Integer.toString(i);
 }
+IntOpenHashSet initialDeletedParentIds = new IntOpenHashSet();
 int childDocId = 0;
 int numParentDocs = 1 + random().nextInt(TEST_NIGHTLY ? 20000 : 1000);
 ObjectObjectOpenHashMap<String, NavigableSet<String>> childValueToParentIds = new ObjectObjectOpenHashMap<String, NavigableSet<String>>();
@@ -141,6 +140,7 @@ public class ChildrenConstantScoreQueryTests extends ElasticsearchLuceneTestCase
 document.add(new StringField(UidFieldMapper.NAME, Uid.createUid("parent", parent), Field.Store.YES));
 document.add(new StringField(TypeFieldMapper.NAME, "parent", Field.Store.NO));
 if (markParentAsDeleted) {
+initialDeletedParentIds.add(parentDocId);
 document.add(new StringField("delete", "me", Field.Store.NO));
 }
 indexWriter.addDocument(document);
@@ -182,7 +182,7 @@ public class ChildrenConstantScoreQueryTests extends ElasticsearchLuceneTestCase
 // Delete docs that are marked to be deleted.
 indexWriter.deleteDocuments(new Term("delete", "me"));
-indexWriter.close();
+indexWriter.commit();
 IndexReader indexReader = DirectoryReader.open(directory);
 IndexSearcher searcher = new IndexSearcher(indexReader);
 Engine.Searcher engineSearcher = new Engine.SimpleSearcher(
@@ -190,22 +190,57 @@ public class ChildrenConstantScoreQueryTests extends ElasticsearchLuceneTestCase
 );
 ((TestSearchContext) SearchContext.current()).setSearcher(new ContextIndexSearcher(SearchContext.current(), engineSearcher));
-TermFilter parentFilter = new TermFilter(new Term(TypeFieldMapper.NAME, "parent"));
+Filter rawParentFilter = new TermFilter(new Term(TypeFieldMapper.NAME, "parent"));
 int max = numUniqueChildValues / 4;
 for (int i = 0; i < max; i++) {
+// Randomly pick a cached version: there is specific logic inside ChildrenQuery that deals with the fact
+// that deletes are applied at the top level when filters are cached.
+Filter parentFilter;
+if (random().nextBoolean()) {
+parentFilter = SearchContext.current().filterCache().cache(rawParentFilter);
+} else {
+parentFilter = rawParentFilter;
+}
+// Simulate a parent update
+if (random().nextBoolean()) {
+int numberOfUpdates = 1 + random().nextInt(TEST_NIGHTLY ? 25 : 5);
+for (int j = 0; j < numberOfUpdates; j++) {
+int parentId;
+do {
+parentId = random().nextInt(numParentDocs);
+} while (initialDeletedParentIds.contains(parentId));
+String parentUid = Uid.createUid("parent", Integer.toString(parentId));
+indexWriter.deleteDocuments(new Term(UidFieldMapper.NAME, parentUid));
+Document document = new Document();
+document.add(new StringField(UidFieldMapper.NAME, parentUid, Field.Store.YES));
+document.add(new StringField(TypeFieldMapper.NAME, "parent", Field.Store.NO));
+indexWriter.addDocument(document);
+}
+indexReader.close();
+indexReader = DirectoryReader.open(indexWriter.w, true);
+searcher = new IndexSearcher(indexReader);
+engineSearcher = new Engine.SimpleSearcher(
+ChildrenConstantScoreQueryTests.class.getSimpleName(), searcher
+);
+((TestSearchContext) SearchContext.current()).setSearcher(new ContextIndexSearcher(SearchContext.current(), engineSearcher));
+}
 String childValue = childValues[random().nextInt(numUniqueChildValues)];
 TermQuery childQuery = new TermQuery(new Term("field1", childValue));
 int shortCircuitParentDocSet = random().nextInt(numParentDocs);
 Query query;
-boolean applyAcceptedDocs = random().nextBoolean();
-if (applyAcceptedDocs) {
+if (random().nextBoolean()) {
 // Usage in HasChildQueryParser
-query = new ChildrenConstantScoreQuery(childQuery, "parent", "child", parentFilter, shortCircuitParentDocSet, applyAcceptedDocs);
+query = new ChildrenConstantScoreQuery(childQuery, "parent", "child", parentFilter, shortCircuitParentDocSet);
 } else {
 // Usage in HasChildFilterParser
 query = new XConstantScoreQuery(
 new CustomQueryWrappingFilter(
-new ChildrenConstantScoreQuery(childQuery, "parent", "child", parentFilter, shortCircuitParentDocSet, applyAcceptedDocs)
+new ChildrenConstantScoreQuery(childQuery, "parent", "child", parentFilter, shortCircuitParentDocSet)
 )
 );
 }
@@ -236,6 +271,7 @@ public class ChildrenConstantScoreQueryTests extends ElasticsearchLuceneTestCase
 assertBitSet(actualResult, expectedResult, searcher);
 }
 indexWriter.close();
+indexReader.close();
 directory.close();
 }
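
The update simulation in these tests boils down to deleting and re-adding a document under the same uid, then reopening a near-real-time reader so the queries see the new deletes. A minimal sketch with plain Lucene 4.x APIs (the tests reach the underlying writer through RandomIndexWriter's indexWriter.w; the field names "_uid" and "_type" stand in for UidFieldMapper.NAME and TypeFieldMapper.NAME):

import java.io.IOException;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.Term;

final class UpdateAndReopenSketch {
    // Sketch: re-index a parent doc under the same uid and reopen an NRT
    // reader, which is what exercises the accepted-docs handling above.
    static DirectoryReader updateParent(IndexWriter writer, DirectoryReader oldReader,
                                        String parentUid) throws IOException {
        writer.deleteDocuments(new Term("_uid", parentUid));
        Document doc = new Document();
        doc.add(new StringField("_uid", parentUid, Field.Store.YES));
        doc.add(new StringField("_type", "parent", Field.Store.NO));
        writer.addDocument(doc);
        // applyAllDeletes = true so the delete above is visible to the new reader
        DirectoryReader newReader = DirectoryReader.open(writer, true);
        oldReader.close();
        return newReader;
    }
}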


@@ -19,6 +19,7 @@
 package org.elasticsearch.index.search.child;
 import com.carrotsearch.hppc.FloatArrayList;
+import com.carrotsearch.hppc.IntOpenHashSet;
 import com.carrotsearch.hppc.ObjectObjectOpenHashMap;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
@@ -74,6 +75,8 @@ public class ChildrenQueryTests extends ElasticsearchLuceneTestCase {
 childValues[i] = Integer.toString(i);
 }
+IntOpenHashSet filteredOrDeletedDocs = new IntOpenHashSet();
 int childDocId = 0;
 int numParentDocs = 1 + random().nextInt(TEST_NIGHTLY ? 20000 : 1000);
 ObjectObjectOpenHashMap<String, NavigableMap<String, FloatArrayList>> childValueToParentIds = new ObjectObjectOpenHashMap<String, NavigableMap<String, FloatArrayList>>();
@@ -85,9 +88,11 @@ public class ChildrenQueryTests extends ElasticsearchLuceneTestCase {
 document.add(new StringField(UidFieldMapper.NAME, Uid.createUid("parent", parent), Field.Store.YES));
 document.add(new StringField(TypeFieldMapper.NAME, "parent", Field.Store.NO));
 if (markParentAsDeleted) {
+filteredOrDeletedDocs.add(parentDocId);
 document.add(new StringField("delete", "me", Field.Store.NO));
 }
 if (filterMe) {
+filteredOrDeletedDocs.add(parentDocId);
 document.add(new StringField("filter", "me", Field.Store.NO));
 }
 indexWriter.addDocument(document);
@@ -132,8 +137,8 @@ public class ChildrenQueryTests extends ElasticsearchLuceneTestCase {
 // Delete docs that are marked to be deleted.
 indexWriter.deleteDocuments(new Term("delete", "me"));
-indexWriter.close();
+indexWriter.commit();
 IndexReader indexReader = DirectoryReader.open(directory);
 IndexSearcher searcher = new IndexSearcher(indexReader);
 Engine.Searcher engineSearcher = new Engine.SimpleSearcher(
@@ -161,6 +166,33 @@ public class ChildrenQueryTests extends ElasticsearchLuceneTestCase {
 filterMe = rawFilterMe;
 }
+// Simulate a parent update
+if (random().nextBoolean()) {
+int numberOfUpdates = 1 + random().nextInt(TEST_NIGHTLY ? 25 : 5);
+for (int j = 0; j < numberOfUpdates; j++) {
+int parentId;
+do {
+parentId = random().nextInt(numParentDocs);
+} while (filteredOrDeletedDocs.contains(parentId));
+String parentUid = Uid.createUid("parent", Integer.toString(parentId));
+indexWriter.deleteDocuments(new Term(UidFieldMapper.NAME, parentUid));
+Document document = new Document();
+document.add(new StringField(UidFieldMapper.NAME, parentUid, Field.Store.YES));
+document.add(new StringField(TypeFieldMapper.NAME, "parent", Field.Store.NO));
+indexWriter.addDocument(document);
+}
+indexReader.close();
+indexReader = DirectoryReader.open(indexWriter.w, true);
+searcher = new IndexSearcher(indexReader);
+engineSearcher = new Engine.SimpleSearcher(
+ChildrenConstantScoreQueryTests.class.getSimpleName(), searcher
+);
+((TestSearchContext) SearchContext.current()).setSearcher(new ContextIndexSearcher(SearchContext.current(), engineSearcher));
+}
 String childValue = childValues[random().nextInt(numUniqueChildValues)];
 Query childQuery = new ConstantScoreQuery(new TermQuery(new Term("field1", childValue)));
 int shortCircuitParentDocSet = random().nextInt(numParentDocs);
@@ -202,6 +234,7 @@ public class ChildrenQueryTests extends ElasticsearchLuceneTestCase {
 assertTopDocs(actualTopDocsCollector.topDocs(), expectedTopDocsCollector.topDocs());
 }
 indexWriter.close();
+indexReader.close();
 directory.close();
 }


@@ -18,12 +18,14 @@
 package org.elasticsearch.index.search.child;
+import com.carrotsearch.hppc.IntIntOpenHashMap;
 import com.carrotsearch.hppc.ObjectObjectOpenHashMap;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.StringField;
 import org.apache.lucene.index.*;
 import org.apache.lucene.queries.TermFilter;
+import org.apache.lucene.search.Filter;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.TermQuery;
@@ -77,6 +79,7 @@ public class ParentConstantScoreQueryTests extends ElasticsearchLuceneTestCase {
 int childDocId = 0;
 int numParentDocs = 1 + random().nextInt(TEST_NIGHTLY ? 10000 : 1000);
 ObjectObjectOpenHashMap<String, NavigableSet<String>> parentValueToChildDocIds = new ObjectObjectOpenHashMap<String, NavigableSet<String>>();
+IntIntOpenHashMap childIdToParentId = new IntIntOpenHashMap();
 for (int parentDocId = 0; parentDocId < numParentDocs; parentDocId++) {
 boolean markParentAsDeleted = rarely();
 String parentValue = parentValues[random().nextInt(parentValues.length)];
@@ -117,6 +120,7 @@ public class ParentConstantScoreQueryTests extends ElasticsearchLuceneTestCase {
 parentValueToChildDocIds.put(parentValue, childIds = new TreeSet<String>());
 }
 if (!markParentAsDeleted) {
+childIdToParentId.put(Integer.valueOf(child), parentDocId);
 childIds.add(child);
 }
 }
@@ -125,8 +129,8 @@ public class ParentConstantScoreQueryTests extends ElasticsearchLuceneTestCase {
 // Delete docs that are marked to be deleted.
 indexWriter.deleteDocuments(new Term("delete", "me"));
-indexWriter.close();
+indexWriter.commit();
 IndexReader indexReader = DirectoryReader.open(directory);
 IndexSearcher searcher = new IndexSearcher(indexReader);
 Engine.Searcher engineSearcher = new Engine.SimpleSearcher(
@@ -134,21 +138,55 @@ public class ParentConstantScoreQueryTests extends ElasticsearchLuceneTestCase {
 );
 ((TestSearchContext) SearchContext.current()).setSearcher(new ContextIndexSearcher(SearchContext.current(), engineSearcher));
-TermFilter childrenFilter = new TermFilter(new Term(TypeFieldMapper.NAME, "child"));
+TermFilter rawChildrenFilter = new TermFilter(new Term(TypeFieldMapper.NAME, "child"));
 int max = numUniqueParentValues / 4;
 for (int i = 0; i < max; i++) {
+// Randomly pick a cached version: there is specific logic inside ChildrenQuery that deals with the fact
+// that deletes are applied at the top level when filters are cached.
+Filter childrenFilter;
+if (random().nextBoolean()) {
+childrenFilter = SearchContext.current().filterCache().cache(rawChildrenFilter);
+} else {
+childrenFilter = rawChildrenFilter;
+}
+// Simulate a child update
+if (random().nextBoolean()) {
+int numberOfUpdates = 1 + random().nextInt(TEST_NIGHTLY ? 25 : 5);
+int[] childIds = childIdToParentId.keys().toArray();
+for (int j = 0; j < numberOfUpdates; j++) {
+int childId = childIds[random().nextInt(childIds.length)];
+String childUid = Uid.createUid("child", Integer.toString(childId));
+indexWriter.deleteDocuments(new Term(UidFieldMapper.NAME, childUid));
+Document document = new Document();
+document.add(new StringField(UidFieldMapper.NAME, childUid, Field.Store.YES));
+document.add(new StringField(TypeFieldMapper.NAME, "child", Field.Store.NO));
+String parentUid = Uid.createUid("parent", Integer.toString(childIdToParentId.get(childId)));
+document.add(new StringField(ParentFieldMapper.NAME, parentUid, Field.Store.NO));
+indexWriter.addDocument(document);
+}
+indexReader.close();
+indexReader = DirectoryReader.open(indexWriter.w, true);
+searcher = new IndexSearcher(indexReader);
+engineSearcher = new Engine.SimpleSearcher(
+ParentConstantScoreQueryTests.class.getSimpleName(), searcher
+);
+((TestSearchContext) SearchContext.current()).setSearcher(new ContextIndexSearcher(SearchContext.current(), engineSearcher));
+}
 String parentValue = parentValues[random().nextInt(numUniqueParentValues)];
 TermQuery parentQuery = new TermQuery(new Term("field1", parentValue));
 Query query;
-boolean applyAcceptedDocs = random().nextBoolean();
-if (applyAcceptedDocs) {
+if (random().nextBoolean()) {
 // Usage in HasParentQueryParser
-query = new ParentConstantScoreQuery(parentQuery, "parent", childrenFilter, applyAcceptedDocs);
+query = new ParentConstantScoreQuery(parentQuery, "parent", childrenFilter);
 } else {
 // Usage in HasParentFilterParser
 query = new XConstantScoreQuery(
 new CustomQueryWrappingFilter(
-new ParentConstantScoreQuery(parentQuery, "parent", childrenFilter, applyAcceptedDocs)
+new ParentConstantScoreQuery(parentQuery, "parent", childrenFilter)
 )
 );
 }
@@ -179,6 +217,7 @@ public class ParentConstantScoreQueryTests extends ElasticsearchLuceneTestCase {
 assertBitSet(actualResult, expectedResult, searcher);
 }
 indexWriter.close();
+indexReader.close();
 directory.close();
 }


@@ -19,6 +19,7 @@
 package org.elasticsearch.index.search.child;
 import com.carrotsearch.hppc.FloatArrayList;
+import com.carrotsearch.hppc.IntIntOpenHashMap;
 import com.carrotsearch.hppc.ObjectObjectOpenHashMap;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
@@ -75,6 +76,7 @@ public class ParentQueryTests extends ElasticsearchLuceneTestCase {
 int childDocId = 0;
 int numParentDocs = 1 + random().nextInt(TEST_NIGHTLY ? 20000 : 1000);
 ObjectObjectOpenHashMap<String, NavigableMap<String, Float>> parentValueToChildIds = new ObjectObjectOpenHashMap<String, NavigableMap<String, Float>>();
+IntIntOpenHashMap childIdToParentId = new IntIntOpenHashMap();
 for (int parentDocId = 0; parentDocId < numParentDocs; parentDocId++) {
 boolean markParentAsDeleted = rarely();
 String parentValue = parentValues[random().nextInt(parentValues.length)];
@@ -116,6 +118,7 @@ public class ParentQueryTests extends ElasticsearchLuceneTestCase {
 if (!markParentAsDeleted) {
 assert !childIdToScore.containsKey(child);
 childIdToScore.put(child, 1f);
+childIdToParentId.put(Integer.valueOf(child), parentDocId);
 }
 }
 }
@@ -123,8 +126,8 @@ public class ParentQueryTests extends ElasticsearchLuceneTestCase {
 // Delete docs that are marked to be deleted.
 indexWriter.deleteDocuments(new Term("delete", "me"));
-indexWriter.close();
+indexWriter.commit();
 IndexReader indexReader = DirectoryReader.open(directory);
 IndexSearcher searcher = new IndexSearcher(indexReader);
 Engine.Searcher engineSearcher = new Engine.SimpleSearcher(
@@ -132,12 +135,47 @@ public class ParentQueryTests extends ElasticsearchLuceneTestCase {
 );
 ((TestSearchContext) SearchContext.current()).setSearcher(new ContextIndexSearcher(SearchContext.current(), engineSearcher));
-TermFilter childFilter = new TermFilter(new Term(TypeFieldMapper.NAME, "child"));
+TermFilter rawChildrenFilter = new TermFilter(new Term(TypeFieldMapper.NAME, "child"));
 int max = numUniqueParentValues / 4;
 for (int i = 0; i < max; i++) {
+// Randomly pick a cached version: there is specific logic inside ChildrenQuery that deals with the fact
+// that deletes are applied at the top level when filters are cached.
+Filter childrenFilter;
+if (random().nextBoolean()) {
+childrenFilter = SearchContext.current().filterCache().cache(rawChildrenFilter);
+} else {
+childrenFilter = rawChildrenFilter;
+}
+// Simulate a child update
+if (random().nextBoolean()) {
+int numberOfUpdates = 1 + random().nextInt(TEST_NIGHTLY ? 25 : 5);
+int[] childIds = childIdToParentId.keys().toArray();
+for (int j = 0; j < numberOfUpdates; j++) {
+int childId = childIds[random().nextInt(childIds.length)];
+String childUid = Uid.createUid("child", Integer.toString(childId));
+indexWriter.deleteDocuments(new Term(UidFieldMapper.NAME, childUid));
+Document document = new Document();
+document.add(new StringField(UidFieldMapper.NAME, childUid, Field.Store.YES));
+document.add(new StringField(TypeFieldMapper.NAME, "child", Field.Store.NO));
+String parentUid = Uid.createUid("parent", Integer.toString(childIdToParentId.get(childId)));
+document.add(new StringField(ParentFieldMapper.NAME, parentUid, Field.Store.NO));
+indexWriter.addDocument(document);
+}
+indexReader.close();
+indexReader = DirectoryReader.open(indexWriter.w, true);
+searcher = new IndexSearcher(indexReader);
+engineSearcher = new Engine.SimpleSearcher(
+ParentConstantScoreQueryTests.class.getSimpleName(), searcher
+);
+((TestSearchContext) SearchContext.current()).setSearcher(new ContextIndexSearcher(SearchContext.current(), engineSearcher));
+}
 String parentValue = parentValues[random().nextInt(numUniqueParentValues)];
 Query parentQuery = new ConstantScoreQuery(new TermQuery(new Term("field1", parentValue)));
-Query query = new ParentQuery(parentQuery,"parent", childFilter);
+Query query = new ParentQuery(parentQuery,"parent", childrenFilter);
 BitSetCollector collector = new BitSetCollector(indexReader.maxDoc());
 int numHits = 1 + random().nextInt(25);
 TopScoreDocCollector actualTopDocsCollector = TopScoreDocCollector.create(numHits, false);
@@ -175,6 +213,7 @@ public class ParentQueryTests extends ElasticsearchLuceneTestCase {
 assertTopDocs(actualTopDocsCollector.topDocs(), expectedTopDocsCollector.topDocs());
 }
 indexWriter.close();
+indexReader.close();
 directory.close();
 }