Added random test for ParentQuery and ChildrenQuery.
This commit is contained in: parent eb55458e44, commit 688a6bd29b
ChildrenQueryTests.java
@@ -0,0 +1,189 @@
/*
 * Licensed to ElasticSearch and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. ElasticSearch licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

package org.elasticsearch.index.search.child;

import com.carrotsearch.hppc.FloatArrayList;
import com.carrotsearch.hppc.ObjectObjectOpenHashMap;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.*;
import org.apache.lucene.queries.TermFilter;
import org.apache.lucene.search.*;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.FixedBitSet;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.search.internal.ContextIndexSearcher;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.ElasticsearchLuceneTestCase;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;

import java.io.IOException;
import java.util.Map;
import java.util.NavigableMap;
import java.util.TreeMap;

import static org.elasticsearch.index.search.child.ChildrenConstantScoreQueryTests.assertBitSet;
import static org.elasticsearch.index.search.child.ChildrenConstantScoreQueryTests.createSearchContext;
import static org.hamcrest.Matchers.equalTo;

public class ChildrenQueryTests extends ElasticsearchLuceneTestCase {

    @BeforeClass
    public static void before() throws IOException {
        SearchContext.setCurrent(createSearchContext("test", "parent", "child"));
    }

    @AfterClass
    public static void after() throws IOException {
        SearchContext.removeCurrent();
    }

    @Test
    public void testRandom() throws Exception {
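        // Strategy: build a random parent/child index while recording, per child
        // value, which parent ids should match and with which child scores, then
        // verify ChildrenQuery against that bookkeeping for every child value.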
        Directory directory = newDirectory();
        RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory);
        int numUniqueChildValues = 1 + random().nextInt(TEST_NIGHTLY ? 6000 : 600);
        String[] childValues = new String[numUniqueChildValues];
        for (int i = 0; i < numUniqueChildValues; i++) {
            childValues[i] = Integer.toString(i);
        }

        int childDocId = 0;
        int numParentDocs = 1 + random().nextInt(TEST_NIGHTLY ? 20000 : 1000);
        ObjectObjectOpenHashMap<String, NavigableMap<String, FloatArrayList>> childValueToParentIds = new ObjectObjectOpenHashMap<String, NavigableMap<String, FloatArrayList>>();
        for (int parentDocId = 0; parentDocId < numParentDocs; parentDocId++) {
            boolean markParentAsDeleted = rarely();
            String parent = Integer.toString(parentDocId);
            Document document = new Document();
            document.add(new StringField(UidFieldMapper.NAME, Uid.createUid("parent", parent), Field.Store.YES));
            document.add(new StringField(TypeFieldMapper.NAME, "parent", Field.Store.NO));
            if (markParentAsDeleted) {
                document.add(new StringField("delete", "me", Field.Store.NO));
            }
            indexWriter.addDocument(document);

            int numChildDocs = random().nextInt(TEST_NIGHTLY ? 100 : 25);
            for (int i = 0; i < numChildDocs; i++) {
                boolean markChildAsDeleted = rarely();
                String childValue = childValues[random().nextInt(childValues.length)];

                document = new Document();
document.add(new StringField(UidFieldMapper.NAME, Uid.createUid("child", Integer.toString(childDocId)), Field.Store.NO));
                document.add(new StringField(TypeFieldMapper.NAME, "child", Field.Store.NO));
                document.add(new StringField(ParentFieldMapper.NAME, Uid.createUid("parent", parent), Field.Store.NO));
                document.add(new StringField("field1", childValue, Field.Store.NO));
                if (markChildAsDeleted) {
                    document.add(new StringField("delete", "me", Field.Store.NO));
                }
                indexWriter.addDocument(document);

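                // Record the expectation: this child contributes a score of 1.0 to
                // its parent, keyed by child value, unless the child or parent doc
                // is going to be deleted below.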
                if (!markChildAsDeleted) {
                    NavigableMap<String, FloatArrayList> parentIdToChildScores;
                    if (childValueToParentIds.containsKey(childValue)) {
                        parentIdToChildScores = childValueToParentIds.lget();
                    } else {
                        childValueToParentIds.put(childValue, parentIdToChildScores = new TreeMap<String, FloatArrayList>());
                    }
                    if (!markParentAsDeleted) {
                        FloatArrayList childScores = parentIdToChildScores.get(parent);
                        if (childScores == null) {
                            parentIdToChildScores.put(parent, childScores = new FloatArrayList());
                        }
                        childScores.add(1f);
                    }
                }
            }
        }

        // Delete docs that are marked to be deleted.
        indexWriter.deleteDocuments(new Term("delete", "me"));

        indexWriter.close();
        IndexReader indexReader = DirectoryReader.open(directory);
        IndexSearcher searcher = new IndexSearcher(indexReader);
        Engine.Searcher engineSearcher = new Engine.SimpleSearcher(
                ChildrenQueryTests.class.getSimpleName(), searcher
        );
        ((TestSearchContext) SearchContext.current()).setSearcher(new ContextIndexSearcher(SearchContext.current(), engineSearcher));

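        // For each child value, run the real ChildrenQuery and rebuild the expected
        // hits by hand from the recorded parent ids, comparing bit sets and top docs.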
        TermFilter parentFilter = new TermFilter(new Term(TypeFieldMapper.NAME, "parent"));
        for (String childValue : childValues) {
            Query childQuery = new ConstantScoreQuery(new TermQuery(new Term("field1", childValue)));
            int shortCircuitParentDocSet = random().nextInt(numParentDocs);
            ScoreType scoreType = ScoreType.values()[random().nextInt(ScoreType.values().length)];
            Query query = new ChildrenQuery("parent", "child", parentFilter, childQuery, scoreType, shortCircuitParentDocSet);
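            // Collect the actual hits both as a bit set and as top-N docs, so that
            // matched parents and their aggregated scores can both be checked.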
            BitSetCollector collector = new BitSetCollector(indexReader.maxDoc());
            int numHits = 1 + random().nextInt(25);
            TopScoreDocCollector actualTopDocsCollector = TopScoreDocCollector.create(numHits, false);
            searcher.search(query, MultiCollector.wrap(collector, actualTopDocsCollector));
            FixedBitSet actualResult = collector.getResult();

            FixedBitSet expectedResult = new FixedBitSet(indexReader.maxDoc());
            MockScorer mockScorer = new MockScorer(scoreType);
            TopScoreDocCollector expectedTopDocsCollector = TopScoreDocCollector.create(numHits, false);
            expectedTopDocsCollector.setScorer(mockScorer);
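            // Rebuild the expected result by seeking each recorded parent uid in a
            // composite view of the index and replaying its child scores via MockScorer.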
            if (childValueToParentIds.containsKey(childValue)) {
                AtomicReader slowAtomicReader = SlowCompositeReaderWrapper.wrap(indexReader);
                Terms terms = slowAtomicReader.terms(UidFieldMapper.NAME);
                if (terms != null) {
                    NavigableMap<String, FloatArrayList> parentIdToChildScores = childValueToParentIds.lget();
                    TermsEnum termsEnum = terms.iterator(null);
                    DocsEnum docsEnum = null;
                    for (Map.Entry<String, FloatArrayList> entry : parentIdToChildScores.entrySet()) {
                        TermsEnum.SeekStatus seekStatus = termsEnum.seekCeil(Uid.createUidAsBytes("parent", entry.getKey()));
                        if (seekStatus == TermsEnum.SeekStatus.FOUND) {
                            docsEnum = termsEnum.docs(slowAtomicReader.getLiveDocs(), docsEnum, DocsEnum.FLAG_NONE);
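                            // Each recorded parent is live and uids are unique, so the
                            // first nextDoc() gives that parent's doc id.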
                            expectedResult.set(docsEnum.nextDoc());
                            mockScorer.scores = entry.getValue();
                            expectedTopDocsCollector.collect(docsEnum.docID());
                        } else if (seekStatus == TermsEnum.SeekStatus.END) {
                            break;
                        }
                    }
                }
            }

            assertBitSet(actualResult, expectedResult, searcher);
            assertTopDocs(actualTopDocsCollector.topDocs(), expectedTopDocsCollector.topDocs());
        }

        indexReader.close();
        directory.close();
    }

    static void assertTopDocs(TopDocs actual, TopDocs expected) {
        assertThat("actual.totalHits != expected.totalHits", actual.totalHits, equalTo(expected.totalHits));
        assertThat("actual.getMaxScore() != expected.getMaxScore()", actual.getMaxScore(), equalTo(expected.getMaxScore()));
assertThat("actual.scoreDocs.length != expected.scoreDocs.length", actual.scoreDocs.length, equalTo(actual.scoreDocs.length));
        for (int i = 0; i < actual.scoreDocs.length; i++) {
            ScoreDoc actualHit = actual.scoreDocs[i];
            ScoreDoc expectedHit = expected.scoreDocs[i];
            assertThat("actualHit.doc != expectedHit.doc", actualHit.doc, equalTo(expectedHit.doc));
            assertThat("actualHit.score != expectedHit.score", actualHit.score, equalTo(expectedHit.score));
        }
    }

}

MockScorer.java
@@ -0,0 +1,85 @@
/*
 * Licensed to ElasticSearch and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. ElasticSearch licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

package org.elasticsearch.index.search.child;

import com.carrotsearch.hppc.FloatArrayList;
import org.apache.lucene.search.Scorer;

import java.io.IOException;

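// Test-only Scorer that replays a recorded list of child scores, aggregating
// them according to the ScoreType under test (MAX, SUM, or AVG).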
class MockScorer extends Scorer {

    final ScoreType scoreType;
    FloatArrayList scores;

    MockScorer(ScoreType scoreType) {
        super(null);
        this.scoreType = scoreType;
    }

    @Override
    public float score() throws IOException {
        float aggregateScore = 0;
        for (int i = 0; i < scores.elementsCount; i++) {
            float score = scores.buffer[i];
            switch (scoreType) {
                case MAX:
                    if (aggregateScore < score) {
                        aggregateScore = score;
                    }
                    break;
                case SUM:
                case AVG:
                    aggregateScore += score;
                    break;
            }
        }

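        // AVG shares the summing branch above; divide by the count to get the average.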
        if (scoreType == ScoreType.AVG) {
            aggregateScore /= scores.elementsCount;
        }

        return aggregateScore;
    }

    @Override
    public int freq() throws IOException {
        return 0;
    }

    @Override
    public int docID() {
        return 0;
    }

    @Override
    public int nextDoc() throws IOException {
        return 0;
    }

    @Override
    public int advance(int target) throws IOException {
        return 0;
    }

    @Override
    public long cost() {
        return 0;
    }
}

ParentConstantScoreQueryTests.java
@@ -67,18 +67,15 @@ public class ParentConstantScoreQueryTests extends ElasticsearchLuceneTestCase {
     public void testRandom() throws Exception {
         Directory directory = newDirectory();
         RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory);
 
-
-        int numUniqueChildValues = 1 + random().nextInt(TEST_NIGHTLY ? 20000 : 1000);
-        String[] parentValues = new String[numUniqueChildValues];
-        for (int i = 0; i < numUniqueChildValues; i++) {
+        int numUniqueParentValues = 1 + random().nextInt(TEST_NIGHTLY ? 20000 : 1000);
+        String[] parentValues = new String[numUniqueParentValues];
+        for (int i = 0; i < numUniqueParentValues; i++) {
             parentValues[i] = Integer.toString(i);
         }
 
-        ObjectObjectOpenHashMap<String, NavigableSet<String>> parentValueToChildDocIds = new ObjectObjectOpenHashMap<String, NavigableSet<String>>();
-
         int childDocId = 0;
         int numParentDocs = 1 + random().nextInt(TEST_NIGHTLY ? 10000 : 1000);
+        ObjectObjectOpenHashMap<String, NavigableSet<String>> parentValueToChildDocIds = new ObjectObjectOpenHashMap<String, NavigableSet<String>>();
         for (int parentDocId = 0; parentDocId < numParentDocs; parentDocId++) {
             boolean markParentAsDeleted = rarely();
             String parentValue = parentValues[random().nextInt(parentValues.length)];

@@ -127,7 +124,7 @@ public class ParentConstantScoreQueryTests extends ElasticsearchLuceneTestCase {
         IndexReader indexReader = DirectoryReader.open(directory);
         IndexSearcher searcher = new IndexSearcher(indexReader);
         Engine.Searcher engineSearcher = new Engine.SimpleSearcher(
-                ChildrenConstantScoreQueryTests.class.getSimpleName(), searcher
+                ParentConstantScoreQuery.class.getSimpleName(), searcher
         );
         ((TestSearchContext) SearchContext.current()).setSearcher(new ContextIndexSearcher(SearchContext.current(), engineSearcher));
 

ParentQueryTests.java
@@ -0,0 +1,174 @@
/*
 * Licensed to ElasticSearch and Shay Banon under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. ElasticSearch licenses this
 * file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

package org.elasticsearch.index.search.child;

import com.carrotsearch.hppc.FloatArrayList;
import com.carrotsearch.hppc.ObjectObjectOpenHashMap;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.*;
import org.apache.lucene.queries.TermFilter;
import org.apache.lucene.search.*;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.FixedBitSet;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.search.internal.ContextIndexSearcher;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.ElasticsearchLuceneTestCase;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;

import java.io.IOException;
import java.util.Map;
import java.util.NavigableMap;
import java.util.TreeMap;

import static org.elasticsearch.index.search.child.ChildrenConstantScoreQueryTests.assertBitSet;
import static org.elasticsearch.index.search.child.ChildrenConstantScoreQueryTests.createSearchContext;
import static org.elasticsearch.index.search.child.ChildrenQueryTests.assertTopDocs;

public class ParentQueryTests extends ElasticsearchLuceneTestCase {

    @BeforeClass
    public static void before() throws IOException {
        SearchContext.setCurrent(createSearchContext("test", "parent", "child"));
    }

    @AfterClass
    public static void after() throws IOException {
        SearchContext.removeCurrent();
    }

    @Test
    public void testRandom() throws Exception {
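        // Mirror image of ChildrenQueryTests.testRandom(): index random parents and
        // children, record which child ids each parent value should produce, then
        // check ParentQuery against that expectation.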
        Directory directory = newDirectory();
        RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory);
        int numUniqueParentValues = 1 + random().nextInt(TEST_NIGHTLY ? 6000 : 600);
        String[] parentValues = new String[numUniqueParentValues];
        for (int i = 0; i < numUniqueParentValues; i++) {
            parentValues[i] = Integer.toString(i);
        }

        int childDocId = 0;
        int numParentDocs = 1 + random().nextInt(TEST_NIGHTLY ? 20000 : 1000);
        ObjectObjectOpenHashMap<String, NavigableMap<String, Float>> parentValueToChildIds = new ObjectObjectOpenHashMap<String, NavigableMap<String, Float>>();
        for (int parentDocId = 0; parentDocId < numParentDocs; parentDocId++) {
            boolean markParentAsDeleted = rarely();
            String parentValue = parentValues[random().nextInt(parentValues.length)];
            String parent = Integer.toString(parentDocId);
            Document document = new Document();
            document.add(new StringField(UidFieldMapper.NAME, Uid.createUid("parent", parent), Field.Store.NO));
            document.add(new StringField(TypeFieldMapper.NAME, "parent", Field.Store.NO));
            document.add(new StringField("field1", parentValue, Field.Store.NO));
            if (markParentAsDeleted) {
                document.add(new StringField("delete", "me", Field.Store.NO));
            }
            indexWriter.addDocument(document);

            int numChildDocs = random().nextInt(TEST_NIGHTLY ? 100 : 25);
            for (int i = 0; i < numChildDocs; i++) {
                String child = Integer.toString(childDocId++);
                boolean markChildAsDeleted = rarely();
                document = new Document();
                document.add(new StringField(UidFieldMapper.NAME, Uid.createUid("child", child), Field.Store.YES));
                document.add(new StringField(TypeFieldMapper.NAME, "child", Field.Store.NO));
                document.add(new StringField(ParentFieldMapper.NAME, Uid.createUid("parent", parent), Field.Store.NO));
                if (markChildAsDeleted) {
                    document.add(new StringField("delete", "me", Field.Store.NO));
                }
                indexWriter.addDocument(document);

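                // Record the expectation: each live child id maps to score 1.0 under
                // its parent's value, unless the parent doc is deleted.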
                if (!markChildAsDeleted) {
                    NavigableMap<String, Float> childIdToScore;
                    if (parentValueToChildIds.containsKey(parentValue)) {
                        childIdToScore = parentValueToChildIds.lget();
                    } else {
                        parentValueToChildIds.put(parentValue, childIdToScore = new TreeMap<String, Float>());
                    }
                    if (!markParentAsDeleted) {
                        assert !childIdToScore.containsKey(child);
                        childIdToScore.put(child, 1f);
                    }
                }
            }
        }

        // Delete docs that are marked to be deleted.
        indexWriter.deleteDocuments(new Term("delete", "me"));

        indexWriter.close();
        IndexReader indexReader = DirectoryReader.open(directory);
        IndexSearcher searcher = new IndexSearcher(indexReader);
        Engine.Searcher engineSearcher = new Engine.SimpleSearcher(
                ParentQueryTests.class.getSimpleName(), searcher
        );
        ((TestSearchContext) SearchContext.current()).setSearcher(new ContextIndexSearcher(SearchContext.current(), engineSearcher));

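        // For each parent value, run the real ParentQuery and rebuild the expected
        // child hits by hand, comparing bit sets and top docs.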
TermFilter childFilter = new TermFilter(new Term(TypeFieldMapper.NAME, "child"));
|
||||
for (String parentValue : parentValues) {
|
||||
Query parentQuery = new ConstantScoreQuery(new TermQuery(new Term("field1", parentValue)));
|
||||
Query query = new ParentQuery(parentQuery,"parent", childFilter);
|
||||
BitSetCollector collector = new BitSetCollector(indexReader.maxDoc());
|
||||
int numHits = 1 + random().nextInt(25);
|
||||
TopScoreDocCollector actualTopDocsCollector = TopScoreDocCollector.create(numHits, false);
|
||||
searcher.search(query, MultiCollector.wrap(collector, actualTopDocsCollector));
|
||||
FixedBitSet actualResult = collector.getResult();
|
||||
|
||||
FixedBitSet expectedResult = new FixedBitSet(indexReader.maxDoc());
|
||||
MockScorer mockScorer = new MockScorer(ScoreType.MAX); // just save one score per parent...
|
||||
mockScorer.scores = new FloatArrayList();
|
||||
TopScoreDocCollector expectedTopDocsCollector = TopScoreDocCollector.create(numHits, false);
|
||||
expectedTopDocsCollector.setScorer(mockScorer);
|
||||
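            // Rebuild the expected result by seeking each recorded child uid; the
            // score list is cleared after every hit so MAX sees one score per child.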
            if (parentValueToChildIds.containsKey(parentValue)) {
                AtomicReader slowAtomicReader = SlowCompositeReaderWrapper.wrap(indexReader);
                Terms terms = slowAtomicReader.terms(UidFieldMapper.NAME);
                if (terms != null) {
                    NavigableMap<String, Float> childIdsAndScore = parentValueToChildIds.lget();
                    TermsEnum termsEnum = terms.iterator(null);
                    DocsEnum docsEnum = null;
                    for (Map.Entry<String, Float> entry : childIdsAndScore.entrySet()) {
                        TermsEnum.SeekStatus seekStatus = termsEnum.seekCeil(Uid.createUidAsBytes("child", entry.getKey()));
                        if (seekStatus == TermsEnum.SeekStatus.FOUND) {
                            docsEnum = termsEnum.docs(slowAtomicReader.getLiveDocs(), docsEnum, DocsEnum.FLAG_NONE);
                            expectedResult.set(docsEnum.nextDoc());
                            mockScorer.scores.add(entry.getValue());
                            expectedTopDocsCollector.collect(docsEnum.docID());
                            mockScorer.scores.clear();
                        } else if (seekStatus == TermsEnum.SeekStatus.END) {
                            break;
                        }
                    }
                }
            }

            assertBitSet(actualResult, expectedResult, searcher);
            assertTopDocs(actualTopDocsCollector.topDocs(), expectedTopDocsCollector.topDocs());
        }

        indexReader.close();
        directory.close();
    }

}

TestSearchContext.java
@@ -67,6 +67,7 @@ class TestSearchContext extends SearchContext {
     final IndexService indexService;
 
     ContextIndexSearcher searcher;
+    int size;
 
     TestSearchContext(CacheRecycler cacheRecycler, IdCache idCache, IndexService indexService) {
         this.cacheRecycler = cacheRecycler;

@@ -390,9 +391,14 @@ class TestSearchContext extends SearchContext {
 
     @Override
     public int size() {
-        return 0;
+        return size;
     }
 
+    public void setSize(int size) {
+        this.size = size;
+    }
+
+
     @Override
     public SearchContext size(int size) {
         return null;