parent/child: use the index searcher from the engine searcher directly for the JoinUtil-based impl

and use the provided index searcher directly for the pre-2.0 parent/child impl.
Martijn van Groningen 2015-08-14 10:57:06 +02:00
parent bd0753eaa3
commit 3b400aafd6
20 changed files with 87 additions and 264 deletions
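
To make the intent concrete, here is a minimal sketch (an editor's illustration, not code from this commit) of the JoinUtil-based has_child wiring after the change: the parser reuses searchContext.searcher(), which is backed by the engine searcher, instead of building a fresh IndexSearcher around the reader and disabling its query cache. The method shape and its name are hypothetical; the individual calls mirror the locals visible in the HasChildQueryParser hunk below (parentChildIndexFieldData, parentType, innerQuery, toQuery, scoreMode, minChildren, maxChildren).

// Hypothetical sketch, assuming the parser's locals; cf. the HasChildQueryParser hunk below.
static Query joinUtilQuery(SearchContext searchContext, ParentChildIndexFieldData parentChildIndexFieldData,
                           String parentType, Query innerQuery, Query toQuery, ScoreMode scoreMode,
                           int minChildren, int maxChildren) throws IOException {
    // use the context's searcher (backed by the engine searcher) directly:
    // no new IndexSearcher(reader), no setQueryCache(null)
    IndexSearcher indexSearcher = searchContext.searcher();
    String joinField = ParentFieldMapper.joinField(parentType);
    IndexParentChildFieldData global = parentChildIndexFieldData.loadGlobal(indexSearcher.getIndexReader());
    MultiDocValues.OrdinalMap ordinalMap = ParentChildIndexFieldData.getOrdinalMap(global, parentType);
    return JoinUtil.createJoinQuery(joinField, innerQuery, toQuery, indexSearcher, scoreMode, ordinalMap,
            minChildren, maxChildren);
}

For the pre-2.0 query implementations (ChildrenQuery, ChildrenConstantScoreQuery, ParentQuery, ParentConstantScoreQuery), the equivalent change is that the collection phase runs on the searcher handed to createWeight, e.g. searcher.search(childQuery, collector), instead of on a freshly wrapped IndexSearcher with the similarity copied over and the query cache disabled.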

View File

@ -197,7 +197,7 @@ public class TransportValidateQueryAction extends TransportBroadcastAction<Valid
valid = false;
error = e.getMessage();
} finally {
SearchContext.current().close();
searchContext.close();
SearchContext.removeCurrent();
}

View File

@ -174,9 +174,8 @@ public class TransportExistsAction extends TransportBroadcastAction<ExistsReques
}
context.preProcess();
try {
Lucene.EarlyTerminatingCollector existsCollector = Lucene.createExistsCollector();
Lucene.exists(context.searcher(), context.query(), existsCollector);
return new ShardExistsResponse(request.shardId(), existsCollector.exists());
boolean exists = Lucene.exists(context, context.query(), Lucene.createExistsCollector());
return new ShardExistsResponse(request.shardId(), exists);
} catch (Exception e) {
throw new QueryPhaseExecutionException(context, "failed to execute exists", e);
}

View File

@ -20,46 +20,14 @@
package org.elasticsearch.common.lucene;
import com.google.common.collect.Iterables;
import org.apache.lucene.analysis.core.KeywordAnalyzer;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.codecs.DocValuesFormat;
import org.apache.lucene.codecs.PostingsFormat;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexCommit;
import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.index.IndexFormatTooNewException;
import org.apache.lucene.index.IndexFormatTooOldException;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.NoMergePolicy;
import org.apache.lucene.index.SegmentCommitInfo;
import org.apache.lucene.index.SegmentInfos;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.FieldDoc;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.LeafCollector;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.SimpleCollector;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.TimeLimitingCollector;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.TopFieldDocs;
import org.apache.lucene.search.TwoPhaseIterator;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexInput;
import org.apache.lucene.store.Lock;
import org.apache.lucene.store.LockObtainFailedException;
import org.apache.lucene.index.*;
import org.apache.lucene.search.*;
import org.apache.lucene.store.*;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.Counter;
@ -75,14 +43,11 @@ import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.index.analysis.AnalyzerScope;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.*;
import static org.elasticsearch.common.lucene.search.NoopCollector.NOOP_COLLECTOR;
@ -379,6 +344,24 @@ public class Lucene {
return false;
}
/**
* Performs an exists (count > 0) query on the searcher from the <code>searchContext</code> for <code>query</code>
* using the given <code>collector</code>
*
* The <code>collector</code> can be instantiated using <code>Lucene.createExistsCollector()</code>
*/
public static boolean exists(SearchContext searchContext, Query query, EarlyTerminatingCollector collector) throws IOException {
collector.reset();
try {
searchContext.searcher().search(query, collector);
} catch (EarlyTerminationException e) {
// ignore, just early termination...
} finally {
searchContext.clearReleasables(SearchContext.Lifetime.COLLECTION);
}
return collector.exists();
}
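A usage sketch for the new helper (it mirrors the TransportExistsAction call site above; searchContext and query stand for whatever the caller already has in scope):
// the collector created by Lucene.createExistsCollector() terminates collection after the first hit
boolean anyHits = Lucene.exists(searchContext, query, Lucene.createExistsCollector());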
/**
* Creates an {@link org.elasticsearch.common.lucene.Lucene.EarlyTerminatingCollector}
* with a threshold of <code>1</code>

View File

@ -255,11 +255,9 @@ public class HasChildQueryParser implements QueryParser {
throw new IllegalArgumentException("Search context is required to be set");
}
IndexSearcher indexSearcher = searchContext.searcher();
String joinField = ParentFieldMapper.joinField(parentType);
IndexReader indexReader = searchContext.searcher().getIndexReader();
IndexSearcher indexSearcher = new IndexSearcher(indexReader);
indexSearcher.setQueryCache(null);
IndexParentChildFieldData indexParentChildFieldData = parentChildIndexFieldData.loadGlobal(indexReader);
IndexParentChildFieldData indexParentChildFieldData = parentChildIndexFieldData.loadGlobal(indexSearcher.getIndexReader());
MultiDocValues.OrdinalMap ordinalMap = ParentChildIndexFieldData.getOrdinalMap(indexParentChildFieldData, parentType);
return JoinUtil.createJoinQuery(joinField, innerQuery, toQuery, indexSearcher, scoreMode, ordinalMap, minChildren, maxChildren);
}

View File

@ -103,11 +103,8 @@ public class ChildrenConstantScoreQuery extends IndexCacheableQuery {
return new BooleanQuery().createWeight(searcher, needsScores);
}
IndexSearcher indexSearcher = new IndexSearcher(searcher.getIndexReader());
indexSearcher.setSimilarity(searcher.getSimilarity(true));
indexSearcher.setQueryCache(null);
ParentOrdCollector collector = new ParentOrdCollector(globalIfd, valueCount, parentType);
indexSearcher.search(childQuery, collector);
searcher.search(childQuery, collector);
final long remaining = collector.foundParents();
if (remaining == 0) {

View File

@ -152,9 +152,6 @@ public final class ChildrenQuery extends IndexCacheableQuery {
// No docs of the specified type exist on this shard
return new BooleanQuery().createWeight(searcher, needsScores);
}
IndexSearcher indexSearcher = new IndexSearcher(searcher.getIndexReader());
indexSearcher.setSimilarity(searcher.getSimilarity(true));
indexSearcher.setQueryCache(null);
boolean abort = true;
long numFoundParents;
@ -193,7 +190,7 @@ public final class ChildrenQuery extends IndexCacheableQuery {
}
}
indexSearcher.search(childQuery, collector);
searcher.search(childQuery, collector);
numFoundParents = collector.foundParents();
if (numFoundParents == 0) {
return new BooleanQuery().createWeight(searcher, needsScores);

View File

@ -22,16 +22,7 @@ import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.FilteredDocIdSetIterator;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;
import org.apache.lucene.search.*;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.LongBitSet;
import org.elasticsearch.common.lucene.IndexCacheableQuery;
@ -92,10 +83,7 @@ public class ParentConstantScoreQuery extends IndexCacheableQuery {
}
ParentOrdsCollector collector = new ParentOrdsCollector(globalIfd, maxOrd, parentType);
IndexSearcher indexSearcher = new IndexSearcher(searcher.getIndexReader());
indexSearcher.setSimilarity(searcher.getSimilarity(true));
indexSearcher.setQueryCache(null);
indexSearcher.search(parentQuery, collector);
searcher.search(parentQuery, collector);
if (collector.parentCount() == 0) {
return new BooleanQuery().createWeight(searcher, needsScores);

View File

@ -129,10 +129,7 @@ public class ParentQuery extends IndexCacheableQuery {
try {
collector = new ParentOrdAndScoreCollector(sc, globalIfd, parentType);
IndexSearcher indexSearcher = new IndexSearcher(sc.searcher().getIndexReader());
indexSearcher.setSimilarity(searcher.getSimilarity(true));
indexSearcher.setQueryCache(null);
indexSearcher.search(parentQuery, collector);
searcher.search(parentQuery, collector);
if (collector.parentCount() == 0) {
return new BooleanQuery().createWeight(searcher, needsScores);
}

View File

@ -19,22 +19,14 @@
package org.elasticsearch.percolator;
import com.carrotsearch.hppc.FloatArrayList;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.LeafCollector;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.SimpleCollector;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.TopScoreDocCollector;
import org.apache.lucene.search.*;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;

View File

@ -19,7 +19,6 @@
package org.elasticsearch.search.aggregations;
import com.google.common.collect.ImmutableMap;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Query;
@ -130,6 +129,8 @@ public class AggregationPhase implements SearchPhase {
context.searcher().search(query, globalsCollector);
} catch (Exception e) {
throw new QueryPhaseExecutionException(context, "Failed to execute global aggregators", e);
} finally {
context.clearReleasables(SearchContext.Lifetime.COLLECTION);
}
}
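
The try/finally added here is a pattern that recurs throughout this commit (ExplainFetchSubPhase, InnerHitsContext and QueryPhase below): because ContextIndexSearcher no longer clears collection-scoped releasables around rewrite, createNormalizedWeight, explain and search (see that file further down), each call site now does it itself. Schematically, with query and collector standing for whatever the phase built:

try {
    context.searcher().search(query, collector);
} finally {
    // always release per-collection resources, even when the search throws
    context.clearReleasables(SearchContext.Lifetime.COLLECTION);
}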

View File

@ -20,7 +20,6 @@ package org.elasticsearch.search.fetch.explain;
import com.google.common.collect.ImmutableMap;
import org.apache.lucene.search.Explanation;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.search.SearchParseElement;
import org.elasticsearch.search.fetch.FetchPhaseExecutionException;
import org.elasticsearch.search.fetch.FetchSubPhase;
@ -68,6 +67,8 @@ public class ExplainFetchSubPhase implements FetchSubPhase {
hitContext.hit().explanation(explanation);
} catch (IOException e) {
throw new FetchPhaseExecutionException(context, "Failed to explain doc [" + hitContext.hit().type() + "#" + hitContext.hit().id() + "]", e);
} finally {
context.clearReleasables(SearchContext.Lifetime.COLLECTION);
}
}
}

View File

@ -135,7 +135,11 @@ public final class InnerHitsContext {
} else {
topDocsCollector = TopScoreDocCollector.create(topN);
}
try {
context.searcher().search(q, topDocsCollector);
} finally {
clearReleasables(Lifetime.COLLECTION);
}
return topDocsCollector.topDocs(from(), size());
}
}
@ -306,7 +310,11 @@ public final class InnerHitsContext {
} else {
topDocsCollector = TopScoreDocCollector.create(topN);
}
try {
context.searcher().search(q, topDocsCollector);
} finally {
clearReleasables(Lifetime.COLLECTION);
}
return topDocsCollector.topDocs(from(), size());
}
}

View File

@ -19,18 +19,14 @@
package org.elasticsearch.search.internal;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermContext;
import org.apache.lucene.search.*;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.search.dfs.AggregatedDfs;
import org.elasticsearch.search.internal.SearchContext.Lifetime;
import java.io.IOException;
import java.util.List;
/**
* Context-aware extension of {@link IndexSearcher}.
@ -42,14 +38,11 @@ public class ContextIndexSearcher extends IndexSearcher implements Releasable {
* AssertingIndexSearcher. */
private final IndexSearcher in;
private final SearchContext searchContext;
private AggregatedDfs aggregatedDfs;
public ContextIndexSearcher(SearchContext searchContext, Engine.Searcher searcher) {
super(searcher.reader());
in = searcher.searcher();
this.searchContext = searchContext;
setSimilarity(searcher.searcher().getSimilarity(true));
setQueryCache(searchContext.indexShard().indexService().cache().query());
setQueryCachingPolicy(searchContext.indexShard().getQueryCachingPolicy());
@ -65,46 +58,23 @@ public class ContextIndexSearcher extends IndexSearcher implements Releasable {
@Override
public Query rewrite(Query original) throws IOException {
try {
return in.rewrite(original);
} catch (Throwable t) {
searchContext.clearReleasables(Lifetime.COLLECTION);
throw ExceptionsHelper.convertToElastic(t);
}
}
@Override
public Weight createNormalizedWeight(Query query, boolean needsScores) throws IOException {
// During tests we prefer to use the wrapped IndexSearcher, because then we use the AssertingIndexSearcher
// it is hacky, because if we perform a dfs search, we don't use the wrapped IndexSearcher...
try {
// if scores are needed and we have dfs data then use it
if (aggregatedDfs != null && needsScores) {
// if scores are needed and we have dfs data then use it
return super.createNormalizedWeight(query, needsScores);
}
return in.createNormalizedWeight(query, needsScores);
} catch (Throwable t) {
searchContext.clearReleasables(Lifetime.COLLECTION);
throw ExceptionsHelper.convertToElastic(t);
}
}
@Override
public Explanation explain(Query query, int doc) throws IOException {
try {
return in.explain(query, doc);
} finally {
searchContext.clearReleasables(Lifetime.COLLECTION);
}
}
@Override
protected void search(List<LeafReaderContext> leaves, Weight weight, Collector collector) throws IOException {
try {
super.search(leaves, weight, collector);
} finally {
searchContext.clearReleasables(Lifetime.COLLECTION);
}
}
@Override

View File

@ -21,7 +21,6 @@ package org.elasticsearch.search.internal;
import com.google.common.collect.Iterables;
import com.google.common.collect.Multimap;
import com.google.common.collect.MultimapBuilder;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Sort;
@ -46,7 +45,6 @@ import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.Scroll;
import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.search.aggregations.SearchContextAggregations;
import org.elasticsearch.search.dfs.DfsSearchResult;

View File

@ -20,24 +20,8 @@
package org.elasticsearch.search.query;
import com.google.common.collect.ImmutableMap;
import org.apache.lucene.queries.MinDocQuery;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.FieldDoc;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MultiCollector;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.TimeLimitingCollector;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.TopDocsCollector;
import org.apache.lucene.search.TopFieldCollector;
import org.apache.lucene.search.TopScoreDocCollector;
import org.apache.lucene.search.TotalHitCountCollector;
import org.apache.lucene.search.Weight;
import org.apache.lucene.search.*;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.Lucene;
@ -173,8 +157,8 @@ public class QueryPhase implements SearchPhase {
// skip to the desired doc and stop collecting after ${size} matches
if (scrollContext.lastEmittedDoc != null) {
BooleanQuery bq = new BooleanQuery();
bq.add(query, Occur.MUST);
bq.add(new MinDocQuery(lastEmittedDoc.doc + 1), Occur.FILTER);
bq.add(query, BooleanClause.Occur.MUST);
bq.add(new MinDocQuery(lastEmittedDoc.doc + 1), BooleanClause.Occur.FILTER);
query = bq;
}
searchContext.terminateAfter(numDocs);
@ -264,13 +248,15 @@ public class QueryPhase implements SearchPhase {
}
try {
searcher.search(query, collector);
searchContext.searcher().search(query, collector);
} catch (TimeLimitingCollector.TimeExceededException e) {
assert timeoutSet : "TimeExceededException thrown even though timeout wasn't set";
searchContext.queryResult().searchTimedOut(true);
} catch (Lucene.EarlyTerminationException e) {
assert terminateAfterSet : "EarlyTerminationException thrown even though terminateAfter wasn't set";
searchContext.queryResult().terminatedEarly(true);
} finally {
searchContext.clearReleasables(SearchContext.Lifetime.COLLECTION);
}
if (terminateAfterSet && searchContext.queryResult().terminatedEarly() == null) {
searchContext.queryResult().terminatedEarly(false);

View File

@ -20,26 +20,12 @@ package org.elasticsearch.index.search.child;
import com.carrotsearch.hppc.IntHashSet;
import com.carrotsearch.hppc.ObjectObjectHashMap;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.SlowCompositeReaderWrapper;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.QueryUtils;
import org.apache.lucene.search.QueryWrapperFilter;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.index.*;
import org.apache.lucene.search.*;
import org.apache.lucene.search.join.BitDocIdSetFilter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.FixedBitSet;
@ -53,7 +39,6 @@ import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.search.internal.ContextIndexSearcher;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.TestSearchContext;
import org.junit.AfterClass;
@ -65,11 +50,7 @@ import java.util.NavigableSet;
import java.util.Random;
import java.util.TreeSet;
import static org.elasticsearch.index.query.QueryBuilders.constantScoreQuery;
import static org.elasticsearch.index.query.QueryBuilders.filteredQuery;
import static org.elasticsearch.index.query.QueryBuilders.hasChildQuery;
import static org.elasticsearch.index.query.QueryBuilders.notQuery;
import static org.elasticsearch.index.query.QueryBuilders.termQuery;
import static org.elasticsearch.index.query.QueryBuilders.*;
import static org.hamcrest.Matchers.equalTo;
public class ChildrenConstantScoreQueryTests extends AbstractChildTestCase {
@ -119,9 +100,9 @@ public class ChildrenConstantScoreQueryTests extends AbstractChildTestCase {
IndexReader indexReader = DirectoryReader.open(indexWriter.w, false);
IndexSearcher searcher = new IndexSearcher(indexReader);
((TestSearchContext) SearchContext.current()).setSearcher(new ContextIndexSearcher(
SearchContext.current(), new Engine.Searcher(ChildrenConstantScoreQueryTests.class.getSimpleName(), searcher)
));
((TestSearchContext) SearchContext.current()).setSearcher(
new Engine.Searcher(ChildrenConstantScoreQueryTests.class.getSimpleName(), searcher)
);
TermQuery childQuery = new TermQuery(new Term("field1", "value" + (1 + random().nextInt(3))));
BitDocIdSetFilter parentFilter = wrapWithBitSetFilter(new QueryWrapperFilter(new TermQuery(new Term(TypeFieldMapper.NAME, "parent"))));
@ -214,7 +195,7 @@ public class ChildrenConstantScoreQueryTests extends AbstractChildTestCase {
Engine.Searcher engineSearcher = new Engine.Searcher(
ChildrenConstantScoreQueryTests.class.getSimpleName(), searcher
);
((TestSearchContext) SearchContext.current()).setSearcher(new ContextIndexSearcher(SearchContext.current(), engineSearcher));
((TestSearchContext) SearchContext.current()).setSearcher(engineSearcher);
int max = numUniqueChildValues / 4;
for (int i = 0; i < max; i++) {
@ -243,7 +224,7 @@ public class ChildrenConstantScoreQueryTests extends AbstractChildTestCase {
engineSearcher = new Engine.Searcher(
ChildrenConstantScoreQueryTests.class.getSimpleName(), searcher
);
((TestSearchContext) SearchContext.current()).setSearcher(new ContextIndexSearcher(SearchContext.current(), engineSearcher));
((TestSearchContext) SearchContext.current()).setSearcher(engineSearcher);
}
String childValue = childValues[random().nextInt(numUniqueChildValues)];

View File

@ -22,34 +22,13 @@ import com.carrotsearch.hppc.FloatArrayList;
import com.carrotsearch.hppc.IntHashSet;
import com.carrotsearch.hppc.ObjectObjectHashMap;
import com.carrotsearch.randomizedtesting.generators.RandomInts;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.DoubleField;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.SlowCompositeReaderWrapper;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.LeafCollector;
import org.apache.lucene.search.MultiCollector;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.QueryUtils;
import org.apache.lucene.search.QueryWrapperFilter;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.TopScoreDocCollector;
import org.apache.lucene.index.*;
import org.apache.lucene.search.*;
import org.apache.lucene.search.join.BitDocIdSetFilter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.FixedBitSet;
@ -65,7 +44,6 @@ import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.functionscore.fieldvaluefactor.FieldValueFactorFunctionBuilder;
import org.elasticsearch.search.internal.ContextIndexSearcher;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.TestSearchContext;
import org.junit.AfterClass;
@ -73,19 +51,9 @@ import org.junit.BeforeClass;
import org.junit.Test;
import java.io.IOException;
import java.util.Locale;
import java.util.Map;
import java.util.NavigableMap;
import java.util.Random;
import java.util.TreeMap;
import java.util.*;
import static org.elasticsearch.index.query.QueryBuilders.constantScoreQuery;
import static org.elasticsearch.index.query.QueryBuilders.filteredQuery;
import static org.elasticsearch.index.query.QueryBuilders.functionScoreQuery;
import static org.elasticsearch.index.query.QueryBuilders.hasChildQuery;
import static org.elasticsearch.index.query.QueryBuilders.notQuery;
import static org.elasticsearch.index.query.QueryBuilders.termQuery;
import static org.elasticsearch.index.query.QueryBuilders.typeQuery;
import static org.elasticsearch.index.query.QueryBuilders.*;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.lessThanOrEqualTo;
@ -195,7 +163,7 @@ public class ChildrenQueryTests extends AbstractChildTestCase {
Engine.Searcher engineSearcher = new Engine.Searcher(
ChildrenQueryTests.class.getSimpleName(), searcher
);
((TestSearchContext) SearchContext.current()).setSearcher(new ContextIndexSearcher(SearchContext.current(), engineSearcher));
((TestSearchContext) SearchContext.current()).setSearcher(engineSearcher);
int max = numUniqueChildValues / 4;
for (int i = 0; i < max; i++) {
@ -224,7 +192,7 @@ public class ChildrenQueryTests extends AbstractChildTestCase {
engineSearcher = new Engine.Searcher(
ChildrenConstantScoreQueryTests.class.getSimpleName(), searcher
);
((TestSearchContext) SearchContext.current()).setSearcher(new ContextIndexSearcher(SearchContext.current(), engineSearcher));
((TestSearchContext) SearchContext.current()).setSearcher(engineSearcher);
}
String childValue = childValues[random().nextInt(numUniqueChildValues)];
@ -385,7 +353,7 @@ public class ChildrenQueryTests extends AbstractChildTestCase {
// setup to read the parent/child map
Engine.Searcher engineSearcher = new Engine.Searcher(ChildrenQueryTests.class.getSimpleName(), searcher);
((TestSearchContext)context).setSearcher(new ContextIndexSearcher(context, engineSearcher));
((TestSearchContext)context).setSearcher(engineSearcher);
// child query that returns the score as the value of "childScore" for each child document, with the parent's score determined by the score type
QueryBuilder childQueryBuilder = functionScoreQuery(typeQuery("child")).add(new FieldValueFactorFunctionBuilder(CHILD_SCORE_NAME));

View File

@ -20,26 +20,12 @@ package org.elasticsearch.index.search.child;
import com.carrotsearch.hppc.IntIntHashMap;
import com.carrotsearch.hppc.ObjectObjectHashMap;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.SlowCompositeReaderWrapper;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.QueryUtils;
import org.apache.lucene.search.QueryWrapperFilter;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.index.*;
import org.apache.lucene.search.*;
import org.apache.lucene.search.join.BitDocIdSetFilter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.FixedBitSet;
@ -52,7 +38,6 @@ import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.search.internal.ContextIndexSearcher;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.TestSearchContext;
import org.junit.AfterClass;
@ -64,11 +49,7 @@ import java.util.NavigableSet;
import java.util.Random;
import java.util.TreeSet;
import static org.elasticsearch.index.query.QueryBuilders.constantScoreQuery;
import static org.elasticsearch.index.query.QueryBuilders.filteredQuery;
import static org.elasticsearch.index.query.QueryBuilders.hasParentQuery;
import static org.elasticsearch.index.query.QueryBuilders.notQuery;
import static org.elasticsearch.index.query.QueryBuilders.termQuery;
import static org.elasticsearch.index.query.QueryBuilders.*;
/**
*/
@ -173,7 +154,7 @@ public class ParentConstantScoreQueryTests extends AbstractChildTestCase {
Engine.Searcher engineSearcher = new Engine.Searcher(
ParentConstantScoreQuery.class.getSimpleName(), searcher
);
((TestSearchContext) SearchContext.current()).setSearcher(new ContextIndexSearcher(SearchContext.current(), engineSearcher));
((TestSearchContext) SearchContext.current()).setSearcher(engineSearcher);
int max = numUniqueParentValues / 4;
for (int i = 0; i < max; i++) {
@ -200,7 +181,7 @@ public class ParentConstantScoreQueryTests extends AbstractChildTestCase {
engineSearcher = new Engine.Searcher(
ParentConstantScoreQueryTests.class.getSimpleName(), searcher
);
((TestSearchContext) SearchContext.current()).setSearcher(new ContextIndexSearcher(SearchContext.current(), engineSearcher));
((TestSearchContext) SearchContext.current()).setSearcher(engineSearcher);
}
String parentValue = parentValues[random().nextInt(numUniqueParentValues)];

View File

@ -21,30 +21,12 @@ package org.elasticsearch.index.search.child;
import com.carrotsearch.hppc.FloatArrayList;
import com.carrotsearch.hppc.IntIntHashMap;
import com.carrotsearch.hppc.ObjectObjectHashMap;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.SlowCompositeReaderWrapper;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.LeafCollector;
import org.apache.lucene.search.MultiCollector;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.QueryUtils;
import org.apache.lucene.search.QueryWrapperFilter;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopScoreDocCollector;
import org.apache.lucene.index.*;
import org.apache.lucene.search.*;
import org.apache.lucene.search.join.BitDocIdSetFilter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.FixedBitSet;
@ -57,7 +39,6 @@ import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.search.internal.ContextIndexSearcher;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.TestSearchContext;
import org.junit.AfterClass;
@ -70,11 +51,7 @@ import java.util.NavigableMap;
import java.util.Random;
import java.util.TreeMap;
import static org.elasticsearch.index.query.QueryBuilders.constantScoreQuery;
import static org.elasticsearch.index.query.QueryBuilders.filteredQuery;
import static org.elasticsearch.index.query.QueryBuilders.hasParentQuery;
import static org.elasticsearch.index.query.QueryBuilders.notQuery;
import static org.elasticsearch.index.query.QueryBuilders.termQuery;
import static org.elasticsearch.index.query.QueryBuilders.*;
public class ParentQueryTests extends AbstractChildTestCase {
@ -175,7 +152,7 @@ public class ParentQueryTests extends AbstractChildTestCase {
Engine.Searcher engineSearcher = new Engine.Searcher(
ParentQueryTests.class.getSimpleName(), searcher
);
((TestSearchContext) SearchContext.current()).setSearcher(new ContextIndexSearcher(SearchContext.current(), engineSearcher));
((TestSearchContext) SearchContext.current()).setSearcher(engineSearcher);
int max = numUniqueParentValues / 4;
for (int i = 0; i < max; i++) {
@ -202,7 +179,7 @@ public class ParentQueryTests extends AbstractChildTestCase {
engineSearcher = new Engine.Searcher(
ParentConstantScoreQueryTests.class.getSimpleName(), searcher
);
((TestSearchContext) SearchContext.current()).setSearcher(new ContextIndexSearcher(SearchContext.current(), engineSearcher));
((TestSearchContext) SearchContext.current()).setSearcher(engineSearcher);
}
String parentValue = parentValues[random().nextInt(numUniqueParentValues)];

View File

@ -36,6 +36,7 @@ import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
@ -284,8 +285,8 @@ public class TestSearchContext extends SearchContext {
return searcher;
}
public void setSearcher(ContextIndexSearcher searcher) {
this.searcher = searcher;
public void setSearcher(Engine.Searcher searcher) {
this.searcher = new ContextIndexSearcher(this, searcher);
}
@Override