Clarify SourceLookup sharing across fetch subphases. (#60484)

The `SourceLookup` class provides access to the _source for a particular
document, specified through `SourceLookup#setSegmentAndDocument`. Previously
the search context contained a single `SourceLookup` that was shared between
different fetch subphases. It was hard to reason about its state: is
`SourceLookup` set to the expected document? Is the _source already loaded and
available?

Instead of using a global source lookup, the fetch hit context now provides
access to a lookup that is set to load from the hit document.

This refactor closes #31000, since the same `SourceLookup` is no longer shared
between the 'fetch _source phase' and script execution.
Julie Tibshirani, 2020-07-30 13:22:31 -07:00, committed by GitHub
commit dfd7f226f0 (parent da69644498)
19 changed files with 114 additions and 115 deletions
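Before the per-file diffs, here is the access pattern the change establishes for a subphase. This is a minimal sketch assembled from the hunks below, not code from the commit; the field name "title" and the surrounding variables are placeholders:

    // Inside a FetchSubPhase#hitExecute(SearchContext context, HitContext hitContext) body:

    // Before: every subphase shared the single lookup hanging off the search context
    // and had to (re)position it itself, hoping no earlier subphase left stale state:
    //   SourceLookup source = context.lookup().source();
    //   source.setSegmentAndDocument(hitContext.readerContext(), hitContext.docId());

    // After: the hit context owns a lookup that FetchPhase has already pointed at this
    // hit's segment and doc ID, and usually preloaded with its _source:
    SourceLookup source = hitContext.sourceLookup();
    List<Object> titles = source.extractRawValues("title"); // placeholder field name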

PercolatorHighlightSubFetchPhase.java

@@ -98,12 +98,11 @@ final class PercolatorHighlightSubFetchPhase implements FetchSubPhase {
SearchHighlightContext highlight = new SearchHighlightContext(context.highlight().fields(), true);
QueryShardContext shardContext = new QueryShardContext(context.getQueryShardContext());
shardContext.freezeContext();
shardContext.lookup().source().setSegmentAndDocument(percolatorLeafReaderContext, slot);
shardContext.lookup().source().setSource(document);
hitContext.reset(
new SearchHit(slot, "unknown", new Text(hit.getType()), Collections.emptyMap(), Collections.emptyMap()),
percolatorLeafReaderContext, slot, percolatorIndexSearcher
);
hitContext.sourceLookup().setSource(document);
hitContext.cache().clear();
highlightPhase.hitExecute(context.shardTarget(), shardContext, query, highlight, hitContext);
for (Map.Entry<String, HighlightField> entry : hitContext.hit().getHighlightFields().entrySet()) {

AnnotatedTextHighlighter.java

@@ -26,7 +26,6 @@ import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.annotatedtext.AnnotatedTextFieldMapper.AnnotatedHighlighterAnalyzer;
import org.elasticsearch.index.mapper.annotatedtext.AnnotatedTextFieldMapper.AnnotatedText;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.fetch.FetchSubPhase.HitContext;
import org.elasticsearch.search.fetch.subphase.highlight.SearchHighlightContext.Field;
@@ -47,10 +46,9 @@ public class AnnotatedTextHighlighter extends UnifiedHighlighter {
@Override
protected List<Object> loadFieldValues(MappedFieldType fieldType,
Field field,
QueryShardContext context,
HitContext hitContext,
boolean forceSource) throws IOException {
List<Object> fieldValues = super.loadFieldValues(fieldType, field, context, hitContext, forceSource);
List<Object> fieldValues = super.loadFieldValues(fieldType, field, hitContext, forceSource);
String[] fieldValuesAsString = fieldValues.toArray(new String[fieldValues.size()]);
AnnotatedText[] annotations = new AnnotatedText[fieldValuesAsString.length];

InnerHitContextBuilder.java

@@ -96,7 +96,7 @@ public abstract class InnerHitContextBuilder {
for (SearchSourceBuilder.ScriptField field : innerHitBuilder.getScriptFields()) {
QueryShardContext innerContext = innerHitsContext.getQueryShardContext();
FieldScript.Factory factory = innerContext.compile(field.script(), FieldScript.CONTEXT);
FieldScript.LeafFactory fieldScript = factory.newFactory(field.script().getParams(), innerHitsContext.lookup());
FieldScript.LeafFactory fieldScript = factory.newFactory(field.script().getParams(), innerContext.lookup());
innerHitsContext.scriptFields().add(new org.elasticsearch.search.fetch.subphase.ScriptFieldsContext.ScriptField(
field.fieldName(), fieldScript, field.ignoreFailure()));
}

SearchService.java

@@ -96,6 +96,7 @@ import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.internal.SearchContext.Lifetime;
import org.elasticsearch.search.internal.SearchContextId;
import org.elasticsearch.search.internal.ShardSearchRequest;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.profile.Profilers;
import org.elasticsearch.search.query.QueryPhase;
import org.elasticsearch.search.query.QuerySearchRequest;
@@ -943,7 +944,8 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv
}
for (org.elasticsearch.search.builder.SearchSourceBuilder.ScriptField field : source.scriptFields()) {
FieldScript.Factory factory = scriptService.compile(field.script(), FieldScript.CONTEXT);
FieldScript.LeafFactory searchScript = factory.newFactory(field.script().getParams(), context.lookup());
SearchLookup lookup = context.getQueryShardContext().lookup();
FieldScript.LeafFactory searchScript = factory.newFactory(field.script().getParams(), lookup);
context.scriptFields().add(new ScriptField(field.fieldName(), searchScript, field.ignoreFailure()));
}
}

FetchPhase.java

@@ -164,19 +164,18 @@ public class FetchPhase implements SearchPhase {
LeafReaderContext subReaderContext = context.searcher().getIndexReader().leaves().get(readerIndex);
int subDocId = docId - subReaderContext.docBase;
final SearchHit searchHit;
int rootDocId = findRootDocumentIfNested(context, subReaderContext, subDocId);
if (rootDocId != -1) {
searchHit = createNestedSearchHit(context, docId, subDocId, rootDocId,
prepareNestedHitContext(hitContext, context, docId, subDocId, rootDocId,
storedToRequestedFields, subReaderContext);
} else {
searchHit = createSearchHit(context, fieldsVisitor, docId, subDocId,
prepareHitContext(hitContext, context, fieldsVisitor, docId, subDocId,
storedToRequestedFields, subReaderContext);
}
SearchHit searchHit = hitContext.hit();
sortedHits[index] = searchHit;
hits[docs[index].index] = searchHit;
hitContext.reset(searchHit, subReaderContext, subDocId, context.searcher());
for (FetchSubPhase fetchSubPhase : fetchSubPhases) {
fetchSubPhase.hitExecute(context, hitContext);
}
@@ -226,43 +225,64 @@ public class FetchPhase implements SearchPhase {
return -1;
}
private SearchHit createSearchHit(SearchContext context,
FieldsVisitor fieldsVisitor,
int docId,
int subDocId,
Map<String, Set<String>> storedToRequestedFields,
LeafReaderContext subReaderContext) {
/**
* Resets the provided {@link FetchSubPhase.HitContext} with information on the current
* document. This includes the following:
* - Adding an initial {@link SearchHit} instance.
* - Loading the document source and setting it on {@link SourceLookup}. This allows
* fetch subphases that use the hit context to access the preloaded source.
*/
private void prepareHitContext(FetchSubPhase.HitContext hitContext,
SearchContext context,
FieldsVisitor fieldsVisitor,
int docId,
int subDocId,
Map<String, Set<String>> storedToRequestedFields,
LeafReaderContext subReaderContext) {
DocumentMapper documentMapper = context.mapperService().documentMapper();
Text typeText = documentMapper.typeText();
if (fieldsVisitor == null) {
return new SearchHit(docId, null, typeText, null, null);
}
loadStoredFields(context.shardTarget(), subReaderContext, fieldsVisitor, subDocId);
fieldsVisitor.postProcess(context.mapperService());
SearchHit searchHit;
if (fieldsVisitor.fields().isEmpty() == false) {
Map<String, DocumentField> docFields = new HashMap<>();
Map<String, DocumentField> metaFields = new HashMap<>();
fillDocAndMetaFields(context, fieldsVisitor, storedToRequestedFields, docFields, metaFields);
searchHit = new SearchHit(docId, fieldsVisitor.uid().id(), typeText, docFields, metaFields);
SearchHit hit = new SearchHit(docId, null, typeText, null, null);
hitContext.reset(hit, subReaderContext, subDocId, context.searcher());
} else {
searchHit = new SearchHit(docId, fieldsVisitor.uid().id(), typeText, emptyMap(), emptyMap());
SearchHit hit;
loadStoredFields(context.shardTarget(), subReaderContext, fieldsVisitor, subDocId);
fieldsVisitor.postProcess(context.mapperService());
Uid uid = fieldsVisitor.uid();
if (fieldsVisitor.fields().isEmpty() == false) {
Map<String, DocumentField> docFields = new HashMap<>();
Map<String, DocumentField> metaFields = new HashMap<>();
fillDocAndMetaFields(context, fieldsVisitor, storedToRequestedFields, docFields, metaFields);
hit = new SearchHit(docId, uid.id(), typeText, docFields, metaFields);
} else {
hit = new SearchHit(docId, uid.id(), typeText, emptyMap(), emptyMap());
}
hitContext.reset(hit, subReaderContext, subDocId, context.searcher());
if (fieldsVisitor.source() != null) {
hitContext.sourceLookup().setSource(fieldsVisitor.source());
}
}
// Set _source if requested.
SourceLookup sourceLookup = context.lookup().source();
sourceLookup.setSegmentAndDocument(subReaderContext, subDocId);
if (fieldsVisitor.source() != null) {
sourceLookup.setSource(fieldsVisitor.source());
}
return searchHit;
}
private SearchHit createNestedSearchHit(SearchContext context,
int nestedTopDocId,
int nestedSubDocId,
int rootSubDocId,
Map<String, Set<String>> storedToRequestedFields,
LeafReaderContext subReaderContext) throws IOException {
/**
* Resets the provided {@link FetchSubPhase.HitContext} with information on the current
* nested document. This includes the following:
* - Adding an initial {@link SearchHit} instance.
* - Loading the document source, filtering it based on the nested document ID, then
* setting it on {@link SourceLookup}. This allows fetch subphases that use the hit
* context to access the preloaded source.
*/
@SuppressWarnings("unchecked")
private void prepareNestedHitContext(FetchSubPhase.HitContext hitContext,
SearchContext context,
int nestedTopDocId,
int nestedSubDocId,
int rootSubDocId,
Map<String, Set<String>> storedToRequestedFields,
LeafReaderContext subReaderContext) throws IOException {
// Also if highlighting is requested on nested documents we need to fetch the _source from the root document,
// otherwise highlighting will attempt to fetch the _source from the nested doc, which will fail,
// because the entire _source is only stored with the root document.
@@ -295,16 +315,19 @@ public class FetchPhase implements SearchPhase {
}
DocumentMapper documentMapper = context.mapperService().documentMapper();
SourceLookup sourceLookup = context.lookup().source();
sourceLookup.setSegmentAndDocument(subReaderContext, nestedSubDocId);
Text typeText = documentMapper.typeText();
ObjectMapper nestedObjectMapper = documentMapper.findNestedObjectMapper(nestedSubDocId, context, subReaderContext);
assert nestedObjectMapper != null;
SearchHit.NestedIdentity nestedIdentity =
getInternalNestedIdentity(context, nestedSubDocId, subReaderContext, context.mapperService(), nestedObjectMapper);
SearchHit hit = new SearchHit(nestedTopDocId, uid.id(), typeText, nestedIdentity, docFields, metaFields);
hitContext.reset(hit, subReaderContext, nestedSubDocId, context.searcher());
if (source != null) {
Tuple<XContentType, Map<String, Object>> tuple = XContentHelper.convertToMap(source, true);
XContentType contentType = tuple.v1();
Map<String, Object> sourceAsMap = tuple.v2();
// Isolate the nested json array object that matches with nested hit and wrap it back into the same json
@@ -347,11 +370,11 @@ public class FetchPhase implements SearchPhase {
current = next;
}
}
context.lookup().source().setSource(nestedSourceAsMap);
XContentType contentType = tuple.v1();
context.lookup().source().setSourceContentType(contentType);
hitContext.sourceLookup().setSource(nestedSourceAsMap);
hitContext.sourceLookup().setSourceContentType(contentType);
}
return new SearchHit(nestedTopDocId, uid.id(), documentMapper.typeText(), nestedIdentity, docFields, metaFields);
}
private SearchHit.NestedIdentity getInternalNestedIdentity(SearchContext context, int nestedSubDocId,
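The three hunks above interleave old and new lines, so the resulting per-hit sequence is worth restating; a simplified sketch stitched from this diff (setup and error handling elided):

    int rootDocId = findRootDocumentIfNested(context, subReaderContext, subDocId);
    if (rootDocId != -1) {
        // Nested hit: builds the SearchHit, resets the hit context, loads the root
        // _source, trims it to this nested document, and seeds hitContext.sourceLookup().
        prepareNestedHitContext(hitContext, context, docId, subDocId, rootDocId,
            storedToRequestedFields, subReaderContext);
    } else {
        // Top-level hit: builds the SearchHit, resets the hit context, and seeds
        // hitContext.sourceLookup() whenever the stored fields included _source.
        prepareHitContext(hitContext, context, fieldsVisitor, docId, subDocId,
            storedToRequestedFields, subReaderContext);
    }
    SearchHit searchHit = hitContext.hit();
    sortedHits[index] = searchHit;
    hits[docs[index].index] = searchHit;
    // Each subphase now sees a lookup guaranteed to be positioned on this exact hit.
    for (FetchSubPhase fetchSubPhase : fetchSubPhases) {
        fetchSubPhase.hitExecute(context, hitContext);
    }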

FetchSubPhase.java

@@ -24,6 +24,7 @@ import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.IndexSearcher;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.lookup.SourceLookup;
import java.io.IOException;
import java.util.HashMap;
@@ -39,6 +40,7 @@ public interface FetchSubPhase {
private IndexSearcher searcher;
private LeafReaderContext readerContext;
private int docId;
private final SourceLookup sourceLookup = new SourceLookup();
private Map<String, Object> cache;
public void reset(SearchHit hit, LeafReaderContext context, int docId, IndexSearcher searcher) {
@@ -46,6 +48,7 @@ public interface FetchSubPhase {
this.readerContext = context;
this.docId = docId;
this.searcher = searcher;
this.sourceLookup.setSegmentAndDocument(context, docId);
}
public SearchHit hit() {
@@ -64,6 +67,17 @@ public interface FetchSubPhase {
return docId;
}
/**
* This lookup provides access to the source for the given hit document. Note
* that it should always be set to the correct doc ID and {@link LeafReaderContext}.
*
* In most cases, the hit document's source is loaded eagerly at the start of the
* {@link FetchPhase}. This lookup will contain the preloaded source.
*/
public SourceLookup sourceLookup() {
return sourceLookup;
}
public IndexReader topLevelReader() {
return searcher.getIndexReader();
}
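For callers that drive hits outside the normal fetch loop (the percolator above, the unit test further down), the contract is: `reset` positions the lookup, `setSource` optionally preloads it. A condensed usage sketch; `searchHit`, `leafReaderContext`, `docId`, `indexSearcher`, and `documentSource` stand in for real values:

    FetchSubPhase.HitContext hitContext = new FetchSubPhase.HitContext();

    // reset(...) re-points sourceLookup at (leafReaderContext, docId), so state from a
    // previously processed hit can never leak through.
    hitContext.reset(searchHit, leafReaderContext, docId, indexSearcher);

    // Optional: if the _source is already in hand (for example an in-memory percolator
    // document), preload it so subphases skip the stored-field load.
    hitContext.sourceLookup().setSource(documentSource);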

FetchFieldsPhase.java

@@ -44,7 +44,7 @@ public final class FetchFieldsPhase implements FetchSubPhase {
}
SearchHit hit = hitContext.hit();
SourceLookup sourceLookup = context.lookup().source();
SourceLookup sourceLookup = hitContext.sourceLookup();
FieldValueRetriever fieldValueRetriever = fetchFieldsContext.fieldValueRetriever();
Set<String> ignoredFields = getIgnoredFields(hit);

FetchSourcePhase.java

@@ -39,7 +39,7 @@ public final class FetchSourcePhase implements FetchSubPhase {
return;
}
final boolean nestedHit = hitContext.hit().getNestedIdentity() != null;
SourceLookup source = context.lookup().source();
SourceLookup source = hitContext.sourceLookup();
FetchSourceContext fetchSourceContext = context.fetchSourceContext();
assert fetchSourceContext.fetchSource();
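This subphase is the fetch side of #31000: it now filters the hit's own preloaded lookup instead of one that script execution may also have repositioned. A rough sketch of the idea; the `filter` call reflects how this subphase applied source filtering in this era of the codebase, but the snippet is an illustration, not literal source:

    SourceLookup source = hitContext.sourceLookup();
    // Apply the requested includes/excludes to this hit's source. No script or other
    // subphase shares this lookup, so its state is exactly what FetchPhase loaded
    // for the current document.
    Object filteredSource = source.filter(fetchSourceContext);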

InnerHitsPhase.java

@@ -66,10 +66,7 @@ public final class InnerHitsPhase implements FetchSubPhase {
}
innerHits.docIdsToLoad(docIdsToLoad, 0, docIdsToLoad.length);
innerHits.setUid(new Uid(hit.getType(), hit.getId()));
innerHits.lookup().source().setSource(context.lookup().source().internalSourceRef());
if (context.lookup().source().source() != null) {
innerHits.lookup().source().setSource(context.lookup().source().source());
}
fetchPhase.execute(innerHits);
FetchSearchResult fetchResult = innerHits.fetchResult();
SearchHit[] internalHits = fetchResult.fetchResult().hits().getHits();

FastVectorHighlighter.java

@@ -39,7 +39,6 @@ import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.util.CollectionUtils;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.TextSearchInfo;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.fetch.FetchPhaseExecutionException;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.fetch.subphase.highlight.SearchHighlightContext.Field;
@@ -71,7 +70,6 @@ public class FastVectorHighlighter implements Highlighter {
@Override
public HighlightField highlight(FieldHighlightContext fieldContext) {
SearchHighlightContext.Field field = fieldContext.field;
QueryShardContext context = fieldContext.context;
FetchSubPhase.HitContext hitContext = fieldContext.hitContext;
MappedFieldType fieldType = fieldContext.fieldType;
boolean forceSource = fieldContext.forceSource;
@@ -104,7 +102,7 @@ public class FastVectorHighlighter implements Highlighter {
fragmentsBuilder = new SimpleFragmentsBuilder(fieldType, field.fieldOptions().preTags(),
field.fieldOptions().postTags(), boundaryScanner);
} else {
fragmentsBuilder = new SourceSimpleFragmentsBuilder(fieldType, context,
fragmentsBuilder = new SourceSimpleFragmentsBuilder(fieldType, hitContext.sourceLookup(),
field.fieldOptions().preTags(), field.fieldOptions().postTags(), boundaryScanner);
}
} else {
@@ -115,7 +113,7 @@
fragmentsBuilder = new ScoreOrderFragmentsBuilder(field.fieldOptions().preTags(),
field.fieldOptions().postTags(), boundaryScanner);
} else {
fragmentsBuilder = new SourceScoreOrderFragmentsBuilder(fieldType, context,
fragmentsBuilder = new SourceScoreOrderFragmentsBuilder(fieldType, hitContext.sourceLookup(),
field.fieldOptions().preTags(), field.fieldOptions().postTags(), boundaryScanner);
}
} else {
@@ -124,8 +122,8 @@
field.fieldOptions().postTags(), boundaryScanner);
} else {
fragmentsBuilder =
new SourceSimpleFragmentsBuilder(fieldType, context, field.fieldOptions().preTags(),
field.fieldOptions().postTags(), boundaryScanner);
new SourceSimpleFragmentsBuilder(fieldType, hitContext.sourceLookup(),
field.fieldOptions().preTags(), field.fieldOptions().postTags(), boundaryScanner);
}
}
}

HighlightUtils.java

@@ -24,7 +24,6 @@ import org.apache.lucene.search.highlight.SimpleHTMLEncoder;
import org.elasticsearch.index.fieldvisitor.CustomFieldsVisitor;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.TextSearchInfo;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.lookup.SourceLookup;
@@ -48,7 +47,6 @@ public final class HighlightUtils {
* Load field values for highlighting.
*/
public static List<Object> loadFieldValues(MappedFieldType fieldType,
QueryShardContext context,
FetchSubPhase.HitContext hitContext,
boolean forceSource) throws IOException {
//percolator needs to always load from source, thus it sets the global force source to true
@@ -63,8 +61,7 @@
textsToHighlight = Collections.emptyList();
}
} else {
SourceLookup sourceLookup = context.lookup().source();
sourceLookup.setSegmentAndDocument(hitContext.readerContext(), hitContext.docId());
SourceLookup sourceLookup = hitContext.sourceLookup();
textsToHighlight = sourceLookup.extractRawValues(fieldType.name());
}
assert textsToHighlight != null;

PlainHighlighter.java

@@ -112,7 +112,7 @@ public class PlainHighlighter implements Highlighter {
final int maxAnalyzedOffset = context.getIndexSettings().getHighlightMaxAnalyzedOffset();
try {
textsToHighlight = HighlightUtils.loadFieldValues(fieldType, context, hitContext, fieldContext.forceSource);
textsToHighlight = HighlightUtils.loadFieldValues(fieldType, hitContext, fieldContext.forceSource);
for (Object textToHighlight : textsToHighlight) {
String text = convertFieldValue(fieldType, textToHighlight);

SourceScoreOrderFragmentsBuilder.java

@@ -21,13 +21,11 @@ package org.elasticsearch.search.fetch.subphase.highlight;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.highlight.Encoder;
import org.apache.lucene.search.vectorhighlight.BoundaryScanner;
import org.apache.lucene.search.vectorhighlight.FieldFragList.WeightedFragInfo;
import org.apache.lucene.search.vectorhighlight.ScoreOrderFragmentsBuilder;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.lookup.SourceLookup;
import java.io.IOException;
@@ -36,25 +34,21 @@ import java.util.List;
public class SourceScoreOrderFragmentsBuilder extends ScoreOrderFragmentsBuilder {
private final MappedFieldType fieldType;
private final QueryShardContext context;
private final SourceLookup sourceLookup;
public SourceScoreOrderFragmentsBuilder(MappedFieldType fieldType,
QueryShardContext context,
SourceLookup sourceLookup,
String[] preTags,
String[] postTags,
BoundaryScanner boundaryScanner) {
super(preTags, postTags, boundaryScanner);
this.fieldType = fieldType;
this.context = context;
this.sourceLookup = sourceLookup;
}
@Override
protected Field[] getFields(IndexReader reader, int docId, String fieldName) throws IOException {
// we know it's a low level reader and a matching docId, since that's how we call the highlighter
SourceLookup sourceLookup = context.lookup().source();
sourceLookup.setSegmentAndDocument((LeafReaderContext) reader.getContext(), docId);
List<Object> values = sourceLookup.extractRawValues(fieldType.name());
Field[] fields = new Field[values.size()];
for (int i = 0; i < values.size(); i++) {

SourceSimpleFragmentsBuilder.java

@@ -21,10 +21,8 @@ package org.elasticsearch.search.fetch.subphase.highlight;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.vectorhighlight.BoundaryScanner;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.lookup.SourceLookup;
import java.io.IOException;
@@ -32,15 +30,15 @@ import java.util.List;
public class SourceSimpleFragmentsBuilder extends SimpleFragmentsBuilder {
private final QueryShardContext context;
private final SourceLookup sourceLookup;
public SourceSimpleFragmentsBuilder(MappedFieldType fieldType,
QueryShardContext context,
SourceLookup sourceLookup,
String[] preTags,
String[] postTags,
BoundaryScanner boundaryScanner) {
super(fieldType, preTags, postTags, boundaryScanner);
this.context = context;
this.sourceLookup = sourceLookup;
}
public static final Field[] EMPTY_FIELDS = new Field[0];
@@ -48,9 +46,6 @@ public class SourceSimpleFragmentsBuilder extends SimpleFragmentsBuilder {
@Override
protected Field[] getFields(IndexReader reader, int docId, String fieldName) throws IOException {
// we know it's a low level reader and a matching docId, since that's how we call the highlighter
SourceLookup sourceLookup = context.lookup().source();
sourceLookup.setSegmentAndDocument((LeafReaderContext) reader.getContext(), docId);
List<Object> values = sourceLookup.extractRawValues(fieldType.name());
if (values.isEmpty()) {
return EMPTY_FIELDS;
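Condensing the highlighter changes above: the fragments builders no longer derive and reposition a lookup inside `getFields`; they receive the per-hit lookup at construction time. A sketch stitched from the FastVectorHighlighter and fragments-builder hunks, not literal source:

    // FastVectorHighlighter#highlight: capture the hit's lookup in the builder.
    fragmentsBuilder = new SourceSimpleFragmentsBuilder(fieldType, hitContext.sourceLookup(),
        field.fieldOptions().preTags(), field.fieldOptions().postTags(), boundaryScanner);

    // SourceSimpleFragmentsBuilder#getFields: the lookup arrives already positioned on
    // the correct segment and docId, so the old setSegmentAndDocument call is gone.
    List<Object> values = sourceLookup.extractRawValues(fieldType.name());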

UnifiedHighlighter.java

@@ -78,7 +78,7 @@ public class UnifiedHighlighter implements Highlighter {
try {
final Analyzer analyzer = getAnalyzer(context.getMapperService().documentMapper(hitContext.hit().getType()),
hitContext);
List<Object> fieldValues = loadFieldValues(fieldType, field, context, hitContext, fieldContext.forceSource);
List<Object> fieldValues = loadFieldValues(fieldType, field, hitContext, fieldContext.forceSource);
if (fieldValues.size() == 0) {
return null;
}
@@ -168,10 +168,9 @@
protected List<Object> loadFieldValues(MappedFieldType fieldType,
SearchHighlightContext.Field field,
QueryShardContext context,
FetchSubPhase.HitContext hitContext,
boolean forceSource) throws IOException {
List<Object> fieldValues = HighlightUtils.loadFieldValues(fieldType, context, hitContext, forceSource);
List<Object> fieldValues = HighlightUtils.loadFieldValues(fieldType, hitContext, forceSource);
fieldValues = fieldValues.stream()
.map((s) -> convertFieldValue(fieldType, s))
.collect(Collectors.toList());

FilteredSearchContext.java

@@ -47,7 +47,6 @@ import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.elasticsearch.search.fetch.subphase.InnerHitsContext;
import org.elasticsearch.search.fetch.subphase.ScriptFieldsContext;
import org.elasticsearch.search.fetch.subphase.highlight.SearchHighlightContext;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.profile.Profilers;
import org.elasticsearch.search.query.QuerySearchResult;
import org.elasticsearch.search.rescore.RescoreContext;
@@ -471,11 +470,6 @@ public abstract class FilteredSearchContext extends SearchContext {
in.keepAlive(keepAlive);
}
@Override
public SearchLookup lookup() {
return in.lookup();
}
@Override
public DfsSearchResult dfsResult() {
return in.dfsResult();

SearchContext.java

@@ -55,7 +55,6 @@ import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.elasticsearch.search.fetch.subphase.InnerHitsContext;
import org.elasticsearch.search.fetch.subphase.ScriptFieldsContext;
import org.elasticsearch.search.fetch.subphase.highlight.SearchHighlightContext;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.profile.Profilers;
import org.elasticsearch.search.query.QuerySearchResult;
import org.elasticsearch.search.rescore.RescoreContext;
@@ -341,10 +340,6 @@ public abstract class SearchContext extends AbstractRefCounted implements Releas
public abstract void keepAlive(long keepAlive);
public SearchLookup lookup() {
return getQueryShardContext().lookup();
}
public abstract DfsSearchResult dfsResult();
public abstract QuerySearchResult queryResult();

FetchSourcePhaseTests.java

@@ -19,6 +19,8 @@
package org.elasticsearch.search.fetch.subphase;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.memory.MemoryIndex;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -28,7 +30,6 @@ import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.TestSearchContext;
@@ -134,7 +135,6 @@ public class FetchSourcePhaseTests extends ESTestCase {
return hitExecute(source, fetchSource, include, exclude, null);
}
private FetchSubPhase.HitContext hitExecute(XContentBuilder source, boolean fetchSource, String include, String exclude,
SearchHit.NestedIdentity nestedIdentity) {
return hitExecuteMultiple(source, fetchSource,
@@ -149,11 +149,17 @@
private FetchSubPhase.HitContext hitExecuteMultiple(XContentBuilder source, boolean fetchSource, String[] includes, String[] excludes,
SearchHit.NestedIdentity nestedIdentity) {
FetchSourceContext fetchSourceContext = new FetchSourceContext(fetchSource, includes, excludes);
SearchContext searchContext = new FetchSourcePhaseTestSearchContext(fetchSourceContext,
source == null ? null : BytesReference.bytes(source));
SearchContext searchContext = new FetchSourcePhaseTestSearchContext(fetchSourceContext);
FetchSubPhase.HitContext hitContext = new FetchSubPhase.HitContext();
final SearchHit searchHit = new SearchHit(1, null, null, nestedIdentity, null, null);
hitContext.reset(searchHit, null, 1, null);
// We don't need a real index, just a LeafReaderContext which cannot be mocked.
MemoryIndex index = new MemoryIndex();
LeafReaderContext leafReaderContext = index.createSearcher().getIndexReader().leaves().get(0);
hitContext.reset(searchHit, leafReaderContext, 1, null);
hitContext.sourceLookup().setSource(source == null ? null : BytesReference.bytes(source));
FetchSourcePhase phase = new FetchSourcePhase();
phase.hitExecute(searchContext, hitContext);
return hitContext;
@@ -161,13 +167,11 @@
private static class FetchSourcePhaseTestSearchContext extends TestSearchContext {
final FetchSourceContext context;
final BytesReference source;
final IndexShard indexShard;
FetchSourcePhaseTestSearchContext(FetchSourceContext context, BytesReference source) {
FetchSourcePhaseTestSearchContext(FetchSourceContext context) {
super(null);
this.context = context;
this.source = source;
this.indexShard = mock(IndexShard.class);
when(indexShard.shardId()).thenReturn(new ShardId("index", "index", 1));
}
@@ -182,13 +186,6 @@
return context;
}
@Override
public SearchLookup lookup() {
SearchLookup lookup = new SearchLookup(this.mapperService(), this::getForField, null);
lookup.source().setSource(source);
return lookup;
}
@Override
public IndexShard indexShard() {
return indexShard;

AggregatorTestCase.java

@@ -118,7 +118,6 @@ import org.elasticsearch.search.fetch.subphase.FetchDocValuesPhase;
import org.elasticsearch.search.fetch.subphase.FetchSourcePhase;
import org.elasticsearch.search.internal.ContextIndexSearcher;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.InternalAggregationTestCase;
import org.junit.After;
@@ -319,10 +318,7 @@
}), circuitBreakerService, mapperService);
when(searchContext.getForField(Mockito.any(MappedFieldType.class)))
.thenAnswer(invocationOnMock -> ifds.getForField((MappedFieldType) invocationOnMock.getArguments()[0]));
SearchLookup searchLookup = new SearchLookup(mapperService, ifds::getForField, new String[]{TYPE_NAME});
when(searchContext.lookup()).thenReturn(searchLookup);
QueryShardContext queryShardContext =
queryShardContextMock(contextIndexSearcher, mapperService, indexSettings, circuitBreakerService, bigArrays);
when(searchContext.getQueryShardContext()).thenReturn(queryShardContext);
@@ -334,6 +330,7 @@
}
return null;
});
Map<String, MappedFieldType> fieldNameToType = new HashMap<>();
fieldNameToType.putAll(Arrays.stream(fieldTypes)
.filter(Objects::nonNull)