Introduce FetchContext (#62357)

We currently pass a SearchContext around to share configuration among
FetchSubPhases. With the introduction of runtime fields, it would be useful
to start storing some state on this context to be shared between different
subphases (for example, stored fields or search lookups can be loaded lazily
but referred to by many different subphases). However, SearchContext is a
very large and unwieldy class, and adding more methods or state here feels
like a bridge too far.

This commit introduces a new FetchContext class that exposes only those
methods on SearchContext that are required for fetch phases. This reduces
the API surface area for fetch phases considerably, and should give us some
leeway to add further state.
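
As a sketch of the new contract (illustrative only: the class name and method bodies below are invented, but the signatures come from this commit), a fetch sub-phase now receives the narrower FetchContext and returns null when the request has nothing for it to do:

    import java.io.IOException;

    import org.apache.lucene.index.LeafReaderContext;
    import org.elasticsearch.search.fetch.FetchContext;
    import org.elasticsearch.search.fetch.FetchSubPhase;
    import org.elasticsearch.search.fetch.FetchSubPhaseProcessor;
    import org.elasticsearch.search.lookup.SearchLookup;

    // Hypothetical sub-phase illustrating the shape of the new API.
    public final class ExampleSubPhase implements FetchSubPhase {
        @Override
        public FetchSubPhaseProcessor getProcessor(FetchContext fetchContext, SearchLookup lookup) throws IOException {
            if (fetchContext.highlight() == null) {
                return null; // opt out: nothing for this sub-phase to do on this request
            }
            return new FetchSubPhaseProcessor() {
                @Override
                public void setNextReader(LeafReaderContext readerContext) {
                    // per-segment setup goes here
                }

                @Override
                public void process(HitContext hitContext) throws IOException {
                    // per-hit work goes here
                }
            };
        }
    }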
Alan Woodward authored on 2020-09-17 09:46:03 +01:00 (committed by Alan Woodward)
parent d091c12e0c
commit 63afc61b08
25 changed files with 327 additions and 181 deletions

PercolatorHighlightSubFetchPhase.java

@ -26,15 +26,14 @@ import org.apache.lucene.search.QueryVisitor;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.fetch.FetchContext;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.fetch.FetchSubPhaseProcessor;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightField;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightPhase;
import org.elasticsearch.search.fetch.subphase.highlight.Highlighter;
import org.elasticsearch.search.fetch.subphase.highlight.SearchHighlightContext;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
@ -57,11 +56,11 @@ final class PercolatorHighlightSubFetchPhase implements FetchSubPhase {
}
@Override
public FetchSubPhaseProcessor getProcessor(SearchContext searchContext, SearchLookup lookup) throws IOException {
if (searchContext.highlight() == null) {
public FetchSubPhaseProcessor getProcessor(FetchContext fetchContext, SearchLookup lookup) {
if (fetchContext.highlight() == null) {
return null;
}
List<PercolateQuery> percolateQueries = locatePercolatorQuery(searchContext.query());
List<PercolateQuery> percolateQueries = locatePercolatorQuery(fetchContext.query());
if (percolateQueries.isEmpty()) {
return null;
}
@ -70,7 +69,7 @@ final class PercolatorHighlightSubFetchPhase implements FetchSubPhase {
LeafReaderContext ctx;
@Override
public void setNextReader(LeafReaderContext readerContext) throws IOException {
public void setNextReader(LeafReaderContext readerContext) {
this.ctx = readerContext;
}
@ -111,10 +110,8 @@ final class PercolatorHighlightSubFetchPhase implements FetchSubPhase {
);
subContext.sourceLookup().setSource(document);
// force source because MemoryIndex does not store fields
SearchHighlightContext highlight = new SearchHighlightContext(searchContext.highlight().fields(), true);
QueryShardContext shardContext = new QueryShardContext(searchContext.getQueryShardContext());
FetchSubPhaseProcessor processor = highlightPhase.getProcessor(shardContext, searchContext.shardTarget(),
highlight, query);
SearchHighlightContext highlight = new SearchHighlightContext(fetchContext.highlight().fields(), true);
FetchSubPhaseProcessor processor = highlightPhase.getProcessor(fetchContext, highlight, query);
processor.process(subContext);
for (Map.Entry<String, HighlightField> entry : subContext.hit().getHighlightFields().entrySet()) {
if (percolateQuery.getDocuments().size() == 1) {

PercolatorMatchedSlotSubFetchPhase.java

@ -34,9 +34,9 @@ import org.apache.lucene.util.BitSetIterator;
import org.elasticsearch.Version;
import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.search.fetch.FetchContext;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.fetch.FetchSubPhaseProcessor;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.lookup.SearchLookup;
import java.io.IOException;
@ -58,10 +58,10 @@ final class PercolatorMatchedSlotSubFetchPhase implements FetchSubPhase {
static final String FIELD_NAME_PREFIX = "_percolator_document_slot";
@Override
public FetchSubPhaseProcessor getProcessor(SearchContext searchContext, SearchLookup lookup) throws IOException {
public FetchSubPhaseProcessor getProcessor(FetchContext fetchContext, SearchLookup lookup) throws IOException {
List<PercolateContext> percolateContexts = new ArrayList<>();
List<PercolateQuery> percolateQueries = locatePercolatorQuery(searchContext.query());
List<PercolateQuery> percolateQueries = locatePercolatorQuery(fetchContext.query());
boolean singlePercolateQuery = percolateQueries.size() == 1;
for (PercolateQuery pq : percolateQueries) {
percolateContexts.add(new PercolateContext(pq, singlePercolateQuery));

PercolatorHighlightSubFetchPhaseTests.java

@ -28,12 +28,11 @@ import org.apache.lucene.search.MatchAllDocsQuery;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery;
import org.elasticsearch.common.lucene.search.function.RandomScoreFunction;
import org.elasticsearch.search.fetch.FetchContext;
import org.elasticsearch.search.fetch.subphase.highlight.SearchHighlightContext;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.ESTestCase;
import org.mockito.Mockito;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
@ -41,20 +40,21 @@ import static java.util.Collections.emptyMap;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.sameInstance;
import static org.mockito.Mockito.mock;
public class PercolatorHighlightSubFetchPhaseTests extends ESTestCase {
public void testHitsExecutionNeeded() throws IOException {
public void testHitsExecutionNeeded() {
PercolateQuery percolateQuery = new PercolateQuery("_name", ctx -> null, Collections.singletonList(new BytesArray("{}")),
new MatchAllDocsQuery(), Mockito.mock(IndexSearcher.class), null, new MatchAllDocsQuery());
PercolatorHighlightSubFetchPhase subFetchPhase = new PercolatorHighlightSubFetchPhase(emptyMap());
SearchContext searchContext = Mockito.mock(SearchContext.class);
Mockito.when(searchContext.highlight()).thenReturn(new SearchHighlightContext(Collections.emptyList()));
Mockito.when(searchContext.query()).thenReturn(new MatchAllDocsQuery());
FetchContext fetchContext = mock(FetchContext.class);
Mockito.when(fetchContext.highlight()).thenReturn(new SearchHighlightContext(Collections.emptyList()));
Mockito.when(fetchContext.query()).thenReturn(new MatchAllDocsQuery());
assertNull(subFetchPhase.getProcessor(searchContext, null));
Mockito.when(searchContext.query()).thenReturn(percolateQuery);
assertNotNull(subFetchPhase.getProcessor(searchContext, null));
assertNull(subFetchPhase.getProcessor(fetchContext, null));
Mockito.when(fetchContext.query()).thenReturn(percolateQuery);
assertNotNull(subFetchPhase.getProcessor(fetchContext, null));
}
public void testLocatePercolatorQuery() {

PercolatorMatchedSlotSubFetchPhaseTests.java

@ -26,7 +26,6 @@ import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.memory.MemoryIndex;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.ScoreDoc;
@ -37,9 +36,9 @@ import org.apache.lucene.store.Directory;
import org.apache.lucene.util.FixedBitSet;
import org.elasticsearch.index.mapper.SeqNoFieldMapper;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.fetch.FetchContext;
import org.elasticsearch.search.fetch.FetchSubPhase.HitContext;
import org.elasticsearch.search.fetch.FetchSubPhaseProcessor;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.test.ESTestCase;
@ -63,7 +62,6 @@ public class PercolatorMatchedSlotSubFetchPhaseTests extends ESTestCase {
PercolatorMatchedSlotSubFetchPhase phase = new PercolatorMatchedSlotSubFetchPhase();
try (DirectoryReader reader = DirectoryReader.open(directory)) {
IndexSearcher indexSearcher = new IndexSearcher(reader);
LeafReaderContext context = reader.leaves().get(0);
// A match:
{
@ -75,7 +73,7 @@ public class PercolatorMatchedSlotSubFetchPhaseTests extends ESTestCase {
PercolateQuery percolateQuery = new PercolateQuery("_name", queryStore, Collections.emptyList(),
new MatchAllDocsQuery(), memoryIndex.createSearcher(), null, new MatchNoDocsQuery());
SearchContext sc = mock(SearchContext.class);
FetchContext sc = mock(FetchContext.class);
when(sc.query()).thenReturn(percolateQuery);
FetchSubPhaseProcessor processor = phase.getProcessor(sc, null);
@ -96,7 +94,7 @@ public class PercolatorMatchedSlotSubFetchPhaseTests extends ESTestCase {
PercolateQuery percolateQuery = new PercolateQuery("_name", queryStore, Collections.emptyList(),
new MatchAllDocsQuery(), memoryIndex.createSearcher(), null, new MatchNoDocsQuery());
SearchContext sc = mock(SearchContext.class);
FetchContext sc = mock(FetchContext.class);
when(sc.query()).thenReturn(percolateQuery);
FetchSubPhaseProcessor processor = phase.getProcessor(sc, null);
@ -116,7 +114,7 @@ public class PercolatorMatchedSlotSubFetchPhaseTests extends ESTestCase {
PercolateQuery percolateQuery = new PercolateQuery("_name", queryStore, Collections.emptyList(),
new MatchAllDocsQuery(), memoryIndex.createSearcher(), null, new MatchNoDocsQuery());
SearchContext sc = mock(SearchContext.class);
FetchContext sc = mock(FetchContext.class);
when(sc.query()).thenReturn(percolateQuery);
FetchSubPhaseProcessor processor = phase.getProcessor(sc, null);

REST test (YAML)

@ -115,6 +115,10 @@ setup:
body:
keyword: [ "a" ]
- do:
indices.refresh:
index: [ test ]
- do:
catch: bad_request
search:

FetchSubPhasePluginIT.java

@ -19,26 +19,22 @@
package org.elasticsearch.search.fetch;
import org.apache.logging.log4j.LogManager;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.termvectors.TermVectorsRequest;
import org.elasticsearch.action.termvectors.TermVectorsResponse;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.termvectors.TermVectorsService;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.SearchPlugin;
import org.elasticsearch.search.SearchExtBuilder;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
@ -121,7 +117,7 @@ public class FetchSubPhasePluginIT extends ESIntegTestCase {
private static final String NAME = "term_vectors_fetch";
@Override
public FetchSubPhaseProcessor getProcessor(SearchContext searchContext, SearchLookup lookup) {
public FetchSubPhaseProcessor getProcessor(FetchContext searchContext, SearchLookup lookup) {
return new FetchSubPhaseProcessor() {
@Override
public void setNextReader(LeafReaderContext readerContext) {
@ -129,13 +125,13 @@ public class FetchSubPhasePluginIT extends ESIntegTestCase {
}
@Override
public void process(HitContext hitContext) {
public void process(HitContext hitContext) throws IOException {
hitExecute(searchContext, hitContext);
}
};
}
private void hitExecute(SearchContext context, HitContext hitContext) {
private void hitExecute(FetchContext context, HitContext hitContext) throws IOException {
TermVectorsFetchBuilder fetchSubPhaseBuilder = (TermVectorsFetchBuilder)context.getSearchExt(NAME);
if (fetchSubPhaseBuilder == null) {
return;
@ -146,19 +142,18 @@ public class FetchSubPhasePluginIT extends ESIntegTestCase {
hitField = new DocumentField(NAME, new ArrayList<>(1));
hitContext.hit().setDocumentField(NAME, hitField);
}
TermVectorsRequest termVectorsRequest = new TermVectorsRequest(context.indexShard().shardId().getIndex().getName(),
hitContext.hit().getType(), hitContext.hit().getId());
TermVectorsResponse termVector = TermVectorsService.getTermVectors(context.indexShard(), termVectorsRequest);
try {
Map<String, Integer> tv = new HashMap<>();
TermsEnum terms = termVector.getFields().terms(field).iterator();
BytesRef term;
while ((term = terms.next()) != null) {
tv.put(term.utf8ToString(), terms.postings(null, PostingsEnum.ALL).freq());
}
hitField.getValues().add(tv);
} catch (IOException e) {
LogManager.getLogger(FetchSubPhasePluginIT.class).info("Swallowed exception", e);
}
Terms terms = hitContext.reader().getTermVector(hitContext.docId(), field);
if (terms != null) {
TermsEnum te = terms.iterator();
Map<String, Integer> tv = new HashMap<>();
BytesRef term;
PostingsEnum pe = null;
while ((term = te.next()) != null) {
pe = te.postings(pe, PostingsEnum.FREQS);
pe.nextDoc();
tv.put(term.utf8ToString(), pe.freq());
}
hitField.getValues().add(tv);
}
}
}

FetchContext.java (new file)

@ -0,0 +1,200 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.fetch;
import org.apache.lucene.search.Query;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.search.SearchExtBuilder;
import org.elasticsearch.search.fetch.subphase.FetchDocValuesContext;
import org.elasticsearch.search.fetch.subphase.FetchFieldsContext;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.elasticsearch.search.fetch.subphase.FieldAndFormat;
import org.elasticsearch.search.fetch.subphase.InnerHitsContext;
import org.elasticsearch.search.fetch.subphase.ScriptFieldsContext;
import org.elasticsearch.search.fetch.subphase.highlight.SearchHighlightContext;
import org.elasticsearch.search.internal.ContextIndexSearcher;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.rescore.RescoreContext;
import java.util.Collections;
import java.util.List;
/**
* Encapsulates state required to execute fetch phases
*/
public class FetchContext {
private final SearchContext searchContext;
/**
* Create a FetchContext based on a SearchContext
*/
public FetchContext(SearchContext searchContext) {
this.searchContext = searchContext;
}
/**
* The name of the index that documents are being fetched from
*/
public String getIndexName() {
return searchContext.indexShard().shardId().getIndexName();
}
/**
* The point-in-time searcher the original query was executed against
*/
public ContextIndexSearcher searcher() {
return searchContext.searcher();
}
/**
* The mapper service for the index we are fetching documents from
*/
public MapperService mapperService() {
return searchContext.mapperService();
}
/**
* The index settings for the index we are fetching documents from
*/
public IndexSettings getIndexSettings() {
return mapperService().getIndexSettings();
}
/**
* The original query
*/
public Query query() {
return searchContext.query();
}
/**
* The original query with additional filters and named queries
*/
public ParsedQuery parsedQuery() {
return searchContext.parsedQuery();
}
/**
* Any post-filters run as part of the search
*/
public ParsedQuery parsedPostFilter() {
return searchContext.parsedPostFilter();
}
/**
* Configuration for fetching _source
*/
public FetchSourceContext fetchSourceContext() {
return searchContext.fetchSourceContext();
}
/**
* Should the response include `explain` output
*/
public boolean explain() {
return searchContext.explain() && searchContext.query() != null;
}
/**
* The rescorers included in the original search, used for explain output
*/
public List<RescoreContext> rescore() {
return searchContext.rescore();
}
/**
* Should the response include sequence number and primary term metadata
*/
public boolean seqNoAndPrimaryTerm() {
return searchContext.seqNoAndPrimaryTerm();
}
/**
* Configuration for fetching docValues fields
*/
public FetchDocValuesContext docValuesContext() {
FetchDocValuesContext dvContext = searchContext.docValuesContext();
if (searchContext.collapse() != null) {
// retrieve the `doc_value` associated with the collapse field
String name = searchContext.collapse().getFieldName();
if (dvContext == null) {
return new FetchDocValuesContext(Collections.singletonList(new FieldAndFormat(name, null)));
} else if (searchContext.docValuesContext().fields().stream().map(ff -> ff.field).anyMatch(name::equals) == false) {
dvContext.fields().add(new FieldAndFormat(name, null));
}
}
return dvContext;
}
/**
* Configuration for highlighting
*/
public SearchHighlightContext highlight() {
return searchContext.highlight();
}
/**
* Should the response include scores, even if scores were not calculated in the original query
*/
public boolean fetchScores() {
return searchContext.sort() != null && searchContext.trackScores();
}
/**
* Configuration for returning inner hits
*/
public InnerHitsContext innerHits() {
return searchContext.innerHits();
}
/**
* Should the response include version metadata
*/
public boolean version() {
// TODO version is loaded from docvalues, not stored fields, so why are we checking
// stored fields here?
return searchContext.version() &&
(searchContext.storedFieldsContext() == null || searchContext.storedFieldsContext().fetchFields());
}
/**
* Configuration for the 'fields' response
*/
public FetchFieldsContext fetchFieldsContext() {
return searchContext.fetchFieldsContext();
}
/**
* Configuration for script fields
*/
public ScriptFieldsContext scriptFields() {
return searchContext.scriptFields();
}
/**
* Configuration for external fetch phase plugins
*/
public SearchExtBuilder getSearchExt(String name) {
return searchContext.getSearchExt(name);
}
}
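
For orientation, here is roughly how the new class is threaded through the fetch phase (an illustrative sketch; the helper class and method names are invented, but the calls mirror the FetchPhase change below):

    import java.io.IOException;
    import java.util.ArrayList;
    import java.util.List;

    import org.elasticsearch.search.fetch.FetchContext;
    import org.elasticsearch.search.fetch.FetchSubPhase;
    import org.elasticsearch.search.fetch.FetchSubPhaseProcessor;
    import org.elasticsearch.search.internal.SearchContext;
    import org.elasticsearch.search.lookup.SearchLookup;

    // Hypothetical helper: build one FetchContext per fetch and share it
    // across every sub-phase, collecting only the processors that opt in.
    final class FetchWiringSketch {
        static List<FetchSubPhaseProcessor> buildProcessors(SearchContext searchContext,
                                                            SearchLookup lookup,
                                                            List<FetchSubPhase> subPhases) throws IOException {
            FetchContext fetchContext = new FetchContext(searchContext);
            List<FetchSubPhaseProcessor> processors = new ArrayList<>();
            for (FetchSubPhase subPhase : subPhases) {
                FetchSubPhaseProcessor processor = subPhase.getProcessor(fetchContext, lookup);
                if (processor != null) { // null means the sub-phase skips this request
                    processors.add(processor);
                }
            }
            return processors;
        }
    }

The real FetchPhase additionally wraps any failure here in a FetchPhaseExecutionException, as the diff below shows.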

FetchPhase.java

@ -51,6 +51,7 @@ import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.search.SearchContextSourcePrinter;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.search.fetch.FetchSubPhase.HitContext;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.elasticsearch.search.fetch.subphase.InnerHitsContext;
@ -92,8 +93,12 @@ public class FetchPhase {
LOGGER.trace("{}", new SearchContextSourcePrinter(context));
}
Map<String, Set<String>> storedToRequestedFields = new HashMap<>();
FieldsVisitor fieldsVisitor = createStoredFieldsVisitor(context, storedToRequestedFields);
if (context.docIdsToLoadSize() == 0) {
// no individual hits to process, so we shortcut
context.fetchResult().hits(new SearchHits(new SearchHit[0], context.queryResult().getTotalHits(),
context.queryResult().getMaxScore()));
return;
}
DocIdToIndex[] docs = new DocIdToIndex[context.docIdsToLoadSize()];
for (int index = 0; index < context.docIdsToLoadSize(); index++) {
@ -101,11 +106,16 @@ public class FetchPhase {
}
Arrays.sort(docs);
Map<String, Set<String>> storedToRequestedFields = new HashMap<>();
FieldsVisitor fieldsVisitor = createStoredFieldsVisitor(context, storedToRequestedFields);
FetchContext fetchContext = new FetchContext(context);
SearchHit[] hits = new SearchHit[context.docIdsToLoadSize()];
Map<String, Object> sharedCache = new HashMap<>();
SearchLookup lookup = context.getQueryShardContext().newFetchLookup();
List<FetchSubPhaseProcessor> processors = getProcessors(context, lookup);
SearchLookup lookup = context.getQueryShardContext().newFetchLookup();
List<FetchSubPhaseProcessor> processors = getProcessors(context.shardTarget(), lookup, fetchContext);
int currentReaderIndex = -1;
LeafReaderContext currentReaderContext = null;
@ -150,7 +160,7 @@ public class FetchPhase {
}
List<FetchSubPhaseProcessor> getProcessors(SearchContext context, SearchLookup lookup) {
List<FetchSubPhaseProcessor> getProcessors(SearchShardTarget target, SearchLookup lookup, FetchContext context) {
try {
List<FetchSubPhaseProcessor> processors = new ArrayList<>();
for (FetchSubPhase fsp : fetchSubPhases) {
@ -161,7 +171,7 @@ public class FetchPhase {
}
return processors;
} catch (Exception e) {
throw new FetchPhaseExecutionException(context.shardTarget(), "Error building fetch sub-phases", e);
throw new FetchPhaseExecutionException(target, "Error building fetch sub-phases", e);
}
}

FetchSubPhase.java

@ -23,7 +23,6 @@ import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.ReaderUtil;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
@ -100,8 +99,8 @@ public interface FetchSubPhase {
/**
* Returns a {@link FetchSubPhaseProcessor} for this sub phase.
*
* If nothing should be executed for the provided {@link SearchContext}, then the
* If nothing should be executed for the provided {@code FetchContext}, then the
* implementation should return {@code null}
*/
FetchSubPhaseProcessor getProcessor(SearchContext searchContext, SearchLookup lookup) throws IOException;
FetchSubPhaseProcessor getProcessor(FetchContext fetchContext, SearchLookup lookup) throws IOException;
}

ExplainPhase.java

@ -20,9 +20,9 @@ package org.elasticsearch.search.fetch.subphase;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.Explanation;
import org.elasticsearch.search.fetch.FetchContext;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.fetch.FetchSubPhaseProcessor;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.rescore.RescoreContext;
@ -34,8 +34,8 @@ import java.io.IOException;
public final class ExplainPhase implements FetchSubPhase {
@Override
public FetchSubPhaseProcessor getProcessor(SearchContext context, SearchLookup lookup) {
if (context.explain() == false || context.hasOnlySuggest()) {
public FetchSubPhaseProcessor getProcessor(FetchContext context, SearchLookup lookup) {
if (context.explain() == false) {
return null;
}
return new FetchSubPhaseProcessor() {

FetchDocValuesContext.java

@ -51,7 +51,7 @@ public class FetchDocValuesContext {
return new FetchDocValuesContext(fields);
}
FetchDocValuesContext(List<FieldAndFormat> fields) {
public FetchDocValuesContext(List<FieldAndFormat> fields) {
this.fields = fields;
}

FetchDocValuesPhase.java

@ -23,15 +23,14 @@ import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.index.mapper.DocValueFetcher;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.ValueFetcher;
import org.elasticsearch.search.fetch.FetchContext;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.fetch.FetchSubPhaseProcessor;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.index.mapper.ValueFetcher;
import org.elasticsearch.search.lookup.SearchLookup;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
@ -45,19 +44,9 @@ public final class FetchDocValuesPhase implements FetchSubPhase {
private static final DeprecationLogger DEPRECATION_LOGGER = DeprecationLogger.getLogger(FetchDocValuesPhase.class);
@Override
public FetchSubPhaseProcessor getProcessor(SearchContext context, SearchLookup lookup) throws IOException {
if (context.collapse() != null) {
// retrieve the `doc_value` associated with the collapse field
String name = context.collapse().getFieldName();
if (context.docValuesContext() == null) {
context.docValuesContext(new FetchDocValuesContext(
Collections.singletonList(new FieldAndFormat(name, null))));
} else if (context.docValuesContext().fields().stream().map(ff -> ff.field).anyMatch(name::equals) == false) {
context.docValuesContext().fields().add(new FieldAndFormat(name, null));
}
}
if (context.docValuesContext() == null) {
public FetchSubPhaseProcessor getProcessor(FetchContext context, SearchLookup lookup) {
FetchDocValuesContext dvContext = context.docValuesContext();
if (dvContext == null) {
return null;
}
@ -87,7 +76,7 @@ public final class FetchDocValuesPhase implements FetchSubPhase {
return new FetchSubPhaseProcessor() {
@Override
public void setNextReader(LeafReaderContext readerContext) throws IOException {
public void setNextReader(LeafReaderContext readerContext) {
for (DocValueField f : fields) {
f.fetcher.setNextReader(readerContext);
}
@ -109,7 +98,7 @@ public final class FetchDocValuesPhase implements FetchSubPhase {
};
}
private class DocValueField {
private static class DocValueField {
private final String field;
private final ValueFetcher fetcher;

FetchFieldsPhase.java

@ -23,9 +23,9 @@ import org.apache.lucene.index.LeafReaderContext;
import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.index.mapper.IgnoredFieldMapper;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.fetch.FetchContext;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.fetch.FetchSubPhaseProcessor;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
@ -41,14 +41,14 @@ import java.util.Set;
public final class FetchFieldsPhase implements FetchSubPhase {
@Override
public FetchSubPhaseProcessor getProcessor(SearchContext searchContext, SearchLookup lookup) {
FetchFieldsContext fetchFieldsContext = searchContext.fetchFieldsContext();
public FetchSubPhaseProcessor getProcessor(FetchContext fetchContext, SearchLookup lookup) {
FetchFieldsContext fetchFieldsContext = fetchContext.fetchFieldsContext();
if (fetchFieldsContext == null) {
return null;
}
FieldValueRetriever retriever = fetchFieldsContext.fieldValueRetriever(
searchContext.indexShard().shardId().getIndexName(),
searchContext.mapperService(),
fetchContext.getIndexName(),
fetchContext.mapperService(),
lookup
);
return new FetchSubPhaseProcessor() {

FetchScorePhase.java

@ -25,9 +25,9 @@ import org.apache.lucene.search.ScoreMode;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.ScorerSupplier;
import org.apache.lucene.search.Weight;
import org.elasticsearch.search.fetch.FetchContext;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.fetch.FetchSubPhaseProcessor;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.lookup.SearchLookup;
import java.io.IOException;
@ -35,10 +35,8 @@ import java.io.IOException;
public class FetchScorePhase implements FetchSubPhase {
@Override
public FetchSubPhaseProcessor getProcessor(SearchContext context, SearchLookup lookup) throws IOException {
if (context.trackScores() == false || context.docIdsToLoadSize() == 0 ||
// scores were already computed since they are needed on the coordinated node to merge top hits
context.sort() == null) {
public FetchSubPhaseProcessor getProcessor(FetchContext context, SearchLookup lookup) throws IOException {
if (context.fetchScores() == false) {
return null;
}
final IndexSearcher searcher = context.searcher();

FetchSourcePhase.java

@ -25,9 +25,9 @@ import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.fetch.FetchContext;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.fetch.FetchSubPhaseProcessor;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
@ -37,12 +37,12 @@ import java.util.Map;
public final class FetchSourcePhase implements FetchSubPhase {
@Override
public FetchSubPhaseProcessor getProcessor(SearchContext searchContext, SearchLookup lookup) {
if (searchContext.sourceRequested() == false) {
public FetchSubPhaseProcessor getProcessor(FetchContext fetchContext, SearchLookup lookup) {
FetchSourceContext fetchSourceContext = fetchContext.fetchSourceContext();
if (fetchSourceContext == null || fetchSourceContext.fetchSource() == false) {
return null;
}
String index = searchContext.indexShard().shardId().getIndexName();
FetchSourceContext fetchSourceContext = searchContext.fetchSourceContext();
String index = fetchContext.getIndexName();
assert fetchSourceContext.fetchSource();
return new FetchSubPhaseProcessor() {

FetchVersionPhase.java

@ -22,9 +22,9 @@ import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.NumericDocValues;
import org.elasticsearch.common.lucene.uid.Versions;
import org.elasticsearch.index.mapper.VersionFieldMapper;
import org.elasticsearch.search.fetch.FetchContext;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.fetch.FetchSubPhaseProcessor;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.lookup.SearchLookup;
import java.io.IOException;
@ -32,9 +32,8 @@ import java.io.IOException;
public final class FetchVersionPhase implements FetchSubPhase {
@Override
public FetchSubPhaseProcessor getProcessor(SearchContext context, SearchLookup lookup) {
if (context.version() == false ||
(context.storedFieldsContext() != null && context.storedFieldsContext().fetchFields() == false)) {
public FetchSubPhaseProcessor getProcessor(FetchContext context, SearchLookup lookup) {
if (context.version() == false) {
return null;
}
return new FetchSubPhaseProcessor() {

InnerHitsPhase.java

@ -26,11 +26,11 @@ import org.elasticsearch.common.lucene.search.TopDocsAndMaxScore;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.fetch.FetchContext;
import org.elasticsearch.search.fetch.FetchPhase;
import org.elasticsearch.search.fetch.FetchSearchResult;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.fetch.FetchSubPhaseProcessor;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;
@ -47,7 +47,7 @@ public final class InnerHitsPhase implements FetchSubPhase {
}
@Override
public FetchSubPhaseProcessor getProcessor(SearchContext searchContext, SearchLookup lookup) {
public FetchSubPhaseProcessor getProcessor(FetchContext searchContext, SearchLookup lookup) {
if (searchContext.innerHits() == null) {
return null;
}

MatchedQueriesPhase.java

@ -25,9 +25,9 @@ import org.apache.lucene.search.ScorerSupplier;
import org.apache.lucene.search.Weight;
import org.apache.lucene.util.Bits;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.search.fetch.FetchContext;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.fetch.FetchSubPhaseProcessor;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.lookup.SearchLookup;
import java.io.IOException;
@ -39,13 +39,11 @@ import java.util.Map;
public final class MatchedQueriesPhase implements FetchSubPhase {
@Override
public FetchSubPhaseProcessor getProcessor(SearchContext context, SearchLookup lookup) throws IOException {
if (context.docIdsToLoadSize() == 0 ||
// in case the request has only suggest, parsed query is null
context.parsedQuery() == null) {
return null;
public FetchSubPhaseProcessor getProcessor(FetchContext context, SearchLookup lookup) throws IOException {
Map<String, Query> namedQueries = new HashMap<>();
if (context.parsedQuery() != null) {
namedQueries.putAll(context.parsedQuery().namedFilters());
}
Map<String, Query> namedQueries = new HashMap<>(context.parsedQuery().namedFilters());
if (context.parsedPostFilter() != null) {
namedQueries.putAll(context.parsedPostFilter().namedFilters());
}

ScriptFieldsPhase.java

@ -22,9 +22,9 @@ import org.apache.lucene.index.LeafReaderContext;
import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.common.util.CollectionUtils;
import org.elasticsearch.script.FieldScript;
import org.elasticsearch.search.fetch.FetchContext;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.fetch.FetchSubPhaseProcessor;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.lookup.SearchLookup;
import java.io.IOException;
@ -36,8 +36,8 @@ import java.util.List;
public final class ScriptFieldsPhase implements FetchSubPhase {
@Override
public FetchSubPhaseProcessor getProcessor(SearchContext context, SearchLookup lookup) {
if (context.hasScriptFields() == false) {
public FetchSubPhaseProcessor getProcessor(FetchContext context, SearchLookup lookup) {
if (context.scriptFields() == null) {
return null;
}
List<ScriptFieldsContext.ScriptField> scriptFields = context.scriptFields().fields();

SeqNoPrimaryTermPhase.java

@ -22,9 +22,9 @@ import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.NumericDocValues;
import org.elasticsearch.index.mapper.SeqNoFieldMapper;
import org.elasticsearch.index.seqno.SequenceNumbers;
import org.elasticsearch.search.fetch.FetchContext;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.fetch.FetchSubPhaseProcessor;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.lookup.SearchLookup;
import java.io.IOException;
@ -32,7 +32,7 @@ import java.io.IOException;
public final class SeqNoPrimaryTermPhase implements FetchSubPhase {
@Override
public FetchSubPhaseProcessor getProcessor(SearchContext context, SearchLookup lookup) throws IOException {
public FetchSubPhaseProcessor getProcessor(FetchContext context, SearchLookup lookup) {
if (context.seqNoAndPrimaryTerm() == false) {
return null;
}

FieldHighlightContext.java

@ -20,8 +20,7 @@ package org.elasticsearch.search.fetch.subphase.highlight;
import org.apache.lucene.search.Query;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.search.fetch.FetchContext;
import org.elasticsearch.search.fetch.FetchSubPhase;
public class FieldHighlightContext {
@ -29,8 +28,7 @@ public class FieldHighlightContext {
public final String fieldName;
public final SearchHighlightContext.Field field;
public final MappedFieldType fieldType;
public final SearchShardTarget shardTarget;
public final QueryShardContext context;
public final FetchContext context;
public final FetchSubPhase.HitContext hitContext;
public final Query query;
public final boolean forceSource;
@ -38,15 +36,13 @@ public class FieldHighlightContext {
public FieldHighlightContext(String fieldName,
SearchHighlightContext.Field field,
MappedFieldType fieldType,
SearchShardTarget shardTarget,
QueryShardContext context,
FetchContext context,
FetchSubPhase.HitContext hitContext,
Query query,
boolean forceSource) {
this.fieldName = fieldName;
this.field = field;
this.fieldType = fieldType;
this.shardTarget = shardTarget;
this.context = context;
this.hitContext = hitContext;
this.query = query;

HighlightPhase.java

@ -26,11 +26,9 @@ import org.elasticsearch.index.mapper.KeywordFieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.SourceFieldMapper;
import org.elasticsearch.index.mapper.TextFieldMapper;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.search.fetch.FetchContext;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.fetch.FetchSubPhaseProcessor;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.lookup.SearchLookup;
import java.io.IOException;
@ -50,16 +48,16 @@ public class HighlightPhase implements FetchSubPhase {
}
@Override
public FetchSubPhaseProcessor getProcessor(SearchContext context, SearchLookup lookup) {
public FetchSubPhaseProcessor getProcessor(FetchContext context, SearchLookup lookup) {
if (context.highlight() == null) {
return null;
}
return getProcessor(context.getQueryShardContext(), context.shardTarget(), context.highlight(), context.parsedQuery().query());
return getProcessor(context, context.highlight(), context.parsedQuery().query());
}
public FetchSubPhaseProcessor getProcessor(QueryShardContext qsc, SearchShardTarget target, SearchHighlightContext hc, Query query) {
Map<String, Function<HitContext, FieldHighlightContext>> contextBuilders = contextBuilders(qsc, target, hc, query);
public FetchSubPhaseProcessor getProcessor(FetchContext context, SearchHighlightContext highlightContext, Query query) {
Map<String, Function<HitContext, FieldHighlightContext>> contextBuilders = contextBuilders(context, highlightContext, query);
return new FetchSubPhaseProcessor() {
@Override
public void setNextReader(LeafReaderContext readerContext) {
@ -99,22 +97,21 @@ public class HighlightPhase implements FetchSubPhase {
return highlighter;
}
private Map<String, Function<HitContext, FieldHighlightContext>> contextBuilders(QueryShardContext context,
SearchShardTarget shardTarget,
SearchHighlightContext highlight,
private Map<String, Function<HitContext, FieldHighlightContext>> contextBuilders(FetchContext context,
SearchHighlightContext highlightContext,
Query query) {
Map<String, Function<HitContext, FieldHighlightContext>> builders = new LinkedHashMap<>();
for (SearchHighlightContext.Field field : highlight.fields()) {
for (SearchHighlightContext.Field field : highlightContext.fields()) {
Highlighter highlighter = getHighlighter(field);
Collection<String> fieldNamesToHighlight;
if (Regex.isSimpleMatchPattern(field.field())) {
fieldNamesToHighlight = context.getMapperService().simpleMatchToFullName(field.field());
fieldNamesToHighlight = context.mapperService().simpleMatchToFullName(field.field());
} else {
fieldNamesToHighlight = Collections.singletonList(field.field());
}
if (highlight.forceSource(field)) {
SourceFieldMapper sourceFieldMapper = context.getMapperService().documentMapper().sourceMapper();
if (highlightContext.forceSource(field)) {
SourceFieldMapper sourceFieldMapper = context.mapperService().documentMapper().sourceMapper();
if (sourceFieldMapper.enabled() == false) {
throw new IllegalArgumentException("source is forced for fields " + fieldNamesToHighlight
+ " but _source is disabled");
@ -123,7 +120,7 @@ public class HighlightPhase implements FetchSubPhase {
boolean fieldNameContainsWildcards = field.field().contains("*");
for (String fieldName : fieldNamesToHighlight) {
MappedFieldType fieldType = context.getMapperService().fieldType(fieldName);
MappedFieldType fieldType = context.mapperService().fieldType(fieldName);
if (fieldType == null) {
continue;
}
@ -148,9 +145,9 @@ public class HighlightPhase implements FetchSubPhase {
Query highlightQuery = field.fieldOptions().highlightQuery();
boolean forceSource = highlight.forceSource(field);
boolean forceSource = highlightContext.forceSource(field);
builders.put(fieldName,
hc -> new FieldHighlightContext(fieldType.name(), field, fieldType, shardTarget, context, hc,
hc -> new FieldHighlightContext(fieldType.name(), field, fieldType, context, hc,
highlightQuery == null ? query : highlightQuery, forceSource));
}
}

PlainHighlighter.java

@ -38,7 +38,7 @@ import org.elasticsearch.common.text.Text;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.mapper.KeywordFieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.fetch.FetchContext;
import org.elasticsearch.search.fetch.FetchSubPhase;
import java.io.IOException;
@ -55,7 +55,7 @@ public class PlainHighlighter implements Highlighter {
@Override
public HighlightField highlight(FieldHighlightContext fieldContext) throws IOException {
SearchHighlightContext.Field field = fieldContext.field;
QueryShardContext context = fieldContext.context;
FetchContext context = fieldContext.context;
FetchSubPhase.HitContext hitContext = fieldContext.hitContext;
MappedFieldType fieldType = fieldContext.fieldType;
@ -100,10 +100,10 @@ public class PlainHighlighter implements Highlighter {
int numberOfFragments = field.fieldOptions().numberOfFragments() == 0 ? 1 : field.fieldOptions().numberOfFragments();
ArrayList<TextFragment> fragsList = new ArrayList<>();
List<Object> textsToHighlight;
Analyzer analyzer = context.getMapperService().documentMapper(hitContext.hit().getType()).mappers().indexAnalyzer();
Analyzer analyzer = context.mapperService().documentMapper(hitContext.hit().getType()).mappers().indexAnalyzer();
Integer keywordIgnoreAbove = null;
if (fieldType instanceof KeywordFieldMapper.KeywordFieldType) {
KeywordFieldMapper mapper = (KeywordFieldMapper) context.getMapperService().documentMapper()
KeywordFieldMapper mapper = (KeywordFieldMapper) context.mapperService().documentMapper()
.mappers().getMapper(fieldContext.fieldName);
keywordIgnoreAbove = mapper.ignoreAbove();
};
@ -120,7 +120,7 @@ public class PlainHighlighter implements Highlighter {
if (textLength > maxAnalyzedOffset) {
throw new IllegalArgumentException(
"The length of [" + fieldContext.fieldName + "] field of [" + hitContext.hit().getId() +
"] doc of [" + context.index().getName() + "] index " +
"] doc of [" + context.getIndexName() + "] index " +
"has exceeded [" + maxAnalyzedOffset + "] - maximum allowed to be analyzed for highlighting. " +
"This maximum can be set by changing the [" + IndexSettings.MAX_ANALYZED_OFFSET_SETTING.getKey() +
"] index level setting. " + "For large texts, indexing with offsets or term vectors, and highlighting " +

UnifiedHighlighter.java

@ -38,7 +38,6 @@ import org.elasticsearch.index.mapper.IdFieldMapper;
import org.elasticsearch.index.mapper.KeywordFieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.TextSearchInfo;
import org.elasticsearch.search.fetch.FetchPhaseExecutionException;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.fetch.FetchSubPhase.HitContext;
@ -80,13 +79,7 @@ public class UnifiedHighlighter implements Highlighter {
}
return mergeFieldValues(fieldValues, MULTIVAL_SEP_CHAR);
};
Snippet[] fieldSnippets;
try {
fieldSnippets = highlighter.highlightField(hitContext.reader(), hitContext.docId(), loadFieldValues);
} catch (IOException e) {
throw new FetchPhaseExecutionException(fieldContext.shardTarget,
"Failed to highlight field [" + fieldContext.fieldName + "]", e);
}
Snippet[] fieldSnippets = highlighter.highlightField(hitContext.reader(), hitContext.docId(), loadFieldValues);
if (fieldSnippets == null || fieldSnippets.length == 0) {
return null;
@ -121,12 +114,12 @@ public class UnifiedHighlighter implements Highlighter {
int maxAnalyzedOffset = fieldContext.context.getIndexSettings().getHighlightMaxAnalyzedOffset();
int keywordIgnoreAbove = Integer.MAX_VALUE;
if (fieldContext.fieldType instanceof KeywordFieldMapper.KeywordFieldType) {
KeywordFieldMapper mapper = (KeywordFieldMapper) fieldContext.context.getMapperService().documentMapper()
KeywordFieldMapper mapper = (KeywordFieldMapper) fieldContext.context.mapperService().documentMapper()
.mappers().getMapper(fieldContext.fieldName);
keywordIgnoreAbove = mapper.ignoreAbove();
}
int numberOfFragments = fieldContext.field.fieldOptions().numberOfFragments();
Analyzer analyzer = getAnalyzer(fieldContext.context.getMapperService().documentMapper());
Analyzer analyzer = getAnalyzer(fieldContext.context.mapperService().documentMapper());
PassageFormatter passageFormatter = getPassageFormatter(fieldContext.hitContext, fieldContext.field, encoder);
IndexSearcher searcher = fieldContext.context.searcher();
OffsetSource offsetSource = getOffsetSource(fieldContext.fieldType);
@ -155,7 +148,7 @@ public class UnifiedHighlighter implements Highlighter {
passageFormatter,
fieldContext.field.fieldOptions().boundaryScannerLocale(),
breakIterator,
fieldContext.context.getFullyQualifiedIndex().getName(),
fieldContext.context.getIndexName(),
fieldContext.fieldName,
fieldContext.query,
fieldContext.field.fieldOptions().noMatchSize(),

FetchSourcePhaseTests.java

@ -25,15 +25,12 @@ import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.fetch.FetchContext;
import org.elasticsearch.search.fetch.FetchSubPhase.HitContext;
import org.elasticsearch.search.fetch.FetchSubPhaseProcessor;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.TestSearchContext;
import java.io.IOException;
import java.util.Collections;
@ -153,7 +150,9 @@ public class FetchSourcePhaseTests extends ESTestCase {
private HitContext hitExecuteMultiple(XContentBuilder source, boolean fetchSource, String[] includes, String[] excludes,
SearchHit.NestedIdentity nestedIdentity) throws IOException {
FetchSourceContext fetchSourceContext = new FetchSourceContext(fetchSource, includes, excludes);
SearchContext searchContext = new FetchSourcePhaseTestSearchContext(fetchSourceContext);
FetchContext fetchContext = mock(FetchContext.class);
when(fetchContext.fetchSourceContext()).thenReturn(fetchSourceContext);
when(fetchContext.getIndexName()).thenReturn("index");
final SearchHit searchHit = new SearchHit(1, null, null, nestedIdentity, null, null);
@ -164,7 +163,7 @@ public class FetchSourcePhaseTests extends ESTestCase {
hitContext.sourceLookup().setSource(source == null ? null : BytesReference.bytes(source));
FetchSourcePhase phase = new FetchSourcePhase();
FetchSubPhaseProcessor processor = phase.getProcessor(searchContext, null);
FetchSubPhaseProcessor processor = phase.getProcessor(fetchContext, null);
if (fetchSource == false) {
assertNull(processor);
} else {
@ -174,30 +173,4 @@ public class FetchSourcePhaseTests extends ESTestCase {
return hitContext;
}
private static class FetchSourcePhaseTestSearchContext extends TestSearchContext {
final FetchSourceContext context;
final IndexShard indexShard;
FetchSourcePhaseTestSearchContext(FetchSourceContext context) {
super(null);
this.context = context;
this.indexShard = mock(IndexShard.class);
when(indexShard.shardId()).thenReturn(new ShardId("index", "index", 1));
}
@Override
public boolean sourceRequested() {
return context != null && context.fetchSource();
}
@Override
public FetchSourceContext fetchSourceContext() {
return context;
}
@Override
public IndexShard indexShard() {
return indexShard;
}
}
}