Remove the SearchContext from the highlighter context (#47733)

Today the built-in highlighters and plugins have access to the SearchContext through the
highlighter context. However, most of the information exposed in the SearchContext is not needed; a QueryShardContext
is enough to perform highlighting. This change replaces the SearchContext with the information that highlighters
absolutely require: a QueryShardContext and the SearchContextHighlight. It reduces the exposure of the
complex SearchContext and removes the need to clone it in the percolator sub phase.

Relates #47198
Relates #46523
Jim Ferenczi 2019-10-09 14:40:05 +02:00 committed by jimczi
parent 0e7869128a
commit bd6e2592a7
12 changed files with 102 additions and 76 deletions
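
For third-party highlighters, the visible change is the HighlighterContext contract: the context field is now a QueryShardContext, the shard target and the SearchContextHighlight travel explicitly, and mapper/settings lookups move to the QueryShardContext getters. A minimal sketch of a plugin highlighter against the new surface (ExampleHighlighter is hypothetical; the fields and getters it touches match the diffs below):

import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightField;
import org.elasticsearch.search.fetch.subphase.highlight.Highlighter;
import org.elasticsearch.search.fetch.subphase.highlight.HighlighterContext;
import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight;

// Hypothetical plugin highlighter illustrating the reduced surface area.
public class ExampleHighlighter implements Highlighter {

    @Override
    public HighlightField highlight(HighlighterContext highlighterContext) {
        SearchContextHighlight.Field field = highlighterContext.field;
        // Previously a SearchContext; now only the shard-level context is exposed.
        QueryShardContext context = highlighterContext.context;
        MappedFieldType fieldType = highlighterContext.fieldType;
        // Per-request highlight settings are passed explicitly instead of
        // being read off the SearchContext.
        boolean forceSource = highlighterContext.highlight.forceSource(field);
        // Mapper and index-settings lookups go through the QueryShardContext getters.
        int maxAnalyzedOffset = context.getIndexSettings().getHighlightMaxAnalyzedOffset();
        // ... produce fragments for fieldType here; returning null skips the field.
        return null;
    }

    @Override
    public boolean canHighlight(MappedFieldType fieldType) {
        return true;
    }
}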

PercolatorHighlightSubFetchPhase.java

@@ -32,7 +32,7 @@ import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.document.DocumentField;
 import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery;
 import org.elasticsearch.common.text.Text;
-import org.elasticsearch.index.query.ParsedQuery;
+import org.elasticsearch.index.query.QueryShardContext;
 import org.elasticsearch.search.SearchHit;
 import org.elasticsearch.search.fetch.FetchSubPhase;
 import org.elasticsearch.search.fetch.subphase.highlight.HighlightField;
@@ -40,7 +40,6 @@ import org.elasticsearch.search.fetch.subphase.highlight.HighlightPhase;
 import org.elasticsearch.search.fetch.subphase.highlight.Highlighter;
 import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight;
 import org.elasticsearch.search.internal.SearchContext;
-import org.elasticsearch.search.internal.SubSearchContext;

 import java.io.IOException;
 import java.util.ArrayList;
@@ -100,15 +99,19 @@ final class PercolatorHighlightSubFetchPhase implements FetchSubPhase {
                 for (Object matchedSlot : field.getValues()) {
                     int slot = (int) matchedSlot;
                     BytesReference document = percolateQuery.getDocuments().get(slot);
-                    SubSearchContext subSearchContext =
-                        createSubSearchContext(context, percolatorLeafReaderContext, document, slot);
-                    subSearchContext.parsedQuery(new ParsedQuery(query));
+                    SearchContextHighlight highlight = new SearchContextHighlight(context.highlight().fields());
+                    // Enforce highlighting by source, because MemoryIndex doesn't support stored fields.
+                    highlight.globalForceSource(true);
+                    QueryShardContext shardContext = new QueryShardContext(context.getQueryShardContext());
+                    shardContext.freezeContext();
+                    shardContext.lookup().source().setSegmentAndDocument(percolatorLeafReaderContext, slot);
+                    shardContext.lookup().source().setSource(document);
                     hitContext.reset(
                         new SearchHit(slot, "unknown", new Text(hit.getType()), Collections.emptyMap()),
                         percolatorLeafReaderContext, slot, percolatorIndexSearcher
                     );
                     hitContext.cache().clear();
-                    highlightPhase.hitExecute(subSearchContext, hitContext);
+                    highlightPhase.hitExecute(context.shardTarget(), shardContext, query, highlight, hitContext);
                     for (Map.Entry<String, HighlightField> entry : hitContext.hit().getHighlightFields().entrySet()) {
                         if (percolateQuery.getDocuments().size() == 1) {
                             String hlFieldName;
@@ -166,15 +169,4 @@ final class PercolatorHighlightSubFetchPhase implements FetchSubPhase {
         }
         return Collections.emptyList();
     }
-
-    private SubSearchContext createSubSearchContext(SearchContext context, LeafReaderContext leafReaderContext,
-                                                    BytesReference source, int docId) {
-        SubSearchContext subSearchContext = new SubSearchContext(context);
-        subSearchContext.highlight(new SearchContextHighlight(context.highlight().fields()));
-        // Enforce highlighting by source, because MemoryIndex doesn't support stored fields.
-        subSearchContext.highlight().globalForceSource(true);
-        subSearchContext.lookup().source().setSegmentAndDocument(leafReaderContext, docId);
-        subSearchContext.lookup().source().setSource(source);
-        return subSearchContext;
-    }
 }

AnnotatedTextHighlighter.java

@@ -26,9 +26,9 @@ import org.elasticsearch.index.mapper.DocumentMapper;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.annotatedtext.AnnotatedTextFieldMapper.AnnotatedHighlighterAnalyzer;
 import org.elasticsearch.index.mapper.annotatedtext.AnnotatedTextFieldMapper.AnnotatedText;
+import org.elasticsearch.index.query.QueryShardContext;
 import org.elasticsearch.search.fetch.FetchSubPhase.HitContext;
 import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight.Field;
-import org.elasticsearch.search.internal.SearchContext;

 import java.io.IOException;
 import java.util.ArrayList;
@@ -45,18 +45,21 @@ public class AnnotatedTextHighlighter extends UnifiedHighlighter {

     // Convert the marked-up values held on-disk to plain-text versions for highlighting
     @Override
-    protected List<Object> loadFieldValues(MappedFieldType fieldType, Field field, SearchContext context, HitContext hitContext)
-            throws IOException {
-        List<Object> fieldValues = super.loadFieldValues(fieldType, field, context, hitContext);
+    protected List<Object> loadFieldValues(MappedFieldType fieldType,
+                                           Field field,
+                                           QueryShardContext context,
+                                           HitContext hitContext,
+                                           boolean forceSource) throws IOException {
+        List<Object> fieldValues = super.loadFieldValues(fieldType, field, context, hitContext, forceSource);
         String[] fieldValuesAsString = fieldValues.toArray(new String[fieldValues.size()]);

         AnnotatedText[] annotations = new AnnotatedText[fieldValuesAsString.length];
         for (int i = 0; i < fieldValuesAsString.length; i++) {
             annotations[i] = AnnotatedText.parse(fieldValuesAsString[i]);
         }
         // Store the annotations in the hitContext
         hitContext.cache().put(AnnotatedText.class.getName(), annotations);

         ArrayList<Object> result = new ArrayList<>(annotations.length);
         for (int i = 0; i < annotations.length; i++) {
             result.add(annotations[i].textMinusMarkup);

InnerHitsFetchSubPhase.java

@@ -45,7 +45,7 @@ public final class InnerHitsFetchSubPhase implements FetchSubPhase {

     @Override
     public void hitsExecute(SearchContext context, SearchHit[] hits) throws IOException {
-        if (context.innerHits().isEmpty()) {
+        if ((context.innerHits() != null && context.innerHits().size() > 0) == false) {
             return;
         }
@@ -72,6 +72,10 @@ public final class InnerHitsFetchSubPhase implements FetchSubPhase {
             }
             innerHits.docIdsToLoad(docIdsToLoad, 0, docIdsToLoad.length);
             innerHits.setUid(new Uid(hit.getType(), hit.getId()));
-            innerHits.lookup().source().setSource(context.lookup().source().internalSourceRef());
+            if (context.lookup().source().source() != null) {
+                innerHits.lookup().source().setSource(context.lookup().source().source());
+            }
             fetchPhase.execute(innerHits);
             FetchSearchResult fetchResult = innerHits.fetchResult();
             SearchHit[] internalHits = fetchResult.fetchResult().hits().getHits();

FastVectorHighlighter.java

@@ -37,11 +37,11 @@ import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.text.Text;
 import org.elasticsearch.index.mapper.MappedFieldType;
+import org.elasticsearch.index.query.QueryShardContext;
 import org.elasticsearch.search.fetch.FetchPhaseExecutionException;
 import org.elasticsearch.search.fetch.FetchSubPhase;
 import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight.Field;
 import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight.FieldOptions;
-import org.elasticsearch.search.internal.SearchContext;

 import java.text.BreakIterator;
 import java.util.Collections;
@@ -69,7 +69,7 @@ public class FastVectorHighlighter implements Highlighter {

     @Override
     public HighlightField highlight(HighlighterContext highlighterContext) {
         SearchContextHighlight.Field field = highlighterContext.field;
-        SearchContext context = highlighterContext.context;
+        QueryShardContext context = highlighterContext.context;
         FetchSubPhase.HitContext hitContext = highlighterContext.hitContext;
         MappedFieldType fieldType = highlighterContext.fieldType;
@@ -93,7 +93,7 @@ public class FastVectorHighlighter implements Highlighter {
         BaseFragmentsBuilder fragmentsBuilder;

         final BoundaryScanner boundaryScanner = getBoundaryScanner(field);
-        boolean forceSource = context.highlight().forceSource(field);
+        boolean forceSource = highlighterContext.highlight.forceSource(field);
         if (field.fieldOptions().numberOfFragments() == 0) {
             fragListBuilder = new SingleFragListBuilder();
@@ -203,7 +203,7 @@ public class FastVectorHighlighter implements Highlighter {
             return null;
         } catch (Exception e) {
-            throw new FetchPhaseExecutionException(context.shardTarget(),
+            throw new FetchPhaseExecutionException(highlighterContext.shardTarget,
                 "Failed to highlight field [" + highlighterContext.fieldName + "]", e);
         }
     }

HighlightPhase.java

@@ -25,6 +25,8 @@ import org.elasticsearch.index.mapper.KeywordFieldMapper;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.SourceFieldMapper;
 import org.elasticsearch.index.mapper.TextFieldMapper;
+import org.elasticsearch.index.query.QueryShardContext;
+import org.elasticsearch.search.SearchShardTarget;
 import org.elasticsearch.search.fetch.FetchSubPhase;
 import org.elasticsearch.search.internal.SearchContext;
@@ -45,17 +47,25 @@ public class HighlightPhase implements FetchSubPhase {
         if (context.highlight() == null) {
             return;
         }
+        hitExecute(context.shardTarget(), context.getQueryShardContext(), context.parsedQuery().query(), context.highlight(), hitContext);
+    }
+
+    public void hitExecute(SearchShardTarget shardTarget,
+                           QueryShardContext context,
+                           Query query,
+                           SearchContextHighlight highlight,
+                           HitContext hitContext) {
         Map<String, HighlightField> highlightFields = new HashMap<>();
-        for (SearchContextHighlight.Field field : context.highlight().fields()) {
+        for (SearchContextHighlight.Field field : highlight.fields()) {
             Collection<String> fieldNamesToHighlight;
             if (Regex.isSimpleMatchPattern(field.field())) {
-                fieldNamesToHighlight = context.mapperService().simpleMatchToFullName(field.field());
+                fieldNamesToHighlight = context.getMapperService().simpleMatchToFullName(field.field());
             } else {
                 fieldNamesToHighlight = Collections.singletonList(field.field());
             }
-            if (context.highlight().forceSource(field)) {
-                SourceFieldMapper sourceFieldMapper = context.mapperService().documentMapper(hitContext.hit().getType()).sourceMapper();
+            if (highlight.forceSource(field)) {
+                SourceFieldMapper sourceFieldMapper = context.getMapperService().documentMapper(hitContext.hit().getType()).sourceMapper();
                 if (!sourceFieldMapper.enabled()) {
                     throw new IllegalArgumentException("source is forced for fields " + fieldNamesToHighlight
                         + " but type [" + hitContext.hit().getType() + "] has disabled _source");
@@ -64,7 +74,7 @@ public class HighlightPhase implements FetchSubPhase {
             boolean fieldNameContainsWildcards = field.field().contains("*");
             for (String fieldName : fieldNamesToHighlight) {
-                MappedFieldType fieldType = context.mapperService().fullName(fieldName);
+                MappedFieldType fieldType = context.getMapperService().fullName(fieldName);
                 if (fieldType == null) {
                     continue;
                 }
@@ -90,15 +100,15 @@ public class HighlightPhase implements FetchSubPhase {
                 Highlighter highlighter = highlighters.get(highlighterType);
                 if (highlighter == null) {
                     throw new IllegalArgumentException("unknown highlighter type [" + highlighterType
-                        + "] for the field [" + fieldName + "]");
+                            + "] for the field [" + fieldName + "]");
                 }
                 Query highlightQuery = field.fieldOptions().highlightQuery();
                 if (highlightQuery == null) {
-                    highlightQuery = context.parsedQuery().query();
+                    highlightQuery = query;
                 }
                 HighlighterContext highlighterContext = new HighlighterContext(fieldType.name(),
-                    field, fieldType, context, hitContext, highlightQuery);
+                    field, fieldType, shardTarget, context, highlight, hitContext, highlightQuery);

                 if ((highlighter.canHighlight(fieldType) == false) && fieldNameContainsWildcards) {
                     // if several fieldnames matched the wildcard then we want to skip those that we cannot highlight

HighlightUtils.java

@@ -23,8 +23,8 @@ import org.apache.lucene.search.highlight.Encoder;
 import org.apache.lucene.search.highlight.SimpleHTMLEncoder;
 import org.elasticsearch.index.fieldvisitor.CustomFieldsVisitor;
 import org.elasticsearch.index.mapper.MappedFieldType;
+import org.elasticsearch.index.query.QueryShardContext;
 import org.elasticsearch.search.fetch.FetchSubPhase;
-import org.elasticsearch.search.internal.SearchContext;
 import org.elasticsearch.search.lookup.SourceLookup;

 import java.io.IOException;
@@ -46,14 +46,13 @@ public final class HighlightUtils {
     /**
      * Load field values for highlighting.
      */
-    public static List<Object> loadFieldValues(SearchContextHighlight.Field field,
-                                               MappedFieldType fieldType,
-                                               SearchContext searchContext,
-                                               FetchSubPhase.HitContext hitContext) throws IOException {
+    public static List<Object> loadFieldValues(MappedFieldType fieldType,
+                                               QueryShardContext context,
+                                               FetchSubPhase.HitContext hitContext,
+                                               boolean forceSource) throws IOException {
         //percolator needs to always load from source, thus it sets the global force source to true
-        boolean forceSource = searchContext.highlight().forceSource(field);
         List<Object> textsToHighlight;
-        if (!forceSource && fieldType.stored()) {
+        if (forceSource == false && fieldType.stored()) {
             CustomFieldsVisitor fieldVisitor = new CustomFieldsVisitor(singleton(fieldType.name()), false);
             hitContext.reader().document(hitContext.docId(), fieldVisitor);
             textsToHighlight = fieldVisitor.fields().get(fieldType.name());
@@ -62,7 +61,7 @@ public final class HighlightUtils {
                 textsToHighlight = Collections.emptyList();
             }
         } else {
-            SourceLookup sourceLookup = searchContext.lookup().source();
+            SourceLookup sourceLookup = context.lookup().source();
             sourceLookup.setSegmentAndDocument(hitContext.readerContext(), hitContext.docId());
             textsToHighlight = sourceLookup.extractRawValues(fieldType.name());
         }

HighlighterContext.java

@@ -20,28 +20,35 @@ package org.elasticsearch.search.fetch.subphase.highlight;

 import org.apache.lucene.search.Query;
 import org.elasticsearch.index.mapper.MappedFieldType;
+import org.elasticsearch.index.query.QueryShardContext;
+import org.elasticsearch.search.SearchShardTarget;
 import org.elasticsearch.search.fetch.FetchSubPhase;
-import org.elasticsearch.search.internal.SearchContext;

 public class HighlighterContext {

     public final String fieldName;
     public final SearchContextHighlight.Field field;
     public final MappedFieldType fieldType;
-    public final SearchContext context;
+    public final SearchShardTarget shardTarget;
+    public final QueryShardContext context;
+    public final SearchContextHighlight highlight;
     public final FetchSubPhase.HitContext hitContext;
     public final Query query;

     public HighlighterContext(String fieldName,
                               SearchContextHighlight.Field field,
                               MappedFieldType fieldType,
-                              SearchContext context,
+                              SearchShardTarget shardTarget,
+                              QueryShardContext context,
+                              SearchContextHighlight highlight,
                               FetchSubPhase.HitContext hitContext,
                               Query query) {
         this.fieldName = fieldName;
         this.field = field;
         this.fieldType = fieldType;
+        this.shardTarget = shardTarget;
         this.context = context;
+        this.highlight = highlight;
         this.hitContext = hitContext;
         this.query = query;
     }

PlainHighlighter.java

@@ -37,9 +37,9 @@ import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.common.text.Text;
 import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.index.mapper.MappedFieldType;
+import org.elasticsearch.index.query.QueryShardContext;
 import org.elasticsearch.search.fetch.FetchPhaseExecutionException;
 import org.elasticsearch.search.fetch.FetchSubPhase;
-import org.elasticsearch.search.internal.SearchContext;

 import java.io.IOException;
 import java.util.ArrayList;
@@ -56,7 +56,7 @@ public class PlainHighlighter implements Highlighter {

     @Override
     public HighlightField highlight(HighlighterContext highlighterContext) {
         SearchContextHighlight.Field field = highlighterContext.field;
-        SearchContext context = highlighterContext.context;
+        QueryShardContext context = highlighterContext.context;
         FetchSubPhase.HitContext hitContext = highlighterContext.hitContext;
         MappedFieldType fieldType = highlighterContext.fieldType;
@@ -101,18 +101,19 @@ public class PlainHighlighter implements Highlighter {
         int numberOfFragments = field.fieldOptions().numberOfFragments() == 0 ? 1 : field.fieldOptions().numberOfFragments();
         ArrayList<TextFragment> fragsList = new ArrayList<>();
         List<Object> textsToHighlight;
-        Analyzer analyzer = context.mapperService().documentMapper(hitContext.hit().getType()).mappers().indexAnalyzer();
-        final int maxAnalyzedOffset = context.indexShard().indexSettings().getHighlightMaxAnalyzedOffset();
+        Analyzer analyzer = context.getMapperService().documentMapper(hitContext.hit().getType()).mappers().indexAnalyzer();
+        final int maxAnalyzedOffset = context.getIndexSettings().getHighlightMaxAnalyzedOffset();

         try {
-            textsToHighlight = HighlightUtils.loadFieldValues(field, fieldType, context, hitContext);
+            textsToHighlight = HighlightUtils.loadFieldValues(fieldType, context, hitContext,
+                highlighterContext.highlight.forceSource(field));

             for (Object textToHighlight : textsToHighlight) {
                 String text = convertFieldValue(fieldType, textToHighlight);
                 if (text.length() > maxAnalyzedOffset) {
                     throw new IllegalArgumentException(
                         "The length of [" + highlighterContext.fieldName + "] field of [" + hitContext.hit().getId() +
-                            "] doc of [" + context.indexShard().shardId().getIndexName() + "] index " +
+                            "] doc of [" + context.index().getName() + "] index " +
                             "has exceeded [" + maxAnalyzedOffset + "] - maximum allowed to be analyzed for highlighting. " +
                             "This maximum can be set by changing the [" + IndexSettings.MAX_ANALYZED_OFFSET_SETTING.getKey() +
                             "] index level setting. " + "For large texts, indexing with offsets or term vectors, and highlighting " +
@@ -139,7 +140,7 @@ public class PlainHighlighter implements Highlighter {
                 // the plain highlighter will parse the source and try to analyze it.
                 return null;
             } else {
-                throw new FetchPhaseExecutionException(context.shardTarget(),
+                throw new FetchPhaseExecutionException(highlighterContext.shardTarget,
                     "Failed to highlight field [" + highlighterContext.fieldName + "]", e);
             }
         }
@@ -179,7 +180,7 @@ public class PlainHighlighter implements Highlighter {
         try {
             end = findGoodEndForNoHighlightExcerpt(noMatchSize, analyzer, fieldType.name(), fieldContents);
         } catch (Exception e) {
-            throw new FetchPhaseExecutionException(context.shardTarget(),
+            throw new FetchPhaseExecutionException(highlighterContext.shardTarget,
                 "Failed to highlight field [" + highlighterContext.fieldName + "]", e);
         }
         if (end > 0) {

SourceScoreOrderFragmentsBuilder.java

@@ -27,7 +27,7 @@ import org.apache.lucene.search.vectorhighlight.BoundaryScanner;
 import org.apache.lucene.search.vectorhighlight.FieldFragList.WeightedFragInfo;
 import org.apache.lucene.search.vectorhighlight.ScoreOrderFragmentsBuilder;
 import org.elasticsearch.index.mapper.MappedFieldType;
-import org.elasticsearch.search.internal.SearchContext;
+import org.elasticsearch.index.query.QueryShardContext;
 import org.elasticsearch.search.lookup.SourceLookup;

 import java.io.IOException;
@@ -37,22 +37,22 @@ public class SourceScoreOrderFragmentsBuilder extends ScoreOrderFragmentsBuilder

     private final MappedFieldType fieldType;
-    private final SearchContext searchContext;
+    private final QueryShardContext context;

     public SourceScoreOrderFragmentsBuilder(MappedFieldType fieldType,
-                                            SearchContext searchContext,
+                                            QueryShardContext context,
                                             String[] preTags,
                                             String[] postTags,
                                             BoundaryScanner boundaryScanner) {
         super(preTags, postTags, boundaryScanner);
         this.fieldType = fieldType;
-        this.searchContext = searchContext;
+        this.context = context;
     }

     @Override
     protected Field[] getFields(IndexReader reader, int docId, String fieldName) throws IOException {
         // we know its low level reader, and matching docId, since that's how we call the highlighter with
-        SourceLookup sourceLookup = searchContext.lookup().source();
+        SourceLookup sourceLookup = context.lookup().source();
         sourceLookup.setSegmentAndDocument((LeafReaderContext) reader.getContext(), docId);
         List<Object> values = sourceLookup.extractRawValues(fieldType.name());

SourceSimpleFragmentsBuilder.java

@@ -24,7 +24,7 @@ import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.search.vectorhighlight.BoundaryScanner;
 import org.elasticsearch.index.mapper.MappedFieldType;
-import org.elasticsearch.search.internal.SearchContext;
+import org.elasticsearch.index.query.QueryShardContext;
 import org.elasticsearch.search.lookup.SourceLookup;

 import java.io.IOException;
@@ -32,15 +32,15 @@ import java.util.List;

 public class SourceSimpleFragmentsBuilder extends SimpleFragmentsBuilder {

-    private final SearchContext searchContext;
+    private final QueryShardContext context;

     public SourceSimpleFragmentsBuilder(MappedFieldType fieldType,
-                                        SearchContext searchContext,
+                                        QueryShardContext context,
                                         String[] preTags,
                                         String[] postTags,
                                         BoundaryScanner boundaryScanner) {
         super(fieldType, preTags, postTags, boundaryScanner);
-        this.searchContext = searchContext;
+        this.context = context;
     }

     public static final Field[] EMPTY_FIELDS = new Field[0];
@@ -48,7 +48,7 @@ public class SourceSimpleFragmentsBuilder extends SimpleFragmentsBuilder {
     @Override
     protected Field[] getFields(IndexReader reader, int docId, String fieldName) throws IOException {
         // we know its low level reader, and matching docId, since that's how we call the highlighter with
-        SourceLookup sourceLookup = searchContext.lookup().source();
+        SourceLookup sourceLookup = context.lookup().source();
         sourceLookup.setSegmentAndDocument((LeafReaderContext) reader.getContext(), docId);
         List<Object> values = sourceLookup.extractRawValues(fieldType.name());

UnifiedHighlighter.java

@@ -37,10 +37,10 @@ import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.index.mapper.DocumentMapper;
 import org.elasticsearch.index.mapper.IdFieldMapper;
 import org.elasticsearch.index.mapper.MappedFieldType;
+import org.elasticsearch.index.query.QueryShardContext;
 import org.elasticsearch.search.fetch.FetchPhaseExecutionException;
 import org.elasticsearch.search.fetch.FetchSubPhase;
 import org.elasticsearch.search.fetch.FetchSubPhase.HitContext;
-import org.elasticsearch.search.internal.SearchContext;

 import java.io.IOException;
 import java.text.BreakIterator;
@@ -61,18 +61,19 @@ public class UnifiedHighlighter implements Highlighter {
     public HighlightField highlight(HighlighterContext highlighterContext) {
         MappedFieldType fieldType = highlighterContext.fieldType;
         SearchContextHighlight.Field field = highlighterContext.field;
-        SearchContext context = highlighterContext.context;
+        QueryShardContext context = highlighterContext.context;
         FetchSubPhase.HitContext hitContext = highlighterContext.hitContext;
         Encoder encoder = field.fieldOptions().encoder().equals("html") ? HighlightUtils.Encoders.HTML : HighlightUtils.Encoders.DEFAULT;
-        final int maxAnalyzedOffset = context.indexShard().indexSettings().getHighlightMaxAnalyzedOffset();
+        final int maxAnalyzedOffset = context.getIndexSettings().getHighlightMaxAnalyzedOffset();

         List<Snippet> snippets = new ArrayList<>();
         int numberOfFragments;
         try {
-            final Analyzer analyzer = getAnalyzer(context.mapperService().documentMapper(hitContext.hit().getType()),
+            final Analyzer analyzer = getAnalyzer(context.getMapperService().documentMapper(hitContext.hit().getType()),
                 hitContext);
-            List<Object> fieldValues = loadFieldValues(fieldType, field, context, hitContext);
+            List<Object> fieldValues = loadFieldValues(fieldType, field, context, hitContext,
+                highlighterContext.highlight.forceSource(field));
             if (fieldValues.size() == 0) {
                 return null;
             }
@@ -84,7 +85,7 @@ public class UnifiedHighlighter implements Highlighter {
                 if ((offsetSource == OffsetSource.ANALYSIS) && (fieldValue.length() > maxAnalyzedOffset)) {
                     throw new IllegalArgumentException(
                         "The length of [" + highlighterContext.fieldName + "] field of [" + hitContext.hit().getId() +
-                            "] doc of [" + context.indexShard().shardId().getIndexName() + "] index " + "has exceeded [" +
+                            "] doc of [" + context.index().getName() + "] index " + "has exceeded [" +
                             maxAnalyzedOffset + "] - maximum allowed to be analyzed for highlighting. " +
                             "This maximum can be set by changing the [" + IndexSettings.MAX_ANALYZED_OFFSET_SETTING.getKey() +
                             "] index level setting. " + "For large texts, indexing with offsets or term vectors is recommended!");
@@ -123,7 +124,7 @@ public class UnifiedHighlighter implements Highlighter {
                 }
             }
         } catch (IOException e) {
-            throw new FetchPhaseExecutionException(context.shardTarget(),
+            throw new FetchPhaseExecutionException(highlighterContext.shardTarget,
                 "Failed to highlight field [" + highlighterContext.fieldName + "]", e);
         }
@@ -153,10 +154,13 @@ public class UnifiedHighlighter implements Highlighter {
     protected Analyzer getAnalyzer(DocumentMapper docMapper, HitContext hitContext) {
         return docMapper.mappers().indexAnalyzer();
     }

-    protected List<Object> loadFieldValues(MappedFieldType fieldType, SearchContextHighlight.Field field, SearchContext context,
-                                           FetchSubPhase.HitContext hitContext) throws IOException {
-        List<Object> fieldValues = HighlightUtils.loadFieldValues(field, fieldType, context, hitContext);
+    protected List<Object> loadFieldValues(MappedFieldType fieldType,
+                                           SearchContextHighlight.Field field,
+                                           QueryShardContext context,
+                                           FetchSubPhase.HitContext hitContext,
+                                           boolean forceSource) throws IOException {
+        List<Object> fieldValues = HighlightUtils.loadFieldValues(fieldType, context, hitContext, forceSource);
         fieldValues = fieldValues.stream()
             .map((s) -> convertFieldValue(fieldType, s))
             .collect(Collectors.toList());

SubSearchContext.java

@@ -31,6 +31,7 @@ import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext;
 import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
 import org.elasticsearch.search.fetch.subphase.ScriptFieldsContext;
 import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight;
+import org.elasticsearch.search.lookup.SearchLookup;
 import org.elasticsearch.search.query.QuerySearchResult;
 import org.elasticsearch.search.rescore.RescoreContext;
 import org.elasticsearch.search.sort.SortAndFormats;
@@ -381,4 +382,9 @@ public class SubSearchContext extends FilteredSearchContext {
     public void innerHits(Map<String, InnerHitContextBuilder> innerHits) {
         this.innerHits = innerHits;
     }
+
+    @Override
+    public SearchLookup lookup() {
+        return queryShardContext.lookup();
+    }
 }
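
A note on the SubSearchContext#lookup override above: highlighters now read _source through QueryShardContext.lookup() (see HighlightUtils and the fragments builders), so the sub context and its query shard context must share a single SearchLookup; source set through one must be visible through the other. A compressed, self-contained illustration of that aliasing (hypothetical stand-in types, not the real Elasticsearch classes):

// Stand-in types mimicking the delegation introduced in SubSearchContext#lookup.
final class LookupSharingDemo {
    static final class SourceLookup {
        private Object source;
        void setSource(Object source) { this.source = source; }
        Object source() { return source; }
    }
    static final class SearchLookup {
        private final SourceLookup source = new SourceLookup();
        SourceLookup source() { return source; }
    }
    static final class QueryShardContext {
        private final SearchLookup lookup = new SearchLookup();
        SearchLookup lookup() { return lookup; }
    }
    static final class SubSearchContext {
        final QueryShardContext queryShardContext = new QueryShardContext();
        // As in the diff above: delegate rather than hold a second SearchLookup.
        SearchLookup lookup() { return queryShardContext.lookup(); }
    }

    public static void main(String[] args) {
        SubSearchContext sub = new SubSearchContext();
        // Source set through the sub context (as InnerHitsFetchSubPhase does)...
        sub.lookup().source().setSource("{\"field\":\"value\"}");
        // ...is visible to a highlighter reading through the QueryShardContext.
        System.out.println(sub.queryShardContext.lookup().source().source());
    }
}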