Minor clean-up around search highlight context. (#60422)

* Rename SearchContextHighlight -> SearchHighlightContext.
* Rename HighlighterContext -> FieldHighlightContext.
* Make the search highlight context immutable.
* Avoid storing SearchHighlightContext on HighlighterContext.
Julie Tibshirani 2020-07-29 11:39:17 -07:00 committed by GitHub
parent 85fdf959ad
commit 5359417ec3
18 changed files with 125 additions and 129 deletions
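
The net effect on call sites, sketched from the hunks below (the names follow the percolator and HighlightPhase changes in this diff):

    // Before: the highlight context was mutable and force-source was toggled after construction.
    SearchContextHighlight highlight = new SearchContextHighlight(context.highlight().fields());
    highlight.globalForceSource(true);

    // After: the flag is supplied at construction time and the context stays immutable.
    SearchHighlightContext highlight = new SearchHighlightContext(context.highlight().fields(), true);

    // Highlighter implementations now read force-source from the per-field context,
    // so FieldHighlightContext no longer stores the SearchHighlightContext itself.
    boolean forceSource = highlight.forceSource(field);
    FieldHighlightContext fieldContext = new FieldHighlightContext(fieldType.name(),
        field, fieldType, shardTarget, context, hitContext, highlightQuery, forceSource);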

View File

@ -33,7 +33,7 @@ import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightField;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightPhase;
import org.elasticsearch.search.fetch.subphase.highlight.Highlighter;
import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight;
import org.elasticsearch.search.fetch.subphase.highlight.SearchHighlightContext;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
@ -94,9 +94,8 @@ final class PercolatorHighlightSubFetchPhase implements FetchSubPhase {
for (Object matchedSlot : field.getValues()) {
int slot = (int) matchedSlot;
BytesReference document = percolateQuery.getDocuments().get(slot);
SearchContextHighlight highlight = new SearchContextHighlight(context.highlight().fields());
// Enforce highlighting by source, because MemoryIndex doesn't support stored fields.
highlight.globalForceSource(true);
SearchHighlightContext highlight = new SearchHighlightContext(context.highlight().fields(), true);
QueryShardContext shardContext = new QueryShardContext(context.getQueryShardContext());
shardContext.freezeContext();
shardContext.lookup().source().setSegmentAndDocument(percolatorLeafReaderContext, slot);

View File

@ -28,7 +28,7 @@ import org.apache.lucene.search.MatchAllDocsQuery;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery;
import org.elasticsearch.common.lucene.search.function.RandomScoreFunction;
import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight;
import org.elasticsearch.search.fetch.subphase.highlight.SearchHighlightContext;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.ESTestCase;
import org.mockito.Mockito;
@ -49,7 +49,7 @@ public class PercolatorHighlightSubFetchPhaseTests extends ESTestCase {
new MatchAllDocsQuery(), Mockito.mock(IndexSearcher.class), null, new MatchAllDocsQuery());
PercolatorHighlightSubFetchPhase subFetchPhase = new PercolatorHighlightSubFetchPhase(emptyMap());
SearchContext searchContext = Mockito.mock(SearchContext.class);
Mockito.when(searchContext.highlight()).thenReturn(new SearchContextHighlight(Collections.emptyList()));
Mockito.when(searchContext.highlight()).thenReturn(new SearchHighlightContext(Collections.emptyList()));
Mockito.when(searchContext.query()).thenReturn(new MatchAllDocsQuery());
assertThat(subFetchPhase.hitsExecutionNeeded(searchContext), is(false));

View File

@ -28,16 +28,16 @@ import org.elasticsearch.index.mapper.annotatedtext.AnnotatedTextFieldMapper.Ann
import org.elasticsearch.index.mapper.annotatedtext.AnnotatedTextFieldMapper.AnnotatedText;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.fetch.FetchSubPhase.HitContext;
import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight.Field;
import org.elasticsearch.search.fetch.subphase.highlight.SearchHighlightContext.Field;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
public class AnnotatedTextHighlighter extends UnifiedHighlighter {
public static final String NAME = "annotated";
@Override
protected Analyzer getAnalyzer(DocumentMapper docMapper, HitContext hitContext) {
return new AnnotatedHighlighterAnalyzer(super.getAnalyzer(docMapper, hitContext), hitContext);
@ -68,7 +68,7 @@ public class AnnotatedTextHighlighter extends UnifiedHighlighter {
}
@Override
protected PassageFormatter getPassageFormatter(HitContext hitContext,SearchContextHighlight.Field field, Encoder encoder) {
protected PassageFormatter getPassageFormatter(HitContext hitContext, SearchHighlightContext.Field field, Encoder encoder) {
// Retrieve the annotations from the hitContext
AnnotatedText[] annotations = (AnnotatedText[]) hitContext.cache().get(AnnotatedText.class.getName());
return new AnnotatedPassageFormatter(annotations, encoder);

View File

@ -61,7 +61,7 @@ import org.elasticsearch.search.fetch.subphase.FetchDocValuesContext;
import org.elasticsearch.search.fetch.subphase.FetchFieldsContext;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.elasticsearch.search.fetch.subphase.ScriptFieldsContext;
import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight;
import org.elasticsearch.search.fetch.subphase.highlight.SearchHighlightContext;
import org.elasticsearch.search.internal.ContextIndexSearcher;
import org.elasticsearch.search.internal.ScrollContext;
import org.elasticsearch.search.internal.SearchContext;
@ -146,7 +146,7 @@ final class DefaultSearchContext extends SearchContext {
private int docsIdsToLoadFrom;
private int docsIdsToLoadSize;
private SearchContextAggregations aggregations;
private SearchContextHighlight highlight;
private SearchHighlightContext highlight;
private SuggestionSearchContext suggest;
private List<RescoreContext> rescore;
private volatile long keepAlive;
@ -395,12 +395,12 @@ final class DefaultSearchContext extends SearchContext {
}
@Override
public SearchContextHighlight highlight() {
public SearchHighlightContext highlight() {
return highlight;
}
@Override
public void highlight(SearchContextHighlight highlight) {
public void highlight(SearchHighlightContext highlight) {
this.highlight = highlight;
}

View File

@ -42,8 +42,8 @@ import org.elasticsearch.index.mapper.TextSearchInfo;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.fetch.FetchPhaseExecutionException;
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight.Field;
import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight.FieldOptions;
import org.elasticsearch.search.fetch.subphase.highlight.SearchHighlightContext.Field;
import org.elasticsearch.search.fetch.subphase.highlight.SearchHighlightContext.FieldOptions;
import java.text.BreakIterator;
import java.util.Collections;
@ -69,14 +69,15 @@ public class FastVectorHighlighter implements Highlighter {
}
@Override
public HighlightField highlight(HighlighterContext highlighterContext) {
SearchContextHighlight.Field field = highlighterContext.field;
QueryShardContext context = highlighterContext.context;
FetchSubPhase.HitContext hitContext = highlighterContext.hitContext;
MappedFieldType fieldType = highlighterContext.fieldType;
public HighlightField highlight(FieldHighlightContext fieldContext) {
SearchHighlightContext.Field field = fieldContext.field;
QueryShardContext context = fieldContext.context;
FetchSubPhase.HitContext hitContext = fieldContext.hitContext;
MappedFieldType fieldType = fieldContext.fieldType;
boolean forceSource = fieldContext.forceSource;
if (canHighlight(fieldType) == false) {
throw new IllegalArgumentException("the field [" + highlighterContext.fieldName +
throw new IllegalArgumentException("the field [" + fieldContext.fieldName +
"] should be indexed with term vector with position offsets to be used with fast vector highlighter");
}
@ -96,7 +97,6 @@ public class FastVectorHighlighter implements Highlighter {
BaseFragmentsBuilder fragmentsBuilder;
final BoundaryScanner boundaryScanner = getBoundaryScanner(field);
boolean forceSource = highlighterContext.highlight.forceSource(field);
if (field.fieldOptions().numberOfFragments() == 0) {
fragListBuilder = new SingleFragListBuilder();
@ -136,14 +136,14 @@ public class FastVectorHighlighter implements Highlighter {
* we use top level reader to rewrite the query against all readers,
* with use caching it across hits (and across readers...)
*/
entry.fieldMatchFieldQuery = new CustomFieldQuery(highlighterContext.query,
entry.fieldMatchFieldQuery = new CustomFieldQuery(fieldContext.query,
hitContext.topLevelReader(), true, field.fieldOptions().requireFieldMatch());
} else {
/**
* we use top level reader to rewrite the query against all readers,
* with use caching it across hits (and across readers...)
*/
entry.noFieldMatchFieldQuery = new CustomFieldQuery(highlighterContext.query,
entry.noFieldMatchFieldQuery = new CustomFieldQuery(fieldContext.query,
hitContext.topLevelReader(), true, field.fieldOptions().requireFieldMatch());
}
entry.fragListBuilder = fragListBuilder;
@ -186,10 +186,10 @@ public class FastVectorHighlighter implements Highlighter {
}
if (CollectionUtils.isEmpty(fragments) == false) {
return new HighlightField(highlighterContext.fieldName, Text.convertFromStringArray(fragments));
return new HighlightField(fieldContext.fieldName, Text.convertFromStringArray(fragments));
}
int noMatchSize = highlighterContext.field.fieldOptions().noMatchSize();
int noMatchSize = fieldContext.field.fieldOptions().noMatchSize();
if (noMatchSize > 0) {
// Essentially we just request that a fragment is built from 0 to noMatchSize using
// the normal fragmentsBuilder
@ -199,15 +199,15 @@ public class FastVectorHighlighter implements Highlighter {
fieldType.name(), fieldFragList, 1, field.fieldOptions().preTags(),
field.fieldOptions().postTags(), encoder);
if (CollectionUtils.isEmpty(fragments) == false) {
return new HighlightField(highlighterContext.fieldName, Text.convertFromStringArray(fragments));
return new HighlightField(fieldContext.fieldName, Text.convertFromStringArray(fragments));
}
}
return null;
} catch (Exception e) {
throw new FetchPhaseExecutionException(highlighterContext.shardTarget,
"Failed to highlight field [" + highlighterContext.fieldName + "]", e);
throw new FetchPhaseExecutionException(fieldContext.shardTarget,
"Failed to highlight field [" + fieldContext.fieldName + "]", e);
}
}

View File

@ -24,32 +24,32 @@ import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.search.fetch.FetchSubPhase;
public class HighlighterContext {
public class FieldHighlightContext {
public final String fieldName;
public final SearchContextHighlight.Field field;
public final SearchHighlightContext.Field field;
public final MappedFieldType fieldType;
public final SearchShardTarget shardTarget;
public final QueryShardContext context;
public final SearchContextHighlight highlight;
public final FetchSubPhase.HitContext hitContext;
public final Query query;
public final boolean forceSource;
public HighlighterContext(String fieldName,
SearchContextHighlight.Field field,
MappedFieldType fieldType,
SearchShardTarget shardTarget,
QueryShardContext context,
SearchContextHighlight highlight,
FetchSubPhase.HitContext hitContext,
Query query) {
public FieldHighlightContext(String fieldName,
SearchHighlightContext.Field field,
MappedFieldType fieldType,
SearchShardTarget shardTarget,
QueryShardContext context,
FetchSubPhase.HitContext hitContext,
Query query,
boolean forceSource) {
this.fieldName = fieldName;
this.field = field;
this.fieldType = fieldType;
this.shardTarget = shardTarget;
this.context = context;
this.highlight = highlight;
this.hitContext = hitContext;
this.query = query;
this.forceSource = forceSource;
}
}

View File

@ -33,7 +33,7 @@ import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryRewriteContext;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.Rewriteable;
import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight.FieldOptions;
import org.elasticsearch.search.fetch.subphase.highlight.SearchHighlightContext.FieldOptions;
import java.io.IOException;
import java.util.ArrayList;
@ -92,7 +92,7 @@ public class HighlightBuilder extends AbstractHighlighterBuilder<HighlightBuilde
/**
* a {@link FieldOptions} with default settings
*/
static final FieldOptions defaultOptions = new SearchContextHighlight.FieldOptions.Builder()
static final FieldOptions defaultOptions = new SearchHighlightContext.FieldOptions.Builder()
.preTags(DEFAULT_PRE_TAGS).postTags(DEFAULT_POST_TAGS).scoreOrdered(DEFAULT_SCORE_ORDERED)
.highlightFilter(DEFAULT_HIGHLIGHT_FILTER).requireFieldMatch(DEFAULT_REQUIRE_FIELD_MATCH)
.forceSource(DEFAULT_FORCE_SOURCE).fragmentCharSize(DEFAULT_FRAGMENT_CHAR_SIZE)
@ -273,9 +273,9 @@ public class HighlightBuilder extends AbstractHighlighterBuilder<HighlightBuilde
return PARSER.apply(p, new HighlightBuilder());
}
public SearchContextHighlight build(QueryShardContext context) throws IOException {
public SearchHighlightContext build(QueryShardContext context) throws IOException {
// create template global options that are later merged with any partial field options
final SearchContextHighlight.FieldOptions.Builder globalOptionsBuilder = new SearchContextHighlight.FieldOptions.Builder();
final SearchHighlightContext.FieldOptions.Builder globalOptionsBuilder = new SearchHighlightContext.FieldOptions.Builder();
globalOptionsBuilder.encoder(this.encoder);
transferOptions(this, globalOptionsBuilder, context);
@ -283,9 +283,9 @@ public class HighlightBuilder extends AbstractHighlighterBuilder<HighlightBuilde
globalOptionsBuilder.merge(defaultOptions);
// create field options
Collection<org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight.Field> fieldOptions = new ArrayList<>();
Collection<SearchHighlightContext.Field> fieldOptions = new ArrayList<>();
for (Field field : this.fields) {
final SearchContextHighlight.FieldOptions.Builder fieldOptionsBuilder = new SearchContextHighlight.FieldOptions.Builder();
final SearchHighlightContext.FieldOptions.Builder fieldOptionsBuilder = new SearchHighlightContext.FieldOptions.Builder();
fieldOptionsBuilder.fragmentOffset(field.fragmentOffset);
if (field.matchedFields != null) {
Set<String> matchedFields = new HashSet<>(field.matchedFields.length);
@ -293,10 +293,10 @@ public class HighlightBuilder extends AbstractHighlighterBuilder<HighlightBuilde
fieldOptionsBuilder.matchedFields(matchedFields);
}
transferOptions(field, fieldOptionsBuilder, context);
fieldOptions.add(new SearchContextHighlight.Field(field.name(), fieldOptionsBuilder
fieldOptions.add(new SearchHighlightContext.Field(field.name(), fieldOptionsBuilder
.merge(globalOptionsBuilder.build()).build()));
}
return new SearchContextHighlight(fieldOptions);
return new SearchHighlightContext(fieldOptions);
}
/**
@ -309,7 +309,8 @@ public class HighlightBuilder extends AbstractHighlighterBuilder<HighlightBuilde
*/
@SuppressWarnings({ "rawtypes", "unchecked" })
private static void transferOptions(AbstractHighlighterBuilder highlighterBuilder,
SearchContextHighlight.FieldOptions.Builder targetOptionsBuilder, QueryShardContext context) throws IOException {
SearchHighlightContext.FieldOptions.Builder targetOptionsBuilder,
QueryShardContext context) throws IOException {
if (highlighterBuilder.preTags != null) {
targetOptionsBuilder.preTags(highlighterBuilder.preTags);
}

View File

@ -53,10 +53,10 @@ public class HighlightPhase implements FetchSubPhase {
public void hitExecute(SearchShardTarget shardTarget,
QueryShardContext context,
Query query,
SearchContextHighlight highlight,
SearchHighlightContext highlight,
HitContext hitContext) {
Map<String, HighlightField> highlightFields = new HashMap<>();
for (SearchContextHighlight.Field field : highlight.fields()) {
for (SearchHighlightContext.Field field : highlight.fields()) {
Collection<String> fieldNamesToHighlight;
if (Regex.isSimpleMatchPattern(field.field())) {
fieldNamesToHighlight = context.getMapperService().simpleMatchToFullName(field.field());
@ -107,14 +107,16 @@ public class HighlightPhase implements FetchSubPhase {
if (highlightQuery == null) {
highlightQuery = query;
}
HighlighterContext highlighterContext = new HighlighterContext(fieldType.name(),
field, fieldType, shardTarget, context, highlight, hitContext, highlightQuery);
boolean forceSource = highlight.forceSource(field);
FieldHighlightContext fieldContext = new FieldHighlightContext(fieldType.name(),
field, fieldType, shardTarget, context, hitContext, highlightQuery, forceSource);
if ((highlighter.canHighlight(fieldType) == false) && fieldNameContainsWildcards) {
// if several fieldnames matched the wildcard then we want to skip those that we cannot highlight
continue;
}
HighlightField highlightField = highlighter.highlight(highlighterContext);
HighlightField highlightField = highlighter.highlight(fieldContext);
if (highlightField != null) {
// Note that we make sure to use the original field name in the response. This is because the
// original field could be an alias, and highlighter implementations may instead reference the

View File

@ -25,7 +25,7 @@ import org.elasticsearch.index.mapper.MappedFieldType;
*/
public interface Highlighter {
HighlightField highlight(HighlighterContext highlighterContext);
HighlightField highlight(FieldHighlightContext fieldContext);
boolean canHighlight(MappedFieldType fieldType);
}

View File

@ -55,11 +55,11 @@ public class PlainHighlighter implements Highlighter {
private static final String CACHE_KEY = "highlight-plain";
@Override
public HighlightField highlight(HighlighterContext highlighterContext) {
SearchContextHighlight.Field field = highlighterContext.field;
QueryShardContext context = highlighterContext.context;
FetchSubPhase.HitContext hitContext = highlighterContext.hitContext;
MappedFieldType fieldType = highlighterContext.fieldType;
public HighlightField highlight(FieldHighlightContext fieldContext) {
SearchHighlightContext.Field field = fieldContext.field;
QueryShardContext context = fieldContext.context;
FetchSubPhase.HitContext hitContext = fieldContext.hitContext;
MappedFieldType fieldType = fieldContext.fieldType;
Encoder encoder = field.fieldOptions().encoder().equals("html") ? HighlightUtils.Encoders.HTML : HighlightUtils.Encoders.DEFAULT;
@ -72,7 +72,7 @@ public class PlainHighlighter implements Highlighter {
org.apache.lucene.search.highlight.Highlighter entry = cache.get(fieldType);
if (entry == null) {
QueryScorer queryScorer = new CustomQueryScorer(highlighterContext.query,
QueryScorer queryScorer = new CustomQueryScorer(fieldContext.query,
field.fieldOptions().requireFieldMatch() ? fieldType.name() : null);
queryScorer.setExpandMultiTermQuery(true);
Fragmenter fragmenter;
@ -86,7 +86,7 @@ public class PlainHighlighter implements Highlighter {
fragmenter = new SimpleSpanFragmenter(queryScorer, field.fieldOptions().fragmentCharSize());
} else {
throw new IllegalArgumentException("unknown fragmenter option [" + field.fieldOptions().fragmenter()
+ "] for the field [" + highlighterContext.fieldName + "]");
+ "] for the field [" + fieldContext.fieldName + "]");
}
Formatter formatter = new SimpleHTMLFormatter(field.fieldOptions().preTags()[0], field.fieldOptions().postTags()[0]);
@ -106,14 +106,13 @@ public class PlainHighlighter implements Highlighter {
Integer keywordIgnoreAbove = null;
if (fieldType instanceof KeywordFieldMapper.KeywordFieldType) {
KeywordFieldMapper mapper = (KeywordFieldMapper) context.getMapperService().documentMapper()
.mappers().getMapper(highlighterContext.fieldName);
.mappers().getMapper(fieldContext.fieldName);
keywordIgnoreAbove = mapper.ignoreAbove();
};
final int maxAnalyzedOffset = context.getIndexSettings().getHighlightMaxAnalyzedOffset();
try {
textsToHighlight = HighlightUtils.loadFieldValues(fieldType, context, hitContext,
highlighterContext.highlight.forceSource(field));
textsToHighlight = HighlightUtils.loadFieldValues(fieldType, context, hitContext, fieldContext.forceSource);
for (Object textToHighlight : textsToHighlight) {
String text = convertFieldValue(fieldType, textToHighlight);
@ -123,7 +122,7 @@ public class PlainHighlighter implements Highlighter {
}
if (textLength > maxAnalyzedOffset) {
throw new IllegalArgumentException(
"The length of [" + highlighterContext.fieldName + "] field of [" + hitContext.hit().getId() +
"The length of [" + fieldContext.fieldName + "] field of [" + hitContext.hit().getId() +
"] doc of [" + context.index().getName() + "] index " +
"has exceeded [" + maxAnalyzedOffset + "] - maximum allowed to be analyzed for highlighting. " +
"This maximum can be set by changing the [" + IndexSettings.MAX_ANALYZED_OFFSET_SETTING.getKey() +
@ -151,8 +150,8 @@ public class PlainHighlighter implements Highlighter {
// the plain highlighter will parse the source and try to analyze it.
return null;
} else {
throw new FetchPhaseExecutionException(highlighterContext.shardTarget,
"Failed to highlight field [" + highlighterContext.fieldName + "]", e);
throw new FetchPhaseExecutionException(fieldContext.shardTarget,
"Failed to highlight field [" + fieldContext.fieldName + "]", e);
}
}
if (field.fieldOptions().scoreOrdered()) {
@ -180,10 +179,10 @@ public class PlainHighlighter implements Highlighter {
}
if (fragments.length > 0) {
return new HighlightField(highlighterContext.fieldName, Text.convertFromStringArray(fragments));
return new HighlightField(fieldContext.fieldName, Text.convertFromStringArray(fragments));
}
int noMatchSize = highlighterContext.field.fieldOptions().noMatchSize();
int noMatchSize = fieldContext.field.fieldOptions().noMatchSize();
if (noMatchSize > 0 && textsToHighlight.size() > 0) {
// Pull an excerpt from the beginning of the string but make sure to split the string on a term boundary.
String fieldContents = textsToHighlight.get(0).toString();
@ -191,11 +190,11 @@ public class PlainHighlighter implements Highlighter {
try {
end = findGoodEndForNoHighlightExcerpt(noMatchSize, analyzer, fieldType.name(), fieldContents);
} catch (Exception e) {
throw new FetchPhaseExecutionException(highlighterContext.shardTarget,
"Failed to highlight field [" + highlighterContext.fieldName + "]", e);
throw new FetchPhaseExecutionException(fieldContext.shardTarget,
"Failed to highlight field [" + fieldContext.fieldName + "]", e);
}
if (end > 0) {
return new HighlightField(highlighterContext.fieldName, new Text[] { new Text(fieldContents.substring(0, end)) });
return new HighlightField(fieldContext.fieldName, new Text[] { new Text(fieldContents.substring(0, end)) });
}
}
return null;

View File

@ -30,32 +30,28 @@ import java.util.Locale;
import java.util.Map;
import java.util.Set;
public class SearchContextHighlight {
public class SearchHighlightContext {
private final Map<String, Field> fields;
private final boolean globalForceSource;
private boolean globalForceSource = false;
public SearchHighlightContext(Collection<Field> fields) {
this(fields, false);
}
public SearchContextHighlight(Collection<Field> fields) {
public SearchHighlightContext(Collection<Field> fields, boolean globalForceSource) {
assert fields != null;
this.fields = new LinkedHashMap<String, Field>(fields.size());
this.fields = new LinkedHashMap<>(fields.size());
for (Field field : fields) {
this.fields.put(field.field, field);
}
this.globalForceSource = globalForceSource;
}
public Collection<Field> fields() {
return fields.values();
}
public void globalForceSource(boolean globalForceSource) {
this.globalForceSource = globalForceSource;
}
boolean globalForceSource() {
return this.globalForceSource;
}
public boolean forceSource(Field field) {
if (globalForceSource) {
return true;

View File

@ -59,17 +59,17 @@ public class UnifiedHighlighter implements Highlighter {
}
@Override
public HighlightField highlight(HighlighterContext highlighterContext) {
MappedFieldType fieldType = highlighterContext.fieldType;
SearchContextHighlight.Field field = highlighterContext.field;
QueryShardContext context = highlighterContext.context;
FetchSubPhase.HitContext hitContext = highlighterContext.hitContext;
public HighlightField highlight(FieldHighlightContext fieldContext) {
MappedFieldType fieldType = fieldContext.fieldType;
SearchHighlightContext.Field field = fieldContext.field;
QueryShardContext context = fieldContext.context;
FetchSubPhase.HitContext hitContext = fieldContext.hitContext;
Encoder encoder = field.fieldOptions().encoder().equals("html") ? HighlightUtils.Encoders.HTML : HighlightUtils.Encoders.DEFAULT;
final int maxAnalyzedOffset = context.getIndexSettings().getHighlightMaxAnalyzedOffset();
Integer keywordIgnoreAbove = null;
if (fieldType instanceof KeywordFieldMapper.KeywordFieldType) {
KeywordFieldMapper mapper = (KeywordFieldMapper) context.getMapperService().documentMapper()
.mappers().getMapper(highlighterContext.fieldName);
.mappers().getMapper(fieldContext.fieldName);
keywordIgnoreAbove = mapper.ignoreAbove();
}
@ -77,9 +77,8 @@ public class UnifiedHighlighter implements Highlighter {
int numberOfFragments = field.fieldOptions().numberOfFragments();
try {
final Analyzer analyzer = getAnalyzer(context.getMapperService().documentMapper(hitContext.hit().getType()),
hitContext);
List<Object> fieldValues = loadFieldValues(fieldType, field, context, hitContext,
highlighterContext.highlight.forceSource(field));
hitContext);
List<Object> fieldValues = loadFieldValues(fieldType, field, context, hitContext, fieldContext.forceSource);
if (fieldValues.size() == 0) {
return null;
}
@ -94,7 +93,7 @@ public class UnifiedHighlighter implements Highlighter {
}
if ((offsetSource == OffsetSource.ANALYSIS) && (fieldValueLength > maxAnalyzedOffset)) {
throw new IllegalArgumentException(
"The length of [" + highlighterContext.fieldName + "] field of [" + hitContext.hit().getId() +
"The length of [" + fieldContext.fieldName + "] field of [" + hitContext.hit().getId() +
"] doc of [" + context.index().getName() + "] index " + "has exceeded [" +
maxAnalyzedOffset + "] - maximum allowed to be analyzed for highlighting. " +
"This maximum can be set by changing the [" + IndexSettings.MAX_ANALYZED_OFFSET_SETTING.getKey() +
@ -120,7 +119,7 @@ public class UnifiedHighlighter implements Highlighter {
}
if (field.fieldOptions().requireFieldMatch()) {
final String fieldName = highlighterContext.fieldName;
final String fieldName = fieldContext.fieldName;
highlighter.setFieldMatcher((name) -> fieldName.equals(name));
} else {
// ignore terms that targets the _id field since they use a different encoding
@ -128,16 +127,16 @@ public class UnifiedHighlighter implements Highlighter {
highlighter.setFieldMatcher(name -> IdFieldMapper.NAME.equals(name) == false);
}
Snippet[] fieldSnippets = highlighter.highlightField(highlighterContext.fieldName,
highlighterContext.query, hitContext.docId(), numberOfFragments);
Snippet[] fieldSnippets = highlighter.highlightField(fieldContext.fieldName,
fieldContext.query, hitContext.docId(), numberOfFragments);
for (Snippet fieldSnippet : fieldSnippets) {
if (Strings.hasText(fieldSnippet.getText())) {
snippets.add(fieldSnippet);
}
}
} catch (IOException e) {
throw new FetchPhaseExecutionException(highlighterContext.shardTarget,
"Failed to highlight field [" + highlighterContext.fieldName + "]", e);
throw new FetchPhaseExecutionException(fieldContext.shardTarget,
"Failed to highlight field [" + fieldContext.fieldName + "]", e);
}
if (field.fieldOptions().scoreOrdered()) {
@ -151,12 +150,12 @@ public class UnifiedHighlighter implements Highlighter {
}
if (fragments.length > 0) {
return new HighlightField(highlighterContext.fieldName, Text.convertFromStringArray(fragments));
return new HighlightField(fieldContext.fieldName, Text.convertFromStringArray(fragments));
}
return null;
}
protected PassageFormatter getPassageFormatter(HitContext hitContext, SearchContextHighlight.Field field, Encoder encoder) {
protected PassageFormatter getPassageFormatter(HitContext hitContext, SearchHighlightContext.Field field, Encoder encoder) {
CustomPassageFormatter passageFormatter = new CustomPassageFormatter(field.fieldOptions().preTags()[0],
field.fieldOptions().postTags()[0], encoder);
return passageFormatter;
@ -168,7 +167,7 @@ public class UnifiedHighlighter implements Highlighter {
}
protected List<Object> loadFieldValues(MappedFieldType fieldType,
SearchContextHighlight.Field field,
SearchHighlightContext.Field field,
QueryShardContext context,
FetchSubPhase.HitContext hitContext,
boolean forceSource) throws IOException {
@ -179,8 +178,8 @@ public class UnifiedHighlighter implements Highlighter {
return fieldValues;
}
protected BreakIterator getBreakIterator(SearchContextHighlight.Field field) {
final SearchContextHighlight.FieldOptions fieldOptions = field.fieldOptions();
protected BreakIterator getBreakIterator(SearchHighlightContext.Field field) {
final SearchHighlightContext.FieldOptions fieldOptions = field.fieldOptions();
final Locale locale =
fieldOptions.boundaryScannerLocale() != null ? fieldOptions.boundaryScannerLocale() :
Locale.ROOT;

View File

@ -46,7 +46,7 @@ import org.elasticsearch.search.fetch.StoredFieldsContext;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.elasticsearch.search.fetch.subphase.InnerHitsContext;
import org.elasticsearch.search.fetch.subphase.ScriptFieldsContext;
import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight;
import org.elasticsearch.search.fetch.subphase.highlight.SearchHighlightContext;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.profile.Profilers;
import org.elasticsearch.search.query.QuerySearchResult;
@ -166,12 +166,12 @@ public abstract class FilteredSearchContext extends SearchContext {
}
@Override
public SearchContextHighlight highlight() {
public SearchHighlightContext highlight() {
return in.highlight();
}
@Override
public void highlight(SearchContextHighlight highlight) {
public void highlight(SearchHighlightContext highlight) {
in.highlight(highlight);
}

View File

@ -54,7 +54,7 @@ import org.elasticsearch.search.fetch.subphase.FetchFieldsContext;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.elasticsearch.search.fetch.subphase.InnerHitsContext;
import org.elasticsearch.search.fetch.subphase.ScriptFieldsContext;
import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight;
import org.elasticsearch.search.fetch.subphase.highlight.SearchHighlightContext;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.profile.Profilers;
import org.elasticsearch.search.query.QuerySearchResult;
@ -161,9 +161,9 @@ public abstract class SearchContext extends AbstractRefCounted implements Releas
public abstract SearchExtBuilder getSearchExt(String name);
public abstract SearchContextHighlight highlight();
public abstract SearchHighlightContext highlight();
public abstract void highlight(SearchContextHighlight highlight);
public abstract void highlight(SearchHighlightContext highlight);
public InnerHitsContext innerHits() {
if (innerHitsContext == null) {

View File

@ -29,7 +29,7 @@ import org.elasticsearch.search.fetch.subphase.FetchDocValuesContext;
import org.elasticsearch.search.fetch.subphase.FetchFieldsContext;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.elasticsearch.search.fetch.subphase.ScriptFieldsContext;
import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight;
import org.elasticsearch.search.fetch.subphase.highlight.SearchHighlightContext;
import org.elasticsearch.search.query.QuerySearchResult;
import org.elasticsearch.search.rescore.RescoreContext;
import org.elasticsearch.search.sort.SortAndFormats;
@ -61,7 +61,7 @@ public class SubSearchContext extends FilteredSearchContext {
private FetchSourceContext fetchSourceContext;
private FetchDocValuesContext docValuesContext;
private FetchFieldsContext fetchFieldsContext;
private SearchContextHighlight highlight;
private SearchHighlightContext highlight;
private boolean explain;
private boolean trackScores;
@ -98,12 +98,12 @@ public class SubSearchContext extends FilteredSearchContext {
}
@Override
public SearchContextHighlight highlight() {
public SearchHighlightContext highlight() {
return highlight;
}
@Override
public void highlight(SearchContextHighlight highlight) {
public void highlight(SearchHighlightContext highlight) {
this.highlight = highlight;
}

View File

@ -32,13 +32,13 @@ import java.util.Map;
public class CustomHighlighter implements Highlighter {
@Override
public HighlightField highlight(HighlighterContext highlighterContext) {
SearchContextHighlight.Field field = highlighterContext.field;
CacheEntry cacheEntry = (CacheEntry) highlighterContext.hitContext.cache().get("test-custom");
final int docId = highlighterContext.hitContext.readerContext().docBase + highlighterContext.hitContext.docId();
public HighlightField highlight(FieldHighlightContext fieldContext) {
SearchHighlightContext.Field field = fieldContext.field;
CacheEntry cacheEntry = (CacheEntry) fieldContext.hitContext.cache().get("test-custom");
final int docId = fieldContext.hitContext.readerContext().docBase + fieldContext.hitContext.docId();
if (cacheEntry == null) {
cacheEntry = new CacheEntry();
highlighterContext.hitContext.cache().put("test-custom", cacheEntry);
fieldContext.hitContext.cache().put("test-custom", cacheEntry);
cacheEntry.docId = docId;
cacheEntry.position = 1;
} else {
@ -60,7 +60,7 @@ public class CustomHighlighter implements Highlighter {
}
}
return new HighlightField(highlighterContext.fieldName, responses.toArray(new Text[]{}));
return new HighlightField(fieldContext.fieldName, responses.toArray(new Text[]{}));
}
@Override

View File

@ -52,7 +52,7 @@ import org.elasticsearch.search.SearchModule;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder.BoundaryScannerType;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder.Field;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder.Order;
import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight.FieldOptions;
import org.elasticsearch.search.fetch.subphase.highlight.SearchHighlightContext.FieldOptions;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.IndexSettingsModule;
import org.junit.AfterClass;
@ -269,7 +269,7 @@ public class HighlightBuilderTests extends ESTestCase {
}
/**
* test that build() outputs a {@link SearchContextHighlight} that is has similar parameters
* test that build() outputs a {@link SearchHighlightContext} that is has similar parameters
* than what we have in the random {@link HighlightBuilder}
*/
public void testBuildSearchContextHighlight() throws IOException {
@ -292,8 +292,8 @@ public class HighlightBuilderTests extends ESTestCase {
for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) {
HighlightBuilder highlightBuilder = randomHighlighterBuilder();
highlightBuilder = Rewriteable.rewrite(highlightBuilder, mockShardContext);
SearchContextHighlight highlight = highlightBuilder.build(mockShardContext);
for (SearchContextHighlight.Field field : highlight.fields()) {
SearchHighlightContext highlight = highlightBuilder.build(mockShardContext);
for (SearchHighlightContext.Field field : highlight.fields()) {
String encoder = highlightBuilder.encoder() != null ? highlightBuilder.encoder() : HighlightBuilder.DEFAULT_ENCODER;
assertEquals(encoder, field.fieldOptions().encoder());
final Field fieldBuilder = getFieldBuilderByName(highlightBuilder, field.field());

View File

@ -49,7 +49,7 @@ import org.elasticsearch.search.fetch.subphase.FetchDocValuesContext;
import org.elasticsearch.search.fetch.subphase.FetchFieldsContext;
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
import org.elasticsearch.search.fetch.subphase.ScriptFieldsContext;
import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight;
import org.elasticsearch.search.fetch.subphase.highlight.SearchHighlightContext;
import org.elasticsearch.search.internal.ContextIndexSearcher;
import org.elasticsearch.search.internal.ScrollContext;
import org.elasticsearch.search.internal.SearchContext;
@ -207,12 +207,12 @@ public class TestSearchContext extends SearchContext {
}
@Override
public SearchContextHighlight highlight() {
public SearchHighlightContext highlight() {
return null;
}
@Override
public void highlight(SearchContextHighlight highlight) {
public void highlight(SearchHighlightContext highlight) {
}
@Override