parent 4b69846ba2
commit 2f4b759df7
@@ -129,7 +129,7 @@ public class DocumentFieldMappers implements Iterable<FieldMapper> {
             if (Regex.simpleMatch(pattern, fieldMapper.names().fullName())) {
                 fields.add(fieldMapper.names().indexName());
             } else if (Regex.simpleMatch(pattern, fieldMapper.names().indexName())) {
-                fields.add(fieldMapper.names().name());
+                fields.add(fieldMapper.names().indexName());
             } else if (Regex.simpleMatch(pattern, fieldMapper.names().name())) {
                 fields.add(fieldMapper.names().indexName());
             }
@@ -137,6 +137,20 @@ public class DocumentFieldMappers implements Iterable<FieldMapper> {
         return fields;
     }
 
+    public Set<String> simpleMatchToFullName(String pattern) {
+        Set<String> fields = Sets.newHashSet();
+        for (FieldMapper fieldMapper : fieldMappers) {
+            if (Regex.simpleMatch(pattern, fieldMapper.names().fullName())) {
+                fields.add(fieldMapper.names().fullName());
+            } else if (Regex.simpleMatch(pattern, fieldMapper.names().indexName())) {
+                fields.add(fieldMapper.names().fullName());
+            } else if (Regex.simpleMatch(pattern, fieldMapper.names().name())) {
+                fields.add(fieldMapper.names().fullName());
+            }
+        }
+        return fields;
+    }
+
     /**
      * Tries to find first based on {@link #fullName(String)}, then by {@link #indexName(String)}, and last
      * by {@link #name(String)}.
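Taken together, the two hunks above make simpleMatchToIndexNames(...) consistently collect index names, and add simpleMatchToFullName(...), which collects the full (dotted) name no matter which of the three name variants the pattern matched. A minimal, self-contained sketch of that resolution logic, with a hypothetical simpleMatch standing in for org.elasticsearch.common.regex.Regex.simpleMatch (single '*' patterns only):

import java.util.HashSet;
import java.util.Set;

// Hypothetical sketch: simpleMatch() only handles a single '*' wildcard
// ("*", "prefix*", "*suffix", "prefix*suffix").
public class SimpleMatchSketch {

    static boolean simpleMatch(String pattern, String value) {
        int star = pattern.indexOf('*');
        if (star == -1) {
            return pattern.equals(value);
        }
        String prefix = pattern.substring(0, star);
        String suffix = pattern.substring(star + 1);
        return value.length() >= prefix.length() + suffix.length()
                && value.startsWith(prefix)
                && value.endsWith(suffix);
    }

    // Mirrors simpleMatchToFullName: whichever name variant matches, the full
    // name is what gets collected (here the variants are collapsed to one).
    static Set<String> simpleMatchToFullName(String pattern, Set<String> fullNames) {
        Set<String> fields = new HashSet<String>();
        for (String fullName : fullNames) {
            if (simpleMatch(pattern, fullName)) {
                fields.add(fullName);
            }
        }
        return fields;
    }

    public static void main(String[] args) {
        Set<String> mapped = new HashSet<String>();
        mapped.add("field1");
        mapped.add("field2");
        mapped.add("title");
        // Prints [field1, field2] (order unspecified); "title" does not match.
        System.out.println(simpleMatchToFullName("field*", mapped));
    }
}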
@@ -34,6 +34,7 @@ import org.elasticsearch.common.component.AbstractComponent;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.io.FastStringReader;
 import org.elasticsearch.common.lucene.search.vectorhighlight.SimpleBoundaryScanner2;
+import org.elasticsearch.common.regex.Regex;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.text.StringText;
 import org.elasticsearch.index.fieldvisitor.CustomFieldsVisitor;
@@ -109,213 +110,224 @@ public class HighlightPhase extends AbstractComponent implements FetchSubPhase {
             } else {
                 encoder = Encoders.DEFAULT;
             }
-            FieldMapper mapper = documentMapper.mappers().smartNameFieldMapper(field.field());
-            if (mapper == null) {
-                MapperService.SmartNameFieldMappers fullMapper = context.mapperService().smartName(field.field());
-                if (fullMapper == null || !fullMapper.hasDocMapper()) {
-                    //Save skipping missing fields
-                    continue;
-                }
-                if (!fullMapper.docMapper().type().equals(hitContext.hit().type())) {
-                    continue;
-                }
-                mapper = fullMapper.mapper();
-                if (mapper == null) {
-                    continue;
-                }
-            }
+
+            Set<String> fieldNamesToHighlight;
+            if (Regex.isSimpleMatchPattern(field.field())) {
+                fieldNamesToHighlight = documentMapper.mappers().simpleMatchToFullName(field.field());
+            } else {
+                fieldNamesToHighlight = ImmutableSet.of(field.field());
+            }
+
+            for (String fieldName : fieldNamesToHighlight) {
+
+                FieldMapper mapper = documentMapper.mappers().smartNameFieldMapper(fieldName);
+                if (mapper == null) {
+                    MapperService.SmartNameFieldMappers fullMapper = context.mapperService().smartName(fieldName);
+                    if (fullMapper == null || !fullMapper.hasDocMapper()) {
+                        //Save skipping missing fields
+                        continue;
+                    }
+                    if (!fullMapper.docMapper().type().equals(hitContext.hit().type())) {
+                        continue;
+                    }
+                    mapper = fullMapper.mapper();
+                    if (mapper == null) {
+                        continue;
+                    }
+                }
                 boolean useFastVectorHighlighter;
                 if (field.highlighterType() == null) {
                     // if we can do highlighting using Term Vectors, use FastVectorHighlighter, otherwise, use the
                     // slower plain highlighter
                     useFastVectorHighlighter = mapper.fieldType().storeTermVectors() && mapper.fieldType().storeTermVectorOffsets() && mapper.fieldType().storeTermVectorPositions();
                 } else if (field.highlighterType().equals("fast-vector-highlighter") || field.highlighterType().equals("fvh")) {
                     if (!(mapper.fieldType().storeTermVectors() && mapper.fieldType().storeTermVectorOffsets() && mapper.fieldType().storeTermVectorPositions())) {
-                        throw new ElasticSearchIllegalArgumentException("the field [" + field.field() + "] should be indexed with term vector with position offsets to be used with fast vector highlighter");
+                        throw new ElasticSearchIllegalArgumentException("the field [" + fieldName + "] should be indexed with term vector with position offsets to be used with fast vector highlighter");
                     }
                     useFastVectorHighlighter = true;
                 } else if (field.highlighterType().equals("highlighter") || field.highlighterType().equals("plain")) {
                     useFastVectorHighlighter = false;
                 } else {
-                    throw new ElasticSearchIllegalArgumentException("unknown highlighter type [" + field.highlighterType() + "] for the field [" + field.field() + "]");
+                    throw new ElasticSearchIllegalArgumentException("unknown highlighter type [" + field.highlighterType() + "] for the field [" + fieldName + "]");
                 }
                 if (!useFastVectorHighlighter) {
                     MapperHighlightEntry entry = cache.mappers.get(mapper);
                     if (entry == null) {
                         // Don't use the context.query() since it might be rewritten, and we need to pass the non rewritten queries to
                         // let the highlighter handle MultiTerm ones
 
                         Query query = context.parsedQuery().query();
                         QueryScorer queryScorer = new CustomQueryScorer(query, field.requireFieldMatch() ? mapper.names().indexName() : null);
                         queryScorer.setExpandMultiTermQuery(true);
                         Fragmenter fragmenter;
                         if (field.numberOfFragments() == 0) {
                             fragmenter = new NullFragmenter();
                         } else if (field.fragmenter() == null) {
                             fragmenter = new SimpleSpanFragmenter(queryScorer, field.fragmentCharSize());
                         } else if ("simple".equals(field.fragmenter())) {
                             fragmenter = new SimpleFragmenter(field.fragmentCharSize());
                         } else if ("span".equals(field.fragmenter())) {
                             fragmenter = new SimpleSpanFragmenter(queryScorer, field.fragmentCharSize());
                         } else {
-                            throw new ElasticSearchIllegalArgumentException("unknown fragmenter option [" + field.fragmenter() + "] for the field [" + field.field() + "]");
+                            throw new ElasticSearchIllegalArgumentException("unknown fragmenter option [" + field.fragmenter() + "] for the field [" + fieldName + "]");
                         }
                         Formatter formatter = new SimpleHTMLFormatter(field.preTags()[0], field.postTags()[0]);
 
                         entry = new MapperHighlightEntry();
                         entry.highlighter = new Highlighter(formatter, encoder, queryScorer);
                         entry.highlighter.setTextFragmenter(fragmenter);
                         // always highlight across all data
                         entry.highlighter.setMaxDocCharsToAnalyze(Integer.MAX_VALUE);
 
                         cache.mappers.put(mapper, entry);
                     }
 
                     List<Object> textsToHighlight;
                     if (mapper.fieldType().stored()) {
                         try {
                             CustomFieldsVisitor fieldVisitor = new CustomFieldsVisitor(ImmutableSet.of(mapper.names().indexName()), false);
                             hitContext.reader().document(hitContext.docId(), fieldVisitor);
                             textsToHighlight = fieldVisitor.fields().get(mapper.names().indexName());
                         } catch (Exception e) {
-                            throw new FetchPhaseExecutionException(context, "Failed to highlight field [" + field.field() + "]", e);
+                            throw new FetchPhaseExecutionException(context, "Failed to highlight field [" + fieldName + "]", e);
                         }
                     } else {
                         SearchLookup lookup = context.lookup();
                         lookup.setNextReader(hitContext.readerContext());
                         lookup.setNextDocId(hitContext.docId());
                         textsToHighlight = lookup.source().extractRawValues(mapper.names().sourcePath());
                     }
 
                     // a HACK to make highlighter do highlighting, even though its using the single frag list builder
                     int numberOfFragments = field.numberOfFragments() == 0 ? 1 : field.numberOfFragments();
                     ArrayList<TextFragment> fragsList = new ArrayList<TextFragment>();
                     try {
                         for (Object textToHighlight : textsToHighlight) {
                             String text = textToHighlight.toString();
                             Analyzer analyzer = context.mapperService().documentMapper(hitContext.hit().type()).mappers().indexAnalyzer();
                             TokenStream tokenStream = analyzer.tokenStream(mapper.names().indexName(), new FastStringReader(text));
                             TextFragment[] bestTextFragments = entry.highlighter.getBestTextFragments(tokenStream, text, false, numberOfFragments);
                             for (TextFragment bestTextFragment : bestTextFragments) {
                                 if (bestTextFragment != null && bestTextFragment.getScore() > 0) {
                                     fragsList.add(bestTextFragment);
                                 }
                             }
                         }
                     } catch (Exception e) {
-                        throw new FetchPhaseExecutionException(context, "Failed to highlight field [" + field.field() + "]", e);
+                        throw new FetchPhaseExecutionException(context, "Failed to highlight field [" + fieldName + "]", e);
                     }
                     if (field.scoreOrdered()) {
                         Collections.sort(fragsList, new Comparator<TextFragment>() {
                             public int compare(TextFragment o1, TextFragment o2) {
                                 return Math.round(o2.getScore() - o1.getScore());
                             }
                         });
                     }
                     String[] fragments = null;
                     // number_of_fragments is set to 0 but we have a multivalued field
                     if (field.numberOfFragments() == 0 && textsToHighlight.size() > 1 && fragsList.size() > 0) {
                         fragments = new String[fragsList.size()];
                         for (int i = 0; i < fragsList.size(); i++) {
                             fragments[i] = fragsList.get(i).toString();
                         }
                     } else {
                         // refine numberOfFragments if needed
                         numberOfFragments = fragsList.size() < numberOfFragments ? fragsList.size() : numberOfFragments;
                         fragments = new String[numberOfFragments];
                         for (int i = 0; i < fragments.length; i++) {
                             fragments[i] = fragsList.get(i).toString();
                         }
                     }
 
                     if (fragments != null && fragments.length > 0) {
-                        HighlightField highlightField = new HighlightField(field.field(), StringText.convertFromStringArray(fragments));
+                        HighlightField highlightField = new HighlightField(fieldName, StringText.convertFromStringArray(fragments));
                         highlightFields.put(highlightField.name(), highlightField);
                     }
                 } else {
                     try {
                         MapperHighlightEntry entry = cache.mappers.get(mapper);
                         FieldQuery fieldQuery = null;
                         if (entry == null) {
                             FragListBuilder fragListBuilder;
                             AbstractFragmentsBuilder fragmentsBuilder;
 
                             BoundaryScanner boundaryScanner = SimpleBoundaryScanner2.DEFAULT;
                             if (field.boundaryMaxScan() != SimpleBoundaryScanner2.DEFAULT_MAX_SCAN || field.boundaryChars() != SimpleBoundaryScanner2.DEFAULT_BOUNDARY_CHARS) {
                                 boundaryScanner = new SimpleBoundaryScanner2(field.boundaryMaxScan(), field.boundaryChars());
                             }
 
                             if (field.numberOfFragments() == 0) {
                                 fragListBuilder = new SingleFragListBuilder();
 
                                 if (mapper.fieldType().stored()) {
                                     fragmentsBuilder = new XSimpleFragmentsBuilder(field.preTags(), field.postTags(), boundaryScanner);
                                 } else {
                                     fragmentsBuilder = new SourceSimpleFragmentsBuilder(mapper, context, field.preTags(), field.postTags(), boundaryScanner);
                                 }
                             } else {
                                 if (field.fragmentOffset() == -1)
                                     fragListBuilder = new SimpleFragListBuilder();
                                 else
                                     fragListBuilder = new SimpleFragListBuilder(field.fragmentOffset());
 
                                 if (field.scoreOrdered()) {
                                     if (mapper.fieldType().stored()) {
                                         fragmentsBuilder = new XScoreOrderFragmentsBuilder(field.preTags(), field.postTags(), boundaryScanner);
                                     } else {
                                         fragmentsBuilder = new SourceScoreOrderFragmentsBuilder(mapper, context, field.preTags(), field.postTags(), boundaryScanner);
                                     }
                                 } else {
                                     if (mapper.fieldType().stored()) {
                                         fragmentsBuilder = new XSimpleFragmentsBuilder(field.preTags(), field.postTags(), boundaryScanner);
                                     } else {
                                         fragmentsBuilder = new SourceSimpleFragmentsBuilder(mapper, context, field.preTags(), field.postTags(), boundaryScanner);
                                     }
                                 }
                             }
                             fragmentsBuilder.setDiscreteMultiValueHighlighting(termVectorMultiValue);
                             entry = new MapperHighlightEntry();
                             entry.fragListBuilder = fragListBuilder;
                             entry.fragmentsBuilder = fragmentsBuilder;
                             if (cache.fvh == null) {
                                 // parameters to FVH are not requires since:
                                 // first two booleans are not relevant since they are set on the CustomFieldQuery (phrase and fieldMatch)
                                 // fragment builders are used explicitly
                                 cache.fvh = new FastVectorHighlighter();
                             }
                             CustomFieldQuery.highlightFilters.set(field.highlightFilter());
                             if (field.requireFieldMatch()) {
                                 if (cache.fieldMatchFieldQuery == null) {
                                     // we use top level reader to rewrite the query against all readers, with use caching it across hits (and across readers...)
                                     cache.fieldMatchFieldQuery = new CustomFieldQuery(context.parsedQuery().query(), hitContext.topLevelReader(), true, field.requireFieldMatch());
                                 }
                                 fieldQuery = cache.fieldMatchFieldQuery;
                             } else {
                                 if (cache.noFieldMatchFieldQuery == null) {
                                     // we use top level reader to rewrite the query against all readers, with use caching it across hits (and across readers...)
                                     cache.noFieldMatchFieldQuery = new CustomFieldQuery(context.parsedQuery().query(), hitContext.topLevelReader(), true, field.requireFieldMatch());
                                 }
                                 fieldQuery = cache.noFieldMatchFieldQuery;
                             }
                             cache.mappers.put(mapper, entry);
                         }
 
                         String[] fragments;
 
                         // a HACK to make highlighter do highlighting, even though its using the single frag list builder
                         int numberOfFragments = field.numberOfFragments() == 0 ? Integer.MAX_VALUE : field.numberOfFragments();
                         int fragmentCharSize = field.numberOfFragments() == 0 ? Integer.MAX_VALUE : field.fragmentCharSize();
                         // we highlight against the low level reader and docId, because if we load source, we want to reuse it if possible
                         fragments = cache.fvh.getBestFragments(fieldQuery, hitContext.reader(), hitContext.docId(), mapper.names().indexName(), fragmentCharSize, numberOfFragments,
                                 entry.fragListBuilder, entry.fragmentsBuilder, field.preTags(), field.postTags(), encoder);
 
                         if (fragments != null && fragments.length > 0) {
-                            HighlightField highlightField = new HighlightField(field.field(), StringText.convertFromStringArray(fragments));
+                            HighlightField highlightField = new HighlightField(fieldName, StringText.convertFromStringArray(fragments));
                             highlightFields.put(highlightField.name(), highlightField);
                         }
                     } catch (Exception e) {
-                        throw new FetchPhaseExecutionException(context, "Failed to highlight field [" + field.field() + "]", e);
+                        throw new FetchPhaseExecutionException(context, "Failed to highlight field [" + fieldName + "]", e);
                     }
                 }
+            }
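The core of this hunk is that field resolution and highlighting now run once per concrete field name (expanded from the pattern) instead of once per request entry; the highlighter-selection rule itself is unchanged. A standalone sketch of that rule, with an illustrative method signature (not Elasticsearch API) in place of the inline checks on mapper.fieldType():

// Illustrative sketch of the highlighter choice above: the fast vector
// highlighter requires term vectors with positions and offsets; "plain"
// always works but is slower.
public class HighlighterChoice {

    static boolean useFastVectorHighlighter(String requestedType, boolean termVectors,
                                            boolean positions, boolean offsets) {
        boolean fvhCapable = termVectors && positions && offsets;
        if (requestedType == null) {
            // No explicit type: pick FVH when the indexed data allows it.
            return fvhCapable;
        }
        if (requestedType.equals("fast-vector-highlighter") || requestedType.equals("fvh")) {
            if (!fvhCapable) {
                throw new IllegalArgumentException(
                        "field should be indexed with term vector with position offsets to be used with fast vector highlighter");
            }
            return true;
        }
        if (requestedType.equals("highlighter") || requestedType.equals("plain")) {
            return false;
        }
        throw new IllegalArgumentException("unknown highlighter type [" + requestedType + "]");
    }

    public static void main(String[] args) {
        System.out.println(useFastVectorHighlighter(null, true, true, true));    // true  -> FVH
        System.out.println(useFastVectorHighlighter(null, true, true, false));   // false -> plain
        System.out.println(useFastVectorHighlighter("plain", true, true, true)); // false -> plain
    }
}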
@@ -257,6 +257,30 @@ public class HighlighterSearchTests extends AbstractNodesTests {
         assertThat(search.hits().hits()[0].highlightFields().get("titleTV").fragments()[1].string(), equalTo("<em>highlight</em> other text"));
     }
 
+    @Test
+    public void testHighlightingOnWildcardFields() throws Exception {
+        client.admin().indices().prepareDelete().execute().actionGet();
+        client.admin().indices().prepareCreate("test").execute().actionGet();
+        client.admin().cluster().prepareHealth("test").setWaitForGreenStatus().execute().actionGet();
+
+        client.prepareIndex("test", "type1")
+                .setSource("field1", "this is a test", "field2", "this is another test")
+                .setRefresh(true).execute().actionGet();
+
+        logger.info("--> highlighting and searching on field*");
+        SearchSourceBuilder source = searchSource()
+                .query(termQuery("field1", "test"))
+                .from(0).size(60).explain(true)
+                .highlight(highlight().field("field*").order("score").preTags("<xxx>").postTags("</xxx>"));
+
+        SearchResponse searchResponse = client.search(searchRequest("test").source(source).searchType(QUERY_THEN_FETCH).scroll(timeValueMinutes(10))).actionGet();
+        assertThat("Failures " + Arrays.toString(searchResponse.shardFailures()), searchResponse.shardFailures().length, equalTo(0));
+        assertThat(searchResponse.hits().totalHits(), equalTo(1l));
+
+        assertThat(searchResponse.hits().getAt(0).highlightFields().get("field1").fragments()[0].string(), equalTo("this is a <xxx>test</xxx>"));
+        assertThat(searchResponse.hits().getAt(0).highlightFields().get("field2").fragments()[0].string(), equalTo("this is another <xxx>test</xxx>"));
+    }
+
     @Test
     public void testPlainHighlighter() throws Exception {
         try {
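The test requests order("score"), which exercises the fragment-sorting path in the plain-highlighter branch above. A standalone sketch of that descending-score ordering, where Frag is a stand-in for Lucene's TextFragment (note Float.compare is a more robust way to express what the original's Math.round(o2.getScore() - o1.getScore()) comparator intends):

import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;

public class ScoreOrderSketch {

    // Stand-in for org.apache.lucene.search.highlight.TextFragment.
    static class Frag {
        final String text;
        final float score;

        Frag(String text, float score) {
            this.text = text;
            this.score = score;
        }
    }

    public static void main(String[] args) {
        List<Frag> frags = new ArrayList<Frag>();
        frags.add(new Frag("this is a test", 0.3f));
        frags.add(new Frag("this is another test", 0.9f));
        // Descending by score, as "order": "score" requests.
        Collections.sort(frags, new Comparator<Frag>() {
            public int compare(Frag o1, Frag o2) {
                return Float.compare(o2.score, o1.score);
            }
        });
        for (Frag f : frags) {
            System.out.println(f.score + " " + f.text); // 0.9 first, then 0.3
        }
    }
}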