Remove HighlighterParseElement

The old HighlightParseElement was only left because it was still
used in tests and some places in InnerHits. This PR removes it
and replaces the tests that checked that the original parse element
and the refactored highlighter code produce the same output with
new tests that compare builder input to the SearchContextHighlight
that is created.
This commit is contained in:
Christoph Büscher 2016-03-23 10:37:21 +01:00
parent d7179cafcc
commit f012b73947
7 changed files with 141 additions and 336 deletions

View File

@ -28,6 +28,9 @@ import org.elasticsearch.search.SearchParseElement;
import org.elasticsearch.search.fetch.FetchPhase; import org.elasticsearch.search.fetch.FetchPhase;
import org.elasticsearch.search.fetch.FetchSearchResult; import org.elasticsearch.search.fetch.FetchSearchResult;
import org.elasticsearch.search.fetch.FetchSubPhase; import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.fetch.fielddata.FieldDataFieldsParseElement;
import org.elasticsearch.search.fetch.script.ScriptFieldsParseElement;
import org.elasticsearch.search.fetch.source.FetchSourceParseElement;
import org.elasticsearch.search.internal.InternalSearchHit; import org.elasticsearch.search.internal.InternalSearchHit;
import org.elasticsearch.search.internal.InternalSearchHits; import org.elasticsearch.search.internal.InternalSearchHits;
import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.internal.SearchContext;

View File

@ -21,6 +21,7 @@ package org.elasticsearch.search.highlight;
import org.apache.lucene.search.highlight.SimpleFragmenter; import org.apache.lucene.search.highlight.SimpleFragmenter;
import org.apache.lucene.search.highlight.SimpleSpanFragmenter; import org.apache.lucene.search.highlight.SimpleSpanFragmenter;
import org.elasticsearch.action.support.ToXContentToBytes;
import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
@ -42,7 +43,7 @@ import java.util.Objects;
* This abstract class holds parameters shared by {@link HighlightBuilder} and {@link HighlightBuilder.Field} * This abstract class holds parameters shared by {@link HighlightBuilder} and {@link HighlightBuilder.Field}
* and provides the common setters, equality, hashCode calculation and common serialization * and provides the common setters, equality, hashCode calculation and common serialization
*/ */
public abstract class AbstractHighlighterBuilder<HB extends AbstractHighlighterBuilder<?>> { public abstract class AbstractHighlighterBuilder<HB extends AbstractHighlighterBuilder<?>> extends ToXContentToBytes {
public static final ParseField PRE_TAGS_FIELD = new ParseField("pre_tags"); public static final ParseField PRE_TAGS_FIELD = new ParseField("pre_tags");
public static final ParseField POST_TAGS_FIELD = new ParseField("post_tags"); public static final ParseField POST_TAGS_FIELD = new ParseField("post_tags");
@ -363,7 +364,7 @@ public abstract class AbstractHighlighterBuilder<HB extends AbstractHighlighterB
* @return the value set by {@link #phraseLimit(Integer)} * @return the value set by {@link #phraseLimit(Integer)}
*/ */
public Integer phraseLimit() { public Integer phraseLimit() {
return this.noMatchSize; return this.phraseLimit;
} }
/** /**
@ -382,6 +383,16 @@ public abstract class AbstractHighlighterBuilder<HB extends AbstractHighlighterB
return this.forceSource; return this.forceSource;
} }
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
innerXContent(builder);
builder.endObject();
return builder;
}
protected abstract void innerXContent(XContentBuilder builder) throws IOException;
void commonOptionsToXContent(XContentBuilder builder) throws IOException { void commonOptionsToXContent(XContentBuilder builder) throws IOException {
if (preTags != null) { if (preTags != null) {
builder.array(PRE_TAGS_FIELD.getPreferredName(), preTags); builder.array(PRE_TAGS_FIELD.getPreferredName(), preTags);

View File

@ -21,21 +21,17 @@ package org.elasticsearch.search.highlight;
import org.apache.lucene.search.Query; import org.apache.lucene.search.Query;
import org.apache.lucene.search.vectorhighlight.SimpleBoundaryScanner; import org.apache.lucene.search.vectorhighlight.SimpleBoundaryScanner;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.Token; import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.highlight.SearchContextHighlight.FieldOptions; import org.elasticsearch.search.highlight.SearchContextHighlight.FieldOptions;
import org.elasticsearch.search.highlight.SearchContextHighlight.FieldOptions.Builder;
import java.io.IOException; import java.io.IOException;
import java.util.ArrayList; import java.util.ArrayList;
@ -54,7 +50,7 @@ import java.util.Set;
* *
* @see org.elasticsearch.search.builder.SearchSourceBuilder#highlight() * @see org.elasticsearch.search.builder.SearchSourceBuilder#highlight()
*/ */
public class HighlightBuilder extends AbstractHighlighterBuilder<HighlightBuilder> implements Writeable<HighlightBuilder>, ToXContent { public class HighlightBuilder extends AbstractHighlighterBuilder<HighlightBuilder> implements Writeable<HighlightBuilder> {
public static final HighlightBuilder PROTOTYPE = new HighlightBuilder(); public static final HighlightBuilder PROTOTYPE = new HighlightBuilder();
@ -92,16 +88,15 @@ public class HighlightBuilder extends AbstractHighlighterBuilder<HighlightBuilde
public static final String[] DEFAULT_STYLED_POST_TAGS = {"</em>"}; public static final String[] DEFAULT_STYLED_POST_TAGS = {"</em>"};
/** /**
* a {@link FieldOptions.Builder} with default settings * a {@link FieldOptions} with default settings
*/ */
public final static Builder defaultFieldOptions() { final static FieldOptions defaultOptions = new SearchContextHighlight.FieldOptions.Builder()
return new SearchContextHighlight.FieldOptions.Builder() .preTags(DEFAULT_PRE_TAGS).postTags(DEFAULT_POST_TAGS).scoreOrdered(DEFAULT_SCORE_ORDERED)
.preTags(DEFAULT_PRE_TAGS).postTags(DEFAULT_POST_TAGS).scoreOrdered(DEFAULT_SCORE_ORDERED).highlightFilter(DEFAULT_HIGHLIGHT_FILTER) .highlightFilter(DEFAULT_HIGHLIGHT_FILTER).requireFieldMatch(DEFAULT_REQUIRE_FIELD_MATCH)
.requireFieldMatch(DEFAULT_REQUIRE_FIELD_MATCH).forceSource(DEFAULT_FORCE_SOURCE).fragmentCharSize(DEFAULT_FRAGMENT_CHAR_SIZE).numberOfFragments(DEFAULT_NUMBER_OF_FRAGMENTS) .forceSource(DEFAULT_FORCE_SOURCE).fragmentCharSize(DEFAULT_FRAGMENT_CHAR_SIZE)
.encoder(DEFAULT_ENCODER).boundaryMaxScan(SimpleBoundaryScanner.DEFAULT_MAX_SCAN) .numberOfFragments(DEFAULT_NUMBER_OF_FRAGMENTS).encoder(DEFAULT_ENCODER)
.boundaryChars(SimpleBoundaryScanner.DEFAULT_BOUNDARY_CHARS) .boundaryMaxScan(SimpleBoundaryScanner.DEFAULT_MAX_SCAN).boundaryChars(SimpleBoundaryScanner.DEFAULT_BOUNDARY_CHARS)
.noMatchSize(DEFAULT_NO_MATCH_SIZE).phraseLimit(DEFAULT_PHRASE_LIMIT); .noMatchSize(DEFAULT_NO_MATCH_SIZE).phraseLimit(DEFAULT_PHRASE_LIMIT).build();
}
private final List<Field> fields = new ArrayList<>(); private final List<Field> fields = new ArrayList<>();
@ -222,14 +217,6 @@ public class HighlightBuilder extends AbstractHighlighterBuilder<HighlightBuilde
return this.useExplicitFieldOrder; return this.useExplicitFieldOrder;
} }
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
innerXContent(builder);
builder.endObject();
return builder;
}
/** /**
* parse options only present in top level highlight builder (`tags_schema`, `encoder` and nested `fields`) * parse options only present in top level highlight builder (`tags_schema`, `encoder` and nested `fields`)
*/ */
@ -279,7 +266,7 @@ public class HighlightBuilder extends AbstractHighlighterBuilder<HighlightBuilde
transferOptions(this, globalOptionsBuilder, context); transferOptions(this, globalOptionsBuilder, context);
// overwrite unset global options by default values // overwrite unset global options by default values
globalOptionsBuilder.merge(defaultFieldOptions().build()); globalOptionsBuilder.merge(defaultOptions);
// create field options // create field options
Collection<org.elasticsearch.search.highlight.SearchContextHighlight.Field> fieldOptions = new ArrayList<>(); Collection<org.elasticsearch.search.highlight.SearchContextHighlight.Field> fieldOptions = new ArrayList<>();
@ -292,7 +279,8 @@ public class HighlightBuilder extends AbstractHighlighterBuilder<HighlightBuilde
fieldOptionsBuilder.matchedFields(matchedFields); fieldOptionsBuilder.matchedFields(matchedFields);
} }
transferOptions(field, fieldOptionsBuilder, context); transferOptions(field, fieldOptionsBuilder, context);
fieldOptions.add(new SearchContextHighlight.Field(field.name(), fieldOptionsBuilder.merge(globalOptionsBuilder.build()).build())); fieldOptions.add(new SearchContextHighlight.Field(field.name(), fieldOptionsBuilder
.merge(globalOptionsBuilder.build()).build()));
} }
return new SearchContextHighlight(fieldOptions); return new SearchContextHighlight(fieldOptions);
} }
@ -306,7 +294,8 @@ public class HighlightBuilder extends AbstractHighlighterBuilder<HighlightBuilde
* @throws IOException on errors parsing any optional nested highlight query * @throws IOException on errors parsing any optional nested highlight query
*/ */
@SuppressWarnings({ "rawtypes", "unchecked" }) @SuppressWarnings({ "rawtypes", "unchecked" })
private static void transferOptions(AbstractHighlighterBuilder highlighterBuilder, SearchContextHighlight.FieldOptions.Builder targetOptionsBuilder, QueryShardContext context) throws IOException { private static void transferOptions(AbstractHighlighterBuilder highlighterBuilder,
SearchContextHighlight.FieldOptions.Builder targetOptionsBuilder, QueryShardContext context) throws IOException {
if (highlighterBuilder.preTags != null) { if (highlighterBuilder.preTags != null) {
targetOptionsBuilder.preTags(highlighterBuilder.preTags); targetOptionsBuilder.preTags(highlighterBuilder.preTags);
} }
@ -357,7 +346,7 @@ public class HighlightBuilder extends AbstractHighlighterBuilder<HighlightBuilde
} }
} }
private static Character[] convertCharArray(char[] array) { static Character[] convertCharArray(char[] array) {
if (array == null) { if (array == null) {
return null; return null;
} }
@ -368,6 +357,7 @@ public class HighlightBuilder extends AbstractHighlighterBuilder<HighlightBuilde
return charArray; return charArray;
} }
@Override
public void innerXContent(XContentBuilder builder) throws IOException { public void innerXContent(XContentBuilder builder) throws IOException {
// first write common options // first write common options
commonOptionsToXContent(builder); commonOptionsToXContent(builder);
@ -398,18 +388,6 @@ public class HighlightBuilder extends AbstractHighlighterBuilder<HighlightBuilde
} }
} }
@Override
public final String toString() {
try {
XContentBuilder builder = XContentFactory.jsonBuilder();
builder.prettyPrint();
toXContent(builder, EMPTY_PARAMS);
return builder.string();
} catch (Exception e) {
return "{ \"error\" : \"" + ExceptionsHelper.detailedMessage(e) + "\"}";
}
}
@Override @Override
protected HighlightBuilder createInstance(XContentParser parser) { protected HighlightBuilder createInstance(XContentParser parser) {
return new HighlightBuilder(); return new HighlightBuilder();
@ -483,6 +461,7 @@ public class HighlightBuilder extends AbstractHighlighterBuilder<HighlightBuilde
return this; return this;
} }
@Override
public void innerXContent(XContentBuilder builder) throws IOException { public void innerXContent(XContentBuilder builder) throws IOException {
builder.startObject(name); builder.startObject(name);
// write common options // write common options
@ -525,7 +504,8 @@ public class HighlightBuilder extends AbstractHighlighterBuilder<HighlightBuilde
String fieldname = parser.currentName(); String fieldname = parser.currentName();
return new Field(fieldname); return new Field(fieldname);
} else { } else {
throw new ParsingException(parser.getTokenLocation(), "unknown token type [{}], expected field name", parser.currentToken()); throw new ParsingException(parser.getTokenLocation(), "unknown token type [{}], expected field name",
parser.currentToken());
} }
} }

View File

@ -39,15 +39,11 @@ import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import static java.util.Collections.singletonMap;
/** /**
* *
*/ */
public class HighlightPhase extends AbstractComponent implements FetchSubPhase { public class HighlightPhase extends AbstractComponent implements FetchSubPhase {
private static final List<String> STANDARD_HIGHLIGHTERS_BY_PRECEDENCE = Arrays.asList("fvh", "postings", "plain"); private static final List<String> STANDARD_HIGHLIGHTERS_BY_PRECEDENCE = Arrays.asList("fvh", "postings", "plain");
private static final Map<String, ? extends SearchParseElement> PARSE_ELEMENTS = singletonMap("highlight",
new HighlighterParseElement());
private final Highlighters highlighters; private final Highlighters highlighters;
@ -57,9 +53,13 @@ public class HighlightPhase extends AbstractComponent implements FetchSubPhase {
this.highlighters = highlighters; this.highlighters = highlighters;
} }
/**
* highlighters do not have a parse element, they use
* {@link HighlightBuilder#fromXContent(org.elasticsearch.index.query.QueryParseContext)} for parsing instead.
*/
@Override @Override
public Map<String, ? extends SearchParseElement> parseElements() { public Map<String, ? extends SearchParseElement> parseElements() {
return PARSE_ELEMENTS; return Collections.emptyMap();
} }
@Override @Override

View File

@ -1,246 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.highlight;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.SearchParseElement;
import org.elasticsearch.search.SearchParseException;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
* <pre>
* highlight : {
* tags_schema : "styled",
* pre_tags : ["tag1", "tag2"],
* post_tags : ["tag1", "tag2"],
* order : "score",
* highlight_filter : true,
* fields : {
* field1 : { },
* field2 : { fragment_size : 100, number_of_fragments : 2 },
* field3 : { number_of_fragments : 5, order : "simple", tags_schema : "styled" },
* field4 : { number_of_fragments: 0, pre_tags : ["openingTagA", "openingTagB"], post_tags : ["closingTag"] }
* }
* }
* </pre>
*/
public class HighlighterParseElement implements SearchParseElement {

    /**
     * Entry point used by the search parse phase: parses the {@code highlight}
     * element and installs the resulting {@link SearchContextHighlight} on the
     * supplied {@link SearchContext}. Any {@link IllegalArgumentException} from
     * the inner parser is rethrown as a {@link SearchParseException} carrying
     * the current token location of the request.
     */
    @Override
    public void parse(XContentParser parser, SearchContext context) throws Exception {
        try {
            context.highlight(parse(parser, context.getQueryShardContext()));
        } catch (IllegalArgumentException ex) {
            throw new SearchParseException(context, "Error while trying to parse Highlighter element in request", parser.getTokenLocation());
        }
    }

    /**
     * Parses the top-level highlight object into a {@link SearchContextHighlight}:
     * global options (tags, order, fragment settings, highlighter type, ...) plus
     * the per-field options nested under {@code fields}. The {@code fields}
     * section may be given either as an object or as an array of single-field
     * objects; the array form keeps the fields in request order.
     *
     * @param parser            positioned inside the highlight object
     * @param queryShardContext used to parse a nested {@code highlight_query}
     * @return the parsed highlight context, with each field's unset options
     *         filled in from the global options
     * @throws IOException on underlying xContent parse errors
     * @throws IllegalArgumentException if the {@code fields} array form is
     *         malformed, or global pre tags are set without post tags
     */
    public SearchContextHighlight parse(XContentParser parser, QueryShardContext queryShardContext) throws IOException {
        XContentParser.Token token;
        String topLevelFieldName = null;
        // (field name, options builder) tuples; a list rather than a map so the request order of fields is preserved
        final List<Tuple<String, SearchContextHighlight.FieldOptions.Builder>> fieldsOptions = new ArrayList<>();
        // start from the shared defaults; explicit global settings parsed below overwrite them
        final SearchContextHighlight.FieldOptions.Builder globalOptionsBuilder = HighlightBuilder.defaultFieldOptions();
        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            if (token == XContentParser.Token.FIELD_NAME) {
                topLevelFieldName = parser.currentName();
            } else if (token == XContentParser.Token.START_ARRAY) {
                if ("pre_tags".equals(topLevelFieldName) || "preTags".equals(topLevelFieldName)) {
                    List<String> preTagsList = new ArrayList<>();
                    while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                        preTagsList.add(parser.text());
                    }
                    globalOptionsBuilder.preTags(preTagsList.toArray(new String[preTagsList.size()]));
                } else if ("post_tags".equals(topLevelFieldName) || "postTags".equals(topLevelFieldName)) {
                    List<String> postTagsList = new ArrayList<>();
                    while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                        postTagsList.add(parser.text());
                    }
                    globalOptionsBuilder.postTags(postTagsList.toArray(new String[postTagsList.size()]));
                } else if ("fields".equals(topLevelFieldName)) {
                    // array form of "fields": each array entry must be an object wrapping exactly one field
                    while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                        if (token == XContentParser.Token.START_OBJECT) {
                            String highlightFieldName = null;
                            while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                                if (token == XContentParser.Token.FIELD_NAME) {
                                    if (highlightFieldName != null) {
                                        // a second field name inside the same wrapper object is illegal
                                        throw new IllegalArgumentException("If highlighter fields is an array it must contain objects containing a single field");
                                    }
                                    highlightFieldName = parser.currentName();
                                } else if (token == XContentParser.Token.START_OBJECT) {
                                    fieldsOptions.add(Tuple.tuple(highlightFieldName, parseFields(parser, queryShardContext)));
                                }
                            }
                        } else {
                            throw new IllegalArgumentException("If highlighter fields is an array it must contain objects containing a single field");
                        }
                    }
                }
            } else if (token.isValue()) {
                // scalar global options; both snake_case and camelCase spellings are accepted
                if ("order".equals(topLevelFieldName)) {
                    // only "score" switches to score ordering; any other value keeps the default
                    globalOptionsBuilder.scoreOrdered("score".equals(parser.text()));
                } else if ("tags_schema".equals(topLevelFieldName) || "tagsSchema".equals(topLevelFieldName)) {
                    String schema = parser.text();
                    if ("styled".equals(schema)) {
                        // "styled" is the only recognized schema; it swaps in the styled pre/post tag defaults
                        globalOptionsBuilder.preTags(HighlightBuilder.DEFAULT_STYLED_PRE_TAG);
                        globalOptionsBuilder.postTags(HighlightBuilder.DEFAULT_STYLED_POST_TAGS);
                    }
                } else if ("highlight_filter".equals(topLevelFieldName) || "highlightFilter".equals(topLevelFieldName)) {
                    globalOptionsBuilder.highlightFilter(parser.booleanValue());
                } else if ("fragment_size".equals(topLevelFieldName) || "fragmentSize".equals(topLevelFieldName)) {
                    globalOptionsBuilder.fragmentCharSize(parser.intValue());
                } else if ("number_of_fragments".equals(topLevelFieldName) || "numberOfFragments".equals(topLevelFieldName)) {
                    globalOptionsBuilder.numberOfFragments(parser.intValue());
                } else if ("encoder".equals(topLevelFieldName)) {
                    globalOptionsBuilder.encoder(parser.text());
                } else if ("require_field_match".equals(topLevelFieldName) || "requireFieldMatch".equals(topLevelFieldName)) {
                    globalOptionsBuilder.requireFieldMatch(parser.booleanValue());
                } else if ("boundary_max_scan".equals(topLevelFieldName) || "boundaryMaxScan".equals(topLevelFieldName)) {
                    globalOptionsBuilder.boundaryMaxScan(parser.intValue());
                } else if ("boundary_chars".equals(topLevelFieldName) || "boundaryChars".equals(topLevelFieldName)) {
                    // box the chars manually; the options builder takes Character[]
                    char[] charsArr = parser.text().toCharArray();
                    Character[] globalBoundaryChars = new Character[charsArr.length];
                    for (int i = 0; i < charsArr.length; i++) {
                        globalBoundaryChars[i] = charsArr[i];
                    }
                    globalOptionsBuilder.boundaryChars(globalBoundaryChars);
                } else if ("type".equals(topLevelFieldName)) {
                    globalOptionsBuilder.highlighterType(parser.text());
                } else if ("fragmenter".equals(topLevelFieldName)) {
                    globalOptionsBuilder.fragmenter(parser.text());
                } else if ("no_match_size".equals(topLevelFieldName) || "noMatchSize".equals(topLevelFieldName)) {
                    globalOptionsBuilder.noMatchSize(parser.intValue());
                } else if ("force_source".equals(topLevelFieldName) || "forceSource".equals(topLevelFieldName)) {
                    globalOptionsBuilder.forceSource(parser.booleanValue());
                } else if ("phrase_limit".equals(topLevelFieldName) || "phraseLimit".equals(topLevelFieldName)) {
                    globalOptionsBuilder.phraseLimit(parser.intValue());
                }
                // NOTE(review): unrecognized scalar option names are silently ignored here — confirm that is intended
            } else if (token == XContentParser.Token.START_OBJECT && "options".equals(topLevelFieldName)) {
                // free-form, highlighter-implementation-specific options map
                globalOptionsBuilder.options(parser.map());
            } else if (token == XContentParser.Token.START_OBJECT) {
                if ("fields".equals(topLevelFieldName)) {
                    // object form of "fields": { "field1" : { ... }, "field2" : { ... } }
                    String highlightFieldName = null;
                    while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                        if (token == XContentParser.Token.FIELD_NAME) {
                            highlightFieldName = parser.currentName();
                        } else if (token == XContentParser.Token.START_OBJECT) {
                            fieldsOptions.add(Tuple.tuple(highlightFieldName, parseFields(parser, queryShardContext)));
                        }
                    }
                } else if ("highlight_query".equals(topLevelFieldName) || "highlightQuery".equals(topLevelFieldName)) {
                    // delegate the nested query to the regular query parsing infrastructure
                    globalOptionsBuilder.highlightQuery(queryShardContext.parse(parser).query());
                }
            }
        }
        final SearchContextHighlight.FieldOptions globalOptions = globalOptionsBuilder.build();
        // pre tags without matching post tags would produce unterminated markup, so reject early
        if (globalOptions.preTags() != null && globalOptions.postTags() == null) {
            throw new IllegalArgumentException("Highlighter global preTags are set, but global postTags are not set");
        }
        final List<SearchContextHighlight.Field> fields = new ArrayList<>();
        // now, go over and fill all fieldsOptions with default values from the global state
        for (final Tuple<String, SearchContextHighlight.FieldOptions.Builder> tuple : fieldsOptions) {
            fields.add(new SearchContextHighlight.Field(tuple.v1(), tuple.v2().merge(globalOptions).build()));
        }
        return new SearchContextHighlight(fields);
    }

    /**
     * Parses the options object of a single highlight field into a
     * {@link SearchContextHighlight.FieldOptions.Builder}. Mirrors the global
     * option names (snake_case and camelCase both accepted) and additionally
     * supports the field-only options {@code fragment_offset} and
     * {@code matched_fields}. Returns the builder unmerged — the caller merges
     * in the global options afterwards.
     */
    private static SearchContextHighlight.FieldOptions.Builder parseFields(XContentParser parser, QueryShardContext queryShardContext) throws IOException {
        XContentParser.Token token;
        final SearchContextHighlight.FieldOptions.Builder fieldOptionsBuilder = new SearchContextHighlight.FieldOptions.Builder();
        String fieldName = null;
        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            if (token == XContentParser.Token.FIELD_NAME) {
                fieldName = parser.currentName();
            } else if (token == XContentParser.Token.START_ARRAY) {
                if ("pre_tags".equals(fieldName) || "preTags".equals(fieldName)) {
                    List<String> preTagsList = new ArrayList<>();
                    while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                        preTagsList.add(parser.text());
                    }
                    fieldOptionsBuilder.preTags(preTagsList.toArray(new String[preTagsList.size()]));
                } else if ("post_tags".equals(fieldName) || "postTags".equals(fieldName)) {
                    List<String> postTagsList = new ArrayList<>();
                    while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                        postTagsList.add(parser.text());
                    }
                    fieldOptionsBuilder.postTags(postTagsList.toArray(new String[postTagsList.size()]));
                } else if ("matched_fields".equals(fieldName) || "matchedFields".equals(fieldName)) {
                    // matched_fields is a set — duplicates carry no meaning here
                    Set<String> matchedFields = new HashSet<>();
                    while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
                        matchedFields.add(parser.text());
                    }
                    fieldOptionsBuilder.matchedFields(matchedFields);
                }
            } else if (token.isValue()) {
                if ("fragment_size".equals(fieldName) || "fragmentSize".equals(fieldName)) {
                    fieldOptionsBuilder.fragmentCharSize(parser.intValue());
                } else if ("number_of_fragments".equals(fieldName) || "numberOfFragments".equals(fieldName)) {
                    fieldOptionsBuilder.numberOfFragments(parser.intValue());
                } else if ("fragment_offset".equals(fieldName) || "fragmentOffset".equals(fieldName)) {
                    fieldOptionsBuilder.fragmentOffset(parser.intValue());
                } else if ("highlight_filter".equals(fieldName) || "highlightFilter".equals(fieldName)) {
                    fieldOptionsBuilder.highlightFilter(parser.booleanValue());
                } else if ("order".equals(fieldName)) {
                    fieldOptionsBuilder.scoreOrdered("score".equals(parser.text()));
                } else if ("require_field_match".equals(fieldName) || "requireFieldMatch".equals(fieldName)) {
                    fieldOptionsBuilder.requireFieldMatch(parser.booleanValue());
                } else if ("boundary_max_scan".equals(fieldName) || "boundaryMaxScan".equals(fieldName)) {
                    fieldOptionsBuilder.boundaryMaxScan(parser.intValue());
                } else if ("boundary_chars".equals(fieldName) || "boundaryChars".equals(fieldName)) {
                    // same manual boxing as the global case; builder takes Character[]
                    char[] charsArr = parser.text().toCharArray();
                    Character[] boundaryChars = new Character[charsArr.length];
                    for (int i = 0; i < charsArr.length; i++) {
                        boundaryChars[i] = charsArr[i];
                    }
                    fieldOptionsBuilder.boundaryChars(boundaryChars);
                } else if ("type".equals(fieldName)) {
                    fieldOptionsBuilder.highlighterType(parser.text());
                } else if ("fragmenter".equals(fieldName)) {
                    fieldOptionsBuilder.fragmenter(parser.text());
                } else if ("no_match_size".equals(fieldName) || "noMatchSize".equals(fieldName)) {
                    fieldOptionsBuilder.noMatchSize(parser.intValue());
                } else if ("force_source".equals(fieldName) || "forceSource".equals(fieldName)) {
                    fieldOptionsBuilder.forceSource(parser.booleanValue());
                } else if ("phrase_limit".equals(fieldName) || "phraseLimit".equals(fieldName)) {
                    fieldOptionsBuilder.phraseLimit(parser.intValue());
                }
            } else if (token == XContentParser.Token.START_OBJECT) {
                if ("highlight_query".equals(fieldName) || "highlightQuery".equals(fieldName)) {
                    fieldOptionsBuilder.highlightQuery(queryShardContext.parse(parser).query());
                } else if ("options".equals(fieldName)) {
                    fieldOptionsBuilder.options(parser.map());
                }
            }
        }
        return fieldOptionsBuilder;
    }
}

View File

@ -354,7 +354,6 @@ public class SearchContextHighlight {
if (fieldOptions.phraseLimit == -1) { if (fieldOptions.phraseLimit == -1) {
fieldOptions.phraseLimit = globalOptions.phraseLimit; fieldOptions.phraseLimit = globalOptions.phraseLimit;
} }
return this; return this;
} }
} }

View File

@ -19,6 +19,7 @@
package org.elasticsearch.search.highlight; package org.elasticsearch.search.highlight;
import org.apache.lucene.search.Query;
import org.elasticsearch.Version; import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.ParseFieldMatcher;
@ -59,9 +60,13 @@ import org.junit.BeforeClass;
import java.io.IOException; import java.io.IOException;
import java.util.Arrays; import java.util.Arrays;
import java.util.HashMap; import java.util.HashMap;
import java.util.Iterator; import java.util.HashSet;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import java.util.function.BiConsumer;
import java.util.function.Function;
import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.not;
@ -266,8 +271,8 @@ public class HighlightBuilderTests extends ESTestCase {
} }
/** /**
* test that build() outputs a {@link SearchContextHighlight} that is similar to the one * test that build() outputs a {@link SearchContextHighlight} that has parameters similar
* we would get when parsing the xContent the test highlight builder is rendering out * to what we have in the random {@link HighlightBuilder}
*/ */
public void testBuildSearchContextHighlight() throws IOException { public void testBuildSearchContextHighlight() throws IOException {
Settings indexSettings = Settings.settingsBuilder() Settings indexSettings = Settings.settingsBuilder()
@ -287,48 +292,97 @@ public class HighlightBuilderTests extends ESTestCase {
for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) { for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) {
HighlightBuilder highlightBuilder = randomHighlighterBuilder(); HighlightBuilder highlightBuilder = randomHighlighterBuilder();
SearchContextHighlight highlight = highlightBuilder.build(mockShardContext); SearchContextHighlight highlight = highlightBuilder.build(mockShardContext);
XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); for (SearchContextHighlight.Field field : highlight.fields()) {
if (randomBoolean()) { String encoder = highlightBuilder.encoder() != null ? highlightBuilder.encoder() : HighlightBuilder.DEFAULT_ENCODER;
builder.prettyPrint(); assertEquals(encoder, field.fieldOptions().encoder());
} final Field fieldBuilder = getFieldBuilderByName(highlightBuilder, field.field());
builder.startObject(); assertNotNull("expected a highlight builder for field " + field.field(), fieldBuilder);
highlightBuilder.innerXContent(builder); FieldOptions fieldOptions = field.fieldOptions();
builder.endObject();
XContentParser parser = XContentHelper.createParser(builder.bytes());
SearchContextHighlight parsedHighlight = new HighlighterParseElement().parse(parser, mockShardContext); BiConsumer<Function<AbstractHighlighterBuilder<?>, Object>, Function<FieldOptions, Object>> checkSame =
assertNotSame(highlight, parsedHighlight); mergeBeforeChek(highlightBuilder, fieldBuilder, fieldOptions);
assertEquals(highlight.globalForceSource(), parsedHighlight.globalForceSource());
assertEquals(highlight.fields().size(), parsedHighlight.fields().size());
Iterator<org.elasticsearch.search.highlight.SearchContextHighlight.Field> iterator = parsedHighlight.fields().iterator(); checkSame.accept(AbstractHighlighterBuilder::boundaryChars, FieldOptions::boundaryChars);
for (org.elasticsearch.search.highlight.SearchContextHighlight.Field field : highlight.fields()) { checkSame.accept(AbstractHighlighterBuilder::boundaryMaxScan, FieldOptions::boundaryMaxScan);
org.elasticsearch.search.highlight.SearchContextHighlight.Field otherField = iterator.next(); checkSame.accept(AbstractHighlighterBuilder::fragmentSize, FieldOptions::fragmentCharSize);
assertEquals(field.field(), otherField.field()); checkSame.accept(AbstractHighlighterBuilder::fragmenter, FieldOptions::fragmenter);
FieldOptions options = field.fieldOptions(); checkSame.accept(AbstractHighlighterBuilder::requireFieldMatch, FieldOptions::requireFieldMatch);
FieldOptions otherOptions = otherField.fieldOptions(); checkSame.accept(AbstractHighlighterBuilder::noMatchSize, FieldOptions::noMatchSize);
assertArrayEquals(options.boundaryChars(), options.boundaryChars()); checkSame.accept(AbstractHighlighterBuilder::numOfFragments, FieldOptions::numberOfFragments);
assertEquals(options.boundaryMaxScan(), otherOptions.boundaryMaxScan()); checkSame.accept(AbstractHighlighterBuilder::phraseLimit, FieldOptions::phraseLimit);
assertEquals(options.encoder(), otherOptions.encoder()); checkSame.accept(AbstractHighlighterBuilder::highlighterType, FieldOptions::highlighterType);
assertEquals(options.fragmentCharSize(), otherOptions.fragmentCharSize()); checkSame.accept(AbstractHighlighterBuilder::highlightFilter, FieldOptions::highlightFilter);
assertEquals(options.fragmenter(), otherOptions.fragmenter()); checkSame.accept(AbstractHighlighterBuilder::preTags, FieldOptions::preTags);
assertEquals(options.fragmentOffset(), otherOptions.fragmentOffset()); checkSame.accept(AbstractHighlighterBuilder::postTags, FieldOptions::postTags);
assertEquals(options.highlighterType(), otherOptions.highlighterType()); checkSame.accept(AbstractHighlighterBuilder::options, FieldOptions::options);
assertEquals(options.highlightFilter(), otherOptions.highlightFilter()); checkSame.accept(AbstractHighlighterBuilder::order, op -> op.scoreOrdered() ? Order.SCORE : Order.NONE);
assertEquals(options.highlightQuery(), otherOptions.highlightQuery()); assertEquals(fieldBuilder.fragmentOffset, fieldOptions.fragmentOffset());
assertEquals(options.matchedFields(), otherOptions.matchedFields()); if (fieldBuilder.matchedFields != null) {
assertEquals(options.noMatchSize(), otherOptions.noMatchSize()); String[] copy = Arrays.copyOf(fieldBuilder.matchedFields, fieldBuilder.matchedFields.length);
assertEquals(options.numberOfFragments(), otherOptions.numberOfFragments()); Arrays.sort(copy);
assertEquals(options.options(), otherOptions.options()); assertArrayEquals(copy,
assertEquals(options.phraseLimit(), otherOptions.phraseLimit()); new TreeSet<String>(fieldOptions.matchedFields()).toArray(new String[fieldOptions.matchedFields().size()]));
assertArrayEquals(options.preTags(), otherOptions.preTags()); } else {
assertArrayEquals(options.postTags(), otherOptions.postTags()); assertNull(fieldOptions.matchedFields());
assertEquals(options.requireFieldMatch(), otherOptions.requireFieldMatch()); }
assertEquals(options.scoreOrdered(), otherOptions.scoreOrdered()); Query expectedValue = null;
if (fieldBuilder.highlightQuery != null) {
expectedValue = QueryBuilder.rewriteQuery(fieldBuilder.highlightQuery, mockShardContext).toQuery(mockShardContext);
} else if (highlightBuilder.highlightQuery != null) {
expectedValue = QueryBuilder.rewriteQuery(highlightBuilder.highlightQuery, mockShardContext).toQuery(mockShardContext);
}
assertEquals(expectedValue, fieldOptions.highlightQuery());
} }
} }
} }
/**
* Create a generic helper function that performs all the work of merging the global highlight builder parameter,
* the (potential) overwrite on the field level and the default value from {@link HighlightBuilder#defaultOptions}
* before making the assertion that the value in the highlight builder and the actual value in the {@link FieldOptions}
* passed in is the same.
*
* @param highlightBuilder provides the (optional) global builder parameter
* @param fieldBuilder provides the (optional) field level parameter, if present this overwrites the global value
* @param options the target field options that are checked
*/
private static BiConsumer<Function<AbstractHighlighterBuilder<?>, Object>, Function<FieldOptions, Object>> mergeBeforeChek(
HighlightBuilder highlightBuilder, Field fieldBuilder, FieldOptions options) {
return (highlightBuilderParameterAccessor, fieldOptionsParameterAccessor) -> {
Object expectedValue = null;
Object globalLevelValue = highlightBuilderParameterAccessor.apply(highlightBuilder);
Object fieldLevelValue = highlightBuilderParameterAccessor.apply(fieldBuilder);
if (fieldLevelValue != null) {
expectedValue = fieldLevelValue;
} else if (globalLevelValue != null) {
expectedValue = globalLevelValue;
} else {
expectedValue = fieldOptionsParameterAccessor.apply(HighlightBuilder.defaultOptions);
}
Object actualValue = fieldOptionsParameterAccessor.apply(options);
if (actualValue instanceof String[]) {
assertArrayEquals((String[]) expectedValue, (String[]) actualValue);
} else if (actualValue instanceof Character[]) {
if (expectedValue instanceof char[]) {
assertArrayEquals(HighlightBuilder.convertCharArray((char[]) expectedValue), (Character[]) actualValue);
} else {
assertArrayEquals((Character[]) expectedValue, (Character[]) actualValue);
}
} else {
assertEquals(expectedValue, actualValue);
}
};
}
private static Field getFieldBuilderByName(HighlightBuilder highlightBuilder, String fieldName) {
for (Field hbfield : highlightBuilder.fields()) {
if (hbfield.name().equals(fieldName)) {
return hbfield;
}
}
return null;
}
/** /**
* `tags_schema` is not produced by toXContent in the builder but should be parseable, so this * `tags_schema` is not produced by toXContent in the builder but should be parseable, so this
* adds a simple json test for this. * adds a simple json test for this.
@ -447,7 +501,7 @@ public class HighlightBuilderTests extends ESTestCase {
} }
int numberOfFields = randomIntBetween(1,5); int numberOfFields = randomIntBetween(1,5);
for (int i = 0; i < numberOfFields; i++) { for (int i = 0; i < numberOfFields; i++) {
Field field = new Field(randomAsciiOfLengthBetween(1, 10)); Field field = new Field(i + "_" + randomAsciiOfLengthBetween(1, 10));
setRandomCommonOptions(field); setRandomCommonOptions(field);
if (randomBoolean()) { if (randomBoolean()) {
field.fragmentOffset(randomIntBetween(1, 100)); field.fragmentOffset(randomIntBetween(1, 100));
@ -617,13 +671,17 @@ public class HighlightBuilderTests extends ESTestCase {
} }
} }
/**
* Create array of unique Strings. If not unique, e.g. duplicate field names
* would be dropped in {@link FieldOptions.Builder#matchedFields(Set)}, resulting in test glitches
*/
private static String[] randomStringArray(int minSize, int maxSize) { private static String[] randomStringArray(int minSize, int maxSize) {
int size = randomIntBetween(minSize, maxSize); int size = randomIntBetween(minSize, maxSize);
String[] randomStrings = new String[size]; Set<String> randomStrings = new HashSet<String>(size);
for (int f = 0; f < size; f++) { for (int f = 0; f < size; f++) {
randomStrings[f] = randomAsciiOfLengthBetween(1, 10); randomStrings.add(randomAsciiOfLengthBetween(3, 10));
} }
return randomStrings; return randomStrings.toArray(new String[randomStrings.size()]);
} }
/** /**