From f012b739476bfa5e15f75e4ac51aa3eefacf8b32 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Wed, 23 Mar 2016 10:37:21 +0100 Subject: [PATCH] Remove HighlighterParseElement The old HighlighterParseElement was only left because it was still used in tests and some places in InnerHits. This PR removes it and replaces the tests that checked that the original parse element and the refactored highlighter code produce the same output with new tests that compare the builder input to the SearchContextHighlight that is created. --- .../innerhits/InnerHitsFetchSubPhase.java | 3 + .../highlight/AbstractHighlighterBuilder.java | 15 +- .../search/highlight/HighlightBuilder.java | 58 ++--- .../search/highlight/HighlightPhase.java | 10 +- .../highlight/HighlighterParseElement.java | 246 ------------------ .../highlight/SearchContextHighlight.java | 1 - .../highlight/HighlightBuilderTests.java | 144 +++++++--- 7 files changed, 141 insertions(+), 336 deletions(-) delete mode 100644 core/src/main/java/org/elasticsearch/search/highlight/HighlighterParseElement.java diff --git a/core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsFetchSubPhase.java b/core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsFetchSubPhase.java index cd849a65aa8..ea7edcc3dd4 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsFetchSubPhase.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsFetchSubPhase.java @@ -28,6 +28,9 @@ import org.elasticsearch.search.SearchParseElement; import org.elasticsearch.search.fetch.FetchPhase; import org.elasticsearch.search.fetch.FetchSearchResult; import org.elasticsearch.search.fetch.FetchSubPhase; +import org.elasticsearch.search.fetch.fielddata.FieldDataFieldsParseElement; +import org.elasticsearch.search.fetch.script.ScriptFieldsParseElement; +import org.elasticsearch.search.fetch.source.FetchSourceParseElement; import org.elasticsearch.search.internal.InternalSearchHit; import org.elasticsearch.search.internal.InternalSearchHits; import org.elasticsearch.search.internal.SearchContext; diff --git a/core/src/main/java/org/elasticsearch/search/highlight/AbstractHighlighterBuilder.java b/core/src/main/java/org/elasticsearch/search/highlight/AbstractHighlighterBuilder.java index b4de465cc74..05bc805e1e9 100644 --- a/core/src/main/java/org/elasticsearch/search/highlight/AbstractHighlighterBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/highlight/AbstractHighlighterBuilder.java @@ -21,6 +21,7 @@ package org.elasticsearch.search.highlight; import org.apache.lucene.search.highlight.SimpleFragmenter; import org.apache.lucene.search.highlight.SimpleSpanFragmenter; +import org.elasticsearch.action.support.ToXContentToBytes; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; @@ -42,7 +43,7 @@ import java.util.Objects; * This abstract class holds parameters shared by {@link HighlightBuilder} and {@link HighlightBuilder.Field} * and provides the common setters, equality, hashCode calculation and common serialization */ -public abstract class AbstractHighlighterBuilder> { +public abstract class AbstractHighlighterBuilder> extends ToXContentToBytes { public static final ParseField PRE_TAGS_FIELD = new ParseField("pre_tags"); public static final ParseField POST_TAGS_FIELD = new ParseField("post_tags"); @@ -363,7 +364,7 @@ public abstract class AbstractHighlighterBuilder implements 
Writeable, ToXContent { +public class HighlightBuilder extends AbstractHighlighterBuilder implements Writeable { public static final HighlightBuilder PROTOTYPE = new HighlightBuilder(); @@ -92,16 +88,15 @@ public class HighlightBuilder extends AbstractHighlighterBuilder"}; /** - * a {@link FieldOptions.Builder} with default settings + * a {@link FieldOptions} with default settings */ - public final static Builder defaultFieldOptions() { - return new SearchContextHighlight.FieldOptions.Builder() - .preTags(DEFAULT_PRE_TAGS).postTags(DEFAULT_POST_TAGS).scoreOrdered(DEFAULT_SCORE_ORDERED).highlightFilter(DEFAULT_HIGHLIGHT_FILTER) - .requireFieldMatch(DEFAULT_REQUIRE_FIELD_MATCH).forceSource(DEFAULT_FORCE_SOURCE).fragmentCharSize(DEFAULT_FRAGMENT_CHAR_SIZE).numberOfFragments(DEFAULT_NUMBER_OF_FRAGMENTS) - .encoder(DEFAULT_ENCODER).boundaryMaxScan(SimpleBoundaryScanner.DEFAULT_MAX_SCAN) - .boundaryChars(SimpleBoundaryScanner.DEFAULT_BOUNDARY_CHARS) - .noMatchSize(DEFAULT_NO_MATCH_SIZE).phraseLimit(DEFAULT_PHRASE_LIMIT); - } + final static FieldOptions defaultOptions = new SearchContextHighlight.FieldOptions.Builder() + .preTags(DEFAULT_PRE_TAGS).postTags(DEFAULT_POST_TAGS).scoreOrdered(DEFAULT_SCORE_ORDERED) + .highlightFilter(DEFAULT_HIGHLIGHT_FILTER).requireFieldMatch(DEFAULT_REQUIRE_FIELD_MATCH) + .forceSource(DEFAULT_FORCE_SOURCE).fragmentCharSize(DEFAULT_FRAGMENT_CHAR_SIZE) + .numberOfFragments(DEFAULT_NUMBER_OF_FRAGMENTS).encoder(DEFAULT_ENCODER) + .boundaryMaxScan(SimpleBoundaryScanner.DEFAULT_MAX_SCAN).boundaryChars(SimpleBoundaryScanner.DEFAULT_BOUNDARY_CHARS) + .noMatchSize(DEFAULT_NO_MATCH_SIZE).phraseLimit(DEFAULT_PHRASE_LIMIT).build(); private final List fields = new ArrayList<>(); @@ -222,14 +217,6 @@ public class HighlightBuilder extends AbstractHighlighterBuilder fieldOptions = new ArrayList<>(); @@ -292,7 +279,8 @@ public class HighlightBuilder extends AbstractHighlighterBuilder STANDARD_HIGHLIGHTERS_BY_PRECEDENCE = Arrays.asList("fvh", "postings", "plain"); - private static final Map PARSE_ELEMENTS = singletonMap("highlight", - new HighlighterParseElement()); private final Highlighters highlighters; @@ -57,9 +53,13 @@ public class HighlightPhase extends AbstractComponent implements FetchSubPhase { this.highlighters = highlighters; } + /** + * highlighters do not have a parse element, they use + * {@link HighlightBuilder#fromXContent(org.elasticsearch.index.query.QueryParseContext)} for parsing instead. + */ @Override public Map parseElements() { - return PARSE_ELEMENTS; + return Collections.emptyMap(); } @Override diff --git a/core/src/main/java/org/elasticsearch/search/highlight/HighlighterParseElement.java b/core/src/main/java/org/elasticsearch/search/highlight/HighlighterParseElement.java deleted file mode 100644 index b774acad2ff..00000000000 --- a/core/src/main/java/org/elasticsearch/search/highlight/HighlighterParseElement.java +++ /dev/null @@ -1,246 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.search.highlight; - -import org.elasticsearch.common.collect.Tuple; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.query.QueryShardContext; -import org.elasticsearch.search.SearchParseElement; -import org.elasticsearch.search.SearchParseException; -import org.elasticsearch.search.internal.SearchContext; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.HashSet; -import java.util.List; -import java.util.Set; - -/** - *
- * highlight : {
- *  tags_schema : "styled",
- *  pre_tags : ["tag1", "tag2"],
- *  post_tags : ["tag1", "tag2"],
- *  order : "score",
- *  highlight_filter : true,
- *  fields : {
- *      field1 : {  },
- *      field2 : { fragment_size : 100, number_of_fragments : 2 },
- *      field3 : { number_of_fragments : 5, order : "simple", tags_schema : "styled" },
- *      field4 : { number_of_fragments: 0, pre_tags : ["openingTagA", "openingTagB"], post_tags : ["closingTag"] }
- *  }
- * }
- * 
- */ -public class HighlighterParseElement implements SearchParseElement { - - @Override - public void parse(XContentParser parser, SearchContext context) throws Exception { - try { - context.highlight(parse(parser, context.getQueryShardContext())); - } catch (IllegalArgumentException ex) { - throw new SearchParseException(context, "Error while trying to parse Highlighter element in request", parser.getTokenLocation()); - } - } - - public SearchContextHighlight parse(XContentParser parser, QueryShardContext queryShardContext) throws IOException { - XContentParser.Token token; - String topLevelFieldName = null; - final List> fieldsOptions = new ArrayList<>(); - - final SearchContextHighlight.FieldOptions.Builder globalOptionsBuilder = HighlightBuilder.defaultFieldOptions(); - - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - topLevelFieldName = parser.currentName(); - } else if (token == XContentParser.Token.START_ARRAY) { - if ("pre_tags".equals(topLevelFieldName) || "preTags".equals(topLevelFieldName)) { - List preTagsList = new ArrayList<>(); - while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - preTagsList.add(parser.text()); - } - globalOptionsBuilder.preTags(preTagsList.toArray(new String[preTagsList.size()])); - } else if ("post_tags".equals(topLevelFieldName) || "postTags".equals(topLevelFieldName)) { - List postTagsList = new ArrayList<>(); - while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - postTagsList.add(parser.text()); - } - globalOptionsBuilder.postTags(postTagsList.toArray(new String[postTagsList.size()])); - } else if ("fields".equals(topLevelFieldName)) { - while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - if (token == XContentParser.Token.START_OBJECT) { - String highlightFieldName = null; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - if (highlightFieldName != null) { - throw new IllegalArgumentException("If highlighter fields is an array it must contain objects containing a single field"); - } - highlightFieldName = parser.currentName(); - } else if (token == XContentParser.Token.START_OBJECT) { - fieldsOptions.add(Tuple.tuple(highlightFieldName, parseFields(parser, queryShardContext))); - } - } - } else { - throw new IllegalArgumentException("If highlighter fields is an array it must contain objects containing a single field"); - } - } - } - } else if (token.isValue()) { - if ("order".equals(topLevelFieldName)) { - globalOptionsBuilder.scoreOrdered("score".equals(parser.text())); - } else if ("tags_schema".equals(topLevelFieldName) || "tagsSchema".equals(topLevelFieldName)) { - String schema = parser.text(); - if ("styled".equals(schema)) { - globalOptionsBuilder.preTags(HighlightBuilder.DEFAULT_STYLED_PRE_TAG); - globalOptionsBuilder.postTags(HighlightBuilder.DEFAULT_STYLED_POST_TAGS); - } - } else if ("highlight_filter".equals(topLevelFieldName) || "highlightFilter".equals(topLevelFieldName)) { - globalOptionsBuilder.highlightFilter(parser.booleanValue()); - } else if ("fragment_size".equals(topLevelFieldName) || "fragmentSize".equals(topLevelFieldName)) { - globalOptionsBuilder.fragmentCharSize(parser.intValue()); - } else if ("number_of_fragments".equals(topLevelFieldName) || "numberOfFragments".equals(topLevelFieldName)) { - globalOptionsBuilder.numberOfFragments(parser.intValue()); - } else if ("encoder".equals(topLevelFieldName)) { - 
globalOptionsBuilder.encoder(parser.text()); - } else if ("require_field_match".equals(topLevelFieldName) || "requireFieldMatch".equals(topLevelFieldName)) { - globalOptionsBuilder.requireFieldMatch(parser.booleanValue()); - } else if ("boundary_max_scan".equals(topLevelFieldName) || "boundaryMaxScan".equals(topLevelFieldName)) { - globalOptionsBuilder.boundaryMaxScan(parser.intValue()); - } else if ("boundary_chars".equals(topLevelFieldName) || "boundaryChars".equals(topLevelFieldName)) { - char[] charsArr = parser.text().toCharArray(); - Character[] globalBoundaryChars = new Character[charsArr.length]; - for (int i = 0; i < charsArr.length; i++) { - globalBoundaryChars[i] = charsArr[i]; - } - globalOptionsBuilder.boundaryChars(globalBoundaryChars); - } else if ("type".equals(topLevelFieldName)) { - globalOptionsBuilder.highlighterType(parser.text()); - } else if ("fragmenter".equals(topLevelFieldName)) { - globalOptionsBuilder.fragmenter(parser.text()); - } else if ("no_match_size".equals(topLevelFieldName) || "noMatchSize".equals(topLevelFieldName)) { - globalOptionsBuilder.noMatchSize(parser.intValue()); - } else if ("force_source".equals(topLevelFieldName) || "forceSource".equals(topLevelFieldName)) { - globalOptionsBuilder.forceSource(parser.booleanValue()); - } else if ("phrase_limit".equals(topLevelFieldName) || "phraseLimit".equals(topLevelFieldName)) { - globalOptionsBuilder.phraseLimit(parser.intValue()); - } - } else if (token == XContentParser.Token.START_OBJECT && "options".equals(topLevelFieldName)) { - globalOptionsBuilder.options(parser.map()); - } else if (token == XContentParser.Token.START_OBJECT) { - if ("fields".equals(topLevelFieldName)) { - String highlightFieldName = null; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - highlightFieldName = parser.currentName(); - } else if (token == XContentParser.Token.START_OBJECT) { - fieldsOptions.add(Tuple.tuple(highlightFieldName, parseFields(parser, queryShardContext))); - } - } - } else if ("highlight_query".equals(topLevelFieldName) || "highlightQuery".equals(topLevelFieldName)) { - globalOptionsBuilder.highlightQuery(queryShardContext.parse(parser).query()); - } - } - } - - final SearchContextHighlight.FieldOptions globalOptions = globalOptionsBuilder.build(); - if (globalOptions.preTags() != null && globalOptions.postTags() == null) { - throw new IllegalArgumentException("Highlighter global preTags are set, but global postTags are not set"); - } - - final List fields = new ArrayList<>(); - // now, go over and fill all fieldsOptions with default values from the global state - for (final Tuple tuple : fieldsOptions) { - fields.add(new SearchContextHighlight.Field(tuple.v1(), tuple.v2().merge(globalOptions).build())); - } - return new SearchContextHighlight(fields); - } - - private static SearchContextHighlight.FieldOptions.Builder parseFields(XContentParser parser, QueryShardContext queryShardContext) throws IOException { - XContentParser.Token token; - - final SearchContextHighlight.FieldOptions.Builder fieldOptionsBuilder = new SearchContextHighlight.FieldOptions.Builder(); - String fieldName = null; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - fieldName = parser.currentName(); - } else if (token == XContentParser.Token.START_ARRAY) { - if ("pre_tags".equals(fieldName) || "preTags".equals(fieldName)) { - List preTagsList = new ArrayList<>(); - while ((token = 
parser.nextToken()) != XContentParser.Token.END_ARRAY) { - preTagsList.add(parser.text()); - } - fieldOptionsBuilder.preTags(preTagsList.toArray(new String[preTagsList.size()])); - } else if ("post_tags".equals(fieldName) || "postTags".equals(fieldName)) { - List postTagsList = new ArrayList<>(); - while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - postTagsList.add(parser.text()); - } - fieldOptionsBuilder.postTags(postTagsList.toArray(new String[postTagsList.size()])); - } else if ("matched_fields".equals(fieldName) || "matchedFields".equals(fieldName)) { - Set matchedFields = new HashSet<>(); - while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - matchedFields.add(parser.text()); - } - fieldOptionsBuilder.matchedFields(matchedFields); - } - } else if (token.isValue()) { - if ("fragment_size".equals(fieldName) || "fragmentSize".equals(fieldName)) { - fieldOptionsBuilder.fragmentCharSize(parser.intValue()); - } else if ("number_of_fragments".equals(fieldName) || "numberOfFragments".equals(fieldName)) { - fieldOptionsBuilder.numberOfFragments(parser.intValue()); - } else if ("fragment_offset".equals(fieldName) || "fragmentOffset".equals(fieldName)) { - fieldOptionsBuilder.fragmentOffset(parser.intValue()); - } else if ("highlight_filter".equals(fieldName) || "highlightFilter".equals(fieldName)) { - fieldOptionsBuilder.highlightFilter(parser.booleanValue()); - } else if ("order".equals(fieldName)) { - fieldOptionsBuilder.scoreOrdered("score".equals(parser.text())); - } else if ("require_field_match".equals(fieldName) || "requireFieldMatch".equals(fieldName)) { - fieldOptionsBuilder.requireFieldMatch(parser.booleanValue()); - } else if ("boundary_max_scan".equals(fieldName) || "boundaryMaxScan".equals(fieldName)) { - fieldOptionsBuilder.boundaryMaxScan(parser.intValue()); - } else if ("boundary_chars".equals(fieldName) || "boundaryChars".equals(fieldName)) { - char[] charsArr = parser.text().toCharArray(); - Character[] boundaryChars = new Character[charsArr.length]; - for (int i = 0; i < charsArr.length; i++) { - boundaryChars[i] = charsArr[i]; - } - fieldOptionsBuilder.boundaryChars(boundaryChars); - } else if ("type".equals(fieldName)) { - fieldOptionsBuilder.highlighterType(parser.text()); - } else if ("fragmenter".equals(fieldName)) { - fieldOptionsBuilder.fragmenter(parser.text()); - } else if ("no_match_size".equals(fieldName) || "noMatchSize".equals(fieldName)) { - fieldOptionsBuilder.noMatchSize(parser.intValue()); - } else if ("force_source".equals(fieldName) || "forceSource".equals(fieldName)) { - fieldOptionsBuilder.forceSource(parser.booleanValue()); - } else if ("phrase_limit".equals(fieldName) || "phraseLimit".equals(fieldName)) { - fieldOptionsBuilder.phraseLimit(parser.intValue()); - } - } else if (token == XContentParser.Token.START_OBJECT) { - if ("highlight_query".equals(fieldName) || "highlightQuery".equals(fieldName)) { - fieldOptionsBuilder.highlightQuery(queryShardContext.parse(parser).query()); - } else if ("options".equals(fieldName)) { - fieldOptionsBuilder.options(parser.map()); - } - } - } - return fieldOptionsBuilder; - } -} diff --git a/core/src/main/java/org/elasticsearch/search/highlight/SearchContextHighlight.java b/core/src/main/java/org/elasticsearch/search/highlight/SearchContextHighlight.java index 293143fb1db..26f638b15a9 100644 --- a/core/src/main/java/org/elasticsearch/search/highlight/SearchContextHighlight.java +++ b/core/src/main/java/org/elasticsearch/search/highlight/SearchContextHighlight.java @@ 
-354,7 +354,6 @@ public class SearchContextHighlight { if (fieldOptions.phraseLimit == -1) { fieldOptions.phraseLimit = globalOptions.phraseLimit; } - return this; } } diff --git a/core/src/test/java/org/elasticsearch/search/highlight/HighlightBuilderTests.java b/core/src/test/java/org/elasticsearch/search/highlight/HighlightBuilderTests.java index bc2c38ef601..6cadddfe76a 100644 --- a/core/src/test/java/org/elasticsearch/search/highlight/HighlightBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/highlight/HighlightBuilderTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.search.highlight; +import org.apache.lucene.search.Query; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.ParseFieldMatcher; @@ -59,9 +60,13 @@ import org.junit.BeforeClass; import java.io.IOException; import java.util.Arrays; import java.util.HashMap; -import java.util.Iterator; +import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Set; +import java.util.TreeSet; +import java.util.function.BiConsumer; +import java.util.function.Function; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.not; @@ -266,8 +271,8 @@ public class HighlightBuilderTests extends ESTestCase { } /** - * test that build() outputs a {@link SearchContextHighlight} that is similar to the one - * we would get when parsing the xContent the test highlight builder is rendering out + * test that build() outputs a {@link SearchContextHighlight} that has similar parameters + * to what we have in the random {@link HighlightBuilder} */ public void testBuildSearchContextHighlight() throws IOException { Settings indexSettings = Settings.settingsBuilder() @@ -287,48 +292,97 @@ public class HighlightBuilderTests extends ESTestCase { for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) { HighlightBuilder highlightBuilder = randomHighlighterBuilder(); SearchContextHighlight highlight = highlightBuilder.build(mockShardContext); - XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); - if (randomBoolean()) { - builder.prettyPrint(); - } - builder.startObject(); - highlightBuilder.innerXContent(builder); - builder.endObject(); - XContentParser parser = XContentHelper.createParser(builder.bytes()); + for (SearchContextHighlight.Field field : highlight.fields()) { + String encoder = highlightBuilder.encoder() != null ? 
highlightBuilder.encoder() : HighlightBuilder.DEFAULT_ENCODER; + assertEquals(encoder, field.fieldOptions().encoder()); + final Field fieldBuilder = getFieldBuilderByName(highlightBuilder, field.field()); + assertNotNull("expected a highlight builder for field " + field.field(), fieldBuilder); + FieldOptions fieldOptions = field.fieldOptions(); - SearchContextHighlight parsedHighlight = new HighlighterParseElement().parse(parser, mockShardContext); - assertNotSame(highlight, parsedHighlight); - assertEquals(highlight.globalForceSource(), parsedHighlight.globalForceSource()); - assertEquals(highlight.fields().size(), parsedHighlight.fields().size()); + BiConsumer, Object>, Function> checkSame = + mergeBeforeChek(highlightBuilder, fieldBuilder, fieldOptions); - Iterator iterator = parsedHighlight.fields().iterator(); - for (org.elasticsearch.search.highlight.SearchContextHighlight.Field field : highlight.fields()) { - org.elasticsearch.search.highlight.SearchContextHighlight.Field otherField = iterator.next(); - assertEquals(field.field(), otherField.field()); - FieldOptions options = field.fieldOptions(); - FieldOptions otherOptions = otherField.fieldOptions(); - assertArrayEquals(options.boundaryChars(), options.boundaryChars()); - assertEquals(options.boundaryMaxScan(), otherOptions.boundaryMaxScan()); - assertEquals(options.encoder(), otherOptions.encoder()); - assertEquals(options.fragmentCharSize(), otherOptions.fragmentCharSize()); - assertEquals(options.fragmenter(), otherOptions.fragmenter()); - assertEquals(options.fragmentOffset(), otherOptions.fragmentOffset()); - assertEquals(options.highlighterType(), otherOptions.highlighterType()); - assertEquals(options.highlightFilter(), otherOptions.highlightFilter()); - assertEquals(options.highlightQuery(), otherOptions.highlightQuery()); - assertEquals(options.matchedFields(), otherOptions.matchedFields()); - assertEquals(options.noMatchSize(), otherOptions.noMatchSize()); - assertEquals(options.numberOfFragments(), otherOptions.numberOfFragments()); - assertEquals(options.options(), otherOptions.options()); - assertEquals(options.phraseLimit(), otherOptions.phraseLimit()); - assertArrayEquals(options.preTags(), otherOptions.preTags()); - assertArrayEquals(options.postTags(), otherOptions.postTags()); - assertEquals(options.requireFieldMatch(), otherOptions.requireFieldMatch()); - assertEquals(options.scoreOrdered(), otherOptions.scoreOrdered()); + checkSame.accept(AbstractHighlighterBuilder::boundaryChars, FieldOptions::boundaryChars); + checkSame.accept(AbstractHighlighterBuilder::boundaryMaxScan, FieldOptions::boundaryMaxScan); + checkSame.accept(AbstractHighlighterBuilder::fragmentSize, FieldOptions::fragmentCharSize); + checkSame.accept(AbstractHighlighterBuilder::fragmenter, FieldOptions::fragmenter); + checkSame.accept(AbstractHighlighterBuilder::requireFieldMatch, FieldOptions::requireFieldMatch); + checkSame.accept(AbstractHighlighterBuilder::noMatchSize, FieldOptions::noMatchSize); + checkSame.accept(AbstractHighlighterBuilder::numOfFragments, FieldOptions::numberOfFragments); + checkSame.accept(AbstractHighlighterBuilder::phraseLimit, FieldOptions::phraseLimit); + checkSame.accept(AbstractHighlighterBuilder::highlighterType, FieldOptions::highlighterType); + checkSame.accept(AbstractHighlighterBuilder::highlightFilter, FieldOptions::highlightFilter); + checkSame.accept(AbstractHighlighterBuilder::preTags, FieldOptions::preTags); + checkSame.accept(AbstractHighlighterBuilder::postTags, FieldOptions::postTags); + 
checkSame.accept(AbstractHighlighterBuilder::options, FieldOptions::options); + checkSame.accept(AbstractHighlighterBuilder::order, op -> op.scoreOrdered() ? Order.SCORE : Order.NONE); + assertEquals(fieldBuilder.fragmentOffset, fieldOptions.fragmentOffset()); + if (fieldBuilder.matchedFields != null) { + String[] copy = Arrays.copyOf(fieldBuilder.matchedFields, fieldBuilder.matchedFields.length); + Arrays.sort(copy); + assertArrayEquals(copy, + new TreeSet(fieldOptions.matchedFields()).toArray(new String[fieldOptions.matchedFields().size()])); + } else { + assertNull(fieldOptions.matchedFields()); + } + Query expectedValue = null; + if (fieldBuilder.highlightQuery != null) { + expectedValue = QueryBuilder.rewriteQuery(fieldBuilder.highlightQuery, mockShardContext).toQuery(mockShardContext); + } else if (highlightBuilder.highlightQuery != null) { + expectedValue = QueryBuilder.rewriteQuery(highlightBuilder.highlightQuery, mockShardContext).toQuery(mockShardContext); + } + assertEquals(expectedValue, fieldOptions.highlightQuery()); } } } + /** + * Create a generic helper function that performs all the work of merging the global highlight builder parameter, + * the (potential) overwrite on the field level and the default value from {@link HighlightBuilder#defaultOptions} + * before making the assertion that the value in the highlight builder and the actual value in the {@link FieldOptions} + * passed in is the same. + * + * @param highlightBuilder provides the (optional) global builder parameter + * @param fieldBuilder provides the (optional) field level parameter, if present this overwrites the global value + * @param options the target field options that are checked + */ + private static BiConsumer, Object>, Function> mergeBeforeChek( + HighlightBuilder highlightBuilder, Field fieldBuilder, FieldOptions options) { + return (highlightBuilderParameterAccessor, fieldOptionsParameterAccessor) -> { + Object expectedValue = null; + Object globalLevelValue = highlightBuilderParameterAccessor.apply(highlightBuilder); + Object fieldLevelValue = highlightBuilderParameterAccessor.apply(fieldBuilder); + if (fieldLevelValue != null) { + expectedValue = fieldLevelValue; + } else if (globalLevelValue != null) { + expectedValue = globalLevelValue; + } else { + expectedValue = fieldOptionsParameterAccessor.apply(HighlightBuilder.defaultOptions); + } + Object actualValue = fieldOptionsParameterAccessor.apply(options); + if (actualValue instanceof String[]) { + assertArrayEquals((String[]) expectedValue, (String[]) actualValue); + } else if (actualValue instanceof Character[]) { + if (expectedValue instanceof char[]) { + assertArrayEquals(HighlightBuilder.convertCharArray((char[]) expectedValue), (Character[]) actualValue); + } else { + assertArrayEquals((Character[]) expectedValue, (Character[]) actualValue); + } + } else { + assertEquals(expectedValue, actualValue); + } + }; + } + + private static Field getFieldBuilderByName(HighlightBuilder highlightBuilder, String fieldName) { + for (Field hbfield : highlightBuilder.fields()) { + if (hbfield.name().equals(fieldName)) { + return hbfield; + } + } + return null; + } + /** * `tags_schema` is not produced by toXContent in the builder but should be parseable, so this * adds a simple json test for this. 
@@ -447,7 +501,7 @@ public class HighlightBuilderTests extends ESTestCase { } int numberOfFields = randomIntBetween(1,5); for (int i = 0; i < numberOfFields; i++) { - Field field = new Field(randomAsciiOfLengthBetween(1, 10)); + Field field = new Field(i + "_" + randomAsciiOfLengthBetween(1, 10)); setRandomCommonOptions(field); if (randomBoolean()) { field.fragmentOffset(randomIntBetween(1, 100)); @@ -617,13 +671,17 @@ public class HighlightBuilderTests extends ESTestCase { } } + /** + * Create an array of unique Strings. If they were not unique, duplicate field names + * would be dropped in {@link FieldOptions.Builder#matchedFields(Set)}, resulting in test glitches. + */ private static String[] randomStringArray(int minSize, int maxSize) { int size = randomIntBetween(minSize, maxSize); - String[] randomStrings = new String[size]; + Set randomStrings = new HashSet(size); for (int f = 0; f < size; f++) { - randomStrings[f] = randomAsciiOfLengthBetween(1, 10); + randomStrings.add(randomAsciiOfLengthBetween(3, 10)); } - return randomStrings; + return randomStrings.toArray(new String[randomStrings.size()]); } /**