Checkstyle

commit 153b2ae180 (parent cf6e1a4362)
@@ -575,18 +575,6 @@
   <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]fetch[/\\]FetchSearchResult.java" checks="LineLength" />
   <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]fetch[/\\]FetchSubPhase.java" checks="LineLength" />
   <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]fetch[/\\]FetchSubPhaseParseElement.java" checks="LineLength" />
-  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]fetch[/\\]explain[/\\]ExplainFetchSubPhase.java" checks="LineLength" />
-  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]fetch[/\\]fielddata[/\\]FieldDataFieldsParseElement.java" checks="LineLength" />
-  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]fetch[/\\]innerhits[/\\]InnerHitsParseElement.java" checks="LineLength" />
-  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]fetch[/\\]source[/\\]FetchSourceContext.java" checks="LineLength" />
-  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]highlight[/\\]FastVectorHighlighter.java" checks="LineLength" />
-  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]highlight[/\\]HighlightPhase.java" checks="LineLength" />
-  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]highlight[/\\]HighlightUtils.java" checks="LineLength" />
-  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]highlight[/\\]HighlighterParseElement.java" checks="LineLength" />
-  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]highlight[/\\]PlainHighlighter.java" checks="LineLength" />
-  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]highlight[/\\]PostingsHighlighter.java" checks="LineLength" />
-  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]highlight[/\\]vectorhighlight[/\\]SimpleFragmentsBuilder.java" checks="LineLength" />
-  <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]highlight[/\\]vectorhighlight[/\\]SourceScoreOrderFragmentsBuilder.java" checks="LineLength" />
   <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]internal[/\\]DefaultSearchContext.java" checks="LineLength" />
   <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]internal[/\\]FilteredSearchContext.java" checks="LineLength" />
   <suppress files="core[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]search[/\\]internal[/\\]InternalSearchHit.java" checks="LineLength" />
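Each `<suppress>` entry above exempts one file from Checkstyle's LineLength rule; once a file's long lines are wrapped, its entry can be dropped, which is what the rest of this commit does. As a rough, hypothetical illustration of the wrap pattern used throughout (names invented, assuming the usual 140-column limit):

```java
public class LineLengthWrapSketch {
    // Before (a single line over the column limit):
    //   return "the field [" + fieldName + "] on doc [" + type + "#" + id + "]";
    // After: break before an operator and indent the continuation, as this commit does.
    static String describe(String fieldName, String type, String id) {
        return "the field [" + fieldName
            + "] on doc [" + type + "#" + id + "]";
    }

    public static void main(String[] args) {
        System.out.println(describe("comments.message", "article", "1"));
    }
}
```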
@@ -18,8 +18,8 @@
  */

 /**
- * Search phase that fetches the top resutls from the shards after the results of the query phase have been merged. Pluggable by
- * implementing {@link org.elasticsearch.search.fetch.FetchSubPhase} and
+ * Search phase that fetches the top hits from the shards after the results of the query phase have been merged. Pluggable by implementing
+ * {@link org.elasticsearch.search.fetch.FetchSubPhase} and
  * {@link org.elasticsearch.plugins.SearchPlugin#getFetchSubPhases(org.elasticsearch.plugins.SearchPlugin.FetchPhaseConstructionContext)}.
  */
 package org.elasticsearch.search.fetch;
@@ -21,14 +21,13 @@ package org.elasticsearch.search.fetch.subphase;
 import org.apache.lucene.search.Explanation;
 import org.elasticsearch.search.fetch.FetchPhaseExecutionException;
 import org.elasticsearch.search.fetch.FetchSubPhase;
 import org.elasticsearch.search.internal.InternalSearchHit;
 import org.elasticsearch.search.internal.SearchContext;
 import org.elasticsearch.search.rescore.RescoreSearchContext;

 import java.io.IOException;

 /**
- *
+ * Explains the scoring calculations for the top hits.
  */
 public final class ExplainFetchSubPhase implements FetchSubPhase {

@@ -47,7 +46,8 @@ public final class ExplainFetchSubPhase implements FetchSubPhase {
             // we use the top level doc id, since we work with the top level searcher
             hitContext.hit().explanation(explanation);
         } catch (IOException e) {
-            throw new FetchPhaseExecutionException(context, "Failed to explain doc [" + hitContext.hit().type() + "#" + hitContext.hit().id() + "]", e);
+            throw new FetchPhaseExecutionException(context, "Failed to explain doc [" + hitContext.hit().type() + "#"
+                + hitContext.hit().id() + "]", e);
         } finally {
             context.clearReleasables(SearchContext.Lifetime.COLLECTION);
         }
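For orientation, the sub-phase above asks Lucene to explain the score of each top hit and attaches the result to the hit. A minimal standalone sketch of the underlying call in plain Lucene (not the Elasticsearch internals; assumes a recent Lucene with ByteBuffersDirectory):

```java
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.store.ByteBuffersDirectory;

public class ExplainSketch {
    public static void main(String[] args) throws Exception {
        ByteBuffersDirectory dir = new ByteBuffersDirectory();
        try (IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()))) {
            Document d = new Document();
            d.add(new TextField("body", "quick brown fox", Field.Store.NO));
            w.addDocument(d);
        }
        try (IndexReader r = DirectoryReader.open(dir)) {
            IndexSearcher searcher = new IndexSearcher(r);
            Query q = new TermQuery(new Term("body", "fox"));
            // The sub-phase calls explain() with the top-level doc id, then stores the
            // Explanation on the hit; IOExceptions become FetchPhaseExecutionException.
            Explanation explanation = searcher.explain(q, 0);
            System.out.println(explanation);
        }
    }
}
```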
@@ -38,6 +38,7 @@ import java.util.Arrays;
 import java.util.List;

 /**
+ * Context used to fetch the {@code _source}.
  */
 public class FetchSourceContext implements Streamable, ToXContent {

@@ -175,8 +176,8 @@ public class FetchSourceContext implements Streamable, ToXContent {
                     if (token == XContentParser.Token.VALUE_STRING) {
                         includesList.add(parser.text());
                     } else {
-                        throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + " in [" + currentFieldName + "].",
-                            parser.getTokenLocation());
+                        throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token
+                            + " in [" + currentFieldName + "].", parser.getTokenLocation());
                     }
                 }
                 includes = includesList.toArray(new String[includesList.size()]);
@@ -186,14 +187,14 @@ public class FetchSourceContext implements Streamable, ToXContent {
                     if (token == XContentParser.Token.VALUE_STRING) {
                         excludesList.add(parser.text());
                     } else {
-                        throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + " in [" + currentFieldName + "].",
-                            parser.getTokenLocation());
+                        throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token
+                            + " in [" + currentFieldName + "].", parser.getTokenLocation());
                     }
                 }
                 excludes = excludesList.toArray(new String[excludesList.size()]);
             } else {
-                throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + " in [" + currentFieldName + "].",
-                    parser.getTokenLocation());
+                throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token
+                    + " in [" + currentFieldName + "].", parser.getTokenLocation());
             }
         } else if (token == XContentParser.Token.VALUE_STRING) {
             if (context.getParseFieldMatcher().match(currentFieldName, INCLUDES_FIELD)) {
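The includes/excludes arrays parsed above drive `_source` filtering. A simplified sketch of the semantics (trailing-`*` wildcards only; the real pattern support is richer): a field survives when it matches some include, or includes is empty, and matches no exclude.

```java
import java.util.List;

public class SourceFilterSketch {
    // Simplified matcher: exact name, or prefix match for a trailing-* pattern.
    static boolean matches(String pattern, String field) {
        return pattern.endsWith("*")
            ? field.startsWith(pattern.substring(0, pattern.length() - 1))
            : field.equals(pattern);
    }

    static boolean keep(String field, List<String> includes, List<String> excludes) {
        boolean included = includes.isEmpty() || includes.stream().anyMatch(p -> matches(p, field));
        boolean excluded = excludes.stream().anyMatch(p -> matches(p, field));
        return included && !excluded;
    }

    public static void main(String[] args) {
        List<String> includes = List.of("comments.*");
        List<String> excludes = List.of("comments.internal");
        System.out.println(keep("comments.message", includes, excludes));  // true
        System.out.println(keep("comments.internal", includes, excludes)); // false (excluded)
        System.out.println(keep("title", includes, excludes));             // false (not included)
    }
}
```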
@@ -65,7 +65,8 @@ public class FastVectorHighlighter implements Highlighter {
         FieldMapper mapper = highlighterContext.mapper;

         if (canHighlight(mapper) == false) {
-            throw new IllegalArgumentException("the field [" + highlighterContext.fieldName + "] should be indexed with term vector with position offsets to be used with fast vector highlighter");
+            throw new IllegalArgumentException("the field [" + highlighterContext.fieldName
+                + "] should be indexed with term vector with position offsets to be used with fast vector highlighter");
         }

         Encoder encoder = field.fieldOptions().encoder().equals("html") ? HighlightUtils.Encoders.HTML : HighlightUtils.Encoders.DEFAULT;
@@ -79,14 +80,22 @@ public class FastVectorHighlighter implements Highlighter {
         FieldQuery fieldQuery;
         if (field.fieldOptions().requireFieldMatch()) {
             if (cache.fieldMatchFieldQuery == null) {
-                // we use top level reader to rewrite the query against all readers, with use caching it across hits (and across readers...)
-                cache.fieldMatchFieldQuery = new CustomFieldQuery(highlighterContext.query, hitContext.topLevelReader(), true, field.fieldOptions().requireFieldMatch());
+                /*
+                 * we use top level reader to rewrite the query against all readers, with use caching it across hits (and across
+                 * readers...)
+                 */
+                cache.fieldMatchFieldQuery = new CustomFieldQuery(highlighterContext.query, hitContext.topLevelReader(),
+                    true, field.fieldOptions().requireFieldMatch());
             }
             fieldQuery = cache.fieldMatchFieldQuery;
         } else {
             if (cache.noFieldMatchFieldQuery == null) {
-                // we use top level reader to rewrite the query against all readers, with use caching it across hits (and across readers...)
-                cache.noFieldMatchFieldQuery = new CustomFieldQuery(highlighterContext.query, hitContext.topLevelReader(), true, field.fieldOptions().requireFieldMatch());
+                /*
+                 * we use top level reader to rewrite the query against all readers, with use caching it across hits (and across
+                 * readers...)
+                 */
+                cache.noFieldMatchFieldQuery = new CustomFieldQuery(highlighterContext.query, hitContext.topLevelReader(),
+                    true, field.fieldOptions().requireFieldMatch());
             }
             fieldQuery = cache.noFieldMatchFieldQuery;
         }
@@ -97,31 +106,40 @@ public class FastVectorHighlighter implements Highlighter {
         BaseFragmentsBuilder fragmentsBuilder;

         BoundaryScanner boundaryScanner = DEFAULT_BOUNDARY_SCANNER;
-        if (field.fieldOptions().boundaryMaxScan() != SimpleBoundaryScanner.DEFAULT_MAX_SCAN || field.fieldOptions().boundaryChars() != SimpleBoundaryScanner.DEFAULT_BOUNDARY_CHARS) {
-            boundaryScanner = new SimpleBoundaryScanner(field.fieldOptions().boundaryMaxScan(), field.fieldOptions().boundaryChars());
+        if (field.fieldOptions().boundaryMaxScan() != SimpleBoundaryScanner.DEFAULT_MAX_SCAN
+                || field.fieldOptions().boundaryChars() != SimpleBoundaryScanner.DEFAULT_BOUNDARY_CHARS) {
+            boundaryScanner = new SimpleBoundaryScanner(field.fieldOptions().boundaryMaxScan(),
+                field.fieldOptions().boundaryChars());
         }
         boolean forceSource = context.highlight().forceSource(field);
         if (field.fieldOptions().numberOfFragments() == 0) {
             fragListBuilder = new SingleFragListBuilder();

             if (!forceSource && mapper.fieldType().stored()) {
-                fragmentsBuilder = new SimpleFragmentsBuilder(mapper, field.fieldOptions().preTags(), field.fieldOptions().postTags(), boundaryScanner);
+                fragmentsBuilder = new SimpleFragmentsBuilder(mapper, field.fieldOptions().preTags(),
+                    field.fieldOptions().postTags(), boundaryScanner);
             } else {
-                fragmentsBuilder = new SourceSimpleFragmentsBuilder(mapper, context, field.fieldOptions().preTags(), field.fieldOptions().postTags(), boundaryScanner);
+                fragmentsBuilder = new SourceSimpleFragmentsBuilder(mapper, context,
+                    field.fieldOptions().preTags(), field.fieldOptions().postTags(), boundaryScanner);
             }
         } else {
-            fragListBuilder = field.fieldOptions().fragmentOffset() == -1 ? new SimpleFragListBuilder() : new SimpleFragListBuilder(field.fieldOptions().fragmentOffset());
+            fragListBuilder = field.fieldOptions().fragmentOffset() == -1 ?
+                new SimpleFragListBuilder() : new SimpleFragListBuilder(field.fieldOptions().fragmentOffset());
             if (field.fieldOptions().scoreOrdered()) {
                 if (!forceSource && mapper.fieldType().stored()) {
-                    fragmentsBuilder = new ScoreOrderFragmentsBuilder(field.fieldOptions().preTags(), field.fieldOptions().postTags(), boundaryScanner);
+                    fragmentsBuilder = new ScoreOrderFragmentsBuilder(field.fieldOptions().preTags(),
+                        field.fieldOptions().postTags(), boundaryScanner);
                 } else {
-                    fragmentsBuilder = new SourceScoreOrderFragmentsBuilder(mapper, context, field.fieldOptions().preTags(), field.fieldOptions().postTags(), boundaryScanner);
+                    fragmentsBuilder = new SourceScoreOrderFragmentsBuilder(mapper, context,
+                        field.fieldOptions().preTags(), field.fieldOptions().postTags(), boundaryScanner);
                 }
             } else {
                 if (!forceSource && mapper.fieldType().stored()) {
-                    fragmentsBuilder = new SimpleFragmentsBuilder(mapper, field.fieldOptions().preTags(), field.fieldOptions().postTags(), boundaryScanner);
+                    fragmentsBuilder = new SimpleFragmentsBuilder(mapper, field.fieldOptions().preTags(),
+                        field.fieldOptions().postTags(), boundaryScanner);
                 } else {
-                    fragmentsBuilder = new SourceSimpleFragmentsBuilder(mapper, context, field.fieldOptions().preTags(), field.fieldOptions().postTags(), boundaryScanner);
+                    fragmentsBuilder = new SourceSimpleFragmentsBuilder(mapper, context, field.fieldOptions().preTags(),
+                        field.fieldOptions().postTags(), boundaryScanner);
                 }
             }
         }
@@ -143,16 +161,20 @@ public class FastVectorHighlighter implements Highlighter {
         String[] fragments;

         // a HACK to make highlighter do highlighting, even though its using the single frag list builder
-        int numberOfFragments = field.fieldOptions().numberOfFragments() == 0 ? Integer.MAX_VALUE : field.fieldOptions().numberOfFragments();
-        int fragmentCharSize = field.fieldOptions().numberOfFragments() == 0 ? Integer.MAX_VALUE : field.fieldOptions().fragmentCharSize();
+        int numberOfFragments = field.fieldOptions().numberOfFragments() == 0 ?
+            Integer.MAX_VALUE : field.fieldOptions().numberOfFragments();
+        int fragmentCharSize = field.fieldOptions().numberOfFragments() == 0 ?
+            Integer.MAX_VALUE : field.fieldOptions().fragmentCharSize();
         // we highlight against the low level reader and docId, because if we load source, we want to reuse it if possible
         // Only send matched fields if they were requested to save time.
         if (field.fieldOptions().matchedFields() != null && !field.fieldOptions().matchedFields().isEmpty()) {
-            fragments = cache.fvh.getBestFragments(fieldQuery, hitContext.reader(), hitContext.docId(), mapper.fieldType().name(), field.fieldOptions().matchedFields(), fragmentCharSize,
-                numberOfFragments, entry.fragListBuilder, entry.fragmentsBuilder, field.fieldOptions().preTags(), field.fieldOptions().postTags(), encoder);
+            fragments = cache.fvh.getBestFragments(fieldQuery, hitContext.reader(), hitContext.docId(), mapper.fieldType().name(),
+                field.fieldOptions().matchedFields(), fragmentCharSize, numberOfFragments, entry.fragListBuilder,
+                entry.fragmentsBuilder, field.fieldOptions().preTags(), field.fieldOptions().postTags(), encoder);
         } else {
-            fragments = cache.fvh.getBestFragments(fieldQuery, hitContext.reader(), hitContext.docId(), mapper.fieldType().name(), fragmentCharSize,
-                numberOfFragments, entry.fragListBuilder, entry.fragmentsBuilder, field.fieldOptions().preTags(), field.fieldOptions().postTags(), encoder);
+            fragments = cache.fvh.getBestFragments(fieldQuery, hitContext.reader(), hitContext.docId(), mapper.fieldType().name(),
+                fragmentCharSize, numberOfFragments, entry.fragListBuilder, entry.fragmentsBuilder, field.fieldOptions().preTags(),
+                field.fieldOptions().postTags(), encoder);
         }

         if (fragments != null && fragments.length > 0) {
@@ -180,14 +202,13 @@ public class FastVectorHighlighter implements Highlighter {

     @Override
     public boolean canHighlight(FieldMapper fieldMapper) {
-        return fieldMapper.fieldType().storeTermVectors() && fieldMapper.fieldType().storeTermVectorOffsets() && fieldMapper.fieldType().storeTermVectorPositions();
+        return fieldMapper.fieldType().storeTermVectors() && fieldMapper.fieldType().storeTermVectorOffsets()
+            && fieldMapper.fieldType().storeTermVectorPositions();
     }

     private class MapperHighlightEntry {
         public FragListBuilder fragListBuilder;
         public FragmentsBuilder fragmentsBuilder;

         public org.apache.lucene.search.highlight.Highlighter highlighter;
     }

     private class HighlighterEntry {
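canHighlight() above requires term vectors with both positions and offsets. A small sketch of the Lucene FieldType flags a field would need to satisfy that check (standard Lucene API):

```java
import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.TextField;

public class TermVectorSetup {
    public static void main(String[] args) {
        FieldType ft = new FieldType(TextField.TYPE_STORED);
        // The fast vector highlighter needs all three, mirroring canHighlight() above.
        ft.setStoreTermVectors(true);
        ft.setStoreTermVectorOffsets(true);
        ft.setStoreTermVectorPositions(true);
        ft.freeze();
        System.out.println(ft.storeTermVectors()
            && ft.storeTermVectorOffsets()
            && ft.storeTermVectorPositions()); // true
    }
}
```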
@@ -67,7 +67,8 @@ public class HighlightPhase extends AbstractComponent implements FetchSubPhase {
             if (context.highlight().forceSource(field)) {
                 SourceFieldMapper sourceFieldMapper = context.mapperService().documentMapper(hitContext.hit().type()).sourceMapper();
                 if (!sourceFieldMapper.enabled()) {
-                    throw new IllegalArgumentException("source is forced for fields " + fieldNamesToHighlight + " but type [" + hitContext.hit().type() + "] has disabled _source");
+                    throw new IllegalArgumentException("source is forced for fields " + fieldNamesToHighlight
+                        + " but type [" + hitContext.hit().type() + "] has disabled _source");
                 }
             }

@@ -105,11 +106,16 @@ public class HighlightPhase extends AbstractComponent implements FetchSubPhase {
                 }
                 Highlighter highlighter = highlighters.get(highlighterType);
                 if (highlighter == null) {
-                    throw new IllegalArgumentException("unknown highlighter type [" + highlighterType + "] for the field [" + fieldName + "]");
+                    throw new IllegalArgumentException("unknown highlighter type [" + highlighterType
+                        + "] for the field [" + fieldName + "]");
                 }

-                Query highlightQuery = field.fieldOptions().highlightQuery() == null ? context.parsedQuery().query() : field.fieldOptions().highlightQuery();
-                HighlighterContext highlighterContext = new HighlighterContext(fieldName, field, fieldMapper, context, hitContext, highlightQuery);
+                Query highlightQuery = field.fieldOptions().highlightQuery();
+                if (highlightQuery == null) {
+                    highlightQuery = context.parsedQuery().query();
+                }
+                HighlighterContext highlighterContext = new HighlighterContext(fieldName, field, fieldMapper, context,
+                    hitContext, highlightQuery);

                 if ((highlighter.canHighlight(fieldMapper) == false) && fieldNameContainsWildcards) {
                     // if several fieldnames matched the wildcard then we want to skip those that we cannot highlight
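The lookup above resolves a highlighter implementation by name and fails fast on unknown types. A stripped-down sketch of that registry shape (hypothetical interface, same error-message style; not the Elasticsearch classes):

```java
import java.util.Map;

public class HighlighterRegistrySketch {
    interface Highlighter { String highlight(String text); }

    static Highlighter resolve(Map<String, Highlighter> highlighters, String type, String fieldName) {
        Highlighter highlighter = highlighters.get(type);
        if (highlighter == null) {
            throw new IllegalArgumentException("unknown highlighter type [" + type
                + "] for the field [" + fieldName + "]");
        }
        return highlighter;
    }

    public static void main(String[] args) {
        Map<String, Highlighter> registry = Map.of("plain", text -> "<em>" + text + "</em>");
        System.out.println(resolve(registry, "plain", "body").highlight("fox"));
    }
}
```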
@@ -43,7 +43,11 @@ public final class HighlightUtils {

     }

-    static List<Object> loadFieldValues(SearchContextHighlight.Field field, FieldMapper mapper, SearchContext searchContext, FetchSubPhase.HitContext hitContext) throws IOException {
+    /**
+     * Load field values for highlighting.
+     */
+    public static List<Object> loadFieldValues(SearchContextHighlight.Field field, FieldMapper mapper, SearchContext searchContext,
+                                               FetchSubPhase.HitContext hitContext) throws IOException {
         //percolator needs to always load from source, thus it sets the global force source to true
         boolean forceSource = searchContext.highlight().forceSource(field);
         List<Object> textsToHighlight;
@@ -65,7 +69,7 @@ public final class HighlightUtils {
     }

     static class Encoders {
-        static Encoder DEFAULT = new DefaultEncoder();
-        static Encoder HTML = new SimpleHTMLEncoder();
+        static final Encoder DEFAULT = new DefaultEncoder();
+        static final Encoder HTML = new SimpleHTMLEncoder();
     }
 }
@@ -68,11 +68,13 @@ public class PlainHighlighter implements Highlighter {
             hitContext.cache().put(CACHE_KEY, mappers);
         }
         @SuppressWarnings("unchecked")
-        Map<FieldMapper, org.apache.lucene.search.highlight.Highlighter> cache = (Map<FieldMapper, org.apache.lucene.search.highlight.Highlighter>) hitContext.cache().get(CACHE_KEY);
+        Map<FieldMapper, org.apache.lucene.search.highlight.Highlighter> cache =
+            (Map<FieldMapper, org.apache.lucene.search.highlight.Highlighter>) hitContext.cache().get(CACHE_KEY);

         org.apache.lucene.search.highlight.Highlighter entry = cache.get(mapper);
         if (entry == null) {
-            QueryScorer queryScorer = new CustomQueryScorer(highlighterContext.query, field.fieldOptions().requireFieldMatch() ? mapper.fieldType().name() : null);
+            QueryScorer queryScorer = new CustomQueryScorer(highlighterContext.query,
+                field.fieldOptions().requireFieldMatch() ? mapper.fieldType().name() : null);
             queryScorer.setExpandMultiTermQuery(true);
             Fragmenter fragmenter;
             if (field.fieldOptions().numberOfFragments() == 0) {
@@ -84,7 +86,8 @@ public class PlainHighlighter implements Highlighter {
             } else if ("span".equals(field.fieldOptions().fragmenter())) {
                 fragmenter = new SimpleSpanFragmenter(queryScorer, field.fieldOptions().fragmentCharSize());
             } else {
-                throw new IllegalArgumentException("unknown fragmenter option [" + field.fieldOptions().fragmenter() + "] for the field [" + highlighterContext.fieldName + "]");
+                throw new IllegalArgumentException("unknown fragmenter option [" + field.fieldOptions().fragmenter()
+                    + "] for the field [" + highlighterContext.fieldName + "]");
             }
             Formatter formatter = new SimpleHTMLFormatter(field.fieldOptions().preTags()[0], field.fieldOptions().postTags()[0]);

@@ -181,7 +184,8 @@ public class PlainHighlighter implements Highlighter {
         return true;
     }

-    private static int findGoodEndForNoHighlightExcerpt(int noMatchSize, Analyzer analyzer, String fieldName, String contents) throws IOException {
+    private static int findGoodEndForNoHighlightExcerpt(int noMatchSize, Analyzer analyzer, String fieldName, String contents)
+            throws IOException {
         try (TokenStream tokenStream = analyzer.tokenStream(fieldName, contents)) {
             if (!tokenStream.hasAttribute(OffsetAttribute.class)) {
                 // Can't split on term boundaries without offsets
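findGoodEndForNoHighlightExcerpt() walks a token stream and keeps the last token end offset that still fits the excerpt budget. A simplified, self-contained sketch of that scan using Lucene's OffsetAttribute (assumed behavior; the real method also handles fallbacks):

```java
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;

public class OffsetScanSketch {
    // Returns the end offset of the last token that fits within noMatchSize characters.
    static int findGoodEnd(int noMatchSize, StandardAnalyzer analyzer, String field, String contents)
            throws Exception {
        try (TokenStream ts = analyzer.tokenStream(field, contents)) {
            OffsetAttribute offset = ts.addAttribute(OffsetAttribute.class);
            ts.reset();
            int end = 0;
            while (ts.incrementToken()) {
                if (offset.endOffset() > noMatchSize) {
                    break; // stop at the first token that crosses the budget
                }
                end = offset.endOffset();
            }
            ts.end();
            return end;
        }
    }

    public static void main(String[] args) throws Exception {
        System.out.println(findGoodEnd(10, new StandardAnalyzer(), "body", "the quick brown fox jumps"));
    }
}
```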
@@ -33,6 +33,7 @@ import org.elasticsearch.index.mapper.FieldMapper;
 import org.elasticsearch.search.fetch.FetchPhaseExecutionException;
 import org.elasticsearch.search.fetch.FetchSubPhase;
 import org.elasticsearch.search.internal.SearchContext;
+import org.elasticsearch.search.fetch.subphase.highlight.HighlightUtils.Encoders;

 import java.io.IOException;
 import java.text.BreakIterator;
@@ -53,7 +54,8 @@ public class PostingsHighlighter implements Highlighter {
         FieldMapper fieldMapper = highlighterContext.mapper;
         SearchContextHighlight.Field field = highlighterContext.field;
         if (canHighlight(fieldMapper) == false) {
-            throw new IllegalArgumentException("the field [" + highlighterContext.fieldName + "] should be indexed with positions and offsets in the postings list to be used with postings highlighter");
+            throw new IllegalArgumentException("the field [" + highlighterContext.fieldName
+                + "] should be indexed with positions and offsets in the postings list to be used with postings highlighter");
         }

         SearchContext context = highlighterContext.context;
@@ -67,8 +69,9 @@ public class PostingsHighlighter implements Highlighter {
         MapperHighlighterEntry mapperHighlighterEntry = highlighterEntry.mappers.get(fieldMapper);

         if (mapperHighlighterEntry == null) {
-            Encoder encoder = field.fieldOptions().encoder().equals("html") ? HighlightUtils.Encoders.HTML : HighlightUtils.Encoders.DEFAULT;
-            CustomPassageFormatter passageFormatter = new CustomPassageFormatter(field.fieldOptions().preTags()[0], field.fieldOptions().postTags()[0], encoder);
+            Encoder encoder = field.fieldOptions().encoder().equals("html") ? Encoders.HTML : Encoders.DEFAULT;
+            CustomPassageFormatter passageFormatter = new CustomPassageFormatter(
+                field.fieldOptions().preTags()[0], field.fieldOptions().postTags()[0], encoder);
             mapperHighlighterEntry = new MapperHighlighterEntry(passageFormatter);
         }

@@ -83,17 +86,20 @@ public class PostingsHighlighter implements Highlighter {
                 //so we don't lose the distinction between the different values of a field and we get back a snippet per value
                 String fieldValue = mergeFieldValues(fieldValues, HighlightUtils.NULL_SEPARATOR);
                 CustomSeparatorBreakIterator breakIterator = new CustomSeparatorBreakIterator(HighlightUtils.NULL_SEPARATOR);
-                highlighter = new CustomPostingsHighlighter(analyzer, mapperHighlighterEntry.passageFormatter, breakIterator, fieldValue, field.fieldOptions().noMatchSize() > 0);
+                highlighter = new CustomPostingsHighlighter(analyzer, mapperHighlighterEntry.passageFormatter, breakIterator,
+                    fieldValue, field.fieldOptions().noMatchSize() > 0);
                 numberOfFragments = fieldValues.size(); //we are highlighting the whole content, one snippet per value
             } else {
                 //using paragraph separator we make sure that each field value holds a discrete passage for highlighting
                 String fieldValue = mergeFieldValues(fieldValues, HighlightUtils.PARAGRAPH_SEPARATOR);
-                highlighter = new CustomPostingsHighlighter(analyzer, mapperHighlighterEntry.passageFormatter, fieldValue, field.fieldOptions().noMatchSize() > 0);
+                highlighter = new CustomPostingsHighlighter(analyzer, mapperHighlighterEntry.passageFormatter,
+                    fieldValue, field.fieldOptions().noMatchSize() > 0);
                 numberOfFragments = field.fieldOptions().numberOfFragments();
             }

             IndexSearcher searcher = new IndexSearcher(hitContext.reader());
-            Snippet[] fieldSnippets = highlighter.highlightField(fieldMapper.fieldType().name(), highlighterContext.query, searcher, hitContext.docId(), numberOfFragments);
+            Snippet[] fieldSnippets = highlighter.highlightField(fieldMapper.fieldType().name(), highlighterContext.query, searcher,
+                hitContext.docId(), numberOfFragments);
             for (Snippet fieldSnippet : fieldSnippets) {
                 if (Strings.hasText(fieldSnippet.getText())) {
                     snippets.add(fieldSnippet);
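mergeFieldValues() joins the values of a multi-valued field with a separator that cannot occur in normal text, so the break iterator can treat each value as its own passage. A tiny sketch of that idea (the NULL_SEPARATOR name is taken from the code above; the joining here is plain String.join, not the Elasticsearch helper):

```java
import java.util.List;

public class MergeFieldValuesSketch {
    // A character that never appears in real text keeps each value a discrete passage.
    static final char NULL_SEPARATOR = '\u0000';

    static String mergeFieldValues(List<String> values, char separator) {
        return String.join(String.valueOf(separator), values);
    }

    public static void main(String[] args) {
        String merged = mergeFieldValues(List.of("first value", "second value"), NULL_SEPARATOR);
        // Splitting on the separator recovers one snippet slot per original value.
        System.out.println(merged.split(String.valueOf(NULL_SEPARATOR)).length); // 2
    }
}
```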
@@ -40,6 +40,7 @@ public class SimpleFragmentsBuilder extends org.apache.lucene.search.vectorhighl
     @Override
     protected String makeFragment( StringBuilder buffer, int[] index, Field[] values, WeightedFragInfo fragInfo,
             String[] preTags, String[] postTags, Encoder encoder ){
-        return super.makeFragment(buffer, index, values, FragmentBuilderHelper.fixWeightedFragInfo(mapper, values, fragInfo), preTags, postTags, encoder);
+        return super.makeFragment(buffer, index, values, FragmentBuilderHelper.fixWeightedFragInfo(mapper, values, fragInfo),
+            preTags, postTags, encoder);
     }
 }
@@ -66,6 +66,7 @@ public class SourceScoreOrderFragmentsBuilder extends ScoreOrderFragmentsBuilder
     @Override
     protected String makeFragment( StringBuilder buffer, int[] index, Field[] values, WeightedFragInfo fragInfo,
             String[] preTags, String[] postTags, Encoder encoder ){
-        return super.makeFragment(buffer, index, values, FragmentBuilderHelper.fixWeightedFragInfo(mapper, values, fragInfo), preTags, postTags, encoder);
+        return super.makeFragment(buffer, index, values, FragmentBuilderHelper.fixWeightedFragInfo(mapper, values, fragInfo),
+            preTags, postTags, encoder);
     }
 }
@@ -17,18 +17,17 @@
  * under the License.
  */

-package org.elasticsearch.search.innerhits;
+package org.elasticsearch.search.fetch.subphase;

 import org.apache.lucene.search.join.ScoreMode;
 import org.apache.lucene.util.ArrayUtil;

 import org.elasticsearch.action.index.IndexRequestBuilder;
 import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.cluster.health.ClusterHealthStatus;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.support.XContentMapValues;
 import org.elasticsearch.index.query.BoolQueryBuilder;
 import org.elasticsearch.index.query.InnerHitBuilder;
 import org.elasticsearch.index.query.QueryBuilder;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.script.MockScriptEngine;
 import org.elasticsearch.script.MockScriptPlugin;
@@ -36,7 +35,6 @@ import org.elasticsearch.script.Script;
 import org.elasticsearch.script.ScriptService;
 import org.elasticsearch.search.SearchHit;
 import org.elasticsearch.search.SearchHits;
-import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
 import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
 import org.elasticsearch.search.sort.FieldSortBuilder;
 import org.elasticsearch.search.sort.SortBuilders;
@@ -168,14 +166,16 @@ public class InnerHitsIT extends ESIntegTestCase {
                 new InnerHitBuilder().setHighlightBuilder(new HighlightBuilder().field("comments.message"))
                     .setExplain(true)
                     .addDocValueField("comments.message")
-                    .addScriptField("script", new Script("5", ScriptService.ScriptType.INLINE, MockScriptEngine.NAME, Collections.emptyMap()))
+                    .addScriptField("script",
+                        new Script("5", ScriptService.ScriptType.INLINE, MockScriptEngine.NAME, Collections.emptyMap()))
                     .setSize(1)
         )).get();
         assertNoFailures(response);
         innerHits = response.getHits().getAt(0).getInnerHits().get("comments");
         assertThat(innerHits.getTotalHits(), equalTo(2L));
         assertThat(innerHits.getHits().length, equalTo(1));
-        assertThat(innerHits.getAt(0).getHighlightFields().get("comments.message").getFragments()[0].string(), equalTo("<em>fox</em> eat quick"));
+        assertThat(innerHits.getAt(0).getHighlightFields().get("comments.message").getFragments()[0].string(),
+            equalTo("<em>fox</em> eat quick"));
         assertThat(innerHits.getAt(0).explanation().toString(), containsString("weight(comments.message:fox in"));
         assertThat(innerHits.getAt(0).getFields().get("comments.message").getValue().toString(), equalTo("eat"));
         assertThat(innerHits.getAt(0).getFields().get("script").getValue().toString(), equalTo("5"));
@@ -334,12 +334,14 @@ public class InnerHitsIT extends ESIntegTestCase {
             int numChildDocs = child1InnerObjects[parent] = scaledRandomIntBetween(1, numDocs);
             int limit = child1 + numChildDocs;
             for (; child1 < limit; child1++) {
-                requestBuilders.add(client().prepareIndex("idx", "child1", String.format(Locale.ENGLISH, "%04d", child1)).setParent(parentId).setSource("{}"));
+                requestBuilders.add(client().prepareIndex("idx", "child1",
+                    String.format(Locale.ENGLISH, "%04d", child1)).setParent(parentId).setSource("{}"));
             }
             numChildDocs = child2InnerObjects[parent] = scaledRandomIntBetween(1, numDocs);
             limit = child2 + numChildDocs;
             for (; child2 < limit; child2++) {
-                requestBuilders.add(client().prepareIndex("idx", "child2", String.format(Locale.ENGLISH, "%04d", child2)).setParent(parentId).setSource("{}"));
+                requestBuilders.add(client().prepareIndex("idx", "child2",
+                    String.format(Locale.ENGLISH, "%04d", child2)).setParent(parentId).setSource("{}"));
             }
         }
         indexRandom(true, requestBuilders);
@@ -399,10 +401,14 @@ public class InnerHitsIT extends ESIntegTestCase {
             .addMapping("answer", "_parent", "type=question", "body", "type=text")
         );
         List<IndexRequestBuilder> requests = new ArrayList<>();
-        requests.add(client().prepareIndex("stack", "question", "1").setSource("body", "I'm using HTTPS + Basic authentication to protect a resource. How can I throttle authentication attempts to protect against brute force attacks?"));
-        requests.add(client().prepareIndex("stack", "answer", "1").setParent("1").setSource("body", "install fail2ban and enable rules for apache"));
-        requests.add(client().prepareIndex("stack", "question", "2").setSource("body", "I have firewall rules set up and also denyhosts installed.\\ndo I also need to install fail2ban?"));
-        requests.add(client().prepareIndex("stack", "answer", "2").setParent("2").setSource("body", "Denyhosts protects only ssh; Fail2Ban protects all daemons."));
+        requests.add(client().prepareIndex("stack", "question", "1").setSource("body", "I'm using HTTPS + Basic authentication "
+            + "to protect a resource. How can I throttle authentication attempts to protect against brute force attacks?"));
+        requests.add(client().prepareIndex("stack", "answer", "1").setParent("1").setSource("body",
+            "install fail2ban and enable rules for apache"));
+        requests.add(client().prepareIndex("stack", "question", "2").setSource("body",
+            "I have firewall rules set up and also denyhosts installed.\\ndo I also need to install fail2ban?"));
+        requests.add(client().prepareIndex("stack", "answer", "2").setParent("2").setSource("body",
+            "Denyhosts protects only ssh; Fail2Ban protects all daemons."));
         indexRandom(true, requests);

         SearchResponse response = client().prepareSearch("stack")
@@ -491,24 +497,25 @@ public class InnerHitsIT extends ESIntegTestCase {
     }

     public void testNestedMultipleLayers() throws Exception {
-        assertAcked(prepareCreate("articles").addMapping("article", jsonBuilder().startObject().startObject("article").startObject("properties")
-                .startObject("comments")
-                    .field("type", "nested")
-                    .startObject("properties")
-                        .startObject("message")
-                            .field("type", "text")
-                        .endObject()
-                        .startObject("remarks")
-                            .field("type", "nested")
-                            .startObject("properties")
-                                .startObject("message").field("type", "text").endObject()
+        assertAcked(prepareCreate("articles").addMapping("article", jsonBuilder().startObject()
+                .startObject("article").startObject("properties")
+                    .startObject("comments")
+                        .field("type", "nested")
+                        .startObject("properties")
+                            .startObject("message")
+                                .field("type", "text")
+                            .endObject()
+                            .startObject("remarks")
+                                .field("type", "nested")
+                                .startObject("properties")
+                                    .startObject("message").field("type", "text").endObject()
                 .endObject()
                 .endObject()
                 .endObject()
                 .endObject()
                 .endObject()
-                .startObject("title")
-                    .field("type", "text")
-                .endObject()
+                    .startObject("title")
+                        .field("type", "text")
+                    .endObject()
                 .endObject().endObject().endObject()));

         List<IndexRequestBuilder> requests = new ArrayList<>();
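For readability, here is the JSON that the jsonBuilder() chain above produces, rendered by hand (Java 15+ text block; reconstructed from the builder calls, so treat it as an approximation):

```java
public class NestedMappingSketch {
    public static void main(String[] args) {
        // Hand-rendered equivalent of the builder calls above: two nested layers,
        // comments.remarks nested inside comments, plus a top-level title field.
        String articleMapping = """
            {
              "article": {
                "properties": {
                  "comments": {
                    "type": "nested",
                    "properties": {
                      "message": { "type": "text" },
                      "remarks": {
                        "type": "nested",
                        "properties": {
                          "message": { "type": "text" }
                        }
                      }
                    }
                  },
                  "title": { "type": "text" }
                }
              }
            }
            """;
        System.out.println(articleMapping);
    }
}
```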
@@ -560,8 +567,8 @@ public class InnerHitsIT extends ESIntegTestCase {

         // Directly refer to the second level:
         response = client().prepareSearch("articles")
-            .setQuery(nestedQuery("comments.remarks", matchQuery("comments.remarks.message", "bad"), ScoreMode.Avg).innerHit(new InnerHitBuilder()))
-            .get();
+            .setQuery(nestedQuery("comments.remarks", matchQuery("comments.remarks.message", "bad"), ScoreMode.Avg)
+                .innerHit(new InnerHitBuilder())).get();
         assertNoFailures(response);
         assertHitCount(response, 1);
         assertSearchHit(response, 1, hasId("2"));
@@ -621,7 +628,8 @@ public class InnerHitsIT extends ESIntegTestCase {
         assertThat(response.getHits().getAt(0).id(), equalTo("1"));
         assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getTotalHits(), equalTo(1L));
         assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).id(), equalTo("1"));
-        assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getField().string(), equalTo("comments"));
+        assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getField().string(),
+            equalTo("comments"));
         assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getOffset(), equalTo(0));
         assertThat(response.getHits().getAt(0).getInnerHits().get("comments").getAt(0).getNestedIdentity().getChild(), nullValue());
     }
@@ -654,28 +662,32 @@ public class InnerHitsIT extends ESIntegTestCase {
         indexRandom(true, requests);

         SearchResponse response = client().prepareSearch("articles")
-            .setQuery(nestedQuery("comments.messages", matchQuery("comments.messages.message", "fox"), ScoreMode.Avg).innerHit(new InnerHitBuilder()))
-            .get();
+            .setQuery(nestedQuery("comments.messages", matchQuery("comments.messages.message", "fox"), ScoreMode.Avg)
+                .innerHit(new InnerHitBuilder())).get();
         assertNoFailures(response);
         assertHitCount(response, 1);
-        assertThat(response.getHits().getAt(0).id(), equalTo("1"));
-        assertThat(response.getHits().getAt(0).getInnerHits().get("comments.messages").getTotalHits(), equalTo(1L));
-        assertThat(response.getHits().getAt(0).getInnerHits().get("comments.messages").getAt(0).id(), equalTo("1"));
-        assertThat(response.getHits().getAt(0).getInnerHits().get("comments.messages").getAt(0).getNestedIdentity().getField().string(), equalTo("comments.messages"));
-        assertThat(response.getHits().getAt(0).getInnerHits().get("comments.messages").getAt(0).getNestedIdentity().getOffset(), equalTo(0));
-        assertThat(response.getHits().getAt(0).getInnerHits().get("comments.messages").getAt(0).getNestedIdentity().getChild(), nullValue());
+        SearchHit hit = response.getHits().getAt(0);
+        assertThat(hit.id(), equalTo("1"));
+        SearchHits messages = hit.getInnerHits().get("comments.messages");
+        assertThat(messages.getTotalHits(), equalTo(1L));
+        assertThat(messages.getAt(0).id(), equalTo("1"));
+        assertThat(messages.getAt(0).getNestedIdentity().getField().string(), equalTo("comments.messages"));
+        assertThat(messages.getAt(0).getNestedIdentity().getOffset(), equalTo(0));
+        assertThat(messages.getAt(0).getNestedIdentity().getChild(), nullValue());

         response = client().prepareSearch("articles")
-            .setQuery(nestedQuery("comments.messages", matchQuery("comments.messages.message", "bear"), ScoreMode.Avg).innerHit(new InnerHitBuilder()))
-            .get();
+            .setQuery(nestedQuery("comments.messages", matchQuery("comments.messages.message", "bear"), ScoreMode.Avg)
+                .innerHit(new InnerHitBuilder())).get();
         assertNoFailures(response);
         assertHitCount(response, 1);
-        assertThat(response.getHits().getAt(0).id(), equalTo("1"));
-        assertThat(response.getHits().getAt(0).getInnerHits().get("comments.messages").getTotalHits(), equalTo(1L));
-        assertThat(response.getHits().getAt(0).getInnerHits().get("comments.messages").getAt(0).id(), equalTo("1"));
-        assertThat(response.getHits().getAt(0).getInnerHits().get("comments.messages").getAt(0).getNestedIdentity().getField().string(), equalTo("comments.messages"));
-        assertThat(response.getHits().getAt(0).getInnerHits().get("comments.messages").getAt(0).getNestedIdentity().getOffset(), equalTo(1));
-        assertThat(response.getHits().getAt(0).getInnerHits().get("comments.messages").getAt(0).getNestedIdentity().getChild(), nullValue());
+        hit = response.getHits().getAt(0);
+        assertThat(hit.id(), equalTo("1"));
+        messages = hit.getInnerHits().get("comments.messages");
+        assertThat(messages.getTotalHits(), equalTo(1L));
+        assertThat(messages.getAt(0).id(), equalTo("1"));
+        assertThat(messages.getAt(0).getNestedIdentity().getField().string(), equalTo("comments.messages"));
+        assertThat(messages.getAt(0).getNestedIdentity().getOffset(), equalTo(1));
+        assertThat(messages.getAt(0).getNestedIdentity().getChild(), nullValue());

         // index the message in an object form instead of an array
         requests = new ArrayList<>();
@@ -685,16 +697,18 @@ public class InnerHitsIT extends ESIntegTestCase {
             .endObject()));
         indexRandom(true, requests);
         response = client().prepareSearch("articles")
-            .setQuery(nestedQuery("comments.messages", matchQuery("comments.messages.message", "fox"), ScoreMode.Avg).innerHit(new InnerHitBuilder()))
-            .get();
+            .setQuery(nestedQuery("comments.messages", matchQuery("comments.messages.message", "fox"), ScoreMode.Avg)
+                .innerHit(new InnerHitBuilder())).get();
         assertNoFailures(response);
         assertHitCount(response, 1);
-        assertThat(response.getHits().getAt(0).id(), equalTo("1"));
-        assertThat(response.getHits().getAt(0).getInnerHits().get("comments.messages").getTotalHits(), equalTo(1L));
-        assertThat(response.getHits().getAt(0).getInnerHits().get("comments.messages").getAt(0).id(), equalTo("1"));
-        assertThat(response.getHits().getAt(0).getInnerHits().get("comments.messages").getAt(0).getNestedIdentity().getField().string(), equalTo("comments.messages"));
-        assertThat(response.getHits().getAt(0).getInnerHits().get("comments.messages").getAt(0).getNestedIdentity().getOffset(), equalTo(0));
-        assertThat(response.getHits().getAt(0).getInnerHits().get("comments.messages").getAt(0).getNestedIdentity().getChild(), nullValue());
+        hit = response.getHits().getAt(0);;
+        assertThat(hit.id(), equalTo("1"));
+        messages = hit.getInnerHits().get("comments.messages");
+        assertThat(messages.getTotalHits(), equalTo(1L));
+        assertThat(messages.getAt(0).id(), equalTo("1"));
+        assertThat(messages.getAt(0).getNestedIdentity().getField().string(), equalTo("comments.messages"));
+        assertThat(messages.getAt(0).getNestedIdentity().getOffset(), equalTo(0));
+        assertThat(messages.getAt(0).getNestedIdentity().getChild(), nullValue());
     }

     public void testRoyals() throws Exception {
@@ -841,12 +855,14 @@ public class InnerHitsIT extends ESIntegTestCase {
         indexRandom(true, requests);
         waitForRelocation(ClusterHealthStatus.GREEN);

+        QueryBuilder query = boolQuery()
+                .should(termQuery("nested1.n_field1", "n_value1_1").queryName("test1"))
+                .should(termQuery("nested1.n_field1", "n_value1_3").queryName("test2"))
+                .should(termQuery("nested1.n_field2", "n_value2_2").queryName("test3"));
+        query = nestedQuery("nested1", query, ScoreMode.Avg).innerHit(
+                new InnerHitBuilder().addSort(new FieldSortBuilder("nested1.n_field1").order(SortOrder.ASC)));
         SearchResponse searchResponse = client().prepareSearch("test")
-            .setQuery(nestedQuery("nested1", boolQuery()
-                .should(termQuery("nested1.n_field1", "n_value1_1").queryName("test1"))
-                .should(termQuery("nested1.n_field1", "n_value1_3").queryName("test2"))
-                .should(termQuery("nested1.n_field2", "n_value2_2").queryName("test3")),
-                ScoreMode.Avg).innerHit(new InnerHitBuilder().addSort(new FieldSortBuilder("nested1.n_field1").order(SortOrder.ASC))))
+            .setQuery(query)
             .setSize(numDocs)
             .addSort("field1", SortOrder.ASC)
             .get();
@@ -885,7 +901,8 @@ public class InnerHitsIT extends ESIntegTestCase {
         indexRandom(true, requests);

         SearchResponse response = client().prepareSearch("index")
-            .setQuery(hasChildQuery("child", matchQuery("field", "value1").queryName("_name1"), ScoreMode.None).innerHit(new InnerHitBuilder()))
+            .setQuery(hasChildQuery("child", matchQuery("field", "value1").queryName("_name1"), ScoreMode.None)
+                .innerHit(new InnerHitBuilder()))
             .addSort("_uid", SortOrder.ASC)
             .get();
         assertHitCount(response, 2);
@@ -899,8 +916,10 @@ public class InnerHitsIT extends ESIntegTestCase {
         assertThat(response.getHits().getAt(1).getInnerHits().get("child").getAt(0).getMatchedQueries().length, equalTo(1));
         assertThat(response.getHits().getAt(1).getInnerHits().get("child").getAt(0).getMatchedQueries()[0], equalTo("_name1"));

+        QueryBuilder query = hasChildQuery("child", matchQuery("field", "value2").queryName("_name2"), ScoreMode.None)
+                .innerHit(new InnerHitBuilder());
         response = client().prepareSearch("index")
-            .setQuery(hasChildQuery("child", matchQuery("field", "value2").queryName("_name2"), ScoreMode.None).innerHit(new InnerHitBuilder()))
+            .setQuery(query)
             .addSort("_uid", SortOrder.ASC)
             .get();
         assertHitCount(response, 1);
@@ -917,8 +936,10 @@ public class InnerHitsIT extends ESIntegTestCase {
         requests.add(client().prepareIndex("index1", "child", "1").setParent("1").setSource("field", "value1"));
         indexRandom(true, requests);

+        QueryBuilder query = hasChildQuery("child", matchQuery("field", "value1"), ScoreMode.None)
+                .innerHit(new InnerHitBuilder().setSize(ArrayUtil.MAX_ARRAY_LENGTH - 1));
         SearchResponse response = client().prepareSearch("index1")
-            .setQuery(hasChildQuery("child", matchQuery("field", "value1"), ScoreMode.None).innerHit(new InnerHitBuilder().setSize(ArrayUtil.MAX_ARRAY_LENGTH - 1)))
+            .setQuery(query)
             .addSort("_uid", SortOrder.ASC)
             .get();
         assertNoFailures(response);
@@ -935,8 +956,10 @@ public class InnerHitsIT extends ESIntegTestCase {
             .setRefreshPolicy(IMMEDIATE)
             .get();

+        query = nestedQuery("nested", matchQuery("nested.field", "value1"), ScoreMode.Avg)
+                .innerHit(new InnerHitBuilder().setSize(ArrayUtil.MAX_ARRAY_LENGTH - 1));
         response = client().prepareSearch("index2")
-            .setQuery(nestedQuery("nested", matchQuery("nested.field", "value1"), ScoreMode.Avg).innerHit(new InnerHitBuilder().setSize(ArrayUtil.MAX_ARRAY_LENGTH - 1)))
+            .setQuery(query)
             .addSort("_uid", SortOrder.ASC)
             .get();
         assertNoFailures(response);
@@ -17,7 +17,7 @@
  * under the License.
  */

-package org.elasticsearch.search.matchedqueries;
+package org.elasticsearch.search.fetch.subphase;

 import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.index.query.QueryBuilder;
@@ -69,8 +69,10 @@ public class MatchedQueriesIT extends ESIntegTestCase {
             }
         }

-        searchResponse = client().prepareSearch()
-            .setQuery(boolQuery().should(rangeQuery("number").lte(2).queryName("test1")).should(rangeQuery("number").gt(2).queryName("test2"))).get();
+        searchResponse = client().prepareSearch().setQuery(
+                boolQuery()
+                    .should(rangeQuery("number").lte(2).queryName("test1"))
+                    .should(rangeQuery("number").gt(2).queryName("test2"))).get();
         assertHitCount(searchResponse, 3L);
         for (SearchHit hit : searchResponse.getHits()) {
             if (hit.id().equals("1") || hit.id().equals("2")) {
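The queryName() calls above tag each clause so the response can report which named clauses matched a given hit. A toy, library-free sketch of that bookkeeping (invented names, not the Elasticsearch implementation):

```java
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.function.IntPredicate;

public class MatchedQueriesSketch {
    public static void main(String[] args) {
        // Named clauses, mirroring queryName("test1") / queryName("test2") above.
        Map<String, IntPredicate> clauses = new LinkedHashMap<>();
        clauses.put("test1", number -> number <= 2);
        clauses.put("test2", number -> number > 2);
        for (int number : new int[] {1, 2, 3}) {
            List<String> matched = new ArrayList<>();
            clauses.forEach((name, clause) -> {
                if (clause.test(number)) {
                    matched.add(name); // record which named clause matched this "hit"
                }
            });
            System.out.println(number + " -> " + matched);
        }
    }
}
```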
File diff suppressed because it is too large