Completion suggestions to be reduced once instead of twice (#39255)

We have been calling `reduce` on completion suggestions twice: once in
`SearchPhaseController#reducedQueryPhase`, where all suggestions get reduced,
and once more in `SearchPhaseController#sortDocs`, where the top completion
suggestions are added to the `TopDocs` so that their docs can be fetched.
There is no need to reduce twice: all suggestions can be reduced in a single
call, after which we can filter the result and pass only the already reduced
completion suggestions on to `sortDocs`. One small but important detail is
that `shardIndex`, which is currently used only to fetch suggestion hits,
needs to be set before the first reduction, hence outside of `sortDocs`,
where it has been set until now.
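In rough terms, the new flow in `reducedQueryPhase` looks like this (a simplified sketch of the change, not the exact code; local variable names are abbreviated):

    // Set shardIndex on completion suggestions while grouping them, i.e. before the single
    // reduction, so the reduced options still know which shard to fetch their hits from.
    for (SearchPhaseResult result : queryResults) {
        for (Suggestion<?> suggestion : result.queryResult().suggest()) {
            groupedSuggestions.computeIfAbsent(suggestion.getName(), s -> new ArrayList<>()).add(suggestion);
            if (suggestion instanceof CompletionSuggestion) {
                ((CompletionSuggestion) suggestion).setShardIndex(result.getShardIndex());
            }
        }
    }
    // Reduce all suggestions exactly once, then filter out the completion suggestions
    // that sortDocs needs in order to add their docs to the merged TopDocs.
    Suggest reducedSuggest = groupedSuggestions.isEmpty() ? null : new Suggest(Suggest.reduce(groupedSuggestions));
    List<CompletionSuggestion> reducedCompletionSuggestions = reducedSuggest == null
        ? Collections.emptyList() : reducedSuggest.filter(CompletionSuggestion.class);
    SortedTopDocs sortedTopDocs = sortDocs(isScrollRequest, queryResults, bufferedTopDocs, topDocsStats,
        from, size, reducedCompletionSuggestions);
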
Luca Cavanna 2019-02-26 11:41:22 +01:00
parent 24e478c58e
commit c09773a76e
5 changed files with 150 additions and 53 deletions


@@ -54,7 +54,6 @@ import org.elasticsearch.search.profile.SearchProfileShardResults;
 import org.elasticsearch.search.query.QuerySearchResult;
 import org.elasticsearch.search.suggest.Suggest;
 import org.elasticsearch.search.suggest.Suggest.Suggestion;
-import org.elasticsearch.search.suggest.Suggest.Suggestion.Entry;
 import org.elasticsearch.search.suggest.completion.CompletionSuggestion;
 
 import java.util.ArrayList;
@@ -155,12 +154,12 @@ public final class SearchPhaseController {
      * @param size the number of hits to return from the merged top docs
      */
     static SortedTopDocs sortDocs(boolean ignoreFrom, Collection<? extends SearchPhaseResult> results,
-                                  final Collection<TopDocs> bufferedTopDocs, final TopDocsStats topDocsStats, int from, int size) {
+                                  final Collection<TopDocs> bufferedTopDocs, final TopDocsStats topDocsStats, int from, int size,
+                                  List<CompletionSuggestion> reducedCompletionSuggestions) {
         if (results.isEmpty()) {
             return SortedTopDocs.EMPTY;
         }
         final Collection<TopDocs> topDocs = bufferedTopDocs == null ? new ArrayList<>() : bufferedTopDocs;
-        final Map<String, List<Suggestion<CompletionSuggestion.Entry>>> groupedCompletionSuggestions = new HashMap<>();
         for (SearchPhaseResult sortedResult : results) { // TODO we can move this loop into the reduce call to only loop over this once
             /* We loop over all results once, group together the completion suggestions if there are any and collect relevant
              * top docs results. Each top docs gets it's shard index set on all top docs to simplify top docs merging down the road
@@ -177,36 +176,22 @@ public final class SearchPhaseController {
                     topDocs.add(td.topDocs);
                 }
             }
-            if (queryResult.hasSuggestHits()) {
-                Suggest shardSuggest = queryResult.suggest();
-                for (CompletionSuggestion suggestion : shardSuggest.filter(CompletionSuggestion.class)) {
-                    suggestion.setShardIndex(sortedResult.getShardIndex());
-                    List<Suggestion<CompletionSuggestion.Entry>> suggestions =
-                        groupedCompletionSuggestions.computeIfAbsent(suggestion.getName(), s -> new ArrayList<>());
-                    suggestions.add(suggestion);
-                }
-            }
         }
-        final boolean hasHits = (groupedCompletionSuggestions.isEmpty() && topDocs.isEmpty()) == false;
+        final boolean hasHits = (reducedCompletionSuggestions.isEmpty() && topDocs.isEmpty()) == false;
         if (hasHits) {
             final TopDocs mergedTopDocs = mergeTopDocs(topDocs, size, ignoreFrom ? 0 : from);
             final ScoreDoc[] mergedScoreDocs = mergedTopDocs == null ? EMPTY_DOCS : mergedTopDocs.scoreDocs;
             ScoreDoc[] scoreDocs = mergedScoreDocs;
-            if (groupedCompletionSuggestions.isEmpty() == false) {
+            if (reducedCompletionSuggestions.isEmpty() == false) {
                 int numSuggestDocs = 0;
-                List<Suggestion<? extends Entry<? extends Entry.Option>>> completionSuggestions =
-                    new ArrayList<>(groupedCompletionSuggestions.size());
-                for (List<Suggestion<CompletionSuggestion.Entry>> groupedSuggestions : groupedCompletionSuggestions.values()) {
-                    final CompletionSuggestion completionSuggestion = CompletionSuggestion.reduceTo(groupedSuggestions);
+                for (CompletionSuggestion completionSuggestion : reducedCompletionSuggestions) {
                     assert completionSuggestion != null;
                     numSuggestDocs += completionSuggestion.getOptions().size();
-                    completionSuggestions.add(completionSuggestion);
                 }
                 scoreDocs = new ScoreDoc[mergedScoreDocs.length + numSuggestDocs];
                 System.arraycopy(mergedScoreDocs, 0, scoreDocs, 0, mergedScoreDocs.length);
                 int offset = mergedScoreDocs.length;
-                Suggest suggestions = new Suggest(completionSuggestions);
-                for (CompletionSuggestion completionSuggestion : suggestions.filter(CompletionSuggestion.class)) {
+                for (CompletionSuggestion completionSuggestion : reducedCompletionSuggestions) {
                     for (CompletionSuggestion.Entry.Option option : completionSuggestion.getOptions()) {
                         scoreDocs[offset++] = option.getDoc();
                     }
@@ -479,6 +464,10 @@ public final class SearchPhaseController {
                 for (Suggestion<? extends Suggestion.Entry<? extends Suggestion.Entry.Option>> suggestion : result.suggest()) {
                     List<Suggestion> suggestionList = groupedSuggestions.computeIfAbsent(suggestion.getName(), s -> new ArrayList<>());
                     suggestionList.add(suggestion);
+                    if (suggestion instanceof CompletionSuggestion) {
+                        CompletionSuggestion completionSuggestion = (CompletionSuggestion) suggestion;
+                        completionSuggestion.setShardIndex(result.getShardIndex());
+                    }
                 }
             }
             if (consumeAggs) {
@@ -489,15 +478,24 @@ public final class SearchPhaseController {
                 profileResults.put(key, result.consumeProfileResult());
             }
         }
 
-        final Suggest suggest = groupedSuggestions.isEmpty() ? null : new Suggest(Suggest.reduce(groupedSuggestions));
+        final Suggest reducedSuggest;
+        final List<CompletionSuggestion> reducedCompletionSuggestions;
+        if (groupedSuggestions.isEmpty()) {
+            reducedSuggest = null;
+            reducedCompletionSuggestions = Collections.emptyList();
+        } else {
+            reducedSuggest = new Suggest(Suggest.reduce(groupedSuggestions));
+            reducedCompletionSuggestions = reducedSuggest.filter(CompletionSuggestion.class);
+        }
         ReduceContext reduceContext = reduceContextFunction.apply(performFinalReduce);
         final InternalAggregations aggregations = aggregationsList.isEmpty() ? null : reduceAggs(aggregationsList,
             firstResult.pipelineAggregators(), reduceContext);
         final SearchProfileShardResults shardResults = profileResults.isEmpty() ? null : new SearchProfileShardResults(profileResults);
-        final SortedTopDocs sortedTopDocs = sortDocs(isScrollRequest, queryResults, bufferedTopDocs, topDocsStats, from, size);
+        final SortedTopDocs sortedTopDocs = sortDocs(isScrollRequest, queryResults, bufferedTopDocs, topDocsStats, from, size,
+            reducedCompletionSuggestions);
         final TotalHits totalHits = topDocsStats.getTotalHits();
         return new ReducedQueryPhase(totalHits, topDocsStats.fetchHits, topDocsStats.getMaxScore(),
-            topDocsStats.timedOut, topDocsStats.terminatedEarly, suggest, aggregations, shardResults, sortedTopDocs,
+            topDocsStats.timedOut, topDocsStats.terminatedEarly, reducedSuggest, aggregations, shardResults, sortedTopDocs,
             firstResult.sortValueFormats(), numReducePhases, size, from, false);
     }


@@ -202,7 +202,7 @@ public class Suggest implements Iterable<Suggest.Suggestion<? extends Entry<? ex
 
     public static List<Suggestion<? extends Entry<? extends Option>>> reduce(Map<String, List<Suggest.Suggestion>> groupedSuggestions) {
         List<Suggestion<? extends Entry<? extends Option>>> reduced = new ArrayList<>(groupedSuggestions.size());
-        for (java.util.Map.Entry<String, List<Suggestion>> unmergedResults : groupedSuggestions.entrySet()) {
+        for (Map.Entry<String, List<Suggestion>> unmergedResults : groupedSuggestions.entrySet()) {
             List<Suggestion> value = unmergedResults.getValue();
             Class<? extends Suggestion> suggestionClass = null;
             for (Suggestion suggestion : value) {


@@ -169,11 +169,8 @@ public final class CompletionSuggestion extends Suggest.Suggestion<CompletionSug
         }
     }
 
-    /**
-     * Reduces suggestions to a single suggestion containing at most
-     * top {@link CompletionSuggestion#getSize()} options across <code>toReduce</code>
-     */
-    public static CompletionSuggestion reduceTo(List<Suggest.Suggestion<Entry>> toReduce) {
+    @Override
+    public CompletionSuggestion reduce(List<Suggest.Suggestion<Entry>> toReduce) {
         if (toReduce.isEmpty()) {
             return null;
         } else {
@@ -209,7 +206,7 @@ public final class CompletionSuggestion extends Suggest.Suggestion<CompletionSug
                     pq.pop();
                 }
                 if (leader.skipDuplicates == false ||
                         seenSurfaceForms.add(current.getText().toString())) {
                     options.add(current);
                     if (options.size() >= size) {
                         break;
@@ -223,11 +220,6 @@ public final class CompletionSuggestion extends Suggest.Suggestion<CompletionSug
         }
     }
 
-    @Override
-    public Suggest.Suggestion<Entry> reduce(List<Suggest.Suggestion<Entry>> toReduce) {
-        return reduceTo(toReduce);
-    }
-
     public void setShardIndex(int shardIndex) {
         if (entries.isEmpty() == false) {
             for (Entry.Option option : getOptions()) {


@@ -51,12 +51,16 @@ import org.elasticsearch.search.fetch.FetchSearchResult;
 import org.elasticsearch.search.internal.InternalSearchResponse;
 import org.elasticsearch.search.internal.SearchContext;
 import org.elasticsearch.search.query.QuerySearchResult;
+import org.elasticsearch.search.suggest.SortBy;
 import org.elasticsearch.search.suggest.Suggest;
 import org.elasticsearch.search.suggest.completion.CompletionSuggestion;
+import org.elasticsearch.search.suggest.phrase.PhraseSuggestion;
+import org.elasticsearch.search.suggest.term.TermSuggestion;
 import org.elasticsearch.test.ESTestCase;
 import org.junit.Before;
 
 import java.util.ArrayList;
+import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
@@ -87,7 +91,7 @@ public class SearchPhaseControllerTests extends ESTestCase {
         });
     }
 
-    public void testSort() {
+    public void testSortDocs() {
         List<CompletionSuggestion> suggestions = new ArrayList<>();
         for (int i = 0; i < randomIntBetween(1, 5); i++) {
             suggestions.add(new CompletionSuggestion(randomAlphaOfLength(randomIntBetween(1, 5)), randomIntBetween(1, 20), false));
@@ -102,16 +106,18 @@ public class SearchPhaseControllerTests extends ESTestCase {
             size = first.get().queryResult().size();
         }
         int accumulatedLength = Math.min(queryResultSize, getTotalQueryHits(results));
-        ScoreDoc[] sortedDocs = SearchPhaseController.sortDocs(true, results.asList(), null,
-            new SearchPhaseController.TopDocsStats(SearchContext.TRACK_TOTAL_HITS_ACCURATE), from, size).scoreDocs;
-        for (Suggest.Suggestion<?> suggestion : reducedSuggest(results)) {
+        List<CompletionSuggestion> reducedCompletionSuggestions = reducedSuggest(results);
+        for (Suggest.Suggestion<?> suggestion : reducedCompletionSuggestions) {
             int suggestionSize = suggestion.getEntries().get(0).getOptions().size();
             accumulatedLength += suggestionSize;
         }
+        ScoreDoc[] sortedDocs = SearchPhaseController.sortDocs(true, results.asList(), null,
+            new SearchPhaseController.TopDocsStats(SearchContext.TRACK_TOTAL_HITS_ACCURATE), from, size,
+            reducedCompletionSuggestions).scoreDocs;
         assertThat(sortedDocs.length, equalTo(accumulatedLength));
     }
 
-    public void testSortIsIdempotent() throws Exception {
+    public void testSortDocsIsIdempotent() throws Exception {
         int nShards = randomIntBetween(1, 20);
         int queryResultSize = randomBoolean() ? 0 : randomIntBetween(1, nShards * 2);
         long randomSeed = randomLong();
@@ -126,12 +132,14 @@ public class SearchPhaseControllerTests extends ESTestCase {
             size = first.get().queryResult().size();
         }
         SearchPhaseController.TopDocsStats topDocsStats = new SearchPhaseController.TopDocsStats(SearchContext.TRACK_TOTAL_HITS_ACCURATE);
-        ScoreDoc[] sortedDocs = SearchPhaseController.sortDocs(ignoreFrom, results.asList(), null, topDocsStats, from, size).scoreDocs;
+        ScoreDoc[] sortedDocs = SearchPhaseController.sortDocs(ignoreFrom, results.asList(), null, topDocsStats, from, size,
+            Collections.emptyList()).scoreDocs;
 
         results = generateSeededQueryResults(randomSeed, nShards, Collections.emptyList(), queryResultSize,
             useConstantScore);
         SearchPhaseController.TopDocsStats topDocsStats2 = new SearchPhaseController.TopDocsStats(SearchContext.TRACK_TOTAL_HITS_ACCURATE);
-        ScoreDoc[] sortedDocs2 = SearchPhaseController.sortDocs(ignoreFrom, results.asList(), null, topDocsStats2, from, size).scoreDocs;
+        ScoreDoc[] sortedDocs2 = SearchPhaseController.sortDocs(ignoreFrom, results.asList(), null, topDocsStats2, from, size,
+            Collections.emptyList()).scoreDocs;
         assertEquals(sortedDocs.length, sortedDocs2.length);
         for (int i = 0; i < sortedDocs.length; i++) {
             assertEquals(sortedDocs[i].doc, sortedDocs2[i].doc);
@@ -204,9 +212,16 @@ public class SearchPhaseControllerTests extends ESTestCase {
         }
     }
 
-    private static AtomicArray<SearchPhaseResult> generateQueryResults(int nShards,
-                                                                       List<CompletionSuggestion> suggestions,
-                                                                       int searchHitsSize, boolean useConstantScore) {
+    /**
+     * Generate random query results received from the provided number of shards, including the provided
+     * number of search hits and randomly generated completion suggestions based on the name and size of the provided ones.
+     * Note that <code>shardIndex</code> is already set to the generated completion suggestions to simulate what
+     * {@link SearchPhaseController#reducedQueryPhase(Collection, boolean, int, boolean)} does, meaning that the returned query results
+     * can be fed directly to
+     * {@link SearchPhaseController#sortDocs(boolean, Collection, Collection, SearchPhaseController.TopDocsStats, int, int, List)}
+     */
+    private static AtomicArray<SearchPhaseResult> generateQueryResults(int nShards, List<CompletionSuggestion> suggestions,
+                                                                       int searchHitsSize, boolean useConstantScore) {
         AtomicArray<SearchPhaseResult> queryResults = new AtomicArray<>(nShards);
         for (int shardIndex = 0; shardIndex < nShards; shardIndex++) {
             String clusterAlias = randomBoolean() ? null : "remote";
@@ -265,17 +280,17 @@ public class SearchPhaseControllerTests extends ESTestCase {
         return resultCount;
     }
 
-    private static Suggest reducedSuggest(AtomicArray<SearchPhaseResult> results) {
+    private static List<CompletionSuggestion> reducedSuggest(AtomicArray<SearchPhaseResult> results) {
         Map<String, List<Suggest.Suggestion<CompletionSuggestion.Entry>>> groupedSuggestion = new HashMap<>();
         for (SearchPhaseResult entry : results.asList()) {
             for (Suggest.Suggestion<?> suggestion : entry.queryResult().suggest()) {
                 List<Suggest.Suggestion<CompletionSuggestion.Entry>> suggests =
                     groupedSuggestion.computeIfAbsent(suggestion.getName(), s -> new ArrayList<>());
-                suggests.add((Suggest.Suggestion<CompletionSuggestion.Entry>) suggestion);
+                suggests.add((CompletionSuggestion) suggestion);
             }
         }
-        return new Suggest(groupedSuggestion.values().stream().map(CompletionSuggestion::reduceTo)
-            .collect(Collectors.toList()));
+        CompletionSuggestion completionSuggestion = new CompletionSuggestion(null, -1, randomBoolean());
+        return groupedSuggestion.values().stream().map(completionSuggestion::reduce).collect(Collectors.toList());
     }
 
     private static AtomicArray<SearchPhaseResult> generateFetchResults(int nShards, ScoreDoc[] mergedSearchDocs, Suggest mergedSuggest) {
@@ -645,4 +660,96 @@ public class SearchPhaseControllerTests extends ESTestCase {
         assertEquals("field", reduce.sortedTopDocs.collapseField);
         assertArrayEquals(collapseValues, reduce.sortedTopDocs.collapseValues);
     }
+
+    public void testConsumerSuggestions() {
+        int expectedNumResults = randomIntBetween(1, 100);
+        int bufferSize = randomIntBetween(2, 200);
+        SearchRequest request = randomSearchRequest();
+        request.setBatchedReduceSize(bufferSize);
+        InitialSearchPhase.ArraySearchPhaseResults<SearchPhaseResult> consumer =
+            searchPhaseController.newSearchPhaseResults(request, expectedNumResults);
+        int maxScoreTerm = -1;
+        int maxScorePhrase = -1;
+        int maxScoreCompletion = -1;
+        for (int i = 0; i < expectedNumResults; i++) {
+            QuerySearchResult result = new QuerySearchResult(i, new SearchShardTarget("node", new ShardId("a", "b", i),
+                null, OriginalIndices.NONE));
+            List<Suggest.Suggestion<? extends Suggest.Suggestion.Entry<? extends Suggest.Suggestion.Entry.Option>>> suggestions =
+                new ArrayList<>();
+            {
+                TermSuggestion termSuggestion = new TermSuggestion("term", 1, SortBy.SCORE);
+                TermSuggestion.Entry entry = new TermSuggestion.Entry(new Text("entry"), 0, 10);
+                int numOptions = randomIntBetween(1, 10);
+                for (int j = 0; j < numOptions; j++) {
+                    int score = numOptions - j;
+                    maxScoreTerm = Math.max(maxScoreTerm, score);
+                    entry.addOption(new TermSuggestion.Entry.Option(new Text("option"), randomInt(), score));
+                }
+                termSuggestion.addTerm(entry);
+                suggestions.add(termSuggestion);
+            }
+            {
+                PhraseSuggestion phraseSuggestion = new PhraseSuggestion("phrase", 1);
+                PhraseSuggestion.Entry entry = new PhraseSuggestion.Entry(new Text("entry"), 0, 10);
+                int numOptions = randomIntBetween(1, 10);
+                for (int j = 0; j < numOptions; j++) {
+                    int score = numOptions - j;
+                    maxScorePhrase = Math.max(maxScorePhrase, score);
+                    entry.addOption(new PhraseSuggestion.Entry.Option(new Text("option"), new Text("option"), score));
+                }
+                phraseSuggestion.addTerm(entry);
+                suggestions.add(phraseSuggestion);
+            }
+            {
+                CompletionSuggestion completionSuggestion = new CompletionSuggestion("completion", 1, false);
+                CompletionSuggestion.Entry entry = new CompletionSuggestion.Entry(new Text("entry"), 0, 10);
+                int numOptions = randomIntBetween(1, 10);
+                for (int j = 0; j < numOptions; j++) {
+                    int score = numOptions - j;
+                    maxScoreCompletion = Math.max(maxScoreCompletion, score);
+                    CompletionSuggestion.Entry.Option option = new CompletionSuggestion.Entry.Option(j,
+                        new Text("option"), score, Collections.emptyMap());
+                    entry.addOption(option);
+                }
+                completionSuggestion.addTerm(entry);
+                suggestions.add(completionSuggestion);
+            }
+            result.suggest(new Suggest(suggestions));
+            result.topDocs(new TopDocsAndMaxScore(Lucene.EMPTY_TOP_DOCS, Float.NaN), new DocValueFormat[0]);
+            result.setShardIndex(i);
+            result.size(0);
+            consumer.consumeResult(result);
+        }
+        SearchPhaseController.ReducedQueryPhase reduce = consumer.reduce();
+        assertEquals(3, reduce.suggest.size());
+        {
+            TermSuggestion term = reduce.suggest.getSuggestion("term");
+            assertEquals(1, term.getEntries().size());
+            assertEquals(1, term.getEntries().get(0).getOptions().size());
+            assertEquals(maxScoreTerm, term.getEntries().get(0).getOptions().get(0).getScore(), 0f);
+        }
+        {
+            PhraseSuggestion phrase = reduce.suggest.getSuggestion("phrase");
+            assertEquals(1, phrase.getEntries().size());
+            assertEquals(1, phrase.getEntries().get(0).getOptions().size());
+            assertEquals(maxScorePhrase, phrase.getEntries().get(0).getOptions().get(0).getScore(), 0f);
+        }
+        {
+            CompletionSuggestion completion = reduce.suggest.getSuggestion("completion");
+            assertEquals(1, completion.getSize());
+            assertEquals(1, completion.getOptions().size());
+            CompletionSuggestion.Entry.Option option = completion.getOptions().get(0);
+            assertEquals(maxScoreCompletion, option.getScore(), 0f);
+        }
+        assertFinalReduction(request);
+        assertEquals(1, reduce.sortedTopDocs.scoreDocs.length);
+        assertEquals(maxScoreCompletion, reduce.sortedTopDocs.scoreDocs[0].score, 0f);
+        assertEquals(0, reduce.sortedTopDocs.scoreDocs[0].doc);
+        assertNotEquals(-1, reduce.sortedTopDocs.scoreDocs[0].shardIndex);
+        assertEquals(0, reduce.totalHits.value);
+        assertFalse(reduce.sortedTopDocs.isSortedByField);
+        assertNull(reduce.sortedTopDocs.sortFields);
+        assertNull(reduce.sortedTopDocs.collapseField);
+        assertNull(reduce.sortedTopDocs.collapseValues);
+    }
 }


@@ -34,7 +34,7 @@ import static org.hamcrest.Matchers.lessThanOrEqualTo;
 
 public class CompletionSuggestionTests extends ESTestCase {
 
-    public void testToReduce() throws Exception {
+    public void testToReduce() {
         List<Suggest.Suggestion<CompletionSuggestion.Entry>> shardSuggestions = new ArrayList<>();
         int nShards = randomIntBetween(1, 10);
         String name = randomAlphaOfLength(10);
@@ -54,7 +54,7 @@ public class CompletionSuggestionTests extends ESTestCase {
                     maxScore - i, Collections.emptyMap()));
             }
         }
-        CompletionSuggestion reducedSuggestion = CompletionSuggestion.reduceTo(shardSuggestions);
+        CompletionSuggestion reducedSuggestion = (CompletionSuggestion) shardSuggestions.get(0).reduce(shardSuggestions);
         assertNotNull(reducedSuggestion);
         assertThat(reducedSuggestion.getOptions().size(), lessThanOrEqualTo(size));
         int count = 0;
@@ -95,7 +95,7 @@ public class CompletionSuggestionTests extends ESTestCase {
             .distinct()
             .limit(size)
             .collect(Collectors.toList());
-        CompletionSuggestion reducedSuggestion = CompletionSuggestion.reduceTo(shardSuggestions);
+        CompletionSuggestion reducedSuggestion = (CompletionSuggestion) shardSuggestions.get(0).reduce(shardSuggestions);
        assertNotNull(reducedSuggestion);
        assertThat(reducedSuggestion.getOptions().size(), lessThanOrEqualTo(size));
        assertEquals(expected, reducedSuggestion.getOptions());