mirror of
https://github.com/honeymoose/OpenSearch.git
synced 2025-02-17 02:14:54 +00:00
Add search_after parameter in the Search API.
The search_after parameter provides a way to efficiently paginate from one page to the next. This parameter accepts an array of sort values; those values are then used by the searcher to sort the top hits starting from the first document that is greater than the sort values. This parameter must be used in conjunction with the sort parameter, and it must contain exactly the same number of values as the number of fields to sort on. NOTE: A field with one unique value per document should be used as the last element of the sort specification. Otherwise the sort order for documents that have the same sort values would be undefined. The recommended way is to use the field `_uid`, which is certain to contain one unique value for each document. Fixes #8192
This commit is contained in:
parent
54dd819616
commit
aea7660e37
@ -28,6 +28,7 @@ import org.elasticsearch.index.query.QueryBuilder;
|
||||
import org.elasticsearch.script.Script;
|
||||
import org.elasticsearch.script.Template;
|
||||
import org.elasticsearch.search.Scroll;
|
||||
import org.elasticsearch.search.searchafter.SearchAfterBuilder;
|
||||
import org.elasticsearch.search.aggregations.AbstractAggregationBuilder;
|
||||
import org.elasticsearch.search.builder.SearchSourceBuilder;
|
||||
import org.elasticsearch.search.fetch.innerhits.InnerHitsBuilder;
|
||||
@ -343,6 +344,15 @@ public class SearchRequestBuilder extends ActionRequestBuilder<SearchRequest, Se
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the sort values that indicates which docs this request should "search after".
|
||||
*
|
||||
*/
|
||||
public SearchRequestBuilder searchAfter(Object[] values) {
|
||||
sourceBuilder().searchAfter(values);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Applies when sorting, and controls if scores will be tracked as well. Defaults to
|
||||
* <tt>false</tt>.
|
||||
|
@ -26,7 +26,7 @@ import org.apache.lucene.search.Collector;
|
||||
import org.apache.lucene.search.IndexSearcher;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.search.Sort;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.apache.lucene.search.FieldDoc;
|
||||
import org.apache.lucene.util.Counter;
|
||||
import org.elasticsearch.action.percolate.PercolateShardRequest;
|
||||
import org.elasticsearch.action.search.SearchType;
|
||||
@ -48,7 +48,6 @@ import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.index.mapper.MapperService;
|
||||
import org.elasticsearch.index.mapper.ParsedDocument;
|
||||
import org.elasticsearch.index.mapper.object.ObjectMapper;
|
||||
import org.elasticsearch.index.percolator.PercolatorQueriesRegistry;
|
||||
import org.elasticsearch.index.query.ParsedQuery;
|
||||
import org.elasticsearch.index.shard.IndexShard;
|
||||
import org.elasticsearch.index.similarity.SimilarityService;
|
||||
@ -82,7 +81,6 @@ import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.ConcurrentMap;
|
||||
|
||||
/**
|
||||
*/
|
||||
@ -518,6 +516,16 @@ public class PercolateContext extends SearchContext {
|
||||
return trackScores;
|
||||
}
|
||||
|
||||
@Override
|
||||
public SearchContext searchAfter(FieldDoc searchAfter) {
|
||||
throw new UnsupportedOperationException();
|
||||
}
|
||||
|
||||
@Override
|
||||
public FieldDoc searchAfter() {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public SearchContext parsedPostFilter(ParsedQuery postFilter) {
|
||||
throw new UnsupportedOperationException();
|
||||
|
@ -27,6 +27,7 @@ import com.carrotsearch.hppc.cursors.ObjectCursor;
|
||||
import org.apache.lucene.index.IndexOptions;
|
||||
import org.apache.lucene.index.LeafReaderContext;
|
||||
import org.apache.lucene.index.NumericDocValues;
|
||||
import org.apache.lucene.search.FieldDoc;
|
||||
import org.apache.lucene.search.TopDocs;
|
||||
import org.elasticsearch.ExceptionsHelper;
|
||||
import org.elasticsearch.cache.recycler.PageCacheRecycler;
|
||||
@ -101,6 +102,7 @@ import org.elasticsearch.search.query.QuerySearchResult;
|
||||
import org.elasticsearch.search.query.QuerySearchResultProvider;
|
||||
import org.elasticsearch.search.query.ScrollQuerySearchResult;
|
||||
import org.elasticsearch.search.rescore.RescoreBuilder;
|
||||
import org.elasticsearch.search.searchafter.SearchAfterBuilder;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
|
||||
import java.io.IOException;
|
||||
@ -864,6 +866,16 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
|
||||
if (source.stats() != null) {
|
||||
context.groupStats(source.stats());
|
||||
}
|
||||
if (source.searchAfter() != null && source.searchAfter().length > 0) {
|
||||
if (context.scrollContext() != null) {
|
||||
throw new SearchContextException(context, "`search_after` cannot be used in a scroll context.");
|
||||
}
|
||||
if (context.from() > 0) {
|
||||
throw new SearchContextException(context, "`from` parameter must be set to 0 when `search_after` is used.");
|
||||
}
|
||||
FieldDoc fieldDoc = SearchAfterBuilder.buildFieldDoc(context.sort(), source.searchAfter());
|
||||
context.searchAfter(fieldDoc);
|
||||
}
|
||||
}
|
||||
|
||||
private static final int[] EMPTY_DOC_IDS = new int[0];
|
||||
|
@ -41,6 +41,7 @@ import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.index.query.QueryBuilder;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.script.Script;
|
||||
import org.elasticsearch.search.searchafter.SearchAfterBuilder;
|
||||
import org.elasticsearch.search.aggregations.AbstractAggregationBuilder;
|
||||
import org.elasticsearch.search.fetch.innerhits.InnerHitsBuilder;
|
||||
import org.elasticsearch.search.fetch.source.FetchSourceContext;
|
||||
@ -94,6 +95,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
|
||||
public static final ParseField STATS_FIELD = new ParseField("stats");
|
||||
public static final ParseField EXT_FIELD = new ParseField("ext");
|
||||
public static final ParseField PROFILE_FIELD = new ParseField("profile");
|
||||
public static final ParseField SEARCH_AFTER = new ParseField("search_after");
|
||||
|
||||
private static final SearchSourceBuilder PROTOTYPE = new SearchSourceBuilder();
|
||||
|
||||
@ -135,6 +137,8 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
|
||||
|
||||
private boolean trackScores = false;
|
||||
|
||||
private SearchAfterBuilder searchAfterBuilder;
|
||||
|
||||
private Float minScore;
|
||||
|
||||
private long timeoutInMillis = -1;
|
||||
@ -381,6 +385,28 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
|
||||
return trackScores;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* The sort values that indicates which docs this request should "search after".
|
||||
* The sort values of the search_after must be equal to the number of sort fields in the query and they should be
|
||||
* of the same type (or parsable as such).
|
||||
* Defaults to <tt>null</tt>.
|
||||
*/
|
||||
public Object[] searchAfter() {
|
||||
if (searchAfterBuilder == null) {
|
||||
return null;
|
||||
}
|
||||
return searchAfterBuilder.getSortValues();
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the sort values that indicates which docs this request should "search after".
|
||||
*/
|
||||
public SearchSourceBuilder searchAfter(Object[] values) {
|
||||
this.searchAfterBuilder = new SearchAfterBuilder().setSortValues(values);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add an aggregation to perform as part of the search.
|
||||
*/
|
||||
@ -890,6 +916,8 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
|
||||
builder.stats = stats;
|
||||
} else if (context.parseFieldMatcher().match(currentFieldName, _SOURCE_FIELD)) {
|
||||
builder.fetchSourceContext = FetchSourceContext.parse(parser, context);
|
||||
} else if (context.parseFieldMatcher().match(currentFieldName, SEARCH_AFTER)) {
|
||||
builder.searchAfterBuilder = SearchAfterBuilder.PROTOTYPE.fromXContent(parser, context.parseFieldMatcher());
|
||||
} else {
|
||||
throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + " in [" + currentFieldName + "].",
|
||||
parser.getTokenLocation());
|
||||
@ -996,6 +1024,10 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
|
||||
builder.field(TRACK_SCORES_FIELD.getPreferredName(), true);
|
||||
}
|
||||
|
||||
if (searchAfterBuilder != null) {
|
||||
builder.field(SEARCH_AFTER.getPreferredName(), searchAfterBuilder.getSortValues());
|
||||
}
|
||||
|
||||
if (indexBoost != null) {
|
||||
builder.startObject(INDICES_BOOST_FIELD.getPreferredName());
|
||||
assert !indexBoost.containsKey(null);
|
||||
@ -1234,6 +1266,9 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
|
||||
} else {
|
||||
builder.profile = false;
|
||||
}
|
||||
if (in.readBoolean()) {
|
||||
builder.searchAfterBuilder = SearchAfterBuilder.PROTOTYPE.readFrom(in);
|
||||
}
|
||||
return builder;
|
||||
}
|
||||
|
||||
@ -1350,13 +1385,18 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
|
||||
if (out.getVersion().onOrAfter(Version.V_2_2_0)) {
|
||||
out.writeBoolean(profile);
|
||||
}
|
||||
boolean hasSearchAfter = searchAfterBuilder != null;
|
||||
out.writeBoolean(hasSearchAfter);
|
||||
if (hasSearchAfter) {
|
||||
searchAfterBuilder.writeTo(out);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(aggregations, explain, fetchSourceContext, fieldDataFields, fieldNames, from,
|
||||
highlightBuilder, indexBoost, innerHitsBuilder, minScore, postQueryBuilder, queryBuilder, rescoreBuilders, scriptFields,
|
||||
size, sorts, stats, suggestBuilder, terminateAfter, timeoutInMillis, trackScores, version, profile);
|
||||
size, sorts, searchAfterBuilder, stats, suggestBuilder, terminateAfter, timeoutInMillis, trackScores, version, profile);
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -1384,6 +1424,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
|
||||
&& Objects.equals(scriptFields, other.scriptFields)
|
||||
&& Objects.equals(size, other.size)
|
||||
&& Objects.equals(sorts, other.sorts)
|
||||
&& Objects.equals(searchAfterBuilder, other.searchAfterBuilder)
|
||||
&& Objects.equals(stats, other.stats)
|
||||
&& Objects.equals(suggestBuilder, other.suggestBuilder)
|
||||
&& Objects.equals(terminateAfter, other.terminateAfter)
|
||||
|
@ -27,7 +27,9 @@ import org.apache.lucene.search.Collector;
|
||||
import org.apache.lucene.search.ConstantScoreQuery;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.search.Sort;
|
||||
import org.apache.lucene.search.FieldDoc;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.apache.lucene.search.BooleanClause.Occur;
|
||||
import org.apache.lucene.util.Counter;
|
||||
import org.elasticsearch.action.search.SearchType;
|
||||
import org.elasticsearch.cache.recycler.PageCacheRecycler;
|
||||
@ -115,6 +117,7 @@ public class DefaultSearchContext extends SearchContext {
|
||||
private Sort sort;
|
||||
private Float minimumScore;
|
||||
private boolean trackScores = false; // when sorting, track scores as well...
|
||||
private FieldDoc searchAfter;
|
||||
/**
|
||||
* The original query as sent by the user without the types and aliases
|
||||
* applied. Putting things in here leaks them into highlighting so don't add
|
||||
@ -549,6 +552,17 @@ public class DefaultSearchContext extends SearchContext {
|
||||
return this.trackScores;
|
||||
}
|
||||
|
||||
@Override
|
||||
public SearchContext searchAfter(FieldDoc searchAfter) {
|
||||
this.searchAfter = searchAfter;
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public FieldDoc searchAfter() {
|
||||
return searchAfter;
|
||||
}
|
||||
|
||||
@Override
|
||||
public SearchContext parsedPostFilter(ParsedQuery postFilter) {
|
||||
this.postFilter = postFilter;
|
||||
|
@ -20,6 +20,7 @@
|
||||
package org.elasticsearch.search.internal;
|
||||
|
||||
import org.apache.lucene.search.Collector;
|
||||
import org.apache.lucene.search.FieldDoc;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.search.Sort;
|
||||
import org.apache.lucene.util.Counter;
|
||||
@ -336,6 +337,16 @@ public abstract class FilteredSearchContext extends SearchContext {
|
||||
return in.trackScores();
|
||||
}
|
||||
|
||||
@Override
|
||||
public SearchContext searchAfter(FieldDoc searchAfter) {
|
||||
return in.searchAfter(searchAfter);
|
||||
}
|
||||
|
||||
@Override
|
||||
public FieldDoc searchAfter() {
|
||||
return in.searchAfter();
|
||||
}
|
||||
|
||||
@Override
|
||||
public SearchContext parsedPostFilter(ParsedQuery postFilter) {
|
||||
return in.parsedPostFilter(postFilter);
|
||||
|
@ -20,6 +20,7 @@ package org.elasticsearch.search.internal;
|
||||
|
||||
|
||||
import org.apache.lucene.search.Collector;
|
||||
import org.apache.lucene.search.FieldDoc;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.search.Sort;
|
||||
import org.apache.lucene.util.Counter;
|
||||
@ -240,6 +241,10 @@ public abstract class SearchContext extends DelegatingHasContextAndHeaders imple
|
||||
|
||||
public abstract boolean trackScores();
|
||||
|
||||
public abstract SearchContext searchAfter(FieldDoc searchAfter);
|
||||
|
||||
public abstract FieldDoc searchAfter();
|
||||
|
||||
public abstract SearchContext parsedPostFilter(ParsedQuery postFilter);
|
||||
|
||||
public abstract ParsedQuery parsedPostFilter();
|
||||
|
@ -192,10 +192,10 @@ public class QueryPhase implements SearchPhase {
|
||||
final ScrollContext scrollContext = searchContext.scrollContext();
|
||||
assert (scrollContext != null) == (searchContext.request().scroll() != null);
|
||||
final TopDocsCollector<?> topDocsCollector;
|
||||
ScoreDoc lastEmittedDoc;
|
||||
ScoreDoc after = null;
|
||||
if (searchContext.request().scroll() != null) {
|
||||
numDocs = Math.min(searchContext.size(), totalNumDocs);
|
||||
lastEmittedDoc = scrollContext.lastEmittedDoc;
|
||||
after = scrollContext.lastEmittedDoc;
|
||||
|
||||
if (returnsDocsInOrder(query, searchContext.sort())) {
|
||||
if (scrollContext.totalHits == -1) {
|
||||
@ -209,7 +209,7 @@ public class QueryPhase implements SearchPhase {
|
||||
if (scrollContext.lastEmittedDoc != null) {
|
||||
BooleanQuery bq = new BooleanQuery.Builder()
|
||||
.add(query, BooleanClause.Occur.MUST)
|
||||
.add(new MinDocQuery(lastEmittedDoc.doc + 1), BooleanClause.Occur.FILTER)
|
||||
.add(new MinDocQuery(after.doc + 1), BooleanClause.Occur.FILTER)
|
||||
.build();
|
||||
query = bq;
|
||||
}
|
||||
@ -217,7 +217,7 @@ public class QueryPhase implements SearchPhase {
|
||||
}
|
||||
}
|
||||
} else {
|
||||
lastEmittedDoc = null;
|
||||
after = searchContext.searchAfter();
|
||||
}
|
||||
if (totalNumDocs == 0) {
|
||||
// top collectors don't like a size of 0
|
||||
@ -226,13 +226,13 @@ public class QueryPhase implements SearchPhase {
|
||||
assert numDocs > 0;
|
||||
if (searchContext.sort() != null) {
|
||||
topDocsCollector = TopFieldCollector.create(searchContext.sort(), numDocs,
|
||||
(FieldDoc) lastEmittedDoc, true, searchContext.trackScores(), searchContext.trackScores());
|
||||
(FieldDoc) after, true, searchContext.trackScores(), searchContext.trackScores());
|
||||
} else {
|
||||
rescore = !searchContext.rescore().isEmpty();
|
||||
for (RescoreSearchContext rescoreContext : searchContext.rescore()) {
|
||||
numDocs = Math.max(rescoreContext.window(), numDocs);
|
||||
}
|
||||
topDocsCollector = TopScoreDocCollector.create(numDocs, lastEmittedDoc);
|
||||
topDocsCollector = TopScoreDocCollector.create(numDocs, after);
|
||||
}
|
||||
collector = topDocsCollector;
|
||||
if (doProfile) {
|
||||
|
@ -0,0 +1,303 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.search.searchafter;
|
||||
|
||||
import org.apache.lucene.search.FieldDoc;
|
||||
import org.apache.lucene.search.Sort;
|
||||
import org.apache.lucene.search.SortField;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.ParseFieldMatcher;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.io.stream.Writeable;
|
||||
import org.elasticsearch.common.text.Text;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.FromXContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.index.fielddata.IndexFieldData;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
public class SearchAfterBuilder implements ToXContent, FromXContentBuilder<SearchAfterBuilder>, Writeable<SearchAfterBuilder> {
|
||||
public static final SearchAfterBuilder PROTOTYPE = new SearchAfterBuilder();
|
||||
public static final ParseField SEARCH_AFTER = new ParseField("search_after");
|
||||
private static final Object[] EMPTY_SORT_VALUES = new Object[0];
|
||||
|
||||
private Object[] sortValues = EMPTY_SORT_VALUES;
|
||||
|
||||
public SearchAfterBuilder setSortValues(Object[] values) {
|
||||
if (values == null) {
|
||||
throw new NullPointerException("Values cannot be null.");
|
||||
}
|
||||
if (values.length == 0) {
|
||||
throw new IllegalArgumentException("Values must contains at least one value.");
|
||||
}
|
||||
sortValues = new Object[values.length];
|
||||
System.arraycopy(values, 0, sortValues, 0, values.length);
|
||||
return this;
|
||||
}
|
||||
|
||||
public Object[] getSortValues() {
|
||||
return sortValues;
|
||||
}
|
||||
|
||||
public static FieldDoc buildFieldDoc(Sort sort, Object[] values) {
|
||||
if (sort == null || sort.getSort() == null || sort.getSort().length == 0) {
|
||||
throw new IllegalArgumentException("Sort must contain at least one field.");
|
||||
}
|
||||
|
||||
SortField[] sortFields = sort.getSort();
|
||||
if (sortFields.length != values.length) {
|
||||
throw new IllegalArgumentException(SEARCH_AFTER.getPreferredName() + " has " + values.length + " value(s) but sort has " + sort.getSort().length + ".");
|
||||
}
|
||||
Object[] fieldValues = new Object[sortFields.length];
|
||||
for (int i = 0; i < sortFields.length; i++) {
|
||||
SortField sortField = sortFields[i];
|
||||
fieldValues[i] = convertValueFromSortField(values[i], sortField);
|
||||
}
|
||||
// We set the doc id to Integer.MAX_VALUE in order to make sure that the search starts "after" the first document that is equal to the field values.
|
||||
return new FieldDoc(Integer.MAX_VALUE, 0, fieldValues);
|
||||
}
|
||||
|
||||
private static Object convertValueFromSortField(Object value, SortField sortField) {
|
||||
if (sortField.getComparatorSource() instanceof IndexFieldData.XFieldComparatorSource) {
|
||||
IndexFieldData.XFieldComparatorSource cmpSource = (IndexFieldData.XFieldComparatorSource) sortField.getComparatorSource();
|
||||
return convertValueFromSortType(sortField.getField(), cmpSource.reducedType(), value);
|
||||
}
|
||||
return convertValueFromSortType(sortField.getField(), sortField.getType(), value);
|
||||
}
|
||||
|
||||
private static Object convertValueFromSortType(String fieldName, SortField.Type sortType, Object value) {
|
||||
try {
|
||||
switch (sortType) {
|
||||
case DOC:
|
||||
if (value instanceof Number) {
|
||||
return ((Number) value).intValue();
|
||||
}
|
||||
return Integer.parseInt(value.toString());
|
||||
|
||||
case SCORE:
|
||||
if (value instanceof Number) {
|
||||
return ((Number) value).floatValue();
|
||||
}
|
||||
return Float.parseFloat(value.toString());
|
||||
|
||||
case INT:
|
||||
if (value instanceof Number) {
|
||||
return ((Number) value).intValue();
|
||||
}
|
||||
return Integer.parseInt(value.toString());
|
||||
|
||||
case DOUBLE:
|
||||
if (value instanceof Number) {
|
||||
return ((Number) value).doubleValue();
|
||||
}
|
||||
return Double.parseDouble(value.toString());
|
||||
|
||||
case LONG:
|
||||
if (value instanceof Number) {
|
||||
return ((Number) value).longValue();
|
||||
}
|
||||
return Long.parseLong(value.toString());
|
||||
|
||||
case FLOAT:
|
||||
if (value instanceof Number) {
|
||||
return ((Number) value).floatValue();
|
||||
}
|
||||
return Float.parseFloat(value.toString());
|
||||
|
||||
case STRING_VAL:
|
||||
case STRING:
|
||||
return new BytesRef(value.toString());
|
||||
|
||||
default:
|
||||
throw new IllegalArgumentException("Comparator type [" + sortType.name() + "] for field [" + fieldName + "] is not supported.");
|
||||
}
|
||||
} catch(NumberFormatException e) {
|
||||
throw new IllegalArgumentException("Failed to parse " + SEARCH_AFTER.getPreferredName() + " value for field [" + fieldName + "].", e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject();
|
||||
innerToXContent(builder);
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
|
||||
void innerToXContent(XContentBuilder builder) throws IOException {
|
||||
builder.field(SEARCH_AFTER.getPreferredName(), sortValues);
|
||||
}
|
||||
|
||||
@Override
|
||||
public SearchAfterBuilder fromXContent(XContentParser parser, ParseFieldMatcher parseFieldMatcher) throws IOException {
|
||||
SearchAfterBuilder builder = new SearchAfterBuilder();
|
||||
XContentParser.Token token = parser.currentToken();
|
||||
List<Object> values = new ArrayList<> ();
|
||||
if (token == XContentParser.Token.START_ARRAY) {
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
|
||||
if (token == XContentParser.Token.VALUE_NUMBER) {
|
||||
switch (parser.numberType()) {
|
||||
case INT:
|
||||
values.add(parser.intValue());
|
||||
break;
|
||||
|
||||
case LONG:
|
||||
values.add(parser.longValue());
|
||||
break;
|
||||
|
||||
case DOUBLE:
|
||||
values.add(parser.doubleValue());
|
||||
break;
|
||||
|
||||
case FLOAT:
|
||||
values.add(parser.floatValue());
|
||||
break;
|
||||
|
||||
default:
|
||||
throw new AssertionError("Unknown number type []" + parser.numberType());
|
||||
}
|
||||
} else if (token == XContentParser.Token.VALUE_STRING) {
|
||||
values.add(parser.text());
|
||||
} else if (token == XContentParser.Token.VALUE_BOOLEAN) {
|
||||
values.add(parser.booleanValue());
|
||||
} else {
|
||||
throw new ParsingException(parser.getTokenLocation(), "Expected [" + XContentParser.Token.VALUE_STRING + "] or [" + XContentParser.Token.VALUE_NUMBER + "] or [" + XContentParser.Token.VALUE_BOOLEAN + "] but found [" + token + "] inside search_after.", parser.getTokenLocation());
|
||||
}
|
||||
}
|
||||
} else {
|
||||
throw new ParsingException(parser.getTokenLocation(), "Expected [" + XContentParser.Token.START_ARRAY + "] in [" + SEARCH_AFTER.getPreferredName() + "] but found [" + token + "] inside search_after", parser.getTokenLocation());
|
||||
}
|
||||
builder.setSortValues(values.toArray());
|
||||
return builder;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
out.writeVInt(sortValues.length);
|
||||
for (Object fieldValue : sortValues) {
|
||||
if (fieldValue == null) {
|
||||
throw new IOException("Can't handle " + SEARCH_AFTER.getPreferredName() + " field value of type [null]");
|
||||
}
|
||||
Class type = fieldValue.getClass();
|
||||
if (type == String.class) {
|
||||
out.writeByte((byte) 1);
|
||||
out.writeString((String) fieldValue);
|
||||
} else if (type == Integer.class) {
|
||||
out.writeByte((byte) 2);
|
||||
out.writeInt((Integer) fieldValue);
|
||||
} else if (type == Long.class) {
|
||||
out.writeByte((byte) 3);
|
||||
out.writeLong((Long) fieldValue);
|
||||
} else if (type == Float.class) {
|
||||
out.writeByte((byte) 4);
|
||||
out.writeFloat((Float) fieldValue);
|
||||
} else if (type == Double.class) {
|
||||
out.writeByte((byte) 5);
|
||||
out.writeDouble((Double) fieldValue);
|
||||
} else if (type == Byte.class) {
|
||||
out.writeByte((byte) 6);
|
||||
out.writeByte((Byte) fieldValue);
|
||||
} else if (type == Short.class) {
|
||||
out.writeByte((byte) 7);
|
||||
out.writeShort((Short) fieldValue);
|
||||
} else if (type == Boolean.class) {
|
||||
out.writeByte((byte) 8);
|
||||
out.writeBoolean((Boolean) fieldValue);
|
||||
} else if (fieldValue instanceof Text) {
|
||||
out.writeByte((byte) 9);
|
||||
out.writeText((Text) fieldValue);
|
||||
} else {
|
||||
throw new IOException("Can't handle " + SEARCH_AFTER.getPreferredName() + " field value of type [" + type + "]");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public SearchAfterBuilder readFrom(StreamInput in) throws IOException {
|
||||
SearchAfterBuilder builder = new SearchAfterBuilder();
|
||||
int size = in.readVInt();
|
||||
Object[] values = new Object[size];
|
||||
for (int i = 0; i < size; i++) {
|
||||
byte type = in.readByte();
|
||||
if (type == 1) {
|
||||
values[i] = in.readString();
|
||||
} else if (type == 2) {
|
||||
values[i] = in.readInt();
|
||||
} else if (type == 3) {
|
||||
values[i] = in.readLong();
|
||||
} else if (type == 4) {
|
||||
values[i] = in.readFloat();
|
||||
} else if (type == 5) {
|
||||
values[i] = in.readDouble();
|
||||
} else if (type == 6) {
|
||||
values[i] = in.readByte();
|
||||
} else if (type == 7) {
|
||||
values[i] = in.readShort();
|
||||
} else if (type == 8) {
|
||||
values[i] = in.readBoolean();
|
||||
} else if (type == 9) {
|
||||
values[i] = in.readText();
|
||||
} else {
|
||||
throw new IOException("Can't match type [" + type + "]");
|
||||
}
|
||||
}
|
||||
builder.setSortValues(values);
|
||||
return builder;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object other) {
|
||||
if (! (other instanceof SearchAfterBuilder)) {
|
||||
return false;
|
||||
}
|
||||
return Arrays.equals(sortValues, ((SearchAfterBuilder) other).sortValues);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(this.sortValues);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
try {
|
||||
XContentBuilder builder = XContentFactory.jsonBuilder();
|
||||
builder.prettyPrint();
|
||||
toXContent(builder, EMPTY_PARAMS);
|
||||
return builder.string();
|
||||
} catch (Exception e) {
|
||||
throw new ElasticsearchException("Failed to build xcontent.", e);
|
||||
}
|
||||
}
|
||||
}
|
@ -39,7 +39,6 @@ import java.io.IOException;
|
||||
import static org.elasticsearch.client.Requests.clusterHealthRequest;
|
||||
import static org.elasticsearch.client.Requests.refreshRequest;
|
||||
import static org.elasticsearch.client.Requests.searchRequest;
|
||||
import static org.elasticsearch.common.settings.Settings.settingsBuilder;
|
||||
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
|
||||
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
|
||||
import static org.hamcrest.Matchers.anyOf;
|
||||
|
@ -33,6 +33,7 @@ import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.settings.SettingsFilter;
|
||||
import org.elasticsearch.common.settings.SettingsModule;
|
||||
import org.elasticsearch.common.text.Text;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
@ -54,6 +55,7 @@ import org.elasticsearch.search.fetch.innerhits.InnerHitsBuilder.InnerHit;
|
||||
import org.elasticsearch.search.fetch.source.FetchSourceContext;
|
||||
import org.elasticsearch.search.highlight.HighlightBuilderTests;
|
||||
import org.elasticsearch.search.rescore.QueryRescoreBuilderTests;
|
||||
import org.elasticsearch.search.searchafter.SearchAfterBuilder;
|
||||
import org.elasticsearch.search.sort.SortBuilders;
|
||||
import org.elasticsearch.search.sort.SortOrder;
|
||||
import org.elasticsearch.search.suggest.SuggestBuilder;
|
||||
@ -262,6 +264,56 @@ public class SearchSourceBuilderTests extends ESTestCase {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (randomBoolean()) {
|
||||
int numSearchFrom = randomIntBetween(1, 5);
|
||||
// We build a json version of the search_from first in order to
|
||||
// ensure that every number type remain the same before/after xcontent (de)serialization.
|
||||
// This is not a problem because the final type of each field value is extracted from associated sort field.
|
||||
// This little trick ensure that equals and hashcode are the same when using the xcontent serialization.
|
||||
XContentBuilder jsonBuilder = XContentFactory.jsonBuilder();
|
||||
jsonBuilder.startObject();
|
||||
jsonBuilder.startArray("search_from");
|
||||
for (int i = 0; i < numSearchFrom; i++) {
|
||||
int branch = randomInt(8);
|
||||
switch (branch) {
|
||||
case 0:
|
||||
jsonBuilder.value(randomInt());
|
||||
break;
|
||||
case 1:
|
||||
jsonBuilder.value(randomFloat());
|
||||
break;
|
||||
case 2:
|
||||
jsonBuilder.value(randomLong());
|
||||
break;
|
||||
case 3:
|
||||
jsonBuilder.value(randomDouble());
|
||||
break;
|
||||
case 4:
|
||||
jsonBuilder.value(randomAsciiOfLengthBetween(5, 20));
|
||||
break;
|
||||
case 5:
|
||||
jsonBuilder.value(randomBoolean());
|
||||
break;
|
||||
case 6:
|
||||
jsonBuilder.value(randomByte());
|
||||
break;
|
||||
case 7:
|
||||
jsonBuilder.value(randomShort());
|
||||
break;
|
||||
case 8:
|
||||
jsonBuilder.value(new Text(randomAsciiOfLengthBetween(5, 20)));
|
||||
break;
|
||||
}
|
||||
}
|
||||
jsonBuilder.endArray();
|
||||
jsonBuilder.endObject();
|
||||
XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(jsonBuilder.bytes());
|
||||
parser.nextToken();
|
||||
parser.nextToken();
|
||||
parser.nextToken();
|
||||
builder.searchAfter(SearchAfterBuilder.PROTOTYPE.fromXContent(parser, null).getSortValues());
|
||||
}
|
||||
if (randomBoolean()) {
|
||||
builder.highlighter(HighlightBuilderTests.randomHighlighterBuilder());
|
||||
}
|
||||
|
@ -0,0 +1,257 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.search.searchafter;
|
||||
|
||||
import org.elasticsearch.common.ParseFieldMatcher;
|
||||
import org.elasticsearch.common.io.stream.BytesStreamOutput;
|
||||
import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
|
||||
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.text.Text;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.XContentType;
|
||||
import org.elasticsearch.common.xcontent.XContentHelper;
|
||||
import org.elasticsearch.index.query.MatchAllQueryParser;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
import org.hamcrest.Matchers;
|
||||
import org.junit.AfterClass;
|
||||
import org.junit.BeforeClass;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
|
||||
public class SearchAfterBuilderTests extends ESTestCase {
|
||||
private static final int NUMBER_OF_TESTBUILDERS = 20;
|
||||
private static NamedWriteableRegistry namedWriteableRegistry;
|
||||
private static IndicesQueriesRegistry indicesQueriesRegistry;
|
||||
|
||||
/**
|
||||
* setup for the whole base test class
|
||||
*/
|
||||
@BeforeClass
|
||||
public static void init() {
|
||||
namedWriteableRegistry = new NamedWriteableRegistry();
|
||||
indicesQueriesRegistry = new IndicesQueriesRegistry(Settings.settingsBuilder().build(),
|
||||
Collections.singletonMap("match_all", new MatchAllQueryParser()));
|
||||
}
|
||||
|
||||
@AfterClass
|
||||
public static void afterClass() throws Exception {
|
||||
namedWriteableRegistry = null;
|
||||
indicesQueriesRegistry = null;
|
||||
}
|
||||
|
||||
private final SearchAfterBuilder randomSearchFromBuilder() throws IOException {
|
||||
int numSearchFrom = randomIntBetween(1, 10);
|
||||
SearchAfterBuilder searchAfterBuilder = new SearchAfterBuilder();
|
||||
Object[] values = new Object[numSearchFrom];
|
||||
for (int i = 0; i < numSearchFrom; i++) {
|
||||
int branch = randomInt(8);
|
||||
switch (branch) {
|
||||
case 0:
|
||||
values[i] = randomInt();
|
||||
break;
|
||||
case 1:
|
||||
values[i] = randomFloat();
|
||||
break;
|
||||
case 2:
|
||||
values[i] = randomLong();
|
||||
break;
|
||||
case 3:
|
||||
values[i] = randomDouble();
|
||||
break;
|
||||
case 4:
|
||||
values[i] = randomAsciiOfLengthBetween(5, 20);
|
||||
break;
|
||||
case 5:
|
||||
values[i] = randomBoolean();
|
||||
break;
|
||||
case 6:
|
||||
values[i] = randomByte();
|
||||
break;
|
||||
case 7:
|
||||
values[i] = randomShort();
|
||||
break;
|
||||
case 8:
|
||||
values[i] = new Text(randomAsciiOfLengthBetween(5, 20));
|
||||
break;
|
||||
}
|
||||
}
|
||||
searchAfterBuilder.setSortValues(values);
|
||||
return searchAfterBuilder;
|
||||
}
|
||||
|
||||
// We build a json version of the search_after first in order to
|
||||
// ensure that every number type remain the same before/after xcontent (de)serialization.
|
||||
// This is not a problem because the final type of each field value is extracted from associated sort field.
|
||||
// This little trick ensure that equals and hashcode are the same when using the xcontent serialization.
|
||||
private final SearchAfterBuilder randomJsonSearchFromBuilder() throws IOException {
|
||||
int numSearchAfter = randomIntBetween(1, 10);
|
||||
XContentBuilder jsonBuilder = XContentFactory.jsonBuilder();
|
||||
jsonBuilder.startObject();
|
||||
jsonBuilder.startArray("search_after");
|
||||
for (int i = 0; i < numSearchAfter; i++) {
|
||||
int branch = randomInt(8);
|
||||
switch (branch) {
|
||||
case 0:
|
||||
jsonBuilder.value(randomInt());
|
||||
break;
|
||||
case 1:
|
||||
jsonBuilder.value(randomFloat());
|
||||
break;
|
||||
case 2:
|
||||
jsonBuilder.value(randomLong());
|
||||
break;
|
||||
case 3:
|
||||
jsonBuilder.value(randomDouble());
|
||||
break;
|
||||
case 4:
|
||||
jsonBuilder.value(randomAsciiOfLengthBetween(5, 20));
|
||||
break;
|
||||
case 5:
|
||||
jsonBuilder.value(randomBoolean());
|
||||
break;
|
||||
case 6:
|
||||
jsonBuilder.value(randomByte());
|
||||
break;
|
||||
case 7:
|
||||
jsonBuilder.value(randomShort());
|
||||
break;
|
||||
case 8:
|
||||
jsonBuilder.value(new Text(randomAsciiOfLengthBetween(5, 20)));
|
||||
break;
|
||||
}
|
||||
}
|
||||
jsonBuilder.endArray();
|
||||
jsonBuilder.endObject();
|
||||
XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(jsonBuilder.bytes());
|
||||
parser.nextToken();
|
||||
parser.nextToken();
|
||||
parser.nextToken();
|
||||
return SearchAfterBuilder.PROTOTYPE.fromXContent(parser, null);
|
||||
}
|
||||
|
||||
private static SearchAfterBuilder serializedCopy(SearchAfterBuilder original) throws IOException {
|
||||
try (BytesStreamOutput output = new BytesStreamOutput()) {
|
||||
original.writeTo(output);
|
||||
try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(output.bytes()), namedWriteableRegistry)) {
|
||||
return SearchAfterBuilder.PROTOTYPE.readFrom(in);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public void testSerialization() throws Exception {
|
||||
for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) {
|
||||
SearchAfterBuilder original = randomSearchFromBuilder();
|
||||
SearchAfterBuilder deserialized = serializedCopy(original);
|
||||
assertEquals(deserialized, original);
|
||||
assertEquals(deserialized.hashCode(), original.hashCode());
|
||||
assertNotSame(deserialized, original);
|
||||
}
|
||||
}
|
||||
|
||||
public void testEqualsAndHashcode() throws Exception {
|
||||
for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) {
|
||||
SearchAfterBuilder firstBuilder = randomSearchFromBuilder();
|
||||
assertFalse("searchFrom is equal to null", firstBuilder.equals(null));
|
||||
assertFalse("searchFrom is equal to incompatible type", firstBuilder.equals(""));
|
||||
assertTrue("searchFrom is not equal to self", firstBuilder.equals(firstBuilder));
|
||||
assertThat("same searchFrom's hashcode returns different values if called multiple times", firstBuilder.hashCode(),
|
||||
equalTo(firstBuilder.hashCode()));
|
||||
|
||||
SearchAfterBuilder secondBuilder = serializedCopy(firstBuilder);
|
||||
assertTrue("searchFrom is not equal to self", secondBuilder.equals(secondBuilder));
|
||||
assertTrue("searchFrom is not equal to its copy", firstBuilder.equals(secondBuilder));
|
||||
assertTrue("equals is not symmetric", secondBuilder.equals(firstBuilder));
|
||||
assertThat("searchFrom copy's hashcode is different from original hashcode", secondBuilder.hashCode(), equalTo(firstBuilder.hashCode()));
|
||||
|
||||
SearchAfterBuilder thirdBuilder = serializedCopy(secondBuilder);
|
||||
assertTrue("searchFrom is not equal to self", thirdBuilder.equals(thirdBuilder));
|
||||
assertTrue("searchFrom is not equal to its copy", secondBuilder.equals(thirdBuilder));
|
||||
assertThat("searchFrom copy's hashcode is different from original hashcode", secondBuilder.hashCode(), equalTo(thirdBuilder.hashCode()));
|
||||
assertTrue("equals is not transitive", firstBuilder.equals(thirdBuilder));
|
||||
assertThat("searchFrom copy's hashcode is different from original hashcode", firstBuilder.hashCode(), equalTo(thirdBuilder.hashCode()));
|
||||
assertTrue("searchFrom is not symmetric", thirdBuilder.equals(secondBuilder));
|
||||
assertTrue("searchFrom is not symmetric", thirdBuilder.equals(firstBuilder));
|
||||
}
|
||||
}
|
||||
|
||||
public void testFromXContent() throws Exception {
|
||||
QueryParseContext context = new QueryParseContext(indicesQueriesRegistry);
|
||||
context.parseFieldMatcher(new ParseFieldMatcher(Settings.EMPTY));
|
||||
for (int runs = 0; runs < 20; runs++) {
|
||||
SearchAfterBuilder searchAfterBuilder = randomJsonSearchFromBuilder();
|
||||
XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values()));
|
||||
if (randomBoolean()) {
|
||||
builder.prettyPrint();
|
||||
}
|
||||
builder.startObject();
|
||||
searchAfterBuilder.innerToXContent(builder);
|
||||
builder.endObject();
|
||||
XContentParser parser = XContentHelper.createParser(builder.bytes());
|
||||
context.reset(parser);
|
||||
parser.nextToken();
|
||||
parser.nextToken();
|
||||
parser.nextToken();
|
||||
SearchAfterBuilder secondSearchAfterBuilder = SearchAfterBuilder.PROTOTYPE.fromXContent(parser, null);
|
||||
assertNotSame(searchAfterBuilder, secondSearchAfterBuilder);
|
||||
assertEquals(searchAfterBuilder, secondSearchAfterBuilder);
|
||||
assertEquals(searchAfterBuilder.hashCode(), secondSearchAfterBuilder.hashCode());
|
||||
}
|
||||
}
|
||||
|
||||
public void testWithNullValue() throws Exception {
|
||||
SearchAfterBuilder builder = new SearchAfterBuilder();
|
||||
builder.setSortValues(new Object[] {1, "1", null});
|
||||
try {
|
||||
serializedCopy(builder);
|
||||
fail("Should fail on null values");
|
||||
} catch (IOException e) {
|
||||
assertThat(e.getMessage(), Matchers.equalTo("Can't handle search_after field value of type [null]"));
|
||||
}
|
||||
}
|
||||
|
||||
public void testWithNullArray() throws Exception {
|
||||
SearchAfterBuilder builder = new SearchAfterBuilder();
|
||||
try {
|
||||
builder.setSortValues(null);
|
||||
fail("Should fail on null array.");
|
||||
} catch (NullPointerException e) {
|
||||
assertThat(e.getMessage(), Matchers.equalTo("Values cannot be null."));
|
||||
}
|
||||
}
|
||||
|
||||
public void testWithEmptyArray() throws Exception {
|
||||
SearchAfterBuilder builder = new SearchAfterBuilder();
|
||||
try {
|
||||
builder.setSortValues(new Object[0]);
|
||||
fail("Should fail on empty array.");
|
||||
} catch (IllegalArgumentException e) {
|
||||
assertThat(e.getMessage(), Matchers.equalTo("Values must contains at least one value."));
|
||||
}
|
||||
}
|
||||
}
|
@ -0,0 +1,314 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.search.searchafter;
|
||||
|
||||
import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder;
|
||||
import org.elasticsearch.action.index.IndexRequestBuilder;
|
||||
import org.elasticsearch.action.search.SearchPhaseExecutionException;
|
||||
import org.elasticsearch.action.search.SearchRequestBuilder;
|
||||
import org.elasticsearch.action.search.SearchResponse;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.text.Text;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.search.SearchContextException;
|
||||
import org.elasticsearch.search.SearchHit;
|
||||
import org.elasticsearch.search.sort.SortOrder;
|
||||
import org.elasticsearch.test.ESIntegTestCase;
|
||||
import org.elasticsearch.transport.RemoteTransportException;
|
||||
import org.hamcrest.Matchers;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Comparator;
|
||||
import java.util.Collections;
|
||||
import java.util.Arrays;
|
||||
|
||||
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
|
||||
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
|
||||
public class SearchAfterIT extends ESIntegTestCase {
|
||||
private static final String INDEX_NAME = "test";
|
||||
private static final String TYPE_NAME = "type1";
|
||||
private static final int NUM_DOCS = 100;
|
||||
|
||||
public void testsShouldFail() throws Exception {
|
||||
client().admin().indices().prepareCreate("test").execute().actionGet();
|
||||
client().prepareIndex("test", "type1", "0").setSource("field1", 0, "field2", "toto").execute().actionGet();
|
||||
refresh();
|
||||
|
||||
try {
|
||||
client().prepareSearch("test")
|
||||
.addSort("field1", SortOrder.ASC)
|
||||
.setQuery(matchAllQuery())
|
||||
.searchAfter(new Object[]{0})
|
||||
.setScroll("1m")
|
||||
.execute().actionGet();
|
||||
|
||||
fail("Should fail on search_after cannot be used with scroll.");
|
||||
} catch (SearchPhaseExecutionException e) {
|
||||
assertThat(e.getCause().getClass(), Matchers.equalTo(RemoteTransportException.class));
|
||||
assertThat(e.getCause().getCause().getClass(), Matchers.equalTo(SearchContextException.class));
|
||||
assertThat(e.getCause().getCause().getMessage(), Matchers.equalTo("`search_after` cannot be used in a scroll context."));
|
||||
}
|
||||
try {
|
||||
client().prepareSearch("test")
|
||||
.addSort("field1", SortOrder.ASC)
|
||||
.setQuery(matchAllQuery())
|
||||
.searchAfter(new Object[]{0})
|
||||
.setFrom(10)
|
||||
.execute().actionGet();
|
||||
|
||||
fail("Should fail on search_after cannot be used with from > 0.");
|
||||
} catch (SearchPhaseExecutionException e) {
|
||||
assertThat(e.getCause().getClass(), Matchers.equalTo(RemoteTransportException.class));
|
||||
assertThat(e.getCause().getCause().getClass(), Matchers.equalTo(SearchContextException.class));
|
||||
assertThat(e.getCause().getCause().getMessage(), Matchers.equalTo("`from` parameter must be set to 0 when `search_after` is used."));
|
||||
}
|
||||
|
||||
try {
|
||||
client().prepareSearch("test")
|
||||
.setQuery(matchAllQuery())
|
||||
.searchAfter(new Object[]{0.75f})
|
||||
.execute().actionGet();
|
||||
|
||||
fail("Should fail on search_after on score only is disabled");
|
||||
} catch (SearchPhaseExecutionException e) {
|
||||
assertThat(e.getCause().getClass(), Matchers.equalTo(RemoteTransportException.class));
|
||||
assertThat(e.getCause().getCause().getClass(), Matchers.equalTo(IllegalArgumentException.class));
|
||||
assertThat(e.getCause().getCause().getMessage(), Matchers.equalTo("Sort must contain at least one field."));
|
||||
}
|
||||
|
||||
try {
|
||||
client().prepareSearch("test")
|
||||
.addSort("field2", SortOrder.DESC)
|
||||
.addSort("field1", SortOrder.ASC)
|
||||
.setQuery(matchAllQuery())
|
||||
.searchAfter(new Object[]{1})
|
||||
.get();
|
||||
fail("Should fail on search_after size differs from sort field size");
|
||||
} catch (SearchPhaseExecutionException e) {
|
||||
assertThat(e.getCause().getClass(), Matchers.equalTo(RemoteTransportException.class));
|
||||
assertThat(e.getCause().getCause().getClass(), Matchers.equalTo(IllegalArgumentException.class));
|
||||
assertThat(e.getCause().getCause().getMessage(), Matchers.equalTo("search_after has 1 value(s) but sort has 2."));
|
||||
}
|
||||
|
||||
try {
|
||||
client().prepareSearch("test")
|
||||
.setQuery(matchAllQuery())
|
||||
.addSort("field1", SortOrder.ASC)
|
||||
.searchAfter(new Object[]{1, 2})
|
||||
.execute().actionGet();
|
||||
fail("Should fail on search_after size differs from sort field size");
|
||||
} catch (SearchPhaseExecutionException e) {
|
||||
assertThat(e.getCause().getClass(), Matchers.equalTo(RemoteTransportException.class));
|
||||
assertThat(e.getCause().getCause().getClass(), Matchers.equalTo(IllegalArgumentException.class));
|
||||
assertThat(e.getCause().getCause().getMessage(), Matchers.equalTo("search_after has 2 value(s) but sort has 1."));
|
||||
}
|
||||
|
||||
try {
|
||||
client().prepareSearch("test")
|
||||
.setQuery(matchAllQuery())
|
||||
.addSort("field1", SortOrder.ASC)
|
||||
.searchAfter(new Object[]{"toto"})
|
||||
.execute().actionGet();
|
||||
|
||||
fail("Should fail on search_after on score only is disabled");
|
||||
} catch (SearchPhaseExecutionException e) {
|
||||
assertThat(e.getCause().getClass(), Matchers.equalTo(RemoteTransportException.class));
|
||||
assertThat(e.getCause().getCause().getClass(), Matchers.equalTo(IllegalArgumentException.class));
|
||||
assertThat(e.getCause().getCause().getMessage(), Matchers.equalTo("Failed to parse search_after value for field [field1]."));
|
||||
}
|
||||
}
|
||||
|
||||
public void testWithSimpleTypes() throws Exception {
|
||||
int numFields = randomInt(20) + 1;
|
||||
int[] types = new int[numFields-1];
|
||||
for (int i = 0; i < numFields-1; i++) {
|
||||
types[i] = randomInt(6);
|
||||
}
|
||||
List<List> documents = new ArrayList<> ();
|
||||
for (int i = 0; i < NUM_DOCS; i++) {
|
||||
List values = new ArrayList<>();
|
||||
for (int type : types) {
|
||||
switch (type) {
|
||||
case 0:
|
||||
values.add(randomBoolean());
|
||||
break;
|
||||
case 1:
|
||||
values.add(randomByte());
|
||||
break;
|
||||
case 2:
|
||||
values.add(randomShort());
|
||||
break;
|
||||
case 3:
|
||||
values.add(randomInt());
|
||||
break;
|
||||
case 4:
|
||||
values.add(randomFloat());
|
||||
break;
|
||||
case 5:
|
||||
values.add(randomDouble());
|
||||
break;
|
||||
case 6:
|
||||
values.add(new Text(randomAsciiOfLengthBetween(5, 20)));
|
||||
break;
|
||||
}
|
||||
}
|
||||
values.add(new Text(Strings.randomBase64UUID()));
|
||||
documents.add(values);
|
||||
}
|
||||
int reqSize = randomInt(NUM_DOCS-1);
|
||||
if (reqSize == 0) {
|
||||
reqSize = 1;
|
||||
}
|
||||
assertSearchFromWithSortValues(INDEX_NAME, TYPE_NAME, documents, reqSize);
|
||||
}
|
||||
|
||||
private static class ListComparator implements Comparator<List> {
|
||||
@Override
|
||||
public int compare(List o1, List o2) {
|
||||
if (o1.size() > o2.size()) {
|
||||
return 1;
|
||||
}
|
||||
|
||||
if (o2.size() > o1.size()) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
for (int i = 0; i < o1.size(); i++) {
|
||||
if (!(o1.get(i) instanceof Comparable)) {
|
||||
throw new RuntimeException(o1.get(i).getClass() + " is not comparable");
|
||||
}
|
||||
Object cmp1 = o1.get(i);
|
||||
Object cmp2 = o2.get(i);
|
||||
int cmp = ((Comparable)cmp1).compareTo(cmp2);
|
||||
if (cmp != 0) {
|
||||
return cmp;
|
||||
}
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
private ListComparator LST_COMPARATOR = new ListComparator();
|
||||
|
||||
private void assertSearchFromWithSortValues(String indexName, String typeName, List<List> documents, int reqSize) throws Exception {
|
||||
int numFields = documents.get(0).size();
|
||||
{
|
||||
createIndexMappingsFromObjectType(indexName, typeName, documents.get(0));
|
||||
List<IndexRequestBuilder> requests = new ArrayList<>();
|
||||
for (int i = 0; i < documents.size(); i++) {
|
||||
XContentBuilder builder = jsonBuilder();
|
||||
assertThat(documents.get(i).size(), Matchers.equalTo(numFields));
|
||||
builder.startObject();
|
||||
for (int j = 0; j < numFields; j++) {
|
||||
builder.field("field" + Integer.toString(j), documents.get(i).get(j));
|
||||
}
|
||||
builder.endObject();
|
||||
requests.add(client().prepareIndex(INDEX_NAME, TYPE_NAME, Integer.toString(i)).setSource(builder));
|
||||
}
|
||||
indexRandom(true, requests);
|
||||
}
|
||||
|
||||
Collections.sort(documents, LST_COMPARATOR);
|
||||
int offset = 0;
|
||||
Object[] sortValues = null;
|
||||
while (offset < documents.size()) {
|
||||
SearchRequestBuilder req = client().prepareSearch(indexName);
|
||||
for (int i = 0; i < documents.get(0).size(); i++) {
|
||||
req.addSort("field" + Integer.toString(i), SortOrder.ASC);
|
||||
}
|
||||
req.setQuery(matchAllQuery()).setSize(reqSize);
|
||||
if (sortValues != null) {
|
||||
req.searchAfter(sortValues);
|
||||
}
|
||||
SearchResponse searchResponse = req.execute().actionGet();
|
||||
for (SearchHit hit : searchResponse.getHits()) {
|
||||
List toCompare = convertSortValues(documents.get(offset++));
|
||||
assertThat(LST_COMPARATOR.compare(toCompare, Arrays.asList(hit.sortValues())), equalTo(0));
|
||||
}
|
||||
sortValues = searchResponse.getHits().hits()[searchResponse.getHits().hits().length-1].getSortValues();
|
||||
}
|
||||
}
|
||||
|
||||
private void createIndexMappingsFromObjectType(String indexName, String typeName, List<Object> types) {
|
||||
CreateIndexRequestBuilder indexRequestBuilder = client().admin().indices().prepareCreate(indexName);
|
||||
List<String> mappings = new ArrayList<> ();
|
||||
int numFields = types.size();
|
||||
for (int i = 0; i < numFields; i++) {
|
||||
Class type = types.get(i).getClass();
|
||||
if (type == Integer.class) {
|
||||
mappings.add("field" + Integer.toString(i));
|
||||
mappings.add("type=integer");
|
||||
} else if (type == Long.class) {
|
||||
mappings.add("field" + Integer.toString(i));
|
||||
mappings.add("type=long");
|
||||
} else if (type == Float.class) {
|
||||
mappings.add("field" + Integer.toString(i));
|
||||
mappings.add("type=float");
|
||||
} else if (type == Double.class) {
|
||||
mappings.add("field" + Integer.toString(i));
|
||||
mappings.add("type=double");
|
||||
} else if (type == Byte.class) {
|
||||
mappings.add("field" + Integer.toString(i));
|
||||
mappings.add("type=byte");
|
||||
} else if (type == Short.class) {
|
||||
mappings.add("field" + Integer.toString(i));
|
||||
mappings.add("type=short");
|
||||
} else if (type == Boolean.class) {
|
||||
mappings.add("field" + Integer.toString(i));
|
||||
mappings.add("type=boolean");
|
||||
} else if (types.get(i) instanceof Text) {
|
||||
mappings.add("field" + Integer.toString(i));
|
||||
mappings.add("type=string,index=not_analyzed");
|
||||
} else {
|
||||
fail("Can't match type [" + type + "]");
|
||||
}
|
||||
}
|
||||
indexRequestBuilder.addMapping(typeName, mappings.toArray()).execute().actionGet();
|
||||
}
|
||||
|
||||
// Convert Integer, Short, Byte and Boolean to Long in order to match the conversion done
|
||||
// by the internal hits when populating the sort values.
|
||||
private List<Object> convertSortValues(List<Object> sortValues) {
|
||||
List<Object> converted = new ArrayList<> ();
|
||||
for (int i = 0; i < sortValues.size(); i++) {
|
||||
Object from = sortValues.get(i);
|
||||
if (from instanceof Integer) {
|
||||
converted.add(((Integer) from).longValue());
|
||||
} else if (from instanceof Short) {
|
||||
converted.add(((Short) from).longValue());
|
||||
} else if (from instanceof Byte) {
|
||||
converted.add(((Byte) from).longValue());
|
||||
} else if (from instanceof Boolean) {
|
||||
boolean b = (boolean) from;
|
||||
if (b) {
|
||||
converted.add(1L);
|
||||
} else {
|
||||
converted.add(0L);
|
||||
}
|
||||
} else {
|
||||
converted.add(from);
|
||||
}
|
||||
}
|
||||
return converted;
|
||||
}
|
||||
}
|
@ -104,7 +104,7 @@ specific index module:
|
||||
The maximum value of `from + size` for searches to this index. Defaults to
|
||||
`10000`. Search requests take heap memory and time proportional to
|
||||
`from + size` and this limits that memory. See
|
||||
{ref}/search-request-scroll.html[Scroll] for a more efficient alternative
|
||||
<<search-request-scroll,Scroll>> or <<search-request-search-after,Search After>> for a more efficient alternative
|
||||
to raising this.
|
||||
|
||||
`index.blocks.read_only`::
|
||||
|
@ -172,3 +172,5 @@ include::request/min-score.asciidoc[]
|
||||
include::request/named-queries-and-filters.asciidoc[]
|
||||
|
||||
include::request/inner-hits.asciidoc[]
|
||||
|
||||
include::request/search-after.asciidoc[]
|
||||
|
@ -21,5 +21,5 @@ defaults to `10`.
|
||||
--------------------------------------------------
|
||||
|
||||
Note that `from` + `size` can not be more than the `index.max_result_window`
|
||||
index setting which defaults to 10,000. See the <<search-request-scroll,Scroll>>
|
||||
index setting which defaults to 10,000. See the <<search-request-scroll,Scroll>> or <<search-request-search-after,Search After>>
|
||||
API for more efficient ways to do deep scrolling.
|
||||
|
62
docs/reference/search/request/search-after.asciidoc
Normal file
62
docs/reference/search/request/search-after.asciidoc
Normal file
@ -0,0 +1,62 @@
|
||||
[[search-request-search-after]]
|
||||
=== Search After
|
||||
|
||||
Pagination of results can be done by using the `from` and `size` but the cost becomes prohibitive when the deep pagination is reached.
|
||||
The `index.max_result_window` which defaults to 10,000 is a safeguard, search requests take heap memory and time proportional to `from + size`.
|
||||
The <<search-request-scroll,Scroll>> api is recommended for efficient deep scrolling but scroll contexts are costly and it is not
|
||||
recommended to use it for real time user requests.
|
||||
The `search_after` parameter circumvents this problem by providing a live cursor.
|
||||
The idea is to use the results from the previous page to help the retrieval of the next page.
|
||||
|
||||
Suppose that the query to retrieve the first page looks like this:
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
curl -XGET 'localhost:9200/twitter/tweet/_search'
|
||||
{
|
||||
size: "10"
|
||||
"query": {
|
||||
"match" : {
|
||||
"title" : "elasticsearch"
|
||||
}
|
||||
},
|
||||
"sort": [
|
||||
{"age": "asc"},
|
||||
{"_uid": "desc"}
|
||||
]
|
||||
}
|
||||
'
|
||||
--------------------------------------------------
|
||||
|
||||
NOTE: A field with one unique value per document should be used as the tiebreaker of the sort specification.
|
||||
Otherwise the sort order for documents that have the same sort values would be undefined. The recommended way is to use
|
||||
the field `_uid` which is certain to contain one unique value for each document.
|
||||
|
||||
The result from the above request includes an array of `sort values` for each document.
|
||||
These `sort values` can be used in conjunction with the `search_after` parameter to start returning results "after" any
|
||||
document in the result list.
|
||||
For instance we can use the `sort values` of the last document and pass it to `search_after` to retrieve the next page of results:
|
||||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
curl -XGET 'localhost:9200/twitter/tweet/_search'
|
||||
{
|
||||
"size": 10
|
||||
"query": {
|
||||
"match" : {
|
||||
"title" : "elasticsearch"
|
||||
}
|
||||
},
|
||||
"search_after": [18, "tweet#654323"],
|
||||
"sort": [
|
||||
{"age": "asc"},
|
||||
{"_uid": "desc"}
|
||||
]
|
||||
}
|
||||
'
|
||||
--------------------------------------------------
|
||||
|
||||
NOTE: The parameter `from` must be set to 0 (or -1) when `search_after` is used.
|
||||
|
||||
`search_after` is not a solution to jump freely to a random page but rather to scroll many queries in parallel.
|
||||
It is very similar to the `scroll` API but unlike it, the `search_after` parameter is stateless, it is always resolved against the latest
|
||||
version of the searcher. For this reason the sort order may change during a walk depending on the updates and deletes of your index.
|
@ -154,6 +154,10 @@
|
||||
"request_cache": {
|
||||
"type" : "boolean",
|
||||
"description" : "Specify if request cache should be used for this request or not, defaults to index level setting"
|
||||
},
|
||||
"search_after": {
|
||||
"type" : "list",
|
||||
"description" : "An array of sort values that indicates where the sort of the top hits should start"
|
||||
}
|
||||
}
|
||||
},
|
||||
|
@ -0,0 +1,102 @@
|
||||
setup:
|
||||
- do:
|
||||
indices.create:
|
||||
index: test
|
||||
- do:
|
||||
index:
|
||||
index: test
|
||||
type: test
|
||||
id: 1
|
||||
body: { foo: bar, age: 18 }
|
||||
|
||||
- do:
|
||||
index:
|
||||
index: test
|
||||
type: test
|
||||
id: 42
|
||||
body: { foo: bar, age: 18 }
|
||||
|
||||
- do:
|
||||
index:
|
||||
index: test
|
||||
type: test
|
||||
id: 172
|
||||
body: { foo: bar, age: 24 }
|
||||
|
||||
- do:
|
||||
indices.refresh:
|
||||
index: test
|
||||
|
||||
---
|
||||
"search with search_after parameter":
|
||||
|
||||
- do:
|
||||
search:
|
||||
index: test
|
||||
type: test
|
||||
body:
|
||||
size: 1
|
||||
query:
|
||||
match:
|
||||
foo: bar
|
||||
sort: [{ age: desc }, { _uid: desc }]
|
||||
|
||||
- match: {hits.total: 3 }
|
||||
- length: {hits.hits: 1 }
|
||||
- match: {hits.hits.0._index: test }
|
||||
- match: {hits.hits.0._type: test }
|
||||
- match: {hits.hits.0._id: "172" }
|
||||
- match: {hits.hits.0.sort: [24, "test#172"] }
|
||||
|
||||
- do:
|
||||
search:
|
||||
index: test
|
||||
type: test
|
||||
body:
|
||||
size: 1
|
||||
query:
|
||||
match:
|
||||
foo: bar
|
||||
sort: [{ age: desc }, { _uid: desc }]
|
||||
search_after: [24, "test#172"]
|
||||
|
||||
- match: {hits.total: 3 }
|
||||
- length: {hits.hits: 1 }
|
||||
- match: {hits.hits.0._index: test }
|
||||
- match: {hits.hits.0._type: test }
|
||||
- match: {hits.hits.0._id: "42" }
|
||||
- match: {hits.hits.0.sort: [18, "test#42"] }
|
||||
|
||||
- do:
|
||||
search:
|
||||
index: test
|
||||
type: test
|
||||
body:
|
||||
size: 1
|
||||
query:
|
||||
match:
|
||||
foo: bar
|
||||
sort: [ { age: desc }, { _uid: desc } ]
|
||||
search_after: [18, "test#42"]
|
||||
|
||||
- match: {hits.total: 3}
|
||||
- length: {hits.hits: 1 }
|
||||
- match: {hits.hits.0._index: test }
|
||||
- match: {hits.hits.0._type: test }
|
||||
- match: {hits.hits.0._id: "1" }
|
||||
- match: {hits.hits.0.sort: [18, "test#1"] }
|
||||
|
||||
- do:
|
||||
search:
|
||||
index: test
|
||||
type: test
|
||||
body:
|
||||
size: 1
|
||||
query:
|
||||
match:
|
||||
foo: bar
|
||||
sort: [{ age: desc }, { _uid: desc } ]
|
||||
search_after: [18, "test#1"]
|
||||
|
||||
- match: {hits.total: 3}
|
||||
- length: {hits.hits: 0 }
|
@ -20,6 +20,7 @@ package org.elasticsearch.test;
|
||||
|
||||
import com.carrotsearch.hppc.ObjectObjectAssociativeContainer;
|
||||
import org.apache.lucene.search.Collector;
|
||||
import org.apache.lucene.search.FieldDoc;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.search.Sort;
|
||||
import org.apache.lucene.util.Counter;
|
||||
@ -391,6 +392,16 @@ public class TestSearchContext extends SearchContext {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public SearchContext searchAfter(FieldDoc searchAfter) {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public FieldDoc searchAfter() {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public SearchContext parsedPostFilter(ParsedQuery postFilter) {
|
||||
this.postFilter = postFilter;
|
||||
|
Loading…
x
Reference in New Issue
Block a user