Implement top_metrics agg (#51155) (#52366)

The `top_metrics` agg is kind of like `top_hits` but it only works on
doc values so it *should* be faster.

At this point it is fairly limited in that it only supports a single
numeric sort and a single numeric metric, and it only fetches the metric
from the one "very topest" document. We plan to support returning a
configurable number of top metrics, requesting more than one metric and
more than one sort. And, eventually, non-numeric sorts and metrics. The
trick is doing those things fairly efficiently.

Co-authored-by: Zachary Tong <zach@elastic.co>
Nik Everett 2020-02-14 11:19:11 -05:00 committed by GitHub
parent ad56802ac6
commit 146def8caa
60 changed files with 4235 additions and 212 deletions

@ -55,7 +55,9 @@ import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.action.update.UpdateResponse;
import org.elasticsearch.client.analytics.ParsedStringStats;
import org.elasticsearch.client.analytics.ParsedTopMetrics;
import org.elasticsearch.client.analytics.StringStatsAggregationBuilder;
import org.elasticsearch.client.analytics.TopMetricsAggregationBuilder;
import org.elasticsearch.client.core.CountRequest;
import org.elasticsearch.client.core.CountResponse;
import org.elasticsearch.client.core.GetSourceRequest;
@ -1929,6 +1931,7 @@ public class RestHighLevelClient implements Closeable {
map.put(TopHitsAggregationBuilder.NAME, (p, c) -> ParsedTopHits.fromXContent(p, (String) c));
map.put(CompositeAggregationBuilder.NAME, (p, c) -> ParsedComposite.fromXContent(p, (String) c));
map.put(StringStatsAggregationBuilder.NAME, (p, c) -> ParsedStringStats.PARSER.parse(p, (String) c));
map.put(TopMetricsAggregationBuilder.NAME, (p, c) -> ParsedTopMetrics.PARSER.parse(p, (String) c));
List<NamedXContentRegistry.Entry> entries = map.entrySet().stream()
.map(entry -> new NamedXContentRegistry.Entry(Aggregation.class, new ParseField(entry.getKey()), entry.getValue()))
.collect(Collectors.toList());

@ -0,0 +1,134 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.analytics;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParserUtils;
import org.elasticsearch.search.aggregations.ParsedAggregation;
import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
/**
* Results of the {@code top_metrics} aggregation.
*/
public class ParsedTopMetrics extends ParsedAggregation {
private static final ParseField TOP_FIELD = new ParseField("top");
private final List<TopMetrics> topMetrics;
private ParsedTopMetrics(String name, List<TopMetrics> topMetrics) {
setName(name);
this.topMetrics = topMetrics;
}
/**
* The list of top metrics, in sorted order.
*/
public List<TopMetrics> getTopMetrics() {
return topMetrics;
}
@Override
public String getType() {
return TopMetricsAggregationBuilder.NAME;
}
@Override
protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
builder.startArray(TOP_FIELD.getPreferredName());
for (TopMetrics top : topMetrics) {
top.toXContent(builder, params);
}
return builder.endArray();
}
public static final ConstructingObjectParser<ParsedTopMetrics, String> PARSER = new ConstructingObjectParser<>(
TopMetricsAggregationBuilder.NAME, true, (args, name) -> {
@SuppressWarnings("unchecked")
List<TopMetrics> topMetrics = (List<TopMetrics>) args[0];
return new ParsedTopMetrics(name, topMetrics);
});
static {
PARSER.declareObjectArray(constructorArg(), (p, c) -> TopMetrics.PARSER.parse(p, null), TOP_FIELD);
ParsedAggregation.declareAggregationFields(PARSER);
}
/**
* The metrics belonging to the document with the "top" sort key.
*/
public static class TopMetrics implements ToXContent {
private static final ParseField SORT_FIELD = new ParseField("sort");
private static final ParseField METRICS_FIELD = new ParseField("metrics");
private final List<Object> sort;
private final Map<String, Double> metrics;
private TopMetrics(List<Object> sort, Map<String, Double> metrics) {
this.sort = sort;
this.metrics = metrics;
}
/**
* The sort key for these top metrics.
*/
public List<Object> getSort() {
return sort;
}
/**
* The top metric values returned by the aggregation.
*/
public Map<String, Double> getMetrics() {
return metrics;
}
private static final ConstructingObjectParser<TopMetrics, Void> PARSER = new ConstructingObjectParser<>("top", true,
(args, name) -> {
@SuppressWarnings("unchecked")
List<Object> sort = (List<Object>) args[0];
@SuppressWarnings("unchecked")
Map<String, Double> metrics = (Map<String, Double>) args[1];
return new TopMetrics(sort, metrics);
});
static {
PARSER.declareFieldArray(constructorArg(), (p, c) -> XContentParserUtils.parseFieldsValue(p),
SORT_FIELD, ObjectParser.ValueType.VALUE_ARRAY);
PARSER.declareObject(constructorArg(), (p, c) -> p.map(HashMap::new, XContentParser::doubleValue), METRICS_FIELD);
}
public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
builder.startObject();
builder.field(SORT_FIELD.getPreferredName(), sort);
builder.field(METRICS_FIELD.getPreferredName(), metrics);
builder.endObject();
return builder;
};
}
}

@ -0,0 +1,97 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.analytics;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.QueryRewriteContext;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.aggregations.AbstractAggregationBuilder;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.sort.SortBuilder;
import java.io.IOException;
import java.util.Map;
/**
* Builds the Top Metrics aggregation request.
* <p>
* NOTE: This extends {@linkplain AbstractAggregationBuilder} for compatibility
* with {@link SearchSourceBuilder#aggregation(AggregationBuilder)} but it
* doesn't support any "server" side things like
* {@linkplain Writeable#writeTo(StreamOutput)},
* {@linkplain AggregationBuilder#rewrite(QueryRewriteContext)}, or
* {@linkplain AbstractAggregationBuilder#build(QueryShardContext, AggregatorFactory)}.
*/
public class TopMetricsAggregationBuilder extends AbstractAggregationBuilder<TopMetricsAggregationBuilder> {
public static final String NAME = "top_metrics";
private final SortBuilder<?> sort;
private final String metric;
/**
* Build the request.
* @param name the name of the metric
* @param sort the sort key used to select the top metrics
* @param metric the name of the field to select
*/
public TopMetricsAggregationBuilder(String name, SortBuilder<?> sort, String metric) {
super(name);
this.sort = sort;
this.metric = metric;
}
@Override
public String getType() {
return NAME;
}
@Override
protected XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
{
builder.startArray("sort");
sort.toXContent(builder, params);
builder.endArray();
builder.startObject("metric").field("field", metric).endObject();
}
return builder.endObject();
}
@Override
protected void doWriteTo(StreamOutput out) throws IOException {
throw new UnsupportedOperationException();
}
@Override
protected AggregatorFactory doBuild(QueryShardContext queryShardContext, AggregatorFactory parent, Builder subfactoriesBuilder)
throws IOException {
throw new UnsupportedOperationException();
}
@Override
protected AggregationBuilder shallowCopy(Builder factoriesBuilder, Map<String, Object> metaData) {
throw new UnsupportedOperationException();
}
}
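For reference, when it is constructed with a descending FieldSortBuilder on `s` and the metric field `v` (the names used in the integration test further down in this diff), the builder above serializes to roughly this entry under `aggs`. This is a hand-written sketch of the XContent output, not captured client output:

    "test": {
      "top_metrics": {
        "sort": [ {"s": {"order": "desc"}} ],
        "metric": {"field": "v"}
      }
    }

The documentation added by this commit uses the shorthand object form of `sort` (`{"s": "desc"}`); the array form emitted here is the one the integration test sends to the server.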

@ -676,7 +676,11 @@ public class RestHighLevelClientTests extends ESTestCase {
List<NamedXContentRegistry.Entry> namedXContents = RestHighLevelClient.getDefaultNamedXContents();
int expectedInternalAggregations = InternalAggregationTestCase.getDefaultNamedXContents().size();
int expectedSuggestions = 3;
// Explicitly check for metrics from the analytics module because they aren't in InternalAggregationTestCase
assertTrue(namedXContents.removeIf(e -> e.name.getPreferredName().equals("string_stats")));
assertTrue(namedXContents.removeIf(e -> e.name.getPreferredName().equals("top_metrics")));
assertEquals(expectedInternalAggregations + expectedSuggestions, namedXContents.size());
Map<Class<?>, Integer> categories = new HashMap<>();
for (NamedXContentRegistry.Entry namedXContent : namedXContents) {

@ -27,16 +27,21 @@ import org.elasticsearch.action.support.WriteRequest.RefreshPolicy;
import org.elasticsearch.client.ESRestHighLevelClientTestCase;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.search.sort.FieldSortBuilder;
import org.elasticsearch.search.sort.SortOrder;
import java.io.IOException;
import static java.util.Collections.singletonList;
import static java.util.Collections.singletonMap;
import static org.hamcrest.Matchers.aMapWithSize;
import static org.hamcrest.Matchers.closeTo;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasEntry;
import static org.hamcrest.Matchers.hasSize;
public class AnalyticsAggsIT extends ESRestHighLevelClientTestCase {
public void testBasic() throws IOException {
public void testStringStats() throws IOException {
BulkRequest bulk = new BulkRequest("test").setRefreshPolicy(RefreshPolicy.IMMEDIATE);
bulk.add(new IndexRequest().source(XContentType.JSON, "message", "trying out elasticsearch"));
bulk.add(new IndexRequest().source(XContentType.JSON, "message", "more words"));
@ -55,4 +60,20 @@ public class AnalyticsAggsIT extends ESRestHighLevelClientTestCase {
assertThat(stats.getDistribution(), hasEntry(equalTo("r"), closeTo(.12, .005)));
assertThat(stats.getDistribution(), hasEntry(equalTo("t"), closeTo(.09, .005)));
}
public void testBasic() throws IOException {
BulkRequest bulk = new BulkRequest("test").setRefreshPolicy(RefreshPolicy.IMMEDIATE);
bulk.add(new IndexRequest().source(XContentType.JSON, "s", 1, "v", 2));
bulk.add(new IndexRequest().source(XContentType.JSON, "s", 2, "v", 3));
highLevelClient().bulk(bulk, RequestOptions.DEFAULT);
SearchRequest search = new SearchRequest("test");
search.source().aggregation(new TopMetricsAggregationBuilder(
"test", new FieldSortBuilder("s").order(SortOrder.DESC), "v"));
SearchResponse response = highLevelClient().search(search, RequestOptions.DEFAULT);
ParsedTopMetrics top = response.getAggregations().get("test");
assertThat(top.getTopMetrics(), hasSize(1));
ParsedTopMetrics.TopMetrics metric = top.getTopMetrics().get(0);
assertThat(metric.getSort(), equalTo(singletonList(2)));
assertThat(metric.getMetrics(), equalTo(singletonMap("v", 3.0)));
}
}

@ -25,6 +25,7 @@ This page lists all the available aggregations with their corresponding `Aggrega
| {ref}/search-aggregations-metrics-stats-aggregation.html[Stats] | {agg-ref}/metrics/stats/StatsAggregationBuilder.html[StatsAggregationBuilder] | {agg-ref}/AggregationBuilders.html#stats-java.lang.String-[AggregationBuilders.stats()]
| {ref}/search-aggregations-metrics-sum-aggregation.html[Sum] | {agg-ref}/metrics/sum/SumAggregationBuilder.html[SumAggregationBuilder] | {agg-ref}/AggregationBuilders.html#sum-java.lang.String-[AggregationBuilders.sum()]
| {ref}/search-aggregations-metrics-top-hits-aggregation.html[Top hits] | {agg-ref}/metrics/tophits/TopHitsAggregationBuilder.html[TopHitsAggregationBuilder] | {agg-ref}/AggregationBuilders.html#topHits-java.lang.String-[AggregationBuilders.topHits()]
| {ref}/search-aggregations-metrics-top-metrics.html[Top Metrics] | {javadoc-client}/analytics/TopMetricsAggregationBuilder.html[TopMetricsAggregationBuilder] | None
| {ref}/search-aggregations-metrics-valuecount-aggregation.html[Value Count] | {agg-ref}/metrics/valuecount/ValueCountAggregationBuilder.html[ValueCountAggregationBuilder] | {agg-ref}/AggregationBuilders.html#count-java.lang.String-[AggregationBuilders.count()]
| {ref}/search-aggregations-metrics-string-stats-aggregation.html[String Stats] | {javadoc-client}/analytics/StringStatsAggregationBuilder.html[StringStatsAggregationBuilder] | None
|======

@ -41,6 +41,8 @@ include::metrics/sum-aggregation.asciidoc[]
include::metrics/tophits-aggregation.asciidoc[]
include::metrics/top-metrics-aggregation.asciidoc[]
include::metrics/valuecount-aggregation.asciidoc[]
include::metrics/median-absolute-deviation-aggregation.asciidoc[]

@ -0,0 +1,284 @@
[role="xpack"]
[testenv="basic"]
[[search-aggregations-metrics-top-metrics]]
=== Top Metrics Aggregation
experimental[We expect to change the response format of this aggregation as we add more features., https://github.com/elastic/elasticsearch/issues/51813]
The `top_metrics` aggregation selects metrics from the document with the largest or smallest "sort"
value. For example, this gets the value of the `v` field on the document with the largest value of `s`:
[source,console,id=search-aggregations-metrics-top-metrics-simple]
----
POST /test/_bulk?refresh
{"index": {}}
{"s": 1, "v": 3.1415}
{"index": {}}
{"s": 2, "v": 1}
{"index": {}}
{"s": 3, "v": 2.71828}
POST /test/_search?filter_path=aggregations
{
"aggs": {
"tm": {
"top_metrics": {
"metric": {"field": "v"},
"sort": {"s": "desc"}
}
}
}
}
----
Which returns:
[source,js]
----
{
"aggregations": {
"tm": {
"top": [ {"sort": [3], "metrics": {"v": 2.718280076980591 } } ]
}
}
}
----
// TESTRESPONSE
`top_metrics` is fairly similar to <<search-aggregations-metrics-top-hits-aggregation, `top_hits`>>
in spirit but because it is more limited it is able to do its job using less memory and is often
faster.
==== `sort`
The `sort` field in the metric request functions exactly the same as the `sort` field in the
<<request-body-search-sort, search>> request except:
* It can't be used on <<binary,binary>>, <<flattened,flattened>>, <<ip,ip>>,
<<keyword,keyword>>, or <<text,text>> fields.
* It only supports a single sort value.
The metrics that the aggregation returns are taken from the first hit that would be returned by the search
request. So,
`"sort": {"s": "desc"}`:: gets metrics from the document with the highest `s`
`"sort": {"s": "asc"}`:: gets the metrics from the document with the lowest `s`
`"sort": {"_geo_distance": {"location": "35.7796, -78.6382"}}`::
gets metrics from the documents with `location` *closest* to `35.7796, -78.6382` (see the sketch below)
`"sort": "_score"`:: gets metrics from the document with the highest score
NOTE: This aggregation doesn't support any sort of "tie breaking". If two documents have
the same sort values then this aggregation could return either document's fields.
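To illustrate the `_geo_distance` form above, here is a minimal sketch. The `geo_example` index, its mapping, and the sample documents are purely illustrative and are not part of this commit's tested examples:

[source,console]
----
PUT /geo_example
{
  "mappings": {
    "properties": {
      "location": {"type": "geo_point"}
    }
  }
}

POST /geo_example/_bulk?refresh
{"index": {}}
{"location": "35.7796,-78.6382", "v": 1.0}
{"index": {}}
{"location": "40.7128,-74.0060", "v": 2.0}

POST /geo_example/_search?filter_path=aggregations
{
  "aggs": {
    "tm": {
      "top_metrics": {
        "metric": {"field": "v"},
        "sort": {"_geo_distance": {"location": "35.7796, -78.6382"}}
      }
    }
  }
}
----

This should return the metrics from the first document because its `location` is closest to `35.7796, -78.6382`.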
==== `metric`
At this point `metric` supports only `{"field": "field_name"}` and all metrics
are returned as double precision floating point numbers. Expect more to
come here.
==== Examples
===== Use with terms
This aggregation should be quite useful inside of the <<search-aggregations-bucket-terms-aggregation, `terms`>>
aggregation, to, say, find the last value reported by each server.
[source,console,id=search-aggregations-metrics-top-metrics-terms]
----
PUT /node
{
"mappings": {
"properties": {
"ip": {"type": "ip"},
"date": {"type": "date"}
}
}
}
POST /node/_bulk?refresh
{"index": {}}
{"ip": "192.168.0.1", "date": "2020-01-01T01:01:01", "v": 1}
{"index": {}}
{"ip": "192.168.0.1", "date": "2020-01-01T02:01:01", "v": 2}
{"index": {}}
{"ip": "192.168.0.2", "date": "2020-01-01T02:01:01", "v": 3}
POST /node/_search?filter_path=aggregations
{
"aggs": {
"ip": {
"terms": {
"field": "ip"
},
"aggs": {
"tm": {
"top_metrics": {
"metric": {"field": "v"},
"sort": {"date": "desc"}
}
}
}
}
}
}
----
Which returns:
[source,js]
----
{
"aggregations": {
"ip": {
"buckets": [
{
"key": "192.168.0.1",
"doc_count": 2,
"tm": {
"top": [ {"sort": ["2020-01-01T02:01:01.000Z"], "metrics": {"v": 2.0 } } ]
}
},
{
"key": "192.168.0.2",
"doc_count": 1,
"tm": {
"top": [ {"sort": ["2020-01-01T02:01:01.000Z"], "metrics": {"v": 3.0 } } ]
}
}
],
"doc_count_error_upper_bound": 0,
"sum_other_doc_count": 0
}
}
}
----
// TESTRESPONSE
Unlike `top_hits`, you can sort buckets by the results of this metric:
[source,console]
----
POST /node/_search?filter_path=aggregations
{
"aggs": {
"ip": {
"terms": {
"field": "ip",
"order": {"tm.v": "desc"}
},
"aggs": {
"tm": {
"top_metrics": {
"metric": {"field": "v"},
"sort": {"date": "desc"}
}
}
}
}
}
}
----
// TEST[continued]
Which returns:
[source,js]
----
{
"aggregations": {
"ip": {
"buckets": [
{
"key": "192.168.0.2",
"doc_count": 1,
"tm": {
"top": [ {"sort": ["2020-01-01T02:01:01.000Z"], "metrics": {"v": 3.0 } } ]
}
},
{
"key": "192.168.0.1",
"doc_count": 2,
"tm": {
"top": [ {"sort": ["2020-01-01T02:01:01.000Z"], "metrics": {"v": 2.0 } } ]
}
}
],
"doc_count_error_upper_bound": 0,
"sum_other_doc_count": 0
}
}
}
----
// TESTRESPONSE
===== Mixed sort types
Sorting `top_metrics` by a field that has different types across different
indices produces somewhat surprising results: floating point fields are
always sorted independently of whole number fields.
[source,console,id=search-aggregations-metrics-top-metrics-mixed-sort]
----
POST /test/_bulk?refresh
{"index": {"_index": "test1"}}
{"s": 1, "v": 3.1415}
{"index": {"_index": "test1"}}
{"s": 2, "v": 1}
{"index": {"_index": "test2"}}
{"s": 3.1, "v": 2.71828}
POST /test*/_search?filter_path=aggregations
{
"aggs": {
"tm": {
"top_metrics": {
"metric": {"field": "v"},
"sort": {"s": "asc"}
}
}
}
}
----
Which returns:
[source,js]
----
{
"aggregations": {
"tm": {
"top": [ {"sort": [3.0999999046325684], "metrics": {"v": 2.718280076980591 } } ]
}
}
}
----
// TESTRESPONSE
While this is better than an error, it *probably* isn't what you were going for.
At the cost of some precision, you can explicitly cast the whole number
fields to floating point with something like:
[source,console]
----
POST /test*/_search?filter_path=aggregations
{
"aggs": {
"tm": {
"top_metrics": {
"metric": {"field": "v"},
"sort": {"s": {"order": "asc", "numeric_type": "double"}}
}
}
}
}
----
// TEST[continued]
Which returns the much more expected:
[source,js]
----
{
"aggregations": {
"tm": {
"top": [ {"sort": [1.0], "metrics": {"v": 3.1414999961853027 } } ]
}
}
}
----
// TESTRESPONSE

@ -36,6 +36,7 @@ import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.Token;
@ -59,6 +60,8 @@ import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.MultiValueMode;
import org.elasticsearch.search.sort.BucketedSort;
import org.elasticsearch.search.sort.SortOrder;
import java.io.IOException;
import java.math.BigDecimal;
@ -522,6 +525,12 @@ public class ScaledFloatFieldMapper extends FieldMapper {
return new SortField(getFieldName(), source, reverse);
}
@Override
public BucketedSort newBucketedSort(BigArrays bigArrays, Object missingValue, MultiValueMode sortMode, Nested nested,
SortOrder sortOrder, DocValueFormat format) {
return new DoubleValuesComparatorSource(this, missingValue, sortMode, nested).newBucketedSort(bigArrays, sortOrder, format);
}
@Override
public void clear() {
scaledFieldData.clear();

@ -35,14 +35,18 @@ import org.apache.lucene.util.BitDocIdSet;
import org.apache.lucene.util.BitSet;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.index.IndexComponent;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.MultiValueMode;
import org.elasticsearch.search.sort.BucketedSort;
import org.elasticsearch.search.sort.NestedSortBuilder;
import org.elasticsearch.search.sort.SortOrder;
import java.io.IOException;
@ -72,6 +76,12 @@ public interface IndexFieldData<FD extends AtomicFieldData> extends IndexCompone
*/
SortField sortField(@Nullable Object missingValue, MultiValueMode sortMode, Nested nested, boolean reverse);
/**
* Build a sort implementation specialized for aggregations.
*/
BucketedSort newBucketedSort(BigArrays bigArrays, @Nullable Object missingValue, MultiValueMode sortMode,
Nested nested, SortOrder sortOrder, DocValueFormat format);
/**
* Clears any resources associated with this field data.
*/
@ -227,6 +237,11 @@ public interface IndexFieldData<FD extends AtomicFieldData> extends IndexCompone
public Object missingValue(boolean reversed) {
return null;
}
/**
* Create a {@linkplain BucketedSort} which is useful for sorting inside of aggregations.
*/
public abstract BucketedSort newBucketedSort(BigArrays bigArrays, SortOrder sortOrder, DocValueFormat format);
}
interface Builder {
@ -242,5 +257,4 @@ public interface IndexFieldData<FD extends AtomicFieldData> extends IndexCompone
IndexFieldData<FD> localGlobalDirect(DirectoryReader indexReader) throws Exception;
}
}

@ -29,11 +29,15 @@ import org.apache.lucene.search.Scorable;
import org.apache.lucene.search.SortField;
import org.apache.lucene.util.BitSet;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.index.fielddata.AbstractSortedDocValues;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexOrdinalsFieldData;
import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.MultiValueMode;
import org.elasticsearch.search.sort.BucketedSort;
import org.elasticsearch.search.sort.SortOrder;
import java.io.IOException;
@ -135,6 +139,11 @@ public class BytesRefFieldComparatorSource extends IndexFieldData.XFieldComparat
};
}
@Override
public BucketedSort newBucketedSort(BigArrays bigArrays, SortOrder sortOrder, DocValueFormat format) {
throw new IllegalArgumentException("only supported on numeric fields");
}
/**
* A view of a SortedDocValues where missing values
* are replaced with the specified term

@ -27,12 +27,16 @@ import org.apache.lucene.search.Scorable;
import org.apache.lucene.search.SortField;
import org.apache.lucene.util.BitSet;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.index.fielddata.FieldData;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.fielddata.NumericDoubleValues;
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.MultiValueMode;
import org.elasticsearch.search.sort.BucketedSort;
import org.elasticsearch.search.sort.SortOrder;
import java.io.IOException;
@ -58,6 +62,18 @@ public class DoubleValuesComparatorSource extends IndexFieldData.XFieldComparato
return indexFieldData.load(context).getDoubleValues();
}
private NumericDoubleValues getNumericDocValues(LeafReaderContext context, double missingValue) throws IOException {
final SortedNumericDoubleValues values = getValues(context);
if (nested == null) {
return FieldData.replaceMissing(sortMode.select(values), missingValue);
} else {
final BitSet rootDocs = nested.rootDocs(context);
final DocIdSetIterator innerDocs = nested.innerDocs(context);
final int maxChildren = nested.getNestedSort() != null ? nested.getNestedSort().getMaxChildren() : Integer.MAX_VALUE;
return sortMode.select(values, missingValue, rootDocs, innerDocs, context.reader().maxDoc(), maxChildren);
}
}
protected void setScorer(Scorable scorer) {}
@Override
@ -70,17 +86,7 @@ public class DoubleValuesComparatorSource extends IndexFieldData.XFieldComparato
return new FieldComparator.DoubleComparator(numHits, null, null) {
@Override
protected NumericDocValues getNumericDocValues(LeafReaderContext context, String field) throws IOException {
final SortedNumericDoubleValues values = getValues(context);
final NumericDoubleValues selectedValues;
if (nested == null) {
selectedValues = FieldData.replaceMissing(sortMode.select(values), dMissingValue);
} else {
final BitSet rootDocs = nested.rootDocs(context);
final DocIdSetIterator innerDocs = nested.innerDocs(context);
final int maxChildren = nested.getNestedSort() != null ? nested.getNestedSort().getMaxChildren() : Integer.MAX_VALUE;
selectedValues = sortMode.select(values, dMissingValue, rootDocs, innerDocs, context.reader().maxDoc(), maxChildren);
}
return selectedValues.getRawDoubleValues();
return DoubleValuesComparatorSource.this.getNumericDocValues(context, dMissingValue).getRawDoubleValues();
}
@Override
public void setScorer(Scorable scorer) {
@ -88,4 +94,28 @@ public class DoubleValuesComparatorSource extends IndexFieldData.XFieldComparato
}
};
}
@Override
public BucketedSort newBucketedSort(BigArrays bigArrays, SortOrder sortOrder, DocValueFormat format) {
return new BucketedSort.ForDoubles(bigArrays, sortOrder, format) {
private final double dMissingValue = (Double) missingObject(missingValue, sortOrder == SortOrder.DESC);
@Override
public Leaf forLeaf(LeafReaderContext ctx) throws IOException {
return new Leaf() {
private final NumericDoubleValues values = getNumericDocValues(ctx, dMissingValue);
@Override
protected boolean advanceExact(int doc) throws IOException {
return values.advanceExact(doc);
}
@Override
protected double docValue() throws IOException {
return values.doubleValue();
}
};
}
};
}
}

@ -22,15 +22,20 @@ import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.FieldComparator;
import org.apache.lucene.search.Scorable;
import org.apache.lucene.search.SortField;
import org.apache.lucene.util.BitSet;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.index.fielddata.FieldData;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.fielddata.NumericDoubleValues;
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.MultiValueMode;
import org.elasticsearch.search.sort.BucketedSort;
import org.elasticsearch.search.sort.SortOrder;
import java.io.IOException;
@ -52,27 +57,59 @@ public class FloatValuesComparatorSource extends IndexFieldData.XFieldComparator
return SortField.Type.FLOAT;
}
private NumericDoubleValues getNumericDocValues(LeafReaderContext context, float missingValue) throws IOException {
final SortedNumericDoubleValues values = indexFieldData.load(context).getDoubleValues();
if (nested == null) {
return FieldData.replaceMissing(sortMode.select(values), missingValue);
} else {
final BitSet rootDocs = nested.rootDocs(context);
final DocIdSetIterator innerDocs = nested.innerDocs(context);
final int maxChildren = nested.getNestedSort() != null ? nested.getNestedSort().getMaxChildren() : Integer.MAX_VALUE;
return sortMode.select(values, missingValue, rootDocs, innerDocs, context.reader().maxDoc(), maxChildren);
}
}
@Override
public FieldComparator<?> newComparator(String fieldname, int numHits, int sortPos, boolean reversed) {
assert indexFieldData == null || fieldname.equals(indexFieldData.getFieldName());
final float dMissingValue = (Float) missingObject(missingValue, reversed);
final float fMissingValue = (Float) missingObject(missingValue, reversed);
// NOTE: it's important to pass null as a missing value in the constructor so that
// the comparator doesn't check docsWithField since we replace missing values in select()
return new FieldComparator.FloatComparator(numHits, null, null) {
@Override
protected NumericDocValues getNumericDocValues(LeafReaderContext context, String field) throws IOException {
final SortedNumericDoubleValues values = indexFieldData.load(context).getDoubleValues();
final NumericDoubleValues selectedValues;
if (nested == null) {
selectedValues = FieldData.replaceMissing(sortMode.select(values), dMissingValue);
} else {
final BitSet rootDocs = nested.rootDocs(context);
final DocIdSetIterator innerDocs = nested.innerDocs(context);
final int maxChildren = nested.getNestedSort() != null ? nested.getNestedSort().getMaxChildren() : Integer.MAX_VALUE;
selectedValues = sortMode.select(values, dMissingValue, rootDocs, innerDocs, context.reader().maxDoc(), maxChildren);
}
return selectedValues.getRawFloatValues();
return FloatValuesComparatorSource.this.getNumericDocValues(context, fMissingValue).getRawFloatValues();
}
};
}
@Override
public BucketedSort newBucketedSort(BigArrays bigArrays, SortOrder sortOrder, DocValueFormat format) {
return new BucketedSort.ForFloats(bigArrays, sortOrder, format) {
private final float dMissingValue = (Float) missingObject(missingValue, sortOrder == SortOrder.DESC);
@Override
public boolean needsScores() { return false; }
@Override
public Leaf forLeaf(LeafReaderContext ctx) throws IOException {
return new Leaf() {
private final NumericDoubleValues values = getNumericDocValues(ctx, dMissingValue);
@Override
public void setScorer(Scorable scorer) {}
@Override
protected boolean advanceExact(int doc) throws IOException {
return values.advanceExact(doc);
}
@Override
protected float docValue() throws IOException {
return (float) values.doubleValue();
}
};
}
};
}

@ -26,12 +26,16 @@ import org.apache.lucene.search.FieldComparator;
import org.apache.lucene.search.SortField;
import org.apache.lucene.util.BitSet;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.index.fielddata.AtomicNumericFieldData;
import org.elasticsearch.index.fielddata.FieldData;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.fielddata.plain.SortedNumericDVIndexFieldData;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.MultiValueMode;
import org.elasticsearch.search.sort.BucketedSort;
import org.elasticsearch.search.sort.SortOrder;
import java.io.IOException;
import java.util.function.Function;
@ -72,30 +76,54 @@ public class LongValuesComparatorSource extends IndexFieldData.XFieldComparatorS
}
return converter != null ? converter.apply(values) : values;
}
private NumericDocValues getNumericDocValues(LeafReaderContext context, long missingValue) throws IOException {
final SortedNumericDocValues values = loadDocValues(context);
if (nested == null) {
return FieldData.replaceMissing(sortMode.select(values), missingValue);
}
final BitSet rootDocs = nested.rootDocs(context);
final DocIdSetIterator innerDocs = nested.innerDocs(context);
final int maxChildren = nested.getNestedSort() != null ? nested.getNestedSort().getMaxChildren() : Integer.MAX_VALUE;
return sortMode.select(values, missingValue, rootDocs, innerDocs, context.reader().maxDoc(), maxChildren);
}
@Override
public FieldComparator<?> newComparator(String fieldname, int numHits, int sortPos, boolean reversed) {
assert indexFieldData == null || fieldname.equals(indexFieldData.getFieldName());
final Long dMissingValue = (Long) missingObject(missingValue, reversed);
final long lMissingValue = (Long) missingObject(missingValue, reversed);
// NOTE: it's important to pass null as a missing value in the constructor so that
// the comparator doesn't check docsWithField since we replace missing values in select()
return new FieldComparator.LongComparator(numHits, null, null) {
@Override
protected NumericDocValues getNumericDocValues(LeafReaderContext context, String field) throws IOException {
final SortedNumericDocValues values = loadDocValues(context);
final NumericDocValues selectedValues;
if (nested == null) {
selectedValues = FieldData.replaceMissing(sortMode.select(values), dMissingValue);
} else {
final BitSet rootDocs = nested.rootDocs(context);
final DocIdSetIterator innerDocs = nested.innerDocs(context);
final int maxChildren = nested.getNestedSort() != null ? nested.getNestedSort().getMaxChildren() : Integer.MAX_VALUE;
selectedValues = sortMode.select(values, dMissingValue, rootDocs, innerDocs, context.reader().maxDoc(), maxChildren);
}
return selectedValues;
return LongValuesComparatorSource.this.getNumericDocValues(context, lMissingValue);
}
};
}
@Override
public BucketedSort newBucketedSort(BigArrays bigArrays, SortOrder sortOrder, DocValueFormat format) {
return new BucketedSort.ForLongs(bigArrays, sortOrder, format) {
private final long lMissingValue = (Long) missingObject(missingValue, sortOrder == SortOrder.DESC);
@Override
public Leaf forLeaf(LeafReaderContext ctx) throws IOException {
return new Leaf() {
private final NumericDocValues values = getNumericDocValues(ctx, lMissingValue);
@Override
protected boolean advanceExact(int doc) throws IOException {
return values.advanceExact(doc);
}
@Override
protected long docValue() throws IOException {
return values.longValue();
}
};
}
};
}
}

@ -26,6 +26,7 @@ import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.search.SortField;
import org.apache.lucene.util.Accountable;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.index.AbstractIndexComponent;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.fielddata.AtomicOrdinalsFieldData;
@ -33,7 +34,10 @@ import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.N
import org.elasticsearch.index.fielddata.IndexOrdinalsFieldData;
import org.elasticsearch.index.fielddata.ScriptDocValues;
import org.elasticsearch.index.fielddata.plain.AbstractAtomicOrdinalsFieldData;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.MultiValueMode;
import org.elasticsearch.search.sort.BucketedSort;
import org.elasticsearch.search.sort.SortOrder;
import java.io.IOException;
import java.io.UncheckedIOException;
@ -102,6 +106,12 @@ public final class GlobalOrdinalsIndexFieldData extends AbstractIndexComponent i
throw new UnsupportedOperationException("no global ordinals sorting yet");
}
@Override
public BucketedSort newBucketedSort(BigArrays bigArrays, Object missingValue, MultiValueMode sortMode, Nested nested,
SortOrder sortOrder, DocValueFormat format) {
throw new IllegalArgumentException("only supported on numeric fields");
}
@Override
public void clear() {}
@ -186,6 +196,12 @@ public final class GlobalOrdinalsIndexFieldData extends AbstractIndexComponent i
throw new UnsupportedOperationException("no global ordinals sorting yet");
}
@Override
public BucketedSort newBucketedSort(BigArrays bigArrays, Object missingValue, MultiValueMode sortMode, Nested nested,
SortOrder sortOrder, DocValueFormat format) {
throw new IllegalArgumentException("only supported on numeric fields");
}
@Override
public void clear() {}

@ -25,16 +25,21 @@ import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.SortField;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.fielddata.AtomicGeoPointFieldData;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.MultiValueMode;
import org.elasticsearch.search.sort.BucketedSort;
import org.elasticsearch.search.sort.SortOrder;
public abstract class AbstractLatLonPointDVIndexFieldData extends DocValuesIndexFieldData
implements IndexGeoPointFieldData {
@ -48,6 +53,12 @@ public abstract class AbstractLatLonPointDVIndexFieldData extends DocValuesIndex
throw new IllegalArgumentException("can't sort on geo_point field without using specific sorting feature, like geo_distance");
}
@Override
public BucketedSort newBucketedSort(BigArrays bigArrays, Object missingValue, MultiValueMode sortMode, Nested nested,
SortOrder sortOrder, DocValueFormat format) {
throw new IllegalArgumentException("can't sort on geo_point field without using specific sorting feature, like geo_distance");
}
public static class LatLonPointDVIndexFieldData extends AbstractLatLonPointDVIndexFieldData {
public LatLonPointDVIndexFieldData(Index index, String fieldName) {
super(index, fieldName);

@ -24,10 +24,15 @@ import org.apache.lucene.search.SortField;
import org.apache.lucene.search.SortedSetSortField;
import org.apache.lucene.search.SortedSetSelector;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.fielddata.fieldcomparator.BytesRefFieldComparatorSource;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.MultiValueMode;
import org.elasticsearch.search.sort.BucketedSort;
import org.elasticsearch.search.sort.SortOrder;
public class BinaryDVIndexFieldData extends DocValuesIndexFieldData implements IndexFieldData<BinaryDVAtomicFieldData> {
@ -64,4 +69,10 @@ public class BinaryDVIndexFieldData extends DocValuesIndexFieldData implements I
SortedSetSortField.STRING_LAST : SortedSetSortField.STRING_FIRST);
return sortField;
}
@Override
public BucketedSort newBucketedSort(BigArrays bigArrays, Object missingValue, MultiValueMode sortMode, Nested nested,
SortOrder sortOrder, DocValueFormat format) {
throw new IllegalArgumentException("only supported on numeric fields");
}
}

@ -23,6 +23,7 @@ import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.SortField;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.fielddata.IndexFieldData;
@ -31,7 +32,10 @@ import org.elasticsearch.index.fielddata.IndexFieldDataCache;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.MultiValueMode;
import org.elasticsearch.search.sort.BucketedSort;
import org.elasticsearch.search.sort.SortOrder;
import java.io.IOException;
@ -46,6 +50,12 @@ public class BytesBinaryDVIndexFieldData extends DocValuesIndexFieldData impleme
throw new IllegalArgumentException("can't sort on binary field");
}
@Override
public BucketedSort newBucketedSort(BigArrays bigArrays, Object missingValue, MultiValueMode sortMode, Nested nested,
SortOrder sortOrder, DocValueFormat format) {
throw new IllegalArgumentException("can't sort on binary field");
}
@Override
public BytesBinaryDVAtomicFieldData load(LeafReaderContext context) {
try {

@ -28,18 +28,23 @@ import org.apache.lucene.search.SortField;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.fielddata.AbstractSortedDocValues;
import org.elasticsearch.index.fielddata.AtomicOrdinalsFieldData;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
import org.elasticsearch.index.fielddata.IndexOrdinalsFieldData;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.fielddata.fieldcomparator.BytesRefFieldComparatorSource;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.TextFieldMapper;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.MultiValueMode;
import org.elasticsearch.search.sort.BucketedSort;
import org.elasticsearch.search.sort.SortOrder;
import java.io.IOException;
import java.util.Collection;
@ -158,6 +163,12 @@ public class ConstantIndexFieldData extends AbstractIndexOrdinalsFieldData {
return new SortField(getFieldName(), source, reverse);
}
@Override
public BucketedSort newBucketedSort(BigArrays bigArrays, Object missingValue, MultiValueMode sortMode, Nested nested,
SortOrder sortOrder, DocValueFormat format) {
throw new IllegalArgumentException("only supported on numeric fields");
}
@Override
public IndexOrdinalsFieldData loadGlobal(DirectoryReader indexReader) {
return this;

@ -33,19 +33,24 @@ import org.apache.lucene.util.packed.PackedInts;
import org.apache.lucene.util.packed.PackedLongValues;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.breaker.CircuitBreaker;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.fielddata.AtomicOrdinalsFieldData;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
import org.elasticsearch.index.fielddata.IndexOrdinalsFieldData;
import org.elasticsearch.index.fielddata.RamAccountingTermsEnum;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.fielddata.fieldcomparator.BytesRefFieldComparatorSource;
import org.elasticsearch.index.fielddata.ordinals.Ordinals;
import org.elasticsearch.index.fielddata.ordinals.OrdinalsBuilder;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.MultiValueMode;
import org.elasticsearch.search.sort.BucketedSort;
import org.elasticsearch.search.sort.SortOrder;
import java.io.IOException;
@ -84,6 +89,12 @@ public class PagedBytesIndexFieldData extends AbstractIndexOrdinalsFieldData {
return new SortField(getFieldName(), source, reverse);
}
@Override
public BucketedSort newBucketedSort(BigArrays bigArrays, Object missingValue, MultiValueMode sortMode, Nested nested,
SortOrder sortOrder, DocValueFormat format) {
throw new IllegalArgumentException("only supported on numeric fields");
}
@Override
public AtomicOrdinalsFieldData loadDirect(LeafReaderContext context) throws Exception {
LeafReader reader = context.reader();

@ -31,7 +31,9 @@ import org.apache.lucene.search.SortedNumericSelector;
import org.apache.lucene.search.SortedNumericSortField;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.NumericUtils;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.time.DateUtils;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.fielddata.AbstractSortedNumericDocValues;
import org.elasticsearch.index.fielddata.AtomicNumericFieldData;
@ -43,7 +45,10 @@ import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
import org.elasticsearch.index.fielddata.fieldcomparator.DoubleValuesComparatorSource;
import org.elasticsearch.index.fielddata.fieldcomparator.FloatValuesComparatorSource;
import org.elasticsearch.index.fielddata.fieldcomparator.LongValuesComparatorSource;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.MultiValueMode;
import org.elasticsearch.search.sort.BucketedSort;
import org.elasticsearch.search.sort.SortOrder;
import java.io.IOException;
import java.util.Collection;
@ -72,42 +77,7 @@ public class SortedNumericDVIndexFieldData extends DocValuesIndexFieldData imple
*/
public SortField sortField(NumericType targetNumericType, Object missingValue, MultiValueMode sortMode,
Nested nested, boolean reverse) {
final XFieldComparatorSource source;
switch (targetNumericType) {
case HALF_FLOAT:
case FLOAT:
source = new FloatValuesComparatorSource(this, missingValue, sortMode, nested);
break;
case DOUBLE:
source = new DoubleValuesComparatorSource(this, missingValue, sortMode, nested);
break;
case DATE:
if (numericType == NumericType.DATE_NANOSECONDS) {
// converts date values to nanosecond resolution
source = new LongValuesComparatorSource(this, missingValue,
sortMode, nested, dvs -> convertNanosToMillis(dvs));
} else {
source = new LongValuesComparatorSource(this, missingValue, sortMode, nested);
}
break;
case DATE_NANOSECONDS:
if (numericType == NumericType.DATE) {
// converts date_nanos values to millisecond resolution
source = new LongValuesComparatorSource(this, missingValue,
sortMode, nested, dvs -> convertMillisToNanos(dvs));
} else {
source = new LongValuesComparatorSource(this, missingValue, sortMode, nested);
}
break;
default:
assert !targetNumericType.isFloatingPoint();
source = new LongValuesComparatorSource(this, missingValue, sortMode, nested);
break;
}
final XFieldComparatorSource source = comparatorSource(targetNumericType, missingValue, sortMode, nested);
/**
* Check if we can use a simple {@link SortedNumericSortField} compatible with index sorting and
@ -146,6 +116,49 @@ public class SortedNumericDVIndexFieldData extends DocValuesIndexFieldData imple
return sortField(numericType, missingValue, sortMode, nested, reverse);
}
/**
* Builds a {@linkplain BucketedSort} for the {@code targetNumericType},
* casting the values if their native type doesn't match.
*/
public BucketedSort newBucketedSort(NumericType targetNumericType, BigArrays bigArrays, @Nullable Object missingValue,
MultiValueMode sortMode, Nested nested, SortOrder sortOrder, DocValueFormat format) {
return comparatorSource(targetNumericType, missingValue, sortMode, nested).newBucketedSort(bigArrays, sortOrder, format);
}
@Override
public BucketedSort newBucketedSort(BigArrays bigArrays, @Nullable Object missingValue, MultiValueMode sortMode, Nested nested,
SortOrder sortOrder, DocValueFormat format) {
return newBucketedSort(numericType, bigArrays, missingValue, sortMode, nested, sortOrder, format);
}
private XFieldComparatorSource comparatorSource(NumericType targetNumericType, @Nullable Object missingValue, MultiValueMode sortMode,
Nested nested) {
switch (targetNumericType) {
case HALF_FLOAT:
case FLOAT:
return new FloatValuesComparatorSource(this, missingValue, sortMode, nested);
case DOUBLE:
return new DoubleValuesComparatorSource(this, missingValue, sortMode, nested);
case DATE:
if (numericType == NumericType.DATE_NANOSECONDS) {
// converts date values to nanosecond resolution
return new LongValuesComparatorSource(this, missingValue,
sortMode, nested, dvs -> convertNanosToMillis(dvs));
}
return new LongValuesComparatorSource(this, missingValue, sortMode, nested);
case DATE_NANOSECONDS:
if (numericType == NumericType.DATE) {
// converts date_nanos values to millisecond resolution
return new LongValuesComparatorSource(this, missingValue,
sortMode, nested, dvs -> convertMillisToNanos(dvs));
}
return new LongValuesComparatorSource(this, missingValue, sortMode, nested);
default:
assert !targetNumericType.isFloatingPoint();
return new LongValuesComparatorSource(this, missingValue, sortMode, nested);
}
}
@Override
public NumericType getNumericType() {
return numericType;

@ -30,6 +30,7 @@ import org.apache.lucene.search.SortedSetSelector;
import org.apache.lucene.search.SortedSetSortField;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.fielddata.AtomicOrdinalsFieldData;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
@ -40,7 +41,10 @@ import org.elasticsearch.index.fielddata.fieldcomparator.BytesRefFieldComparator
import org.elasticsearch.index.fielddata.ordinals.GlobalOrdinalsIndexFieldData;
import org.elasticsearch.index.fielddata.ordinals.GlobalOrdinalsBuilder;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.MultiValueMode;
import org.elasticsearch.search.sort.BucketedSort;
import org.elasticsearch.search.sort.SortOrder;
import java.io.IOException;
import java.util.function.Function;
@ -81,6 +85,12 @@ public class SortedSetDVOrdinalsIndexFieldData extends DocValuesIndexFieldData i
return sortField;
}
@Override
public BucketedSort newBucketedSort(BigArrays bigArrays, Object missingValue, MultiValueMode sortMode, Nested nested,
SortOrder sortOrder, DocValueFormat format) {
throw new IllegalArgumentException("only supported on numeric fields");
}
@Override
public AtomicOrdinalsFieldData load(LeafReaderContext context) {
return new SortedSetDVBytesAtomicFieldData(context.reader(), fieldName, scriptFunction);

@ -31,6 +31,7 @@ import org.apache.lucene.search.TermInSetQuery;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexSettings;
@ -45,7 +46,10 @@ import org.elasticsearch.index.fielddata.plain.PagedBytesIndexFieldData;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.MultiValueMode;
import org.elasticsearch.search.sort.BucketedSort;
import org.elasticsearch.search.sort.SortOrder;
import java.io.IOException;
import java.util.Arrays;
@ -203,6 +207,12 @@ public class IdFieldMapper extends MetadataFieldMapper {
return new SortField(getFieldName(), source, reverse);
}
@Override
public BucketedSort newBucketedSort(BigArrays bigArrays, Object missingValue, MultiValueMode sortMode,
Nested nested, SortOrder sortOrder, DocValueFormat format) {
throw new UnsupportedOperationException("can't sort on the [" + CONTENT_TYPE + "] field");
}
@Override
public void clear() {
fieldData.clear();

@ -263,6 +263,7 @@ import org.elasticsearch.search.sort.GeoDistanceSortBuilder;
import org.elasticsearch.search.sort.ScoreSortBuilder;
import org.elasticsearch.search.sort.ScriptSortBuilder;
import org.elasticsearch.search.sort.SortBuilder;
import org.elasticsearch.search.sort.SortValue;
import org.elasticsearch.search.suggest.Suggest;
import org.elasticsearch.search.suggest.SuggestionBuilder;
import org.elasticsearch.search.suggest.completion.CompletionSuggestion;
@ -334,6 +335,7 @@ public class SearchModule {
registerSearchExts(plugins);
registerShapes();
registerIntervalsSourceProviders();
namedWriteables.addAll(SortValue.namedWriteables());
}
public List<NamedWriteableRegistry.Entry> getNamedWriteables() {

@ -0,0 +1,372 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.sort;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.Scorable;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.lucene.ScorerAware;
import org.elasticsearch.common.util.BigArray;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.BitArray;
import org.elasticsearch.common.util.DoubleArray;
import org.elasticsearch.common.util.FloatArray;
import org.elasticsearch.common.util.LongArray;
import org.elasticsearch.search.DocValueFormat;
import java.io.IOException;
/**
* Type specialized sort implementations designed for use in aggregations.
*/
public abstract class BucketedSort implements Releasable {
// TODO priority queue semantics to support multiple hits in the buckets
protected final BigArrays bigArrays;
private final SortOrder order;
private final DocValueFormat format;
public BucketedSort(BigArrays bigArrays, SortOrder order, DocValueFormat format) {
this.bigArrays = bigArrays;
this.order = order;
this.format = format;
}
/**
* The order of the sort.
*/
public final SortOrder getOrder() {
return order;
}
/**
* The format to use when presenting the values.
*/
public final DocValueFormat getFormat() {
return format;
}
/**
* Get the value for a bucket if it has been collected, null otherwise.
*/
public final SortValue getValue(long bucket) {
if (bucket >= buckets().size()) {
return null;
}
return getValueForBucket(bucket);
}
/**
* Get the {@linkplain Leaf} implementation that'll do the actual collecting.
*/
public abstract Leaf forLeaf(LeafReaderContext ctx) throws IOException;
/**
* Does this sort need scores? Most don't, but sorting on {@code _score} does.
*/
public abstract boolean needsScores();
/**
* The {@linkplain BigArray} backing this sort.
*/
protected abstract BigArray buckets();
/**
* Grow the {@linkplain BigArray} backing this sort to account for new buckets.
* This will only be called if the array is too small.
*/
protected abstract void grow(long minSize);
/**
* Get the value for a bucket. This will only be called if the bucket was collected.
*/
protected abstract SortValue getValueForBucket(long bucket);
/**
* Performs the actual collection against a {@linkplain LeafReaderContext}.
*/
public abstract class Leaf implements ScorerAware {
/**
* Collect this doc, returning {@code true} if it is competitive.
*/
public final boolean collectIfCompetitive(int doc, long bucket) throws IOException {
if (false == advanceExact(doc)) {
return false;
}
if (bucket >= buckets().size()) {
grow(bucket + 1);
setValue(bucket);
return true;
}
return setIfCompetitive(bucket);
}
/**
* Move the underlying data source reader to the doc and return
* {@code true} if there is data for the sort value.
*/
protected abstract boolean advanceExact(int doc) throws IOException;
/**
* Set the value for a particular bucket to the value that doc has for the sort.
* This is called when we're *sure* we haven't yet seen the bucket.
*/
protected abstract void setValue(long bucket) throws IOException;
/**
* If the value that doc has for the sort is competitive with the other values
* then set it. This is called for buckets we *might* have already seen. So
* implementers will have to check for "empty" buckets in their own way. The
* vagueness here is for two reasons:
* <ul>
* <li>When we see a bucket that won't fit in our arrays we oversize them so
* we don't have to grow them by 1 every time.</li>
* <li>Buckets don't always arrive in order and our storage is "dense" on the
* bucket ordinal. For example, we might get bucket number 4, grow the array
* to fit it, and *then* get bucket number 3.</li>
* </ul>
*/
protected abstract boolean setIfCompetitive(long bucket) throws IOException;
}
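/* Illustrative usage sketch, not part of this change: roughly how an aggregator
 * is expected to drive a BucketedSort. The names `sort`, `ctx`, `scorer`, `doc`
 * and `bucketOrd` are hypothetical.
 *
 *   BucketedSort.Leaf leaf = sort.forLeaf(ctx);      // once per segment
 *   if (sort.needsScores()) leaf.setScorer(scorer);  // only score-based sorts need this
 *   leaf.collectIfCompetitive(doc, bucketOrd);       // once per matching doc
 *   SortValue best = sort.getValue(bucketOrd);       // null if the bucket never collected anything
 */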
/**
* Superclass for implementations of {@linkplain BucketedSort} for {@code double} keys.
*/
public abstract static class ForDoubles extends BucketedSort {
private DoubleArray buckets = bigArrays.newDoubleArray(1, false);
public ForDoubles(BigArrays bigArrays, SortOrder sortOrder, DocValueFormat format) {
super(bigArrays, sortOrder, format);
// NaN is a sentinel value for "unused"
buckets.set(0, Double.NaN);
}
@Override
public boolean needsScores() { return false; }
@Override
protected final BigArray buckets() { return buckets; }
@Override
protected final void grow(long minSize) {
long oldSize = buckets.size();
buckets = bigArrays.grow(buckets, minSize);
buckets.fill(oldSize, buckets.size(), Double.NaN);
}
@Override
public final SortValue getValueForBucket(long bucket) {
double val = buckets.get(bucket);
if (Double.isNaN(val)) {
return null;
}
return SortValue.from(val);
}
@Override
public final void close() {
buckets.close();
}
protected abstract class Leaf extends BucketedSort.Leaf {
protected abstract double docValue() throws IOException;
@Override
public final void setScorer(Scorable scorer) {}
@Override
protected final void setValue(long bucket) throws IOException {
buckets.set(bucket, docValue());
}
@Override
protected final boolean setIfCompetitive(long bucket) throws IOException {
double docSort = docValue();
double bestSort = buckets.get(bucket);
// The NaN check is important here because the NaN sentinel for "unused" must always lose.
if (false == Double.isNaN(bestSort) && getOrder().reverseMul() * Double.compare(bestSort, docSort) <= 0) {
return false;
}
buckets.set(bucket, docSort);
return true;
}
}
}
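/* Illustrative sketch, not part of this change: a concrete ForDoubles leaf usually
 * just wraps a per-segment doc values source, much like the geo distance comparator
 * later in this diff does. `values` here stands in for any NumericDoubleValues.
 *
 *   return new Leaf() {
 *       @Override
 *       protected boolean advanceExact(int doc) throws IOException {
 *           return values.advanceExact(doc);
 *       }
 *       @Override
 *       protected double docValue() throws IOException {
 *           return values.doubleValue();
 *       }
 *   };
 */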
/**
* Superclass for implementations of {@linkplain BucketedSort} for {@code float} keys.
*/
public abstract static class ForFloats extends BucketedSort {
private FloatArray buckets = bigArrays.newFloatArray(1, false);
public ForFloats(BigArrays bigArrays, SortOrder sortOrder, DocValueFormat format) {
super(bigArrays, sortOrder, format);
// NaN is a sentinel value for "unused"
buckets.set(0, Float.NaN);
}
@Override
protected final BigArray buckets() { return buckets; }
@Override
protected final void grow(long minSize) {
long oldSize = buckets.size();
buckets = bigArrays.grow(buckets, minSize);
buckets.fill(oldSize, buckets.size(), Float.NaN);
}
@Override
public final SortValue getValueForBucket(long bucket) {
float val = buckets.get(bucket);
if (Float.isNaN(val)) {
return null;
}
return SortValue.from(val);
}
@Override
public final void close() {
buckets.close();
}
protected abstract class Leaf extends BucketedSort.Leaf {
protected abstract float docValue() throws IOException;
@Override
protected final void setValue(long bucket) throws IOException {
buckets.set(bucket, docValue());
}
@Override
protected final boolean setIfCompetitive(long bucket) throws IOException {
float docSort = docValue();
float bestSort = buckets.get(bucket);
// The NaN check is important here because the NaN sentinel for "unused" must always lose.
if (false == Float.isNaN(bestSort) && getOrder().reverseMul() * Float.compare(bestSort, docSort) <= 0) {
return false;
}
buckets.set(bucket, docSort);
return true;
}
}
}
/**
* Superclass for implementations of {@linkplain BucketedSort} for {@code long} keys.
*/
public abstract static class ForLongs extends BucketedSort {
/**
* Tracks which buckets have been seen before so we can *always*
* set the value in that case. We need this because there isn't a
* sentinel value in the {@code long} type that we can use for this,
* like NaN in {@code double} or {@code float}.
*/
private BitArray seen = new BitArray(1, bigArrays);
/**
* The actual values.
*/
private LongArray buckets = bigArrays.newLongArray(1, false);
private long maxBucket = -1;
public ForLongs(BigArrays bigArrays, SortOrder sortOrder, DocValueFormat format) {
super(bigArrays, sortOrder, format);
}
@Override
public boolean needsScores() { return false; }
@Override
protected final BigArray buckets() { return buckets; }
@Override
protected final void grow(long minSize) {
buckets = bigArrays.grow(buckets, minSize);
}
@Override
public final SortValue getValueForBucket(long bucket) {
if (bucket > Integer.MAX_VALUE) {
/* We throw exceptions if we try to collect buckets bigger
* than an int so we *can't* have seen any of these. */
return null;
}
if (bucket > maxBucket) {
return null;
}
if (false == seen.get((int) bucket)) {
/* Buckets we haven't seen return null here so "gaps"
 * between collected buckets read as empty. */
return null;
}
return SortValue.from(buckets.get(bucket));
}
@Override
public final void close() {
Releasables.close(seen, buckets);
}
protected abstract class Leaf extends BucketedSort.Leaf {
protected abstract long docValue() throws IOException;
@Override
public final void setScorer(Scorable scorer) {}
@Override
protected final void setValue(long bucket) throws IOException {
seen.set(bucketIsInt(bucket));
buckets.set(bucket, docValue());
maxBucket = Math.max(bucket, maxBucket);
}
@Override
protected final boolean setIfCompetitive(long bucket) throws IOException {
long docSort = docValue();
int intBucket = bucketIsInt(bucket);
if (bucket > maxBucket) {
seen.set(intBucket);
buckets.set(bucket, docSort);
maxBucket = bucket;
return true;
}
if (false == seen.get(intBucket)) {
seen.set(intBucket);
buckets.set(bucket, docSort);
return true;
}
long bestSort = buckets.get(bucket);
if (getOrder().reverseMul() * Double.compare(bestSort, docSort) <= 0) {
return false;
}
buckets.set(bucket, docSort);
return true;
}
private int bucketIsInt(long bucket) {
if (bucket > Integer.MAX_VALUE) {
throw new UnsupportedOperationException("Long sort keys don't support more than [" + Integer.MAX_VALUE + "] buckets");
// I don't feel too bad about that because it'd take about 16 GB of memory....
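// (Roughly: Integer.MAX_VALUE buckets * 8 bytes per long value is about 16 GiB for the values alone,
// before counting the seen bits.)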
}
return (int) bucket;
}
}
}
}


@ -387,69 +387,101 @@ public class FieldSortBuilder extends SortBuilder<FieldSortBuilder> {
@Override
public SortFieldAndFormat build(QueryShardContext context) throws IOException {
if (DOC_FIELD_NAME.equals(fieldName)) {
if (order == SortOrder.DESC) {
return SORT_DOC_REVERSE;
} else {
return SORT_DOC;
}
} else {
boolean isUnmapped = false;
MappedFieldType fieldType = context.fieldMapper(fieldName);
if (fieldType == null) {
isUnmapped = true;
if (unmappedType != null) {
fieldType = context.getMapperService().unmappedFieldType(unmappedType);
} else {
throw new QueryShardException(context, "No mapping found for [" + fieldName + "] in order to sort on");
}
}
MultiValueMode localSortMode = null;
if (sortMode != null) {
localSortMode = MultiValueMode.fromString(sortMode.toString());
}
boolean reverse = (order == SortOrder.DESC);
if (localSortMode == null) {
localSortMode = reverse ? MultiValueMode.MAX : MultiValueMode.MIN;
}
Nested nested = null;
if (isUnmapped == false) {
if (nestedSort != null) {
if (context.indexVersionCreated().before(Version.V_6_5_0) && nestedSort.getMaxChildren() != Integer.MAX_VALUE) {
throw new QueryShardException(context,
"max_children is only supported on v6.5.0 or higher");
}
if (nestedSort.getNestedSort() != null && nestedSort.getMaxChildren() != Integer.MAX_VALUE) {
throw new QueryShardException(context,
"max_children is only supported on last level of nested sort");
}
validateMaxChildrenExistOnlyInTopLevelNestedSort(context, nestedSort);
nested = resolveNested(context, nestedSort);
} else {
nested = resolveNested(context, nestedPath, nestedFilter);
}
}
IndexFieldData<?> fieldData = context.getForField(fieldType);
if (fieldData instanceof IndexNumericFieldData == false
&& (sortMode == SortMode.SUM || sortMode == SortMode.AVG || sortMode == SortMode.MEDIAN)) {
throw new QueryShardException(context, "we only support AVG, MEDIAN and SUM on number based fields");
}
final SortField field;
if (numericType != null) {
if (fieldData instanceof IndexNumericFieldData == false) {
throw new QueryShardException(context,
"[numeric_type] option cannot be set on a non-numeric field, got " + fieldType.typeName());
}
SortedNumericDVIndexFieldData numericFieldData = (SortedNumericDVIndexFieldData) fieldData;
NumericType resolvedType = resolveNumericType(numericType);
field = numericFieldData.sortField(resolvedType, missing, localSortMode, nested, reverse);
} else {
field = fieldData.sortField(missing, localSortMode, nested, reverse);
}
return new SortFieldAndFormat(field, fieldType.docValueFormat(null, null));
return order == SortOrder.DESC ? SORT_DOC_REVERSE : SORT_DOC;
}
MappedFieldType fieldType = context.fieldMapper(fieldName);
Nested nested = nested(context, fieldType);
if (fieldType == null) {
fieldType = resolveUnmappedType(context);
}
boolean reverse = order == SortOrder.DESC;
IndexFieldData<?> fieldData = context.getForField(fieldType);
if (fieldData instanceof IndexNumericFieldData == false
&& (sortMode == SortMode.SUM || sortMode == SortMode.AVG || sortMode == SortMode.MEDIAN)) {
throw new QueryShardException(context, "we only support AVG, MEDIAN and SUM on number based fields");
}
final SortField field;
if (numericType != null) {
if (fieldData instanceof IndexNumericFieldData == false) {
throw new QueryShardException(context,
"[numeric_type] option cannot be set on a non-numeric field, got " + fieldType.typeName());
}
SortedNumericDVIndexFieldData numericFieldData = (SortedNumericDVIndexFieldData) fieldData;
NumericType resolvedType = resolveNumericType(numericType);
field = numericFieldData.sortField(resolvedType, missing, localSortMode(), nested, reverse);
} else {
field = fieldData.sortField(missing, localSortMode(), nested, reverse);
}
return new SortFieldAndFormat(field, fieldType.docValueFormat(null, null));
}
@Override
public BucketedSort buildBucketedSort(QueryShardContext context) throws IOException {
if (DOC_FIELD_NAME.equals(fieldName)) {
throw new IllegalArgumentException("sorting by _doc is not supported");
}
MappedFieldType fieldType = context.fieldMapper(fieldName);
Nested nested = nested(context, fieldType);
if (fieldType == null) {
fieldType = resolveUnmappedType(context);
}
IndexFieldData<?> fieldData = context.getForField(fieldType);
if (fieldData instanceof IndexNumericFieldData == false
&& (sortMode == SortMode.SUM || sortMode == SortMode.AVG || sortMode == SortMode.MEDIAN)) {
throw new QueryShardException(context, "we only support AVG, MEDIAN and SUM on number based fields");
}
if (numericType != null) {
SortedNumericDVIndexFieldData numericFieldData = (SortedNumericDVIndexFieldData) fieldData;
NumericType resolvedType = resolveNumericType(numericType);
return numericFieldData.newBucketedSort(resolvedType, context.bigArrays(), missing, localSortMode(), nested, order,
fieldType.docValueFormat(null, null));
}
try {
return fieldData.newBucketedSort(context.bigArrays(), missing, localSortMode(), nested, order,
fieldType.docValueFormat(null, null));
} catch (IllegalArgumentException e) {
throw new IllegalArgumentException("error building sort for field [" + fieldName + "] of type ["
+ fieldType.typeName() + "] in index [" + context.index().getName() + "]: " + e.getMessage(), e);
}
}
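// Illustrative usage sketch, not part of this change; `sortBuilder` and `shardContext` are hypothetical:
//
//   BucketedSort sort = sortBuilder.buildBucketedSort(shardContext);
//
// The aggregator then feeds it (doc, bucket ordinal) pairs per leaf as sketched in BucketedSort.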
private MappedFieldType resolveUnmappedType(QueryShardContext context) {
if (unmappedType == null) {
throw new QueryShardException(context, "No mapping found for [" + fieldName + "] in order to sort on");
}
return context.getMapperService().unmappedFieldType(unmappedType);
}
private MultiValueMode localSortMode() {
if (sortMode != null) {
return MultiValueMode.fromString(sortMode.toString());
}
return order == SortOrder.DESC ? MultiValueMode.MAX : MultiValueMode.MIN;
}
private Nested nested(QueryShardContext context, MappedFieldType fieldType) throws IOException {
if (fieldType == null) {
return null;
}
// If we have a nestedSort we'll use that. Otherwise, use old style.
if (nestedSort == null) {
return resolveNested(context, nestedPath, nestedFilter);
}
if (context.indexVersionCreated().before(Version.V_6_5_0) && nestedSort.getMaxChildren() != Integer.MAX_VALUE) {
throw new QueryShardException(context,
"max_children is only supported on v6.5.0 or higher");
}
if (nestedSort.getNestedSort() != null && nestedSort.getMaxChildren() != Integer.MAX_VALUE) {
throw new QueryShardException(context,
"max_children is only supported on last level of nested sort");
}
validateMaxChildrenExistOnlyInTopLevelNestedSort(context, nestedSort);
return resolveNested(context, nestedSort);
}
/**


@ -38,6 +38,7 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.Token;
@ -575,7 +576,42 @@ public class GeoDistanceSortBuilder extends SortBuilder<GeoDistanceSortBuilder>
@Override
public SortFieldAndFormat build(QueryShardContext context) throws IOException {
GeoPoint[] localPoints = localPoints();
boolean reverse = order == SortOrder.DESC;
MultiValueMode localSortMode = localSortMode();
IndexGeoPointFieldData geoIndexFieldData = fieldData(context);
Nested nested = nested(context);
if (geoIndexFieldData.getClass() == LatLonPointDVIndexFieldData.class // only works with 5.x geo_point
&& nested == null
&& localSortMode == MultiValueMode.MIN // LatLonDocValuesField internally picks the closest point
&& unit == DistanceUnit.METERS
&& reverse == false
&& localPoints.length == 1) {
return new SortFieldAndFormat(
LatLonDocValuesField.newDistanceSort(fieldName, localPoints[0].lat(), localPoints[0].lon()),
DocValueFormat.RAW);
}
return new SortFieldAndFormat(
new SortField(fieldName, comparatorSource(localPoints, localSortMode, geoIndexFieldData, nested), reverse),
DocValueFormat.RAW);
}
@Override
public BucketedSort buildBucketedSort(QueryShardContext context) throws IOException {
GeoPoint[] localPoints = localPoints();
MultiValueMode localSortMode = localSortMode();
IndexGeoPointFieldData geoIndexFieldData = fieldData(context);
Nested nested = nested(context);
// TODO implement the single point optimization above
return comparatorSource(localPoints, localSortMode, geoIndexFieldData, nested)
.newBucketedSort(context.bigArrays(), order, DocValueFormat.RAW);
}
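// Illustrative note: the sort returned here is the BucketedSort.ForDoubles built by comparatorSource(...)
// below, whose docValue() is the doc's distance from the configured points. A hedged sketch with a
// hypothetical, already configured builder:
//
//   BucketedSort sort = geoSortBuilder.buildBucketedSort(context);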
private GeoPoint[] localPoints() {
// validation was not available prior to 2.x, so to support bwc percolation queries we only ignore_malformed
// on 2.x created indexes
GeoPoint[] localPoints = points.toArray(new GeoPoint[points.size()]);
@ -601,15 +637,19 @@ public class GeoDistanceSortBuilder extends SortBuilder<GeoDistanceSortBuilder>
GeoUtils.normalizePoint(point, true, true);
}
}
return localPoints;
}
boolean reverse = (order == SortOrder.DESC);
final MultiValueMode finalSortMode;
if (sortMode == null) {
finalSortMode = reverse ? MultiValueMode.MAX : MultiValueMode.MIN;
} else {
finalSortMode = MultiValueMode.fromString(sortMode.toString());
private MultiValueMode localSortMode() {
// TODO this lines up with FieldSortBuilder. Share?
if (sortMode != null) {
return MultiValueMode.fromString(sortMode.toString());
}
return order == SortOrder.DESC ? MultiValueMode.MAX : MultiValueMode.MIN;
}
private IndexGeoPointFieldData fieldData(QueryShardContext context) {
MappedFieldType fieldType = context.fieldMapper(fieldName);
if (fieldType == null) {
if (ignoreUnmapped) {
@ -618,71 +658,80 @@ public class GeoDistanceSortBuilder extends SortBuilder<GeoDistanceSortBuilder>
throw new IllegalArgumentException("failed to find mapper for [" + fieldName + "] for geo distance based sort");
}
}
final IndexGeoPointFieldData geoIndexFieldData = context.getForField(fieldType);
return context.getForField(fieldType);
}
final Nested nested;
if (nestedSort != null) {
if (context.indexVersionCreated().before(Version.V_6_5_0) && nestedSort.getMaxChildren() != Integer.MAX_VALUE) {
throw new QueryShardException(context,
"max_children is only supported on v6.5.0 or higher");
}
if (nestedSort.getNestedSort() != null && nestedSort.getMaxChildren() != Integer.MAX_VALUE) {
throw new QueryShardException(context,
"max_children is only supported on last level of nested sort");
}
// new nested sorts takes priority
validateMaxChildrenExistOnlyInTopLevelNestedSort(context, nestedSort);
nested = resolveNested(context, nestedSort);
} else {
nested = resolveNested(context, nestedPath, nestedFilter);
private Nested nested(QueryShardContext context) throws IOException {
// If we have a nestedSort we'll use that. Otherwise, use old style.
if (nestedSort == null) {
return resolveNested(context, nestedPath, nestedFilter);
}
if (geoIndexFieldData.getClass() == LatLonPointDVIndexFieldData.class // only works with 5.x geo_point
&& nested == null
&& finalSortMode == MultiValueMode.MIN // LatLonDocValuesField internally picks the closest point
&& unit == DistanceUnit.METERS
&& reverse == false
&& localPoints.length == 1) {
return new SortFieldAndFormat(
LatLonDocValuesField.newDistanceSort(fieldName, localPoints[0].lat(), localPoints[0].lon()),
DocValueFormat.RAW);
if (context.indexVersionCreated().before(Version.V_6_5_0) && nestedSort.getMaxChildren() != Integer.MAX_VALUE) {
throw new QueryShardException(context,
"max_children is only supported on v6.5.0 or higher");
}
if (nestedSort.getNestedSort() != null && nestedSort.getMaxChildren() != Integer.MAX_VALUE) {
throw new QueryShardException(context,
"max_children is only supported on last level of nested sort");
}
validateMaxChildrenExistOnlyInTopLevelNestedSort(context, nestedSort);
return resolveNested(context, nestedSort);
}
IndexFieldData.XFieldComparatorSource geoDistanceComparatorSource = new IndexFieldData.XFieldComparatorSource(null, finalSortMode,
nested) {
private IndexFieldData.XFieldComparatorSource comparatorSource(GeoPoint[] localPoints, MultiValueMode localSortMode,
IndexGeoPointFieldData geoIndexFieldData, Nested nested) {
return new IndexFieldData.XFieldComparatorSource(null, localSortMode, nested) {
@Override
public SortField.Type reducedType() {
return SortField.Type.DOUBLE;
}
private NumericDoubleValues getNumericDoubleValues(LeafReaderContext context) throws IOException {
final MultiGeoPointValues geoPointValues = geoIndexFieldData.load(context).getGeoPointValues();
final SortedNumericDoubleValues distanceValues = GeoUtils.distanceValues(geoDistance, unit, geoPointValues, localPoints);
if (nested == null) {
return FieldData.replaceMissing(sortMode.select(distanceValues), Double.POSITIVE_INFINITY);
} else {
final BitSet rootDocs = nested.rootDocs(context);
final DocIdSetIterator innerDocs = nested.innerDocs(context);
final int maxChildren = nested.getNestedSort() != null ? nested.getNestedSort().getMaxChildren() : Integer.MAX_VALUE;
return localSortMode.select(distanceValues, Double.POSITIVE_INFINITY, rootDocs, innerDocs,
context.reader().maxDoc(), maxChildren);
}
}
@Override
public FieldComparator<?> newComparator(String fieldname, int numHits, int sortPos, boolean reversed) {
return new FieldComparator.DoubleComparator(numHits, null, null) {
@Override
protected NumericDocValues getNumericDocValues(LeafReaderContext context, String field) throws IOException {
final MultiGeoPointValues geoPointValues = geoIndexFieldData.load(context).getGeoPointValues();
final SortedNumericDoubleValues distanceValues = GeoUtils.distanceValues(geoDistance, unit, geoPointValues,
localPoints);
final NumericDoubleValues selectedValues;
if (nested == null) {
selectedValues = FieldData.replaceMissing(finalSortMode.select(distanceValues), Double.POSITIVE_INFINITY);
} else {
final BitSet rootDocs = nested.rootDocs(context);
final DocIdSetIterator innerDocs = nested.innerDocs(context);
final int maxChildren = nested.getNestedSort() != null ?
nested.getNestedSort().getMaxChildren() : Integer.MAX_VALUE;
selectedValues = finalSortMode.select(distanceValues, Double.POSITIVE_INFINITY, rootDocs, innerDocs,
context.reader().maxDoc(), maxChildren);
}
return selectedValues.getRawDoubleValues();
return getNumericDoubleValues(context).getRawDoubleValues();
}
};
}
@Override
public BucketedSort newBucketedSort(BigArrays bigArrays, SortOrder sortOrder, DocValueFormat format) {
return new BucketedSort.ForDoubles(bigArrays, sortOrder, format) {
@Override
public Leaf forLeaf(LeafReaderContext ctx) throws IOException {
return new Leaf() {
private final NumericDoubleValues values = getNumericDoubleValues(ctx);
@Override
protected boolean advanceExact(int doc) throws IOException {
return values.advanceExact(doc);
}
@Override
protected double docValue() throws IOException {
return values.doubleValue();
}
};
}
};
}
};
return new SortFieldAndFormat(new SortField(fieldName, geoDistanceComparatorSource, reverse),
DocValueFormat.RAW);
}
static void parseGeoPoints(XContentParser parser, List<GeoPoint> geoPoints) throws IOException {


@ -19,6 +19,8 @@
package org.elasticsearch.search.sort;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.Scorable;
import org.apache.lucene.search.SortField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
@ -101,6 +103,39 @@ public class ScoreSortBuilder extends SortBuilder<ScoreSortBuilder> {
}
}
@Override
public BucketedSort buildBucketedSort(QueryShardContext context) throws IOException {
return new BucketedSort.ForFloats(context.bigArrays(), order, DocValueFormat.RAW) {
@Override
public boolean needsScores() { return true; }
@Override
public Leaf forLeaf(LeafReaderContext ctx) throws IOException {
return new BucketedSort.ForFloats.Leaf() {
private Scorable scorer;
@Override
public void setScorer(Scorable scorer) {
this.scorer = scorer;
}
@Override
protected boolean advanceExact(int doc) throws IOException {
assert doc == scorer.docID() : "expected scorer to be on [" + doc + "] but was on [" + scorer.docID() + "]";
/* We are never called for documents that don't match the
 * query, and every matching document has a score, thus `true`. */
return true;
}
@Override
protected float docValue() throws IOException {
return scorer.score();
}
};
}
};
}
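// Illustrative sketch, not part of this change: because this sort needs scores, a caller has to wire
// the scorer in before collecting. `sort`, `ctx`, `scorer`, `doc` and `bucketOrd` are hypothetical;
// BucketedSortForFloatsTests#testScorer later in this diff does exactly this.
//
//   BucketedSort.Leaf leaf = sort.forLeaf(ctx);
//   leaf.setScorer(scorer);
//   leaf.collectIfCompetitive(doc, bucketOrd);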
@Override
public boolean equals(Object object) {
if (this == object) {


@ -32,6 +32,7 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
import org.elasticsearch.common.xcontent.XContentBuilder;
@ -49,8 +50,8 @@ import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryRewriteContext;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.QueryShardException;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.NumberSortScript;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.StringSortScript;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.MultiValueMode;
@ -306,13 +307,23 @@ public class ScriptSortBuilder extends SortBuilder<ScriptSortBuilder> {
@Override
public SortFieldAndFormat build(QueryShardContext context) throws IOException {
return new SortFieldAndFormat(
new SortField("_script", fieldComparatorSource(context), order == SortOrder.DESC),
DocValueFormat.RAW);
}
@Override
public BucketedSort buildBucketedSort(QueryShardContext context) throws IOException {
return fieldComparatorSource(context).newBucketedSort(context.bigArrays(), order, DocValueFormat.RAW);
}
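// Illustrative note: only number-typed script sorts can be used as a bucketed sort. The STRING branch of
// fieldComparatorSource(...) below overrides newBucketedSort to throw IllegalArgumentException, so a
// hypothetical call like
//
//   scriptSortBuilder.buildBucketedSort(context);
//
// works for numeric scripts and fails with a clear error for string scripts.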
private IndexFieldData.XFieldComparatorSource fieldComparatorSource(QueryShardContext context) throws IOException {
MultiValueMode valueMode = null;
if (sortMode != null) {
valueMode = MultiValueMode.fromString(sortMode.toString());
}
boolean reverse = (order == SortOrder.DESC);
if (valueMode == null) {
valueMode = reverse ? MultiValueMode.MAX : MultiValueMode.MIN;
valueMode = order == SortOrder.DESC ? MultiValueMode.MAX : MultiValueMode.MIN;
}
final Nested nested;
@ -332,12 +343,11 @@ public class ScriptSortBuilder extends SortBuilder<ScriptSortBuilder> {
nested = resolveNested(context, nestedPath, nestedFilter);
}
final IndexFieldData.XFieldComparatorSource fieldComparatorSource;
switch (type) {
case STRING:
final StringSortScript.Factory factory = context.compile(script, StringSortScript.CONTEXT);
final StringSortScript.LeafFactory searchScript = factory.newFactory(script.getParams(), context.lookup());
fieldComparatorSource = new BytesRefFieldComparatorSource(null, null, valueMode, nested) {
return new BytesRefFieldComparatorSource(null, null, valueMode, nested) {
StringSortScript leafScript;
@Override
protected SortedBinaryDocValues getValues(LeafReaderContext context) throws IOException {
@ -361,12 +371,17 @@ public class ScriptSortBuilder extends SortBuilder<ScriptSortBuilder> {
protected void setScorer(Scorable scorer) {
leafScript.setScorer(scorer);
}
@Override
public BucketedSort newBucketedSort(BigArrays bigArrays, SortOrder sortOrder, DocValueFormat format) {
throw new IllegalArgumentException("error building sort for [_script]: "
+ "script sorting only supported on [numeric] scripts but was [" + type + "]");
}
};
break;
case NUMBER:
final NumberSortScript.Factory numberSortFactory = context.compile(script, NumberSortScript.CONTEXT);
final NumberSortScript.LeafFactory numberSortScript = numberSortFactory.newFactory(script.getParams(), context.lookup());
fieldComparatorSource = new DoubleValuesComparatorSource(null, Double.MAX_VALUE, valueMode, nested) {
return new DoubleValuesComparatorSource(null, Double.MAX_VALUE, valueMode, nested) {
NumberSortScript leafScript;
@Override
protected SortedNumericDoubleValues getValues(LeafReaderContext context) throws IOException {
@ -389,12 +404,9 @@ public class ScriptSortBuilder extends SortBuilder<ScriptSortBuilder> {
leafScript.setScorer(scorer);
}
};
break;
default:
throw new QueryShardException(context, "custom script sort type [" + type + "] not supported");
}
return new SortFieldAndFormat(new SortField("_script", fieldComparatorSource, reverse), DocValueFormat.RAW);
}
@Override


@ -70,10 +70,15 @@ public abstract class SortBuilder<T extends SortBuilder<T>> implements NamedWrit
}
/**
* Create a @link {@link SortFieldAndFormat} from this builder.
* Create a {@linkplain SortFieldAndFormat} from this builder.
*/
protected abstract SortFieldAndFormat build(QueryShardContext context) throws IOException;
/**
* Create a {@linkplain BucketedSort} which is useful for sorting inside of aggregations.
*/
public abstract BucketedSort buildBucketedSort(QueryShardContext context) throws IOException;
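// (In this change the field, geo distance, score and script sort builders earlier in this diff each implement this.)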
/**
* Set the order of sorting.
*/


@ -40,6 +40,11 @@ public enum SortOrder implements Writeable {
public String toString() {
return "asc";
}
@Override
public int reverseMul() {
return 1;
}
},
/**
* Descending order.
@ -49,6 +54,11 @@ public enum SortOrder implements Writeable {
public String toString() {
return "desc";
}
@Override
public int reverseMul() {
return -1;
}
};
public static SortOrder readFromStream(StreamInput in) throws IOException {
@ -63,4 +73,9 @@ public enum SortOrder implements Writeable {
public static SortOrder fromString(String op) {
return valueOf(op.toUpperCase(Locale.ROOT));
}
/**
* -1 if the sort is reversed from the standard comparators, 1 otherwise.
*/
public abstract int reverseMul();
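// Illustrative use, not part of this change: implementations multiply a comparison by reverseMul() so a
// single expression handles both orders, e.g. in BucketedSort earlier in this diff:
//
//   if (getOrder().reverseMul() * Double.compare(bestSort, docSort) <= 0) { /* doc is not competitive */ }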
}


@ -0,0 +1,247 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.sort;
import org.elasticsearch.common.io.stream.NamedWriteable;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.DocValueFormat;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
/**
* A {@link Comparable}, {@link DocValueFormat} aware wrapper around a sort value.
*/
public abstract class SortValue implements NamedWriteable, Comparable<SortValue> {
/**
* Get a {@linkplain SortValue} for a double.
*/
public static SortValue from(double d) {
return new DoubleSortValue(d);
}
/**
* Get a {@linkplain SortValue} for a long.
*/
public static SortValue from(long l) {
return new LongSortValue(l);
}
/**
* Get the list of {@linkplain NamedWriteable}s that this class needs.
*/
public static List<NamedWriteableRegistry.Entry> namedWriteables() {
return Arrays.asList(
new NamedWriteableRegistry.Entry(SortValue.class, DoubleSortValue.NAME, DoubleSortValue::new),
new NamedWriteableRegistry.Entry(SortValue.class, LongSortValue.NAME, LongSortValue::new));
}
private SortValue() {
// All subclasses of this class are defined in this file.
}
@Override
public final int compareTo(SortValue other) {
/*
* It might make sense to try to compare doubles to longs
* *carefully* to get a real cross-type sort, but it might not. For now
* we sort all doubles before all longs.
*/
int typeCompare = getWriteableName().compareTo(other.getWriteableName());
if (typeCompare != 0) {
return typeCompare;
}
return compareToSameType(other);
}
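/* Illustrative consequence, verified by SortValueTests later in this diff: because the writeable name
 * breaks ties first ("double" sorts before "long"), every double compares less than every long, e.g.
 *
 *   SortValue.from(Double.MAX_VALUE).compareTo(SortValue.from(Long.MIN_VALUE)) < 0
 */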
/**
* Write the key as xcontent.
*/
public final XContentBuilder toXContent(XContentBuilder builder, DocValueFormat format) throws IOException {
if (format == DocValueFormat.RAW) {
return rawToXContent(builder);
}
return builder.value(format(format));
}
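/* Illustrative output, matching SortValueTests later in this diff: under DocValueFormat.RAW the raw
 * number is written, otherwise the formatted string is, e.g. for a field named "test":
 *
 *   SortValue.from(1.0) -> {"test":1.0} with RAW, {"test":"1970-01-01T00:00:00.001Z"} with a date format
 */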
/**
* The java object representing the sort value.
*/
public abstract Object getKey();
/**
* Format this value using the provided format.
*/
public abstract String format(DocValueFormat format);
/**
* Write the key as xcontent using the most native type possible.
*/
protected abstract XContentBuilder rawToXContent(XContentBuilder builder) throws IOException;
/**
* Compare this sort value to another sort value of the same type.
*/
protected abstract int compareToSameType(SortValue obj);
// Force implementations to override equals for consistency with compareToSameType
@Override
public abstract boolean equals(Object obj);
// Force implementations to override hashCode for consistency with equals
@Override
public abstract int hashCode();
// Force implementations to override toString so debugging isn't a nightmare.
@Override
public abstract String toString();
private static class DoubleSortValue extends SortValue {
public static final String NAME = "double";
private final double key;
private DoubleSortValue(double key) {
this.key = key;
}
private DoubleSortValue(StreamInput in) throws IOException {
this.key = in.readDouble();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeDouble(key);
}
@Override
public String getWriteableName() {
return NAME;
}
@Override
public Object getKey() {
return key;
}
@Override
public String format(DocValueFormat format) {
return format.format(key).toString();
}
@Override
protected XContentBuilder rawToXContent(XContentBuilder builder) throws IOException {
return builder.value(key);
}
@Override
protected int compareToSameType(SortValue obj) {
DoubleSortValue other = (DoubleSortValue) obj;
return Double.compare(key, other.key);
}
@Override
public boolean equals(Object obj) {
if (obj == null || false == getClass().equals(obj.getClass())) {
return false;
}
DoubleSortValue other = (DoubleSortValue) obj;
return key == other.key;
}
@Override
public int hashCode() {
return Double.hashCode(key);
}
@Override
public String toString() {
return Double.toString(key);
}
}
private static class LongSortValue extends SortValue {
public static final String NAME = "long";
private final long key;
LongSortValue(long key) {
this.key = key;
}
LongSortValue(StreamInput in) throws IOException {
key = in.readLong();
}
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeLong(key);
}
@Override
public String getWriteableName() {
return NAME;
}
@Override
public Object getKey() {
return key;
}
@Override
public String format(DocValueFormat format) {
return format.format(key).toString();
}
@Override
protected XContentBuilder rawToXContent(XContentBuilder builder) throws IOException {
return builder.value(key);
}
@Override
protected int compareToSameType(SortValue obj) {
LongSortValue other = (LongSortValue) obj;
return Long.compare(key, other.key);
}
@Override
public boolean equals(Object obj) {
if (obj == null || false == getClass().equals(obj.getClass())) {
return false;
}
LongSortValue other = (LongSortValue) obj;
return key == other.key;
}
@Override
public int hashCode() {
return Long.hashCode(key);
}
@Override
public String toString() {
return Long.toString(key);
}
}
}


@ -22,10 +22,14 @@ package org.elasticsearch.index.fielddata;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.SortField;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.fielddata.fieldcomparator.BytesRefFieldComparatorSource;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.MultiValueMode;
import org.elasticsearch.search.sort.BucketedSort;
import org.elasticsearch.search.sort.SortOrder;
/** Returns an implementation based on paged bytes which doesn't implement WithOrdinals in order to visit different paths in the code,
* eg. BytesRefFieldComparatorSource makes decisions based on whether the field data implements WithOrdinals. */
@ -60,6 +64,12 @@ public class NoOrdinalsStringFieldDataTests extends PagedBytesStringFieldDataTes
return new SortField(getFieldName(), source, reverse);
}
@Override
public BucketedSort newBucketedSort(BigArrays bigArrays, Object missingValue, MultiValueMode sortMode, Nested nested,
SortOrder sortOrder, DocValueFormat format) {
throw new UnsupportedOperationException();
}
@Override
public void clear() {
in.clear();


@ -48,6 +48,7 @@ import org.elasticsearch.common.lucene.search.function.FieldValueFactorFunction;
import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery;
import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery.FilterScoreFunction;
import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery.ScoreMode;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.lucene.search.function.LeafScoreFunction;
import org.elasticsearch.common.lucene.search.function.RandomScoreFunction;
import org.elasticsearch.common.lucene.search.function.ScoreFunction;
@ -60,7 +61,11 @@ import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.fielddata.ScriptDocValues;
import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.MultiValueMode;
import org.elasticsearch.search.sort.BucketedSort;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.ESTestCase;
import org.junit.After;
import org.junit.Before;
@ -144,6 +149,12 @@ public class FunctionScoreTests extends ESTestCase {
throw new UnsupportedOperationException(UNSUPPORTED);
}
@Override
public BucketedSort newBucketedSort(BigArrays bigArrays, Object missingValue, MultiValueMode sortMode, Nested nested,
SortOrder sortOrder, DocValueFormat format) {
throw new UnsupportedOperationException(UNSUPPORTED);
}
@Override
public void clear() {
throw new UnsupportedOperationException(UNSUPPORTED);
@ -235,6 +246,12 @@ public class FunctionScoreTests extends ESTestCase {
throw new UnsupportedOperationException(UNSUPPORTED);
}
@Override
public BucketedSort newBucketedSort(BigArrays bigArrays, Object missingValue, MultiValueMode sortMode, Nested nested,
SortOrder sortOrder, DocValueFormat format) {
throw new UnsupportedOperationException(UNSUPPORTED);
}
@Override
public void clear() {
throw new UnsupportedOperationException(UNSUPPORTED);


@ -28,13 +28,17 @@ import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.MultiValueMode;
import org.elasticsearch.search.sort.BucketedSort;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
@ -281,6 +285,11 @@ public class SearchAfterBuilderTests extends ESTestCase {
public FieldComparator<?> newComparator(String fieldname, int numHits, int sortPos, boolean reversed) {
return null;
}
@Override
public BucketedSort newBucketedSort(BigArrays bigArrays, SortOrder sortOrder, DocValueFormat format) {
return null;
}
};
type = extractSortType(new SortField("field", source));


@ -0,0 +1,55 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.sort;
import org.apache.lucene.index.LeafReaderContext;
import org.elasticsearch.search.DocValueFormat;
import java.io.IOException;
public class BucketedSortForDoublesTests extends BucketedSortTestCase<BucketedSort.ForDoubles> {
@Override
public BucketedSort.ForDoubles build(SortOrder sortOrder, DocValueFormat format, double[] values) {
return new BucketedSort.ForDoubles(bigArrays(), sortOrder, format) {
@Override
public Leaf forLeaf(LeafReaderContext ctx) throws IOException {
return new Leaf() {
int index = -1;
@Override
protected boolean advanceExact(int doc) throws IOException {
index = doc;
return doc < values.length;
}
@Override
protected double docValue() throws IOException {
return values[index];
}
};
}
};
}
@Override
protected SortValue expectedSortValue(double v) {
return SortValue.from(v);
}
}


@ -0,0 +1,131 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.sort;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.Scorable;
import org.elasticsearch.search.DocValueFormat;
import java.io.IOException;
public class BucketedSortForFloatsTests extends BucketedSortTestCase<BucketedSort.ForFloats> {
@Override
public BucketedSort.ForFloats build(SortOrder sortOrder, DocValueFormat format, double[] values) {
return new BucketedSort.ForFloats(bigArrays(), sortOrder, format) {
@Override
public boolean needsScores() {
return false;
}
@Override
public Leaf forLeaf(LeafReaderContext ctx) throws IOException {
return new Leaf() {
int index = -1;
@Override
protected boolean advanceExact(int doc) throws IOException {
index = doc;
return doc < values.length;
}
@Override
protected float docValue() throws IOException {
return (float) values[index];
}
@Override
public void setScorer(Scorable scorer) {}
};
}
};
}
private BucketedSort.ForFloats buildForScores(SortOrder sortOrder, DocValueFormat format) {
return new BucketedSort.ForFloats(bigArrays(), sortOrder, format) {
@Override
public Leaf forLeaf(LeafReaderContext ctx) throws IOException {
return new Leaf() {
Scorable scorer;
@Override
public void setScorer(Scorable scorer) {
this.scorer = scorer;
}
@Override
protected boolean advanceExact(int doc) throws IOException {
return scorer.docID() == doc;
}
@Override
protected float docValue() throws IOException {
return scorer.score();
}
};
}
@Override
public boolean needsScores() {
return true;
}
};
}
@Override
protected SortValue expectedSortValue(double v) {
return SortValue.from(v);
}
public void testScorer() throws IOException {
try (BucketedSort.ForFloats sort = buildForScores(SortOrder.DESC, DocValueFormat.RAW)) {
assertTrue(sort.needsScores());
BucketedSort.Leaf leaf = sort.forLeaf(null);
MockScorable scorer = new MockScorable();
leaf.setScorer(scorer);
scorer.doc = 1;
scorer.score = 10;
assertFalse(leaf.collectIfCompetitive(0, 0));
assertTrue(leaf.collectIfCompetitive(1, 0));
assertEquals(sort.getValue(0), SortValue.from(10.0));
scorer.doc = 2;
scorer.score = 1;
assertFalse(leaf.collectIfCompetitive(2, 0));
assertEquals(sort.getValue(0), SortValue.from(10.0));
}
}
private class MockScorable extends Scorable {
private int doc;
private float score;
@Override
public float score() throws IOException {
return score;
}
@Override
public int docID() {
return doc;
}
}
}


@ -0,0 +1,55 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.sort;
import org.apache.lucene.index.LeafReaderContext;
import org.elasticsearch.search.DocValueFormat;
import java.io.IOException;
public class BucketedSortForLongsTests extends BucketedSortTestCase<BucketedSort.ForLongs> {
@Override
public BucketedSort.ForLongs build(SortOrder sortOrder, DocValueFormat format, double[] values) {
return new BucketedSort.ForLongs(bigArrays(), sortOrder, format) {
@Override
public Leaf forLeaf(LeafReaderContext ctx) throws IOException {
return new Leaf() {
int index = -1;
@Override
protected boolean advanceExact(int doc) throws IOException {
index = doc;
return doc < values.length;
}
@Override
protected long docValue() throws IOException {
return (long) values[index];
}
};
}
};
}
@Override
protected SortValue expectedSortValue(double v) {
return SortValue.from((long) v);
}
}


@ -0,0 +1,178 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.sort;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.MockBigArrays;
import org.elasticsearch.common.util.MockPageCacheRecycler;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.nullValue;
public abstract class BucketedSortTestCase<T extends BucketedSort> extends ESTestCase {
/**
* Build a {@link BucketedSort} to test. Sorts built by this method shouldn't need scores.
* @param values values to test, always sent as doubles just to have
* numbers to test. Subclasses should cast to their favorite types.
*/
protected abstract T build(SortOrder sortOrder, DocValueFormat format, double[] values);
/**
* Build the expected sort value for a value.
*/
protected abstract SortValue expectedSortValue(double v);
private T build(SortOrder order, double[] values) {
DocValueFormat format = randomFrom(DocValueFormat.RAW, DocValueFormat.BINARY, DocValueFormat.BOOLEAN);
return build(order, format, values);
}
public final void testNeverCalled() {
SortOrder order = randomFrom(SortOrder.values());
DocValueFormat format = randomFrom(DocValueFormat.RAW, DocValueFormat.BINARY, DocValueFormat.BOOLEAN);
try (T sort = build(order, format, new double[] {})) {
assertThat(sort.getOrder(), equalTo(order));
assertThat(sort.getFormat(), equalTo(format));
assertThat(sort.getValue(randomNonNegativeLong()), nullValue());
assertFalse(sort.needsScores());
}
}
public final void testEmptyLeaf() throws IOException {
try (T sort = build(randomFrom(SortOrder.values()), new double[] {})) {
BucketedSort.Leaf leaf = sort.forLeaf(null);
assertFalse(leaf.advanceExact(0));
assertThat(sort.getValue(randomNonNegativeLong()), nullValue());
}
}
public final void testSingleDoc() throws IOException {
try (T sort = build(randomFrom(SortOrder.values()), new double[] {1})) {
BucketedSort.Leaf leaf = sort.forLeaf(null);
assertTrue(leaf.collectIfCompetitive(0, 0));
assertThat(sort.getValue(0), equalTo(expectedSortValue(1)));
}
}
public void testNonCompetitive() throws IOException {
try (T sort = build(SortOrder.DESC, new double[] {2, 1})) {
BucketedSort.Leaf leaf = sort.forLeaf(null);
assertTrue(leaf.collectIfCompetitive(0, 0));
assertFalse(leaf.collectIfCompetitive(1, 0));
assertThat(sort.getValue(0), equalTo(expectedSortValue(2)));
}
}
public void testCompetitive() throws IOException {
try (T sort = build(SortOrder.DESC, new double[] {1, 2})) {
BucketedSort.Leaf leaf = sort.forLeaf(null);
assertTrue(leaf.collectIfCompetitive(0, 0));
assertTrue(leaf.collectIfCompetitive(1, 0));
assertThat(sort.getValue(0), equalTo(expectedSortValue(2)));
}
}
public void testNegativeValue() throws IOException {
try (T sort = build(SortOrder.DESC, new double[] {-1})) {
BucketedSort.Leaf leaf = sort.forLeaf(null);
assertTrue(leaf.collectIfCompetitive(0, 0));
assertThat(sort.getValue(0), equalTo(expectedSortValue(-1)));
}
}
public void testSomeBuckets() throws IOException {
try (T sort = build(SortOrder.DESC, new double[] {2, 3})) {
BucketedSort.Leaf leaf = sort.forLeaf(null);
assertTrue(leaf.collectIfCompetitive(0, 0));
assertTrue(leaf.collectIfCompetitive(0, 1));
assertTrue(leaf.collectIfCompetitive(0, 2));
assertTrue(leaf.collectIfCompetitive(1, 0));
assertThat(sort.getValue(0), equalTo(expectedSortValue(3)));
assertThat(sort.getValue(1), equalTo(expectedSortValue(2)));
assertThat(sort.getValue(2), equalTo(expectedSortValue(2)));
assertThat(sort.getValue(3), nullValue());
}
}
public void testBucketGaps() throws IOException {
try (T sort = build(SortOrder.DESC, new double[] {2})) {
BucketedSort.Leaf leaf = sort.forLeaf(null);
assertTrue(leaf.collectIfCompetitive(0, 0));
assertTrue(leaf.collectIfCompetitive(0, 2));
assertThat(sort.getValue(0), equalTo(expectedSortValue(2)));
assertThat(sort.getValue(1), nullValue());
assertThat(sort.getValue(2), equalTo(expectedSortValue(2)));
assertThat(sort.getValue(3), nullValue());
}
}
public void testBucketsOutOfOrder() throws IOException {
try (T sort = build(SortOrder.DESC, new double[] {2})) {
BucketedSort.Leaf leaf = sort.forLeaf(null);
assertTrue(leaf.collectIfCompetitive(0, 1));
assertTrue(leaf.collectIfCompetitive(0, 0));
assertThat(sort.getValue(0), equalTo(expectedSortValue(2.0)));
assertThat(sort.getValue(1), equalTo(expectedSortValue(2.0)));
assertThat(sort.getValue(2), nullValue());
}
}
public void testManyBuckets() throws IOException {
// Set the bucket values in random order
// Use Integer[] so Arrays.asList is backed by the array and the shuffle actually permutes it.
Integer[] buckets = new Integer[10000];
for (int b = 0; b < buckets.length; b++) {
buckets[b] = b;
}
Collections.shuffle(Arrays.asList(buckets), random());
double[] maxes = new double[buckets.length];
try (T sort = build(SortOrder.DESC, new double[] {2, 3, -1})) {
BucketedSort.Leaf leaf = sort.forLeaf(null);
for (int b : buckets) {
maxes[b] = 2;
assertTrue(leaf.collectIfCompetitive(0, b));
if (randomBoolean()) {
maxes[b] = 3;
assertTrue(leaf.collectIfCompetitive(1, b));
}
if (randomBoolean()) {
assertFalse(leaf.collectIfCompetitive(2, b));
}
}
for (int b = 0; b < buckets.length; b++) {
assertThat(sort.getValue(b), equalTo(expectedSortValue(maxes[b])));
}
assertThat(sort.getValue(buckets.length), nullValue());
}
}
protected BigArrays bigArrays() {
return new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), new NoneCircuitBreakerService());
}
}


@ -0,0 +1,114 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.sort;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.xcontent.ToXContentFragment;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.test.AbstractNamedWriteableTestCase;
import java.io.IOException;
import java.time.ZoneId;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.lessThan;
public class SortValueTests extends AbstractNamedWriteableTestCase<SortValue> {
private static final DocValueFormat STRICT_DATE_TIME = new DocValueFormat.DateTime(DateFormatter.forPattern("strict_date_time"),
ZoneId.of("UTC"), DateFieldMapper.Resolution.MILLISECONDS);
@Override
protected Class<SortValue> categoryClass() {
return SortValue.class;
}
@Override
protected NamedWriteableRegistry getNamedWriteableRegistry() {
return new NamedWriteableRegistry(SortValue.namedWriteables());
}
@Override
protected SortValue createTestInstance() {
return randomBoolean() ? SortValue.from(randomDouble()) : SortValue.from(randomLong());
}
@Override
protected SortValue mutateInstance(SortValue instance) throws IOException {
return randomValueOtherThanMany(mut -> instance.getKey().equals(mut.getKey()), this::createTestInstance);
}
public void testFormatDouble() {
assertThat(SortValue.from(1.0).format(DocValueFormat.RAW), equalTo("1.0"));
// The date formatter coerces the double into a long to format it
assertThat(SortValue.from(1.0).format(STRICT_DATE_TIME), equalTo("1970-01-01T00:00:00.001Z"));
}
public void testFormatLong() {
assertThat(SortValue.from(1).format(DocValueFormat.RAW), equalTo("1"));
assertThat(SortValue.from(1).format(STRICT_DATE_TIME), equalTo("1970-01-01T00:00:00.001Z"));
}
public void testToXContentDouble() {
assertThat(toXContent(SortValue.from(1.0), DocValueFormat.RAW), equalTo("{\"test\":1.0}"));
// The date formatter coerces the double into a long to format it
assertThat(toXContent(SortValue.from(1.0), STRICT_DATE_TIME), equalTo("{\"test\":\"1970-01-01T00:00:00.001Z\"}"));
}
public void testToXContentLong() {
assertThat(toXContent(SortValue.from(1), DocValueFormat.RAW), equalTo("{\"test\":1}"));
assertThat(toXContent(SortValue.from(1), STRICT_DATE_TIME), equalTo("{\"test\":\"1970-01-01T00:00:00.001Z\"}"));
}
public void testCompareDifferentTypes() {
assertThat(SortValue.from(1.0), lessThan(SortValue.from(1)));
assertThat(SortValue.from(Double.MAX_VALUE), lessThan(SortValue.from(Long.MIN_VALUE)));
assertThat(SortValue.from(1), greaterThan(SortValue.from(1.0)));
assertThat(SortValue.from(Long.MIN_VALUE), greaterThan(SortValue.from(Double.MAX_VALUE)));
}
public void testCompareDoubles() {
double r = randomDouble();
assertThat(SortValue.from(r), equalTo(SortValue.from(r)));
assertThat(SortValue.from(r), lessThan(SortValue.from(r + 1)));
assertThat(SortValue.from(r), greaterThan(SortValue.from(r - 1)));
}
public void testCompareLongs() {
long r = randomLongBetween(Long.MIN_VALUE + 1, Long.MAX_VALUE - 1);
assertThat(SortValue.from(r), equalTo(SortValue.from(r)));
assertThat(SortValue.from(r), lessThan(SortValue.from(r + 1)));
assertThat(SortValue.from(r), greaterThan(SortValue.from(r - 1)));
}
public String toXContent(SortValue sortValue, DocValueFormat format) {
return Strings.toString(new ToXContentFragment() {
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.field("test");
return sortValue.toXContent(builder, format);
}
});
}
}

View File

@ -171,11 +171,23 @@ public abstract class AggregatorTestCase extends ESTestCase {
return aggregator;
}
/**
* Create a {@linkplain SearchContext} for testing an {@link Aggregator}.
*/
protected SearchContext createSearchContext(IndexSearcher indexSearcher,
IndexSettings indexSettings,
Query query,
MultiBucketConsumer bucketConsumer,
MappedFieldType... fieldTypes) {
return createSearchContext(indexSearcher, indexSettings, query, bucketConsumer, new NoneCircuitBreakerService(), fieldTypes);
}
protected SearchContext createSearchContext(IndexSearcher indexSearcher,
IndexSettings indexSettings,
Query query,
MultiBucketConsumer bucketConsumer,
CircuitBreakerService circuitBreakerService,
MappedFieldType... fieldTypes) {
QueryCache queryCache = new DisabledQueryCache(indexSettings);
QueryCachingPolicy queryCachingPolicy = new QueryCachingPolicy() {
@Override
@ -197,14 +209,18 @@ public abstract class AggregatorTestCase extends ESTestCase {
when(searchContext.fetchPhase())
.thenReturn(new FetchPhase(Arrays.asList(new FetchSourcePhase(), new FetchDocValuesPhase())));
when(searchContext.bitsetFilterCache()).thenReturn(new BitsetFilterCache(indexSettings, mock(Listener.class)));
CircuitBreakerService circuitBreakerService = new NoneCircuitBreakerService();
IndexShard indexShard = mock(IndexShard.class);
when(indexShard.shardId()).thenReturn(new ShardId("test", "test", 0));
when(searchContext.indexShard()).thenReturn(indexShard);
when(searchContext.aggregations())
.thenReturn(new SearchContextAggregations(AggregatorFactories.EMPTY, bucketConsumer));
when(searchContext.query()).thenReturn(query);
when(searchContext.bigArrays()).thenReturn(new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), circuitBreakerService));
/*
* Always use the circuit breaking big arrays instance so that the CircuitBreakerService
* we're passed gets a chance to break.
*/
BigArrays bigArrays = new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), circuitBreakerService).withCircuitBreaking();
when(searchContext.bigArrays()).thenReturn(bigArrays);
// TODO: now just needed for top_hits, this will need to be revised for other agg unit tests:
MapperService mapperService = mapperServiceMock();
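A minimal, hypothetical sketch of how a test could use the new createSearchContext overload above. The indexSearcher, indexSettings, bucketConsumer and fieldType variables are assumed to be prepared the way the other helpers in this class prepare them; the mocked breaker service is only illustrative and not part of this commit.

    CircuitBreakerService breakerService = mock(CircuitBreakerService.class);
    when(breakerService.getBreaker(CircuitBreaker.REQUEST)).thenReturn(new NoopCircuitBreaker(CircuitBreaker.REQUEST));
    // Because the mocked context now always returns circuit-breaking MockBigArrays, the
    // service passed here gets a real chance to trip while the aggregator allocates arrays.
    SearchContext context = createSearchContext(indexSearcher, indexSettings, new MatchAllDocsQuery(),
            bucketConsumer, breakerService, fieldType);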

View File

@ -0,0 +1,50 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.test;
import org.elasticsearch.Version;
import org.elasticsearch.common.io.stream.NamedWriteable;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.Writeable;
import java.io.IOException;
/**
* Standard test case for testing the wire serialization of subclasses of {@linkplain NamedWriteable}.
* See {@link AbstractWireSerializingTestCase} for subclasses of {@link Writeable}. While you *can*
* use {@linkplain AbstractWireSerializingTestCase} to test subclasses of {@linkplain NamedWriteable},
* this superclass will also test reading and writing the name.
*/
public abstract class AbstractNamedWriteableTestCase<T extends NamedWriteable> extends AbstractWireTestCase<T> {
// Force subclasses to override to customize the registry for their NamedWriteable
@Override
protected abstract NamedWriteableRegistry getNamedWriteableRegistry();
/**
* The type of {@link NamedWriteable} to read.
*/
protected abstract Class<T> categoryClass();
@Override
protected T copyInstance(T instance, Version version) throws IOException {
return copyNamedWriteable(instance, getNamedWriteableRegistry(), categoryClass(), version);
}
}

View File

@ -19,6 +19,7 @@
package org.elasticsearch.test;
import org.elasticsearch.Version;
import org.elasticsearch.common.io.stream.NamedWriteable;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
@ -27,6 +28,7 @@ import java.io.IOException;
/**
* Standard test case for testing the wire serialization of subclasses of {@linkplain Writeable}.
* See {@link AbstractNamedWriteableTestCase} for subclasses of {@link NamedWriteable}.
*/
public abstract class AbstractWireSerializingTestCase<T extends Writeable> extends AbstractWireTestCase<T> {
/**

View File

@ -29,6 +29,7 @@ import com.carrotsearch.randomizedtesting.generators.RandomNumbers;
import com.carrotsearch.randomizedtesting.generators.RandomPicks;
import com.carrotsearch.randomizedtesting.generators.RandomStrings;
import com.carrotsearch.randomizedtesting.rules.TestRuleAdapter;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
@ -1193,6 +1194,26 @@ public abstract class ESTestCase extends LuceneTestCase {
return copyInstance(original, namedWriteableRegistry, (out, value) -> value.writeTo(out), reader, version);
}
/**
* Create a copy of an original {@link NamedWriteable} object by running it through a {@link BytesStreamOutput} and
* reading it in again using a provided {@link Writeable.Reader}.
*/
public static <T extends NamedWriteable> T copyNamedWriteable(T original, NamedWriteableRegistry namedWriteableRegistry,
Class<T> categoryClass) throws IOException {
return copyNamedWriteable(original, namedWriteableRegistry, categoryClass, Version.CURRENT);
}
/**
* Same as {@link #copyNamedWriteable(NamedWriteable, NamedWriteableRegistry, Class)} but also allows to provide
* a {@link Version} argument which will be used to write and read back the object.
*/
public static <T extends NamedWriteable> T copyNamedWriteable(T original, NamedWriteableRegistry namedWriteableRegistry,
Class<T> categoryClass, Version version) throws IOException {
return copyInstance(original, namedWriteableRegistry,
(out, value) -> out.writeNamedWriteable(value),
in -> in.readNamedWriteable(categoryClass), version);
}
protected static <T> T copyInstance(T original, NamedWriteableRegistry namedWriteableRegistry, Writeable.Writer<T> writer,
Writeable.Reader<T> reader, Version version) throws IOException {
try (BytesStreamOutput output = new BytesStreamOutput()) {
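A minimal sketch of the new copyNamedWriteable helper above, round-tripping a SortValue through its registry so the writeable's name is serialized and read back as well; assertEquals is the usual JUnit assertion.

    SortValue original = SortValue.from(2.0);
    NamedWriteableRegistry registry = new NamedWriteableRegistry(SortValue.namedWriteables());
    SortValue copy = copyNamedWriteable(original, registry, SortValue.class);
    assertEquals(original, copy);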

View File

@ -25,6 +25,8 @@ import org.elasticsearch.xpack.analytics.cumulativecardinality.CumulativeCardina
import org.elasticsearch.xpack.analytics.mapper.HistogramFieldMapper;
import org.elasticsearch.xpack.analytics.stringstats.InternalStringStats;
import org.elasticsearch.xpack.analytics.stringstats.StringStatsAggregationBuilder;
import org.elasticsearch.xpack.analytics.topmetrics.InternalTopMetrics;
import org.elasticsearch.xpack.analytics.topmetrics.TopMetricsAggregationBuilder;
import org.elasticsearch.xpack.core.XPackPlugin;
import org.elasticsearch.xpack.core.analytics.action.AnalyticsStatsAction;
@ -40,8 +42,9 @@ import static java.util.Collections.singletonList;
public class AnalyticsPlugin extends Plugin implements SearchPlugin, ActionPlugin, MapperPlugin {
// TODO this should probably become more structured once Analytics plugin has more than just one agg
// TODO this should probably become more structured
public static AtomicLong cumulativeCardUsage = new AtomicLong(0);
public static AtomicLong topMetricsUsage = new AtomicLong(0);
private final boolean transportClientMode;
public AnalyticsPlugin(Settings settings) {
@ -72,7 +75,12 @@ public class AnalyticsPlugin extends Plugin implements SearchPlugin, ActionPlugi
BoxplotAggregationBuilder.NAME,
BoxplotAggregationBuilder::new,
(ContextParser<String, AggregationBuilder>) (p, c) -> BoxplotAggregationBuilder.parse(c, p))
.addResultReader(InternalBoxplot::new)
.addResultReader(InternalBoxplot::new),
new AggregationSpec(
TopMetricsAggregationBuilder.NAME,
TopMetricsAggregationBuilder::new,
track(TopMetricsAggregationBuilder.PARSER, topMetricsUsage))
.addResultReader(InternalTopMetrics::new)
);
}
@ -98,4 +106,16 @@ public class AnalyticsPlugin extends Plugin implements SearchPlugin, ActionPlugi
public Map<String, Mapper.TypeParser> getMappers() {
return Collections.singletonMap(HistogramFieldMapper.CONTENT_TYPE, new HistogramFieldMapper.TypeParser());
}
/**
* Track successful parsing.
*/
private static <T> ContextParser<String, T> track(ContextParser<String, T> realParser, AtomicLong usage) {
return (parser, name) -> {
T value = realParser.parse(parser, name);
// Intentionally doesn't count unless the parser returns cleanly.
usage.addAndGet(1);
return value;
};
}
}

View File

@ -52,6 +52,7 @@ public class TransportAnalyticsStatsAction extends TransportNodesAction<Analytic
protected AnalyticsStatsAction.NodeResponse nodeOperation(AnalyticsStatsAction.NodeRequest request) {
AnalyticsStatsAction.NodeResponse statsResponse = new AnalyticsStatsAction.NodeResponse(clusterService.localNode());
statsResponse.setCumulativeCardinalityUsage(AnalyticsPlugin.cumulativeCardUsage.get());
statsResponse.setTopMetricsUsage(AnalyticsPlugin.topMetricsUsage.get());
return statsResponse;
}

View File

@ -24,6 +24,7 @@ import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentSubParser;
@ -37,6 +38,7 @@ import org.elasticsearch.index.fielddata.IndexFieldDataCache;
import org.elasticsearch.index.fielddata.IndexHistogramFieldData;
import org.elasticsearch.index.fielddata.ScriptDocValues;
import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
@ -47,7 +49,10 @@ import org.elasticsearch.index.mapper.TypeParsers;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.QueryShardException;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.MultiValueMode;
import org.elasticsearch.search.sort.BucketedSort;
import org.elasticsearch.search.sort.SortOrder;
import java.io.IOException;
import java.util.Iterator;
@ -260,6 +265,12 @@ public class HistogramFieldMapper extends FieldMapper {
XFieldComparatorSource.Nested nested, boolean reverse) {
throw new UnsupportedOperationException("can't sort on the [" + CONTENT_TYPE + "] field");
}
@Override
public BucketedSort newBucketedSort(BigArrays bigArrays, Object missingValue, MultiValueMode sortMode,
Nested nested, SortOrder sortOrder, DocValueFormat format) {
throw new IllegalArgumentException("can't sort on the [" + CONTENT_TYPE + "] field");
}
};
}
};

View File

@ -0,0 +1,178 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.analytics.topmetrics;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.search.sort.SortValue;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.Objects;
public class InternalTopMetrics extends InternalNumericMetricsAggregation.MultiValue {
private final DocValueFormat sortFormat;
private final SortOrder sortOrder;
private final SortValue sortValue;
private final String metricName;
private final double metricValue;
public InternalTopMetrics(String name, DocValueFormat sortFormat, SortOrder sortOrder, @Nullable SortValue sortValue, String metricName,
double metricValue, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
super(name, pipelineAggregators, metaData);
this.sortFormat = sortFormat;
this.sortOrder = sortOrder;
this.sortValue = sortValue;
this.metricName = metricName;
this.metricValue = metricValue;
}
static InternalTopMetrics buildEmptyAggregation(String name, String metricField,
List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
return new InternalTopMetrics(name, DocValueFormat.RAW, SortOrder.ASC, null, metricField, Double.NaN, pipelineAggregators,
metaData);
}
/**
* Read from a stream.
*/
public InternalTopMetrics(StreamInput in) throws IOException {
super(in);
sortFormat = in.readNamedWriteable(DocValueFormat.class);
sortOrder = SortOrder.readFromStream(in);
sortValue = in.readOptionalNamedWriteable(SortValue.class);
metricName = in.readString();
metricValue = in.readDouble();
}
@Override
protected void doWriteTo(StreamOutput out) throws IOException {
out.writeNamedWriteable(sortFormat);
sortOrder.writeTo(out);
out.writeOptionalNamedWriteable(sortValue);
out.writeString(metricName);
out.writeDouble(metricValue);
}
@Override
public String getWriteableName() {
return TopMetricsAggregationBuilder.NAME;
}
@Override
public Object getProperty(List<String> path) {
if (path.isEmpty()) {
return this;
}
if (path.size() == 1 && metricName.contentEquals(path.get(0))) {
return metricValue;
}
throw new IllegalArgumentException("path not supported for [" + getName() + "]: " + path);
}
@Override
public InternalTopMetrics reduce(List<InternalAggregation> aggregations, ReduceContext reduceContext) {
if (false == isMapped()) {
return this;
}
DocValueFormat bestSortFormat = sortFormat;
SortValue bestSortValue = sortValue;
double bestMetricValue = metricValue;
int reverseMul = sortOrder.reverseMul();
for (InternalAggregation agg : aggregations) {
InternalTopMetrics result = (InternalTopMetrics) agg;
if (result.sortValue != null && reverseMul * bestSortValue.compareTo(result.sortValue) > 0) {
bestSortFormat = result.sortFormat;
bestSortValue = result.sortValue;
bestMetricValue = result.metricValue;
}
}
return new InternalTopMetrics(getName(), bestSortFormat, sortOrder, bestSortValue, metricName, bestMetricValue,
pipelineAggregators(), getMetaData());
}
@Override
public boolean isMapped() {
return sortValue != null;
}
@Override
public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
builder.startArray("top");
if (sortValue != null) {
builder.startObject();
{
builder.startArray("sort");
sortValue.toXContent(builder, sortFormat);
builder.endArray();
builder.startObject("metrics");
{
builder.field(metricName, Double.isNaN(metricValue) ? null : metricValue);
}
builder.endObject();
}
builder.endObject();
}
builder.endArray();
return builder;
}
@Override
public int hashCode() {
return Objects.hash(super.hashCode(), sortFormat, sortOrder, sortValue, metricName, metricValue);
}
@Override
public boolean equals(Object obj) {
if (super.equals(obj) == false) return false;
InternalTopMetrics other = (InternalTopMetrics) obj;
return sortFormat.equals(other.sortFormat) &&
sortOrder.equals(other.sortOrder) &&
Objects.equals(sortValue, other.sortValue) &&
metricName.equals(other.metricName) &&
metricValue == other.metricValue;
}
@Override
public double value(String name) {
if (metricName.equals(name)) {
return metricValue;
}
throw new IllegalArgumentException("known metric [" + name + "]");
}
DocValueFormat getSortFormat() {
return sortFormat;
}
SortOrder getSortOrder() {
return sortOrder;
}
SortValue getSortValue() {
return sortValue;
}
String getFormattedSortValue() {
return sortValue.format(sortFormat);
}
String getMetricName() {
return metricName;
}
double getMetricValue() {
return metricValue;
}
}

View File

@ -0,0 +1,130 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.analytics.topmetrics;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ContextParser;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.aggregations.AbstractAggregationBuilder;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.support.MultiValuesSourceFieldConfig;
import org.elasticsearch.search.sort.SortBuilder;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.search.builder.SearchSourceBuilder.SORT_FIELD;
public class TopMetricsAggregationBuilder extends AbstractAggregationBuilder<TopMetricsAggregationBuilder> {
public static final String NAME = "top_metrics";
public static final ParseField METRIC_FIELD = new ParseField("metric");
public static final ConstructingObjectParser<TopMetricsAggregationBuilder, String> PARSER = new ConstructingObjectParser<>(NAME,
false, (args, name) -> {
@SuppressWarnings("unchecked")
List<SortBuilder<?>> sorts = (List<SortBuilder<?>>) args[0];
MultiValuesSourceFieldConfig metricField = (MultiValuesSourceFieldConfig) args[1];
return new TopMetricsAggregationBuilder(name, sorts, metricField);
});
static {
PARSER.declareField(constructorArg(), (p, n) -> SortBuilder.fromXContent(p), SORT_FIELD,
ObjectParser.ValueType.OBJECT_ARRAY_OR_STRING);
ContextParser<Void, MultiValuesSourceFieldConfig.Builder> metricParser = MultiValuesSourceFieldConfig.PARSER.apply(true, false);
PARSER.declareObject(constructorArg(), (p, n) -> metricParser.parse(p, null).build(), METRIC_FIELD);
}
private final List<SortBuilder<?>> sortBuilders;
// TODO MultiValuesSourceFieldConfig has more things than we support and fewer things than we want to support
private final MultiValuesSourceFieldConfig metricField;
/**
* Ctor for parsing.
*/
public TopMetricsAggregationBuilder(String name, List<SortBuilder<?>> sortBuilders, MultiValuesSourceFieldConfig metricField) {
super(name);
if (sortBuilders.size() != 1) {
throw new IllegalArgumentException("[sort] must contain exactly one sort");
}
this.sortBuilders = sortBuilders;
this.metricField = metricField;
}
/**
* Cloning ctor for reducing.
*/
public TopMetricsAggregationBuilder(TopMetricsAggregationBuilder clone, AggregatorFactories.Builder factoriesBuilder,
Map<String, Object> metaData) {
super(clone, factoriesBuilder, metaData);
this.sortBuilders = clone.sortBuilders;
this.metricField = clone.metricField;
}
/**
* Read from a stream.
*/
public TopMetricsAggregationBuilder(StreamInput in) throws IOException {
super(in);
@SuppressWarnings("unchecked")
List<SortBuilder<?>> sortBuilders = (List<SortBuilder<?>>) (List<?>) in.readNamedWriteableList(SortBuilder.class);
this.sortBuilders = sortBuilders;
this.metricField = new MultiValuesSourceFieldConfig(in);
}
@Override
protected void doWriteTo(StreamOutput out) throws IOException {
out.writeNamedWriteableList(sortBuilders);
metricField.writeTo(out);
}
@Override
protected AggregationBuilder shallowCopy(AggregatorFactories.Builder factoriesBuilder, Map<String, Object> metaData) {
return new TopMetricsAggregationBuilder(this, factoriesBuilder, metaData);
}
@Override
protected AggregatorFactory doBuild(QueryShardContext queryShardContext, AggregatorFactory parent, Builder subFactoriesBuilder)
throws IOException {
return new TopMetricsAggregatorFactory(name, queryShardContext, parent, subFactoriesBuilder, metaData, sortBuilders, metricField);
}
@Override
protected XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
{
builder.startArray(SORT_FIELD.getPreferredName());
for (SortBuilder<?> sort : sortBuilders) {
sort.toXContent(builder, params);
}
builder.endArray();
builder.field(METRIC_FIELD.getPreferredName(), metricField);
}
builder.endObject();
return builder;
}
@Override
public String getType() {
return NAME;
}
List<SortBuilder<?>> getSortBuilders() {
return sortBuilders;
}
MultiValuesSourceFieldConfig getMetricField() {
return metricField;
}
}
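A minimal sketch of building the aggregation server side with the constructor above. The "timestamp" and "voltage" field names are hypothetical, and the single-element sort list matches the "exactly one sort" validation in the parsing ctor.

    List<SortBuilder<?>> sorts = singletonList(new FieldSortBuilder("timestamp").order(SortOrder.DESC));
    MultiValuesSourceFieldConfig metric = new MultiValuesSourceFieldConfig.Builder()
            .setFieldName("voltage")
            .build();
    TopMetricsAggregationBuilder latest = new TopMetricsAggregationBuilder("latest", sorts, metric);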

View File

@ -0,0 +1,133 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.analytics.topmetrics;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.Scorable;
import org.apache.lucene.search.ScoreMode;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.util.DoubleArray;
import org.elasticsearch.index.fielddata.NumericDoubleValues;
import org.elasticsearch.search.MultiValueMode;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.LeafBucketCollector;
import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregator;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.sort.BucketedSort;
import org.elasticsearch.search.sort.SortValue;
import java.io.IOException;
import java.util.List;
import java.util.Map;
/**
* Collects the {@code top_metrics} aggregation, which functions like a
* memory-efficient but limited version of the {@code top_hits} aggregation.
* Amortized, each bucket should take something like 16 bytes. Because of this,
* unlike {@code top_hits}, the buckets containing this aggregation can be sorted by it.
*
* This extends {@linkplain NumericMetricsAggregator.MultiValue} as a compromise
* to allow sorting on the metric. Right now it only collects a single metric
* but we expect it to collect a list of them in the future. Also in the future
* we expect it to allow collecting non-numeric metrics which'll change how we
* do the inheritance. Finally, we also expect it to allow collecting more than
* one document worth of metrics. Once that happens we'll need to come up with
* some way to pick which document's metrics to use for the sort.
*/
class TopMetricsAggregator extends NumericMetricsAggregator.MultiValue {
private final BucketedSort sort;
private final String metricName;
private final ValuesSource.Numeric metricValueSource;
private DoubleArray values;
TopMetricsAggregator(String name, SearchContext context, Aggregator parent, List<PipelineAggregator> pipelineAggregators,
Map<String, Object> metaData, BucketedSort sort,
String metricName, ValuesSource.Numeric metricValueSource) throws IOException {
super(name, context, parent, pipelineAggregators, metaData);
this.sort = sort;
this.metricName = metricName;
this.metricValueSource = metricValueSource;
if (metricValueSource != null) {
values = context.bigArrays().newDoubleArray(1, false);
values.fill(0, values.size(), Double.NaN);
}
}
@Override
public boolean hasMetric(String name) {
return metricName.equals(name);
}
@Override
public double metric(String name, long owningBucketOrd) {
return values.get(owningBucketOrd);
}
@Override
public ScoreMode scoreMode() {
boolean needs = (sort != null && sort.needsScores()) || (metricValueSource != null && metricValueSource.needsScores());
return needs ? ScoreMode.COMPLETE : ScoreMode.COMPLETE_NO_SCORES;
}
@Override
public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, LeafBucketCollector sub) throws IOException {
assert sub == LeafBucketCollector.NO_OP_COLLECTOR : "Expected noop but was " + sub.toString();
if (metricValueSource == null) {
return LeafBucketCollector.NO_OP_COLLECTOR;
}
BucketedSort.Leaf leafSort = sort.forLeaf(ctx);
// TODO allow configuration of value mode
NumericDoubleValues metricValues = MultiValueMode.AVG.select(metricValueSource.doubleValues(ctx));
return new LeafBucketCollector() {
@Override
public void collect(int doc, long bucket) throws IOException {
if (leafSort.collectIfCompetitive(doc, bucket)) {
if (bucket >= values.size()) {
long oldSize = values.size();
values = context.bigArrays().grow(values, bucket + 1);
values.fill(oldSize, values.size(), Double.NaN);
}
double metricValue = metricValues.advanceExact(doc) ? metricValues.doubleValue() : Double.NaN;
values.set(bucket, metricValue);
}
}
@Override
public void setScorer(Scorable s) throws IOException {
leafSort.setScorer(s);
}
};
}
@Override
public InternalAggregation buildAggregation(long bucket) throws IOException {
if (metricValueSource == null) {
return buildEmptyAggregation();
}
double metricValue = values.get(bucket);
SortValue sortValue = sort.getValue(bucket);
return new InternalTopMetrics(name, sort.getFormat(), sort.getOrder(), sortValue, metricName, metricValue, pipelineAggregators(),
metaData());
}
@Override
public InternalTopMetrics buildEmptyAggregation() {
// The sort format and sort order aren't used in reduction so we pass the simplest thing.
return InternalTopMetrics.buildEmptyAggregation(name, metricName, pipelineAggregators(),
metaData());
}
@Override
public void doClose() {
Releasables.close(sort, values);
}
}
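The class Javadoc above notes that, unlike top_hits, the containing buckets can be sorted by this aggregation because it is a NumericMetricsAggregator.MultiValue. A minimal, hypothetical sketch of what that enables; the field names and the BucketOrder usage are assumptions, not part of this commit.

    TermsAggregationBuilder hosts = new TermsAggregationBuilder("hosts", ValueType.STRING)
            .field("host")
            // Order the terms buckets by the metric taken from each bucket's top document.
            .order(BucketOrder.aggregation("latest.voltage", false))
            .subAggregation(new TopMetricsAggregationBuilder("latest",
                    singletonList(new FieldSortBuilder("timestamp").order(SortOrder.DESC)),
                    new MultiValuesSourceFieldConfig.Builder().setFieldName("voltage").build()));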

View File

@ -0,0 +1,59 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.analytics.topmetrics;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.aggregations.support.MultiValuesSourceFieldConfig;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.sort.BucketedSort;
import org.elasticsearch.search.sort.SortBuilder;
import java.io.IOException;
import java.util.List;
import java.util.Map;
public class TopMetricsAggregatorFactory extends AggregatorFactory {
private final List<SortBuilder<?>> sortBuilders;
private final MultiValuesSourceFieldConfig metricField;
public TopMetricsAggregatorFactory(String name, QueryShardContext queryShardContext, AggregatorFactory parent,
Builder subFactoriesBuilder, Map<String, Object> metaData, List<SortBuilder<?>> sortBuilders,
MultiValuesSourceFieldConfig metricField) throws IOException {
super(name, queryShardContext, parent, subFactoriesBuilder, metaData);
this.sortBuilders = sortBuilders;
this.metricField = metricField;
}
@Override
protected TopMetricsAggregator createInternal(SearchContext searchContext, Aggregator parent, boolean collectsFromSingleBucket,
List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
ValuesSourceConfig<ValuesSource.Numeric> metricFieldSource = ValuesSourceConfig.resolve(queryShardContext, ValueType.NUMERIC,
metricField.getFieldName(), metricField.getScript(), metricField.getMissing(), metricField.getTimeZone(), null);
ValuesSource.Numeric metricValueSource = metricFieldSource.toValuesSource(queryShardContext);
if (metricValueSource == null) {
return createUnmapped(searchContext, parent, pipelineAggregators, metaData);
}
BucketedSort bucketedSort = sortBuilders.get(0).buildBucketedSort(searchContext.getQueryShardContext());
return new TopMetricsAggregator(name, searchContext, parent, pipelineAggregators, metaData, bucketedSort,
metricField.getFieldName(), metricValueSource);
}
private TopMetricsAggregator createUnmapped(SearchContext searchContext, Aggregator parent,
List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
return new TopMetricsAggregator(name, searchContext, parent, pipelineAggregators, metaData, null, metricField.getFieldName(),
null);
}
}

View File

@ -0,0 +1,82 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.analytics.topmetrics;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.search.sort.SortValue;
import org.elasticsearch.test.ESTestCase;
import java.util.Arrays;
import static java.util.Collections.emptyList;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.sameInstance;
/**
* Some explicit and simple tests for reducing {@link InternalTopMetrics}.
* All of the randomized testing, including randomized reduce testing is
* in {@link InternalTopMetricsTests}.
*/
public class InternalTopMetricsReduceTests extends ESTestCase {
public void testAllEmpty() {
InternalTopMetrics first = buildEmpty();
InternalTopMetrics reduced = reduce(first, buildEmpty(), buildEmpty(), buildEmpty());
assertThat(reduced, sameInstance(first));
}
public void testFirstEmpty() {
InternalTopMetrics first = buildEmpty();
InternalTopMetrics reduced = reduce(first, buildFilled(SortValue.from(1), 1.0));
assertThat(reduced, sameInstance(first));
}
public void testMany() {
InternalTopMetrics first = buildFilled(SortValue.from(2.0), randomDouble());
InternalTopMetrics min = buildFilled(SortValue.from(1.0), randomDouble());
InternalTopMetrics max = buildFilled(SortValue.from(7.0), randomDouble());
InternalTopMetrics[] metrics = new InternalTopMetrics[] {
first, max, min, buildEmpty(), buildEmpty(),
};
InternalTopMetrics winner = first.getSortOrder() == SortOrder.ASC ? min : max;
InternalTopMetrics reduced = reduce(metrics);
assertThat(reduced.getName(), equalTo("test"));
assertThat(reduced.getSortValue(), equalTo(winner.getSortValue()));
assertThat(reduced.getSortFormat(), equalTo(winner.getSortFormat()));
assertThat(reduced.getSortOrder(), equalTo(first.getSortOrder()));
assertThat(reduced.getMetricValue(), equalTo(winner.getMetricValue()));
assertThat(reduced.getMetricName(), equalTo("test"));
}
public void testDifferentTypes() {
InternalTopMetrics doubleMetrics = buildFilled(SortValue.from(100.0), randomDouble());
InternalTopMetrics longMetrics = buildFilled(SortValue.from(7), randomDouble());
InternalTopMetrics reduced = reduce(doubleMetrics, longMetrics);
// Doubles sort first.
InternalTopMetrics winner = doubleMetrics.getSortOrder() == SortOrder.ASC ? doubleMetrics : longMetrics;
assertThat(reduced.getName(), equalTo("test"));
assertThat(reduced.getSortValue(), equalTo(winner.getSortValue()));
assertThat(reduced.getSortFormat(), equalTo(winner.getSortFormat()));
assertThat(reduced.getSortOrder(), equalTo(doubleMetrics.getSortOrder()));
assertThat(reduced.getMetricValue(), equalTo(winner.getMetricValue()));
assertThat(reduced.getMetricName(), equalTo("test"));
}
private InternalTopMetrics buildEmpty() {
return InternalTopMetrics.buildEmptyAggregation("test", "test", emptyList(), null);
}
private InternalTopMetrics buildFilled(SortValue sortValue, double metricValue) {
DocValueFormat sortFormat = randomFrom(DocValueFormat.RAW, DocValueFormat.BINARY, DocValueFormat.BOOLEAN, DocValueFormat.IP);
SortOrder sortOrder = randomFrom(SortOrder.values());
return new InternalTopMetrics("test", sortFormat, sortOrder, sortValue, "test", metricValue, emptyList(), null);
}
private InternalTopMetrics reduce(InternalTopMetrics... results) {
return results[0].reduce(Arrays.asList(results), null);
}
}

View File

@ -0,0 +1,198 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.analytics.topmetrics;
import org.elasticsearch.client.analytics.ParsedTopMetrics;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.common.time.DateFormatter;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.Aggregation;
import org.elasticsearch.search.aggregations.ParsedAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.search.sort.SortValue;
import org.elasticsearch.test.InternalAggregationTestCase;
import java.io.IOException;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.function.Predicate;
import static java.util.Collections.emptyList;
import static java.util.Collections.singletonList;
import static java.util.Collections.singletonMap;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasSize;
public class InternalTopMetricsTests extends InternalAggregationTestCase<InternalTopMetrics> {
public void testEmptyIsNotMapped() {
InternalTopMetrics empty = InternalTopMetrics.buildEmptyAggregation(
randomAlphaOfLength(5), randomAlphaOfLength(2), emptyList(), null);
assertFalse(empty.isMapped());
}
public void testNonEmptyIsMapped() {
InternalTopMetrics nonEmpty = randomValueOtherThanMany(tm -> tm.getSortValue() == null, this::createTestInstance);
assertTrue(nonEmpty.isMapped());
}
public void testToXContentDoubleSortValue() throws IOException {
InternalTopMetrics tm = new InternalTopMetrics("test", DocValueFormat.RAW, randomFrom(SortOrder.values()), SortValue.from(1.0),
"test", 1.0, emptyList(), null);
assertThat(Strings.toString(tm, true, true), equalTo(
"{\n" +
" \"test\" : {\n" +
" \"top\" : [\n" +
" {\n" +
" \"sort\" : [\n" +
" 1.0\n" +
" ],\n" +
" \"metrics\" : {\n" +
" \"test\" : 1.0\n" +
" }\n" +
" }\n" +
" ]\n" +
" }\n" +
"}"));
}
public void testToXContentDateSortValue() throws IOException {
DocValueFormat sortFormat = new DocValueFormat.DateTime(DateFormatter.forPattern("strict_date_time"), ZoneId.of("UTC"),
DateFieldMapper.Resolution.MILLISECONDS);
SortValue sortValue = SortValue.from(ZonedDateTime.parse("2007-12-03T10:15:30Z").toInstant().toEpochMilli());
InternalTopMetrics tm = new InternalTopMetrics("test", sortFormat, randomFrom(SortOrder.values()), sortValue, "test", 1.0,
emptyList(), null);
assertThat(Strings.toString(tm, true, true), equalTo(
"{\n" +
" \"test\" : {\n" +
" \"top\" : [\n" +
" {\n" +
" \"sort\" : [\n" +
" \"2007-12-03T10:15:30.000Z\"\n" +
" ],\n" +
" \"metrics\" : {\n" +
" \"test\" : 1.0\n" +
" }\n" +
" }\n" +
" ]\n" +
" }\n" +
"}"));
}
@Override
protected List<NamedXContentRegistry.Entry> getNamedXContents() {
List<NamedXContentRegistry.Entry> result = new ArrayList<>(super.getNamedXContents());
result.add(new NamedXContentRegistry.Entry(Aggregation.class, new ParseField(TopMetricsAggregationBuilder.NAME),
(p, c) -> ParsedTopMetrics.PARSER.parse(p, (String) c)));
return result;
}
@Override
protected InternalTopMetrics createTestInstance(String name, List<PipelineAggregator> pipelineAggregators,
Map<String, Object> metaData) {
DocValueFormat sortFormat = randomNumericDocValueFormat();
SortOrder sortOrder = randomFrom(SortOrder.values());
SortValue sortValue = randomSortValue();
String metricName = randomAlphaOfLength(5);
double metricValue = randomDouble();
return new InternalTopMetrics(name, sortFormat, sortOrder, sortValue, metricName, metricValue, pipelineAggregators, metaData);
}
@Override
protected InternalTopMetrics mutateInstance(InternalTopMetrics instance) throws IOException {
String name = instance.getName();
DocValueFormat sortFormat = instance.getSortFormat();
SortOrder sortOrder = instance.getSortOrder();
SortValue sortValue = instance.getSortValue();
String metricName = instance.getMetricName();
double metricValue = instance.getMetricValue();
switch (randomInt(5)) {
case 0:
name = randomAlphaOfLength(6);
break;
case 1:
sortFormat = randomValueOtherThan(sortFormat, InternalAggregationTestCase::randomNumericDocValueFormat);
break;
case 2:
sortOrder = sortOrder == SortOrder.ASC ? SortOrder.DESC : SortOrder.ASC;
break;
case 3:
sortValue = randomValueOtherThan(sortValue, InternalTopMetricsTests::randomSortValue);
break;
case 4:
metricName = randomAlphaOfLength(6);
break;
case 5:
metricValue = randomValueOtherThan(metricValue, () -> randomDouble());
break;
default:
throw new IllegalArgumentException("bad mutation");
}
return new InternalTopMetrics(name, sortFormat, sortOrder, sortValue, metricName, metricValue, emptyList(), null);
}
@Override
protected Reader<InternalTopMetrics> instanceReader() {
return InternalTopMetrics::new;
}
@Override
protected void assertFromXContent(InternalTopMetrics aggregation, ParsedAggregation parsedAggregation) throws IOException {
ParsedTopMetrics parsed = (ParsedTopMetrics) parsedAggregation;
assertThat(parsed.getName(), equalTo(aggregation.getName()));
if (false == aggregation.isMapped()) {
assertThat(parsed.getTopMetrics(), hasSize(0));
return;
}
assertThat(parsed.getTopMetrics(), hasSize(1));
ParsedTopMetrics.TopMetrics parsedTop = parsed.getTopMetrics().get(0);
Object expectedSort = aggregation.getSortFormat() == DocValueFormat.RAW ?
aggregation.getSortValue().getKey() : aggregation.getFormattedSortValue();
assertThat(parsedTop.getSort(), equalTo(singletonList(expectedSort)));
assertThat(parsedTop.getMetrics(), equalTo(singletonMap(aggregation.getMetricName(), aggregation.getMetricValue())));
}
@Override
protected void assertReduced(InternalTopMetrics reduced, List<InternalTopMetrics> inputs) {
InternalTopMetrics first = inputs.get(0);
InternalTopMetrics winner = inputs.stream()
.filter(tm -> tm.isMapped())
.min((lhs, rhs) -> first.getSortOrder().reverseMul() * lhs.getSortValue().compareTo(rhs.getSortValue()))
.get();
assertThat(reduced.getName(), equalTo(first.getName()));
assertThat(reduced.getSortValue(), equalTo(winner.getSortValue()));
assertThat(reduced.getSortFormat(), equalTo(winner.getSortFormat()));
assertThat(reduced.getSortOrder(), equalTo(first.getSortOrder()));
assertThat(reduced.getMetricValue(), equalTo(winner.getMetricValue()));
assertThat(reduced.getMetricName(), equalTo(first.getMetricName()));
}
private static SortValue randomSortValue() {
switch (between(0, 2)) {
case 0:
return null;
case 1:
return SortValue.from(randomLong());
case 2:
return SortValue.from(randomDouble());
default:
throw new IllegalArgumentException("unsupported random sort");
}
}
@Override
protected Predicate<String> excludePathsFromXContentInsertion() {
return path -> path.endsWith(".metrics");
}
}

View File

@ -0,0 +1,102 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.analytics.topmetrics;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.BaseAggregationBuilder;
import org.elasticsearch.search.aggregations.support.MultiValuesSourceFieldConfig;
import org.elasticsearch.search.sort.FieldSortBuilder;
import org.elasticsearch.search.sort.SortBuilder;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.AbstractSerializingTestCase;
import org.elasticsearch.test.AbstractXContentTestCase;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import static java.util.Collections.singletonList;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasSize;
public class TopMetricsAggregationBuilderTests extends AbstractSerializingTestCase<TopMetricsAggregationBuilder> {
@Override
protected NamedWriteableRegistry getNamedWriteableRegistry() {
return new NamedWriteableRegistry(Arrays.asList(
new NamedWriteableRegistry.Entry(SortBuilder.class, FieldSortBuilder.NAME, FieldSortBuilder::new)));
}
@Override
protected NamedXContentRegistry xContentRegistry() {
return new NamedXContentRegistry(Arrays.asList(
new NamedXContentRegistry.Entry(BaseAggregationBuilder.class, new ParseField(TopMetricsAggregationBuilder.NAME),
(p, c) -> TopMetricsAggregationBuilder.PARSER.parse(p, (String) c))));
}
@Override
protected TopMetricsAggregationBuilder doParseInstance(XContentParser parser) throws IOException {
assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT));
assertThat(parser.nextToken(), equalTo(XContentParser.Token.FIELD_NAME));
String name = parser.currentName();
assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT));
assertThat(parser.nextToken(), equalTo(XContentParser.Token.FIELD_NAME));
assertThat(parser.currentName(), equalTo("top_metrics"));
TopMetricsAggregationBuilder parsed = TopMetricsAggregationBuilder.PARSER.apply(parser, name);
assertThat(parser.nextToken(), equalTo(XContentParser.Token.END_OBJECT));
assertThat(parser.nextToken(), equalTo(XContentParser.Token.END_OBJECT));
return parsed;
}
@Override
protected Reader<TopMetricsAggregationBuilder> instanceReader() {
return TopMetricsAggregationBuilder::new;
}
@Override
protected TopMetricsAggregationBuilder createTestInstance() {
List<SortBuilder<?>> sortBuilders = singletonList(
new FieldSortBuilder(randomAlphaOfLength(5)).order(randomFrom(SortOrder.values())));
MultiValuesSourceFieldConfig.Builder metricField = new MultiValuesSourceFieldConfig.Builder();
metricField.setFieldName(randomAlphaOfLength(5)).setMissing(1.0);
return new TopMetricsAggregationBuilder(randomAlphaOfLength(5), sortBuilders, metricField.build());
}
public void testClientBuilder() throws IOException {
AbstractXContentTestCase.xContentTester(
this::createParser, this::createTestInstance, this::toXContentThroughClientBuilder,
p -> {
p.nextToken();
AggregatorFactories.Builder b = AggregatorFactories.parseAggregators(p);
assertThat(b.getAggregatorFactories(), hasSize(1));
assertThat(b.getPipelineAggregatorFactories(), empty());
return (TopMetricsAggregationBuilder) b.getAggregatorFactories().iterator().next();
} ).test();
}
private void toXContentThroughClientBuilder(TopMetricsAggregationBuilder serverBuilder, XContentBuilder builder) throws IOException {
builder.startObject();
createClientBuilder(serverBuilder).toXContent(builder, ToXContent.EMPTY_PARAMS);
builder.endObject();
}
private org.elasticsearch.client.analytics.TopMetricsAggregationBuilder createClientBuilder(
TopMetricsAggregationBuilder serverBuilder) {
assertThat(serverBuilder.getSortBuilders(), hasSize(1));
return new org.elasticsearch.client.analytics.TopMetricsAggregationBuilder(
serverBuilder.getName(),
serverBuilder.getSortBuilders().get(0),
serverBuilder.getMetricField().getFieldName());
}
}
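A minimal, hypothetical sketch of sending the aggregation through the high-level REST client builder that the test above converts to; the index and field names are assumptions.

    SearchSourceBuilder source = new SearchSourceBuilder()
            .size(0)
            .aggregation(new org.elasticsearch.client.analytics.TopMetricsAggregationBuilder(
                    "latest", new FieldSortBuilder("timestamp").order(SortOrder.DESC), "voltage"));
    SearchRequest search = new SearchRequest("sensors").source(source);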

View File

@ -0,0 +1,498 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.analytics.topmetrics;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.LatLonDocValuesField;
import org.apache.lucene.document.SortedNumericDocValuesField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.BoostQuery;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.NumericUtils;
import org.elasticsearch.common.CheckedConsumer;
import org.elasticsearch.common.breaker.CircuitBreaker;
import org.elasticsearch.common.breaker.CircuitBreakingException;
import org.elasticsearch.common.breaker.NoopCircuitBreaker;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.index.fielddata.ScriptDocValues;
import org.elasticsearch.index.mapper.GeoPointFieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.NumberFieldMapper;
import org.elasticsearch.index.mapper.NumberFieldMapper.NumberType;
import org.elasticsearch.index.mapper.TextFieldMapper;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.script.MockScriptEngine;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptEngine;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorTestCase;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.LeafBucketCollector;
import org.elasticsearch.search.aggregations.MultiBucketConsumerService.MultiBucketConsumer;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
import org.elasticsearch.search.aggregations.support.MultiValuesSourceFieldConfig;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.sort.FieldSortBuilder;
import org.elasticsearch.search.sort.GeoDistanceSortBuilder;
import org.elasticsearch.search.sort.ScoreSortBuilder;
import org.elasticsearch.search.sort.ScriptSortBuilder;
import org.elasticsearch.search.sort.ScriptSortBuilder.ScriptSortType;
import org.elasticsearch.search.sort.SortBuilder;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.search.sort.SortValue;
import java.io.IOException;
import java.util.Arrays;
import java.util.Map;
import static java.util.Collections.emptyMap;
import static java.util.Collections.singletonList;
import static java.util.Collections.singletonMap;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.notANumber;
import static org.hamcrest.Matchers.nullValue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class TopMetricsAggregatorTests extends AggregatorTestCase {
public void testNoDocs() throws IOException {
InternalTopMetrics result = collect(simpleBuilder(), new MatchAllDocsQuery(), writer -> {},
doubleFields());
assertThat(result.getSortFormat(), equalTo(DocValueFormat.RAW));
assertThat(result.getSortOrder(), equalTo(SortOrder.ASC));
assertThat(result.getSortValue(), nullValue());
assertThat(result.getMetricValue(), notANumber());
}
public void testUnmappedMetric() throws IOException {
InternalTopMetrics result = collect(simpleBuilder(), new MatchAllDocsQuery(), writer -> {
writer.addDocument(singletonList(doubleField("s", 1.0)));
},
numberFieldType(NumberType.DOUBLE, "s"));
assertThat(result.getSortOrder(), equalTo(SortOrder.ASC));
assertThat(result.getSortValue(), nullValue());
assertThat(result.getMetricValue(), notANumber());
}
public void testMissingValueForMetric() throws IOException {
InternalTopMetrics result = collect(simpleBuilder(), new MatchAllDocsQuery(), writer -> {
writer.addDocument(singletonList(doubleField("s", 1.0)));
},
doubleFields());
assertThat(result.getSortOrder(), equalTo(SortOrder.ASC));
assertThat(result.getSortValue(), equalTo(SortValue.from(1.0)));
assertThat(result.getMetricValue(), notANumber());
}
public void testActualValueForMetric() throws IOException {
InternalTopMetrics result = collect(simpleBuilder(), new MatchAllDocsQuery(), writer -> {
writer.addDocument(Arrays.asList(doubleField("s", 1.0), doubleField("m", 2.0)));
},
doubleFields());
assertThat(result.getSortOrder(), equalTo(SortOrder.ASC));
assertThat(result.getSortValue(), equalTo(SortValue.from(1.0)));
assertThat(result.getMetricValue(), equalTo(2.0d));
}
private InternalTopMetrics collectFromDoubles(TopMetricsAggregationBuilder builder) throws IOException {
return collect(builder, new MatchAllDocsQuery(), writer -> {
writer.addDocument(Arrays.asList(doubleField("s", 1.0), doubleField("m", 2.0)));
writer.addDocument(Arrays.asList(doubleField("s", 2.0), doubleField("m", 3.0)));
},
doubleFields());
}
public void testSortByDoubleAscending() throws IOException {
InternalTopMetrics result = collectFromDoubles(simpleBuilder(new FieldSortBuilder("s").order(SortOrder.ASC)));
assertThat(result.getSortOrder(), equalTo(SortOrder.ASC));
assertThat(result.getSortValue(), equalTo(SortValue.from(1.0)));
assertThat(result.getMetricValue(), equalTo(2.0d));
}
public void testSortByDoubleDescending() throws IOException {
InternalTopMetrics result = collectFromDoubles(simpleBuilder(new FieldSortBuilder("s").order(SortOrder.DESC)));
assertThat(result.getSortOrder(), equalTo(SortOrder.DESC));
assertThat(result.getSortValue(), equalTo(SortValue.from(2.0)));
assertThat(result.getMetricValue(), equalTo(3.0d));
}
public void testSortByDoubleCastToLong() throws IOException {
InternalTopMetrics result = collectFromDoubles(simpleBuilder(new FieldSortBuilder("s").setNumericType("long")));
assertThat(result.getSortOrder(), equalTo(SortOrder.ASC));
assertThat(result.getSortValue(), equalTo(SortValue.from(1)));
assertThat(result.getMetricValue(), equalTo(2.0d));
}
public void testSortByFloatAscending() throws IOException {
TopMetricsAggregationBuilder builder = simpleBuilder(new FieldSortBuilder("s").order(SortOrder.ASC));
InternalTopMetrics result = collect(builder, new MatchAllDocsQuery(), writer -> {
writer.addDocument(Arrays.asList(floatField("s", 1.0F), doubleField("m", 2.0)));
writer.addDocument(Arrays.asList(floatField("s", 2.0F), doubleField("m", 3.0)));
},
floatAndDoubleField());
assertThat(result.getSortOrder(), equalTo(SortOrder.ASC));
assertThat(result.getSortValue(), equalTo(SortValue.from(1.0)));
assertThat(result.getMetricValue(), equalTo(2.0d));
}
public void testSortByFloatDescending() throws IOException {
TopMetricsAggregationBuilder builder = simpleBuilder(new FieldSortBuilder("s").order(SortOrder.DESC));
InternalTopMetrics result = collect(builder, new MatchAllDocsQuery(), writer -> {
writer.addDocument(Arrays.asList(floatField("s", 1.0F), doubleField("m", 2.0)));
writer.addDocument(Arrays.asList(floatField("s", 2.0F), doubleField("m", 3.0)));
},
floatAndDoubleField());
assertThat(result.getSortOrder(), equalTo(SortOrder.DESC));
assertThat(result.getSortValue(), equalTo(SortValue.from(2.0)));
assertThat(result.getMetricValue(), equalTo(3.0d));
}
public void testSortByLongAscending() throws IOException {
TopMetricsAggregationBuilder builder = simpleBuilder(new FieldSortBuilder("s").order(SortOrder.ASC));
InternalTopMetrics result = collect(builder, new MatchAllDocsQuery(), writer -> {
writer.addDocument(Arrays.asList(longField("s", 10), doubleField("m", 2.0)));
writer.addDocument(Arrays.asList(longField("s", 20), doubleField("m", 3.0)));
},
longAndDoubleField());
assertThat(result.getSortOrder(), equalTo(SortOrder.ASC));
assertThat(result.getSortValue(), equalTo(SortValue.from(10)));
assertThat(result.getMetricValue(), equalTo(2.0d));
}
public void testSortByLongDescending() throws IOException {
TopMetricsAggregationBuilder builder = simpleBuilder(new FieldSortBuilder("s").order(SortOrder.DESC));
InternalTopMetrics result = collect(builder, new MatchAllDocsQuery(), writer -> {
writer.addDocument(Arrays.asList(longField("s", 10), doubleField("m", 2.0)));
writer.addDocument(Arrays.asList(longField("s", 20), doubleField("m", 3.0)));
},
longAndDoubleField());
assertThat(result.getSortOrder(), equalTo(SortOrder.DESC));
assertThat(result.getSortValue(), equalTo(SortValue.from(20)));
assertThat(result.getMetricValue(), equalTo(3.0d));
}
public void testSortByScoreDescending() throws IOException {
TopMetricsAggregationBuilder builder = simpleBuilder(new ScoreSortBuilder().order(SortOrder.DESC));
InternalTopMetrics result = collect(builder, boostFoo(), writer -> {
writer.addDocument(Arrays.asList(textField("s", "foo"), doubleField("m", 2.0)));
writer.addDocument(Arrays.asList(textField("s", "bar"), doubleField("m", 3.0)));
},
textAndDoubleField());
assertThat(result.getSortOrder(), equalTo(SortOrder.DESC));
assertThat(result.getSortValue(), equalTo(SortValue.from(2.0)));
assertThat(result.getMetricValue(), equalTo(2.0d));
}
public void testSortByScoreAscending() throws IOException {
TopMetricsAggregationBuilder builder = simpleBuilder(new ScoreSortBuilder().order(SortOrder.ASC));
InternalTopMetrics result = collect(builder, boostFoo(), writer -> {
writer.addDocument(Arrays.asList(textField("s", "foo"), doubleField("m", 2.0)));
writer.addDocument(Arrays.asList(textField("s", "bar"), doubleField("m", 3.0)));
},
textAndDoubleField());
assertThat(result.getSortOrder(), equalTo(SortOrder.ASC));
assertThat(result.getSortValue(), equalTo(SortValue.from(1.0)));
assertThat(result.getMetricValue(), equalTo(3.0d));
}
public void testSortByScriptDescending() throws IOException {
TopMetricsAggregationBuilder builder = simpleBuilder(scriptSortOnS().order(SortOrder.DESC));
InternalTopMetrics result = collect(builder, new MatchAllDocsQuery(), writer -> {
writer.addDocument(Arrays.asList(doubleField("s", 2), doubleField("m", 2.0)));
writer.addDocument(Arrays.asList(doubleField("s", 1), doubleField("m", 3.0)));
},
doubleFields());
assertThat(result.getSortOrder(), equalTo(SortOrder.DESC));
assertThat(result.getSortValue(), equalTo(SortValue.from(2.0)));
assertThat(result.getMetricValue(), equalTo(2.0d));
}
public void testSortByScriptAscending() throws IOException {
TopMetricsAggregationBuilder builder = simpleBuilder(scriptSortOnS().order(SortOrder.ASC));
InternalTopMetrics result = collect(builder, new MatchAllDocsQuery(), writer -> {
writer.addDocument(Arrays.asList(doubleField("s", 2), doubleField("m", 2.0)));
writer.addDocument(Arrays.asList(doubleField("s", 1), doubleField("m", 3.0)));
},
doubleFields());
assertThat(result.getSortOrder(), equalTo(SortOrder.ASC));
assertThat(result.getSortValue(), equalTo(SortValue.from(1.0)));
assertThat(result.getMetricValue(), equalTo(3.0d));
}
public void testSortByStringScriptFails() throws IOException {
Script script = new Script(ScriptType.INLINE, MockScriptEngine.NAME, "s", emptyMap());
TopMetricsAggregationBuilder builder = simpleBuilder(new ScriptSortBuilder(script, ScriptSortType.STRING));
Exception e = expectThrows(IllegalArgumentException.class, () -> collect(builder, boostFoo(), writer -> {
writer.addDocument(Arrays.asList(textField("s", "foo"), doubleField("m", 2.0)));
writer.addDocument(Arrays.asList(textField("s", "bar"), doubleField("m", 3.0)));
},
textAndDoubleField()));
assertThat(e.getMessage(), equalTo(
"error building sort for [_script]: script sorting only supported on [numeric] scripts but was [string]"));
}
private InternalTopMetrics collectFromNewYorkAndLA(TopMetricsAggregationBuilder builder) throws IOException {
return collect(builder, new MatchAllDocsQuery(), writer -> {
writer.addDocument(Arrays.asList(geoPointField("s", 40.7128, -74.0060), doubleField("m", 2.0)));
writer.addDocument(Arrays.asList(geoPointField("s", 34.0522, -118.2437), doubleField("m", 3.0)));
},
geoPointAndDoubleField());
}
public void testSortByGeoDistanceDescending() throws IOException {
TopMetricsAggregationBuilder builder = simpleBuilder(new GeoDistanceSortBuilder("s", 35.7796, 78.6382).order(SortOrder.DESC));
InternalTopMetrics result = collectFromNewYorkAndLA(builder);
assertThat(result.getSortOrder(), equalTo(SortOrder.DESC));
assertThat(result.getSortValue(), equalTo(SortValue.from(1.2054632268631617E7)));
assertThat(result.getMetricValue(), equalTo(3.0d));
}
public void testSortByGeoDistanceAscending() throws IOException {
TopMetricsAggregationBuilder builder = simpleBuilder(new GeoDistanceSortBuilder("s", 35.7796, 78.6382).order(SortOrder.ASC));
InternalTopMetrics result = collectFromNewYorkAndLA(builder);
assertThat(result.getSortOrder(), equalTo(SortOrder.ASC));
assertThat(result.getSortValue(), equalTo(SortValue.from(1.1062351376961706E7)));
assertThat(result.getMetricValue(), equalTo(2.0d));
}
public void testInsideTerms() throws IOException {
TopMetricsAggregationBuilder builder = simpleBuilder(new FieldSortBuilder("s").order(SortOrder.ASC));
TermsAggregationBuilder terms = new TermsAggregationBuilder("terms", ValueType.DOUBLE).field("c").subAggregation(builder);
Terms result = (Terms) collect(terms, new MatchAllDocsQuery(), writer -> {
writer.addDocument(Arrays.asList(doubleField("c", 1.0), doubleField("s", 1.0), doubleField("m", 2.0)));
writer.addDocument(Arrays.asList(doubleField("c", 1.0), doubleField("s", 2.0), doubleField("m", 3.0)));
writer.addDocument(Arrays.asList(doubleField("c", 2.0), doubleField("s", 4.0), doubleField("m", 9.0)));
},
numberFieldType(NumberType.DOUBLE, "c"), numberFieldType(NumberType.DOUBLE, "s"), numberFieldType(NumberType.DOUBLE, "m"));
Terms.Bucket bucket1 = result.getBuckets().get(0);
assertThat(bucket1.getKey(), equalTo(1.0));
InternalTopMetrics top1 = bucket1.getAggregations().get("test");
assertThat(top1.getSortOrder(), equalTo(SortOrder.ASC));
assertThat(top1.getSortValue(), equalTo(SortValue.from(1.0)));
assertThat(top1.getMetricValue(), equalTo(2.0d));
Terms.Bucket bucket2 = result.getBuckets().get(1);
assertThat(bucket2.getKey(), equalTo(2.0));
InternalTopMetrics top2 = bucket2.getAggregations().get("test");
assertThat(top2.getSortOrder(), equalTo(SortOrder.ASC));
assertThat(top2.getSortValue(), equalTo(SortValue.from(4.0)));
assertThat(top2.getMetricValue(), equalTo(9.0d));
}
public void testTonsOfBucketsTriggersBreaker() throws IOException {
// Build a "simple" circuit breaker that trips at 20k
CircuitBreakerService breaker = mock(CircuitBreakerService.class);
ByteSizeValue max = new ByteSizeValue(20, ByteSizeUnit.KB);
when(breaker.getBreaker(CircuitBreaker.REQUEST)).thenReturn(new NoopCircuitBreaker(CircuitBreaker.REQUEST) {
private long total = 0;
@Override
public double addEstimateBytesAndMaybeBreak(long bytes, String label) throws CircuitBreakingException {
logger.debug("Used {} grabbing {} for {}", total, bytes, label);
total += bytes;
if (total > max.getBytes()) {
throw new CircuitBreakingException("test error", bytes, max.getBytes(), Durability.TRANSIENT);
}
return total;
}
@Override
public long addWithoutBreaking(long bytes) {
logger.debug("Used {} grabbing {}", total, bytes);
total += bytes;
return total;
}
});
// Collect some buckets with it
try (Directory directory = newDirectory()) {
try (RandomIndexWriter writer = new RandomIndexWriter(random(), directory)) {
writer.addDocument(Arrays.asList(doubleField("s", 1.0), doubleField("m", 2.0)));
}
try (IndexReader indexReader = DirectoryReader.open(directory)) {
IndexSearcher indexSearcher = newSearcher(indexReader, false, false);
SearchContext searchContext = createSearchContext(indexSearcher, createIndexSettings(), new MatchAllDocsQuery(),
new MultiBucketConsumer(Integer.MAX_VALUE, breaker.getBreaker(CircuitBreaker.REQUEST)), breaker, doubleFields());
TopMetricsAggregationBuilder builder = simpleBuilder(new FieldSortBuilder("s").order(SortOrder.ASC));
Aggregator aggregator = builder.build(searchContext.getQueryShardContext(), null)
.create(searchContext, null, true);
aggregator.preCollection();
assertThat(indexReader.leaves(), hasSize(1));
LeafBucketCollector leaf = aggregator.getLeafCollector(indexReader.leaves().get(0));
/*
* Collect some number of buckets that we *know* fit in the
* breaker. The number of buckets feels fairly arbitrary, but
* it comes from:
* budget = 15k = 20k - 5k for the "default weight" of every agg
* The 922nd bucket causes a resize whose request puts the total
* just over 15k.
*/
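// Spelling the arithmetic out (a rough sketch, not an exact accounting): the limit is
// 20 * 1024 = 20480 bytes and the agg's "default weight" accounts for about 5 * 1024
// of it, leaving roughly 15k for bucket storage. The failing resize asks for 16440
// bytes in a single request (see the getBytesWanted assertion below), which pushes
// the breaker's running total past the 20k limit.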
int bucketThatBreaks = 922;
for (int b = 0; b < bucketThatBreaks; b++) {
try {
leaf.collect(0, b);
} catch (Exception e) {
throw new RuntimeException("ADFADFS " + b, e);
}
}
CircuitBreakingException e = expectThrows(CircuitBreakingException.class, () -> leaf.collect(0, bucketThatBreaks));
assertThat(e.getMessage(), equalTo("test error"));
assertThat(e.getByteLimit(), equalTo(max.getBytes()));
assertThat(e.getBytesWanted(), equalTo(16440L));
}
}
}
private TopMetricsAggregationBuilder simpleBuilder(SortBuilder<?> sort) {
return new TopMetricsAggregationBuilder("test", singletonList(sort),
new MultiValuesSourceFieldConfig.Builder().setFieldName("m").build());
}
private TopMetricsAggregationBuilder simpleBuilder() {
return simpleBuilder(new FieldSortBuilder("s"));
}
/**
* Build a query that matches all documents but adds 1 to the score of
* all docs that contain "foo". We use this instead of a term query
* directly because the score that comes from a term query can vary
* quite a bit, while this one is completely predictable.
*/
private Query boostFoo() {
return new BooleanQuery.Builder()
.add(new BooleanClause(new MatchAllDocsQuery(), Occur.MUST))
.add(new BooleanClause(new BoostQuery(new ConstantScoreQuery(new TermQuery(new Term("s", "foo"))), 1.0f), Occur.SHOULD))
.build();
}
private MappedFieldType[] doubleFields() {
return new MappedFieldType[] {numberFieldType(NumberType.DOUBLE, "s"), numberFieldType(NumberType.DOUBLE, "m")};
}
private MappedFieldType[] floatAndDoubleField() {
return new MappedFieldType[] {numberFieldType(NumberType.FLOAT, "s"), numberFieldType(NumberType.DOUBLE, "m")};
}
private MappedFieldType[] longAndDoubleField() {
return new MappedFieldType[] {numberFieldType(NumberType.LONG, "s"), numberFieldType(NumberType.DOUBLE, "m")};
}
private MappedFieldType[] textAndDoubleField() {
return new MappedFieldType[] {textFieldType("s"), numberFieldType(NumberType.DOUBLE, "m")};
}
private MappedFieldType[] geoPointAndDoubleField() {
return new MappedFieldType[] {geoPointFieldType("s"), numberFieldType(NumberType.DOUBLE, "m")};
}
private MappedFieldType numberFieldType(NumberType numberType, String name) {
NumberFieldMapper.NumberFieldType type = new NumberFieldMapper.NumberFieldType(numberType);
type.setName(name);
return type;
}
private MappedFieldType textFieldType(String name) {
TextFieldMapper.TextFieldType type = new TextFieldMapper.TextFieldType();
type.setName(name);
return type;
}
private MappedFieldType geoPointFieldType(String name) {
GeoPointFieldMapper.GeoPointFieldType type = new GeoPointFieldMapper.GeoPointFieldType();
type.setName(name);
type.setHasDocValues(true);
return type;
}
private IndexableField doubleField(String name, double value) {
return new SortedNumericDocValuesField(name, NumericUtils.doubleToSortableLong(value));
}
private IndexableField floatField(String name, float value) {
return new SortedNumericDocValuesField(name, NumericUtils.floatToSortableInt(value));
}
private IndexableField longField(String name, long value) {
return new SortedNumericDocValuesField(name, value);
}
private IndexableField textField(String name, String value) {
return new Field(name, value, textFieldType(name));
}
private IndexableField geoPointField(String name, double lat, double lon) {
return new LatLonDocValuesField(name, lat, lon);
}
private InternalTopMetrics collect(TopMetricsAggregationBuilder builder, Query query,
CheckedConsumer<RandomIndexWriter, IOException> buildIndex, MappedFieldType... fields) throws IOException {
InternalTopMetrics result = (InternalTopMetrics) collect((AggregationBuilder) builder, query, buildIndex, fields);
assertThat(result.getSortFormat(), equalTo(DocValueFormat.RAW));
assertThat(result.getMetricName(), equalTo(builder.getMetricField().getFieldName()));
return result;
}
private InternalAggregation collect(AggregationBuilder builder, Query query,
CheckedConsumer<RandomIndexWriter, IOException> buildIndex, MappedFieldType... fields) throws IOException {
try (Directory directory = newDirectory()) {
try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) {
buildIndex.accept(indexWriter);
}
try (IndexReader indexReader = DirectoryReader.open(directory)) {
IndexSearcher indexSearcher = newSearcher(indexReader, true, true);
return search(indexSearcher, query, builder, fields);
}
}
}
/**
* Builds a script sort on a simple script that reads the "s" field.
*/
private ScriptSortBuilder scriptSortOnS() {
return new ScriptSortBuilder(new Script(ScriptType.INLINE, MockScriptEngine.NAME, "s", emptyMap()), ScriptSortType.NUMBER);
}
@Override
protected ScriptService getMockScriptService() {
MockScriptEngine scriptEngine = new MockScriptEngine(MockScriptEngine.NAME,
singletonMap("s", args -> {
@SuppressWarnings("unchecked")
Map<String, ScriptDocValues<?>> fields = (Map<String, ScriptDocValues<?>>) args.get("doc");
ScriptDocValues.Doubles field = (ScriptDocValues.Doubles) fields.get("s");
return field.getValue();
}),
emptyMap());
Map<String, ScriptEngine> engines = singletonMap(scriptEngine.getType(), scriptEngine);
return new ScriptService(Settings.EMPTY, engines, ScriptModule.CORE_CONTEXTS);
}
}

View File

@ -5,6 +5,7 @@
*/
package org.elasticsearch.xpack.core.analytics.action;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionType;
import org.elasticsearch.action.FailedNodeException;
import org.elasticsearch.action.support.nodes.BaseNodeRequest;
@ -109,32 +110,46 @@ public class AnalyticsStatsAction extends ActionType<AnalyticsStatsAction.Respon
}
public static class NodeResponse extends BaseNodeResponse implements ToXContentObject {
static final ParseField CUMULATIVE_CARDINALITY_USAGE = new ParseField("cumulative_cardinality_usage");
static final ParseField TOP_METRICS_USAGE = new ParseField("top_metrics_usage");
private long cumulativeCardinalityUsage;
private long topMetricsUsage;
public NodeResponse(DiscoveryNode node) {
super(node);
}
public NodeResponse(StreamInput in) throws IOException {
super(in);
cumulativeCardinalityUsage = in.readZLong();
if (in.getVersion().onOrAfter(Version.V_7_7_0)) {
topMetricsUsage = in.readVLong();
}
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeZLong(cumulativeCardinalityUsage);
if (out.getVersion().onOrAfter(Version.V_7_7_0)) {
out.writeVLong(topMetricsUsage);
}
}
public void setCumulativeCardinalityUsage(long cumulativeCardinalityUsage) {
this.cumulativeCardinalityUsage = cumulativeCardinalityUsage;
}
public void setTopMetricsUsage(long topMetricsUsage) {
this.topMetricsUsage = topMetricsUsage;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field(CUMULATIVE_CARDINALITY_USAGE.getPreferredName(), cumulativeCardinalityUsage);
builder.field(TOP_METRICS_USAGE.getPreferredName(), topMetricsUsage);
builder.endObject();
return builder;
}

View File

@ -23,6 +23,7 @@ import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
@ -34,6 +35,7 @@ import org.elasticsearch.index.fielddata.AtomicOrdinalsFieldData;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
import org.elasticsearch.index.fielddata.IndexOrdinalsFieldData;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.fielddata.fieldcomparator.BytesRefFieldComparatorSource;
import org.elasticsearch.index.fielddata.plain.AbstractAtomicOrdinalsFieldData;
import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
@ -49,7 +51,10 @@ import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.StringFieldType;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.MultiValueMode;
import org.elasticsearch.search.sort.BucketedSort;
import org.elasticsearch.search.sort.SortOrder;
import java.io.IOException;
import java.util.Iterator;
@ -385,6 +390,12 @@ public final class FlatObjectFieldMapper extends DynamicKeyFieldMapper {
return new SortField(getFieldName(), source, reverse);
}
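// Bucketed sorts (which back aggs like top_metrics) only work on numeric doc values;
// flattened field data is ordinals/string based, so it is rejected with the same
// "only supported on numeric fields" message that sorting by a keyword field produces.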
@Override
public BucketedSort newBucketedSort(BigArrays bigArrays, Object missingValue, MultiValueMode sortMode, Nested nested,
SortOrder sortOrder, DocValueFormat format) {
throw new IllegalArgumentException("only supported on numeric fields");
}
@Override
public void clear() {
delegate.clear();

View File

@ -0,0 +1,371 @@
---
"sort by long field":
- do:
bulk:
index: test
refresh: true
body:
- '{"index": {}}'
- '{"s": 1, "v": 3.1415}'
- '{"index": {}}'
- '{"s": 2, "v": 1}'
- '{"index": {}}'
- '{"s": 3, "v": 2.71828}'
- do:
search:
size: 0
body:
aggs:
tm:
top_metrics:
metric:
field: v
sort:
s: desc
- match: { aggregations.tm.top.0.metrics.v: 2.718280076980591 }
- match: { aggregations.tm.top.0.sort: [3] }
- do:
search:
body:
aggs:
tm:
top_metrics:
metric:
field: v
sort:
s: asc
- match: { aggregations.tm.top.0.metrics.v: 3.1414999961853027 }
- match: { aggregations.tm.top.0.sort: [1] }
- do:
search:
body:
aggs:
tm:
top_metrics:
metric:
field: v
sort:
s:
order: asc
numeric_type: date
- match: { aggregations.tm.top.0.metrics.v: 3.1414999961853027 }
- match: { aggregations.tm.top.0.sort: [1] }
---
"sort by double field":
- do:
indices.create:
index: test
body:
mappings:
properties:
s:
                type: double
- do:
bulk:
index: test
refresh: true
body:
- '{"index": {}}'
- '{"s": 1.0, "v": 3.1415}'
- '{"index": {}}'
- '{"s": 2.0, "v": 1}'
- '{"index": {}}'
- '{"s": 3.0, "v": 2.71828}'
- do:
search:
size: 0
body:
aggs:
tm:
top_metrics:
metric:
field: v
sort:
s: desc
- match: { aggregations.tm.top.0.metrics.v: 2.718280076980591 }
- match: { aggregations.tm.top.0.sort: [3.0] }
- do:
search:
body:
aggs:
tm:
top_metrics:
metric:
field: v
sort:
s: asc
- match: { aggregations.tm.top.0.metrics.v: 3.1414999961853027 }
- match: { aggregations.tm.top.0.sort: [1.0] }
---
"sort by scaled float field":
- do:
indices.create:
index: test
body:
mappings:
properties:
s:
type: scaled_float
scaling_factor: 10
- do:
bulk:
index: test
refresh: true
body:
- '{"index": {}}'
- '{"s": 1, "v": 3.1415}'
- '{"index": {}}'
- '{"s": 2, "v": 1}'
- '{"index": {}}'
- '{"s": 3, "v": 2.71828}'
- do:
search:
size: 0
body:
aggs:
tm:
top_metrics:
metric:
field: v
sort:
s: desc
- match: { aggregations.tm.top.0.metrics.v: 2.718280076980591 }
- match: { aggregations.tm.top.0.sort: [3.0] }
- do:
search:
body:
aggs:
tm:
top_metrics:
metric:
field: v
sort:
s: asc
- match: { aggregations.tm.top.0.metrics.v: 3.1414999961853027 }
- match: { aggregations.tm.top.0.sort: [1.0] }
---
"sort by keyword field fails":
- do:
bulk:
index: test
refresh: true
body:
- '{"index": {}}'
- '{"s": "cow", "v": 3.1415}'
- do:
catch: bad_request
search:
size: 0
body:
aggs:
tm:
top_metrics:
metric:
field: v
sort: s.keyword
- match: { error.root_cause.0.reason: "error building sort for field [s.keyword] of type [keyword] in index [test]: only supported on numeric fields" }
---
"sort by score":
- do:
bulk:
index: test
refresh: true
body:
- '{"index": {}}'
- '{"s": "big cat", "v": 3.1415}'
- '{"index": {}}'
- '{"s": "cat", "v": 1}'
- '{"index": {}}'
- '{"s": "the small dog", "v": 2.71828}'
- do:
search:
size: 0
body:
query:
match:
s: big cat
aggs:
tm:
top_metrics:
metric:
field: v
sort: _score
- match: { aggregations.tm.top.0.metrics.v: 3.1414999961853027 }
- match: { aggregations.tm.top.0.sort: [1.450832724571228] }
---
"sort by numeric script":
- do:
bulk:
index: test
refresh: true
body:
- '{"index": {}}'
- '{"s": "cow", "v": 3.1415}'
- do:
search:
size: 0
body:
aggs:
tm:
top_metrics:
metric:
field: v
sort:
_script:
type: number
script:
source: doc['s.keyword'].value.length()
- match: { aggregations.tm.top.0.metrics.v: 3.1414999961853027 }
- match: { aggregations.tm.top.0.sort: [3.0] }
---
"sort by string script fails":
- do:
bulk:
index: test
refresh: true
body:
- '{"index": {}}'
- '{"s": "cow", "v": 3.1415}'
- do:
catch: bad_request
search:
size: 0
body:
aggs:
tm:
top_metrics:
metric:
field: v
sort:
_script:
type: string
script:
source: doc['s'].value
- match: { error.root_cause.0.reason: "error building sort for [_script]: script sorting only supported on [numeric] scripts but was [string]" }
---
"sort by geo_distance":
- do:
indices.create:
index: test
body:
mappings:
properties:
location:
type: geo_point
- do:
bulk:
index: test
refresh: true
body:
- '{"index": {}}'
- '{"location": {"lat" : 40.7128, "lon" : -74.0060}, "name": "New York", "population": 8623000}'
- '{"index": {}}'
- '{"location": {"lat" : 34.0522, "lon" : -118.2437}, "name": "Los Angeles", "population": 4000000}'
- '{"index": {}}'
- '{"location": {"lat" : 41.8781, "lon" : -87.6298}, "name": "Chicago", "population": 2716000}'
- do:
search:
size: 0
body:
aggs:
pop:
top_metrics:
metric:
field: population
sort:
_geo_distance:
location: "35.7796, -78.6382"
- match: { aggregations.pop.top.0.metrics.population: 8623000 }
- match: { aggregations.pop.top.0.sort: [681335.0456554737] }
---
"inside terms":
- do:
indices.create:
index: test
body:
mappings:
properties:
ip:
type: ip
date:
type: date
- do:
bulk:
index: test
refresh: true
body:
- '{"index": {}}'
- '{"ip": "192.168.0.1", "date": "2020-01-01T01:01:01", "v": 1}'
- '{"index": {}}'
- '{"ip": "192.168.0.1", "date": "2020-01-01T02:01:01", "v": 2}'
- '{"index": {}}'
- '{"ip": "192.168.0.2", "date": "2020-01-01T02:01:01", "v": 3}'
- do:
search:
size: 0
body:
aggs:
ip:
terms:
field: ip
aggs:
tm:
top_metrics:
metric:
field: v
sort:
date: desc
- length: { aggregations.ip.buckets: 2 }
- match: { aggregations.ip.buckets.0.key: 192.168.0.1 }
- match: { aggregations.ip.buckets.0.tm.top.0.metrics.v: 2 }
- match: { aggregations.ip.buckets.0.tm.top.0.sort: ['2020-01-01T02:01:01.000Z'] }
- match: { aggregations.ip.buckets.1.key: 192.168.0.2 }
- match: { aggregations.ip.buckets.1.tm.top.0.metrics.v: 3 }
- match: { aggregations.ip.buckets.1.tm.top.0.sort: ['2020-01-01T02:01:01.000Z'] }
- do:
search:
size: 0
body:
aggs:
ip:
terms:
field: ip
order:
tm.v: desc
aggs:
tm:
top_metrics:
metric:
field: v
sort:
date: desc
- length: { aggregations.ip.buckets: 2 }
- match: { aggregations.ip.buckets.0.key: 192.168.0.2 }
- match: { aggregations.ip.buckets.0.tm.top.0.metrics.v: 3 }
- match: { aggregations.ip.buckets.0.tm.top.0.sort: ['2020-01-01T02:01:01.000Z'] }
- match: { aggregations.ip.buckets.1.key: 192.168.0.1 }
- match: { aggregations.ip.buckets.1.tm.top.0.metrics.v: 2 }
- match: { aggregations.ip.buckets.1.tm.top.0.sort: ['2020-01-01T02:01:01.000Z'] }

View File

@ -74,7 +74,8 @@ public final class Aggregations {
"stats", // https://github.com/elastic/elasticsearch/issues/51925
"string_stats", // https://github.com/elastic/elasticsearch/issues/51925
"terms", // https://github.com/elastic/elasticsearch/issues/51073
"top_hits"
"top_hits",
"top_metrics" // https://github.com/elastic/elasticsearch/issues/52236
);
private Aggregations() {}

View File

@ -10,6 +10,7 @@ package org.elasticsearch.xpack.vectors.query;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.SortField;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.fielddata.IndexFieldData;
@ -19,7 +20,10 @@ import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.MultiValueMode;
import org.elasticsearch.search.sort.BucketedSort;
import org.elasticsearch.search.sort.SortOrder;
public class VectorDVIndexFieldData extends DocValuesIndexFieldData implements IndexFieldData<VectorDVAtomicFieldData> {
@ -35,6 +39,12 @@ public class VectorDVIndexFieldData extends DocValuesIndexFieldData implements I
throw new IllegalArgumentException("can't sort on the vector field");
}
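// Dense vectors can't be sorted on at all (see sortField above), so bucketed sorts
// used by aggs like top_metrics are rejected like any other non-numeric field data.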
@Override
public BucketedSort newBucketedSort(BigArrays bigArrays, Object missingValue, MultiValueMode sortMode, Nested nested,
SortOrder sortOrder, DocValueFormat format) {
throw new IllegalArgumentException("only supported on numeric fields");
}
@Override
public VectorDVAtomicFieldData load(LeafReaderContext context) {
return new VectorDVAtomicFieldData(context.reader(), fieldName, isDense);