metaData) throws IOException {
super(name, factories, context, parent, pipelineAggregators, metaData);
this.bucketCountThresholds = bucketCountThresholds;
@@ -186,7 +193,7 @@ public abstract class TermsAggregator extends BucketsAggregator {
aggsUsedForSorting.add(path.resolveTopmostAggregator(this));
} else if (order instanceof CompoundOrder) {
CompoundOrder compoundOrder = (CompoundOrder) order;
- for (Terms.Order orderElement : compoundOrder.orderElements()) {
+ for (BucketOrder orderElement : compoundOrder.orderElements()) {
if (orderElement instanceof Aggregation) {
AggregationPath path = ((Aggregation) orderElement).path();
aggsUsedForSorting.add(path.resolveTopmostAggregator(this));
@@ -195,6 +202,58 @@ public abstract class TermsAggregator extends BucketsAggregator {
}
}
+ /**
+ * Internal Optimization for ordering {@link InternalTerms.Bucket}s by a sub aggregation.
+ *
+     * In this phase, if the order is based on sub-aggregations, we need to use a different comparator
+ * to avoid constructing buckets for ordering purposes (we can potentially have a lot of buckets and building
+ * them will cause loads of redundant object constructions). The "special" comparators here will fetch the
+ * sub aggregation values directly from the sub aggregators bypassing bucket creation. Note that the comparator
+ * attached to the order will still be used in the reduce phase of the Aggregation.
+ *
+ * @param path determines which sub aggregation to use for ordering.
+ * @param asc {@code true} for ascending order, {@code false} for descending.
+ * @return {@code Comparator} to order {@link InternalTerms.Bucket}s in the desired order.
+ */
+ public Comparator bucketComparator(AggregationPath path, boolean asc) {
+
+ final Aggregator aggregator = path.resolveAggregator(this);
+ final String key = path.lastPathElement().key;
+
+ if (aggregator instanceof SingleBucketAggregator) {
+ assert key == null : "this should be picked up before the aggregation is executed - on validate";
+ return (b1, b2) -> {
+ int mul = asc ? 1 : -1;
+ int v1 = ((SingleBucketAggregator) aggregator).bucketDocCount(((InternalTerms.Bucket) b1).bucketOrd);
+ int v2 = ((SingleBucketAggregator) aggregator).bucketDocCount(((InternalTerms.Bucket) b2).bucketOrd);
+ return mul * (v1 - v2);
+ };
+ }
+
+ // we only support single-bucket aggregators
+ assert !(aggregator instanceof BucketsAggregator) : "this should be picked up before the aggregation is executed - on validate";
+
+ if (aggregator instanceof NumericMetricsAggregator.MultiValue) {
+ assert key != null : "this should be picked up before the aggregation is executed - on validate";
+ return (b1, b2) -> {
+ double v1 = ((NumericMetricsAggregator.MultiValue) aggregator).metric(key, ((InternalTerms.Bucket) b1).bucketOrd);
+ double v2 = ((NumericMetricsAggregator.MultiValue) aggregator).metric(key, ((InternalTerms.Bucket) b2).bucketOrd);
+ // some metrics may return NaN (eg. avg, variance, etc...) in which case we'd like to push all of those to
+ // the bottom
+ return Comparators.compareDiscardNaN(v1, v2, asc);
+ };
+ }
+
+ // single-value metrics agg
+ return (b1, b2) -> {
+ double v1 = ((NumericMetricsAggregator.SingleValue) aggregator).metric(((InternalTerms.Bucket) b1).bucketOrd);
+ double v2 = ((NumericMetricsAggregator.SingleValue) aggregator).metric(((InternalTerms.Bucket) b2).bucketOrd);
+ // some metrics may return NaN (eg. avg, variance, etc...) in which case we'd like to push all of those to
+ // the bottom
+ return Comparators.compareDiscardNaN(v1, v2, asc);
+ };
+ }
+
@Override
protected boolean shouldDefer(Aggregator aggregator) {
return collectMode == SubAggCollectionMode.BREADTH_FIRST
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java
index 10fef554555..9a06dfe66f5 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorFactory.java
@@ -33,6 +33,8 @@ import org.elasticsearch.search.aggregations.bucket.BucketUtils;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregator.BucketCountThresholds;
import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
+import org.elasticsearch.search.aggregations.BucketOrder;
+import org.elasticsearch.search.aggregations.InternalOrder;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
@@ -44,14 +46,14 @@ import java.util.Map;
public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory {
- private final Terms.Order order;
+ private final BucketOrder order;
private final IncludeExclude includeExclude;
private final String executionHint;
private final SubAggCollectionMode collectMode;
private final TermsAggregator.BucketCountThresholds bucketCountThresholds;
private boolean showTermDocCountError;
- public TermsAggregatorFactory(String name, ValuesSourceConfig config, Terms.Order order,
+ public TermsAggregatorFactory(String name, ValuesSourceConfig config, BucketOrder order,
IncludeExclude includeExclude, String executionHint, SubAggCollectionMode collectMode,
TermsAggregator.BucketCountThresholds bucketCountThresholds, boolean showTermDocCountError, SearchContext context,
AggregatorFactory> parent, AggregatorFactories.Builder subFactoriesBuilder, Map metaData) throws IOException {
@@ -90,7 +92,7 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory pipelineAggregators, Map metaData)
@@ -242,7 +244,7 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory pipelineAggregators, Map metaData)
@@ -262,7 +264,7 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory pipelineAggregators, Map metaData)
@@ -281,7 +283,7 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory pipelineAggregators, Map metaData)
@@ -319,7 +321,7 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory pipelineAggregators, Map metaData)
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/UnmappedTerms.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/UnmappedTerms.java
index 6aaccd22d72..595991dac06 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/UnmappedTerms.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/UnmappedTerms.java
@@ -25,6 +25,7 @@ import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
+import org.elasticsearch.search.aggregations.BucketOrder;
import java.io.IOException;
import java.util.Collections;
@@ -50,7 +51,7 @@ public class UnmappedTerms extends InternalTerms pipelineAggregators, Map metaData) {
super(name, order, requiredSize, minDocCount, pipelineAggregators, metaData);
}
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/support/AggregationPath.java b/core/src/main/java/org/elasticsearch/search/aggregations/support/AggregationPath.java
index 746e0e5e161..995381373ab 100644
--- a/core/src/main/java/org/elasticsearch/search/aggregations/support/AggregationPath.java
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/support/AggregationPath.java
@@ -33,7 +33,7 @@ import java.util.ArrayList;
import java.util.List;
/**
- * A path that can be used to sort/order buckets (in some multi-bucket aggregations, eg terms & histogram) based on
+ * A path that can be used to sort/order buckets (in some multi-bucket aggregations, e.g. terms & histogram) based on
* sub-aggregations. The path may point to either a single-bucket aggregation or a metrics aggregation. If the path
* points to a single-bucket aggregation, the sort will be applied based on the {@code doc_count} of the bucket. If this
* path points to a metrics aggregation, if it's a single-value metrics (eg. avg, max, min, etc..) the sort will be
@@ -281,14 +281,15 @@ public class AggregationPath {
/**
* Validates this path over the given aggregator as a point of reference.
*
- * @param root The point of reference of this path
+ * @param root The point of reference of this path
+ * @throws AggregationExecutionException on validation error
*/
- public void validate(Aggregator root) {
+ public void validate(Aggregator root) throws AggregationExecutionException {
Aggregator aggregator = root;
for (int i = 0; i < pathElements.size(); i++) {
aggregator = aggregator.subAggregator(pathElements.get(i).name);
if (aggregator == null) {
- throw new AggregationExecutionException("Invalid term-aggregator order path [" + this + "]. Unknown aggregation ["
+ throw new AggregationExecutionException("Invalid aggregator order path [" + this + "]. Unknown aggregation ["
+ pathElements.get(i).name + "]");
}
if (i < pathElements.size() - 1) {
@@ -296,16 +297,16 @@ public class AggregationPath {
// we're in the middle of the path, so the aggregator can only be a single-bucket aggregator
if (!(aggregator instanceof SingleBucketAggregator)) {
- throw new AggregationExecutionException("Invalid terms aggregation order path [" + this +
- "]. Terms buckets can only be sorted on a sub-aggregator path " +
+ throw new AggregationExecutionException("Invalid aggregation order path [" + this +
+ "]. Buckets can only be sorted on a sub-aggregator path " +
"that is built out of zero or more single-bucket aggregations within the path and a final " +
"single-bucket or a metrics aggregation at the path end. Sub-path [" +
subPath(0, i + 1) + "] points to non single-bucket aggregation");
}
if (pathElements.get(i).key != null) {
- throw new AggregationExecutionException("Invalid terms aggregation order path [" + this +
- "]. Terms buckets can only be sorted on a sub-aggregator path " +
+ throw new AggregationExecutionException("Invalid aggregation order path [" + this +
+ "]. Buckets can only be sorted on a sub-aggregator path " +
"that is built out of zero or more single-bucket aggregations within the path and a " +
"final single-bucket or a metrics aggregation at the path end. Sub-path [" +
subPath(0, i + 1) + "] points to non single-bucket aggregation");
@@ -314,8 +315,8 @@ public class AggregationPath {
}
boolean singleBucket = aggregator instanceof SingleBucketAggregator;
if (!singleBucket && !(aggregator instanceof NumericMetricsAggregator)) {
- throw new AggregationExecutionException("Invalid terms aggregation order path [" + this +
- "]. Terms buckets can only be sorted on a sub-aggregator path " +
+ throw new AggregationExecutionException("Invalid aggregation order path [" + this +
+ "]. Buckets can only be sorted on a sub-aggregator path " +
"that is built out of zero or more single-bucket aggregations within the path and a final " +
"single-bucket or a metrics aggregation at the path end.");
}
@@ -324,7 +325,7 @@ public class AggregationPath {
if (singleBucket) {
if (lastToken.key != null && !"doc_count".equals(lastToken.key)) {
- throw new AggregationExecutionException("Invalid terms aggregation order path [" + this +
+ throw new AggregationExecutionException("Invalid aggregation order path [" + this +
"]. Ordering on a single-bucket aggregation can only be done on its doc_count. " +
"Either drop the key (a la \"" + lastToken.name + "\") or change it to \"doc_count\" (a la \"" + lastToken.name + ".doc_count\")");
}
@@ -333,7 +334,7 @@ public class AggregationPath {
if (aggregator instanceof NumericMetricsAggregator.SingleValue) {
if (lastToken.key != null && !"value".equals(lastToken.key)) {
- throw new AggregationExecutionException("Invalid terms aggregation order path [" + this +
+ throw new AggregationExecutionException("Invalid aggregation order path [" + this +
"]. Ordering on a single-value metrics aggregation can only be done on its value. " +
"Either drop the key (a la \"" + lastToken.name + "\") or change it to \"value\" (a la \"" + lastToken.name + ".value\")");
}
@@ -342,12 +343,12 @@ public class AggregationPath {
// the aggregator must be of a multi-value metrics type
if (lastToken.key == null) {
- throw new AggregationExecutionException("Invalid terms aggregation order path [" + this +
+ throw new AggregationExecutionException("Invalid aggregation order path [" + this +
"]. When ordering on a multi-value metrics aggregation a metric name must be specified");
}
if (!((NumericMetricsAggregator.MultiValue) aggregator).hasMetric(lastToken.key)) {
- throw new AggregationExecutionException("Invalid terms aggregation order path [" + this +
+ throw new AggregationExecutionException("Invalid aggregation order path [" + this +
"]. Unknown metric name [" + lastToken.key + "] on multi-value metrics aggregation [" + lastToken.name + "]");
}
}
diff --git a/core/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsContext.java b/core/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsContext.java
index c1155bdfbd6..c8eee3e91ef 100644
--- a/core/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsContext.java
+++ b/core/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsContext.java
@@ -19,19 +19,31 @@
package org.elasticsearch.search.fetch.subphase;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.ReaderUtil;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
+import org.apache.lucene.search.CollectionTerminatedException;
+import org.apache.lucene.search.Collector;
+import org.apache.lucene.search.ConjunctionDISI;
+import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.DocValuesTermsQuery;
+import org.apache.lucene.search.LeafCollector;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.Query;
+import org.apache.lucene.search.Scorer;
+import org.apache.lucene.search.ScorerSupplier;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.TopDocsCollector;
import org.apache.lucene.search.TopFieldCollector;
import org.apache.lucene.search.TopScoreDocCollector;
+import org.apache.lucene.search.TotalHitCountCollector;
+import org.apache.lucene.search.Weight;
import org.apache.lucene.search.join.BitSetProducer;
import org.apache.lucene.search.join.ParentChildrenBlockJoinQuery;
+import org.apache.lucene.util.Bits;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.index.mapper.DocumentMapper;
@@ -40,11 +52,11 @@ import org.elasticsearch.index.mapper.ObjectMapper;
import org.elasticsearch.index.mapper.ParentFieldMapper;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHitField;
-import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.internal.SubSearchContext;
import java.io.IOException;
+import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
@@ -57,7 +69,7 @@ public final class InnerHitsContext {
this.innerHits = new HashMap<>();
}
- public InnerHitsContext(Map innerHits) {
+ InnerHitsContext(Map innerHits) {
this.innerHits = Objects.requireNonNull(innerHits);
}
@@ -77,14 +89,16 @@ public final class InnerHitsContext {
public abstract static class BaseInnerHits extends SubSearchContext {
private final String name;
+ final SearchContext context;
private InnerHitsContext childInnerHits;
- protected BaseInnerHits(String name, SearchContext context) {
+ BaseInnerHits(String name, SearchContext context) {
super(context);
this.name = name;
+ this.context = context;
}
- public abstract TopDocs topDocs(SearchContext context, FetchSubPhase.HitContext hitContext) throws IOException;
+ public abstract TopDocs[] topDocs(SearchHit[] hits) throws IOException;
public String getName() {
return name;
@@ -98,6 +112,12 @@ public final class InnerHitsContext {
public void setChildInnerHits(Map childInnerHits) {
this.childInnerHits = new InnerHitsContext(childInnerHits);
}
+
+ Weight createInnerHitQueryWeight() throws IOException {
+ final boolean needsScores = size() != 0 && (sort() == null || sort().sort.needsScores());
+ return context.searcher().createNormalizedWeight(query(), needsScores);
+ }
+
}
public static final class NestedInnerHits extends BaseInnerHits {
@@ -112,35 +132,48 @@ public final class InnerHitsContext {
}
@Override
- public TopDocs topDocs(SearchContext context, FetchSubPhase.HitContext hitContext) throws IOException {
- Query rawParentFilter;
- if (parentObjectMapper == null) {
- rawParentFilter = Queries.newNonNestedFilter();
- } else {
- rawParentFilter = parentObjectMapper.nestedTypeFilter();
- }
- BitSetProducer parentFilter = context.bitsetFilterCache().getBitSetProducer(rawParentFilter);
- Query childFilter = childObjectMapper.nestedTypeFilter();
- int parentDocId = hitContext.readerContext().docBase + hitContext.docId();
- Query q = Queries.filtered(query(), new ParentChildrenBlockJoinQuery(parentFilter, childFilter, parentDocId));
-
- if (size() == 0) {
- return new TopDocs(context.searcher().count(q), Lucene.EMPTY_SCORE_DOCS, 0);
- } else {
- int topN = Math.min(from() + size(), context.searcher().getIndexReader().maxDoc());
- TopDocsCollector> topDocsCollector;
- if (sort() != null) {
- topDocsCollector = TopFieldCollector.create(sort().sort, topN, true, trackScores(), trackScores());
+ public TopDocs[] topDocs(SearchHit[] hits) throws IOException {
+ Weight innerHitQueryWeight = createInnerHitQueryWeight();
+ TopDocs[] result = new TopDocs[hits.length];
+ for (int i = 0; i < hits.length; i++) {
+ SearchHit hit = hits[i];
+ Query rawParentFilter;
+ if (parentObjectMapper == null) {
+ rawParentFilter = Queries.newNonNestedFilter();
} else {
- topDocsCollector = TopScoreDocCollector.create(topN);
+ rawParentFilter = parentObjectMapper.nestedTypeFilter();
}
- try {
- context.searcher().search(q, topDocsCollector);
- } finally {
- clearReleasables(Lifetime.COLLECTION);
+
+ int parentDocId = hit.docId();
+ final int readerIndex = ReaderUtil.subIndex(parentDocId, searcher().getIndexReader().leaves());
+ // With nested inner hits the nested docs are always in the same segment, so no need to use the other segments
+ LeafReaderContext ctx = searcher().getIndexReader().leaves().get(readerIndex);
+
+ Query childFilter = childObjectMapper.nestedTypeFilter();
+ BitSetProducer parentFilter = context.bitsetFilterCache().getBitSetProducer(rawParentFilter);
+ Query q = new ParentChildrenBlockJoinQuery(parentFilter, childFilter, parentDocId);
+ Weight weight = context.searcher().createNormalizedWeight(q, false);
+ if (size() == 0) {
+ TotalHitCountCollector totalHitCountCollector = new TotalHitCountCollector();
+ intersect(weight, innerHitQueryWeight, totalHitCountCollector, ctx);
+ result[i] = new TopDocs(totalHitCountCollector.getTotalHits(), Lucene.EMPTY_SCORE_DOCS, 0);
+ } else {
+ int topN = Math.min(from() + size(), context.searcher().getIndexReader().maxDoc());
+ TopDocsCollector> topDocsCollector;
+ if (sort() != null) {
+ topDocsCollector = TopFieldCollector.create(sort().sort, topN, true, trackScores(), trackScores());
+ } else {
+ topDocsCollector = TopScoreDocCollector.create(topN);
+ }
+ try {
+ intersect(weight, innerHitQueryWeight, topDocsCollector, ctx);
+ } finally {
+ clearReleasables(Lifetime.COLLECTION);
+ }
+ result[i] = topDocsCollector.topDocs(from(), size());
}
- return topDocsCollector.topDocs(from(), size());
}
+ return result;
}
}
@@ -156,53 +189,65 @@ public final class InnerHitsContext {
}
@Override
- public TopDocs topDocs(SearchContext context, FetchSubPhase.HitContext hitContext) throws IOException {
- final Query hitQuery;
- if (isParentHit(hitContext.hit())) {
- String field = ParentFieldMapper.joinField(hitContext.hit().getType());
- hitQuery = new DocValuesTermsQuery(field, hitContext.hit().getId());
- } else if (isChildHit(hitContext.hit())) {
- DocumentMapper hitDocumentMapper = mapperService.documentMapper(hitContext.hit().getType());
- final String parentType = hitDocumentMapper.parentFieldMapper().type();
- SearchHitField parentField = hitContext.hit().field(ParentFieldMapper.NAME);
- if (parentField == null) {
- throw new IllegalStateException("All children must have a _parent");
- }
- Term uidTerm = context.mapperService().createUidTerm(parentType, parentField.getValue());
- if (uidTerm == null) {
- hitQuery = new MatchNoDocsQuery("Missing type: " + parentType);
+ public TopDocs[] topDocs(SearchHit[] hits) throws IOException {
+ Weight innerHitQueryWeight = createInnerHitQueryWeight();
+ TopDocs[] result = new TopDocs[hits.length];
+ for (int i = 0; i < hits.length; i++) {
+ SearchHit hit = hits[i];
+ final Query hitQuery;
+ if (isParentHit(hit)) {
+ String field = ParentFieldMapper.joinField(hit.getType());
+ hitQuery = new DocValuesTermsQuery(field, hit.getId());
+ } else if (isChildHit(hit)) {
+ DocumentMapper hitDocumentMapper = mapperService.documentMapper(hit.getType());
+ final String parentType = hitDocumentMapper.parentFieldMapper().type();
+ SearchHitField parentField = hit.field(ParentFieldMapper.NAME);
+ if (parentField == null) {
+ throw new IllegalStateException("All children must have a _parent");
+ }
+ Term uidTerm = context.mapperService().createUidTerm(parentType, parentField.getValue());
+ if (uidTerm == null) {
+ hitQuery = new MatchNoDocsQuery("Missing type: " + parentType);
+ } else {
+ hitQuery = new TermQuery(uidTerm);
+ }
} else {
- hitQuery = new TermQuery(uidTerm);
+ result[i] = Lucene.EMPTY_TOP_DOCS;
+ continue;
}
- } else {
- return Lucene.EMPTY_TOP_DOCS;
- }
- BooleanQuery q = new BooleanQuery.Builder()
- .add(query(), Occur.MUST)
- // Only include docs that have the current hit as parent
- .add(hitQuery, Occur.FILTER)
- // Only include docs that have this inner hits type
- .add(documentMapper.typeFilter(context.getQueryShardContext()), Occur.FILTER)
- .build();
- if (size() == 0) {
- final int count = context.searcher().count(q);
- return new TopDocs(count, Lucene.EMPTY_SCORE_DOCS, 0);
- } else {
- int topN = Math.min(from() + size(), context.searcher().getIndexReader().maxDoc());
- TopDocsCollector topDocsCollector;
- if (sort() != null) {
- topDocsCollector = TopFieldCollector.create(sort().sort, topN, true, trackScores(), trackScores());
+ BooleanQuery q = new BooleanQuery.Builder()
+ // Only include docs that have the current hit as parent
+ .add(hitQuery, Occur.FILTER)
+ // Only include docs that have this inner hits type
+ .add(documentMapper.typeFilter(context.getQueryShardContext()), Occur.FILTER)
+ .build();
+ Weight weight = context.searcher().createNormalizedWeight(q, false);
+ if (size() == 0) {
+ TotalHitCountCollector totalHitCountCollector = new TotalHitCountCollector();
+ for (LeafReaderContext ctx : context.searcher().getIndexReader().leaves()) {
+ intersect(weight, innerHitQueryWeight, totalHitCountCollector, ctx);
+ }
+ result[i] = new TopDocs(totalHitCountCollector.getTotalHits(), Lucene.EMPTY_SCORE_DOCS, 0);
} else {
- topDocsCollector = TopScoreDocCollector.create(topN);
+ int topN = Math.min(from() + size(), context.searcher().getIndexReader().maxDoc());
+ TopDocsCollector topDocsCollector;
+ if (sort() != null) {
+ topDocsCollector = TopFieldCollector.create(sort().sort, topN, true, trackScores(), trackScores());
+ } else {
+ topDocsCollector = TopScoreDocCollector.create(topN);
+ }
+ try {
+ for (LeafReaderContext ctx : context.searcher().getIndexReader().leaves()) {
+ intersect(weight, innerHitQueryWeight, topDocsCollector, ctx);
+ }
+ } finally {
+ clearReleasables(Lifetime.COLLECTION);
+ }
+ result[i] = topDocsCollector.topDocs(from(), size());
}
- try {
- context.searcher().search(q, topDocsCollector);
- } finally {
- clearReleasables(Lifetime.COLLECTION);
- }
- return topDocsCollector.topDocs(from(), size());
}
+ return result;
}
private boolean isParentHit(SearchHit hit) {
@@ -214,4 +259,42 @@ public final class InnerHitsContext {
return documentMapper.type().equals(hitDocumentMapper.parentFieldMapper().type());
}
}
+
+ static void intersect(Weight weight, Weight innerHitQueryWeight, Collector collector, LeafReaderContext ctx) throws IOException {
+ ScorerSupplier scorerSupplier = weight.scorerSupplier(ctx);
+ if (scorerSupplier == null) {
+ return;
+ }
+ // use random access since this scorer will be consumed on a minority of documents
+ Scorer scorer = scorerSupplier.get(true);
+
+ ScorerSupplier innerHitQueryScorerSupplier = innerHitQueryWeight.scorerSupplier(ctx);
+ if (innerHitQueryScorerSupplier == null) {
+ return;
+ }
+ // use random access since this scorer will be consumed on a minority of documents
+ Scorer innerHitQueryScorer = innerHitQueryScorerSupplier.get(true);
+
+ final LeafCollector leafCollector;
+ try {
+ leafCollector = collector.getLeafCollector(ctx);
+ // Just setting the innerHitQueryScorer is ok, because that is the actual scoring part of the query
+ leafCollector.setScorer(innerHitQueryScorer);
+ } catch (CollectionTerminatedException e) {
+ return;
+ }
+
+ try {
+ Bits acceptDocs = ctx.reader().getLiveDocs();
+ DocIdSetIterator iterator = ConjunctionDISI.intersectIterators(Arrays.asList(innerHitQueryScorer.iterator(),
+ scorer.iterator()));
+ for (int docId = iterator.nextDoc(); docId < DocIdSetIterator.NO_MORE_DOCS; docId = iterator.nextDoc()) {
+ if (acceptDocs == null || acceptDocs.get(docId)) {
+ leafCollector.collect(docId);
+ }
+ }
+ } catch (CollectionTerminatedException e) {
+ // ignore and continue
+ }
+ }
}
diff --git a/core/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsFetchSubPhase.java b/core/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsFetchSubPhase.java
index 48294bd82c5..90e1b5cf828 100644
--- a/core/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsFetchSubPhase.java
+++ b/core/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsFetchSubPhase.java
@@ -22,12 +22,11 @@ package org.elasticsearch.search.fetch.subphase;
import org.apache.lucene.search.FieldDoc;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TopDocs;
-import org.elasticsearch.ExceptionsHelper;
+import org.elasticsearch.search.SearchHit;
+import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.fetch.FetchPhase;
import org.elasticsearch.search.fetch.FetchSearchResult;
import org.elasticsearch.search.fetch.FetchSubPhase;
-import org.elasticsearch.search.SearchHit;
-import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
@@ -43,39 +42,42 @@ public final class InnerHitsFetchSubPhase implements FetchSubPhase {
}
@Override
- public void hitExecute(SearchContext context, HitContext hitContext) {
+ public void hitsExecute(SearchContext context, SearchHit[] hits) throws IOException {
if ((context.innerHits() != null && context.innerHits().getInnerHits().size() > 0) == false) {
return;
}
- Map results = new HashMap<>();
+
for (Map.Entry entry : context.innerHits().getInnerHits().entrySet()) {
InnerHitsContext.BaseInnerHits innerHits = entry.getValue();
- TopDocs topDocs;
- try {
- topDocs = innerHits.topDocs(context, hitContext);
- } catch (IOException e) {
- throw ExceptionsHelper.convertToElastic(e);
- }
- innerHits.queryResult().topDocs(topDocs, innerHits.sort() == null ? null : innerHits.sort().formats);
- int[] docIdsToLoad = new int[topDocs.scoreDocs.length];
- for (int i = 0; i < topDocs.scoreDocs.length; i++) {
- docIdsToLoad[i] = topDocs.scoreDocs[i].doc;
- }
- innerHits.docIdsToLoad(docIdsToLoad, 0, docIdsToLoad.length);
- fetchPhase.execute(innerHits);
- FetchSearchResult fetchResult = innerHits.fetchResult();
- SearchHit[] internalHits = fetchResult.fetchResult().hits().internalHits();
- for (int i = 0; i < internalHits.length; i++) {
- ScoreDoc scoreDoc = topDocs.scoreDocs[i];
- SearchHit searchHitFields = internalHits[i];
- searchHitFields.score(scoreDoc.score);
- if (scoreDoc instanceof FieldDoc) {
- FieldDoc fieldDoc = (FieldDoc) scoreDoc;
- searchHitFields.sortValues(fieldDoc.fields, innerHits.sort().formats);
+ TopDocs[] topDocs = innerHits.topDocs(hits);
+ for (int i = 0; i < hits.length; i++) {
+ SearchHit hit = hits[i];
+ TopDocs topDoc = topDocs[i];
+
+ Map results = hit.getInnerHits();
+ if (results == null) {
+ hit.setInnerHits(results = new HashMap<>());
}
+ innerHits.queryResult().topDocs(topDoc, innerHits.sort() == null ? null : innerHits.sort().formats);
+ int[] docIdsToLoad = new int[topDoc.scoreDocs.length];
+ for (int j = 0; j < topDoc.scoreDocs.length; j++) {
+ docIdsToLoad[j] = topDoc.scoreDocs[j].doc;
+ }
+ innerHits.docIdsToLoad(docIdsToLoad, 0, docIdsToLoad.length);
+ fetchPhase.execute(innerHits);
+ FetchSearchResult fetchResult = innerHits.fetchResult();
+ SearchHit[] internalHits = fetchResult.fetchResult().hits().internalHits();
+ for (int j = 0; j < internalHits.length; j++) {
+ ScoreDoc scoreDoc = topDoc.scoreDocs[j];
+ SearchHit searchHitFields = internalHits[j];
+ searchHitFields.score(scoreDoc.score);
+ if (scoreDoc instanceof FieldDoc) {
+ FieldDoc fieldDoc = (FieldDoc) scoreDoc;
+ searchHitFields.sortValues(fieldDoc.fields, innerHits.sort().formats);
+ }
+ }
+ results.put(entry.getKey(), fetchResult.hits());
}
- results.put(entry.getKey(), fetchResult.hits());
}
- hitContext.hit().setInnerHits(results);
}
}
diff --git a/core/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGenerator.java b/core/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGenerator.java
index f6faaaeea5c..24f2647167e 100644
--- a/core/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGenerator.java
+++ b/core/src/main/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGenerator.java
@@ -58,7 +58,7 @@ public final class DirectCandidateGenerator extends CandidateGenerator {
private final TermsEnum termsEnum;
private final IndexReader reader;
private final long dictSize;
- private final double logBase = 5;
+ private static final double LOG_BASE = 5;
private final long frequencyPlateau;
private final Analyzer preFilter;
private final Analyzer postFilter;
@@ -189,7 +189,7 @@ public final class DirectCandidateGenerator extends CandidateGenerator {
protected long thresholdFrequency(long termFrequency, long dictionarySize) {
if (termFrequency > 0) {
- return max(0, round(termFrequency * (log10(termFrequency - frequencyPlateau) * (1.0 / log10(logBase))) + 1));
+ return max(0, round(termFrequency * (log10(termFrequency - frequencyPlateau) * (1.0 / log10(LOG_BASE))) + 1));
}
return 0;
diff --git a/core/src/test/java/org/elasticsearch/bootstrap/BootstrapTests.java b/core/src/test/java/org/elasticsearch/bootstrap/BootstrapTests.java
new file mode 100644
index 00000000000..b2fab7746fb
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/bootstrap/BootstrapTests.java
@@ -0,0 +1,33 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.bootstrap;
+
+import org.elasticsearch.test.ESTestCase;
+
+public class BootstrapTests extends ESTestCase {
+
+ public void testConfigDeprecation() {
+ Bootstrap.checkConfigExtension(".json");
+ assertWarnings("elasticsearch.json is deprecated; rename your configuration file to elasticsearch.yaml");
+ Bootstrap.checkConfigExtension(".yml");
+ assertWarnings("elasticsearch.yml is deprecated; rename your configuration file to elasticsearch.yaml");
+ Bootstrap.checkConfigExtension(".yaml"); // no warnings, will be checked in @After
+ }
+}
diff --git a/core/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java
index 6de9cbab620..cd6622368c9 100644
--- a/core/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java
+++ b/core/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java
@@ -19,6 +19,7 @@
package org.elasticsearch.index.query;
import org.apache.lucene.search.join.ScoreMode;
+import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.settings.Settings;
@@ -311,13 +312,16 @@ public class InnerHitBuilderTests extends ESTestCase {
}
innerHits.setScriptFields(new HashSet<>(scriptFields.values()));
FetchSourceContext randomFetchSourceContext;
- if (randomBoolean()) {
- randomFetchSourceContext = new FetchSourceContext(randomBoolean());
- } else {
+ int randomInt = randomIntBetween(0, 2);
+ if (randomInt == 0) {
+ randomFetchSourceContext = new FetchSourceContext(true, Strings.EMPTY_ARRAY, Strings.EMPTY_ARRAY);
+ } else if (randomInt == 1) {
randomFetchSourceContext = new FetchSourceContext(true,
generateRandomStringArray(12, 16, false),
generateRandomStringArray(12, 16, false)
);
+ } else {
+ randomFetchSourceContext = new FetchSourceContext(randomBoolean());
}
innerHits.setFetchSourceContext(randomFetchSourceContext);
if (randomBoolean()) {
diff --git a/core/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java
index f2d66409e9e..c321ffa965a 100644
--- a/core/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java
+++ b/core/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java
@@ -32,6 +32,7 @@ import org.elasticsearch.action.termvectors.MultiTermVectorsRequest;
import org.elasticsearch.action.termvectors.MultiTermVectorsResponse;
import org.elasticsearch.action.termvectors.TermVectorsRequest;
import org.elasticsearch.action.termvectors.TermVectorsResponse;
+import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.lucene.search.MoreLikeThisQuery;
@@ -64,6 +65,8 @@ import static org.hamcrest.Matchers.instanceOf;
public class MoreLikeThisQueryBuilderTests extends AbstractQueryTestCase<MoreLikeThisQueryBuilder> {
+ private static final String[] SHUFFLE_PROTECTED_FIELDS = new String[]{Item.Field.DOC.getPreferredName()};
+
private static String[] randomFields;
private static Item[] randomLikeItems;
private static Item[] randomUnlikeItems;
@@ -204,6 +207,16 @@ public class MoreLikeThisQueryBuilderTests extends AbstractQueryTestCase getObjectsHoldingArbitraryContent() {
//doc contains arbitrary content, anything can be added to it and no exception will be thrown
diff --git a/core/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilderTests.java
index 702e8b60917..90d11efb11c 100644
--- a/core/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilderTests.java
+++ b/core/src/test/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryBuilderTests.java
@@ -83,6 +83,9 @@ import static org.hamcrest.Matchers.nullValue;
public class FunctionScoreQueryBuilderTests extends AbstractQueryTestCase<FunctionScoreQueryBuilder> {
+ private static final String[] SHUFFLE_PROTECTED_FIELDS = new String[] {Script.PARAMS_PARSE_FIELD.getPreferredName(),
+ ExponentialDecayFunctionBuilder.NAME, LinearDecayFunctionBuilder.NAME, GaussDecayFunctionBuilder.NAME};
+
@Override
protected Collection<Class<? extends Plugin>> getPlugins() {
return Collections.singleton(TestPlugin.class);
@@ -106,6 +109,12 @@ public class FunctionScoreQueryBuilderTests extends AbstractQueryTestCase getObjectsHoldingArbitraryContent() {
//script_score.script.params can contain arbitrary parameters. no error is expected when adding additional objects
@@ -218,7 +227,7 @@ public class FunctionScoreQueryBuilderTests extends AbstractQueryTestCase randomSearchFailures() {
+ private List<ScrollableHitSource.SearchFailure> randomSearchFailures() {
if (randomBoolean()) {
return emptyList();
}
@@ -68,7 +67,7 @@ public class BulkByScrollResponseTests extends ESTestCase {
shardId = randomInt();
nodeId = usually() ? randomAlphaOfLength(5) : null;
}
- return singletonList(new SearchFailure(new ElasticsearchException("foo"), index, shardId, nodeId));
+ return singletonList(new ScrollableHitSource.SearchFailure(new ElasticsearchException("foo"), index, shardId, nodeId));
}
private void assertResponseEquals(BulkByScrollResponse expected, BulkByScrollResponse actual) {
@@ -86,8 +85,8 @@ public class BulkByScrollResponseTests extends ESTestCase {
}
assertEquals(expected.getSearchFailures().size(), actual.getSearchFailures().size());
for (int i = 0; i < expected.getSearchFailures().size(); i++) {
- SearchFailure expectedFailure = expected.getSearchFailures().get(i);
- SearchFailure actualFailure = actual.getSearchFailures().get(i);
+ ScrollableHitSource.SearchFailure expectedFailure = expected.getSearchFailures().get(i);
+ ScrollableHitSource.SearchFailure actualFailure = actual.getSearchFailures().get(i);
assertEquals(expectedFailure.getIndex(), actualFailure.getIndex());
assertEquals(expectedFailure.getShardId(), actualFailure.getShardId());
assertEquals(expectedFailure.getNodeId(), actualFailure.getNodeId());
diff --git a/core/src/test/java/org/elasticsearch/action/bulk/byscroll/BulkByScrollTaskStatusTests.java b/core/src/test/java/org/elasticsearch/index/reindex/BulkByScrollTaskStatusTests.java
similarity index 97%
rename from core/src/test/java/org/elasticsearch/action/bulk/byscroll/BulkByScrollTaskStatusTests.java
rename to core/src/test/java/org/elasticsearch/index/reindex/BulkByScrollTaskStatusTests.java
index 503fe1db7cd..982198c8fee 100644
--- a/core/src/test/java/org/elasticsearch/action/bulk/byscroll/BulkByScrollTaskStatusTests.java
+++ b/core/src/test/java/org/elasticsearch/index/reindex/BulkByScrollTaskStatusTests.java
@@ -17,7 +17,7 @@
* under the License.
*/
-package org.elasticsearch.action.bulk.byscroll;
+package org.elasticsearch.index.reindex;
import org.apache.lucene.util.LuceneTestCase;
import org.elasticsearch.ElasticsearchException;
@@ -26,6 +26,7 @@ import org.elasticsearch.common.Randomness;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.test.ESTestCase;
+import org.hamcrest.Matchers;
import java.io.IOException;
import java.util.List;
@@ -75,7 +76,7 @@ public class BulkByScrollTaskStatusTests extends ESTestCase {
assertEquals(expected.getReasonCancelled(), actual.getReasonCancelled());
assertEquals(expected.getThrottledUntil(), actual.getThrottledUntil());
if (version.onOrAfter(Version.V_5_1_1_UNRELEASED)) {
- assertThat(actual.getSliceStatuses(), hasSize(expected.getSliceStatuses().size()));
+ assertThat(actual.getSliceStatuses(), Matchers.hasSize(expected.getSliceStatuses().size()));
for (int i = 0; i < expected.getSliceStatuses().size(); i++) {
BulkByScrollTask.StatusOrException sliceStatus = expected.getSliceStatuses().get(i);
if (sliceStatus == null) {
diff --git a/core/src/test/java/org/elasticsearch/action/bulk/byscroll/BulkByScrollTaskTests.java b/core/src/test/java/org/elasticsearch/index/reindex/BulkByScrollTaskTests.java
similarity index 99%
rename from core/src/test/java/org/elasticsearch/action/bulk/byscroll/BulkByScrollTaskTests.java
rename to core/src/test/java/org/elasticsearch/index/reindex/BulkByScrollTaskTests.java
index ff0eae55520..f4d4ea790bc 100644
--- a/core/src/test/java/org/elasticsearch/action/bulk/byscroll/BulkByScrollTaskTests.java
+++ b/core/src/test/java/org/elasticsearch/index/reindex/BulkByScrollTaskTests.java
@@ -17,7 +17,7 @@
* under the License.
*/
-package org.elasticsearch.action.bulk.byscroll;
+package org.elasticsearch.index.reindex;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ToXContent;
diff --git a/core/src/test/java/org/elasticsearch/action/bulk/byscroll/DeleteByQueryRequestTests.java b/core/src/test/java/org/elasticsearch/index/reindex/DeleteByQueryRequestTests.java
similarity index 99%
rename from core/src/test/java/org/elasticsearch/action/bulk/byscroll/DeleteByQueryRequestTests.java
rename to core/src/test/java/org/elasticsearch/index/reindex/DeleteByQueryRequestTests.java
index f5c00f63de9..8c84c8f3f56 100644
--- a/core/src/test/java/org/elasticsearch/action/bulk/byscroll/DeleteByQueryRequestTests.java
+++ b/core/src/test/java/org/elasticsearch/index/reindex/DeleteByQueryRequestTests.java
@@ -17,7 +17,7 @@
* under the License.
*/
-package org.elasticsearch.action.bulk.byscroll;
+package org.elasticsearch.index.reindex;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.search.SearchRequest;
diff --git a/core/src/test/java/org/elasticsearch/action/bulk/byscroll/ParentBulkByScrollTaskTests.java b/core/src/test/java/org/elasticsearch/index/reindex/ParentBulkByScrollTaskTests.java
similarity index 99%
rename from core/src/test/java/org/elasticsearch/action/bulk/byscroll/ParentBulkByScrollTaskTests.java
rename to core/src/test/java/org/elasticsearch/index/reindex/ParentBulkByScrollTaskTests.java
index 715fcaaad54..6e2d44abed5 100644
--- a/core/src/test/java/org/elasticsearch/action/bulk/byscroll/ParentBulkByScrollTaskTests.java
+++ b/core/src/test/java/org/elasticsearch/index/reindex/ParentBulkByScrollTaskTests.java
@@ -17,7 +17,7 @@
* under the License.
*/
-package org.elasticsearch.action.bulk.byscroll;
+package org.elasticsearch.index.reindex;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.test.ESTestCase;
diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexRequestTests.java b/core/src/test/java/org/elasticsearch/index/reindex/ReindexRequestTests.java
similarity index 97%
rename from modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexRequestTests.java
rename to core/src/test/java/org/elasticsearch/index/reindex/ReindexRequestTests.java
index d1bb6f6096c..32b01237375 100644
--- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexRequestTests.java
+++ b/core/src/test/java/org/elasticsearch/index/reindex/ReindexRequestTests.java
@@ -20,11 +20,9 @@
package org.elasticsearch.index.reindex;
import org.elasticsearch.action.ActionRequestValidationException;
-import org.elasticsearch.action.bulk.byscroll.AbstractBulkByScrollRequestTestCase;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.common.bytes.BytesArray;
-import org.elasticsearch.index.reindex.remote.RemoteInfo;
import org.elasticsearch.search.slice.SliceBuilder;
import static java.util.Collections.emptyMap;
diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryRequestTests.java b/core/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryRequestTests.java
similarity index 97%
rename from modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryRequestTests.java
rename to core/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryRequestTests.java
index 700f45b42c5..b30968cf056 100644
--- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryRequestTests.java
+++ b/core/src/test/java/org/elasticsearch/index/reindex/UpdateByQueryRequestTests.java
@@ -19,7 +19,6 @@
package org.elasticsearch.index.reindex;
-import org.elasticsearch.action.bulk.byscroll.AbstractBulkByScrollRequestTestCase;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.support.IndicesOptions;
diff --git a/core/src/test/java/org/elasticsearch/action/bulk/byscroll/WorkingBulkByScrollTaskTests.java b/core/src/test/java/org/elasticsearch/index/reindex/WorkingBulkByScrollTaskTests.java
similarity index 98%
rename from core/src/test/java/org/elasticsearch/action/bulk/byscroll/WorkingBulkByScrollTaskTests.java
rename to core/src/test/java/org/elasticsearch/index/reindex/WorkingBulkByScrollTaskTests.java
index 7356d626c10..5d594d080b8 100644
--- a/core/src/test/java/org/elasticsearch/action/bulk/byscroll/WorkingBulkByScrollTaskTests.java
+++ b/core/src/test/java/org/elasticsearch/index/reindex/WorkingBulkByScrollTaskTests.java
@@ -17,10 +17,8 @@
* under the License.
*/
-package org.elasticsearch.action.bulk.byscroll;
+package org.elasticsearch.index.reindex;
-import org.elasticsearch.action.bulk.byscroll.BulkByScrollTask;
-import org.elasticsearch.action.bulk.byscroll.WorkingBulkByScrollTask;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.AbstractRunnable;
import org.elasticsearch.tasks.TaskId;
diff --git a/core/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java b/core/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java
index c08e3976bde..c13177a6250 100644
--- a/core/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java
+++ b/core/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java
@@ -158,7 +158,7 @@ public class IndexLevelReplicationTests extends ESIndexLevelReplicationTestCase
final ShardRouting shardRouting = shard.routingEntry();
assertThat(shardRouting + " local checkpoint mismatch", shardStats.getLocalCheckpoint(), equalTo(numDocs - 1L));
/*
- * After the last indexing operation completes, the primary will advance its global checkpoint. Without an other indexing
+ * After the last indexing operation completes, the primary will advance its global checkpoint. Without another indexing
* operation, or a background sync, the primary will not have broadcast this global checkpoint to its replicas. However, a
* shard could have recovered from the primary in which case its global checkpoint will be in-sync with the primary.
* Therefore, we can only assert that the global checkpoint is number of docs minus one (matching the primary, in case of a
@@ -178,6 +178,7 @@ public class IndexLevelReplicationTests extends ESIndexLevelReplicationTestCase
// simulate a background global checkpoint sync at which point we expect the global checkpoint to advance on the replicas
shards.syncGlobalCheckpoint();
+ final long noOpsPerformed = SequenceNumbersService.NO_OPS_PERFORMED;
for (IndexShard shard : shards) {
final SeqNoStats shardStats = shard.seqNoStats();
final ShardRouting shardRouting = shard.routingEntry();
@@ -185,7 +186,7 @@ public class IndexLevelReplicationTests extends ESIndexLevelReplicationTestCase
assertThat(
shardRouting + " global checkpoint mismatch",
shardStats.getGlobalCheckpoint(),
- numDocs == 0 ? equalTo(unassignedSeqNo) : equalTo(numDocs - 1L));
+ numDocs == 0 ? equalTo(noOpsPerformed) : equalTo(numDocs - 1L));
assertThat(shardRouting + " max seq no mismatch", shardStats.getMaxSeqNo(), equalTo(numDocs - 1L));
}
}
diff --git a/core/src/test/java/org/elasticsearch/index/seqno/GlobalCheckpointTrackerTests.java b/core/src/test/java/org/elasticsearch/index/seqno/GlobalCheckpointTrackerTests.java
index b99ac13a0d4..61eb4581328 100644
--- a/core/src/test/java/org/elasticsearch/index/seqno/GlobalCheckpointTrackerTests.java
+++ b/core/src/test/java/org/elasticsearch/index/seqno/GlobalCheckpointTrackerTests.java
@@ -145,17 +145,20 @@ public class GlobalCheckpointTrackerTests extends ESTestCase {
final Map<String, Long> assigned = new HashMap<>();
assigned.putAll(active);
assigned.putAll(initializing);
- final String maxActiveID = active.entrySet().stream().max(Comparator.comparing(Map.Entry::getValue)).get().getKey();
tracker.updateAllocationIdsFromMaster(
- active.entrySet().stream().filter(e -> !e.getKey().equals(maxActiveID)).map(Map.Entry::getKey).collect(Collectors.toSet()),
+ active.keySet(),
initializing.keySet());
randomSubsetOf(initializing.keySet()).forEach(k -> markAllocationIdAsInSyncQuietly(tracker, k, tracker.getGlobalCheckpoint()));
- assigned.forEach(tracker::updateLocalCheckpoint);
+ final String missingActiveID = randomFrom(active.keySet());
+ assigned
+ .entrySet()
+ .stream()
+ .filter(e -> !e.getKey().equals(missingActiveID))
+ .forEach(e -> tracker.updateLocalCheckpoint(e.getKey(), e.getValue()));
- // now mark all active shards
- tracker.updateAllocationIdsFromMaster(active.keySet(), initializing.keySet());
+ assertThat(tracker.getGlobalCheckpoint(), equalTo(UNASSIGNED_SEQ_NO));
- // update again
+ // now update all knowledge of all shards
assigned.forEach(tracker::updateLocalCheckpoint);
assertThat(tracker.getGlobalCheckpoint(), not(equalTo(UNASSIGNED_SEQ_NO)));
}
diff --git a/core/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java b/core/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java
index 338634d977e..a5e5ecd8aa6 100644
--- a/core/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java
+++ b/core/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java
@@ -69,6 +69,7 @@ import org.elasticsearch.test.IndexSettingsModule;
import org.elasticsearch.test.InternalSettingsPlugin;
import java.io.IOException;
+import java.io.UncheckedIOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
@@ -81,6 +82,8 @@ import java.util.concurrent.BrokenBarrierException;
import java.util.concurrent.CyclicBarrier;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
+import java.util.function.Predicate;
+import java.util.function.Supplier;
import static java.util.Collections.emptyMap;
import static java.util.Collections.emptySet;
@@ -168,33 +171,43 @@ public class IndexShardIT extends ESSingleNodeTestCase {
IndicesService indicesService = getInstanceFromNode(IndicesService.class);
IndexService test = indicesService.indexService(resolveIndex("test"));
IndexShard shard = test.getShardOrNull(0);
- shard.checkIdle(Long.MIN_VALUE);
+ Translog translog = ShardUtilsTests.getShardEngine(shard).getTranslog();
+ Predicate<Translog> needsSync = (tlog) -> {
+ // we can't use tlog.needsSync() here since it also takes the global checkpoint into account
+ // we explicitly want to check here if our durability checks are taken into account so we only
+ // check if we are synced up to the current write location
+ Translog.Location lastWriteLocation = tlog.getLastWriteLocation();
+ try {
+ // the lastWriteLocation has an Integer.MAX_VALUE size so we have to create a new one
+ return tlog.ensureSynced(new Translog.Location(lastWriteLocation.generation, lastWriteLocation.translogLocation, 0));
+ } catch (IOException e) {
+ throw new UncheckedIOException(e);
+ }
+ };
setDurability(shard, Translog.Durability.REQUEST);
- assertBusy(() -> assertFalse(ShardUtilsTests.getShardEngine(shard).getTranslog().syncNeeded()));
+ assertFalse(needsSync.test(translog));
setDurability(shard, Translog.Durability.ASYNC);
client().prepareIndex("test", "bar", "2").setSource("{}", XContentType.JSON).get();
- assertTrue(ShardUtilsTests.getShardEngine(shard).getTranslog().syncNeeded());
+ assertTrue(needsSync.test(translog));
setDurability(shard, Translog.Durability.REQUEST);
client().prepareDelete("test", "bar", "1").get();
- shard.checkIdle(Long.MIN_VALUE);
- assertBusy(() -> assertFalse(ShardUtilsTests.getShardEngine(shard).getTranslog().syncNeeded()));
+ assertFalse(needsSync.test(translog));
setDurability(shard, Translog.Durability.ASYNC);
client().prepareDelete("test", "bar", "2").get();
- assertTrue(ShardUtilsTests.getShardEngine(shard).getTranslog().syncNeeded());
+ assertTrue(translog.syncNeeded());
setDurability(shard, Translog.Durability.REQUEST);
assertNoFailures(client().prepareBulk()
.add(client().prepareIndex("test", "bar", "3").setSource("{}", XContentType.JSON))
.add(client().prepareDelete("test", "bar", "1")).get());
- shard.checkIdle(Long.MIN_VALUE);
- assertBusy(() -> assertFalse(ShardUtilsTests.getShardEngine(shard).getTranslog().syncNeeded()));
+ assertFalse(needsSync.test(translog));
setDurability(shard, Translog.Durability.ASYNC);
assertNoFailures(client().prepareBulk()
.add(client().prepareIndex("test", "bar", "4").setSource("{}", XContentType.JSON))
.add(client().prepareDelete("test", "bar", "3")).get());
setDurability(shard, Translog.Durability.REQUEST);
- assertTrue(ShardUtilsTests.getShardEngine(shard).getTranslog().syncNeeded());
+ assertTrue(needsSync.test(translog));
}
private void setDurability(IndexShard shard, Translog.Durability durability) {
diff --git a/core/src/test/java/org/elasticsearch/node/InternalSettingsPreparerTests.java b/core/src/test/java/org/elasticsearch/node/InternalSettingsPreparerTests.java
index 033fa9cdf20..5db397ab16d 100644
--- a/core/src/test/java/org/elasticsearch/node/InternalSettingsPreparerTests.java
+++ b/core/src/test/java/org/elasticsearch/node/InternalSettingsPreparerTests.java
@@ -28,17 +28,14 @@ import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsException;
import org.elasticsearch.env.Environment;
-import org.elasticsearch.node.InternalSettingsPreparer;
import org.elasticsearch.test.ESTestCase;
import org.junit.After;
import org.junit.Before;
import java.io.IOException;
import java.io.InputStream;
-import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
-import java.util.Arrays;
import java.util.Collections;
import java.util.Map;
@@ -155,22 +152,36 @@ public class InternalSettingsPreparerTests extends ESTestCase {
public void testMultipleSettingsFileNotAllowed() throws IOException {
InputStream yaml = getClass().getResourceAsStream("/config/elasticsearch.yaml");
- InputStream properties = getClass().getResourceAsStream("/config/elasticsearch.properties");
- Path home = createTempDir();
- Path config = home.resolve("config");
+ InputStream json = getClass().getResourceAsStream("/config/elasticsearch.json");
+ Path config = homeDir.resolve("config");
Files.createDirectory(config);
Files.copy(yaml, config.resolve("elasticsearch.yaml"));
- Files.copy(properties, config.resolve("elasticsearch.properties"));
+ Files.copy(json, config.resolve("elasticsearch.json"));
- try {
- InternalSettingsPreparer.prepareEnvironment(Settings.builder()
- .put(baseEnvSettings)
- .build(), null);
- } catch (SettingsException e) {
- assertTrue(e.getMessage(), e.getMessage().contains("multiple settings files found with suffixes"));
- assertTrue(e.getMessage(), e.getMessage().contains(".yaml"));
- assertTrue(e.getMessage(), e.getMessage().contains(".properties"));
- }
+ SettingsException e = expectThrows(SettingsException.class, () ->
+ InternalSettingsPreparer.prepareEnvironment(Settings.builder().put(baseEnvSettings).build(), null)
+ );
+ assertTrue(e.getMessage(), e.getMessage().contains("multiple settings files found with suffixes"));
+ assertTrue(e.getMessage(), e.getMessage().contains(".yaml"));
+ assertTrue(e.getMessage(), e.getMessage().contains(".json"));
+ }
+
+ public void testYmlExtension() throws IOException {
+ InputStream yaml = getClass().getResourceAsStream("/config/elasticsearch.yaml");
+ Path config = homeDir.resolve("config");
+ Files.createDirectory(config);
+ Files.copy(yaml, config.resolve("elasticsearch.yml"));
+ Environment env = InternalSettingsPreparer.prepareEnvironment(Settings.builder().put(baseEnvSettings).build(), null);
+ assertEquals(".yml", env.configExtension());
+ }
+
+ public void testJsonExtension() throws IOException {
+ InputStream yaml = getClass().getResourceAsStream("/config/elasticsearch.json");
+ Path config = homeDir.resolve("config");
+ Files.createDirectory(config);
+ Files.copy(yaml, config.resolve("elasticsearch.json"));
+ Environment env = InternalSettingsPreparer.prepareEnvironment(Settings.builder().put(baseEnvSettings).build(), null);
+ assertEquals(".json", env.configExtension());
}
public void testSecureSettings() {
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/InternalOrderTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/InternalOrderTests.java
new file mode 100644
index 00000000000..43d10af99fb
--- /dev/null
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/InternalOrderTests.java
@@ -0,0 +1,158 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.search.aggregations;
+
+import org.elasticsearch.Version;
+import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.io.stream.BytesStreamOutput;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.Writeable.Reader;
+import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.common.xcontent.XContentParser.Token;
+import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.search.aggregations.InternalOrder.CompoundOrder;
+import org.elasticsearch.test.AbstractSerializingTestCase;
+import org.elasticsearch.test.VersionUtils;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
public class InternalOrderTests extends AbstractSerializingTestCase<BucketOrder> {
+
+ @Override
+ protected BucketOrder createTestInstance() {
+ if (randomBoolean()) {
+ return getRandomOrder();
+ } else {
+ List<BucketOrder> orders = new ArrayList<>();
+ for (int i = 0; i < randomInt(3); i++) {
+ orders.add(getRandomOrder());
+ }
+ return BucketOrder.compound(orders);
+ }
+ }
+
+ private BucketOrder getRandomOrder() {
+ switch(randomInt(2)) {
+ case 0: return BucketOrder.key(randomBoolean());
+ case 1: return BucketOrder.count(randomBoolean());
+ default: return BucketOrder.aggregation(randomAlphaOfLength(10), randomBoolean());
+ }
+ }
+
+ @Override
+ protected Reader<BucketOrder> instanceReader() {
+ return InternalOrder.Streams::readOrder;
+ }
+
+ @Override
+ protected BucketOrder doParseInstance(XContentParser parser) throws IOException {
+ Token token = parser.nextToken();
+ if (token == Token.START_OBJECT) {
+ return InternalOrder.Parser.parseOrderParam(parser, null);
+ }
+ if (token == Token.START_ARRAY) {
+ List<BucketOrder> orders = new ArrayList<>();
+ while (parser.nextToken() == Token.START_OBJECT) {
+ orders.add(InternalOrder.Parser.parseOrderParam(parser, null));
+ }
+ return BucketOrder.compound(orders);
+ }
+ return null;
+ }
+
+ @Override
+ protected BucketOrder assertSerialization(BucketOrder testInstance) throws IOException {
+ // identical behavior to AbstractWireSerializingTestCase, except assertNotSame is only called for
+ // compound and aggregation order because _key and _count orders are static instances.
+ BucketOrder deserializedInstance = copyInstance(testInstance);
+ assertEquals(testInstance, deserializedInstance);
+ assertEquals(testInstance.hashCode(), deserializedInstance.hashCode());
+ if(testInstance instanceof CompoundOrder || testInstance instanceof InternalOrder.Aggregation) {
+ assertNotSame(testInstance, deserializedInstance);
+ }
+ return deserializedInstance;
+ }
+
+ @Override
+ protected void assertParsedInstance(XContentType xContentType, BytesReference instanceAsBytes, BucketOrder expectedInstance)
+ throws IOException {
+ // identical behavior to AbstractSerializingTestCase, except assertNotSame is only called for
+ // compound and aggregation order because _key and _count orders are static instances.
+ XContentParser parser = createParser(XContentFactory.xContent(xContentType), instanceAsBytes);
+ BucketOrder newInstance = parseInstance(parser);
+ assertEquals(expectedInstance, newInstance);
+ assertEquals(expectedInstance.hashCode(), newInstance.hashCode());
+ if(expectedInstance instanceof CompoundOrder || expectedInstance instanceof InternalOrder.Aggregation) {
+ assertNotSame(newInstance, expectedInstance);
+ }
+ }
+
+ public void testHistogramOrderBwc() throws IOException {
+ for (int runs = 0; runs < NUMBER_OF_TEST_RUNS; runs++) {
+ BucketOrder order = createTestInstance();
+ Version bwcVersion = VersionUtils.randomVersionBetween(random(), VersionUtils.getFirstVersion(),
+ VersionUtils.getPreviousVersion(Version.V_6_0_0_alpha2_UNRELEASED));
+ boolean bwcOrderFlag = randomBoolean();
+ try (BytesStreamOutput out = new BytesStreamOutput()) {
+ out.setVersion(bwcVersion);
+ InternalOrder.Streams.writeHistogramOrder(order, out, bwcOrderFlag);
+ try (StreamInput in = out.bytes().streamInput()) {
+ in.setVersion(bwcVersion);
+ BucketOrder actual = InternalOrder.Streams.readHistogramOrder(in, bwcOrderFlag);
+ BucketOrder expected = order;
+ if (order instanceof CompoundOrder) {
+ expected = ((CompoundOrder) order).orderElements.get(0);
+ }
+ assertEquals(expected, actual);
+ }
+ }
+ }
+ }
+
+ public void testAggregationOrderEqualsAndHashCode() {
+ String path = randomAlphaOfLength(10);
+ boolean asc = randomBoolean();
+ BucketOrder o1 = BucketOrder.aggregation(path, asc);
+ BucketOrder o2 = BucketOrder.aggregation(path + "test", asc);
+ BucketOrder o3 = BucketOrder.aggregation(path, !asc);
+ BucketOrder o4 = BucketOrder.aggregation(path, asc);
+ assertNotEquals(o1, o2);
+ assertNotEquals(o1.hashCode(), o2.hashCode());
+ assertNotEquals(o1, o3);
+ assertNotEquals(o1.hashCode(), o3.hashCode());
+ assertEquals(o1, o4);
+ assertEquals(o1.hashCode(), o4.hashCode());
+
+ o1 = InternalOrder.compound(o1);
+ o2 = InternalOrder.compound(o2);
+ o3 = InternalOrder.compound(o3);
+ assertNotEquals(o1, o2);
+ assertNotEquals(o1.hashCode(), o2.hashCode());
+ assertNotEquals(o1, o2);
+ assertNotEquals(o1.hashCode(), o2.hashCode());
+ assertNotEquals(o1, o3);
+ assertNotEquals(o1.hashCode(), o3.hashCode());
+ assertNotEquals(o1, o4);
+ assertNotEquals(o1.hashCode(), o4.hashCode());
+ }
+
+}
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java
index cef4cb07f88..bedd8610a40 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramIT.java
@@ -18,7 +18,9 @@
*/
package org.elasticsearch.search.aggregations.bucket;
+import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.index.IndexRequestBuilder;
+import org.elasticsearch.action.search.SearchPhaseExecutionException;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.joda.DateMathParser;
import org.elasticsearch.common.joda.Joda;
@@ -30,13 +32,16 @@ import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptType;
+import org.elasticsearch.search.aggregations.AggregationExecutionException;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.bucket.DateScriptMocks.DateScriptsMockPlugin;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.elasticsearch.search.aggregations.bucket.histogram.ExtendedBounds;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket;
+import org.elasticsearch.search.aggregations.metrics.avg.Avg;
import org.elasticsearch.search.aggregations.metrics.sum.Sum;
+import org.elasticsearch.search.aggregations.BucketOrder;
import org.elasticsearch.test.ESIntegTestCase;
import org.hamcrest.Matchers;
import org.joda.time.DateTime;
@@ -57,6 +62,7 @@ import java.util.concurrent.TimeUnit;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
+import static org.elasticsearch.search.aggregations.AggregationBuilders.avg;
import static org.elasticsearch.search.aggregations.AggregationBuilders.dateHistogram;
import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram;
import static org.elasticsearch.search.aggregations.AggregationBuilders.max;
@@ -73,6 +79,8 @@ import static org.hamcrest.core.IsNull.notNullValue;
@ESIntegTestCase.SuiteScopeTestCase
public class DateHistogramIT extends ESIntegTestCase {
+ static Map<DateTime, Map<String, Object>> expectedMultiSortBuckets;
+
private DateTime date(int month, int day) {
return new DateTime(2012, month, day, 0, 0, DateTimeZone.UTC);
}
@@ -98,6 +106,7 @@ public class DateHistogramIT extends ESIntegTestCase {
return client().prepareIndex("idx", "type").setSource(jsonBuilder()
.startObject()
.field("value", value)
+ .field("constant", 1)
.field("date", date(month, day))
.startArray("dates").value(date(month, day)).value(date(month + 1, day + 1)).endArray()
.endObject());
@@ -115,6 +124,9 @@ public class DateHistogramIT extends ESIntegTestCase {
.field("value", i * 2)
.endObject()));
}
+
+ getMultiSortDocs(builders);
+
builders.addAll(Arrays.asList(
indexDoc(1, 2, 1), // date: Jan 2, dates: Jan 2, Feb 3
indexDoc(2, 2, 2), // date: Feb 2, dates: Feb 2, Mar 3
@@ -126,6 +138,50 @@ public class DateHistogramIT extends ESIntegTestCase {
ensureSearchable();
}
+ private void addExpectedBucket(DateTime key, long docCount, double avg, double sum) {
+ Map<String, Object> bucketProps = new HashMap<>();
+ bucketProps.put("_count", docCount);
+ bucketProps.put("avg_l", avg);
+ bucketProps.put("sum_d", sum);
+ expectedMultiSortBuckets.put(key, bucketProps);
+ }
+
+ private void getMultiSortDocs(List<IndexRequestBuilder> builders) throws IOException {
+ expectedMultiSortBuckets = new HashMap<>();
+ addExpectedBucket(date(1, 1), 3, 1, 6);
+ addExpectedBucket(date(1, 2), 3, 2, 6);
+ addExpectedBucket(date(1, 3), 2, 3, 3);
+ addExpectedBucket(date(1, 4), 2, 3, 4);
+ addExpectedBucket(date(1, 5), 2, 5, 3);
+ addExpectedBucket(date(1, 6), 1, 5, 1);
+ addExpectedBucket(date(1, 7), 1, 5, 1);
+
+ assertAcked(client().admin().indices().prepareCreate("sort_idx")
+ .addMapping("type", "date", "type=date").get());
+ for (int i = 1; i <= 3; i++) {
+ builders.add(client().prepareIndex("sort_idx", "type").setSource(
+ jsonBuilder().startObject().field("date", date(1, 1)).field("l", 1).field("d", i).endObject()));
+ builders.add(client().prepareIndex("sort_idx", "type").setSource(
+ jsonBuilder().startObject().field("date", date(1, 2)).field("l", 2).field("d", i).endObject()));
+ }
+ builders.add(client().prepareIndex("sort_idx", "type").setSource(
+ jsonBuilder().startObject().field("date", date(1, 3)).field("l", 3).field("d", 1).endObject()));
+ builders.add(client().prepareIndex("sort_idx", "type").setSource(
+ jsonBuilder().startObject().field("date", date(1, 3).plusHours(1)).field("l", 3).field("d", 2).endObject()));
+ builders.add(client().prepareIndex("sort_idx", "type").setSource(
+ jsonBuilder().startObject().field("date", date(1, 4)).field("l", 3).field("d", 1).endObject()));
+ builders.add(client().prepareIndex("sort_idx", "type").setSource(
+ jsonBuilder().startObject().field("date", date(1, 4).plusHours(2)).field("l", 3).field("d", 3).endObject()));
+ builders.add(client().prepareIndex("sort_idx", "type").setSource(
+ jsonBuilder().startObject().field("date", date(1, 5)).field("l", 5).field("d", 1).endObject()));
+ builders.add(client().prepareIndex("sort_idx", "type").setSource(
+ jsonBuilder().startObject().field("date", date(1, 5).plusHours(12)).field("l", 5).field("d", 2).endObject()));
+ builders.add(client().prepareIndex("sort_idx", "type").setSource(
+ jsonBuilder().startObject().field("date", date(1, 6)).field("l", 5).field("d", 1).endObject()));
+ builders.add(client().prepareIndex("sort_idx", "type").setSource(
+ jsonBuilder().startObject().field("date", date(1, 7)).field("l", 5).field("d", 1).endObject()));
+ }
+
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return Arrays.asList(
@@ -281,7 +337,7 @@ public class DateHistogramIT extends ESIntegTestCase {
.addAggregation(dateHistogram("histo")
.field("date")
.dateHistogramInterval(DateHistogramInterval.MONTH)
- .order(Histogram.Order.KEY_ASC))
+ .order(BucketOrder.key(true)))
.execute().actionGet();
assertSearchResponse(response);
@@ -304,7 +360,7 @@ public class DateHistogramIT extends ESIntegTestCase {
.addAggregation(dateHistogram("histo")
.field("date")
.dateHistogramInterval(DateHistogramInterval.MONTH)
- .order(Histogram.Order.KEY_DESC))
+ .order(BucketOrder.key(false)))
.execute().actionGet();
assertSearchResponse(response);
@@ -326,7 +382,7 @@ public class DateHistogramIT extends ESIntegTestCase {
.addAggregation(dateHistogram("histo")
.field("date")
.dateHistogramInterval(DateHistogramInterval.MONTH)
- .order(Histogram.Order.COUNT_ASC))
+ .order(BucketOrder.count(true)))
.execute().actionGet();
assertSearchResponse(response);
@@ -348,7 +404,7 @@ public class DateHistogramIT extends ESIntegTestCase {
.addAggregation(dateHistogram("histo")
.field("date")
.dateHistogramInterval(DateHistogramInterval.MONTH)
- .order(Histogram.Order.COUNT_DESC))
+ .order(BucketOrder.count(false)))
.execute().actionGet();
assertSearchResponse(response);
@@ -428,7 +484,7 @@ public class DateHistogramIT extends ESIntegTestCase {
.addAggregation(dateHistogram("histo")
.field("date")
.dateHistogramInterval(DateHistogramInterval.MONTH)
- .order(Histogram.Order.aggregation("sum", true))
+ .order(BucketOrder.aggregation("sum", true))
.subAggregation(max("sum").field("value")))
.execute().actionGet();
@@ -451,7 +507,7 @@ public class DateHistogramIT extends ESIntegTestCase {
.addAggregation(dateHistogram("histo")
.field("date")
.dateHistogramInterval(DateHistogramInterval.MONTH)
- .order(Histogram.Order.aggregation("sum", false))
+ .order(BucketOrder.aggregation("sum", false))
.subAggregation(max("sum").field("value")))
.execute().actionGet();
@@ -474,7 +530,7 @@ public class DateHistogramIT extends ESIntegTestCase {
.addAggregation(dateHistogram("histo")
.field("date")
.dateHistogramInterval(DateHistogramInterval.MONTH)
- .order(Histogram.Order.aggregation("stats", "sum", false))
+ .order(BucketOrder.aggregation("stats", "sum", false))
.subAggregation(stats("stats").field("value")))
.execute().actionGet();
@@ -492,6 +548,60 @@ public class DateHistogramIT extends ESIntegTestCase {
}
}
+ public void testSingleValuedFieldOrderedByTieBreaker() throws Exception {
+ SearchResponse response = client().prepareSearch("idx")
+ .addAggregation(dateHistogram("histo")
+ .field("date")
+ .dateHistogramInterval(DateHistogramInterval.MONTH)
+ .order(BucketOrder.aggregation("max_constant", randomBoolean()))
+ .subAggregation(max("max_constant").field("constant")))
+ .execute().actionGet();
+
+ assertSearchResponse(response);
+
+ Histogram histo = response.getAggregations().get("histo");
+ assertThat(histo, notNullValue());
+ assertThat(histo.getName(), equalTo("histo"));
+ assertThat(histo.getBuckets().size(), equalTo(3));
+
+ int i = 1;
+ for (Histogram.Bucket bucket : histo.getBuckets()) {
+ assertThat(bucket.getKey(), equalTo(date(i, 1)));
+ i++;
+ }
+ }
+
+ public void testSingleValuedFieldOrderedByIllegalAgg() throws Exception {
+ boolean asc = true;
+ try {
+ client()
+ .prepareSearch("idx")
+ .addAggregation(
+ dateHistogram("histo").field("date")
+ .dateHistogramInterval(DateHistogramInterval.MONTH)
+ .order(BucketOrder.aggregation("inner_histo>avg", asc))
+ .subAggregation(dateHistogram("inner_histo")
+ .dateHistogramInterval(DateHistogramInterval.MONTH)
+ .field("dates")
+ .subAggregation(avg("avg").field("value"))))
+ .execute().actionGet();
+ fail("Expected an exception");
+ } catch (SearchPhaseExecutionException e) {
+ ElasticsearchException[] rootCauses = e.guessRootCauses();
+ if (rootCauses.length == 1) {
+ ElasticsearchException rootCause = rootCauses[0];
+ if (rootCause instanceof AggregationExecutionException) {
+ AggregationExecutionException aggException = (AggregationExecutionException) rootCause;
+ assertThat(aggException.getMessage(), Matchers.startsWith("Invalid aggregation order path"));
+ } else {
+ throw e;
+ }
+ } else {
+ throw e;
+ }
+ }
+ }
+
public void testSingleValuedFieldWithValueScript() throws Exception {
Map<String, Object> params = new HashMap<>();
params.put("fieldname", "date");
@@ -583,12 +693,12 @@ public class DateHistogramIT extends ESIntegTestCase {
assertThat(bucket.getDocCount(), equalTo(3L));
}
- public void testMultiValuedFieldOrderedByKeyDesc() throws Exception {
+ public void testMultiValuedFieldOrderedByCountDesc() throws Exception {
SearchResponse response = client().prepareSearch("idx")
.addAggregation(dateHistogram("histo")
.field("dates")
.dateHistogramInterval(DateHistogramInterval.MONTH)
- .order(Histogram.Order.COUNT_DESC))
+ .order(BucketOrder.count(false)))
.execute().actionGet();
assertSearchResponse(response);
@@ -598,23 +708,26 @@ public class DateHistogramIT extends ESIntegTestCase {
assertThat(histo.getName(), equalTo("histo"));
assertThat(histo.getBuckets().size(), equalTo(4));
- // TODO: use diamond once JI-9019884 is fixed
List<Histogram.Bucket> buckets = new ArrayList<>(histo.getBuckets());
Histogram.Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
+ assertThat(bucket.getKey(), equalTo(date(3, 1)));
assertThat(bucket.getDocCount(), equalTo(5L));
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
+ assertThat(bucket.getKey(), equalTo(date(2, 1)));
assertThat(bucket.getDocCount(), equalTo(3L));
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
+ assertThat(bucket.getKey(), equalTo(date(4, 1)));
assertThat(bucket.getDocCount(), equalTo(3L));
bucket = buckets.get(3);
assertThat(bucket, notNullValue());
+ assertThat(bucket.getKey(), equalTo(date(1, 1)));
assertThat(bucket.getDocCount(), equalTo(1L));
}
@@ -1236,4 +1349,75 @@ public class DateHistogramIT extends ESIntegTestCase {
assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache()
.getMissCount(), equalTo(1L));
}
+
+ public void testSingleValuedFieldOrderedBySingleValueSubAggregationAscAndKeyDesc() throws Exception {
+ int[] expectedDays = new int[] { 1, 2, 4, 3, 7, 6, 5 };
+ assertMultiSortResponse(expectedDays, BucketOrder.aggregation("avg_l", true), BucketOrder.key(false));
+ }
+
+ public void testSingleValuedFieldOrderedBySingleValueSubAggregationAscAndKeyAsc() throws Exception {
+ int[] expectedDays = new int[] { 1, 2, 3, 4, 5, 6, 7 };
+ assertMultiSortResponse(expectedDays, BucketOrder.aggregation("avg_l", true), BucketOrder.key(true));
+ }
+
+ public void testSingleValuedFieldOrderedBySingleValueSubAggregationDescAndKeyAsc() throws Exception {
+ int[] expectedDays = new int[] { 5, 6, 7, 3, 4, 2, 1 };
+ assertMultiSortResponse(expectedDays, BucketOrder.aggregation("avg_l", false), BucketOrder.key(true));
+ }
+
+ public void testSingleValuedFieldOrderedByCountAscAndSingleValueSubAggregationAsc() throws Exception {
+ int[] expectedDays = new int[] { 6, 7, 3, 4, 5, 1, 2 };
+ assertMultiSortResponse(expectedDays, BucketOrder.count(true), BucketOrder.aggregation("avg_l", true));
+ }
+
+ public void testSingleValuedFieldOrderedBySingleValueSubAggregationAscSingleValueSubAggregationAsc() throws Exception {
+ int[] expectedDays = new int[] { 6, 7, 3, 5, 4, 1, 2 };
+ assertMultiSortResponse(expectedDays, BucketOrder.aggregation("sum_d", true), BucketOrder.aggregation("avg_l", true));
+ }
+
+ public void testSingleValuedFieldOrderedByThreeCriteria() throws Exception {
+ int[] expectedDays = new int[] { 2, 1, 4, 5, 3, 6, 7 };
+ assertMultiSortResponse(expectedDays, BucketOrder.count(false), BucketOrder.aggregation("sum_d", false),
+ BucketOrder.aggregation("avg_l", false));
+ }
+
+ public void testSingleValuedFieldOrderedBySingleValueSubAggregationAscAsCompound() throws Exception {
+ int[] expectedDays = new int[] { 1, 2, 3, 4, 5, 6, 7 };
+ assertMultiSortResponse(expectedDays, BucketOrder.aggregation("avg_l", true));
+ }
+
+ private void assertMultiSortResponse(int[] expectedDays, BucketOrder... order) {
+ DateTime[] expectedKeys = Arrays.stream(expectedDays).mapToObj(d -> date(1, d)).toArray(DateTime[]::new);
+ SearchResponse response = client()
+ .prepareSearch("sort_idx")
+ .setTypes("type")
+ .addAggregation(
+ dateHistogram("histo").field("date").dateHistogramInterval(DateHistogramInterval.DAY).order(BucketOrder.compound(order))
+ .subAggregation(avg("avg_l").field("l")).subAggregation(sum("sum_d").field("d"))).execute().actionGet();
+
+ assertSearchResponse(response);
+
+ Histogram histogram = response.getAggregations().get("histo");
+ assertThat(histogram, notNullValue());
+ assertThat(histogram.getName(), equalTo("histo"));
+ assertThat(histogram.getBuckets().size(), equalTo(expectedKeys.length));
+
+ int i = 0;
+ for (Histogram.Bucket bucket : histogram.getBuckets()) {
+ assertThat(bucket, notNullValue());
+ assertThat(key(bucket), equalTo(expectedKeys[i]));
+ assertThat(bucket.getDocCount(), equalTo(expectedMultiSortBuckets.get(expectedKeys[i]).get("_count")));
+ Avg avg = bucket.getAggregations().get("avg_l");
+ assertThat(avg, notNullValue());
+ assertThat(avg.getValue(), equalTo(expectedMultiSortBuckets.get(expectedKeys[i]).get("avg_l")));
+ Sum sum = bucket.getAggregations().get("sum_d");
+ assertThat(sum, notNullValue());
+ assertThat(sum.getValue(), equalTo(expectedMultiSortBuckets.get(expectedKeys[i]).get("sum_d")));
+ i++;
+ }
+ }
+
+ private DateTime key(Histogram.Bucket bucket) {
+ return (DateTime) bucket.getKey();
+ }
}
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramTests.java
index 76e58c715bf..e86b3a553e9 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramTests.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DateHistogramTests.java
@@ -20,10 +20,13 @@
package org.elasticsearch.search.aggregations.bucket;
import org.elasticsearch.search.aggregations.BaseAggregationTestCase;
-import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Order;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
import org.elasticsearch.search.aggregations.bucket.histogram.ExtendedBoundsTests;
+import org.elasticsearch.search.aggregations.BucketOrder;
+
+import java.util.ArrayList;
+import java.util.List;
public class DateHistogramTests extends BaseAggregationTestCase<DateHistogramAggregationBuilder> {
@@ -80,29 +83,41 @@ public class DateHistogramTests extends BaseAggregationTestCase<DateHistogramAggregationBuilder> {
+            List<BucketOrder> order = randomOrder();
+ if(order.size() == 1 && randomBoolean()) {
+ factory.order(order.get(0));
+ } else {
+ factory.order(order);
}
}
return factory;
}
+ private List<BucketOrder> randomOrder() {
+ List<BucketOrder> orders = new ArrayList<>();
+ switch (randomInt(4)) {
+ case 0:
+ orders.add(BucketOrder.key(randomBoolean()));
+ break;
+ case 1:
+ orders.add(BucketOrder.count(randomBoolean()));
+ break;
+ case 2:
+ orders.add(BucketOrder.aggregation(randomAlphaOfLengthBetween(3, 20), randomBoolean()));
+ break;
+ case 3:
+ orders.add(BucketOrder.aggregation(randomAlphaOfLengthBetween(3, 20), randomAlphaOfLengthBetween(3, 20), randomBoolean()));
+ break;
+ case 4:
+ int numOrders = randomIntBetween(1, 3);
+ for (int i = 0; i < numOrders; i++) {
+ orders.addAll(randomOrder());
+ }
+ break;
+ default:
+ fail();
+ }
+ return orders;
+ }
+
}
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DiversifiedSamplerIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DiversifiedSamplerIT.java
index c8803b7e790..6710bcdb231 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DiversifiedSamplerIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DiversifiedSamplerIT.java
@@ -29,6 +29,7 @@ import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.search.aggregations.bucket.terms.Terms.Bucket;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.max.Max;
+import org.elasticsearch.search.aggregations.BucketOrder;
import org.elasticsearch.test.ESIntegTestCase;
import java.util.Collection;
@@ -103,7 +104,7 @@ public class DiversifiedSamplerIT extends ESIntegTestCase {
SearchResponse response = client().prepareSearch("test").setTypes("book").setSearchType(SearchType.QUERY_THEN_FETCH)
.addAggregation(terms("genres")
.field("genre")
- .order(Terms.Order.aggregation("sample>max_price.value", asc))
+ .order(BucketOrder.aggregation("sample>max_price.value", asc))
.subAggregation(sampler("sample").shardSize(100)
.subAggregation(max("max_price").field("price")))
).execute().actionGet();
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DoubleTermsIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DoubleTermsIT.java
index ca106721fcc..2363c21c7d1 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DoubleTermsIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/DoubleTermsIT.java
@@ -41,6 +41,7 @@ import org.elasticsearch.search.aggregations.metrics.max.Max;
import org.elasticsearch.search.aggregations.metrics.stats.Stats;
import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStats;
import org.elasticsearch.search.aggregations.metrics.sum.Sum;
+import org.elasticsearch.search.aggregations.BucketOrder;
import org.elasticsearch.test.ESIntegTestCase;
import org.hamcrest.Matchers;
@@ -134,6 +135,7 @@ public class DoubleTermsIT extends AbstractTermsTestCase {
.startObject()
.field(SINGLE_VALUED_FIELD_NAME, (double) i)
.field("num_tag", i < NUM_DOCS/2 + 1 ? 1 : 0) // used to test order by single-bucket sub agg
+ .field("constant", 1)
.startArray(MULTI_VALUED_FIELD_NAME).value((double) i).value(i + 1d).endArray()
.endObject()));
@@ -315,7 +317,7 @@ public class DoubleTermsIT extends AbstractTermsTestCase {
.field(SINGLE_VALUED_FIELD_NAME)
.size(20)
.collectMode(randomFrom(SubAggCollectionMode.values()))
- .order(Terms.Order.term(true))) // we need to sort by terms cause we're checking the first 20 values
+ .order(BucketOrder.key(true))) // we need to sort by terms cause we're checking the first 20 values
.execute().actionGet();
assertSearchResponse(response);
@@ -363,15 +365,15 @@ public class DoubleTermsIT extends AbstractTermsTestCase {
assertThat(bucket.getDocCount(), equalTo(1L));
}
}
-
+
public void testSingleValueFieldWithPartitionedFiltering() throws Exception {
runTestFieldWithPartitionedFiltering(SINGLE_VALUED_FIELD_NAME);
}
-
+
public void testMultiValueFieldWithPartitionedFiltering() throws Exception {
runTestFieldWithPartitionedFiltering(MULTI_VALUED_FIELD_NAME);
}
-
+
private void runTestFieldWithPartitionedFiltering(String field) throws Exception {
// Find total number of unique terms
SearchResponse allResponse = client().prepareSearch("idx")
@@ -399,14 +401,14 @@ public class DoubleTermsIT extends AbstractTermsTestCase {
}
}
assertEquals(expectedCardinality, foundTerms.size());
- }
+ }
public void testSingleValueFieldOrderedByTermAsc() throws Exception {
SearchResponse response = client().prepareSearch("idx")
.addAggregation(terms("terms")
.field(SINGLE_VALUED_FIELD_NAME)
.collectMode(randomFrom(SubAggCollectionMode.values()))
- .order(Terms.Order.term(true)))
+ .order(BucketOrder.key(true)))
.execute().actionGet();
assertSearchResponse(response);
@@ -432,7 +434,7 @@ public class DoubleTermsIT extends AbstractTermsTestCase {
.addAggregation(terms("terms")
.field(SINGLE_VALUED_FIELD_NAME)
.collectMode(randomFrom(SubAggCollectionMode.values()))
- .order(Terms.Order.term(false)))
+ .order(BucketOrder.key(false)))
.execute().actionGet();
assertSearchResponse(response);
@@ -453,6 +455,33 @@ public class DoubleTermsIT extends AbstractTermsTestCase {
}
}
+ public void testSingleValueFieldOrderedByTieBreaker() throws Exception {
+ SearchResponse response = client().prepareSearch("idx").setTypes("type")
+ .addAggregation(terms("terms")
+ .field(SINGLE_VALUED_FIELD_NAME)
+ .collectMode(randomFrom(SubAggCollectionMode.values()))
+ .order(BucketOrder.aggregation("max_constant", randomBoolean()))
+ .subAggregation(max("max_constant").field("constant")))
+ .execute().actionGet();
+
+ assertSearchResponse(response);
+
+
+ Terms terms = response.getAggregations().get("terms");
+ assertThat(terms, notNullValue());
+ assertThat(terms.getName(), equalTo("terms"));
+ assertThat(terms.getBuckets().size(), equalTo(5));
+
+ int i = 0;
+ for (Terms.Bucket bucket : terms.getBuckets()) {
+ assertThat(bucket, notNullValue());
+ assertThat(key(bucket), equalTo("" + (double)i));
+ assertThat(bucket.getKeyAsNumber().intValue(), equalTo(i));
+ assertThat(bucket.getDocCount(), equalTo(1L));
+ i++;
+ }
+ }
+
public void testSingleValuedFieldWithSubAggregation() throws Exception {
SearchResponse response = client().prepareSearch("idx")
.addAggregation(terms("terms")
@@ -759,7 +788,7 @@ public class DoubleTermsIT extends AbstractTermsTestCase {
.prepareSearch("idx")
.addAggregation(
terms("terms").field(SINGLE_VALUED_FIELD_NAME).collectMode(randomFrom(SubAggCollectionMode.values()))
- .order(Terms.Order.aggregation("avg_i", asc)).subAggregation(avg("avg_i").field(SINGLE_VALUED_FIELD_NAME)))
+ .order(BucketOrder.aggregation("avg_i", asc)).subAggregation(avg("avg_i").field(SINGLE_VALUED_FIELD_NAME)))
.execute().actionGet();
@@ -789,7 +818,7 @@ public class DoubleTermsIT extends AbstractTermsTestCase {
terms("terms")
.field(SINGLE_VALUED_FIELD_NAME)
.collectMode(randomFrom(SubAggCollectionMode.values()))
- .order(Terms.Order.aggregation("avg_i", asc))
+ .order(BucketOrder.aggregation("avg_i", asc))
.subAggregation(avg("avg_i").field(SINGLE_VALUED_FIELD_NAME))
.subAggregation(
terms("subTerms").field(MULTI_VALUED_FIELD_NAME).collectMode(
@@ -831,7 +860,7 @@ public class DoubleTermsIT extends AbstractTermsTestCase {
.prepareSearch("idx")
.addAggregation(
terms("num_tags").field("num_tag").collectMode(randomFrom(SubAggCollectionMode.values()))
- .order(Terms.Order.aggregation("filter", asc))
+ .order(BucketOrder.aggregation("filter", asc))
.subAggregation(filter("filter", QueryBuilders.matchAllQuery()))).execute().actionGet();
@@ -869,7 +898,7 @@ public class DoubleTermsIT extends AbstractTermsTestCase {
terms("tags")
.field("num_tag")
.collectMode(randomFrom(SubAggCollectionMode.values()))
- .order(Terms.Order.aggregation("filter1>filter2>max", asc))
+ .order(BucketOrder.aggregation("filter1>filter2>max", asc))
.subAggregation(
filter("filter1", QueryBuilders.matchAllQuery()).subAggregation(
filter("filter2", QueryBuilders.matchAllQuery()).subAggregation(
@@ -923,7 +952,7 @@ public class DoubleTermsIT extends AbstractTermsTestCase {
client().prepareSearch(index)
.addAggregation(
terms("terms").field(SINGLE_VALUED_FIELD_NAME).collectMode(randomFrom(SubAggCollectionMode.values()))
- .order(Terms.Order.aggregation("avg_i", true))).execute().actionGet();
+ .order(BucketOrder.aggregation("avg_i", true))).execute().actionGet();
fail("Expected search to fail when trying to sort terms aggregation by sug-aggregation that doesn't exist");
@@ -941,7 +970,7 @@ public class DoubleTermsIT extends AbstractTermsTestCase {
terms("terms")
.field(SINGLE_VALUED_FIELD_NAME)
.collectMode(randomFrom(SubAggCollectionMode.values()))
- .order(Terms.Order.aggregation("num_tags", true))
+ .order(BucketOrder.aggregation("num_tags", true))
.subAggregation(
terms("num_tags").field("num_tags").collectMode(randomFrom(SubAggCollectionMode.values()))))
.execute().actionGet();
@@ -960,7 +989,7 @@ public class DoubleTermsIT extends AbstractTermsTestCase {
client().prepareSearch(index)
.addAggregation(
terms("terms").field(SINGLE_VALUED_FIELD_NAME + "2").collectMode(randomFrom(SubAggCollectionMode.values()))
- .order(Terms.Order.aggregation("stats.foo", true))
+ .order(BucketOrder.aggregation("stats.foo", true))
.subAggregation(stats("stats").field(SINGLE_VALUED_FIELD_NAME))).execute().actionGet();
fail("Expected search to fail when trying to sort terms aggregation by multi-valued sug-aggregation " +
@@ -978,7 +1007,7 @@ public class DoubleTermsIT extends AbstractTermsTestCase {
client().prepareSearch(index)
.addAggregation(
terms("terms").field(SINGLE_VALUED_FIELD_NAME).collectMode(randomFrom(SubAggCollectionMode.values()))
- .order(Terms.Order.aggregation("stats", true))
+ .order(BucketOrder.aggregation("stats", true))
.subAggregation(stats("stats").field(SINGLE_VALUED_FIELD_NAME))).execute().actionGet();
fail("Expected search to fail when trying to sort terms aggregation by multi-valued sug-aggregation " +
@@ -996,7 +1025,7 @@ public class DoubleTermsIT extends AbstractTermsTestCase {
.prepareSearch("idx")
.addAggregation(
terms("terms").field(SINGLE_VALUED_FIELD_NAME).collectMode(randomFrom(SubAggCollectionMode.values()))
- .order(Terms.Order.aggregation("avg_i", asc)).subAggregation(avg("avg_i").field(SINGLE_VALUED_FIELD_NAME)))
+ .order(BucketOrder.aggregation("avg_i", asc)).subAggregation(avg("avg_i").field(SINGLE_VALUED_FIELD_NAME)))
.execute().actionGet();
@@ -1026,7 +1055,7 @@ public class DoubleTermsIT extends AbstractTermsTestCase {
.prepareSearch("idx")
.addAggregation(
terms("terms").field(SINGLE_VALUED_FIELD_NAME).collectMode(randomFrom(SubAggCollectionMode.values()))
- .order(Terms.Order.aggregation("stats.avg", asc))
+ .order(BucketOrder.aggregation("stats.avg", asc))
.subAggregation(stats("stats").field(SINGLE_VALUED_FIELD_NAME))).execute().actionGet();
assertSearchResponse(response);
@@ -1054,7 +1083,7 @@ public class DoubleTermsIT extends AbstractTermsTestCase {
.prepareSearch("idx")
.addAggregation(
terms("terms").field(SINGLE_VALUED_FIELD_NAME).collectMode(randomFrom(SubAggCollectionMode.values()))
- .order(Terms.Order.aggregation("stats.avg", asc))
+ .order(BucketOrder.aggregation("stats.avg", asc))
.subAggregation(stats("stats").field(SINGLE_VALUED_FIELD_NAME))).execute().actionGet();
assertSearchResponse(response);
@@ -1082,7 +1111,7 @@ public class DoubleTermsIT extends AbstractTermsTestCase {
.prepareSearch("idx")
.addAggregation(
terms("terms").field(SINGLE_VALUED_FIELD_NAME).collectMode(randomFrom(SubAggCollectionMode.values()))
- .order(Terms.Order.aggregation("stats.variance", asc))
+ .order(BucketOrder.aggregation("stats.variance", asc))
.subAggregation(extendedStats("stats").field(SINGLE_VALUED_FIELD_NAME))).execute().actionGet();
assertSearchResponse(response);
@@ -1139,48 +1168,48 @@ public class DoubleTermsIT extends AbstractTermsTestCase {
public void testSingleValuedFieldOrderedBySingleValueSubAggregationAscAndTermsDesc() throws Exception {
double[] expectedKeys = new double[] { 1, 2, 4, 3, 7, 6, 5 };
- assertMultiSortResponse(expectedKeys, Terms.Order.aggregation("avg_l", true), Terms.Order.term(false));
+ assertMultiSortResponse(expectedKeys, BucketOrder.aggregation("avg_l", true), BucketOrder.key(false));
}
public void testSingleValuedFieldOrderedBySingleValueSubAggregationAscAndTermsAsc() throws Exception {
double[] expectedKeys = new double[] { 1, 2, 3, 4, 5, 6, 7 };
- assertMultiSortResponse(expectedKeys, Terms.Order.aggregation("avg_l", true), Terms.Order.term(true));
+ assertMultiSortResponse(expectedKeys, BucketOrder.aggregation("avg_l", true), BucketOrder.key(true));
}
public void testSingleValuedFieldOrderedBySingleValueSubAggregationDescAndTermsAsc() throws Exception {
double[] expectedKeys = new double[] { 5, 6, 7, 3, 4, 2, 1 };
- assertMultiSortResponse(expectedKeys, Terms.Order.aggregation("avg_l", false), Terms.Order.term(true));
+ assertMultiSortResponse(expectedKeys, BucketOrder.aggregation("avg_l", false), BucketOrder.key(true));
}
public void testSingleValuedFieldOrderedByCountAscAndSingleValueSubAggregationAsc() throws Exception {
double[] expectedKeys = new double[] { 6, 7, 3, 4, 5, 1, 2 };
- assertMultiSortResponse(expectedKeys, Terms.Order.count(true), Terms.Order.aggregation("avg_l", true));
+ assertMultiSortResponse(expectedKeys, BucketOrder.count(true), BucketOrder.aggregation("avg_l", true));
}
public void testSingleValuedFieldOrderedBySingleValueSubAggregationAscSingleValueSubAggregationAsc() throws Exception {
double[] expectedKeys = new double[] { 6, 7, 3, 5, 4, 1, 2 };
- assertMultiSortResponse(expectedKeys, Terms.Order.aggregation("sum_d", true), Terms.Order.aggregation("avg_l", true));
+ assertMultiSortResponse(expectedKeys, BucketOrder.aggregation("sum_d", true), BucketOrder.aggregation("avg_l", true));
}
public void testSingleValuedFieldOrderedByThreeCriteria() throws Exception {
double[] expectedKeys = new double[] { 2, 1, 4, 5, 3, 6, 7 };
- assertMultiSortResponse(expectedKeys, Terms.Order.count(false),
- Terms.Order.aggregation("sum_d", false),
- Terms.Order.aggregation("avg_l", false));
+ assertMultiSortResponse(expectedKeys, BucketOrder.count(false),
+ BucketOrder.aggregation("sum_d", false),
+ BucketOrder.aggregation("avg_l", false));
}
public void testSingleValuedFieldOrderedBySingleValueSubAggregationAscAsCompound() throws Exception {
double[] expectedKeys = new double[] { 1, 2, 3, 4, 5, 6, 7 };
- assertMultiSortResponse(expectedKeys, Terms.Order.aggregation("avg_l", true));
+ assertMultiSortResponse(expectedKeys, BucketOrder.aggregation("avg_l", true));
}
- private void assertMultiSortResponse(double[] expectedKeys, Terms.Order... order) {
+ private void assertMultiSortResponse(double[] expectedKeys, BucketOrder... order) {
SearchResponse response = client()
.prepareSearch("sort_idx")
.setTypes("multi_sort_type")
.addAggregation(
terms("terms").field(SINGLE_VALUED_FIELD_NAME).collectMode(randomFrom(SubAggCollectionMode.values()))
- .order(Terms.Order.compound(order)).subAggregation(avg("avg_l").field("l"))
+ .order(BucketOrder.compound(order)).subAggregation(avg("avg_l").field("l"))
.subAggregation(sum("sum_d").field("d"))).execute().actionGet();
assertSearchResponse(response);
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/HistogramIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/HistogramIT.java
index 683e7924419..d7bd069f2ba 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/HistogramIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/HistogramIT.java
@@ -19,7 +19,9 @@
package org.elasticsearch.search.aggregations.bucket;
import com.carrotsearch.hppc.LongHashSet;
+import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.index.IndexRequestBuilder;
+import org.elasticsearch.action.search.SearchPhaseExecutionException;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.query.QueryBuilders;
@@ -27,16 +29,20 @@ import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.MockScriptPlugin;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptType;
+import org.elasticsearch.search.aggregations.AggregationExecutionException;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.bucket.filter.Filter;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket;
+import org.elasticsearch.search.aggregations.metrics.avg.Avg;
import org.elasticsearch.search.aggregations.metrics.max.Max;
import org.elasticsearch.search.aggregations.metrics.stats.Stats;
import org.elasticsearch.search.aggregations.metrics.sum.Sum;
+import org.elasticsearch.search.aggregations.BucketOrder;
import org.elasticsearch.test.ESIntegTestCase;
import org.hamcrest.Matchers;
+import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
@@ -48,6 +54,7 @@ import java.util.function.Function;
import static java.util.Collections.emptyMap;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
+import static org.elasticsearch.search.aggregations.AggregationBuilders.avg;
import static org.elasticsearch.search.aggregations.AggregationBuilders.filter;
import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram;
import static org.elasticsearch.search.aggregations.AggregationBuilders.max;
@@ -72,6 +79,7 @@ public class HistogramIT extends ESIntegTestCase {
static int interval;
static int numValueBuckets, numValuesBuckets;
static long[] valueCounts, valuesCounts;
+ static Map<Long, Map<String, Object>> expectedMultiSortBuckets;
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
@@ -130,16 +138,18 @@ public class HistogramIT extends ESIntegTestCase {
}
List<IndexRequestBuilder> builders = new ArrayList<>();
-
for (int i = 0; i < numDocs; i++) {
builders.add(client().prepareIndex("idx", "type").setSource(jsonBuilder()
.startObject()
.field(SINGLE_VALUED_FIELD_NAME, i + 1)
.startArray(MULTI_VALUED_FIELD_NAME).value(i + 1).value(i + 2).endArray()
.field("tag", "tag" + i)
+ .field("constant", 1)
.endObject()));
}
+ getMultiSortDocs(builders);
+
assertAcked(prepareCreate("empty_bucket_idx").addMapping("type", SINGLE_VALUED_FIELD_NAME, "type=integer"));
for (int i = 0; i < 2; i++) {
builders.add(client().prepareIndex("empty_bucket_idx", "type", "" + i).setSource(jsonBuilder()
@@ -151,6 +161,51 @@ public class HistogramIT extends ESIntegTestCase {
ensureSearchable();
}
+ private void addExpectedBucket(long key, long docCount, double avg, double sum) {
+ Map<String, Object> bucketProps = new HashMap<>();
+ bucketProps.put("key", key);
+ bucketProps.put("_count", docCount);
+ bucketProps.put("avg_l", avg);
+ bucketProps.put("sum_d", sum);
+ expectedMultiSortBuckets.put(key, bucketProps);
+ }
+
+ private void getMultiSortDocs(List<IndexRequestBuilder> builders) throws IOException {
+ expectedMultiSortBuckets = new HashMap<>();
+ addExpectedBucket(1, 3, 1, 6);
+ addExpectedBucket(2, 3, 2, 6);
+ addExpectedBucket(3, 2, 3, 3);
+ addExpectedBucket(4, 2, 3, 4);
+ addExpectedBucket(5, 2, 5, 3);
+ addExpectedBucket(6, 1, 5, 1);
+ addExpectedBucket(7, 1, 5, 1);
+
+ assertAcked(client().admin().indices().prepareCreate("sort_idx")
+ .addMapping("type", SINGLE_VALUED_FIELD_NAME, "type=double").get());
+ for (int i = 1; i <= 3; i++) {
+ builders.add(client().prepareIndex("sort_idx", "type").setSource(
+ jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 1).field("l", 1).field("d", i).endObject()));
+ builders.add(client().prepareIndex("sort_idx", "type").setSource(
+ jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 2).field("l", 2).field("d", i).endObject()));
+ }
+ builders.add(client().prepareIndex("sort_idx", "type").setSource(
+ jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 3).field("l", 3).field("d", 1).endObject()));
+ builders.add(client().prepareIndex("sort_idx", "type").setSource(
+ jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 3.8).field("l", 3).field("d", 2).endObject()));
+ builders.add(client().prepareIndex("sort_idx", "type").setSource(
+ jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 4).field("l", 3).field("d", 1).endObject()));
+ builders.add(client().prepareIndex("sort_idx", "type").setSource(
+ jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 4.4).field("l", 3).field("d", 3).endObject()));
+ builders.add(client().prepareIndex("sort_idx", "type").setSource(
+ jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 5).field("l", 5).field("d", 1).endObject()));
+ builders.add(client().prepareIndex("sort_idx", "type").setSource(
+ jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 5.1).field("l", 5).field("d", 2).endObject()));
+ builders.add(client().prepareIndex("sort_idx", "type").setSource(
+ jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 6).field("l", 5).field("d", 1).endObject()));
+ builders.add(client().prepareIndex("sort_idx", "type").setSource(
+ jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, 7).field("l", 5).field("d", 1).endObject()));
+ }
+
public void testSingleValuedField() throws Exception {
SearchResponse response = client().prepareSearch("idx")
.addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval))
@@ -241,7 +296,7 @@ public class HistogramIT extends ESIntegTestCase {
public void testSingleValuedFieldOrderedByKeyAsc() throws Exception {
SearchResponse response = client().prepareSearch("idx")
- .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).order(Histogram.Order.KEY_ASC))
+ .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).order(BucketOrder.key(true)))
.execute().actionGet();
assertSearchResponse(response);
@@ -252,7 +307,6 @@ public class HistogramIT extends ESIntegTestCase {
assertThat(histo.getName(), equalTo("histo"));
assertThat(histo.getBuckets().size(), equalTo(numValueBuckets));
- // TODO: use diamond once JI-9019884 is fixed
List<Histogram.Bucket> buckets = new ArrayList<>(histo.getBuckets());
for (int i = 0; i < numValueBuckets; ++i) {
Histogram.Bucket bucket = buckets.get(i);
@@ -264,7 +318,7 @@ public class HistogramIT extends ESIntegTestCase {
public void testsingleValuedFieldOrderedByKeyDesc() throws Exception {
SearchResponse response = client().prepareSearch("idx")
- .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).order(Histogram.Order.KEY_DESC))
+ .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).order(BucketOrder.key(false)))
.execute().actionGet();
assertSearchResponse(response);
@@ -275,7 +329,6 @@ public class HistogramIT extends ESIntegTestCase {
assertThat(histo.getName(), equalTo("histo"));
assertThat(histo.getBuckets().size(), equalTo(numValueBuckets));
- // TODO: use diamond once JI-9019884 is fixed
List<Histogram.Bucket> buckets = new ArrayList<>(histo.getBuckets());
for (int i = 0; i < numValueBuckets; ++i) {
Histogram.Bucket bucket = buckets.get(numValueBuckets - i - 1);
@@ -287,7 +340,7 @@ public class HistogramIT extends ESIntegTestCase {
public void testSingleValuedFieldOrderedByCountAsc() throws Exception {
SearchResponse response = client().prepareSearch("idx")
- .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).order(Histogram.Order.COUNT_ASC))
+ .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).order(BucketOrder.count(true)))
.execute().actionGet();
assertSearchResponse(response);
@@ -299,7 +352,6 @@ public class HistogramIT extends ESIntegTestCase {
assertThat(histo.getBuckets().size(), equalTo(numValueBuckets));
LongHashSet buckets = new LongHashSet();
- // TODO: use diamond once JI-9019884 is fixed
List<Histogram.Bucket> histoBuckets = new ArrayList<>(histo.getBuckets());
long previousCount = Long.MIN_VALUE;
for (int i = 0; i < numValueBuckets; ++i) {
@@ -316,7 +368,7 @@ public class HistogramIT extends ESIntegTestCase {
public void testSingleValuedFieldOrderedByCountDesc() throws Exception {
SearchResponse response = client().prepareSearch("idx")
- .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).order(Histogram.Order.COUNT_DESC))
+ .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval).order(BucketOrder.count(false)))
.execute().actionGet();
assertSearchResponse(response);
@@ -328,7 +380,6 @@ public class HistogramIT extends ESIntegTestCase {
assertThat(histo.getBuckets().size(), equalTo(numValueBuckets));
LongHashSet buckets = new LongHashSet();
- // TODO: use diamond once JI-9019884 is fixed
List<Histogram.Bucket> histoBuckets = new ArrayList<>(histo.getBuckets());
long previousCount = Long.MAX_VALUE;
for (int i = 0; i < numValueBuckets; ++i) {
@@ -361,7 +412,6 @@ public class HistogramIT extends ESIntegTestCase {
Object[] propertiesDocCounts = (Object[]) ((InternalAggregation)histo).getProperty("_count");
Object[] propertiesCounts = (Object[]) ((InternalAggregation)histo).getProperty("sum.value");
- // TODO: use diamond once JI-9019884 is fixed
List<Histogram.Bucket> buckets = new ArrayList<>(histo.getBuckets());
for (int i = 0; i < numValueBuckets; ++i) {
Histogram.Bucket bucket = buckets.get(i);
@@ -390,7 +440,7 @@ public class HistogramIT extends ESIntegTestCase {
histogram("histo")
.field(SINGLE_VALUED_FIELD_NAME)
.interval(interval)
- .order(Histogram.Order.aggregation("sum", true))
+ .order(BucketOrder.aggregation("sum", true))
.subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)))
.execute().actionGet();
@@ -404,7 +454,6 @@ public class HistogramIT extends ESIntegTestCase {
LongHashSet visited = new LongHashSet();
double previousSum = Double.NEGATIVE_INFINITY;
- // TODO: use diamond once JI-9019884 is fixed
List<Histogram.Bucket> buckets = new ArrayList<>(histo.getBuckets());
for (int i = 0; i < numValueBuckets; ++i) {
Histogram.Bucket bucket = buckets.get(i);
@@ -434,7 +483,7 @@ public class HistogramIT extends ESIntegTestCase {
histogram("histo")
.field(SINGLE_VALUED_FIELD_NAME)
.interval(interval)
- .order(Histogram.Order.aggregation("sum", false))
+ .order(BucketOrder.aggregation("sum", false))
.subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME)))
.execute().actionGet();
@@ -448,7 +497,6 @@ public class HistogramIT extends ESIntegTestCase {
LongHashSet visited = new LongHashSet();
double previousSum = Double.POSITIVE_INFINITY;
- // TODO: use diamond once JI-9019884 is fixed
List<Histogram.Bucket> buckets = new ArrayList<>(histo.getBuckets());
for (int i = 0; i < numValueBuckets; ++i) {
Histogram.Bucket bucket = buckets.get(i);
@@ -478,7 +526,7 @@ public class HistogramIT extends ESIntegTestCase {
histogram("histo")
.field(SINGLE_VALUED_FIELD_NAME)
.interval(interval)
- .order(Histogram.Order.aggregation("stats.sum", false))
+ .order(BucketOrder.aggregation("stats.sum", false))
.subAggregation(stats("stats").field(SINGLE_VALUED_FIELD_NAME)))
.execute().actionGet();
@@ -492,7 +540,7 @@ public class HistogramIT extends ESIntegTestCase {
LongHashSet visited = new LongHashSet();
double previousSum = Double.POSITIVE_INFINITY;
- // TODO: use diamond once JI-9019884 is fixed
+
List<Histogram.Bucket> buckets = new ArrayList<>(histo.getBuckets());
for (int i = 0; i < numValueBuckets; ++i) {
Histogram.Bucket bucket = buckets.get(i);
@@ -523,7 +571,7 @@ public class HistogramIT extends ESIntegTestCase {
histogram("histo")
.field(SINGLE_VALUED_FIELD_NAME)
.interval(interval)
- .order(Histogram.Order.aggregation("filter>max", asc))
+ .order(BucketOrder.aggregation("filter>max", asc))
.subAggregation(filter("filter", matchAllQuery())
.subAggregation(max("max").field(SINGLE_VALUED_FIELD_NAME))))
.execute().actionGet();
@@ -538,7 +586,6 @@ public class HistogramIT extends ESIntegTestCase {
LongHashSet visited = new LongHashSet();
double prevMax = asc ? Double.NEGATIVE_INFINITY : Double.POSITIVE_INFINITY;
- // TODO: use diamond once JI-9019884 is fixed
List<Histogram.Bucket> buckets = new ArrayList<>(histo.getBuckets());
for (int i = 0; i < numValueBuckets; ++i) {
Histogram.Bucket bucket = buckets.get(i);
@@ -558,6 +605,62 @@ public class HistogramIT extends ESIntegTestCase {
}
}
+ public void testSingleValuedFieldOrderedByTieBreaker() throws Exception {
+ SearchResponse response = client().prepareSearch("idx")
+ .addAggregation(histogram("histo")
+ .field(SINGLE_VALUED_FIELD_NAME)
+ .interval(interval)
+ .order(BucketOrder.aggregation("max_constant", randomBoolean()))
+ .subAggregation(max("max_constant").field("constant")))
+ .execute().actionGet();
+
+ assertSearchResponse(response);
+
+ Histogram histo = response.getAggregations().get("histo");
+ assertThat(histo, notNullValue());
+ assertThat(histo.getName(), equalTo("histo"));
+ assertThat(histo.getBuckets().size(), equalTo(numValueBuckets));
+
+ List<Histogram.Bucket> buckets = new ArrayList<>(histo.getBuckets());
+ for (int i = 0; i < numValueBuckets; ++i) {
+ Histogram.Bucket bucket = buckets.get(i);
+ assertThat(bucket, notNullValue());
+ assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval));
+ assertThat(bucket.getDocCount(), equalTo(valueCounts[i]));
+ }
+ }
+
+ public void testSingleValuedFieldOrderedByIllegalAgg() throws Exception {
+ boolean asc = true;
+ try {
+ client()
+ .prepareSearch("idx")
+ .addAggregation(
+ histogram("histo").field(SINGLE_VALUED_FIELD_NAME)
+ .interval(interval)
+ .order(BucketOrder.aggregation("inner_histo>avg", asc))
+ .subAggregation(histogram("inner_histo")
+ .interval(interval)
+ .field(MULTI_VALUED_FIELD_NAME)
+ .subAggregation(avg("avg").field("value"))))
+ .execute().actionGet();
+ fail("Expected an exception");
+ } catch (SearchPhaseExecutionException e) {
+ ElasticsearchException[] rootCauses = e.guessRootCauses();
+ if (rootCauses.length == 1) {
+ ElasticsearchException rootCause = rootCauses[0];
+ if (rootCause instanceof AggregationExecutionException) {
+ AggregationExecutionException aggException = (AggregationExecutionException) rootCause;
+ assertThat(aggException.getMessage(), Matchers.startsWith("Invalid aggregation order path"));
+ } else {
+ throw e;
+ }
+ } else {
+ throw e;
+ }
+ }
+ }
+
public void testSingleValuedFieldWithValueScript() throws Exception {
SearchResponse response = client().prepareSearch("idx")
.addAggregation(
@@ -614,7 +717,7 @@ public class HistogramIT extends ESIntegTestCase {
public void testMultiValuedFieldOrderedByKeyDesc() throws Exception {
SearchResponse response = client().prepareSearch("idx")
- .addAggregation(histogram("histo").field(MULTI_VALUED_FIELD_NAME).interval(interval).order(Histogram.Order.KEY_DESC))
+ .addAggregation(histogram("histo").field(MULTI_VALUED_FIELD_NAME).interval(interval).order(BucketOrder.key(false)))
.execute().actionGet();
assertSearchResponse(response);
@@ -625,7 +728,6 @@ public class HistogramIT extends ESIntegTestCase {
assertThat(histo.getName(), equalTo("histo"));
assertThat(histo.getBuckets().size(), equalTo(numValuesBuckets));
- // TODO: use diamond once JI-9019884 is fixed
List<Histogram.Bucket> buckets = new ArrayList<>(histo.getBuckets());
for (int i = 0; i < numValuesBuckets; ++i) {
Histogram.Bucket bucket = buckets.get(numValuesBuckets - i - 1);
@@ -1036,4 +1138,74 @@ public class HistogramIT extends ESIntegTestCase {
assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache()
.getMissCount(), equalTo(1L));
}
+
+ public void testSingleValuedFieldOrderedBySingleValueSubAggregationAscAndKeyDesc() throws Exception {
+ long[] expectedKeys = new long[] { 1, 2, 4, 3, 7, 6, 5 };
+ assertMultiSortResponse(expectedKeys, BucketOrder.aggregation("avg_l", true), BucketOrder.key(false));
+ }
+
+ public void testSingleValuedFieldOrderedBySingleValueSubAggregationAscAndKeyAsc() throws Exception {
+ long[] expectedKeys = new long[] { 1, 2, 3, 4, 5, 6, 7 };
+ assertMultiSortResponse(expectedKeys, BucketOrder.aggregation("avg_l", true), BucketOrder.key(true));
+ }
+
+ public void testSingleValuedFieldOrderedBySingleValueSubAggregationDescAndKeyAsc() throws Exception {
+ long[] expectedKeys = new long[] { 5, 6, 7, 3, 4, 2, 1 };
+ assertMultiSortResponse(expectedKeys, BucketOrder.aggregation("avg_l", false), BucketOrder.key(true));
+ }
+
+ public void testSingleValuedFieldOrderedByCountAscAndSingleValueSubAggregationAsc() throws Exception {
+ long[] expectedKeys = new long[] { 6, 7, 3, 4, 5, 1, 2 };
+ assertMultiSortResponse(expectedKeys, BucketOrder.count(true), BucketOrder.aggregation("avg_l", true));
+ }
+
+ public void testSingleValuedFieldOrderedBySingleValueSubAggregationAscSingleValueSubAggregationAsc() throws Exception {
+ long[] expectedKeys = new long[] { 6, 7, 3, 5, 4, 1, 2 };
+ assertMultiSortResponse(expectedKeys, BucketOrder.aggregation("sum_d", true), BucketOrder.aggregation("avg_l", true));
+ }
+
+ public void testSingleValuedFieldOrderedByThreeCriteria() throws Exception {
+ long[] expectedKeys = new long[] { 2, 1, 4, 5, 3, 6, 7 };
+ assertMultiSortResponse(expectedKeys, BucketOrder.count(false), BucketOrder.aggregation("sum_d", false),
+ BucketOrder.aggregation("avg_l", false));
+ }
+
+ public void testSingleValuedFieldOrderedBySingleValueSubAggregationAscAsCompound() throws Exception {
+ long[] expectedKeys = new long[] { 1, 2, 3, 4, 5, 6, 7 };
+ assertMultiSortResponse(expectedKeys, BucketOrder.aggregation("avg_l", true));
+ }
+
+ private void assertMultiSortResponse(long[] expectedKeys, BucketOrder... order) {
+ SearchResponse response = client()
+ .prepareSearch("sort_idx")
+ .setTypes("type")
+ .addAggregation(
+ histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(1).order(BucketOrder.compound(order))
+ .subAggregation(avg("avg_l").field("l")).subAggregation(sum("sum_d").field("d"))).execute().actionGet();
+
+ assertSearchResponse(response);
+
+ Histogram histogram = response.getAggregations().get("histo");
+ assertThat(histogram, notNullValue());
+ assertThat(histogram.getName(), equalTo("histo"));
+ assertThat(histogram.getBuckets().size(), equalTo(expectedKeys.length));
+
+ int i = 0;
+ for (Histogram.Bucket bucket : histogram.getBuckets()) {
+ assertThat(bucket, notNullValue());
+ assertThat(key(bucket), equalTo(expectedKeys[i]));
+ assertThat(bucket.getDocCount(), equalTo(expectedMultiSortBuckets.get(expectedKeys[i]).get("_count")));
+ Avg avg = bucket.getAggregations().get("avg_l");
+ assertThat(avg, notNullValue());
+ assertThat(avg.getValue(), equalTo(expectedMultiSortBuckets.get(expectedKeys[i]).get("avg_l")));
+ Sum sum = bucket.getAggregations().get("sum_d");
+ assertThat(sum, notNullValue());
+ assertThat(sum.getValue(), equalTo(expectedMultiSortBuckets.get(expectedKeys[i]).get("sum_d")));
+ i++;
+ }
+ }
+
+ private long key(Histogram.Bucket bucket) {
+ return ((Number) bucket.getKey()).longValue();
+ }
}
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/HistogramTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/HistogramTests.java
index ea61a8168ad..ee22b229177 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/HistogramTests.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/HistogramTests.java
@@ -21,7 +21,10 @@ package org.elasticsearch.search.aggregations.bucket;
import org.elasticsearch.search.aggregations.BaseAggregationTestCase;
import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder;
-import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Order;
+import org.elasticsearch.search.aggregations.BucketOrder;
+
+import java.util.ArrayList;
+import java.util.List;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.startsWith;
@@ -54,26 +57,11 @@ public class HistogramTests extends BaseAggregationTestCase<HistogramAggregationBuilder> {
+ List<BucketOrder> order = randomOrder();
+ if(order.size() == 1 && randomBoolean()) {
+ factory.order(order.get(0));
+ } else {
+ factory.order(order);
}
}
return factory;
@@ -102,4 +90,31 @@ public class HistogramTests extends BaseAggregationTestCase<HistogramAggregationBuilder> {
+ private List<BucketOrder> randomOrder() {
+ List<BucketOrder> orders = new ArrayList<>();
+ switch (randomInt(4)) {
+ case 0:
+ orders.add(BucketOrder.key(randomBoolean()));
+ break;
+ case 1:
+ orders.add(BucketOrder.count(randomBoolean()));
+ break;
+ case 2:
+ orders.add(BucketOrder.aggregation(randomAlphaOfLengthBetween(3, 20), randomBoolean()));
+ break;
+ case 3:
+ orders.add(BucketOrder.aggregation(randomAlphaOfLengthBetween(3, 20), randomAlphaOfLengthBetween(3, 20), randomBoolean()));
+ break;
+ case 4:
+ int numOrders = randomIntBetween(1, 3);
+ for (int i = 0; i < numOrders; i++) {
+ orders.addAll(randomOrder());
+ }
+ break;
+ default:
+ fail();
+ }
+ return orders;
+ }
+
}
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/LongTermsIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/LongTermsIT.java
index a54dc3e2f5e..565cdaaa87e 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/LongTermsIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/LongTermsIT.java
@@ -40,6 +40,7 @@ import org.elasticsearch.search.aggregations.metrics.max.Max;
import org.elasticsearch.search.aggregations.metrics.stats.Stats;
import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStats;
import org.elasticsearch.search.aggregations.metrics.sum.Sum;
+import org.elasticsearch.search.aggregations.BucketOrder;
import org.elasticsearch.test.ESIntegTestCase;
import org.hamcrest.Matchers;
@@ -121,6 +122,7 @@ public class LongTermsIT extends AbstractTermsTestCase {
.field(SINGLE_VALUED_FIELD_NAME, i)
.startArray(MULTI_VALUED_FIELD_NAME).value(i).value(i + 1).endArray()
.field("num_tag", i < lowCardBuilders.length / 2 + 1 ? 1 : 0) // used to test order by single-bucket sub agg
+ .field("constant", 1)
.endObject());
}
indexRandom(true, lowCardBuilders);
@@ -392,7 +394,7 @@ public class LongTermsIT extends AbstractTermsTestCase {
.field(SINGLE_VALUED_FIELD_NAME)
.size(20)
.collectMode(randomFrom(SubAggCollectionMode.values()))
- .order(Terms.Order.term(true))) // we need to sort by terms cause we're checking the first 20 values
+ .order(BucketOrder.key(true))) // we need to sort by terms cause we're checking the first 20 values
.execute().actionGet();
assertSearchResponse(response);
@@ -417,7 +419,7 @@ public class LongTermsIT extends AbstractTermsTestCase {
.addAggregation(terms("terms")
.field(SINGLE_VALUED_FIELD_NAME)
.collectMode(randomFrom(SubAggCollectionMode.values()))
- .order(Terms.Order.term(true)))
+ .order(BucketOrder.key(true)))
.execute().actionGet();
assertSearchResponse(response);
@@ -441,7 +443,7 @@ public class LongTermsIT extends AbstractTermsTestCase {
.addAggregation(terms("terms")
.field(SINGLE_VALUED_FIELD_NAME)
.collectMode(randomFrom(SubAggCollectionMode.values()))
- .order(Terms.Order.term(false)))
+ .order(BucketOrder.key(false)))
.execute().actionGet();
assertSearchResponse(response);
@@ -462,6 +464,31 @@ public class LongTermsIT extends AbstractTermsTestCase {
}
}
+ public void testSingleValueFieldOrderedByTieBreaker() throws Exception {
+ SearchResponse response = client().prepareSearch("idx").setTypes("type")
+ .addAggregation(terms("terms")
+ .field(SINGLE_VALUED_FIELD_NAME)
+ .collectMode(randomFrom(SubAggCollectionMode.values()))
+ .order(BucketOrder.aggregation("max_constant", randomBoolean()))
+ .subAggregation(max("max_constant").field("constant")))
+ .execute().actionGet();
+ assertSearchResponse(response);
+
+ Terms terms = response.getAggregations().get("terms");
+ assertThat(terms, notNullValue());
+ assertThat(terms.getName(), equalTo("terms"));
+ assertThat(terms.getBuckets().size(), equalTo(5));
+
+ int i = 0;
+ for (Terms.Bucket bucket : terms.getBuckets()) {
+ assertThat(bucket, notNullValue());
+ assertThat(key(bucket), equalTo("" + i));
+ assertThat(bucket.getKeyAsNumber().intValue(), equalTo(i));
+ assertThat(bucket.getDocCount(), equalTo(1L));
+ i++;
+ }
+ }
+
public void testSingleValuedFieldWithSubAggregation() throws Exception {
SearchResponse response = client().prepareSearch("idx")
.addAggregation(terms("terms")
@@ -769,7 +796,7 @@ public class LongTermsIT extends AbstractTermsTestCase {
.addAggregation(terms("terms")
.field(SINGLE_VALUED_FIELD_NAME)
.collectMode(randomFrom(SubAggCollectionMode.values()))
- .order(Terms.Order.aggregation("avg_i", asc))
+ .order(BucketOrder.aggregation("avg_i", asc))
.subAggregation(avg("avg_i").field(SINGLE_VALUED_FIELD_NAME))
).execute().actionGet();
@@ -798,7 +825,7 @@ public class LongTermsIT extends AbstractTermsTestCase {
.addAggregation(terms("terms")
.field(SINGLE_VALUED_FIELD_NAME)
.collectMode(randomFrom(SubAggCollectionMode.values()))
- .order(Terms.Order.aggregation("avg_i", asc))
+ .order(BucketOrder.aggregation("avg_i", asc))
.subAggregation(
avg("avg_i").field(SINGLE_VALUED_FIELD_NAME))
.subAggregation(
@@ -842,7 +869,7 @@ public class LongTermsIT extends AbstractTermsTestCase {
.addAggregation(terms("num_tags")
.field("num_tag")
.collectMode(randomFrom(SubAggCollectionMode.values()))
- .order(Terms.Order.aggregation("filter", asc))
+ .order(BucketOrder.aggregation("filter", asc))
.subAggregation(filter("filter", QueryBuilders.matchAllQuery()))
).get();
@@ -879,7 +906,7 @@ public class LongTermsIT extends AbstractTermsTestCase {
.addAggregation(terms("tags")
.field("num_tag")
.collectMode(randomFrom(SubAggCollectionMode.values()))
- .order(Terms.Order.aggregation("filter1>filter2>max", asc))
+ .order(BucketOrder.aggregation("filter1>filter2>max", asc))
.subAggregation(filter("filter1", QueryBuilders.matchAllQuery()).subAggregation(
filter("filter2", QueryBuilders.matchAllQuery())
.subAggregation(max("max").field(SINGLE_VALUED_FIELD_NAME))))
@@ -934,7 +961,7 @@ public class LongTermsIT extends AbstractTermsTestCase {
.addAggregation(terms("terms")
.field(SINGLE_VALUED_FIELD_NAME)
.collectMode(randomFrom(SubAggCollectionMode.values()))
- .order(Terms.Order.aggregation("avg_i", true))
+ .order(BucketOrder.aggregation("avg_i", true))
).execute().actionGet();
fail("Expected search to fail when trying to sort terms aggregation by sug-aggregation that doesn't exist");
@@ -952,7 +979,7 @@ public class LongTermsIT extends AbstractTermsTestCase {
.addAggregation(terms("terms")
.field(SINGLE_VALUED_FIELD_NAME)
.collectMode(randomFrom(SubAggCollectionMode.values()))
- .order(Terms.Order.aggregation("num_tags", true))
+ .order(BucketOrder.aggregation("num_tags", true))
.subAggregation(terms("num_tags").field("num_tags")
.collectMode(randomFrom(SubAggCollectionMode.values())))
).execute().actionGet();
@@ -972,7 +999,7 @@ public class LongTermsIT extends AbstractTermsTestCase {
.addAggregation(terms("terms")
.field(SINGLE_VALUED_FIELD_NAME)
.collectMode(randomFrom(SubAggCollectionMode.values()))
- .order(Terms.Order.aggregation("stats.foo", true))
+ .order(BucketOrder.aggregation("stats.foo", true))
.subAggregation(stats("stats").field(SINGLE_VALUED_FIELD_NAME))
).execute().actionGet();
@@ -992,7 +1019,7 @@ public class LongTermsIT extends AbstractTermsTestCase {
.addAggregation(terms("terms")
.field(SINGLE_VALUED_FIELD_NAME)
.collectMode(randomFrom(SubAggCollectionMode.values()))
- .order(Terms.Order.aggregation("stats", true))
+ .order(BucketOrder.aggregation("stats", true))
.subAggregation(stats("stats").field(SINGLE_VALUED_FIELD_NAME))
).execute().actionGet();
@@ -1011,7 +1038,7 @@ public class LongTermsIT extends AbstractTermsTestCase {
.addAggregation(terms("terms")
.field(SINGLE_VALUED_FIELD_NAME)
.collectMode(randomFrom(SubAggCollectionMode.values()))
- .order(Terms.Order.aggregation("avg_i", asc))
+ .order(BucketOrder.aggregation("avg_i", asc))
.subAggregation(avg("avg_i").field(SINGLE_VALUED_FIELD_NAME))
).execute().actionGet();
@@ -1043,7 +1070,7 @@ public class LongTermsIT extends AbstractTermsTestCase {
.addAggregation(terms("terms")
.field(SINGLE_VALUED_FIELD_NAME)
.collectMode(randomFrom(SubAggCollectionMode.values()))
- .order(Terms.Order.aggregation("stats.avg", asc))
+ .order(BucketOrder.aggregation("stats.avg", asc))
.subAggregation(stats("stats").field(SINGLE_VALUED_FIELD_NAME))
).execute().actionGet();
@@ -1073,7 +1100,7 @@ public class LongTermsIT extends AbstractTermsTestCase {
.addAggregation(terms("terms")
.field(SINGLE_VALUED_FIELD_NAME)
.collectMode(randomFrom(SubAggCollectionMode.values()))
- .order(Terms.Order.aggregation("stats.avg", asc))
+ .order(BucketOrder.aggregation("stats.avg", asc))
.subAggregation(stats("stats").field(SINGLE_VALUED_FIELD_NAME))
).execute().actionGet();
@@ -1103,7 +1130,7 @@ public class LongTermsIT extends AbstractTermsTestCase {
.addAggregation(terms("terms")
.field(SINGLE_VALUED_FIELD_NAME)
.collectMode(randomFrom(SubAggCollectionMode.values()))
- .order(Terms.Order.aggregation("stats.variance", asc))
+ .order(BucketOrder.aggregation("stats.variance", asc))
.subAggregation(extendedStats("stats").field(SINGLE_VALUED_FIELD_NAME))
).execute().actionGet();
@@ -1129,47 +1156,47 @@ public class LongTermsIT extends AbstractTermsTestCase {
public void testSingleValuedFieldOrderedBySingleValueSubAggregationAscAndTermsDesc() throws Exception {
long[] expectedKeys = new long[] { 1, 2, 4, 3, 7, 6, 5 };
- assertMultiSortResponse(expectedKeys, Terms.Order.aggregation("avg_l", true), Terms.Order.term(false));
+ assertMultiSortResponse(expectedKeys, BucketOrder.aggregation("avg_l", true), BucketOrder.key(false));
}
public void testSingleValuedFieldOrderedBySingleValueSubAggregationAscAndTermsAsc() throws Exception {
long[] expectedKeys = new long[] { 1, 2, 3, 4, 5, 6, 7 };
- assertMultiSortResponse(expectedKeys, Terms.Order.aggregation("avg_l", true), Terms.Order.term(true));
+ assertMultiSortResponse(expectedKeys, BucketOrder.aggregation("avg_l", true), BucketOrder.key(true));
}
public void testSingleValuedFieldOrderedBySingleValueSubAggregationDescAndTermsAsc() throws Exception {
long[] expectedKeys = new long[] { 5, 6, 7, 3, 4, 2, 1 };
- assertMultiSortResponse(expectedKeys, Terms.Order.aggregation("avg_l", false), Terms.Order.term(true));
+ assertMultiSortResponse(expectedKeys, BucketOrder.aggregation("avg_l", false), BucketOrder.key(true));
}
public void testSingleValuedFieldOrderedByCountAscAndSingleValueSubAggregationAsc() throws Exception {
long[] expectedKeys = new long[] { 6, 7, 3, 4, 5, 1, 2 };
- assertMultiSortResponse(expectedKeys, Terms.Order.count(true), Terms.Order.aggregation("avg_l", true));
+ assertMultiSortResponse(expectedKeys, BucketOrder.count(true), BucketOrder.aggregation("avg_l", true));
}
public void testSingleValuedFieldOrderedBySingleValueSubAggregationAscSingleValueSubAggregationAsc() throws Exception {
long[] expectedKeys = new long[] { 6, 7, 3, 5, 4, 1, 2 };
- assertMultiSortResponse(expectedKeys, Terms.Order.aggregation("sum_d", true), Terms.Order.aggregation("avg_l", true));
+ assertMultiSortResponse(expectedKeys, BucketOrder.aggregation("sum_d", true), BucketOrder.aggregation("avg_l", true));
}
public void testSingleValuedFieldOrderedByThreeCriteria() throws Exception {
long[] expectedKeys = new long[] { 2, 1, 4, 5, 3, 6, 7 };
- assertMultiSortResponse(expectedKeys, Terms.Order.count(false),
- Terms.Order.aggregation("sum_d", false),
- Terms.Order.aggregation("avg_l", false));
+ assertMultiSortResponse(expectedKeys, BucketOrder.count(false),
+ BucketOrder.aggregation("sum_d", false),
+ BucketOrder.aggregation("avg_l", false));
}
public void testSingleValuedFieldOrderedBySingleValueSubAggregationAscAsCompound() throws Exception {
long[] expectedKeys = new long[] { 1, 2, 3, 4, 5, 6, 7 };
- assertMultiSortResponse(expectedKeys, Terms.Order.aggregation("avg_l", true));
+ assertMultiSortResponse(expectedKeys, BucketOrder.aggregation("avg_l", true));
}
- private void assertMultiSortResponse(long[] expectedKeys, Terms.Order... order) {
+ private void assertMultiSortResponse(long[] expectedKeys, BucketOrder... order) {
SearchResponse response = client().prepareSearch("sort_idx").setTypes("multi_sort_type")
.addAggregation(terms("terms")
.field(SINGLE_VALUED_FIELD_NAME)
.collectMode(randomFrom(SubAggCollectionMode.values()))
- .order(Terms.Order.compound(order))
+ .order(BucketOrder.compound(order))
.subAggregation(avg("avg_l").field("l"))
.subAggregation(sum("sum_d").field("d"))
).execute().actionGet();
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/MinDocCountIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/MinDocCountIT.java
index e1e8f1ba660..038227239cc 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/MinDocCountIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/MinDocCountIT.java
@@ -37,6 +37,7 @@ import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude;
+import org.elasticsearch.search.aggregations.BucketOrder;
import org.elasticsearch.test.ESIntegTestCase;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
@@ -190,122 +191,122 @@ public class MinDocCountIT extends AbstractTermsTestCase {
}
public void testStringTermAsc() throws Exception {
- testMinDocCountOnTerms("s", Script.NO, Terms.Order.term(true));
+ testMinDocCountOnTerms("s", Script.NO, BucketOrder.key(true));
}
public void testStringScriptTermAsc() throws Exception {
- testMinDocCountOnTerms("s", Script.YES, Terms.Order.term(true));
+ testMinDocCountOnTerms("s", Script.YES, BucketOrder.key(true));
}
public void testStringTermDesc() throws Exception {
- testMinDocCountOnTerms("s", Script.NO, Terms.Order.term(false));
+ testMinDocCountOnTerms("s", Script.NO, BucketOrder.key(false));
}
public void testStringScriptTermDesc() throws Exception {
- testMinDocCountOnTerms("s", Script.YES, Terms.Order.term(false));
+ testMinDocCountOnTerms("s", Script.YES, BucketOrder.key(false));
}
public void testStringCountAsc() throws Exception {
- testMinDocCountOnTerms("s", Script.NO, Terms.Order.count(true));
+ testMinDocCountOnTerms("s", Script.NO, BucketOrder.count(true));
}
public void testStringScriptCountAsc() throws Exception {
- testMinDocCountOnTerms("s", Script.YES, Terms.Order.count(true));
+ testMinDocCountOnTerms("s", Script.YES, BucketOrder.count(true));
}
public void testStringCountDesc() throws Exception {
- testMinDocCountOnTerms("s", Script.NO, Terms.Order.count(false));
+ testMinDocCountOnTerms("s", Script.NO, BucketOrder.count(false));
}
public void testStringScriptCountDesc() throws Exception {
- testMinDocCountOnTerms("s", Script.YES, Terms.Order.count(false));
+ testMinDocCountOnTerms("s", Script.YES, BucketOrder.count(false));
}
public void testStringCountAscWithInclude() throws Exception {
- testMinDocCountOnTerms("s", Script.NO, Terms.Order.count(true), ".*a.*", true);
+ testMinDocCountOnTerms("s", Script.NO, BucketOrder.count(true), ".*a.*", true);
}
public void testStringScriptCountAscWithInclude() throws Exception {
- testMinDocCountOnTerms("s", Script.YES, Terms.Order.count(true), ".*a.*", true);
+ testMinDocCountOnTerms("s", Script.YES, BucketOrder.count(true), ".*a.*", true);
}
public void testStringCountDescWithInclude() throws Exception {
- testMinDocCountOnTerms("s", Script.NO, Terms.Order.count(false), ".*a.*", true);
+ testMinDocCountOnTerms("s", Script.NO, BucketOrder.count(false), ".*a.*", true);
}
public void testStringScriptCountDescWithInclude() throws Exception {
- testMinDocCountOnTerms("s", Script.YES, Terms.Order.count(false), ".*a.*", true);
+ testMinDocCountOnTerms("s", Script.YES, BucketOrder.count(false), ".*a.*", true);
}
public void testLongTermAsc() throws Exception {
- testMinDocCountOnTerms("l", Script.NO, Terms.Order.term(true));
+ testMinDocCountOnTerms("l", Script.NO, BucketOrder.key(true));
}
public void testLongScriptTermAsc() throws Exception {
- testMinDocCountOnTerms("l", Script.YES, Terms.Order.term(true));
+ testMinDocCountOnTerms("l", Script.YES, BucketOrder.key(true));
}
public void testLongTermDesc() throws Exception {
- testMinDocCountOnTerms("l", Script.NO, Terms.Order.term(false));
+ testMinDocCountOnTerms("l", Script.NO, BucketOrder.key(false));
}
public void testLongScriptTermDesc() throws Exception {
- testMinDocCountOnTerms("l", Script.YES, Terms.Order.term(false));
+ testMinDocCountOnTerms("l", Script.YES, BucketOrder.key(false));
}
public void testLongCountAsc() throws Exception {
- testMinDocCountOnTerms("l", Script.NO, Terms.Order.count(true));
+ testMinDocCountOnTerms("l", Script.NO, BucketOrder.count(true));
}
public void testLongScriptCountAsc() throws Exception {
- testMinDocCountOnTerms("l", Script.YES, Terms.Order.count(true));
+ testMinDocCountOnTerms("l", Script.YES, BucketOrder.count(true));
}
public void testLongCountDesc() throws Exception {
- testMinDocCountOnTerms("l", Script.NO, Terms.Order.count(false));
+ testMinDocCountOnTerms("l", Script.NO, BucketOrder.count(false));
}
public void testLongScriptCountDesc() throws Exception {
- testMinDocCountOnTerms("l", Script.YES, Terms.Order.count(false));
+ testMinDocCountOnTerms("l", Script.YES, BucketOrder.count(false));
}
public void testDoubleTermAsc() throws Exception {
- testMinDocCountOnTerms("d", Script.NO, Terms.Order.term(true));
+ testMinDocCountOnTerms("d", Script.NO, BucketOrder.key(true));
}
public void testDoubleScriptTermAsc() throws Exception {
- testMinDocCountOnTerms("d", Script.YES, Terms.Order.term(true));
+ testMinDocCountOnTerms("d", Script.YES, BucketOrder.key(true));
}
public void testDoubleTermDesc() throws Exception {
- testMinDocCountOnTerms("d", Script.NO, Terms.Order.term(false));
+ testMinDocCountOnTerms("d", Script.NO, BucketOrder.key(false));
}
public void testDoubleScriptTermDesc() throws Exception {
- testMinDocCountOnTerms("d", Script.YES, Terms.Order.term(false));
+ testMinDocCountOnTerms("d", Script.YES, BucketOrder.key(false));
}
public void testDoubleCountAsc() throws Exception {
- testMinDocCountOnTerms("d", Script.NO, Terms.Order.count(true));
+ testMinDocCountOnTerms("d", Script.NO, BucketOrder.count(true));
}
public void testDoubleScriptCountAsc() throws Exception {
- testMinDocCountOnTerms("d", Script.YES, Terms.Order.count(true));
+ testMinDocCountOnTerms("d", Script.YES, BucketOrder.count(true));
}
public void testDoubleCountDesc() throws Exception {
- testMinDocCountOnTerms("d", Script.NO, Terms.Order.count(false));
+ testMinDocCountOnTerms("d", Script.NO, BucketOrder.count(false));
}
public void testDoubleScriptCountDesc() throws Exception {
- testMinDocCountOnTerms("d", Script.YES, Terms.Order.count(false));
+ testMinDocCountOnTerms("d", Script.YES, BucketOrder.count(false));
}
- private void testMinDocCountOnTerms(String field, Script script, Terms.Order order) throws Exception {
+ private void testMinDocCountOnTerms(String field, Script script, BucketOrder order) throws Exception {
testMinDocCountOnTerms(field, script, order, null, true);
}
- private void testMinDocCountOnTerms(String field, Script script, Terms.Order order, String include, boolean retry) throws Exception {
+ private void testMinDocCountOnTerms(String field, Script script, BucketOrder order, String include, boolean retry) throws Exception {
// all terms
final SearchResponse allTermsResponse = client().prepareSearch("idx").setTypes("type")
.setSize(0)
@@ -342,38 +343,38 @@ public class MinDocCountIT extends AbstractTermsTestCase {
}
public void testHistogramCountAsc() throws Exception {
- testMinDocCountOnHistogram(Histogram.Order.COUNT_ASC);
+ testMinDocCountOnHistogram(BucketOrder.count(true));
}
public void testHistogramCountDesc() throws Exception {
- testMinDocCountOnHistogram(Histogram.Order.COUNT_DESC);
+ testMinDocCountOnHistogram(BucketOrder.count(false));
}
public void testHistogramKeyAsc() throws Exception {
- testMinDocCountOnHistogram(Histogram.Order.KEY_ASC);
+ testMinDocCountOnHistogram(BucketOrder.key(true));
}
public void testHistogramKeyDesc() throws Exception {
- testMinDocCountOnHistogram(Histogram.Order.KEY_DESC);
+ testMinDocCountOnHistogram(BucketOrder.key(false));
}
public void testDateHistogramCountAsc() throws Exception {
- testMinDocCountOnDateHistogram(Histogram.Order.COUNT_ASC);
+ testMinDocCountOnDateHistogram(BucketOrder.count(true));
}
public void testDateHistogramCountDesc() throws Exception {
- testMinDocCountOnDateHistogram(Histogram.Order.COUNT_DESC);
+ testMinDocCountOnDateHistogram(BucketOrder.count(false));
}
public void testDateHistogramKeyAsc() throws Exception {
- testMinDocCountOnDateHistogram(Histogram.Order.KEY_ASC);
+ testMinDocCountOnDateHistogram(BucketOrder.key(true));
}
public void testDateHistogramKeyDesc() throws Exception {
- testMinDocCountOnDateHistogram(Histogram.Order.KEY_DESC);
+ testMinDocCountOnDateHistogram(BucketOrder.key(false));
}
- private void testMinDocCountOnHistogram(Histogram.Order order) throws Exception {
+ private void testMinDocCountOnHistogram(BucketOrder order) throws Exception {
final int interval = randomIntBetween(1, 3);
final SearchResponse allResponse = client().prepareSearch("idx").setTypes("type")
.setSize(0)
@@ -393,7 +394,7 @@ public class MinDocCountIT extends AbstractTermsTestCase {
}
}
- private void testMinDocCountOnDateHistogram(Histogram.Order order) throws Exception {
+ private void testMinDocCountOnDateHistogram(BucketOrder order) throws Exception {
final SearchResponse allResponse = client().prepareSearch("idx").setTypes("type")
.setSize(0)
.setQuery(QUERY)
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/NaNSortingIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/NaNSortingIT.java
index f6db12a7f6d..5b8c3b878c1 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/NaNSortingIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/NaNSortingIT.java
@@ -30,6 +30,7 @@ import org.elasticsearch.search.aggregations.metrics.avg.Avg;
import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStats;
import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStatsAggregationBuilder;
+import org.elasticsearch.search.aggregations.BucketOrder;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
import org.elasticsearch.test.ESIntegTestCase;
@@ -150,7 +151,7 @@ public class NaNSortingIT extends ESIntegTestCase {
final boolean asc = randomBoolean();
SubAggregation agg = randomFrom(SubAggregation.values());
SearchResponse response = client().prepareSearch("idx")
- .addAggregation(terms("terms").field(fieldName).collectMode(randomFrom(SubAggCollectionMode.values())).subAggregation(agg.builder()).order(Terms.Order.aggregation(agg.sortKey(), asc)))
+ .addAggregation(terms("terms").field(fieldName).collectMode(randomFrom(SubAggCollectionMode.values())).subAggregation(agg.builder()).order(BucketOrder.aggregation(agg.sortKey(), asc)))
.execute().actionGet();
assertSearchResponse(response);
@@ -175,7 +176,7 @@ public class NaNSortingIT extends ESIntegTestCase {
SubAggregation agg = randomFrom(SubAggregation.values());
SearchResponse response = client().prepareSearch("idx")
.addAggregation(histogram("histo")
- .field("long_value").interval(randomIntBetween(1, 2)).subAggregation(agg.builder()).order(Histogram.Order.aggregation(agg.sortKey(), asc)))
+ .field("long_value").interval(randomIntBetween(1, 2)).subAggregation(agg.builder()).order(BucketOrder.aggregation(agg.sortKey(), asc)))
.execute().actionGet();
assertSearchResponse(response);
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ReverseNestedIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ReverseNestedIT.java
index 6118cb69ee7..aaf366c7c7b 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ReverseNestedIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ReverseNestedIT.java
@@ -29,6 +29,7 @@ import org.elasticsearch.search.aggregations.bucket.nested.Nested;
import org.elasticsearch.search.aggregations.bucket.nested.ReverseNested;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCount;
+import org.elasticsearch.search.aggregations.BucketOrder;
import org.elasticsearch.test.ESIntegTestCase;
import java.util.ArrayList;
@@ -349,13 +350,13 @@ public class ReverseNestedIT extends ESIntegTestCase {
SearchResponse response = client().prepareSearch("idx2")
.addAggregation(nested("nested1", "nested1.nested2")
.subAggregation(
- terms("field2").field("nested1.nested2.field2").order(Terms.Order.term(true))
+ terms("field2").field("nested1.nested2.field2").order(BucketOrder.key(true))
.collectMode(randomFrom(SubAggCollectionMode.values()))
.size(10000)
.subAggregation(
reverseNested("nested1_to_field1").path("nested1")
.subAggregation(
- terms("field1").field("nested1.field1").order(Terms.Order.term(true))
+ terms("field1").field("nested1.field1").order(BucketOrder.key(true))
.collectMode(randomFrom(SubAggCollectionMode.values()))
)
)
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SamplerIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SamplerIT.java
index ebd078de674..328ce538feb 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SamplerIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/SamplerIT.java
@@ -28,6 +28,7 @@ import org.elasticsearch.search.aggregations.bucket.sampler.SamplerAggregationBu
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.search.aggregations.bucket.terms.Terms.Bucket;
import org.elasticsearch.search.aggregations.metrics.max.Max;
+import org.elasticsearch.search.aggregations.BucketOrder;
import org.elasticsearch.test.ESIntegTestCase;
import java.util.Collection;
@@ -99,7 +100,7 @@ public class SamplerIT extends ESIntegTestCase {
SearchResponse response = client().prepareSearch("test").setTypes("book").setSearchType(SearchType.QUERY_THEN_FETCH)
.addAggregation(terms("genres")
.field("genre")
- .order(Terms.Order.aggregation("sample>max_price.value", asc))
+ .order(BucketOrder.aggregation("sample>max_price.value", asc))
.subAggregation(sampler("sample").shardSize(100)
.subAggregation(max("max_price").field("price")))
).execute().actionGet();
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTermsIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTermsIT.java
index 748c5f886f6..4c03ca9a84e 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTermsIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/ShardSizeTermsIT.java
@@ -21,6 +21,7 @@ package org.elasticsearch.search.aggregations.bucket;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
+import org.elasticsearch.search.aggregations.BucketOrder;
import java.util.HashMap;
import java.util.List;
@@ -39,7 +40,7 @@ public class ShardSizeTermsIT extends ShardSizeTestCase {
SearchResponse response = client().prepareSearch("idx").setTypes("type")
.setQuery(matchAllQuery())
.addAggregation(terms("keys").field("key").size(3)
- .collectMode(randomFrom(SubAggCollectionMode.values())).order(Terms.Order.count(false)))
+ .collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.count(false)))
.execute().actionGet();
Terms terms = response.getAggregations().get("keys");
@@ -62,7 +63,7 @@ public class ShardSizeTermsIT extends ShardSizeTestCase {
SearchResponse response = client().prepareSearch("idx").setTypes("type")
.setQuery(matchAllQuery())
.addAggregation(terms("keys").field("key").size(3).shardSize(3)
- .collectMode(randomFrom(SubAggCollectionMode.values())).order(Terms.Order.count(false)))
+ .collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.count(false)))
.execute().actionGet();
Terms terms = response.getAggregations().get("keys");
@@ -86,7 +87,7 @@ public class ShardSizeTermsIT extends ShardSizeTestCase {
SearchResponse response = client().prepareSearch("idx").setTypes("type")
.setQuery(matchAllQuery())
.addAggregation(terms("keys").field("key").size(3)
- .collectMode(randomFrom(SubAggCollectionMode.values())).shardSize(5).order(Terms.Order.count(false)))
+ .collectMode(randomFrom(SubAggCollectionMode.values())).shardSize(5).order(BucketOrder.count(false)))
.execute().actionGet();
Terms terms = response.getAggregations().get("keys");
@@ -110,7 +111,7 @@ public class ShardSizeTermsIT extends ShardSizeTestCase {
SearchResponse response = client().prepareSearch("idx").setTypes("type").setRouting(routing1)
.setQuery(matchAllQuery())
.addAggregation(terms("keys").field("key").size(3)
- .collectMode(randomFrom(SubAggCollectionMode.values())).shardSize(5).order(Terms.Order.count(false)))
+ .collectMode(randomFrom(SubAggCollectionMode.values())).shardSize(5).order(BucketOrder.count(false)))
.execute().actionGet();
Terms terms = response.getAggregations().get("keys");
@@ -133,7 +134,7 @@ public class ShardSizeTermsIT extends ShardSizeTestCase {
SearchResponse response = client().prepareSearch("idx").setTypes("type")
.setQuery(matchAllQuery())
.addAggregation(terms("keys").field("key").size(3)
- .collectMode(randomFrom(SubAggCollectionMode.values())).order(Terms.Order.term(true)))
+ .collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.key(true)))
.execute().actionGet();
Terms terms = response.getAggregations().get("keys");
@@ -156,7 +157,7 @@ public class ShardSizeTermsIT extends ShardSizeTestCase {
SearchResponse response = client().prepareSearch("idx").setTypes("type")
.setQuery(matchAllQuery())
.addAggregation(terms("keys").field("key").size(3)
- .collectMode(randomFrom(SubAggCollectionMode.values())).order(Terms.Order.count(false)))
+ .collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.count(false)))
.execute().actionGet();
Terms terms = response.getAggregations().get("keys");
@@ -179,7 +180,7 @@ public class ShardSizeTermsIT extends ShardSizeTestCase {
SearchResponse response = client().prepareSearch("idx").setTypes("type")
.setQuery(matchAllQuery())
.addAggregation(terms("keys").field("key").size(3).shardSize(3)
- .collectMode(randomFrom(SubAggCollectionMode.values())).order(Terms.Order.count(false)))
+ .collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.count(false)))
.execute().actionGet();
Terms terms = response.getAggregations().get("keys");
@@ -202,7 +203,7 @@ public class ShardSizeTermsIT extends ShardSizeTestCase {
SearchResponse response = client().prepareSearch("idx").setTypes("type")
.setQuery(matchAllQuery())
.addAggregation(terms("keys").field("key").size(3)
- .collectMode(randomFrom(SubAggCollectionMode.values())).shardSize(5).order(Terms.Order.count(false)))
+ .collectMode(randomFrom(SubAggCollectionMode.values())).shardSize(5).order(BucketOrder.count(false)))
.execute().actionGet();
Terms terms = response.getAggregations().get("keys");
@@ -226,7 +227,7 @@ public class ShardSizeTermsIT extends ShardSizeTestCase {
SearchResponse response = client().prepareSearch("idx").setTypes("type").setRouting(routing1)
.setQuery(matchAllQuery())
.addAggregation(terms("keys").field("key").size(3)
- .collectMode(randomFrom(SubAggCollectionMode.values())).shardSize(5).order(Terms.Order.count(false)))
+ .collectMode(randomFrom(SubAggCollectionMode.values())).shardSize(5).order(BucketOrder.count(false)))
.execute().actionGet();
Terms terms = response.getAggregations().get("keys");
@@ -249,7 +250,7 @@ public class ShardSizeTermsIT extends ShardSizeTestCase {
SearchResponse response = client().prepareSearch("idx").setTypes("type")
.setQuery(matchAllQuery())
.addAggregation(terms("keys").field("key").size(3)
- .collectMode(randomFrom(SubAggCollectionMode.values())).order(Terms.Order.term(true)))
+ .collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.key(true)))
.execute().actionGet();
Terms terms = response.getAggregations().get("keys");
@@ -272,7 +273,7 @@ public class ShardSizeTermsIT extends ShardSizeTestCase {
SearchResponse response = client().prepareSearch("idx").setTypes("type")
.setQuery(matchAllQuery())
.addAggregation(terms("keys").field("key").size(3)
- .collectMode(randomFrom(SubAggCollectionMode.values())).order(Terms.Order.count(false)))
+ .collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.count(false)))
.execute().actionGet();
Terms terms = response.getAggregations().get("keys");
@@ -295,7 +296,7 @@ public class ShardSizeTermsIT extends ShardSizeTestCase {
SearchResponse response = client().prepareSearch("idx").setTypes("type")
.setQuery(matchAllQuery())
.addAggregation(terms("keys").field("key").size(3).shardSize(3)
- .collectMode(randomFrom(SubAggCollectionMode.values())).order(Terms.Order.count(false)))
+ .collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.count(false)))
.execute().actionGet();
Terms terms = response.getAggregations().get("keys");
@@ -318,7 +319,7 @@ public class ShardSizeTermsIT extends ShardSizeTestCase {
SearchResponse response = client().prepareSearch("idx").setTypes("type")
.setQuery(matchAllQuery())
.addAggregation(terms("keys").field("key").size(3)
- .collectMode(randomFrom(SubAggCollectionMode.values())).shardSize(5).order(Terms.Order.count(false)))
+ .collectMode(randomFrom(SubAggCollectionMode.values())).shardSize(5).order(BucketOrder.count(false)))
.execute().actionGet();
Terms terms = response.getAggregations().get("keys");
@@ -341,7 +342,7 @@ public class ShardSizeTermsIT extends ShardSizeTestCase {
SearchResponse response = client().prepareSearch("idx").setTypes("type").setRouting(routing1)
.setQuery(matchAllQuery())
.addAggregation(terms("keys").field("key").size(3)
- .collectMode(randomFrom(SubAggCollectionMode.values())).shardSize(5).order(Terms.Order.count(false)))
+ .collectMode(randomFrom(SubAggCollectionMode.values())).shardSize(5).order(BucketOrder.count(false)))
.execute().actionGet();
Terms terms = response.getAggregations().get("keys");
@@ -364,7 +365,7 @@ public class ShardSizeTermsIT extends ShardSizeTestCase {
SearchResponse response = client().prepareSearch("idx").setTypes("type")
.setQuery(matchAllQuery())
.addAggregation(terms("keys").field("key").size(3)
- .collectMode(randomFrom(SubAggCollectionMode.values())).order(Terms.Order.term(true)))
+ .collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.key(true)))
.execute().actionGet();
Terms terms = response.getAggregations().get("keys");
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/StringTermsIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/StringTermsIT.java
index df69cfcfa93..0c93ff2f6bb 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/StringTermsIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/StringTermsIT.java
@@ -42,10 +42,12 @@ import org.elasticsearch.search.aggregations.bucket.terms.Terms.Bucket;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregatorFactory.ExecutionMode;
import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude;
import org.elasticsearch.search.aggregations.metrics.avg.Avg;
+import org.elasticsearch.search.aggregations.metrics.max.Max;
import org.elasticsearch.search.aggregations.metrics.stats.Stats;
import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStats;
import org.elasticsearch.search.aggregations.metrics.sum.Sum;
import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCount;
+import org.elasticsearch.search.aggregations.BucketOrder;
import org.elasticsearch.test.ESIntegTestCase;
import org.hamcrest.Matchers;
@@ -72,6 +74,7 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.count;
import static org.elasticsearch.search.aggregations.AggregationBuilders.extendedStats;
import static org.elasticsearch.search.aggregations.AggregationBuilders.filter;
import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram;
+import static org.elasticsearch.search.aggregations.AggregationBuilders.max;
import static org.elasticsearch.search.aggregations.AggregationBuilders.stats;
import static org.elasticsearch.search.aggregations.AggregationBuilders.sum;
import static org.elasticsearch.search.aggregations.AggregationBuilders.terms;
@@ -129,9 +132,15 @@ public class StringTermsIT extends AbstractTermsTestCase {
List builders = new ArrayList<>();
for (int i = 0; i < 5; i++) {
builders.add(client().prepareIndex("idx", "type").setSource(
- jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, "val" + i).field("i", i)
- .field("tag", i < 5 / 2 + 1 ? "more" : "less").startArray(MULTI_VALUED_FIELD_NAME).value("val" + i)
- .value("val" + (i + 1)).endArray().endObject()));
+ jsonBuilder().startObject()
+ .field(SINGLE_VALUED_FIELD_NAME, "val" + i)
+ .field("i", i)
+ .field("constant", 1)
+ .field("tag", i < 5 / 2 + 1 ? "more" : "less")
+ .startArray(MULTI_VALUED_FIELD_NAME)
+ .value("val" + i)
+ .value("val" + (i + 1))
+ .endArray().endObject()));
}
getMultiSortDocs(builders);
@@ -456,15 +465,15 @@ public class StringTermsIT extends AbstractTermsTestCase {
}
}
-
+
public void testSingleValueFieldWithPartitionedFiltering() throws Exception {
runTestFieldWithPartitionedFiltering(SINGLE_VALUED_FIELD_NAME);
}
-
+
public void testMultiValueFieldWithPartitionedFiltering() throws Exception {
runTestFieldWithPartitionedFiltering(MULTI_VALUED_FIELD_NAME);
}
-
+
private void runTestFieldWithPartitionedFiltering(String field) throws Exception {
// Find total number of unique terms
SearchResponse allResponse = client().prepareSearch("idx").setTypes("type")
@@ -492,8 +501,8 @@ public class StringTermsIT extends AbstractTermsTestCase {
}
}
assertEquals(expectedCardinality, foundTerms.size());
- }
-
+ }
+
public void testSingleValueFieldWithMaxSize() throws Exception {
SearchResponse response = client()
@@ -503,7 +512,7 @@ public class StringTermsIT extends AbstractTermsTestCase {
.executionHint(randomExecutionHint())
.field(SINGLE_VALUED_FIELD_NAME).size(20)
.collectMode(randomFrom(SubAggCollectionMode.values()))
- .order(Terms.Order.term(true))) // we need to sort by terms cause we're checking the first 20 values
+ .order(BucketOrder.key(true))) // we need to sort by terms cause we're checking the first 20 values
.execute().actionGet();
assertSearchResponse(response);
@@ -527,7 +536,7 @@ public class StringTermsIT extends AbstractTermsTestCase {
.setTypes("type")
.addAggregation(
terms("terms").executionHint(randomExecutionHint()).field(SINGLE_VALUED_FIELD_NAME)
- .collectMode(randomFrom(SubAggCollectionMode.values())).order(Terms.Order.term(true))).execute()
+ .collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.key(true))).execute()
.actionGet();
assertSearchResponse(response);
@@ -552,7 +561,7 @@ public class StringTermsIT extends AbstractTermsTestCase {
.setTypes("type")
.addAggregation(
terms("terms").executionHint(randomExecutionHint()).field(SINGLE_VALUED_FIELD_NAME)
- .collectMode(randomFrom(SubAggCollectionMode.values())).order(Terms.Order.term(false))).execute()
+ .collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.key(false))).execute()
.actionGet();
assertSearchResponse(response);
@@ -944,7 +953,7 @@ public class StringTermsIT extends AbstractTermsTestCase {
.setTypes("type")
.addAggregation(
terms("terms").executionHint(randomExecutionHint()).field(SINGLE_VALUED_FIELD_NAME)
- .collectMode(randomFrom(SubAggCollectionMode.values())).order(Terms.Order.aggregation("avg_i", asc))
+ .collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.aggregation("avg_i", asc))
.subAggregation(avg("avg_i").field("i"))).execute().actionGet();
assertSearchResponse(response);
@@ -966,6 +975,34 @@ public class StringTermsIT extends AbstractTermsTestCase {
}
}
+ public void testSingleValuedFieldOrderedByTieBreaker() throws Exception {
+ SearchResponse response = client()
+ .prepareSearch("idx")
+ .setTypes("type")
+ .addAggregation(
+ terms("terms").executionHint(randomExecutionHint()).field(SINGLE_VALUED_FIELD_NAME)
+ .collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.aggregation("max_constant", randomBoolean()))
+ .subAggregation(max("max_constant").field("constant"))).execute().actionGet();
+
+ assertSearchResponse(response);
+
+ Terms terms = response.getAggregations().get("terms");
+ assertThat(terms, notNullValue());
+ assertThat(terms.getName(), equalTo("terms"));
+ assertThat(terms.getBuckets().size(), equalTo(5));
+
+ int i = 0;
+ for (Terms.Bucket bucket : terms.getBuckets()) {
+ assertThat(bucket, notNullValue());
+ assertThat(key(bucket), equalTo("val" + i));
+ assertThat(bucket.getDocCount(), equalTo(1L));
+ Max max = bucket.getAggregations().get("max_constant");
+ assertThat(max, notNullValue());
+ assertThat(max.getValue(), equalTo((double) 1));
+ i++;
+ }
+ }
+
public void testSingleValuedFieldOrderedByIllegalAgg() throws Exception {
boolean asc = true;
try {
@@ -975,7 +1012,7 @@ public class StringTermsIT extends AbstractTermsTestCase {
.addAggregation(
terms("terms").executionHint(randomExecutionHint()).field(SINGLE_VALUED_FIELD_NAME)
.collectMode(randomFrom(SubAggCollectionMode.values()))
- .order(Terms.Order.aggregation("inner_terms>avg", asc))
+ .order(BucketOrder.aggregation("inner_terms>avg", asc))
.subAggregation(terms("inner_terms").field(MULTI_VALUED_FIELD_NAME).subAggregation(avg("avg").field("i"))))
.execute().actionGet();
fail("Expected an exception");
@@ -985,7 +1022,7 @@ public class StringTermsIT extends AbstractTermsTestCase {
ElasticsearchException rootCause = rootCauses[0];
if (rootCause instanceof AggregationExecutionException) {
AggregationExecutionException aggException = (AggregationExecutionException) rootCause;
- assertThat(aggException.getMessage(), Matchers.startsWith("Invalid terms aggregation order path"));
+ assertThat(aggException.getMessage(), Matchers.startsWith("Invalid aggregation order path"));
} else {
throw e;
}
@@ -1002,7 +1039,7 @@ public class StringTermsIT extends AbstractTermsTestCase {
.setTypes("type")
.addAggregation(
terms("tags").executionHint(randomExecutionHint()).field("tag")
- .collectMode(randomFrom(SubAggCollectionMode.values())).order(Terms.Order.aggregation("filter", asc))
+ .collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.aggregation("filter", asc))
.subAggregation(filter("filter", QueryBuilders.matchAllQuery()))).execute().actionGet();
assertSearchResponse(response);
@@ -1041,7 +1078,7 @@ public class StringTermsIT extends AbstractTermsTestCase {
.executionHint(randomExecutionHint())
.field("tag")
.collectMode(randomFrom(SubAggCollectionMode.values()))
- .order(Terms.Order.aggregation("filter1>filter2>stats.max", asc))
+ .order(BucketOrder.aggregation("filter1>filter2>stats.max", asc))
.subAggregation(
filter("filter1", QueryBuilders.matchAllQuery()).subAggregation(
filter("filter2", QueryBuilders.matchAllQuery()).subAggregation(
@@ -1104,7 +1141,7 @@ public class StringTermsIT extends AbstractTermsTestCase {
.executionHint(randomExecutionHint())
.field("tag")
.collectMode(randomFrom(SubAggCollectionMode.values()))
- .order(Terms.Order.aggregation("filter1>" + filter2Name + ">" + statsName + ".max", asc))
+ .order(BucketOrder.aggregation("filter1>" + filter2Name + ">" + statsName + ".max", asc))
.subAggregation(
filter("filter1", QueryBuilders.matchAllQuery()).subAggregation(
filter(filter2Name, QueryBuilders.matchAllQuery()).subAggregation(
@@ -1167,7 +1204,7 @@ public class StringTermsIT extends AbstractTermsTestCase {
.executionHint(randomExecutionHint())
.field("tag")
.collectMode(randomFrom(SubAggCollectionMode.values()))
- .order(Terms.Order.aggregation("filter1>" + filter2Name + ">" + statsName + "[max]", asc))
+ .order(BucketOrder.aggregation("filter1>" + filter2Name + ">" + statsName + "[max]", asc))
.subAggregation(
filter("filter1", QueryBuilders.matchAllQuery()).subAggregation(
filter(filter2Name, QueryBuilders.matchAllQuery()).subAggregation(
@@ -1222,7 +1259,7 @@ public class StringTermsIT extends AbstractTermsTestCase {
.addAggregation(
terms("terms").executionHint(randomExecutionHint()).field(SINGLE_VALUED_FIELD_NAME)
.collectMode(randomFrom(SubAggCollectionMode.values()))
- .order(Terms.Order.aggregation("avg_i", true))).execute().actionGet();
+ .order(BucketOrder.aggregation("avg_i", true))).execute().actionGet();
fail("Expected search to fail when trying to sort terms aggregation by sug-aggregation that doesn't exist");
@@ -1240,7 +1277,7 @@ public class StringTermsIT extends AbstractTermsTestCase {
.addAggregation(
terms("terms").executionHint(randomExecutionHint()).field(SINGLE_VALUED_FIELD_NAME)
.collectMode(randomFrom(SubAggCollectionMode.values()))
- .order(Terms.Order.aggregation("values", true))
+ .order(BucketOrder.aggregation("values", true))
.subAggregation(terms("values").field("i").collectMode(randomFrom(SubAggCollectionMode.values()))))
.execute().actionGet();
@@ -1262,7 +1299,7 @@ public class StringTermsIT extends AbstractTermsTestCase {
.addAggregation(
terms("terms").executionHint(randomExecutionHint()).field(SINGLE_VALUED_FIELD_NAME)
.collectMode(randomFrom(SubAggCollectionMode.values()))
- .order(Terms.Order.aggregation("stats.foo", true)).subAggregation(stats("stats").field("i")))
+ .order(BucketOrder.aggregation("stats.foo", true)).subAggregation(stats("stats").field("i")))
.execute().actionGet();
fail("Expected search to fail when trying to sort terms aggregation by multi-valued sug-aggregation "
+ "with an unknown specified metric to order by. response had " + response.getFailedShards() + " failed shards.");
@@ -1281,7 +1318,7 @@ public class StringTermsIT extends AbstractTermsTestCase {
.addAggregation(
terms("terms").executionHint(randomExecutionHint()).field(SINGLE_VALUED_FIELD_NAME)
.collectMode(randomFrom(SubAggCollectionMode.values()))
- .order(Terms.Order.aggregation("stats", true)).subAggregation(stats("stats").field("i"))).execute()
+ .order(BucketOrder.aggregation("stats", true)).subAggregation(stats("stats").field("i"))).execute()
.actionGet();
fail("Expected search to fail when trying to sort terms aggregation by multi-valued sug-aggregation "
@@ -1300,7 +1337,7 @@ public class StringTermsIT extends AbstractTermsTestCase {
.setTypes("type")
.addAggregation(
terms("terms").executionHint(randomExecutionHint()).field(SINGLE_VALUED_FIELD_NAME)
- .collectMode(randomFrom(SubAggCollectionMode.values())).order(Terms.Order.aggregation("avg_i", asc))
+ .collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.aggregation("avg_i", asc))
.subAggregation(avg("avg_i").field("i"))).execute().actionGet();
assertSearchResponse(response);
@@ -1331,7 +1368,7 @@ public class StringTermsIT extends AbstractTermsTestCase {
.setTypes("type")
.addAggregation(
terms("terms").executionHint(randomExecutionHint()).field(SINGLE_VALUED_FIELD_NAME)
- .collectMode(randomFrom(SubAggCollectionMode.values())).order(Terms.Order.aggregation("stats.avg", asc))
+ .collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.aggregation("stats.avg", asc))
.subAggregation(stats("stats").field("i"))).execute().actionGet();
assertSearchResponse(response);
@@ -1361,7 +1398,7 @@ public class StringTermsIT extends AbstractTermsTestCase {
.setTypes("type")
.addAggregation(
terms("terms").executionHint(randomExecutionHint()).field(SINGLE_VALUED_FIELD_NAME)
- .collectMode(randomFrom(SubAggCollectionMode.values())).order(Terms.Order.aggregation("stats.avg", asc))
+ .collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.aggregation("stats.avg", asc))
.subAggregation(stats("stats").field("i"))).execute().actionGet();
assertSearchResponse(response);
@@ -1393,7 +1430,7 @@ public class StringTermsIT extends AbstractTermsTestCase {
.addAggregation(
terms("terms").executionHint(randomExecutionHint()).field(SINGLE_VALUED_FIELD_NAME)
.collectMode(randomFrom(SubAggCollectionMode.values()))
- .order(Terms.Order.aggregation("stats.sum_of_squares", asc))
+ .order(BucketOrder.aggregation("stats.sum_of_squares", asc))
.subAggregation(extendedStats("stats").field("i"))).execute().actionGet();
assertSearchResponse(response);
@@ -1425,7 +1462,7 @@ public class StringTermsIT extends AbstractTermsTestCase {
.addAggregation(
terms("terms").executionHint(randomExecutionHint()).field(SINGLE_VALUED_FIELD_NAME)
.collectMode(randomFrom(SubAggCollectionMode.values()))
- .order(Terms.Order.aggregation("stats.sum_of_squares", asc))
+ .order(BucketOrder.aggregation("stats.sum_of_squares", asc))
.subAggregation(extendedStats("stats").field("i"))
.subAggregation(terms("subTerms").field("s_values").collectMode(randomFrom(SubAggCollectionMode.values()))))
.execute().actionGet();
@@ -1464,46 +1501,46 @@ public class StringTermsIT extends AbstractTermsTestCase {
public void testSingleValuedFieldOrderedBySingleValueSubAggregationAscAndTermsDesc() throws Exception {
String[] expectedKeys = new String[] { "val1", "val2", "val4", "val3", "val7", "val6", "val5" };
- assertMultiSortResponse(expectedKeys, Terms.Order.aggregation("avg_l", true), Terms.Order.term(false));
+ assertMultiSortResponse(expectedKeys, BucketOrder.aggregation("avg_l", true), BucketOrder.key(false));
}
public void testSingleValuedFieldOrderedBySingleValueSubAggregationAscAndTermsAsc() throws Exception {
String[] expectedKeys = new String[] { "val1", "val2", "val3", "val4", "val5", "val6", "val7" };
- assertMultiSortResponse(expectedKeys, Terms.Order.aggregation("avg_l", true), Terms.Order.term(true));
+ assertMultiSortResponse(expectedKeys, BucketOrder.aggregation("avg_l", true), BucketOrder.key(true));
}
public void testSingleValuedFieldOrderedBySingleValueSubAggregationDescAndTermsAsc() throws Exception {
String[] expectedKeys = new String[] { "val5", "val6", "val7", "val3", "val4", "val2", "val1" };
- assertMultiSortResponse(expectedKeys, Terms.Order.aggregation("avg_l", false), Terms.Order.term(true));
+ assertMultiSortResponse(expectedKeys, BucketOrder.aggregation("avg_l", false), BucketOrder.key(true));
}
public void testSingleValuedFieldOrderedByCountAscAndSingleValueSubAggregationAsc() throws Exception {
String[] expectedKeys = new String[] { "val6", "val7", "val3", "val4", "val5", "val1", "val2" };
- assertMultiSortResponse(expectedKeys, Terms.Order.count(true), Terms.Order.aggregation("avg_l", true));
+ assertMultiSortResponse(expectedKeys, BucketOrder.count(true), BucketOrder.aggregation("avg_l", true));
}
public void testSingleValuedFieldOrderedBySingleValueSubAggregationAscSingleValueSubAggregationAsc() throws Exception {
String[] expectedKeys = new String[] { "val6", "val7", "val3", "val5", "val4", "val1", "val2" };
- assertMultiSortResponse(expectedKeys, Terms.Order.aggregation("sum_d", true), Terms.Order.aggregation("avg_l", true));
+ assertMultiSortResponse(expectedKeys, BucketOrder.aggregation("sum_d", true), BucketOrder.aggregation("avg_l", true));
}
public void testSingleValuedFieldOrderedByThreeCriteria() throws Exception {
String[] expectedKeys = new String[] { "val2", "val1", "val4", "val5", "val3", "val6", "val7" };
- assertMultiSortResponse(expectedKeys, Terms.Order.count(false), Terms.Order.aggregation("sum_d", false),
- Terms.Order.aggregation("avg_l", false));
+ assertMultiSortResponse(expectedKeys, BucketOrder.count(false), BucketOrder.aggregation("sum_d", false),
+ BucketOrder.aggregation("avg_l", false));
}
public void testSingleValuedFieldOrderedBySingleValueSubAggregationAscAsCompound() throws Exception {
String[] expectedKeys = new String[] { "val1", "val2", "val3", "val4", "val5", "val6", "val7" };
- assertMultiSortResponse(expectedKeys, Terms.Order.aggregation("avg_l", true));
+ assertMultiSortResponse(expectedKeys, BucketOrder.aggregation("avg_l", true));
}
- private void assertMultiSortResponse(String[] expectedKeys, Terms.Order... order) {
+ private void assertMultiSortResponse(String[] expectedKeys, BucketOrder... order) {
SearchResponse response = client()
.prepareSearch("sort_idx")
.addAggregation(
terms("terms").executionHint(randomExecutionHint()).field(SINGLE_VALUED_FIELD_NAME)
- .collectMode(randomFrom(SubAggCollectionMode.values())).order(Terms.Order.compound(order))
+ .collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.compound(order))
.subAggregation(avg("avg_l").field("l")).subAggregation(sum("sum_d").field("d"))).execute().actionGet();
assertSearchResponse(response);
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/TermsDocCountErrorIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/TermsDocCountErrorIT.java
index 9ed32ca2e7b..204de33440d 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/TermsDocCountErrorIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/TermsDocCountErrorIT.java
@@ -27,8 +27,8 @@ import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.search.aggregations.bucket.terms.Terms.Bucket;
-import org.elasticsearch.search.aggregations.bucket.terms.Terms.Order;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregatorFactory.ExecutionMode;
+import org.elasticsearch.search.aggregations.BucketOrder;
import org.elasticsearch.test.ESIntegTestCase;
import java.util.ArrayList;
@@ -347,7 +347,7 @@ public class TermsDocCountErrorIT extends ESIntegTestCase {
.field(STRING_FIELD_NAME)
.showTermDocCountError(true)
.size(10000).shardSize(10000)
- .order(Order.count(true))
+ .order(BucketOrder.count(true))
.collectMode(randomFrom(SubAggCollectionMode.values())))
.execute().actionGet();
@@ -360,7 +360,7 @@ public class TermsDocCountErrorIT extends ESIntegTestCase {
.showTermDocCountError(true)
.size(size)
.shardSize(shardSize)
- .order(Order.count(true))
+ .order(BucketOrder.count(true))
.collectMode(randomFrom(SubAggCollectionMode.values())))
.execute().actionGet();
@@ -378,7 +378,7 @@ public class TermsDocCountErrorIT extends ESIntegTestCase {
.field(STRING_FIELD_NAME)
.showTermDocCountError(true)
.size(10000).shardSize(10000)
- .order(Order.term(true))
+ .order(BucketOrder.key(true))
.collectMode(randomFrom(SubAggCollectionMode.values())))
.execute().actionGet();
@@ -391,7 +391,7 @@ public class TermsDocCountErrorIT extends ESIntegTestCase {
.showTermDocCountError(true)
.size(size)
.shardSize(shardSize)
- .order(Order.term(true))
+ .order(BucketOrder.key(true))
.collectMode(randomFrom(SubAggCollectionMode.values())))
.execute().actionGet();
@@ -409,7 +409,7 @@ public class TermsDocCountErrorIT extends ESIntegTestCase {
.field(STRING_FIELD_NAME)
.showTermDocCountError(true)
.size(10000).shardSize(10000)
- .order(Order.term(false))
+ .order(BucketOrder.key(false))
.collectMode(randomFrom(SubAggCollectionMode.values())))
.execute().actionGet();
@@ -422,7 +422,7 @@ public class TermsDocCountErrorIT extends ESIntegTestCase {
.showTermDocCountError(true)
.size(size)
.shardSize(shardSize)
- .order(Order.term(false))
+ .order(BucketOrder.key(false))
.collectMode(randomFrom(SubAggCollectionMode.values())))
.execute().actionGet();
@@ -440,7 +440,7 @@ public class TermsDocCountErrorIT extends ESIntegTestCase {
.field(STRING_FIELD_NAME)
.showTermDocCountError(true)
.size(10000).shardSize(10000)
- .order(Order.aggregation("sortAgg", true))
+ .order(BucketOrder.aggregation("sortAgg", true))
.collectMode(randomFrom(SubAggCollectionMode.values()))
.subAggregation(sum("sortAgg").field(LONG_FIELD_NAME)))
.execute().actionGet();
@@ -454,7 +454,7 @@ public class TermsDocCountErrorIT extends ESIntegTestCase {
.showTermDocCountError(true)
.size(size)
.shardSize(shardSize)
- .order(Order.aggregation("sortAgg", true))
+ .order(BucketOrder.aggregation("sortAgg", true))
.collectMode(randomFrom(SubAggCollectionMode.values()))
.subAggregation(sum("sortAgg").field(LONG_FIELD_NAME)))
.execute().actionGet();
@@ -473,7 +473,7 @@ public class TermsDocCountErrorIT extends ESIntegTestCase {
.field(STRING_FIELD_NAME)
.showTermDocCountError(true)
.size(10000).shardSize(10000)
- .order(Order.aggregation("sortAgg", false))
+ .order(BucketOrder.aggregation("sortAgg", false))
.collectMode(randomFrom(SubAggCollectionMode.values()))
.subAggregation(sum("sortAgg").field(LONG_FIELD_NAME)))
.execute().actionGet();
@@ -487,7 +487,7 @@ public class TermsDocCountErrorIT extends ESIntegTestCase {
.showTermDocCountError(true)
.size(size)
.shardSize(shardSize)
- .order(Order.aggregation("sortAgg", false))
+ .order(BucketOrder.aggregation("sortAgg", false))
.collectMode(randomFrom(SubAggCollectionMode.values()))
.subAggregation(sum("sortAgg").field(LONG_FIELD_NAME)))
.execute().actionGet();
@@ -583,7 +583,7 @@ public class TermsDocCountErrorIT extends ESIntegTestCase {
.field(LONG_FIELD_NAME)
.showTermDocCountError(true)
.size(10000).shardSize(10000)
- .order(Order.count(true))
+ .order(BucketOrder.count(true))
.collectMode(randomFrom(SubAggCollectionMode.values())))
.execute().actionGet();
@@ -596,7 +596,7 @@ public class TermsDocCountErrorIT extends ESIntegTestCase {
.showTermDocCountError(true)
.size(size)
.shardSize(shardSize)
- .order(Order.count(true))
+ .order(BucketOrder.count(true))
.collectMode(randomFrom(SubAggCollectionMode.values())))
.execute().actionGet();
@@ -614,7 +614,7 @@ public class TermsDocCountErrorIT extends ESIntegTestCase {
.field(LONG_FIELD_NAME)
.showTermDocCountError(true)
.size(10000).shardSize(10000)
- .order(Order.term(true))
+ .order(BucketOrder.key(true))
.collectMode(randomFrom(SubAggCollectionMode.values())))
.execute().actionGet();
@@ -627,7 +627,7 @@ public class TermsDocCountErrorIT extends ESIntegTestCase {
.showTermDocCountError(true)
.size(size)
.shardSize(shardSize)
- .order(Order.term(true))
+ .order(BucketOrder.key(true))
.collectMode(randomFrom(SubAggCollectionMode.values())))
.execute().actionGet();
@@ -645,7 +645,7 @@ public class TermsDocCountErrorIT extends ESIntegTestCase {
.field(LONG_FIELD_NAME)
.showTermDocCountError(true)
.size(10000).shardSize(10000)
- .order(Order.term(false))
+ .order(BucketOrder.key(false))
.collectMode(randomFrom(SubAggCollectionMode.values())))
.execute().actionGet();
@@ -658,7 +658,7 @@ public class TermsDocCountErrorIT extends ESIntegTestCase {
.showTermDocCountError(true)
.size(size)
.shardSize(shardSize)
- .order(Order.term(false))
+ .order(BucketOrder.key(false))
.collectMode(randomFrom(SubAggCollectionMode.values())))
.execute().actionGet();
@@ -676,7 +676,7 @@ public class TermsDocCountErrorIT extends ESIntegTestCase {
.field(LONG_FIELD_NAME)
.showTermDocCountError(true)
.size(10000).shardSize(10000)
- .order(Order.aggregation("sortAgg", true))
+ .order(BucketOrder.aggregation("sortAgg", true))
.collectMode(randomFrom(SubAggCollectionMode.values()))
.subAggregation(sum("sortAgg").field(LONG_FIELD_NAME)))
.execute().actionGet();
@@ -690,7 +690,7 @@ public class TermsDocCountErrorIT extends ESIntegTestCase {
.showTermDocCountError(true)
.size(size)
.shardSize(shardSize)
- .order(Order.aggregation("sortAgg", true))
+ .order(BucketOrder.aggregation("sortAgg", true))
.collectMode(randomFrom(SubAggCollectionMode.values()))
.subAggregation(sum("sortAgg").field(LONG_FIELD_NAME)))
.execute().actionGet();
@@ -709,7 +709,7 @@ public class TermsDocCountErrorIT extends ESIntegTestCase {
.field(LONG_FIELD_NAME)
.showTermDocCountError(true)
.size(10000).shardSize(10000)
- .order(Order.aggregation("sortAgg", false))
+ .order(BucketOrder.aggregation("sortAgg", false))
.collectMode(randomFrom(SubAggCollectionMode.values()))
.subAggregation(sum("sortAgg").field(DOUBLE_FIELD_NAME)))
.execute().actionGet();
@@ -723,7 +723,7 @@ public class TermsDocCountErrorIT extends ESIntegTestCase {
.showTermDocCountError(true)
.size(size)
.shardSize(shardSize)
- .order(Order.aggregation("sortAgg", false))
+ .order(BucketOrder.aggregation("sortAgg", false))
.collectMode(randomFrom(SubAggCollectionMode.values()))
.subAggregation(sum("sortAgg").field(DOUBLE_FIELD_NAME)))
.execute().actionGet();
@@ -819,7 +819,7 @@ public class TermsDocCountErrorIT extends ESIntegTestCase {
.field(DOUBLE_FIELD_NAME)
.showTermDocCountError(true)
.size(10000).shardSize(10000)
- .order(Order.count(true))
+ .order(BucketOrder.count(true))
.collectMode(randomFrom(SubAggCollectionMode.values())))
.execute().actionGet();
@@ -832,7 +832,7 @@ public class TermsDocCountErrorIT extends ESIntegTestCase {
.showTermDocCountError(true)
.size(size)
.shardSize(shardSize)
- .order(Order.count(true))
+ .order(BucketOrder.count(true))
.collectMode(randomFrom(SubAggCollectionMode.values())))
.execute().actionGet();
@@ -850,7 +850,7 @@ public class TermsDocCountErrorIT extends ESIntegTestCase {
.field(DOUBLE_FIELD_NAME)
.showTermDocCountError(true)
.size(10000).shardSize(10000)
- .order(Order.term(true))
+ .order(BucketOrder.key(true))
.collectMode(randomFrom(SubAggCollectionMode.values())))
.execute().actionGet();
@@ -863,7 +863,7 @@ public class TermsDocCountErrorIT extends ESIntegTestCase {
.showTermDocCountError(true)
.size(size)
.shardSize(shardSize)
- .order(Order.term(true))
+ .order(BucketOrder.key(true))
.collectMode(randomFrom(SubAggCollectionMode.values())))
.execute().actionGet();
@@ -881,7 +881,7 @@ public class TermsDocCountErrorIT extends ESIntegTestCase {
.field(DOUBLE_FIELD_NAME)
.showTermDocCountError(true)
.size(10000).shardSize(10000)
- .order(Order.term(false))
+ .order(BucketOrder.key(false))
.collectMode(randomFrom(SubAggCollectionMode.values())))
.execute().actionGet();
@@ -894,7 +894,7 @@ public class TermsDocCountErrorIT extends ESIntegTestCase {
.showTermDocCountError(true)
.size(size)
.shardSize(shardSize)
- .order(Order.term(false))
+ .order(BucketOrder.key(false))
.collectMode(randomFrom(SubAggCollectionMode.values())))
.execute().actionGet();
@@ -912,7 +912,7 @@ public class TermsDocCountErrorIT extends ESIntegTestCase {
.field(DOUBLE_FIELD_NAME)
.showTermDocCountError(true)
.size(10000).shardSize(10000)
- .order(Order.aggregation("sortAgg", true))
+ .order(BucketOrder.aggregation("sortAgg", true))
.collectMode(randomFrom(SubAggCollectionMode.values()))
.subAggregation(sum("sortAgg").field(LONG_FIELD_NAME)))
.execute().actionGet();
@@ -926,7 +926,7 @@ public class TermsDocCountErrorIT extends ESIntegTestCase {
.showTermDocCountError(true)
.size(size)
.shardSize(shardSize)
- .order(Order.aggregation("sortAgg", true))
+ .order(BucketOrder.aggregation("sortAgg", true))
.collectMode(randomFrom(SubAggCollectionMode.values()))
.subAggregation(sum("sortAgg").field(LONG_FIELD_NAME)))
.execute().actionGet();
@@ -945,7 +945,7 @@ public class TermsDocCountErrorIT extends ESIntegTestCase {
.field(DOUBLE_FIELD_NAME)
.showTermDocCountError(true)
.size(10000).shardSize(10000)
- .order(Order.aggregation("sortAgg", false))
+ .order(BucketOrder.aggregation("sortAgg", false))
.collectMode(randomFrom(SubAggCollectionMode.values()))
.subAggregation(sum("sortAgg").field(LONG_FIELD_NAME)))
.execute().actionGet();
@@ -959,7 +959,7 @@ public class TermsDocCountErrorIT extends ESIntegTestCase {
.showTermDocCountError(true)
.size(size)
.shardSize(shardSize)
- .order(Order.aggregation("sortAgg", false))
+ .order(BucketOrder.aggregation("sortAgg", false))
.collectMode(randomFrom(SubAggCollectionMode.values()))
.subAggregation(sum("sortAgg").field(LONG_FIELD_NAME)))
.execute().actionGet();
@@ -968,7 +968,7 @@ public class TermsDocCountErrorIT extends ESIntegTestCase {
assertUnboundedDocCountError(size, accurateResponse, testResponse);
}
-
+
/**
* Test a case where we know exactly how many of each term is on each shard
* so we know the exact error value for each term. To do this we search over
@@ -984,39 +984,39 @@ public class TermsDocCountErrorIT extends ESIntegTestCase {
.collectMode(randomFrom(SubAggCollectionMode.values())))
.execute().actionGet();
assertSearchResponse(response);
-
+
Terms terms = response.getAggregations().get("terms");
assertThat(terms, notNullValue());
assertThat(terms.getDocCountError(), equalTo(46L));
List<? extends Bucket> buckets = terms.getBuckets();
assertThat(buckets, notNullValue());
assertThat(buckets.size(), equalTo(5));
-
+
Bucket bucket = buckets.get(0);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("A"));
assertThat(bucket.getDocCount(), equalTo(100L));
assertThat(bucket.getDocCountError(), equalTo(0L));
-
+
bucket = buckets.get(1);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("Z"));
assertThat(bucket.getDocCount(), equalTo(52L));
assertThat(bucket.getDocCountError(), equalTo(2L));
-
+
bucket = buckets.get(2);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("C"));
assertThat(bucket.getDocCount(), equalTo(50L));
assertThat(bucket.getDocCountError(), equalTo(15L));
-
-
+
+
bucket = buckets.get(3);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("G"));
assertThat(bucket.getDocCount(), equalTo(45L));
assertThat(bucket.getDocCountError(), equalTo(2L));
-
+
bucket = buckets.get(4);
assertThat(bucket, notNullValue());
assertThat(bucket.getKey(), equalTo("B"));
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/TermsShardMinDocCountIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/TermsShardMinDocCountIT.java
index f7e3d9a61b5..c094c245dac 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/TermsShardMinDocCountIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/TermsShardMinDocCountIT.java
@@ -26,6 +26,7 @@ import org.elasticsearch.search.aggregations.bucket.filter.InternalFilter;
import org.elasticsearch.search.aggregations.bucket.significant.SignificantTerms;
import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsAggregatorFactory;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
+import org.elasticsearch.search.aggregations.BucketOrder;
import org.elasticsearch.test.ESIntegTestCase;
import java.util.ArrayList;
@@ -129,7 +130,7 @@ public class TermsShardMinDocCountIT extends ESIntegTestCase {
// first, check that indeed when not setting the shardMinDocCount parameter 0 terms are returned
SearchResponse response = client().prepareSearch(index)
.addAggregation(
- terms("myTerms").field("text").minDocCount(2).size(2).executionHint(randomExecutionHint()).order(Terms.Order.term(true))
+ terms("myTerms").field("text").minDocCount(2).size(2).executionHint(randomExecutionHint()).order(BucketOrder.key(true))
)
.execute()
.actionGet();
@@ -140,7 +141,7 @@ public class TermsShardMinDocCountIT extends ESIntegTestCase {
response = client().prepareSearch(index)
.addAggregation(
- terms("myTerms").field("text").minDocCount(2).shardMinDocCount(2).size(2).executionHint(randomExecutionHint()).order(Terms.Order.term(true))
+ terms("myTerms").field("text").minDocCount(2).shardMinDocCount(2).size(2).executionHint(randomExecutionHint()).order(BucketOrder.key(true))
)
.execute()
.actionGet();
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/TermsTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/TermsTests.java
index 42f6ef78f4b..73c275cfd23 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/TermsTests.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/TermsTests.java
@@ -23,10 +23,10 @@ import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.automaton.RegExp;
import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode;
import org.elasticsearch.search.aggregations.BaseAggregationTestCase;
-import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregatorFactory.ExecutionMode;
import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude;
+import org.elasticsearch.search.aggregations.BucketOrder;
import java.util.ArrayList;
import java.util.List;
@@ -155,8 +155,12 @@ public class TermsTests extends BaseAggregationTestCase<TermsAggregationBuilder>
factory.includeExclude(incExc);
}
if (randomBoolean()) {
- List<Terms.Order> order = randomOrder();
- factory.order(order);
+ List<BucketOrder> order = randomOrder();
+ if(order.size() == 1 && randomBoolean()) {
+ factory.order(order.get(0));
+ } else {
+ factory.order(order);
+ }
}
if (randomBoolean()) {
factory.showTermDocCountError(randomBoolean());
@@ -164,20 +168,20 @@ public class TermsTests extends BaseAggregationTestCase
return factory;
}
- private List<Terms.Order> randomOrder() {
- List<Terms.Order> orders = new ArrayList<>();
+ private List<BucketOrder> randomOrder() {
+ List<BucketOrder> orders = new ArrayList<>();
switch (randomInt(4)) {
case 0:
- orders.add(Terms.Order.term(randomBoolean()));
+ orders.add(BucketOrder.key(randomBoolean()));
break;
case 1:
- orders.add(Terms.Order.count(randomBoolean()));
+ orders.add(BucketOrder.count(randomBoolean()));
break;
case 2:
- orders.add(Terms.Order.aggregation(randomAlphaOfLengthBetween(3, 20), randomBoolean()));
+ orders.add(BucketOrder.aggregation(randomAlphaOfLengthBetween(3, 20), randomBoolean()));
break;
case 3:
- orders.add(Terms.Order.aggregation(randomAlphaOfLengthBetween(3, 20), randomAlphaOfLengthBetween(3, 20), randomBoolean()));
+ orders.add(BucketOrder.aggregation(randomAlphaOfLengthBetween(3, 20), randomAlphaOfLengthBetween(3, 20), randomBoolean()));
break;
case 4:
int numOrders = randomIntBetween(1, 3);
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogramTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogramTests.java
index 50cb4530493..c34b6093e2d 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogramTests.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogramTests.java
@@ -25,6 +25,7 @@ import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.InternalMultiBucketAggregationTestCase;
import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
+import org.elasticsearch.search.aggregations.BucketOrder;
import org.elasticsearch.test.InternalAggregationTestCase;
import org.joda.time.DateTime;
@@ -66,7 +67,7 @@ public class InternalDateHistogramTests extends InternalMultiBucketAggregationTe
buckets.add(i, new InternalDateHistogram.Bucket(key, randomIntBetween(1, 100), keyed, format, aggregations));
}
- InternalOrder order = (InternalOrder) randomFrom(InternalHistogram.Order.KEY_ASC, InternalHistogram.Order.KEY_DESC);
+ BucketOrder order = randomFrom(BucketOrder.key(true), BucketOrder.key(false));
return new InternalDateHistogram(name, buckets, order, 1, 0L, null, format, keyed, pipelineAggregators, metaData);
}
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogramTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogramTests.java
index b4ecf828e7d..cb37dd9a373 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogramTests.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogramTests.java
@@ -22,11 +22,11 @@ package org.elasticsearch.search.aggregations.bucket.histogram;
import org.apache.lucene.util.TestUtil;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.search.DocValueFormat;
+import org.elasticsearch.search.aggregations.BucketOrder;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.InternalMultiBucketAggregationTestCase;
import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
-import org.elasticsearch.test.InternalAggregationTestCase;
import java.util.ArrayList;
import java.util.List;
@@ -58,7 +58,7 @@ public class InternalHistogramTests extends InternalMultiBucketAggregationTestCa
final int docCount = TestUtil.nextInt(random(), 1, 50);
buckets.add(new InternalHistogram.Bucket(base + i * interval, docCount, keyed, format, aggregations));
}
- InternalOrder order = (InternalOrder) randomFrom(InternalHistogram.Order.KEY_ASC, InternalHistogram.Order.KEY_DESC);
+ BucketOrder order = BucketOrder.key(randomBoolean());
return new InternalHistogram(name, buckets, order, 1, null, format, keyed, pipelineAggregators, metaData);
}
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/DoubleTermsTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/DoubleTermsTests.java
index 45531e27dde..c2a0b726b86 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/DoubleTermsTests.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/DoubleTermsTests.java
@@ -21,6 +21,7 @@ package org.elasticsearch.search.aggregations.bucket.terms;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.search.DocValueFormat;
+import org.elasticsearch.search.aggregations.BucketOrder;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
@@ -40,7 +41,7 @@ public class DoubleTermsTests extends InternalTermsTestCase {
InternalAggregations aggregations,
boolean showTermDocCountError,
long docCountError) {
- Terms.Order order = Terms.Order.count(false);
+ BucketOrder order = BucketOrder.count(false);
long minDocCount = 1;
int requiredSize = 3;
int shardSize = requiredSize + 2;
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/LongTermsTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/LongTermsTests.java
index cc97e4989a9..941997d3372 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/LongTermsTests.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/LongTermsTests.java
@@ -21,6 +21,7 @@ package org.elasticsearch.search.aggregations.bucket.terms;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.search.DocValueFormat;
+import org.elasticsearch.search.aggregations.BucketOrder;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
@@ -40,7 +41,7 @@ public class LongTermsTests extends InternalTermsTestCase {
InternalAggregations aggregations,
boolean showTermDocCountError,
long docCountError) {
- Terms.Order order = Terms.Order.count(false);
+ BucketOrder order = BucketOrder.count(false);
long minDocCount = 1;
int requiredSize = 3;
int shardSize = requiredSize + 2;
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsTests.java
index e909358be5e..bdafb139d78 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsTests.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsTests.java
@@ -22,6 +22,7 @@ package org.elasticsearch.search.aggregations.bucket.terms;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.search.DocValueFormat;
+import org.elasticsearch.search.aggregations.BucketOrder;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
@@ -41,7 +42,7 @@ public class StringTermsTests extends InternalTermsTestCase {
InternalAggregations aggregations,
boolean showTermDocCountError,
long docCountError) {
- Terms.Order order = Terms.Order.count(false);
+ BucketOrder order = BucketOrder.count(false);
long minDocCount = 1;
int requiredSize = 3;
int shardSize = requiredSize + 2;
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java
index 1648d8ede9f..7b93653fff8 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java
@@ -37,6 +37,7 @@ import org.elasticsearch.index.mapper.NumberFieldMapper;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
import org.elasticsearch.search.aggregations.AggregatorTestCase;
import org.elasticsearch.search.aggregations.InternalAggregation;
+import org.elasticsearch.search.aggregations.BucketOrder;
import org.elasticsearch.search.aggregations.support.ValueType;
import java.io.IOException;
@@ -70,7 +71,7 @@ public class TermsAggregatorTests extends AggregatorTestCase {
TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("_name", ValueType.STRING)
.executionHint(executionMode.toString())
.field("string")
- .order(Terms.Order.term(true));
+ .order(BucketOrder.key(true));
MappedFieldType fieldType = new KeywordFieldMapper.KeywordFieldType();
fieldType.setName("string");
fieldType.setHasDocValues(true);
@@ -99,7 +100,7 @@ public class TermsAggregatorTests extends AggregatorTestCase {
TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("_name", ValueType.LONG)
.executionHint(executionMode.toString())
.field("number")
- .order(Terms.Order.term(true));
+ .order(BucketOrder.key(true));
List<InternalAggregation> aggs = new ArrayList<>();
int numLongs = randomIntBetween(1, 3);
for (int i = 0; i < numLongs; i++) {
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AvgIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AvgIT.java
index c952f43eb30..c51c0aec4bb 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AvgIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/AvgIT.java
@@ -36,8 +36,8 @@ import org.elasticsearch.search.aggregations.bucket.filter.Filter;
import org.elasticsearch.search.aggregations.bucket.global.Global;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
-import org.elasticsearch.search.aggregations.bucket.terms.Terms.Order;
import org.elasticsearch.search.aggregations.metrics.avg.Avg;
+import org.elasticsearch.search.aggregations.BucketOrder;
import org.elasticsearch.search.lookup.LeafSearchLookup;
import org.elasticsearch.search.lookup.SearchLookup;
@@ -326,7 +326,7 @@ public class AvgIT extends AbstractNumericTestCase {
@Override
public void testOrderByEmptyAggregation() throws Exception {
SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(terms("terms").field("value").order(Order.compound(Order.aggregation("filter>avg", true)))
+ .addAggregation(terms("terms").field("value").order(BucketOrder.compound(BucketOrder.aggregation("filter>avg", true)))
.subAggregation(filter("filter", termQuery("value", 100)).subAggregation(avg("avg").field("value"))))
.get();
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsIT.java
index 3903dd8b0bc..7de333e8127 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsIT.java
@@ -30,9 +30,9 @@ import org.elasticsearch.search.aggregations.bucket.global.Global;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.search.aggregations.bucket.missing.Missing;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
-import org.elasticsearch.search.aggregations.bucket.terms.Terms.Order;
import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStats;
import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStats.Bounds;
+import org.elasticsearch.search.aggregations.BucketOrder;
import java.util.Collection;
import java.util.Collections;
@@ -595,7 +595,8 @@ public class ExtendedStatsIT extends AbstractNumericTestCase {
@Override
public void testOrderByEmptyAggregation() throws Exception {
SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(terms("terms").field("value").order(Order.compound(Order.aggregation("filter>extendedStats.avg", true)))
+ .addAggregation(terms("terms").field("value")
+ .order(BucketOrder.compound(BucketOrder.aggregation("filter>extendedStats.avg", true)))
.subAggregation(
filter("filter", termQuery("value", 100)).subAggregation(extendedStats("extendedStats").field("value"))))
.get();
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksIT.java
index 5b56e6b7efb..586af22755c 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentileRanksIT.java
@@ -29,11 +29,11 @@ import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.bucket.filter.Filter;
import org.elasticsearch.search.aggregations.bucket.global.Global;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
-import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Order;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.search.aggregations.metrics.percentiles.Percentile;
import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanks;
import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesMethod;
+import org.elasticsearch.search.aggregations.BucketOrder;
import java.util.Arrays;
import java.util.Collection;
@@ -485,7 +485,7 @@ public class HDRPercentileRanksIT extends AbstractNumericTestCase {
.subAggregation(
percentileRanks("percentile_ranks").field("value").method(PercentilesMethod.HDR)
.numberOfSignificantValueDigits(sigDigits).values(99))
- .order(Order.aggregation("percentile_ranks", "99", asc))).execute().actionGet();
+ .order(BucketOrder.aggregation("percentile_ranks", "99", asc))).execute().actionGet();
assertHitCount(searchResponse, 10);
@@ -506,7 +506,7 @@ public class HDRPercentileRanksIT extends AbstractNumericTestCase {
@Override
public void testOrderByEmptyAggregation() throws Exception {
SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(terms("terms").field("value").order(Terms.Order.compound(Terms.Order.aggregation("filter>ranks.99", true)))
+ .addAggregation(terms("terms").field("value").order(BucketOrder.compound(BucketOrder.aggregation("filter>ranks.99", true)))
.subAggregation(filter("filter", termQuery("value", 100))
.subAggregation(percentileRanks("ranks").method(PercentilesMethod.HDR).values(99).field("value"))))
.get();
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesIT.java
index 56fb14402ad..ae745e1f1ad 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/HDRPercentilesIT.java
@@ -30,11 +30,11 @@ import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.bucket.filter.Filter;
import org.elasticsearch.search.aggregations.bucket.global.Global;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
-import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Order;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.search.aggregations.metrics.percentiles.Percentile;
import org.elasticsearch.search.aggregations.metrics.percentiles.Percentiles;
import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesMethod;
+import org.elasticsearch.search.aggregations.BucketOrder;
import java.util.Arrays;
import java.util.Collection;
@@ -474,7 +474,7 @@ public class HDRPercentilesIT extends AbstractNumericTestCase {
.method(PercentilesMethod.HDR)
.numberOfSignificantValueDigits(sigDigits)
.percentiles(99))
- .order(Order.aggregation("percentiles", "99", asc))).execute().actionGet();
+ .order(BucketOrder.aggregation("percentiles", "99", asc))).execute().actionGet();
assertHitCount(searchResponse, 10);
@@ -497,7 +497,7 @@ public class HDRPercentilesIT extends AbstractNumericTestCase {
SearchResponse searchResponse = client().prepareSearch("idx")
.setQuery(matchAllQuery())
.addAggregation(
- terms("terms").field("value").order(Terms.Order.compound(Terms.Order.aggregation("filter>percentiles.99", true)))
+ terms("terms").field("value").order(BucketOrder.compound(BucketOrder.aggregation("filter>percentiles.99", true)))
.subAggregation(filter("filter", termQuery("value", 100))
.subAggregation(percentiles("percentiles").method(PercentilesMethod.HDR).field("value"))))
.get();
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxIT.java
index 03eb9a09237..a192b3c4a12 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxIT.java
@@ -29,8 +29,8 @@ import org.elasticsearch.search.aggregations.bucket.filter.Filter;
import org.elasticsearch.search.aggregations.bucket.global.Global;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
-import org.elasticsearch.search.aggregations.bucket.terms.Terms.Order;
import org.elasticsearch.search.aggregations.metrics.max.Max;
+import org.elasticsearch.search.aggregations.BucketOrder;
import java.util.Collection;
import java.util.Collections;
@@ -328,7 +328,7 @@ public class MaxIT extends AbstractNumericTestCase {
@Override
public void testOrderByEmptyAggregation() throws Exception {
SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(terms("terms").field("value").order(Order.compound(Order.aggregation("filter>max", true)))
+ .addAggregation(terms("terms").field("value").order(BucketOrder.compound(BucketOrder.aggregation("filter>max", true)))
.subAggregation(filter("filter", termQuery("value", 100)).subAggregation(max("max").field("value"))))
.get();
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/MinIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/MinIT.java
index cba2ba9eb97..7f2522c04bb 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/MinIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/MinIT.java
@@ -29,8 +29,8 @@ import org.elasticsearch.search.aggregations.bucket.filter.Filter;
import org.elasticsearch.search.aggregations.bucket.global.Global;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
-import org.elasticsearch.search.aggregations.bucket.terms.Terms.Order;
import org.elasticsearch.search.aggregations.metrics.min.Min;
+import org.elasticsearch.search.aggregations.BucketOrder;
import java.util.Collection;
import java.util.Collections;
@@ -340,7 +340,7 @@ public class MinIT extends AbstractNumericTestCase {
@Override
public void testOrderByEmptyAggregation() throws Exception {
SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(terms("terms").field("value").order(Order.compound(Order.aggregation("filter>min", true)))
+ .addAggregation(terms("terms").field("value").order(BucketOrder.compound(BucketOrder.aggregation("filter>min", true)))
.subAggregation(filter("filter", termQuery("value", 100)).subAggregation(min("min").field("value"))))
.get();
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsIT.java
index 9231f093963..0fcf794ee1d 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/StatsIT.java
@@ -32,8 +32,8 @@ import org.elasticsearch.search.aggregations.bucket.filter.Filter;
import org.elasticsearch.search.aggregations.bucket.global.Global;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
-import org.elasticsearch.search.aggregations.bucket.terms.Terms.Order;
import org.elasticsearch.search.aggregations.metrics.stats.Stats;
+import org.elasticsearch.search.aggregations.BucketOrder;
import java.util.Collection;
import java.util.Collections;
@@ -447,7 +447,7 @@ public class StatsIT extends AbstractNumericTestCase {
@Override
public void testOrderByEmptyAggregation() throws Exception {
SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(terms("terms").field("value").order(Order.compound(Order.aggregation("filter>stats.avg", true)))
+ .addAggregation(terms("terms").field("value").order(BucketOrder.compound(BucketOrder.aggregation("filter>stats.avg", true)))
.subAggregation(filter("filter", termQuery("value", 100)).subAggregation(stats("stats").field("value"))))
.get();
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/SumIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/SumIT.java
index 227ffc7251b..86f59659ebc 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/SumIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/SumIT.java
@@ -36,8 +36,8 @@ import org.elasticsearch.search.aggregations.bucket.filter.Filter;
import org.elasticsearch.search.aggregations.bucket.global.Global;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
-import org.elasticsearch.search.aggregations.bucket.terms.Terms.Order;
import org.elasticsearch.search.aggregations.metrics.sum.Sum;
+import org.elasticsearch.search.aggregations.BucketOrder;
import org.elasticsearch.search.lookup.LeafSearchLookup;
import org.elasticsearch.search.lookup.SearchLookup;
@@ -325,7 +325,7 @@ public class SumIT extends AbstractNumericTestCase {
@Override
public void testOrderByEmptyAggregation() throws Exception {
SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(terms("terms").field("value").order(Order.compound(Order.aggregation("filter>sum", true)))
+ .addAggregation(terms("terms").field("value").order(BucketOrder.compound(BucketOrder.aggregation("filter>sum", true)))
.subAggregation(filter("filter", termQuery("value", 100)).subAggregation(sum("sum").field("value"))))
.get();
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksIT.java
index f1943747ceb..11ff1edbc53 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentileRanksIT.java
@@ -30,12 +30,12 @@ import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.bucket.filter.Filter;
import org.elasticsearch.search.aggregations.bucket.global.Global;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
-import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Order;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.search.aggregations.metrics.percentiles.Percentile;
import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanks;
import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanksAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesMethod;
+import org.elasticsearch.search.aggregations.BucketOrder;
import java.util.Arrays;
import java.util.Collection;
@@ -435,7 +435,7 @@ public class TDigestPercentileRanksIT extends AbstractNumericTestCase {
.addAggregation(
histogram("histo").field("value").interval(2L)
.subAggregation(randomCompression(percentileRanks("percentile_ranks").field("value").values(99)))
- .order(Order.aggregation("percentile_ranks", "99", asc)))
+ .order(BucketOrder.aggregation("percentile_ranks", "99", asc)))
.execute().actionGet();
assertHitCount(searchResponse, 10);
@@ -457,7 +457,7 @@ public class TDigestPercentileRanksIT extends AbstractNumericTestCase {
@Override
public void testOrderByEmptyAggregation() throws Exception {
SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery())
- .addAggregation(terms("terms").field("value").order(Terms.Order.compound(Terms.Order.aggregation("filter>ranks.99", true)))
+ .addAggregation(terms("terms").field("value").order(BucketOrder.compound(BucketOrder.aggregation("filter>ranks.99", true)))
.subAggregation(filter("filter", termQuery("value", 100))
.subAggregation(percentileRanks("ranks").method(PercentilesMethod.TDIGEST).values(99).field("value"))))
.get();
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesIT.java
index 2589e9977a6..89c7d12c746 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TDigestPercentilesIT.java
@@ -30,12 +30,12 @@ import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.bucket.filter.Filter;
import org.elasticsearch.search.aggregations.bucket.global.Global;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
-import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Order;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.search.aggregations.metrics.percentiles.Percentile;
import org.elasticsearch.search.aggregations.metrics.percentiles.Percentiles;
import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesMethod;
+import org.elasticsearch.search.aggregations.BucketOrder;
import java.util.Arrays;
import java.util.Collection;
@@ -419,7 +419,7 @@ public class TDigestPercentilesIT extends AbstractNumericTestCase {
.addAggregation(
histogram("histo").field("value").interval(2L)
.subAggregation(randomCompression(percentiles("percentiles").field("value").percentiles(99)))
- .order(Order.aggregation("percentiles", "99", asc)))
+ .order(BucketOrder.aggregation("percentiles", "99", asc)))
.execute().actionGet();
assertHitCount(searchResponse, 10);
@@ -442,7 +442,7 @@ public class TDigestPercentilesIT extends AbstractNumericTestCase {
public void testOrderByEmptyAggregation() throws Exception {
SearchResponse searchResponse = client().prepareSearch("idx").setQuery(matchAllQuery())
.addAggregation(
- terms("terms").field("value").order(Terms.Order.compound(Terms.Order.aggregation("filter>percentiles.99", true)))
+ terms("terms").field("value").order(BucketOrder.compound(BucketOrder.aggregation("filter>percentiles.99", true)))
.subAggregation(filter("filter", termQuery("value", 100))
.subAggregation(percentiles("percentiles").method(PercentilesMethod.TDIGEST).field("value"))))
.get();
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java
index 6819fddf3e3..563fac1ba7d 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java
@@ -47,6 +47,7 @@ import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregatorFactory.ExecutionMode;
import org.elasticsearch.search.aggregations.metrics.max.Max;
import org.elasticsearch.search.aggregations.metrics.tophits.TopHits;
+import org.elasticsearch.search.aggregations.BucketOrder;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightField;
import org.elasticsearch.search.rescore.RescoreBuilder;
@@ -398,7 +399,7 @@ public class TopHitsIT extends ESIntegTestCase {
.executionHint(randomExecutionHint())
.collectMode(SubAggCollectionMode.BREADTH_FIRST)
.field(TERMS_AGGS_FIELD)
- .order(Terms.Order.aggregation("max", false))
+ .order(BucketOrder.aggregation("max", false))
.subAggregation(max("max").field(SORT_FIELD))
.subAggregation(topHits("hits").size(3))
).get();
@@ -494,7 +495,7 @@ public class TopHitsIT extends ESIntegTestCase {
.addAggregation(terms("terms")
.executionHint(randomExecutionHint())
.field(TERMS_AGGS_FIELD)
- .order(Terms.Order.aggregation("max_sort", false))
+ .order(BucketOrder.aggregation("max_sort", false))
.subAggregation(
topHits("hits").sort(SortBuilders.fieldSort(SORT_FIELD).order(SortOrder.DESC)).trackScores(true)
)
@@ -535,7 +536,7 @@ public class TopHitsIT extends ESIntegTestCase {
.setQuery(matchQuery("text", "term rare"))
.addAggregation(
terms("terms").executionHint(randomExecutionHint()).field("group")
- .order(Terms.Order.aggregation("max_score", false)).subAggregation(topHits("hits").size(1))
+ .order(BucketOrder.aggregation("max_score", false)).subAggregation(topHits("hits").size(1))
.subAggregation(max("max_score").field("value"))).get();
assertSearchResponse(response);
@@ -908,7 +909,6 @@ public class TopHitsIT extends ESIntegTestCase {
histogram("dates")
.field("date")
.interval(5)
- .order(Histogram.Order.aggregation("to-comments", true))
.subAggregation(
nested("to-comments", "comments")
.subAggregation(topHits("comments")
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketIT.java
index 4f6ff0e32ed..1d29518a300 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketIT.java
@@ -24,10 +24,10 @@ import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
-import org.elasticsearch.search.aggregations.bucket.terms.Terms.Order;
import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude;
import org.elasticsearch.search.aggregations.metrics.sum.Sum;
import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
+import org.elasticsearch.search.aggregations.BucketOrder;
import org.elasticsearch.test.ESIntegTestCase;
import java.util.ArrayList;
@@ -129,7 +129,7 @@ public class AvgBucketIT extends ESIntegTestCase {
.addAggregation(
terms("terms")
.field("tag")
- .order(Order.term(true))
+ .order(BucketOrder.key(true))
.subAggregation(
histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval)
.extendedBounds(minRandomValue, maxRandomValue))
@@ -211,7 +211,7 @@ public class AvgBucketIT extends ESIntegTestCase {
.addAggregation(
terms("terms")
.field("tag")
- .order(Order.term(true))
+ .order(BucketOrder.key(true))
.subAggregation(
histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval)
.extendedBounds(minRandomValue, maxRandomValue)
@@ -264,7 +264,7 @@ public class AvgBucketIT extends ESIntegTestCase {
.addAggregation(
terms("terms")
.field("tag")
- .order(Order.term(true))
+ .order(BucketOrder.key(true))
.subAggregation(
histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval)
.extendedBounds(minRandomValue, maxRandomValue)
@@ -337,7 +337,7 @@ public class AvgBucketIT extends ESIntegTestCase {
.addAggregation(
terms("terms")
.field("tag")
- .order(Order.term(true))
+ .order(BucketOrder.key(true))
.subAggregation(
histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval)
.extendedBounds(minRandomValue, maxRandomValue))
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketIT.java
index 607124ecb15..fea143bddcc 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketIT.java
@@ -26,12 +26,12 @@ import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
-import org.elasticsearch.search.aggregations.bucket.terms.Terms.Order;
import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude;
import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStats.Bounds;
import org.elasticsearch.search.aggregations.metrics.sum.Sum;
import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.extended.ExtendedStatsBucket;
+import org.elasticsearch.search.aggregations.BucketOrder;
import org.elasticsearch.test.ESIntegTestCase;
import java.util.ArrayList;
@@ -200,7 +200,7 @@ public class ExtendedStatsBucketIT extends ESIntegTestCase {
.addAggregation(
terms("terms")
.field("tag")
- .order(Order.term(true))
+ .order(BucketOrder.key(true))
.subAggregation(
histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval)
.extendedBounds(minRandomValue, maxRandomValue))
@@ -300,7 +300,7 @@ public class ExtendedStatsBucketIT extends ESIntegTestCase {
.addAggregation(
terms("terms")
.field("tag")
- .order(Order.term(true))
+ .order(BucketOrder.key(true))
.subAggregation(
histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval)
.extendedBounds(minRandomValue, maxRandomValue)
@@ -362,7 +362,7 @@ public class ExtendedStatsBucketIT extends ESIntegTestCase {
.addAggregation(
terms("terms")
.field("tag")
- .order(Order.term(true))
+ .order(BucketOrder.key(true))
.subAggregation(
histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval)
.extendedBounds(minRandomValue, maxRandomValue)
@@ -445,7 +445,7 @@ public class ExtendedStatsBucketIT extends ESIntegTestCase {
.addAggregation(
terms("terms")
.field("tag")
- .order(Order.term(true))
+ .order(BucketOrder.key(true))
.subAggregation(
histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval)
.extendedBounds(minRandomValue, maxRandomValue)
@@ -475,7 +475,7 @@ public class ExtendedStatsBucketIT extends ESIntegTestCase {
.addAggregation(
terms("terms")
.field("tag")
- .order(Order.term(true))
+ .order(BucketOrder.key(true))
.subAggregation(
histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval)
.extendedBounds(minRandomValue, maxRandomValue))
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketIT.java
index 632f11f7ec7..50b512ee194 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketIT.java
@@ -25,11 +25,11 @@ import org.elasticsearch.search.aggregations.bucket.filter.Filter;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
-import org.elasticsearch.search.aggregations.bucket.terms.Terms.Order;
import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude;
import org.elasticsearch.search.aggregations.metrics.sum.Sum;
import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.InternalBucketMetricValue;
+import org.elasticsearch.search.aggregations.BucketOrder;
import org.elasticsearch.test.ESIntegTestCase;
import java.util.ArrayList;
@@ -138,7 +138,7 @@ public class MaxBucketIT extends ESIntegTestCase {
.addAggregation(
terms("terms")
.field("tag")
- .order(Order.term(true))
+ .order(BucketOrder.key(true))
.subAggregation(
histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval)
.extendedBounds(minRandomValue, maxRandomValue))
@@ -230,7 +230,7 @@ public class MaxBucketIT extends ESIntegTestCase {
.addAggregation(
terms("terms")
.field("tag")
- .order(Order.term(true))
+ .order(BucketOrder.key(true))
.subAggregation(
histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval)
.extendedBounds(minRandomValue, maxRandomValue)
@@ -335,7 +335,7 @@ public class MaxBucketIT extends ESIntegTestCase {
.addAggregation(
terms("terms")
.field("tag")
- .order(Order.term(true))
+ .order(BucketOrder.key(true))
.subAggregation(
histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval)
.extendedBounds(minRandomValue, maxRandomValue)
@@ -413,7 +413,7 @@ public class MaxBucketIT extends ESIntegTestCase {
.addAggregation(
terms("terms")
.field("tag")
- .order(Order.term(true))
+ .order(BucketOrder.key(true))
.subAggregation(
histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval)
.extendedBounds(minRandomValue, maxRandomValue))
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/MinBucketIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/MinBucketIT.java
index 04fdd0c3133..33678f146db 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/MinBucketIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/MinBucketIT.java
@@ -24,11 +24,11 @@ import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
-import org.elasticsearch.search.aggregations.bucket.terms.Terms.Order;
import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude;
import org.elasticsearch.search.aggregations.metrics.sum.Sum;
import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.InternalBucketMetricValue;
+import org.elasticsearch.search.aggregations.BucketOrder;
import org.elasticsearch.test.ESIntegTestCase;
import java.util.ArrayList;
@@ -135,7 +135,7 @@ public class MinBucketIT extends ESIntegTestCase {
.addAggregation(
terms("terms")
.field("tag")
- .order(Order.term(true))
+ .order(BucketOrder.key(true))
.subAggregation(
histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval)
.extendedBounds(minRandomValue, maxRandomValue))
@@ -227,7 +227,7 @@ public class MinBucketIT extends ESIntegTestCase {
.addAggregation(
terms("terms")
.field("tag")
- .order(Order.term(true))
+ .order(BucketOrder.key(true))
.subAggregation(
histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval)
.extendedBounds(minRandomValue, maxRandomValue)
@@ -285,7 +285,7 @@ public class MinBucketIT extends ESIntegTestCase {
.addAggregation(
terms("terms")
.field("tag")
- .order(Order.term(true))
+ .order(BucketOrder.key(true))
.subAggregation(
histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval)
.extendedBounds(minRandomValue, maxRandomValue)
@@ -363,7 +363,7 @@ public class MinBucketIT extends ESIntegTestCase {
.addAggregation(
terms("terms")
.field("tag")
- .order(Order.term(true))
+ .order(BucketOrder.key(true))
.subAggregation(
histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval)
.extendedBounds(minRandomValue, maxRandomValue))
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketIT.java
index e23e5441431..62f9ad462e9 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketIT.java
@@ -29,6 +29,7 @@ import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude
import org.elasticsearch.search.aggregations.metrics.percentiles.Percentile;
import org.elasticsearch.search.aggregations.metrics.sum.Sum;
import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile.PercentilesBucket;
+import org.elasticsearch.search.aggregations.BucketOrder;
import org.elasticsearch.test.ESIntegTestCase;
import java.util.ArrayList;
@@ -133,7 +134,7 @@ public class PercentilesBucketIT extends ESIntegTestCase {
.addAggregation(
terms("terms")
.field("tag")
- .order(Terms.Order.term(true))
+ .order(BucketOrder.key(true))
.subAggregation(
histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval)
.extendedBounds(minRandomValue, maxRandomValue))
@@ -248,7 +249,7 @@ public class PercentilesBucketIT extends ESIntegTestCase {
.addAggregation(
terms("terms")
.field("tag")
- .order(Terms.Order.term(true))
+ .order(BucketOrder.key(true))
.subAggregation(
histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval)
.extendedBounds(minRandomValue, maxRandomValue)
@@ -301,7 +302,7 @@ public class PercentilesBucketIT extends ESIntegTestCase {
.addAggregation(
terms("terms")
.field("tag")
- .order(Terms.Order.term(true))
+ .order(BucketOrder.key(true))
.subAggregation(
histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval)
.extendedBounds(minRandomValue, maxRandomValue)
@@ -435,7 +436,7 @@ public class PercentilesBucketIT extends ESIntegTestCase {
.addAggregation(
terms("terms")
.field("tag")
- .order(Terms.Order.term(true))
+ .order(BucketOrder.key(true))
.subAggregation(
histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval)
.extendedBounds(minRandomValue, maxRandomValue))
@@ -466,7 +467,7 @@ public class PercentilesBucketIT extends ESIntegTestCase {
.addAggregation(
terms("terms")
.field("tag")
- .order(Terms.Order.term(true))
+ .order(BucketOrder.key(true))
.subAggregation(
histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval)
.extendedBounds(minRandomValue, maxRandomValue))
@@ -525,7 +526,7 @@ public class PercentilesBucketIT extends ESIntegTestCase {
.addAggregation(
terms("terms")
.field("tag")
- .order(Terms.Order.term(true))
+ .order(BucketOrder.key(true))
.subAggregation(
histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval)
.extendedBounds(minRandomValue, maxRandomValue))
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/StatsBucketIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/StatsBucketIT.java
index 231005f1b5b..ff5a85f198e 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/StatsBucketIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/StatsBucketIT.java
@@ -24,11 +24,11 @@ import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
-import org.elasticsearch.search.aggregations.bucket.terms.Terms.Order;
import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude;
import org.elasticsearch.search.aggregations.metrics.sum.Sum;
import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.stats.StatsBucket;
+import org.elasticsearch.search.aggregations.BucketOrder;
import org.elasticsearch.test.ESIntegTestCase;
import java.util.ArrayList;
@@ -136,7 +136,7 @@ public class StatsBucketIT extends ESIntegTestCase {
.addAggregation(
terms("terms")
.field("tag")
- .order(Order.term(true))
+ .order(BucketOrder.key(true))
.subAggregation(
histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval)
.extendedBounds(minRandomValue, maxRandomValue))
@@ -230,7 +230,7 @@ public class StatsBucketIT extends ESIntegTestCase {
.addAggregation(
terms("terms")
.field("tag")
- .order(Order.term(true))
+ .order(BucketOrder.key(true))
.subAggregation(
histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval)
.extendedBounds(minRandomValue, maxRandomValue)
@@ -289,7 +289,7 @@ public class StatsBucketIT extends ESIntegTestCase {
.addAggregation(
terms("terms")
.field("tag")
- .order(Order.term(true))
+ .order(BucketOrder.key(true))
.subAggregation(
histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval)
.extendedBounds(minRandomValue, maxRandomValue)
@@ -368,7 +368,7 @@ public class StatsBucketIT extends ESIntegTestCase {
.addAggregation(
terms("terms")
.field("tag")
- .order(Order.term(true))
+ .order(BucketOrder.key(true))
.subAggregation(
histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval)
.extendedBounds(minRandomValue, maxRandomValue))
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/SumBucketIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/SumBucketIT.java
index 048dfac8648..69451435d58 100644
--- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/SumBucketIT.java
+++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/SumBucketIT.java
@@ -24,10 +24,10 @@ import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
-import org.elasticsearch.search.aggregations.bucket.terms.Terms.Order;
import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude;
import org.elasticsearch.search.aggregations.metrics.sum.Sum;
import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
+import org.elasticsearch.search.aggregations.BucketOrder;
import org.elasticsearch.test.ESIntegTestCase;
import java.util.ArrayList;
@@ -126,7 +126,7 @@ public class SumBucketIT extends ESIntegTestCase {
.addAggregation(
terms("terms")
.field("tag")
- .order(Order.term(true))
+ .order(BucketOrder.key(true))
.subAggregation(
histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval)
.extendedBounds(minRandomValue, maxRandomValue))
@@ -202,7 +202,7 @@ public class SumBucketIT extends ESIntegTestCase {
.addAggregation(
terms("terms")
.field("tag")
- .order(Order.term(true))
+ .order(BucketOrder.key(true))
.subAggregation(
histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval)
.extendedBounds(minRandomValue, maxRandomValue)
@@ -252,7 +252,7 @@ public class SumBucketIT extends ESIntegTestCase {
.addAggregation(
terms("terms")
.field("tag")
- .order(Order.term(true))
+ .order(BucketOrder.key(true))
.subAggregation(
histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval)
.extendedBounds(minRandomValue, maxRandomValue)
@@ -322,7 +322,7 @@ public class SumBucketIT extends ESIntegTestCase {
.addAggregation(
terms("terms")
.field("tag")
- .order(Order.term(true))
+ .order(BucketOrder.key(true))
.subAggregation(
histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval)
.extendedBounds(minRandomValue, maxRandomValue))
diff --git a/docs/java-api/aggregations/bucket/datehistogram-aggregation.asciidoc b/docs/java-api/aggregations/bucket/datehistogram-aggregation.asciidoc
index 99b871730e6..1fe945077fd 100644
--- a/docs/java-api/aggregations/bucket/datehistogram-aggregation.asciidoc
+++ b/docs/java-api/aggregations/bucket/datehistogram-aggregation.asciidoc
@@ -67,3 +67,7 @@ key [2005-01-01T00:00:00.000Z], date [2005], doc_count [1]
key [2007-01-01T00:00:00.000Z], date [2007], doc_count [2]
key [2008-01-01T00:00:00.000Z], date [2008], doc_count [3]
--------------------------------------------------
+
+===== Order
+
+Supports the same order functionality as the <<java-aggs-bucket-terms,`Terms` Aggregation>>.
diff --git a/docs/java-api/aggregations/bucket/histogram-aggregation.asciidoc b/docs/java-api/aggregations/bucket/histogram-aggregation.asciidoc
index 28e9cd3ecd0..59bb555401c 100644
--- a/docs/java-api/aggregations/bucket/histogram-aggregation.asciidoc
+++ b/docs/java-api/aggregations/bucket/histogram-aggregation.asciidoc
@@ -42,3 +42,7 @@ for (Histogram.Bucket entry : agg.getBuckets()) {
logger.info("key [{}], doc_count [{}]", key, docCount);
}
--------------------------------------------------
+
+===== Order
+
+Supports the same order functionality as the <<java-aggs-bucket-terms,`Terms` Aggregation>>.
diff --git a/docs/java-api/aggregations/bucket/terms-aggregation.asciidoc b/docs/java-api/aggregations/bucket/terms-aggregation.asciidoc
index ad83faccd31..db584fd4ced 100644
--- a/docs/java-api/aggregations/bucket/terms-aggregation.asciidoc
+++ b/docs/java-api/aggregations/bucket/terms-aggregation.asciidoc
@@ -39,7 +39,14 @@ for (Terms.Bucket entry : genders.getBuckets()) {
}
--------------------------------------------------
-==== Order
+===== Order
+
+Import bucket ordering strategy classes:
+
+[source,java]
+--------------------------------------------------
+import org.elasticsearch.search.aggregations.BucketOrder;
+--------------------------------------------------
Ordering the buckets by their `doc_count` in an ascending manner:
@@ -48,7 +55,7 @@ Ordering the buckets by their `doc_count` in an ascending manner:
AggregationBuilders
.terms("genders")
.field("gender")
- .order(Terms.Order.count(true))
+ .order(BucketOrder.count(true))
--------------------------------------------------
Ordering the buckets alphabetically by their terms in an ascending manner:
@@ -58,7 +65,7 @@ Ordering the buckets alphabetically by their terms in an ascending manner:
AggregationBuilders
.terms("genders")
.field("gender")
- .order(Terms.Order.term(true))
+ .order(BucketOrder.key(true))
--------------------------------------------------
Ordering the buckets by single value metrics sub-aggregation (identified by the aggregation name):
@@ -68,7 +75,22 @@ Ordering the buckets by single value metrics sub-aggregation (identified by the
AggregationBuilders
.terms("genders")
.field("gender")
- .order(Terms.Order.aggregation("avg_height", false))
+ .order(BucketOrder.aggregation("avg_height", false))
+ .subAggregation(
+ AggregationBuilders.avg("avg_height").field("height")
+ )
+--------------------------------------------------
+
+Ordering the buckets by multiple criteria:
+
+[source,java]
+--------------------------------------------------
+AggregationBuilders
+ .terms("genders")
+ .field("gender")
+ .order(BucketOrder.compound( // in order of priority:
+ BucketOrder.aggregation("avg_height", false), // sort by sub-aggregation first
+ BucketOrder.count(true))) // then bucket count as a tie-breaker
.subAggregation(
AggregationBuilders.avg("avg_height").field("height")
)
diff --git a/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc b/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc
index b7619b175df..47265a0b224 100644
--- a/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc
+++ b/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc
@@ -390,3 +390,10 @@ POST /sales/_search?size=0
// TEST[setup:sales]
<1> Documents without a value in the `publish_date` field will fall into the same bucket as documents that have the value `2000-01-01`.
+
+==== Order
+
+By default the returned buckets are sorted by their `key` ascending, though the order behaviour can be controlled using
+the `order` setting. Supports the same `order` functionality as the <<search-aggregations-bucket-terms-aggregation-order,`Terms` aggregation>>.
+
+deprecated[6.0.0, Use `_key` instead of `_time` to order buckets by their dates/keys]
diff --git a/docs/reference/aggregations/bucket/histogram-aggregation.asciidoc b/docs/reference/aggregations/bucket/histogram-aggregation.asciidoc
index de828c62aa9..380d06258da 100644
--- a/docs/reference/aggregations/bucket/histogram-aggregation.asciidoc
+++ b/docs/reference/aggregations/bucket/histogram-aggregation.asciidoc
@@ -179,120 +179,8 @@ POST /sales/_search?size=0
==== Order
-By default the returned buckets are sorted by their `key` ascending, though the order behaviour can be controlled
-using the `order` setting.
-
-Ordering the buckets by their key - descending:
-
-[source,js]
---------------------------------------------------
-POST /sales/_search?size=0
-{
- "aggs" : {
- "prices" : {
- "histogram" : {
- "field" : "price",
- "interval" : 50,
- "order" : { "_key" : "desc" }
- }
- }
- }
-}
---------------------------------------------------
-// CONSOLE
-// TEST[setup:sales]
-
-Ordering the buckets by their `doc_count` - ascending:
-
-[source,js]
---------------------------------------------------
-POST /sales/_search?size=0
-{
- "aggs" : {
- "prices" : {
- "histogram" : {
- "field" : "price",
- "interval" : 50,
- "order" : { "_count" : "asc" }
- }
- }
- }
-}
---------------------------------------------------
-// CONSOLE
-// TEST[setup:sales]
-
-If the histogram aggregation has a direct metrics sub-aggregation, the latter can determine the order of the buckets:
-
-[source,js]
---------------------------------------------------
-POST /sales/_search?size=0
-{
- "aggs" : {
- "prices" : {
- "histogram" : {
- "field" : "price",
- "interval" : 50,
- "order" : { "price_stats.min" : "asc" } <1>
- },
- "aggs" : {
- "price_stats" : { "stats" : {"field" : "price"} }
- }
- }
- }
-}
---------------------------------------------------
-// CONSOLE
-// TEST[setup:sales]
-
-<1> The `{ "price_stats.min" : asc" }` will sort the buckets based on `min` value of their `price_stats` sub-aggregation.
-
-It is also possible to order the buckets based on a "deeper" aggregation in the hierarchy. This is supported as long
-as the aggregations path are of a single-bucket type, where the last aggregation in the path may either by a single-bucket
-one or a metrics one. If it's a single-bucket type, the order will be defined by the number of docs in the bucket (i.e. `doc_count`),
-in case it's a metrics one, the same rules as above apply (where the path must indicate the metric name to sort by in case of
-a multi-value metrics aggregation, and in case of a single-value metrics aggregation the sort will be applied on that value).
-
-The path must be defined in the following form:
-
-// https://en.wikipedia.org/wiki/Extended_Backus%E2%80%93Naur_Form
-[source,ebnf]
---------------------------------------------------
-AGG_SEPARATOR = '>' ;
-METRIC_SEPARATOR = '.' ;
-AGG_NAME = ;
-METRIC = ;
-PATH = [ , ]* [ , ] ;
---------------------------------------------------
-
-[source,js]
---------------------------------------------------
-POST /sales/_search?size=0
-{
- "aggs" : {
- "prices" : {
- "histogram" : {
- "field" : "price",
- "interval" : 50,
- "order" : { "promoted_products>rating_stats.avg" : "desc" } <1>
- },
- "aggs" : {
- "promoted_products" : {
- "filter" : { "term" : { "promoted" : true }},
- "aggs" : {
- "rating_stats" : { "stats" : { "field" : "rating" }}
- }
- }
- }
- }
- }
-}
---------------------------------------------------
-// CONSOLE
-// TEST[setup:sales]
-
-The above will sort the buckets based on the avg rating among the promoted products
-
+By default the returned buckets are sorted by their `key` ascending, though the order behaviour can be controlled using
+the `order` setting. Supports the same `order` functionality as the <<search-aggregations-bucket-terms-aggregation-order,`Terms` aggregation>>.
==== Offset
diff --git a/docs/reference/aggregations/bucket/terms-aggregation.asciidoc b/docs/reference/aggregations/bucket/terms-aggregation.asciidoc
index 0b028c1a940..90a5586d9e4 100644
--- a/docs/reference/aggregations/bucket/terms-aggregation.asciidoc
+++ b/docs/reference/aggregations/bucket/terms-aggregation.asciidoc
@@ -280,13 +280,14 @@ Ordering the buckets alphabetically by their terms in an ascending manner:
"genres" : {
"terms" : {
"field" : "genre",
- "order" : { "_term" : "asc" }
+ "order" : { "_key" : "asc" }
}
}
}
}
--------------------------------------------------
+deprecated[6.0.0, Use `_key` instead of `_term` to order buckets by their term]
Ordering the buckets by single value metrics sub-aggregation (identified by the aggregation name):
diff --git a/docs/reference/migration/migrate_6_0/java.asciidoc b/docs/reference/migration/migrate_6_0/java.asciidoc
index 5693d508526..43feb15f84b 100644
--- a/docs/reference/migration/migrate_6_0/java.asciidoc
+++ b/docs/reference/migration/migrate_6_0/java.asciidoc
@@ -26,4 +26,12 @@ When sending a request through the request builders e.g. client.prepareSearch().
be possible to call `addListener` against the returned `ListenableActionFuture`. With this change an
`ActionFuture` is returned instead, which is consistent with what the `Client` methods return, hence
it is not possible to associate the future with listeners. The `execute` method that accept a listener
-as an argument can be used instead.
\ No newline at end of file
+as an argument can be used instead.
+
+==== `Terms.Order` and `Histogram.Order` classes replaced by `BucketOrder`
+
+The `terms`, `histogram`, and `date_histogram` aggregation code has been refactored to use common
+code for ordering buckets. The `BucketOrder` class must be used instead of `Terms.Order` and
+`Histogram.Order`. The `static` methods in the `BucketOrder` class must be called instead of directly
+accessing internal order instances, e.g. `BucketOrder.count(boolean)` and `BucketOrder.aggregation(String, boolean)`.
+Use `BucketOrder.key(boolean)` to order the `terms` aggregation buckets by `_term`.
diff --git a/docs/reference/migration/migrate_6_0/search.asciidoc b/docs/reference/migration/migrate_6_0/search.asciidoc
index 80d67eae72d..82c2ba8f717 100644
--- a/docs/reference/migration/migrate_6_0/search.asciidoc
+++ b/docs/reference/migration/migrate_6_0/search.asciidoc
@@ -51,6 +51,8 @@
* The `disable_coord` parameter of the `bool` and `common_terms` queries has
been removed. If provided, it will be ignored and issue a deprecation warning.
+* The `template` query has been removed. This query was deprecated since 5.0
+
==== Search shards API
The search shards API no longer accepts the `type` url parameter, which didn't
diff --git a/docs/reference/modules/scripting.asciidoc b/docs/reference/modules/scripting.asciidoc
index 460b3be461d..c6a8a252b93 100644
--- a/docs/reference/modules/scripting.asciidoc
+++ b/docs/reference/modules/scripting.asciidoc
@@ -51,7 +51,7 @@ certain tasks.
|built-in
|templates
-|<>
+|<>
|n/a
|you write it!
|expert API
@@ -83,6 +83,4 @@ include::scripting/painless-debugging.asciidoc[]
include::scripting/expression.asciidoc[]
-include::scripting/native.asciidoc[]
-
-include::scripting/advanced-scripting.asciidoc[]
+include::scripting/engine.asciidoc[]
diff --git a/docs/reference/modules/scripting/advanced-scripting.asciidoc b/docs/reference/modules/scripting/advanced-scripting.asciidoc
deleted file mode 100644
index a5fcc12777d..00000000000
--- a/docs/reference/modules/scripting/advanced-scripting.asciidoc
+++ /dev/null
@@ -1,189 +0,0 @@
-[[modules-advanced-scripting]]
-=== Advanced text scoring in scripts
-
-experimental[The functionality described on this page is considered experimental and may be changed or removed in a future release]
-
-Text features, such as term or document frequency for a specific term can be
-accessed in scripts with the `_index` variable. This can be useful if, for
-example, you want to implement your own scoring model using for example a
-script inside a <>.
-Statistics over the document collection are computed *per shard*, not per
-index.
-
-It should be noted that the `_index` variable is not supported in the painless language, but `_index` is defined when using the groovy language.
-
-[float]
-=== Nomenclature:
-
-
-[horizontal]
-`df`::
-
- document frequency. The number of documents a term appears in. Computed
- per field.
-
-
-`tf`::
-
- term frequency. The number times a term appears in a field in one specific
- document.
-
-`ttf`::
-
- total term frequency. The number of times this term appears in all
- documents, that is, the sum of `tf` over all documents. Computed per
- field.
-
-`df` and `ttf` are computed per shard and therefore these numbers can vary
-depending on the shard the current document resides in.
-
-
-[float]
-=== Shard statistics:
-
-`_index.numDocs()`::
-
- Number of documents in shard.
-
-`_index.maxDoc()`::
-
- Maximal document number in shard.
-
-`_index.numDeletedDocs()`::
-
- Number of deleted documents in shard.
-
-
-[float]
-=== Field statistics:
-
-Field statistics can be accessed with a subscript operator like this:
-`_index['FIELD']`.
-
-
-`_index['FIELD'].docCount()`::
-
- Number of documents containing the field `FIELD`. Does not take deleted documents into account.
-
-`_index['FIELD'].sumttf()`::
-
- Sum of `ttf` over all terms that appear in field `FIELD` in all documents.
-
-`_index['FIELD'].sumdf()`::
-
- The sum of `df` s over all terms that appear in field `FIELD` in all
- documents.
-
-
-Field statistics are computed per shard and therefore these numbers can vary
-depending on the shard the current document resides in.
-The number of terms in a field cannot be accessed using the `_index` variable. See <> for how to do that.
-
-[float]
-=== Term statistics:
-
-Term statistics for a field can be accessed with a subscript operator like
-this: `_index['FIELD']['TERM']`. This will never return null, even if term or field does not exist.
-If you do not need the term frequency, call `_index['FIELD'].get('TERM', 0)`
-to avoid unnecessary initialization of the frequencies. The flag will have only
-affect is your set the <> to `docs`.
-
-
-`_index['FIELD']['TERM'].df()`::
-
- `df` of term `TERM` in field `FIELD`. Will be returned, even if the term
- is not present in the current document.
-
-`_index['FIELD']['TERM'].ttf()`::
-
- The sum of term frequencies of term `TERM` in field `FIELD` over all
- documents. Will be returned, even if the term is not present in the
- current document.
-
-`_index['FIELD']['TERM'].tf()`::
-
- `tf` of term `TERM` in field `FIELD`. Will be 0 if the term is not present
- in the current document.
-
-
-[float]
-=== Term positions, offsets and payloads:
-
-If you need information on the positions of terms in a field, call
-`_index['FIELD'].get('TERM', flag)` where flag can be
-
-[horizontal]
-`_POSITIONS`:: if you need the positions of the term
-`_OFFSETS`:: if you need the offsets of the term
-`_PAYLOADS`:: if you need the payloads of the term
-`_CACHE`:: if you need to iterate over all positions several times
-
-The iterator uses the underlying lucene classes to iterate over positions. For efficiency reasons, you can only iterate over positions once. If you need to iterate over the positions several times, set the `_CACHE` flag.
-
-You can combine the operators with a `|` if you need more than one info. For
-example, the following will return an object holding the positions and payloads,
-as well as all statistics:
-
-
- `_index['FIELD'].get('TERM', _POSITIONS | _PAYLOADS)`
-
-
-Positions can be accessed with an iterator that returns an object
-(`POS_OBJECT`) holding position, offsets and payload for each term position.
-
-`POS_OBJECT.position`::
-
- The position of the term.
-
-`POS_OBJECT.startOffset`::
-
- The start offset of the term.
-
-`POS_OBJECT.endOffset`::
-
- The end offset of the term.
-
-`POS_OBJECT.payload`::
-
- The payload of the term.
-
-`POS_OBJECT.payloadAsInt(missingValue)`::
-
- The payload of the term converted to integer. If the current position has
- no payload, the `missingValue` will be returned. Call this only if you
- know that your payloads are integers.
-
-`POS_OBJECT.payloadAsFloat(missingValue)`::
-
- The payload of the term converted to float. If the current position has no
- payload, the `missingValue` will be returned. Call this only if you know
- that your payloads are floats.
-
-`POS_OBJECT.payloadAsString()`::
-
- The payload of the term converted to string. If the current position has
- no payload, `null` will be returned. Call this only if you know that your
- payloads are strings.
-
-
-Example: sums up all payloads for the term `foo`.
-
-[source,groovy]
----------------------------------------------------------
-termInfo = _index['my_field'].get('foo',_PAYLOADS);
-score = 0;
-for (pos in termInfo) {
- score = score + pos.payloadAsInt(0);
-}
-return score;
----------------------------------------------------------
-
-
-[float]
-=== Term vectors:
-
-The `_index` variable can only be used to gather statistics for single terms. If you want to use information on all terms in a field, you must store the term vectors (see <>). To access them, call
-`_index.termVectors()` to get a
-https://lucene.apache.org/core/4_0_0/core/org/apache/lucene/index/Fields.html[Fields]
-instance. This object can then be used as described in https://lucene.apache.org/core/4_0_0/core/org/apache/lucene/index/Fields.html[lucene doc] to iterate over fields and then for each field iterate over each term in the field.
-The method will return null if the term vectors were not stored.
diff --git a/docs/reference/modules/scripting/engine.asciidoc b/docs/reference/modules/scripting/engine.asciidoc
new file mode 100644
index 00000000000..207722d8feb
--- /dev/null
+++ b/docs/reference/modules/scripting/engine.asciidoc
@@ -0,0 +1,57 @@
+[[modules-scripting-engine]]
+=== Advanced scripts using script engines
+
+A `ScriptEngine` is a backend for implementing a scripting language. It may also
+be used to write scripts that need to use advanced internals of scripting. For example,
+a script that wants to use term frequencies while scoring.
+
+The plugin {plugins}/plugin-authors.html[documentation] has more information on
+how to write a plugin so that Elasticsearch will properly load it. To register
+the `ScriptEngine`, your plugin should implement the `ScriptPlugin` interface
+and override the `getScriptEngine(Settings settings)` method.
+
+The following is an example of a custom `ScriptEngine` which uses the language
+name `expert_scripts`. It implements a single script called `pure_df` which
+may be used as a search script to override each document's score as
+the document frequency of a provided term.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{docdir}/../../plugins/examples/script-expert-scoring/src/main/java/org/elasticsearch/example/expertscript/ExpertScriptPlugin.java[expert_engine]
+--------------------------------------------------
+
+You can execute the script by specifying its `lang` as `expert_scripts`, and the name
+of the script as the script source:
+
+
+[source,js]
+--------------------------------------------------
+POST /_search
+{
+ "query": {
+ "function_score": {
+ "query": {
+ "match": {
+ "body": "foo"
+ }
+ },
+ "functions": [
+ {
+ "script_score": {
+ "script": {
+ "inline": "pure_df",
+ "lang" : "expert_scripts",
+ "params": {
+ "field": "body",
+ "term": "foo"
+ }
+ }
+ }
+ }
+ ]
+ }
+ }
+}
+--------------------------------------------------
+// CONSOLE
+// TEST[skip:we don't have an expert script plugin installed to test this]
diff --git a/docs/reference/modules/scripting/native.asciidoc b/docs/reference/modules/scripting/native.asciidoc
deleted file mode 100644
index bc0ad3bcaf9..00000000000
--- a/docs/reference/modules/scripting/native.asciidoc
+++ /dev/null
@@ -1,86 +0,0 @@
-[[modules-scripting-native]]
-=== Native (Java) Scripts
-
-Sometimes `painless` and <> aren't enough. For those times you can
-implement a native script.
-
-The best way to implement a native script is to write a plugin and install it.
-The plugin {plugins}/plugin-authors.html[documentation] has more information on
-how to write a plugin so that Elasticsearch will properly load it.
-
-To register the actual script you'll need to implement `NativeScriptFactory`
-to construct the script. The actual script will extend either
-`AbstractExecutableScript` or `AbstractSearchScript`. The second one is likely
-the most useful and has several helpful subclasses you can extend like
-`AbstractLongSearchScript` and `AbstractDoubleSearchScript`.
-Finally, your plugin should register the native script by implementing the
-`ScriptPlugin` interface.
-
-If you squashed the whole thing into one class it'd look like:
-
-[source,java]
---------------------------------------------------
-public class MyNativeScriptPlugin extends Plugin implements ScriptPlugin {
-
- @Override
- public List getNativeScripts() {
- return Collections.singletonList(new MyNativeScriptFactory());
- }
-
- public static class MyNativeScriptFactory implements NativeScriptFactory {
- @Override
- public ExecutableScript newScript(@Nullable Map params) {
- return new MyNativeScript();
- }
- @Override
- public boolean needsScores() {
- return false;
- }
- @Override
- public String getName() {
- return "my_script";
- }
- }
-
- public static class MyNativeScript extends AbstractDoubleSearchScript {
- @Override
- public double runAsDouble() {
- double a = (double) source().get("a");
- double b = (double) source().get("b");
- return a * b;
- }
- }
-}
---------------------------------------------------
-
-You can execute the script by specifying its `lang` as `native`, and the name
-of the script as the `id`:
-
-
-[source,js]
---------------------------------------------------
-POST /_search
-{
- "query": {
- "function_score": {
- "query": {
- "match": {
- "body": "foo"
- }
- },
- "functions": [
- {
- "script_score": {
- "script": {
- "inline": "my_script",
- "lang" : "native"
- }
- }
- }
- ]
- }
- }
-}
---------------------------------------------------
-// CONSOLE
-// TEST[skip:we don't have a native plugin installed to test this]
diff --git a/docs/reference/query-dsl/special-queries.asciidoc b/docs/reference/query-dsl/special-queries.asciidoc
index b705f01c6fc..3e3c140d6f5 100644
--- a/docs/reference/query-dsl/special-queries.asciidoc
+++ b/docs/reference/query-dsl/special-queries.asciidoc
@@ -9,12 +9,6 @@ This group contains queries which do not fit into the other groups:
This query finds documents which are similar to the specified text, document,
or collection of documents.
-<>::
-
-The `template` query accepts a Mustache template (either inline, indexed, or
-from a file), and a map of parameters, and combines the two to generate the
-final query to execute.
-
<>::
This query allows a script to act as a filter. Also see the
@@ -27,8 +21,6 @@ the specified document.
include::mlt-query.asciidoc[]
-include::template-query.asciidoc[]
-
include::script-query.asciidoc[]
include::percolate-query.asciidoc[]
diff --git a/docs/reference/query-dsl/template-query.asciidoc b/docs/reference/query-dsl/template-query.asciidoc
deleted file mode 100644
index 2d3b5724d49..00000000000
--- a/docs/reference/query-dsl/template-query.asciidoc
+++ /dev/null
@@ -1,127 +0,0 @@
-[[query-dsl-template-query]]
-=== Template Query
-
-deprecated[5.0.0, Use the <> API]
-
-A query that accepts a query template and a map of key/value pairs to fill in
-template parameters. Templating is based on Mustache. For simple token substitution all you provide
-is a query containing some variable that you want to substitute and the actual
-values:
-
-[source,js]
-------------------------------------------
-GET /_search
-{
- "query": {
- "template": {
- "inline": { "match": { "text": "{{query_string}}" }},
- "params" : {
- "query_string" : "all about search"
- }
- }
- }
-}
-------------------------------------------
-// CONSOLE
-// TEST[warning:[template] query is deprecated, use search template api instead]
-
-The above request is translated into:
-
-[source,js]
-------------------------------------------
-GET /_search
-{
- "query": {
- "match": {
- "text": "all about search"
- }
- }
-}
-------------------------------------------
-// CONSOLE
-
-Alternatively passing the template as an escaped string works as well:
-
-[source,js]
-------------------------------------------
-GET /_search
-{
- "query": {
- "template": {
- "inline": "{ \"match\": { \"text\": \"{{query_string}}\" }}", <1>
- "params" : {
- "query_string" : "all about search"
- }
- }
- }
-}
-------------------------------------------
-// CONSOLE
-// TEST[warning:[template] query is deprecated, use search template api instead]
-
-<1> New line characters (`\n`) should be escaped as `\\n` or removed,
- and quotes (`"`) should be escaped as `\\"`.
-
-==== Stored templates
-
-You can register a template by storing it in the `config/scripts` directory, in a file using the `.mustache` extension.
-In order to execute the stored template, reference it by name in the `file`
-parameter:
-
-
-[source,js]
-------------------------------------------
-GET /_search
-{
- "query": {
- "template": {
- "file": "my_template", <1>
- "params" : {
- "query_string" : "all about search"
- }
- }
- }
-}
-------------------------------------------
-// CONSOLE
-// TEST[warning:[template] query is deprecated, use search template api instead]
-
-<1> Name of the query template in `config/scripts/`, i.e., `my_template.mustache`.
-
-Alternatively, you can register a query template in the cluster state with:
-
-[source,js]
-------------------------------------------
-PUT /_search/template/my_template
-{
- "template": { "match": { "text": "{{query_string}}" }}
-}
-------------------------------------------
-// CONSOLE
-
-and refer to it in the `template` query with the `id` parameter:
-
-
-[source,js]
-------------------------------------------
-GET /_search
-{
- "query": {
- "template": {
- "stored": "my_template", <1>
- "params" : {
- "query_string" : "all about search"
- }
- }
- }
-}
-------------------------------------------
-// CONSOLE
-// TEST[continued]
-// TEST[warning:[template] query is deprecated, use search template api instead]
-
-<1> Name of the query template in `config/scripts/`, i.e., `my_template.mustache`.
-
-
-There is also a dedicated `template` endpoint, allows you to template an entire search request.
-Please see <> for more details.
diff --git a/docs/reference/redirects.asciidoc b/docs/reference/redirects.asciidoc
index cf5d1ef5f4d..0644b549812 100644
--- a/docs/reference/redirects.asciidoc
+++ b/docs/reference/redirects.asciidoc
@@ -463,3 +463,13 @@ index that make warmers not necessary anymore.
=== Index time boosting
The index time boost mapping has been replaced with query time boost (see <