Merge branch 'master' into azure/fix-delete

commit 1d75ee6fb9
@@ -1,2 +1,2 @@
-#!/bin/sh -e
+#!/bin/bash -e
 <% commands.each {command -> %><%= command %><% } %>

@@ -1,2 +1,2 @@
-#!/bin/sh -e
+#!/bin/bash -e
 <% commands.each {command -> %><%= command %><% } %>

@@ -69,6 +69,8 @@ public class Version {
     public static final Version V_2_3_1 = new Version(V_2_3_1_ID, org.apache.lucene.util.Version.LUCENE_5_5_0);
     public static final int V_2_3_2_ID = 2030299;
     public static final Version V_2_3_2 = new Version(V_2_3_2_ID, org.apache.lucene.util.Version.LUCENE_5_5_0);
+    public static final int V_2_3_3_ID = 2030399;
+    public static final Version V_2_3_3 = new Version(V_2_3_3_ID, org.apache.lucene.util.Version.LUCENE_5_5_0);
     public static final int V_5_0_0_alpha1_ID = 5000001;
     public static final Version V_5_0_0_alpha1 = new Version(V_5_0_0_alpha1_ID, org.apache.lucene.util.Version.LUCENE_6_0_0);
     public static final int V_5_0_0_alpha2_ID = 5000002;

@@ -94,6 +96,8 @@ public class Version {
                 return V_5_0_0_alpha2;
             case V_5_0_0_alpha1_ID:
                 return V_5_0_0_alpha1;
+            case V_2_3_3_ID:
+                return V_2_3_3;
             case V_2_3_2_ID:
                 return V_2_3_2;
             case V_2_3_1_ID:

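The new V_2_3_3_ID follows the class's existing ID scheme: major/minor/revision/build packed into one int (read in decimal), so that numeric order matches version order; 99 in the last two digits marks a release build, while the 5.0.0 alphas count up from 01 (V_5_0_0_alpha1_ID = 5000001). A quick sketch of how such an ID decomposes (the variable names here are illustrative, not part of the class):

    int id = 2030399;                // V_2_3_3_ID
    int major = id / 1000000;        // 2
    int minor = (id / 10000) % 100;  // 3
    int revision = (id / 100) % 100; // 3
    int build = id % 100;            // 99 -> release; 01 -> alpha1
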
@@ -54,7 +54,7 @@ public class TransportClearIndicesCacheAction extends TransportBroadcastByNodeAc
                                            TransportService transportService, IndicesService indicesService, ActionFilters actionFilters,
                                            IndexNameExpressionResolver indexNameExpressionResolver) {
         super(settings, ClearIndicesCacheAction.NAME, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver,
-                ClearIndicesCacheRequest::new, ThreadPool.Names.MANAGEMENT);
+                ClearIndicesCacheRequest::new, ThreadPool.Names.MANAGEMENT, false);
         this.indicesService = indicesService;
     }
 

@@ -27,7 +27,7 @@ import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.query.QueryBuilder;
-import org.elasticsearch.search.aggregations.AggregatorBuilder;
+import org.elasticsearch.search.aggregations.AggregationBuilder;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilder;
 import org.elasticsearch.search.highlight.HighlightBuilder;
 import org.elasticsearch.search.sort.SortBuilder;

@@ -165,9 +165,9 @@ public class PercolateRequestBuilder extends ActionRequestBuilder<PercolateReque
 
     /**
      * Delegates to
-     * {@link PercolateSourceBuilder#addAggregation(AggregatorBuilder)}
+     * {@link PercolateSourceBuilder#addAggregation(AggregationBuilder)}
      */
-    public PercolateRequestBuilder addAggregation(AggregatorBuilder<?> aggregationBuilder) {
+    public PercolateRequestBuilder addAggregation(AggregationBuilder<?> aggregationBuilder) {
         sourceBuilder().addAggregation(aggregationBuilder);
         return this;
     }

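The AggregatorBuilder-to-AggregationBuilder rename (here and in the hunks below) is mechanical; call sites keep the same fluent shape. A hedged, incomplete sketch of a caller, assuming the usual AggregationBuilders factory whose products are AggregationBuilder subtypes:

    client.preparePercolate()
          .setIndices("my-index")
          .setDocumentType("my-type")
          .addAggregation(AggregationBuilders.terms("by_field").field("field"))
          .get();
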
@@ -29,7 +29,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.query.QueryBuilder;
-import org.elasticsearch.search.aggregations.AggregatorBuilder;
+import org.elasticsearch.search.aggregations.AggregationBuilder;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilder;
 import org.elasticsearch.search.builder.SearchSourceBuilder;
 import org.elasticsearch.search.highlight.HighlightBuilder;

@@ -53,7 +53,7 @@ public class PercolateSourceBuilder extends ToXContentToBytes {
     private List<SortBuilder<?>> sorts;
     private Boolean trackScores;
     private HighlightBuilder highlightBuilder;
-    private List<AggregatorBuilder<?>> aggregationBuilders;
+    private List<AggregationBuilder<?>> aggregationBuilders;
     private List<PipelineAggregatorBuilder<?>> pipelineAggregationBuilders;
 
     /**

@@ -126,7 +126,7 @@ public class PercolateSourceBuilder extends ToXContentToBytes {
     /**
     * Add an aggregation definition.
     */
-    public PercolateSourceBuilder addAggregation(AggregatorBuilder<?> aggregationBuilder) {
+    public PercolateSourceBuilder addAggregation(AggregationBuilder<?> aggregationBuilder) {
         if (aggregationBuilders == null) {
             aggregationBuilders = new ArrayList<>();
         }

@@ -175,7 +175,7 @@ public class PercolateSourceBuilder extends ToXContentToBytes {
             builder.field("aggregations");
             builder.startObject();
             if (aggregationBuilders != null) {
-                for (AggregatorBuilder<?> aggregation : aggregationBuilders) {
+                for (AggregationBuilder<?> aggregation : aggregationBuilders) {
                     aggregation.toXContent(builder, params);
                 }
             }

@@ -28,7 +28,7 @@ import org.elasticsearch.index.query.QueryBuilder;
 import org.elasticsearch.script.Script;
 import org.elasticsearch.script.Template;
 import org.elasticsearch.search.Scroll;
-import org.elasticsearch.search.aggregations.AggregatorBuilder;
+import org.elasticsearch.search.aggregations.AggregationBuilder;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilder;
 import org.elasticsearch.search.builder.SearchSourceBuilder;
 import org.elasticsearch.search.highlight.HighlightBuilder;

@@ -373,7 +373,7 @@ public class SearchRequestBuilder extends ActionRequestBuilder<SearchRequest, Se
     /**
      * Adds an aggregation to the search operation.
      */
-    public SearchRequestBuilder addAggregation(AggregatorBuilder<?> aggregation) {
+    public SearchRequestBuilder addAggregation(AggregationBuilder<?> aggregation) {
         sourceBuilder().aggregation(aggregation);
         return this;
     }

@@ -94,6 +94,21 @@ public abstract class TransportBroadcastByNodeAction<Request extends BroadcastRe
                                           IndexNameExpressionResolver indexNameExpressionResolver,
                                           Supplier<Request> request,
                                           String executor) {
+        this(settings, actionName, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver, request,
+            executor, true);
+    }
+
+    public TransportBroadcastByNodeAction(
+            Settings settings,
+            String actionName,
+            ThreadPool threadPool,
+            ClusterService clusterService,
+            TransportService transportService,
+            ActionFilters actionFilters,
+            IndexNameExpressionResolver indexNameExpressionResolver,
+            Supplier<Request> request,
+            String executor,
+            boolean canTripCircuitBreaker) {
         super(settings, actionName, threadPool, transportService, actionFilters, indexNameExpressionResolver, request);
 
         this.clusterService = clusterService;

@@ -101,7 +116,8 @@ public abstract class TransportBroadcastByNodeAction<Request extends BroadcastRe
 
         transportNodeBroadcastAction = actionName + "[n]";
 
-        transportService.registerRequestHandler(transportNodeBroadcastAction, NodeRequest::new, executor, new BroadcastByNodeTransportRequestHandler());
+        transportService.registerRequestHandler(transportNodeBroadcastAction, NodeRequest::new, executor, false, canTripCircuitBreaker,
+            new BroadcastByNodeTransportRequestHandler());
     }
 
     private Response newResponse(

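The extra boolean threads through to the transport layer: the shorter constructor above delegates with canTripCircuitBreaker = true, preserving the old behavior for every existing subclass, while an action that must stay usable under memory pressure passes false, as TransportClearIndicesCacheAction now does in its super call:

    super(settings, ClearIndicesCacheAction.NAME, threadPool, clusterService, transportService,
          actionFilters, indexNameExpressionResolver, ClearIndicesCacheRequest::new,
          ThreadPool.Names.MANAGEMENT, false); // false = node-level requests bypass the circuit breaker
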
@@ -281,8 +281,11 @@ public class MetaDataMappingService extends AbstractComponent {
                         // Also the order of the mappings may be backwards.
                         if (newMapper.parentFieldMapper().active()) {
                             for (ObjectCursor<MappingMetaData> mapping : indexMetaData.getMappings().values()) {
-                                if (newMapper.parentFieldMapper().type().equals(mapping.value.type())) {
-                                    throw new IllegalArgumentException("can't add a _parent field that points to an already existing type");
+                                String parentType = newMapper.parentFieldMapper().type();
+                                if (parentType.equals(mapping.value.type()) &&
+                                        indexService.mapperService().getParentTypes().contains(parentType) == false) {
+                                    throw new IllegalArgumentException("can't add a _parent field that points to an " +
+                                        "already existing type, that isn't already a parent");
                                 }
                             }
                         }

@@ -53,6 +53,15 @@ public enum DateTimeUnit {
         return field;
     }
 
+    /**
+     * @param unit the {@link DateTimeUnit} to check
+     * @return true if the unit is a day or longer
+     */
+    public static boolean isDayOrLonger(DateTimeUnit unit) {
+        return (unit == DateTimeUnit.HOUR_OF_DAY || unit == DateTimeUnit.MINUTES_OF_HOUR
+                || unit == DateTimeUnit.SECOND_OF_MINUTE) == false;
+    }
+
     public static DateTimeUnit resolve(byte id) {
         switch (id) {
             case 1: return WEEK_OF_WEEKYEAR;

@@ -46,8 +46,8 @@ public abstract class TimeZoneRounding extends Rounding {
 
     public static class Builder {
 
-        private DateTimeUnit unit;
-        private long interval = -1;
+        private final DateTimeUnit unit;
+        private final long interval;
 
         private DateTimeZone timeZone = DateTimeZone.UTC;
 

@@ -142,10 +142,15 @@ public abstract class TimeZoneRounding extends Rounding {
 
         @Override
         public long nextRoundingValue(long time) {
-            long timeLocal = time;
-            timeLocal = timeZone.convertUTCToLocal(time);
-            long nextInLocalTime = durationField.add(timeLocal, 1);
-            return timeZone.convertLocalToUTC(nextInLocalTime, false);
+            if (DateTimeUnit.isDayOrLonger(unit)) {
+                time = timeZone.convertUTCToLocal(time);
+            }
+            long next = durationField.add(time, 1);
+            if (DateTimeUnit.isDayOrLonger(unit)) {
+                return timeZone.convertLocalToUTC(next, false);
+            } else {
+                return next;
+            }
         }
 
         @Override

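The motivation is the DST edge that isDayOrLonger guards: sub-day units are fixed-width on the UTC axis, so the local-time round trip is unnecessary (and wrong across a transition), while day-or-longer units must be advanced on the local wall-clock axis. A small Joda-Time illustration, with the zone and date chosen purely for the example:

    import org.joda.time.DateTime;
    import org.joda.time.DateTimeZone;

    DateTimeZone berlin = DateTimeZone.forID("Europe/Berlin");
    // 2016-03-27 is the spring-forward day in Berlin: the local day spans only 23 UTC hours.
    long utc = new DateTime(2016, 3, 27, 0, 30, DateTimeZone.UTC).getMillis();
    long local = berlin.convertUTCToLocal(utc);   // shift onto the local wall-clock axis
    // adding one "day" on the local axis and converting back with
    // convertLocalToUTC(next, false) lands on the next local midnight, 23 real
    // hours later - which nextRoundingValue(...) now does only for day-or-longer units.
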
@@ -87,6 +87,7 @@ import org.elasticsearch.repositories.fs.FsRepository;
 import org.elasticsearch.repositories.uri.URLRepository;
 import org.elasticsearch.rest.BaseRestHandler;
 import org.elasticsearch.script.ScriptService;
+import org.elasticsearch.search.SearchModule;
 import org.elasticsearch.search.SearchService;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.Transport;

@@ -397,6 +398,9 @@ public final class ClusterSettings extends AbstractScopedSettings {
                     JvmGcMonitorService.ENABLED_SETTING,
                     JvmGcMonitorService.REFRESH_INTERVAL_SETTING,
                     JvmGcMonitorService.GC_SETTING,
+                    JvmGcMonitorService.GC_OVERHEAD_WARN_SETTING,
+                    JvmGcMonitorService.GC_OVERHEAD_INFO_SETTING,
+                    JvmGcMonitorService.GC_OVERHEAD_DEBUG_SETTING,
                     PageCacheRecycler.LIMIT_HEAP_SETTING,
                     PageCacheRecycler.WEIGHT_BYTES_SETTING,
                     PageCacheRecycler.WEIGHT_INT_SETTING,

@@ -417,6 +421,7 @@ public final class ClusterSettings extends AbstractScopedSettings {
                     ResourceWatcherService.ENABLED,
                     ResourceWatcherService.RELOAD_INTERVAL_HIGH,
                     ResourceWatcherService.RELOAD_INTERVAL_MEDIUM,
-                    ResourceWatcherService.RELOAD_INTERVAL_LOW
+                    ResourceWatcherService.RELOAD_INTERVAL_LOW,
+                    SearchModule.INDICES_MAX_CLAUSE_COUNT_SETTING
                     )));
     }

@@ -18,19 +18,6 @@
  */
 package org.elasticsearch.common.settings;
 
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.EnumSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Objects;
-import java.util.function.BiConsumer;
-import java.util.function.Consumer;
-import java.util.function.Function;
-import java.util.regex.Pattern;
-import java.util.stream.Collectors;
-
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.action.support.ToXContentToBytes;

@@ -50,6 +37,19 @@ import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentType;
 
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.EnumSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.function.BiConsumer;
+import java.util.function.Consumer;
+import java.util.function.Function;
+import java.util.regex.Pattern;
+import java.util.stream.Collectors;
+
 /**
  * A setting. Encapsulates typical stuff like default value, parsing, and scope.
  * Some (SettingsProperty.Dynamic) can by modified at run time using the API.

@@ -504,7 +504,7 @@ public class Setting<T> extends ToXContentToBytes {
                 throw new IllegalArgumentException("Failed to parse value [" + s + "] for setting [" + key + "] must be >= " + minValue);
             }
             if (value > maxValue) {
-                throw new IllegalArgumentException("Failed to parse value [" + s + "] for setting [" + key + "] must be =< " + maxValue);
+                throw new IllegalArgumentException("Failed to parse value [" + s + "] for setting [" + key + "] must be <= " + maxValue);
             }
             return value;
         }

@@ -572,7 +572,7 @@ public class Setting<T> extends ToXContentToBytes {
                 throw new IllegalArgumentException("Failed to parse value [" + s + "] for setting [" + key + "] must be >= " + minValue);
             }
             if (value.bytes() > maxValue.bytes()) {
-                throw new IllegalArgumentException("Failed to parse value [" + s + "] for setting [" + key + "] must be =< " + maxValue);
+                throw new IllegalArgumentException("Failed to parse value [" + s + "] for setting [" + key + "] must be <= " + maxValue);
             }
             return value;
         }

@@ -65,7 +65,12 @@ public class SettingsModule extends AbstractModule {
     protected void configure() {
         final IndexScopedSettings indexScopedSettings = new IndexScopedSettings(settings, new HashSet<>(this.indexSettings.values()));
         final ClusterSettings clusterSettings = new ClusterSettings(settings, new HashSet<>(this.nodeSettings.values()));
-        Settings indexSettings = settings.filter((s) -> s.startsWith("index.") && clusterSettings.get(s) == null);
+        Settings indexSettings = settings.filter((s) -> (s.startsWith("index.") &&
+            // special case - we want to get Did you mean indices.query.bool.max_clause_count
+            // which means we need to by-pass this check for this setting
+            // TODO remove in 6.0!!
+            "index.query.bool.max_clause_count".equals(s) == false)
+            && clusterSettings.get(s) == null);
         if (indexSettings.isEmpty() == false) {
             try {
                 String separator = IntStream.range(0, 85).mapToObj(s -> "*").collect(Collectors.joining("")).trim();

@@ -1,629 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.elasticsearch.common.util;
-
-import org.apache.lucene.store.DataInput;
-import org.apache.lucene.store.DataOutput;
-import org.apache.lucene.store.IndexInput;
-import org.apache.lucene.util.BytesRef;
-import org.apache.lucene.util.RamUsageEstimator;
-import org.elasticsearch.common.Nullable;
-import org.elasticsearch.common.hash.MurmurHash3;
-import org.elasticsearch.common.io.stream.StreamInput;
-import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.unit.SizeValue;
-
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.Comparator;
-
-/**
- * A bloom filter. Inspired by Guava bloom filter implementation though with some optimizations.
- */
-public class BloomFilter {
-
-    /**
-     * A factory that can use different fpp based on size.
-     */
-    public static class Factory {
-
-        public static final Factory DEFAULT = buildDefault();
-
-        private static Factory buildDefault() {
-            // Some numbers:
-            // 10k =0.001: 140.4kb , 10 Hashes
-            // 10k =0.01 : 93.6kb , 6 Hashes
-            // 100k=0.01 : 936.0kb , 6 Hashes
-            // 100k=0.03 : 712.7kb , 5 Hashes
-            // 500k=0.01 : 4.5mb , 6 Hashes
-            // 500k=0.03 : 3.4mb , 5 Hashes
-            // 500k=0.05 : 2.9mb , 4 Hashes
-            // 1m=0.01 : 9.1mb , 6 Hashes
-            // 1m=0.03 : 6.9mb , 5 Hashes
-            // 1m=0.05 : 5.9mb , 4 Hashes
-            // 5m=0.01 : 45.7mb , 6 Hashes
-            // 5m=0.03 : 34.8mb , 5 Hashes
-            // 5m=0.05 : 29.7mb , 4 Hashes
-            // 50m=0.01 : 457.0mb , 6 Hashes
-            // 50m=0.03 : 297.3mb , 4 Hashes
-            // 50m=0.10 : 228.5mb , 3 Hashes
-            return buildFromString("10k=0.01,1m=0.03");
-        }
-
-        /**
-         * Supports just passing fpp, as in "0.01", and also ranges, like "50k=0.01,1m=0.05". If
-         * its null, returns {@link #buildDefault()}.
-         */
-        public static Factory buildFromString(@Nullable String config) {
-            if (config == null) {
-                return buildDefault();
-            }
-            String[] sEntries = config.split(",");
-            if (sEntries.length == 0) {
-                if (config.length() > 0) {
-                    return new Factory(new Entry[]{new Entry(0, Double.parseDouble(config))});
-                }
-                return buildDefault();
-            }
-            Entry[] entries = new Entry[sEntries.length];
-            for (int i = 0; i < sEntries.length; i++) {
-                int index = sEntries[i].indexOf('=');
-                entries[i] = new Entry(
-                        (int) SizeValue.parseSizeValue(sEntries[i].substring(0, index).trim()).singles(),
-                        Double.parseDouble(sEntries[i].substring(index + 1).trim())
-                );
-            }
-            return new Factory(entries);
-        }
-
-        private final Entry[] entries;
-
-        public Factory(Entry[] entries) {
-            this.entries = entries;
-            // the order is from the upper most expected insertions to the lowest
-            Arrays.sort(this.entries, new Comparator<Entry>() {
-                @Override
-                public int compare(Entry o1, Entry o2) {
-                    return o2.expectedInsertions - o1.expectedInsertions;
-                }
-            });
-        }
-
-        public BloomFilter createFilter(int expectedInsertions) {
-            for (Entry entry : entries) {
-                if (expectedInsertions > entry.expectedInsertions) {
-                    return BloomFilter.create(expectedInsertions, entry.fpp);
-                }
-            }
-            return BloomFilter.create(expectedInsertions, 0.03);
-        }
-
-        public static class Entry {
-            public final int expectedInsertions;
-            public final double fpp;
-
-            Entry(int expectedInsertions, double fpp) {
-                this.expectedInsertions = expectedInsertions;
-                this.fpp = fpp;
-            }
-        }
-    }
-
-    /**
-     * Creates a bloom filter based on the with the expected number
-     * of insertions and expected false positive probability.
-     *
-     * @param expectedInsertions the number of expected insertions to the constructed
-     * @param fpp the desired false positive probability (must be positive and less than 1.0)
-     */
-    public static BloomFilter create(int expectedInsertions, double fpp) {
-        return create(expectedInsertions, fpp, -1);
-    }
-
-    /**
-     * Creates a bloom filter based on the expected number of insertions, expected false positive probability,
-     * and number of hash functions.
-     *
-     * @param expectedInsertions the number of expected insertions to the constructed
-     * @param fpp the desired false positive probability (must be positive and less than 1.0)
-     * @param numHashFunctions the number of hash functions to use (must be less than or equal to 255)
-     */
-    public static BloomFilter create(int expectedInsertions, double fpp, int numHashFunctions) {
-        if (expectedInsertions == 0) {
-            expectedInsertions = 1;
-        }
-        /*
-         * TODO(user): Put a warning in the javadoc about tiny fpp values,
-         * since the resulting size is proportional to -log(p), but there is not
-         * much of a point after all, e.g. optimalM(1000, 0.0000000000000001) = 76680
-         * which is less that 10kb. Who cares!
-         */
-        long numBits = optimalNumOfBits(expectedInsertions, fpp);
-
-        // calculate the optimal number of hash functions
-        if (numHashFunctions == -1) {
-            numHashFunctions = optimalNumOfHashFunctions(expectedInsertions, numBits);
-        }
-
-        try {
-            return new BloomFilter(new BitArray(numBits), numHashFunctions, Hashing.DEFAULT);
-        } catch (IllegalArgumentException e) {
-            throw new IllegalArgumentException("Could not create BloomFilter of " + numBits + " bits", e);
-        }
-    }
-
-    public static void skipBloom(IndexInput in) throws IOException {
-        int version = in.readInt(); // we do nothing with this now..., defaults to 0
-        final int numLongs = in.readInt();
-        in.seek(in.getFilePointer() + (numLongs * 8) + 4 + 4); // filter + numberOfHashFunctions + hashType
-    }
-
-    public static BloomFilter deserialize(DataInput in) throws IOException {
-        int version = in.readInt(); // we do nothing with this now..., defaults to 0
-        int numLongs = in.readInt();
-        long[] data = new long[numLongs];
-        for (int i = 0; i < numLongs; i++) {
-            data[i] = in.readLong();
-        }
-        int numberOfHashFunctions = in.readInt();
-        int hashType = in.readInt();
-        return new BloomFilter(new BitArray(data), numberOfHashFunctions, Hashing.fromType(hashType));
-    }
-
-    public static void serilaize(BloomFilter filter, DataOutput out) throws IOException {
-        out.writeInt(0); // version
-        BitArray bits = filter.bits;
-        out.writeInt(bits.data.length);
-        for (long l : bits.data) {
-            out.writeLong(l);
-        }
-        out.writeInt(filter.numHashFunctions);
-        out.writeInt(filter.hashing.type()); // hashType
-    }
-
-    public static BloomFilter readFrom(StreamInput in) throws IOException {
-        int version = in.readVInt(); // we do nothing with this now..., defaults to 0
-        int numLongs = in.readVInt();
-        long[] data = new long[numLongs];
-        for (int i = 0; i < numLongs; i++) {
-            data[i] = in.readLong();
-        }
-        int numberOfHashFunctions = in.readVInt();
-        int hashType = in.readVInt(); // again, nothing to do now...
-        return new BloomFilter(new BitArray(data), numberOfHashFunctions, Hashing.fromType(hashType));
-    }
-
-    public static void writeTo(BloomFilter filter, StreamOutput out) throws IOException {
-        out.writeVInt(0); // version
-        BitArray bits = filter.bits;
-        out.writeVInt(bits.data.length);
-        for (long l : bits.data) {
-            out.writeLong(l);
-        }
-        out.writeVInt(filter.numHashFunctions);
-        out.writeVInt(filter.hashing.type()); // hashType
-    }
-
-    /**
-     * The bit set of the BloomFilter (not necessarily power of 2!)
-     */
-    final BitArray bits;
-    /**
-     * Number of hashes per element
-     */
-    final int numHashFunctions;
-
-    final Hashing hashing;
-
-    BloomFilter(BitArray bits, int numHashFunctions, Hashing hashing) {
-        this.bits = bits;
-        this.numHashFunctions = numHashFunctions;
-        this.hashing = hashing;
-        /*
-         * This only exists to forbid BFs that cannot use the compact persistent representation.
-         * If it ever throws, at a user who was not intending to use that representation, we should
-         * reconsider
-         */
-        if (numHashFunctions > 255) {
-            throw new IllegalArgumentException("Currently we don't allow BloomFilters that would use more than 255 hash functions");
-        }
-    }
-
-    public boolean put(BytesRef value) {
-        return hashing.put(value, numHashFunctions, bits);
-    }
-
-    public boolean mightContain(BytesRef value) {
-        return hashing.mightContain(value, numHashFunctions, bits);
-    }
-
-    public int getNumHashFunctions() {
-        return this.numHashFunctions;
-    }
-
-    public long getSizeInBytes() {
-        return bits.ramBytesUsed();
-    }
-
-    @Override
-    public int hashCode() {
-        return bits.hashCode() + numHashFunctions;
-    }
-
-    /*
-     * Cheat sheet:
-     *
-     * m: total bits
-     * n: expected insertions
-     * b: m/n, bits per insertion
-
-     * p: expected false positive probability
-     *
-     * 1) Optimal k = b * ln2
-     * 2) p = (1 - e ^ (-kn/m))^k
-     * 3) For optimal k: p = 2 ^ (-k) ~= 0.6185^b
-     * 4) For optimal k: m = -nlnp / ((ln2) ^ 2)
-     */
-
-    /**
-     * Computes the optimal k (number of hashes per element inserted in Bloom filter), given the
-     * expected insertions and total number of bits in the Bloom filter.
-     * <p>
-     * See http://en.wikipedia.org/wiki/File:Bloom_filter_fp_probability.svg for the formula.
-     *
-     * @param n expected insertions (must be positive)
-     * @param m total number of bits in Bloom filter (must be positive)
-     */
-    static int optimalNumOfHashFunctions(long n, long m) {
-        return Math.max(1, (int) Math.round(m / n * Math.log(2)));
-    }
-
-    /**
-     * Computes m (total bits of Bloom filter) which is expected to achieve, for the specified
-     * expected insertions, the required false positive probability.
-     * <p>
-     * See http://en.wikipedia.org/wiki/Bloom_filter#Probability_of_false_positives for the formula.
-     *
-     * @param n expected insertions (must be positive)
-     * @param p false positive rate (must be 0 < p < 1)
-     */
-    static long optimalNumOfBits(long n, double p) {
-        if (p == 0) {
-            p = Double.MIN_VALUE;
-        }
-        return (long) (-n * Math.log(p) / (Math.log(2) * Math.log(2)));
-    }
-
-    // Note: We use this instead of java.util.BitSet because we need access to the long[] data field
-    static final class BitArray {
-        final long[] data;
-        final long bitSize;
-        long bitCount;
-
-        BitArray(long bits) {
-            this(new long[size(bits)]);
-        }
-
-        private static int size(long bits) {
-            long quotient = bits / 64;
-            long remainder = bits - quotient * 64;
-            return Math.toIntExact(remainder == 0 ? quotient : 1 + quotient);
-        }
-
-        // Used by serialization
-        BitArray(long[] data) {
-            this.data = data;
-            long bitCount = 0;
-            for (long value : data) {
-                bitCount += Long.bitCount(value);
-            }
-            this.bitCount = bitCount;
-            this.bitSize = data.length * Long.SIZE;
-        }
-
-        /** Returns true if the bit changed value. */
-        boolean set(long index) {
-            if (!get(index)) {
-                data[(int) (index >>> 6)] |= (1L << index);
-                bitCount++;
-                return true;
-            }
-            return false;
-        }
-
-        boolean get(long index) {
-            return (data[(int) (index >>> 6)] & (1L << index)) != 0;
-        }
-
-        /** Number of bits */
-        long bitSize() {
-            return bitSize;
-        }
-
-        /** Number of set bits (1s) */
-        long bitCount() {
-            return bitCount;
-        }
-
-        BitArray copy() {
-            return new BitArray(data.clone());
-        }
-
-        /** Combines the two BitArrays using bitwise OR. */
-        void putAll(BitArray array) {
-            bitCount = 0;
-            for (int i = 0; i < data.length; i++) {
-                data[i] |= array.data[i];
-                bitCount += Long.bitCount(data[i]);
-            }
-        }
-
-        @Override public boolean equals(Object o) {
-            if (o instanceof BitArray) {
-                BitArray bitArray = (BitArray) o;
-                return Arrays.equals(data, bitArray.data);
-            }
-            return false;
-        }
-
-        @Override public int hashCode() {
-            return Arrays.hashCode(data);
-        }
-
-        public long ramBytesUsed() {
-            return Long.BYTES * data.length + RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + 16;
-        }
-    }
-
-    static enum Hashing {
-
-        V0() {
-            @Override
-            protected boolean put(BytesRef value, int numHashFunctions, BitArray bits) {
-                long bitSize = bits.bitSize();
-                long hash64 = hash3_x64_128(value.bytes, value.offset, value.length, 0);
-                int hash1 = (int) hash64;
-                int hash2 = (int) (hash64 >>> 32);
-                boolean bitsChanged = false;
-                for (int i = 1; i <= numHashFunctions; i++) {
-                    int nextHash = hash1 + i * hash2;
-                    if (nextHash < 0) {
-                        nextHash = ~nextHash;
-                    }
-                    bitsChanged |= bits.set(nextHash % bitSize);
-                }
-                return bitsChanged;
-            }
-
-            @Override
-            protected boolean mightContain(BytesRef value, int numHashFunctions, BitArray bits) {
-                long bitSize = bits.bitSize();
-                long hash64 = hash3_x64_128(value.bytes, value.offset, value.length, 0);
-                int hash1 = (int) hash64;
-                int hash2 = (int) (hash64 >>> 32);
-                for (int i = 1; i <= numHashFunctions; i++) {
-                    int nextHash = hash1 + i * hash2;
-                    if (nextHash < 0) {
-                        nextHash = ~nextHash;
-                    }
-                    if (!bits.get(nextHash % bitSize)) {
-                        return false;
-                    }
-                }
-                return true;
-            }
-
-            @Override
-            protected int type() {
-                return 0;
-            }
-        },
-        V1() {
-            @Override
-            protected boolean put(BytesRef value, int numHashFunctions, BitArray bits) {
-                long bitSize = bits.bitSize();
-                MurmurHash3.Hash128 hash128 = MurmurHash3.hash128(value.bytes, value.offset, value.length, 0, new MurmurHash3.Hash128());
-
-                boolean bitsChanged = false;
-                long combinedHash = hash128.h1;
-                for (int i = 0; i < numHashFunctions; i++) {
-                    // Make the combined hash positive and indexable
-                    bitsChanged |= bits.set((combinedHash & Long.MAX_VALUE) % bitSize);
-                    combinedHash += hash128.h2;
-                }
-                return bitsChanged;
-            }
-
-            @Override
-            protected boolean mightContain(BytesRef value, int numHashFunctions, BitArray bits) {
-                long bitSize = bits.bitSize();
-                MurmurHash3.Hash128 hash128 = MurmurHash3.hash128(value.bytes, value.offset, value.length, 0, new MurmurHash3.Hash128());
-
-                long combinedHash = hash128.h1;
-                for (int i = 0; i < numHashFunctions; i++) {
-                    // Make the combined hash positive and indexable
-                    if (!bits.get((combinedHash & Long.MAX_VALUE) % bitSize)) {
-                        return false;
-                    }
-                    combinedHash += hash128.h2;
-                }
-                return true;
-            }
-
-            @Override
-            protected int type() {
-                return 1;
-            }
-        }
-        ;
-
-        protected abstract boolean put(BytesRef value, int numHashFunctions, BitArray bits);
-
-        protected abstract boolean mightContain(BytesRef value, int numHashFunctions, BitArray bits);
-
-        protected abstract int type();
-
-        public static final Hashing DEFAULT = Hashing.V1;
-
-        public static Hashing fromType(int type) {
-            if (type == 0) {
-                return Hashing.V0;
-            } if (type == 1) {
-                return Hashing.V1;
-            } else {
-                throw new IllegalArgumentException("no hashing type matching " + type);
-            }
-        }
-    }
-
-    // START : MURMUR 3_128 USED FOR Hashing.V0
-    // NOTE: don't replace this code with the o.e.common.hashing.MurmurHash3 method which returns a different hash
-
-    protected static long getblock(byte[] key, int offset, int index) {
-        int i_8 = index << 3;
-        int blockOffset = offset + i_8;
-        return ((long) key[blockOffset + 0] & 0xff) + (((long) key[blockOffset + 1] & 0xff) << 8) +
-                (((long) key[blockOffset + 2] & 0xff) << 16) + (((long) key[blockOffset + 3] & 0xff) << 24) +
-                (((long) key[blockOffset + 4] & 0xff) << 32) + (((long) key[blockOffset + 5] & 0xff) << 40) +
-                (((long) key[blockOffset + 6] & 0xff) << 48) + (((long) key[blockOffset + 7] & 0xff) << 56);
-    }
-
-    protected static long rotl64(long v, int n) {
-        return ((v << n) | (v >>> (64 - n)));
-    }
-
-    protected static long fmix(long k) {
-        k ^= k >>> 33;
-        k *= 0xff51afd7ed558ccdL;
-        k ^= k >>> 33;
-        k *= 0xc4ceb9fe1a85ec53L;
-        k ^= k >>> 33;
-
-        return k;
-    }
-
-    @SuppressWarnings("fallthrough") // Uses fallthrough to implement a well know hashing algorithm
-    public static long hash3_x64_128(byte[] key, int offset, int length, long seed) {
-        final int nblocks = length >> 4; // Process as 128-bit blocks.
-
-        long h1 = seed;
-        long h2 = seed;
-
-        long c1 = 0x87c37b91114253d5L;
-        long c2 = 0x4cf5ad432745937fL;
-
-        //----------
-        // body
-
-        for (int i = 0; i < nblocks; i++) {
-            long k1 = getblock(key, offset, i * 2 + 0);
-            long k2 = getblock(key, offset, i * 2 + 1);
-
-            k1 *= c1;
-            k1 = rotl64(k1, 31);
-            k1 *= c2;
-            h1 ^= k1;
-
-            h1 = rotl64(h1, 27);
-            h1 += h2;
-            h1 = h1 * 5 + 0x52dce729;
-
-            k2 *= c2;
-            k2 = rotl64(k2, 33);
-            k2 *= c1;
-            h2 ^= k2;
-
-            h2 = rotl64(h2, 31);
-            h2 += h1;
-            h2 = h2 * 5 + 0x38495ab5;
-        }
-
-        //----------
-        // tail
-
-        // Advance offset to the unprocessed tail of the data.
-        offset += nblocks * 16;
-
-        long k1 = 0;
-        long k2 = 0;
-
-        switch (length & 15) {
-            case 15:
-                k2 ^= ((long) key[offset + 14]) << 48;
-            case 14:
-                k2 ^= ((long) key[offset + 13]) << 40;
-            case 13:
-                k2 ^= ((long) key[offset + 12]) << 32;
-            case 12:
-                k2 ^= ((long) key[offset + 11]) << 24;
-            case 11:
-                k2 ^= ((long) key[offset + 10]) << 16;
-            case 10:
-                k2 ^= ((long) key[offset + 9]) << 8;
-            case 9:
-                k2 ^= ((long) key[offset + 8]) << 0;
-                k2 *= c2;
-                k2 = rotl64(k2, 33);
-                k2 *= c1;
-                h2 ^= k2;
-
-            case 8:
-                k1 ^= ((long) key[offset + 7]) << 56;
-            case 7:
-                k1 ^= ((long) key[offset + 6]) << 48;
-            case 6:
-                k1 ^= ((long) key[offset + 5]) << 40;
-            case 5:
-                k1 ^= ((long) key[offset + 4]) << 32;
-            case 4:
-                k1 ^= ((long) key[offset + 3]) << 24;
-            case 3:
-                k1 ^= ((long) key[offset + 2]) << 16;
-            case 2:
-                k1 ^= ((long) key[offset + 1]) << 8;
-            case 1:
-                k1 ^= (key[offset]);
-                k1 *= c1;
-                k1 = rotl64(k1, 31);
-                k1 *= c2;
-                h1 ^= k1;
-        }
-
-        //----------
-        // finalization
-
-        h1 ^= length;
-        h2 ^= length;
-
-        h1 += h2;
-        h2 += h1;
-
-        h1 = fmix(h1);
-        h2 = fmix(h2);
-
-        h1 += h2;
-        h2 += h1;
-
-        //return (new long[]{h1, h2});
-        // SAME AS GUAVA, they take the first long out of the 128bit
-        return h1;
-    }
-
-    // END: MURMUR 3_128
-}

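The sizing comments in the deleted class can be reproduced from the two formulas it used, m = -n ln p / (ln 2)^2 and k = round((m / n) ln 2), noting that m / n is long division in the Java code. A worked check of the "10k =0.01 : 93.6kb , 6 Hashes" row:

    long n = 10_000;   // expected insertions
    double p = 0.01;   // target false-positive probability
    long m = (long) (-n * Math.log(p) / (Math.log(2) * Math.log(2))); // 95,850 bits
    long k = Math.max(1, Math.round(m / n * Math.log(2)));            // 9 * ln 2 -> 6 hashes
    System.out.println(m / 1024.0); // ~93.6, matching the table row if its "kb" is read as kibibits
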
@@ -321,7 +321,7 @@ public final class AnalysisRegistry implements Closeable {
             if (currentSettings.get("tokenizer") != null) {
                 factory = (T) new CustomAnalyzerProvider(settings, name, currentSettings);
             } else {
-                throw new IllegalArgumentException(toBuild + " [" + name + "] must have a type associated with it");
+                throw new IllegalArgumentException(toBuild + " [" + name + "] must specify either an analyzer type, or a tokenizer");
             }
         } else if (typeName.equals("custom")) {
             factory = (T) new CustomAnalyzerProvider(settings, name, currentSettings);

@@ -335,7 +335,7 @@ public final class AnalysisRegistry implements Closeable {
             factories.put(name, factory);
         } else {
             if (typeName == null) {
-                throw new IllegalArgumentException(toBuild + " [" + name + "] must have a type associated with it");
+                throw new IllegalArgumentException(toBuild + " [" + name + "] must specify either an analyzer type, or a tokenizer");
             }
             AnalysisModule.AnalysisProvider<T> type = providerMap.get(typeName);
             if (type == null) {

@@ -39,6 +39,7 @@ import java.nio.file.OpenOption;
 import java.nio.file.Path;
 import java.nio.file.StandardOpenOption;
 import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.locks.ReentrantLock;
 
 public class TranslogWriter extends BaseTranslogReader implements Closeable {
 

@@ -60,7 +61,8 @@ public class TranslogWriter extends BaseTranslogReader implements Closeable {
     private volatile long totalOffset;
 
     protected final AtomicBoolean closed = new AtomicBoolean(false);
-
+    // lock order synchronized(syncLock) -> synchronized(this)
+    private final Object syncLock = new Object();
 
     public TranslogWriter(ShardId shardId, long generation, FileChannel channel, Path path, ByteSizeValue bufferSize) throws IOException {
         super(generation, channel, path, channel.position());

@@ -146,23 +148,7 @@ public class TranslogWriter extends BaseTranslogReader implements Closeable {
      * raising the exception.
      */
     public void sync() throws IOException {
-        if (syncNeeded()) {
-            synchronized (this) {
-                ensureOpen();
-                final long offsetToSync;
-                final int opsCounter;
-                try {
-                    outputStream.flush();
-                    offsetToSync = totalOffset;
-                    opsCounter = operationCounter;
-                    checkpoint(offsetToSync, opsCounter, generation, channel, path);
-                } catch (Throwable ex) {
-                    closeWithTragicEvent(ex);
-                    throw ex;
-                }
-                lastSyncedOffset = offsetToSync;
-            }
-        }
+        syncUpTo(Long.MAX_VALUE);
     }
 
     /**

@@ -229,10 +215,39 @@ public class TranslogWriter extends BaseTranslogReader implements Closeable {
      * @return <code>true</code> if this call caused an actual sync operation
      */
     public boolean syncUpTo(long offset) throws IOException {
-        if (lastSyncedOffset < offset) {
-            sync();
+        if (lastSyncedOffset < offset && syncNeeded()) {
+            synchronized (syncLock) { // only one sync/checkpoint should happen concurrently but we wait
+                if (lastSyncedOffset < offset && syncNeeded()) {
+                    // double checked locking - we don't want to fsync unless we have to and now that we have
+                    // the lock we should check again since if this code is busy we might have fsynced enough already
+                    final long offsetToSync;
+                    final int opsCounter;
+                    synchronized (this) {
+                        ensureOpen();
+                        try {
+                            outputStream.flush();
+                            offsetToSync = totalOffset;
+                            opsCounter = operationCounter;
+                        } catch (Throwable ex) {
+                            closeWithTragicEvent(ex);
+                            throw ex;
+                        }
+                    }
+                    // now do the actual fsync outside of the synchronized block such that
+                    // we can continue writing to the buffer etc.
+                    try {
+                        channel.force(false);
+                        writeCheckpoint(offsetToSync, opsCounter, path.getParent(), generation, StandardOpenOption.WRITE);
+                    } catch (Throwable ex) {
+                        closeWithTragicEvent(ex);
+                        throw ex;
+                    }
+                    assert lastSyncedOffset <= offsetToSync : "illegal state: " + lastSyncedOffset + " <= " + offsetToSync;
+                    lastSyncedOffset = offsetToSync; // write protected by syncLock
+                    return true;
+                }
+            }
+        }
         return false;
     }
 

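The rewritten syncUpTo is a double-checked-locking variant: a cheap racy check, the same check again under syncLock, and the expensive fsync deliberately performed with only syncLock held so writers can keep appending under the narrower this lock. Stripped to its shape, with hypothetical helper names standing in for the real fields and methods:

    if (needsSync(offset)) {                  // cheap, racy first check
        synchronized (syncLock) {             // one sync/checkpoint at a time
            if (needsSync(offset)) {          // re-check under the lock
                long snapshot;
                synchronized (this) {         // briefly freeze writer state
                    flushBufferToChannel();
                    snapshot = currentTotalOffset();
                }
                fsyncAndCheckpoint(snapshot); // slow I/O, state lock released
                markSynced(snapshot);         // safe: writes guarded by syncLock
            }
        }
    }
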
@@ -254,11 +269,6 @@ public class TranslogWriter extends BaseTranslogReader implements Closeable {
         Channels.readFromFileChannelWithEofException(channel, position, targetBuffer);
     }
 
-    private synchronized void checkpoint(long lastSyncPosition, int operationCounter, long generation, FileChannel translogFileChannel, Path translogFilePath) throws IOException {
-        translogFileChannel.force(false);
-        writeCheckpoint(lastSyncPosition, operationCounter, translogFilePath.getParent(), generation, StandardOpenOption.WRITE);
-    }
-
     private static void writeCheckpoint(long syncPosition, int numOperations, Path translogFile, long generation, OpenOption... options) throws IOException {
         final Path checkpointFile = translogFile.resolve(Translog.CHECKPOINT_FILE_NAME);
         Checkpoint checkpoint = new Checkpoint(syncPosition, numOperations, generation);

@@ -269,7 +279,7 @@ public class TranslogWriter extends BaseTranslogReader implements Closeable {
 
         static final ChannelFactory DEFAULT = new ChannelFactory();
 
-        // only for testing until we have a disk-full FileSystemt
+        // only for testing until we have a disk-full FileSystem
         public FileChannel open(Path file) throws IOException {
             return FileChannel.open(file, StandardOpenOption.WRITE, StandardOpenOption.READ, StandardOpenOption.CREATE_NEW);
         }

@@ -396,12 +396,14 @@ public class FsInfo implements Iterable<FsInfo.Path>, Writeable, ToXContent {
                 builder.endObject();
             }
             builder.endArray();
 
             builder.startObject("total");
             builder.field(OPERATIONS, totalOperations);
             builder.field(READ_OPERATIONS, totalReadOperations);
             builder.field(WRITE_OPERATIONS, totalWriteOperations);
+            builder.field(READ_KILOBYTES, totalReadKilobytes);
+            builder.field(WRITE_KILOBYTES, totalWriteKilobytes);
             builder.endObject();
         }
         return builder;
     }

@@ -31,6 +31,7 @@ import org.elasticsearch.monitor.jvm.JvmStats.GarbageCollector;
 import org.elasticsearch.threadpool.ThreadPool;
 
 import java.util.HashMap;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Objects;
 import java.util.concurrent.ScheduledFuture;

@@ -45,6 +46,7 @@ public class JvmGcMonitorService extends AbstractLifecycleComponent<JvmGcMonitor
     private final boolean enabled;
     private final TimeValue interval;
     private final Map<String, GcThreshold> gcThresholds;
+    private final GcOverheadThreshold gcOverheadThreshold;
 
     private volatile ScheduledFuture scheduledFuture;
 

@@ -57,6 +59,27 @@ public class JvmGcMonitorService extends AbstractLifecycleComponent<JvmGcMonitor
     private static String GC_COLLECTOR_PREFIX = "monitor.jvm.gc.collector.";
     public final static Setting<Settings> GC_SETTING = Setting.groupSetting(GC_COLLECTOR_PREFIX, Property.NodeScope);
 
+    public final static Setting<Integer> GC_OVERHEAD_WARN_SETTING =
+        Setting.intSetting("monitor.jvm.gc.overhead.warn", 50, 0, 100, Property.NodeScope);
+    public final static Setting<Integer> GC_OVERHEAD_INFO_SETTING =
+        Setting.intSetting("monitor.jvm.gc.overhead.info", 25, 0, 100, Property.NodeScope);
+    public final static Setting<Integer> GC_OVERHEAD_DEBUG_SETTING =
+        Setting.intSetting("monitor.jvm.gc.overhead.debug", 10, 0, 100, Property.NodeScope);
+
+    static class GcOverheadThreshold {
+        final int warnThreshold;
+        final int infoThreshold;
+        final int debugThreshold;
+
+        public GcOverheadThreshold(final int warnThreshold, final int infoThreshold, final int debugThreshold) {
+            this.warnThreshold = warnThreshold;
+            this.infoThreshold = infoThreshold;
+            this.debugThreshold = debugThreshold;
+        }
+    }
+
+
+
     static class GcThreshold {
         public final String name;
         public final long warnThreshold;

@@ -102,7 +125,42 @@ public class JvmGcMonitorService extends AbstractLifecycleComponent<JvmGcMonitor
         gcThresholds.putIfAbsent("default", new GcThreshold("default", 10000, 5000, 2000));
         this.gcThresholds = unmodifiableMap(gcThresholds);
 
-        logger.debug("enabled [{}], interval [{}], gc_threshold [{}]", enabled, interval, this.gcThresholds);
+        if (GC_OVERHEAD_WARN_SETTING.get(settings) <= GC_OVERHEAD_INFO_SETTING.get(settings)) {
+            final String message =
+                String.format(
+                    Locale.ROOT,
+                    "[%s] must be greater than [%s] [%d] but was [%d]",
+                    GC_OVERHEAD_WARN_SETTING.getKey(),
+                    GC_OVERHEAD_INFO_SETTING.getKey(),
+                    GC_OVERHEAD_INFO_SETTING.get(settings),
+                    GC_OVERHEAD_WARN_SETTING.get(settings));
+            throw new IllegalArgumentException(message);
+        }
+        if (GC_OVERHEAD_INFO_SETTING.get(settings) <= GC_OVERHEAD_DEBUG_SETTING.get(settings)) {
+            final String message =
+                String.format(
+                    Locale.ROOT,
+                    "[%s] must be greater than [%s] [%d] but was [%d]",
+                    GC_OVERHEAD_INFO_SETTING.getKey(),
+                    GC_OVERHEAD_DEBUG_SETTING.getKey(),
+                    GC_OVERHEAD_DEBUG_SETTING.get(settings),
+                    GC_OVERHEAD_INFO_SETTING.get(settings));
+            throw new IllegalArgumentException(message);
+        }
+
+        this.gcOverheadThreshold = new GcOverheadThreshold(
+            GC_OVERHEAD_WARN_SETTING.get(settings),
+            GC_OVERHEAD_INFO_SETTING.get(settings),
+            GC_OVERHEAD_DEBUG_SETTING.get(settings));
+
+        logger.debug(
+            "enabled [{}], interval [{}], gc_threshold [{}], overhead [{}, {}, {}]",
+            this.enabled,
+            this.interval,
+            this.gcThresholds,
+            this.gcOverheadThreshold.warnThreshold,
+            this.gcOverheadThreshold.infoThreshold,
+            this.gcOverheadThreshold.debugThreshold);
     }
 
     private static TimeValue getValidThreshold(Settings settings, String key, String level) {

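The two checks enforce warn > info > debug (defaults 50 > 25 > 10, each bounded to [0, 100] by intSetting), so a misconfiguration now fails fast at construction. For example, with these hypothetical values:

    Settings bad = Settings.builder()
        .put("monitor.jvm.gc.overhead.warn", 20)
        .put("monitor.jvm.gc.overhead.info", 25)
        .build();
    // -> IllegalArgumentException: [monitor.jvm.gc.overhead.warn] must be greater
    //    than [monitor.jvm.gc.overhead.info] [25] but was [20]
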
@@ -120,15 +178,12 @@ public class JvmGcMonitorService extends AbstractLifecycleComponent<JvmGcMonitor
         return GC_COLLECTOR_PREFIX + key + "." + level;
     }
 
-    private static final String LOG_MESSAGE =
-        "[gc][{}][{}][{}] duration [{}], collections [{}]/[{}], total [{}]/[{}], memory [{}]->[{}]/[{}], all_pools {}";
-
     @Override
     protected void doStart() {
         if (!enabled) {
             return;
         }
-        scheduledFuture = threadPool.scheduleWithFixedDelay(new JvmMonitor(gcThresholds) {
+        scheduledFuture = threadPool.scheduleWithFixedDelay(new JvmMonitor(gcThresholds, gcOverheadThreshold) {
             @Override
             void onMonitorFailure(Throwable t) {
                 logger.debug("failed to monitor", t);

@@ -138,9 +193,17 @@ public class JvmGcMonitorService extends AbstractLifecycleComponent<JvmGcMonitor
             void onSlowGc(final Threshold threshold, final long seq, final SlowGcEvent slowGcEvent) {
                 logSlowGc(logger, threshold, seq, slowGcEvent, JvmGcMonitorService::buildPools);
             }
+
+            @Override
+            void onGcOverhead(final Threshold threshold, final long current, final long elapsed, final long seq) {
+                logGcOverhead(logger, threshold, current, elapsed, seq);
+            }
         }, interval);
     }
 
+    private static final String SLOW_GC_LOG_MESSAGE =
+        "[gc][{}][{}][{}] duration [{}], collections [{}]/[{}], total [{}]/[{}], memory [{}]->[{}]/[{}], all_pools {}";
+
     static void logSlowGc(
         final ESLogger logger,
         final JvmMonitor.Threshold threshold,

@@ -162,7 +225,7 @@ public class JvmGcMonitorService extends AbstractLifecycleComponent<JvmGcMonitor
             case WARN:
                 if (logger.isWarnEnabled()) {
                     logger.warn(
-                        LOG_MESSAGE,
+                        SLOW_GC_LOG_MESSAGE,
                         name,
                         seq,
                         totalGcCollectionCount,

@@ -180,7 +243,7 @@ public class JvmGcMonitorService extends AbstractLifecycleComponent<JvmGcMonitor
             case INFO:
                 if (logger.isInfoEnabled()) {
                     logger.info(
-                        LOG_MESSAGE,
+                        SLOW_GC_LOG_MESSAGE,
                         name,
                         seq,
                         totalGcCollectionCount,

@@ -198,7 +261,7 @@ public class JvmGcMonitorService extends AbstractLifecycleComponent<JvmGcMonitor
             case DEBUG:
                 if (logger.isDebugEnabled()) {
                     logger.debug(
-                        LOG_MESSAGE,
+                        SLOW_GC_LOG_MESSAGE,
                         name,
                         seq,
                         totalGcCollectionCount,

@@ -239,6 +302,33 @@ public class JvmGcMonitorService extends AbstractLifecycleComponent<JvmGcMonitor
         return sb.toString();
     }
 
+    private static final String OVERHEAD_LOG_MESSAGE = "[gc][{}] overhead, spent [{}] collecting in the last [{}]";
+
+    static void logGcOverhead(
+        final ESLogger logger,
+        final JvmMonitor.Threshold threshold,
+        final long current,
+        final long elapsed,
+        final long seq) {
+        switch (threshold) {
+            case WARN:
+                if (logger.isWarnEnabled()) {
+                    logger.warn(OVERHEAD_LOG_MESSAGE, seq, TimeValue.timeValueMillis(current), TimeValue.timeValueMillis(elapsed));
+                }
+                break;
+            case INFO:
+                if (logger.isInfoEnabled()) {
+                    logger.info(OVERHEAD_LOG_MESSAGE, seq, TimeValue.timeValueMillis(current), TimeValue.timeValueMillis(elapsed));
+                }
+                break;
+            case DEBUG:
+                if (logger.isDebugEnabled()) {
+                    logger.debug(OVERHEAD_LOG_MESSAGE, seq, TimeValue.timeValueMillis(current), TimeValue.timeValueMillis(elapsed));
+                }
+                break;
+        }
+    }
+
     @Override
     protected void doStop() {
         if (!enabled) {

@@ -287,16 +377,18 @@ public class JvmGcMonitorService extends AbstractLifecycleComponent<JvmGcMonitor
         private long lastTime = now();
         private JvmStats lastJvmStats = jvmStats();
         private long seq = 0;
-        private final Map<String, GcThreshold> gcThresholds;
+        private final Map<String, JvmGcMonitorService.GcThreshold> gcThresholds;
+        final GcOverheadThreshold gcOverheadThreshold;
 
-        public JvmMonitor(Map<String, GcThreshold> gcThresholds) {
+        public JvmMonitor(final Map<String, GcThreshold> gcThresholds, final GcOverheadThreshold gcOverheadThreshold) {
             this.gcThresholds = Objects.requireNonNull(gcThresholds);
+            this.gcOverheadThreshold = Objects.requireNonNull(gcOverheadThreshold);
         }
 
         @Override
         public void run() {
             try {
-                monitorLongGc();
+                monitorGc();
             } catch (Throwable t) {
                 onMonitorFailure(t);
             }

@@ -304,12 +396,21 @@ public class JvmGcMonitorService extends AbstractLifecycleComponent<JvmGcMonitor
 
         abstract void onMonitorFailure(Throwable t);
 
-        synchronized void monitorLongGc() {
+        synchronized void monitorGc() {
             seq++;
             final long currentTime = now();
             JvmStats currentJvmStats = jvmStats();
 
+            final long elapsed = TimeUnit.NANOSECONDS.toMillis(currentTime - lastTime);
+
+            monitorSlowGc(currentJvmStats, elapsed);
+            monitorGcOverhead(currentJvmStats, elapsed);
+
+            lastTime = currentTime;
+            lastJvmStats = currentJvmStats;
+        }
+
+        final void monitorSlowGc(JvmStats currentJvmStats, long elapsed) {
             for (int i = 0; i < currentJvmStats.getGc().getCollectors().length; i++) {
                 GarbageCollector gc = currentJvmStats.getGc().getCollectors()[i];
                 GarbageCollector prevGc = lastJvmStats.getGc().getCollectors()[i];

@@ -350,8 +451,31 @@ public class JvmGcMonitorService extends AbstractLifecycleComponent<JvmGcMonitor
                         JvmInfo.jvmInfo().getMem().getHeapMax()));
                 }
             }
-            lastTime = currentTime;
-            lastJvmStats = currentJvmStats;
         }
 
+        final void monitorGcOverhead(final JvmStats currentJvmStats, final long elapsed) {
+            long current = 0;
+            for (int i = 0; i < currentJvmStats.getGc().getCollectors().length; i++) {
+                GarbageCollector gc = currentJvmStats.getGc().getCollectors()[i];
+                GarbageCollector prevGc = lastJvmStats.getGc().getCollectors()[i];
+                current += gc.getCollectionTime().millis() - prevGc.getCollectionTime().millis();
+            }
+            checkGcOverhead(current, elapsed, seq);
+        }
+
+        void checkGcOverhead(final long current, final long elapsed, final long seq) {
+            final int fraction = (int) ((100 * current) / (double) elapsed);
+            Threshold overheadThreshold = null;
+            if (fraction >= gcOverheadThreshold.warnThreshold) {
+                overheadThreshold = Threshold.WARN;
+            } else if (fraction >= gcOverheadThreshold.infoThreshold) {
+                overheadThreshold = Threshold.INFO;
+            } else if (fraction >= gcOverheadThreshold.debugThreshold) {
+                overheadThreshold = Threshold.DEBUG;
+            }
+            if (overheadThreshold != null) {
+                onGcOverhead(overheadThreshold, current, elapsed, seq);
+            }
+        }
+
         JvmStats jvmStats() {

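checkGcOverhead turns the collection-time delta and the wall-clock delta into a percentage and picks the highest matching level. A worked example with the default thresholds (warn 50, info 25, debug 10):

    long current = 300;  // ms spent collecting since the last observation
    long elapsed = 1000; // ms of wall clock since the last observation
    int fraction = (int) ((100 * current) / (double) elapsed); // 30
    // 30 >= info (25) but < warn (50), so onGcOverhead(Threshold.INFO, ...) fires,
    // logging "[gc][<seq>] overhead, spent [300ms] collecting in the last [1s]"
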
@@ -364,6 +488,8 @@ public class JvmGcMonitorService extends AbstractLifecycleComponent<JvmGcMonitor
 
         abstract void onSlowGc(final Threshold threshold, final long seq, final SlowGcEvent slowGcEvent);
 
+        abstract void onGcOverhead(final Threshold threshold, final long total, final long elapsed, final long seq);
+
     }
 
 }

@@ -126,6 +126,7 @@ class InstallPluginCommand extends Command {
         "mapper-murmur3",
         "mapper-size",
         "repository-azure",
+        "repository-gcs",
         "repository-hdfs",
         "repository-s3",
         "store-smb",

@@ -29,6 +29,7 @@ import org.elasticsearch.common.io.stream.NamedWriteable;
 import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
 import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.lucene.search.function.ScoreFunction;
+import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.ParseFieldRegistry;
 import org.elasticsearch.index.percolator.PercolatorHighlightSubFetchPhase;

@@ -97,51 +98,51 @@ import org.elasticsearch.indices.query.IndicesQueriesRegistry;
 import org.elasticsearch.search.action.SearchTransportService;
 import org.elasticsearch.search.aggregations.AggregationPhase;
 import org.elasticsearch.search.aggregations.Aggregator;
-import org.elasticsearch.search.aggregations.AggregatorBuilder;
+import org.elasticsearch.search.aggregations.AggregationBuilder;
 import org.elasticsearch.search.aggregations.AggregatorParsers;
-import org.elasticsearch.search.aggregations.bucket.children.ChildrenAggregatorBuilder;
+import org.elasticsearch.search.aggregations.bucket.children.ChildrenAggregationBuilder;
 import org.elasticsearch.search.aggregations.bucket.children.InternalChildren;
-import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregatorBuilder;
+import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregationBuilder;
 import org.elasticsearch.search.aggregations.bucket.filter.InternalFilter;
-import org.elasticsearch.search.aggregations.bucket.filters.FiltersAggregatorBuilder;
+import org.elasticsearch.search.aggregations.bucket.filters.FiltersAggregationBuilder;
 import org.elasticsearch.search.aggregations.bucket.filters.InternalFilters;
-import org.elasticsearch.search.aggregations.bucket.geogrid.GeoGridAggregatorBuilder;
+import org.elasticsearch.search.aggregations.bucket.geogrid.GeoGridAggregationBuilder;
 import org.elasticsearch.search.aggregations.bucket.geogrid.GeoHashGridParser;
 import org.elasticsearch.search.aggregations.bucket.geogrid.InternalGeoHashGrid;
-import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregatorBuilder;
+import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregationBuilder;
 import org.elasticsearch.search.aggregations.bucket.global.InternalGlobal;
-import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregatorBuilder;
+import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
 import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramParser;
-import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregatorBuilder;
+import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder;
 import org.elasticsearch.search.aggregations.bucket.histogram.HistogramParser;
 import org.elasticsearch.search.aggregations.bucket.histogram.InternalHistogram;
 import org.elasticsearch.search.aggregations.bucket.missing.InternalMissing;
-import org.elasticsearch.search.aggregations.bucket.missing.MissingAggregatorBuilder;
+import org.elasticsearch.search.aggregations.bucket.missing.MissingAggregationBuilder;
 import org.elasticsearch.search.aggregations.bucket.missing.MissingParser;
 import org.elasticsearch.search.aggregations.bucket.nested.InternalNested;
 import org.elasticsearch.search.aggregations.bucket.nested.InternalReverseNested;
-import org.elasticsearch.search.aggregations.bucket.nested.NestedAggregatorBuilder;
-import org.elasticsearch.search.aggregations.bucket.nested.ReverseNestedAggregatorBuilder;
+import org.elasticsearch.search.aggregations.bucket.nested.NestedAggregationBuilder;
+import org.elasticsearch.search.aggregations.bucket.nested.ReverseNestedAggregationBuilder;
 import org.elasticsearch.search.aggregations.bucket.range.InternalRange;
-import org.elasticsearch.search.aggregations.bucket.range.RangeAggregatorBuilder;
+import org.elasticsearch.search.aggregations.bucket.range.RangeAggregationBuilder;
 import org.elasticsearch.search.aggregations.bucket.range.RangeParser;
-import org.elasticsearch.search.aggregations.bucket.range.date.DateRangeAggregatorBuilder;
+import org.elasticsearch.search.aggregations.bucket.range.date.DateRangeAggregationBuilder;
 import org.elasticsearch.search.aggregations.bucket.range.date.DateRangeParser;
 import org.elasticsearch.search.aggregations.bucket.range.date.InternalDateRange;
-import org.elasticsearch.search.aggregations.bucket.range.geodistance.GeoDistanceAggregatorBuilder;
+import org.elasticsearch.search.aggregations.bucket.range.geodistance.GeoDistanceAggregationBuilder;
 import org.elasticsearch.search.aggregations.bucket.range.geodistance.GeoDistanceParser;
 import org.elasticsearch.search.aggregations.bucket.range.geodistance.InternalGeoDistance;
-import org.elasticsearch.search.aggregations.bucket.range.ip.IpRangeAggregatorBuilder;
+import org.elasticsearch.search.aggregations.bucket.range.ip.IpRangeAggregationBuilder;
 import org.elasticsearch.search.aggregations.bucket.range.InternalBinaryRange;
 import org.elasticsearch.search.aggregations.bucket.range.ip.IpRangeParser;
-import org.elasticsearch.search.aggregations.bucket.sampler.DiversifiedAggregatorBuilder;
+import org.elasticsearch.search.aggregations.bucket.sampler.DiversifiedAggregationBuilder;
 import org.elasticsearch.search.aggregations.bucket.sampler.DiversifiedSamplerParser;
 import org.elasticsearch.search.aggregations.bucket.sampler.InternalSampler;
-import org.elasticsearch.search.aggregations.bucket.sampler.SamplerAggregatorBuilder;
|
||||
import org.elasticsearch.search.aggregations.bucket.sampler.SamplerAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.bucket.sampler.UnmappedSampler;
|
||||
import org.elasticsearch.search.aggregations.bucket.significant.SignificantLongTerms;
|
||||
import org.elasticsearch.search.aggregations.bucket.significant.SignificantStringTerms;
|
||||
import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsAggregatorBuilder;
|
||||
import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsParser;
|
||||
import org.elasticsearch.search.aggregations.bucket.significant.UnmappedSignificantTerms;
|
||||
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.ChiSquare;
|
||||
|
@ -155,50 +156,50 @@ import org.elasticsearch.search.aggregations.bucket.significant.heuristics.Signi
|
|||
import org.elasticsearch.search.aggregations.bucket.terms.DoubleTerms;
|
||||
import org.elasticsearch.search.aggregations.bucket.terms.LongTerms;
|
||||
import org.elasticsearch.search.aggregations.bucket.terms.StringTerms;
|
||||
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregatorBuilder;
|
||||
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.bucket.terms.TermsParser;
|
||||
import org.elasticsearch.search.aggregations.bucket.terms.UnmappedTerms;
|
||||
import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregatorBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.avg.AvgParser;
|
||||
import org.elasticsearch.search.aggregations.metrics.avg.InternalAvg;
|
||||
import org.elasticsearch.search.aggregations.metrics.cardinality.CardinalityAggregatorBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.cardinality.CardinalityAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.cardinality.CardinalityParser;
|
||||
import org.elasticsearch.search.aggregations.metrics.cardinality.InternalCardinality;
|
||||
import org.elasticsearch.search.aggregations.metrics.geobounds.GeoBoundsAggregatorBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.geobounds.GeoBoundsAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.geobounds.GeoBoundsParser;
|
||||
import org.elasticsearch.search.aggregations.metrics.geobounds.InternalGeoBounds;
|
||||
import org.elasticsearch.search.aggregations.metrics.geocentroid.GeoCentroidAggregatorBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.geocentroid.GeoCentroidAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.geocentroid.GeoCentroidParser;
|
||||
import org.elasticsearch.search.aggregations.metrics.geocentroid.InternalGeoCentroid;
|
||||
import org.elasticsearch.search.aggregations.metrics.max.InternalMax;
|
||||
import org.elasticsearch.search.aggregations.metrics.max.MaxAggregatorBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.max.MaxParser;
|
||||
import org.elasticsearch.search.aggregations.metrics.min.InternalMin;
|
||||
import org.elasticsearch.search.aggregations.metrics.min.MinAggregatorBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.min.MinAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.min.MinParser;
|
||||
import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanksAggregatorBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanksAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanksParser;
|
||||
import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesAggregatorBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesParser;
|
||||
import org.elasticsearch.search.aggregations.metrics.percentiles.hdr.InternalHDRPercentileRanks;
|
||||
import org.elasticsearch.search.aggregations.metrics.percentiles.hdr.InternalHDRPercentiles;
|
||||
import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.InternalTDigestPercentileRanks;
|
||||
import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.InternalTDigestPercentiles;
|
||||
import org.elasticsearch.search.aggregations.metrics.scripted.InternalScriptedMetric;
|
||||
import org.elasticsearch.search.aggregations.metrics.scripted.ScriptedMetricAggregatorBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.scripted.ScriptedMetricAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.stats.InternalStats;
|
||||
import org.elasticsearch.search.aggregations.metrics.stats.StatsAggregatorBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.stats.StatsAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.stats.StatsParser;
|
||||
import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStatsAggregatorBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStatsAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStatsParser;
|
||||
import org.elasticsearch.search.aggregations.metrics.stats.extended.InternalExtendedStats;
|
||||
import org.elasticsearch.search.aggregations.metrics.sum.InternalSum;
|
||||
import org.elasticsearch.search.aggregations.metrics.sum.SumAggregatorBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.sum.SumAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.sum.SumParser;
|
||||
import org.elasticsearch.search.aggregations.metrics.tophits.InternalTopHits;
|
||||
import org.elasticsearch.search.aggregations.metrics.tophits.TopHitsAggregatorBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.tophits.TopHitsAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.valuecount.InternalValueCount;
|
||||
import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCountAggregatorBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCountAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCountParser;
|
||||
import org.elasticsearch.search.aggregations.pipeline.InternalSimpleValue;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
||||
|
@ -290,6 +291,8 @@ public class SearchModule extends AbstractModule {
|
|||
|
||||
private final Settings settings;
|
||||
private final NamedWriteableRegistry namedWriteableRegistry;
|
||||
public static final Setting<Integer> INDICES_MAX_CLAUSE_COUNT_SETTING = Setting.intSetting("indices.query.bool.max_clause_count",
|
||||
1024, 1, Integer.MAX_VALUE, Setting.Property.NodeScope);
|
||||
|
||||
// pkg private so tests can mock
|
||||
Class<? extends SearchService> searchServiceImpl = SearchService.class;
|
||||
|
@ -421,10 +424,10 @@ public class SearchModule extends AbstractModule {
|
|||
* @param aggregationName names by which the aggregation may be parsed. The first name is special because it is the name that the reader
|
||||
* is registered under.
|
||||
*/
|
||||
public <AB extends AggregatorBuilder<AB>> void registerAggregation(Writeable.Reader<AB> reader, Aggregator.Parser aggregationParser,
|
||||
public <AB extends AggregationBuilder<AB>> void registerAggregation(Writeable.Reader<AB> reader, Aggregator.Parser aggregationParser,
|
||||
ParseField aggregationName) {
|
||||
aggregationParserRegistry.register(aggregationParser, aggregationName);
|
||||
namedWriteableRegistry.register(AggregatorBuilder.class, aggregationName.getPreferredName(), reader);
|
||||
namedWriteableRegistry.register(AggregationBuilder.class, aggregationName.getPreferredName(), reader);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -478,55 +481,57 @@ public class SearchModule extends AbstractModule {
|
|||
}
|
||||
|
||||
protected void configureAggs() {
|
||||
registerAggregation(AvgAggregatorBuilder::new, new AvgParser(), AvgAggregatorBuilder.AGGREGATION_NAME_FIELD);
|
||||
registerAggregation(SumAggregatorBuilder::new, new SumParser(), SumAggregatorBuilder.AGGREGATION_NAME_FIELD);
|
||||
registerAggregation(MinAggregatorBuilder::new, new MinParser(), MinAggregatorBuilder.AGGREGATION_NAME_FIELD);
|
||||
registerAggregation(MaxAggregatorBuilder::new, new MaxParser(), MaxAggregatorBuilder.AGGREGATION_NAME_FIELD);
|
||||
registerAggregation(StatsAggregatorBuilder::new, new StatsParser(), StatsAggregatorBuilder.AGGREGATION_NAME_FIELD);
|
||||
registerAggregation(ExtendedStatsAggregatorBuilder::new, new ExtendedStatsParser(),
|
||||
ExtendedStatsAggregatorBuilder.AGGREGATION_NAME_FIELD);
|
||||
registerAggregation(ValueCountAggregatorBuilder::new, new ValueCountParser(), ValueCountAggregatorBuilder.AGGREGATION_NAME_FIELD);
|
||||
registerAggregation(PercentilesAggregatorBuilder::new, new PercentilesParser(),
|
||||
PercentilesAggregatorBuilder.AGGREGATION_NAME_FIELD);
|
||||
registerAggregation(PercentileRanksAggregatorBuilder::new, new PercentileRanksParser(),
|
||||
PercentileRanksAggregatorBuilder.AGGREGATION_NAME_FIELD);
|
||||
registerAggregation(CardinalityAggregatorBuilder::new, new CardinalityParser(),
|
||||
CardinalityAggregatorBuilder.AGGREGATION_NAME_FIELD);
|
||||
registerAggregation(GlobalAggregatorBuilder::new, GlobalAggregatorBuilder::parse, GlobalAggregatorBuilder.AGGREGATION_NAME_FIELD);
|
||||
registerAggregation(MissingAggregatorBuilder::new, new MissingParser(), MissingAggregatorBuilder.AGGREGATION_NAME_FIELD);
|
||||
registerAggregation(FilterAggregatorBuilder::new, FilterAggregatorBuilder::parse, FilterAggregatorBuilder.AGGREGATION_NAME_FIELD);
|
||||
registerAggregation(FiltersAggregatorBuilder::new, FiltersAggregatorBuilder::parse,
|
||||
FiltersAggregatorBuilder.AGGREGATION_NAME_FIELD);
|
||||
registerAggregation(SamplerAggregatorBuilder::new, SamplerAggregatorBuilder::parse,
|
||||
SamplerAggregatorBuilder.AGGREGATION_NAME_FIELD);
|
||||
registerAggregation(DiversifiedAggregatorBuilder::new, new DiversifiedSamplerParser(),
|
||||
DiversifiedAggregatorBuilder.AGGREGATION_NAME_FIELD);
|
||||
registerAggregation(TermsAggregatorBuilder::new, new TermsParser(), TermsAggregatorBuilder.AGGREGATION_NAME_FIELD);
|
||||
registerAggregation(SignificantTermsAggregatorBuilder::new,
|
||||
registerAggregation(AvgAggregationBuilder::new, new AvgParser(), AvgAggregationBuilder.AGGREGATION_NAME_FIELD);
|
||||
registerAggregation(SumAggregationBuilder::new, new SumParser(), SumAggregationBuilder.AGGREGATION_NAME_FIELD);
|
||||
registerAggregation(MinAggregationBuilder::new, new MinParser(), MinAggregationBuilder.AGGREGATION_NAME_FIELD);
|
||||
registerAggregation(MaxAggregationBuilder::new, new MaxParser(), MaxAggregationBuilder.AGGREGATION_NAME_FIELD);
|
||||
registerAggregation(StatsAggregationBuilder::new, new StatsParser(), StatsAggregationBuilder.AGGREGATION_NAME_FIELD);
|
||||
registerAggregation(ExtendedStatsAggregationBuilder::new, new ExtendedStatsParser(),
|
||||
ExtendedStatsAggregationBuilder.AGGREGATION_NAME_FIELD);
|
||||
registerAggregation(ValueCountAggregationBuilder::new, new ValueCountParser(), ValueCountAggregationBuilder.AGGREGATION_NAME_FIELD);
|
||||
registerAggregation(PercentilesAggregationBuilder::new, new PercentilesParser(),
|
||||
PercentilesAggregationBuilder.AGGREGATION_NAME_FIELD);
|
||||
registerAggregation(PercentileRanksAggregationBuilder::new, new PercentileRanksParser(),
|
||||
PercentileRanksAggregationBuilder.AGGREGATION_NAME_FIELD);
|
||||
registerAggregation(CardinalityAggregationBuilder::new, new CardinalityParser(),
|
||||
CardinalityAggregationBuilder.AGGREGATION_NAME_FIELD);
|
||||
registerAggregation(GlobalAggregationBuilder::new, GlobalAggregationBuilder::parse,
|
||||
GlobalAggregationBuilder.AGGREGATION_NAME_FIELD);
|
||||
registerAggregation(MissingAggregationBuilder::new, new MissingParser(), MissingAggregationBuilder.AGGREGATION_NAME_FIELD);
|
||||
registerAggregation(FilterAggregationBuilder::new, FilterAggregationBuilder::parse,
|
||||
FilterAggregationBuilder.AGGREGATION_NAME_FIELD);
|
||||
registerAggregation(FiltersAggregationBuilder::new, FiltersAggregationBuilder::parse,
|
||||
FiltersAggregationBuilder.AGGREGATION_NAME_FIELD);
|
||||
registerAggregation(SamplerAggregationBuilder::new, SamplerAggregationBuilder::parse,
|
||||
SamplerAggregationBuilder.AGGREGATION_NAME_FIELD);
|
||||
registerAggregation(DiversifiedAggregationBuilder::new, new DiversifiedSamplerParser(),
|
||||
DiversifiedAggregationBuilder.AGGREGATION_NAME_FIELD);
|
||||
registerAggregation(TermsAggregationBuilder::new, new TermsParser(), TermsAggregationBuilder.AGGREGATION_NAME_FIELD);
|
||||
registerAggregation(SignificantTermsAggregationBuilder::new,
|
||||
new SignificantTermsParser(significanceHeuristicParserRegistry, queryParserRegistry),
|
||||
SignificantTermsAggregatorBuilder.AGGREGATION_NAME_FIELD);
|
||||
registerAggregation(RangeAggregatorBuilder::new, new RangeParser(), RangeAggregatorBuilder.AGGREGATION_NAME_FIELD);
|
||||
registerAggregation(DateRangeAggregatorBuilder::new, new DateRangeParser(), DateRangeAggregatorBuilder.AGGREGATION_NAME_FIELD);
|
||||
registerAggregation(IpRangeAggregatorBuilder::new, new IpRangeParser(), IpRangeAggregatorBuilder.AGGREGATION_NAME_FIELD);
|
||||
registerAggregation(HistogramAggregatorBuilder::new, new HistogramParser(), HistogramAggregatorBuilder.AGGREGATION_NAME_FIELD);
|
||||
registerAggregation(DateHistogramAggregatorBuilder::new, new DateHistogramParser(),
|
||||
DateHistogramAggregatorBuilder.AGGREGATION_NAME_FIELD);
|
||||
registerAggregation(GeoDistanceAggregatorBuilder::new, new GeoDistanceParser(),
|
||||
GeoDistanceAggregatorBuilder.AGGREGATION_NAME_FIELD);
|
||||
registerAggregation(GeoGridAggregatorBuilder::new, new GeoHashGridParser(), GeoGridAggregatorBuilder.AGGREGATION_NAME_FIELD);
|
||||
registerAggregation(NestedAggregatorBuilder::new, NestedAggregatorBuilder::parse, NestedAggregatorBuilder.AGGREGATION_FIELD_NAME);
|
||||
registerAggregation(ReverseNestedAggregatorBuilder::new, ReverseNestedAggregatorBuilder::parse,
|
||||
ReverseNestedAggregatorBuilder.AGGREGATION_NAME_FIELD);
|
||||
registerAggregation(TopHitsAggregatorBuilder::new, TopHitsAggregatorBuilder::parse,
|
||||
TopHitsAggregatorBuilder.AGGREGATION_NAME_FIELD);
|
||||
registerAggregation(GeoBoundsAggregatorBuilder::new, new GeoBoundsParser(), GeoBoundsAggregatorBuilder.AGGREGATION_NAME_FIED);
|
||||
registerAggregation(GeoCentroidAggregatorBuilder::new, new GeoCentroidParser(),
|
||||
GeoCentroidAggregatorBuilder.AGGREGATION_NAME_FIELD);
|
||||
registerAggregation(ScriptedMetricAggregatorBuilder::new, ScriptedMetricAggregatorBuilder::parse,
|
||||
ScriptedMetricAggregatorBuilder.AGGREGATION_NAME_FIELD);
|
||||
registerAggregation(ChildrenAggregatorBuilder::new, ChildrenAggregatorBuilder::parse,
|
||||
ChildrenAggregatorBuilder.AGGREGATION_NAME_FIELD);
|
||||
|
||||
SignificantTermsAggregationBuilder.AGGREGATION_NAME_FIELD);
|
||||
registerAggregation(RangeAggregationBuilder::new, new RangeParser(), RangeAggregationBuilder.AGGREGATION_NAME_FIELD);
|
||||
registerAggregation(DateRangeAggregationBuilder::new, new DateRangeParser(), DateRangeAggregationBuilder.AGGREGATION_NAME_FIELD);
|
||||
registerAggregation(IpRangeAggregationBuilder::new, new IpRangeParser(), IpRangeAggregationBuilder.AGGREGATION_NAME_FIELD);
|
||||
registerAggregation(HistogramAggregationBuilder::new, new HistogramParser(), HistogramAggregationBuilder.AGGREGATION_NAME_FIELD);
|
||||
registerAggregation(DateHistogramAggregationBuilder::new, new DateHistogramParser(),
|
||||
DateHistogramAggregationBuilder.AGGREGATION_NAME_FIELD);
|
||||
registerAggregation(GeoDistanceAggregationBuilder::new, new GeoDistanceParser(),
|
||||
GeoDistanceAggregationBuilder.AGGREGATION_NAME_FIELD);
|
||||
registerAggregation(GeoGridAggregationBuilder::new, new GeoHashGridParser(), GeoGridAggregationBuilder.AGGREGATION_NAME_FIELD);
|
||||
registerAggregation(NestedAggregationBuilder::new, NestedAggregationBuilder::parse,
|
||||
NestedAggregationBuilder.AGGREGATION_FIELD_NAME);
|
||||
registerAggregation(ReverseNestedAggregationBuilder::new, ReverseNestedAggregationBuilder::parse,
|
||||
ReverseNestedAggregationBuilder.AGGREGATION_NAME_FIELD);
|
||||
registerAggregation(TopHitsAggregationBuilder::new, TopHitsAggregationBuilder::parse,
|
||||
TopHitsAggregationBuilder.AGGREGATION_NAME_FIELD);
|
||||
registerAggregation(GeoBoundsAggregationBuilder::new, new GeoBoundsParser(), GeoBoundsAggregationBuilder.AGGREGATION_NAME_FIED);
|
||||
registerAggregation(GeoCentroidAggregationBuilder::new, new GeoCentroidParser(),
|
||||
GeoCentroidAggregationBuilder.AGGREGATION_NAME_FIELD);
|
||||
registerAggregation(ScriptedMetricAggregationBuilder::new, ScriptedMetricAggregationBuilder::parse,
|
||||
ScriptedMetricAggregationBuilder.AGGREGATION_NAME_FIELD);
|
||||
registerAggregation(ChildrenAggregationBuilder::new, ChildrenAggregationBuilder::parse,
|
||||
ChildrenAggregationBuilder.AGGREGATION_NAME_FIELD);
|
||||
registerPipelineAggregation(DerivativePipelineAggregatorBuilder::new, DerivativePipelineAggregatorBuilder::parse,
|
||||
DerivativePipelineAggregatorBuilder.AGGREGATION_NAME_FIELD);
|
||||
registerPipelineAggregation(MaxBucketPipelineAggregatorBuilder::new, MaxBucketPipelineAggregatorBuilder.PARSER,
|
||||
|
@ -650,8 +655,7 @@ public class SearchModule extends AbstractModule {
|
|||
registerQuery(MatchAllQueryBuilder::new, MatchAllQueryBuilder::fromXContent, MatchAllQueryBuilder.QUERY_NAME_FIELD);
|
||||
registerQuery(QueryStringQueryBuilder::new, QueryStringQueryBuilder::fromXContent, QueryStringQueryBuilder.QUERY_NAME_FIELD);
|
||||
registerQuery(BoostingQueryBuilder::new, BoostingQueryBuilder::fromXContent, BoostingQueryBuilder.QUERY_NAME_FIELD);
|
||||
BooleanQuery.setMaxClauseCount(settings.getAsInt("index.query.bool.max_clause_count",
|
||||
settings.getAsInt("indices.query.bool.max_clause_count", BooleanQuery.getMaxClauseCount())));
|
||||
BooleanQuery.setMaxClauseCount(INDICES_MAX_CLAUSE_COUNT_SETTING.get(settings));
|
||||
registerQuery(BoolQueryBuilder::new, BoolQueryBuilder::fromXContent, BoolQueryBuilder.QUERY_NAME_FIELD);
|
||||
registerQuery(TermQueryBuilder::new, TermQueryBuilder::fromXContent, TermQueryBuilder.QUERY_NAME_FIELD);
|
||||
registerQuery(TermsQueryBuilder::new, TermsQueryBuilder::fromXContent, TermsQueryBuilder.QUERY_NAME_FIELD);
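As the javadoc in the registerAggregation hunk notes, an aggregation may parse under several names while its stream reader is registered only under the first (preferred) name. A simplified, self-contained sketch of that registry shape follows; RegistryDemo and all of its members are illustrative names, not the real SearchModule or NamedWriteableRegistry API.

// A sketch of the two-registry pattern behind registerAggregation: every
// alias resolves to the parser, but only the preferred name maps to the
// stream reader used for wire deserialization. All names are hypothetical.
import java.util.HashMap;
import java.util.Map;

public class RegistryDemo {

    interface Reader<T> {
        T read();
    }

    private final Map<String, Reader<?>> readers = new HashMap<>();
    private final Map<String, String> parsers = new HashMap<>(); // parser stand-in

    <T> void registerAggregation(Reader<T> reader, String parserDescription,
            String preferredName, String... aliases) {
        parsers.put(preferredName, parserDescription);
        for (String alias : aliases) {
            parsers.put(alias, parserDescription); // all names may be parsed
        }
        readers.put(preferredName, reader);        // only the preferred name reads
    }

    public static void main(String[] args) {
        RegistryDemo registry = new RegistryDemo();
        registry.registerAggregation(() -> "avg builder", "AvgParser", "avg", "average");
        System.out.println("parse names:  " + registry.parsers.keySet());  // avg, average
        System.out.println("reader names: " + registry.readers.keySet()); // avg
    }
}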
@@ -36,7 +36,9 @@ import java.util.Objects;
/**
 * A factory that knows how to create an {@link Aggregator} of a specific type.
 */
public abstract class AggregatorBuilder<AB extends AggregatorBuilder<AB>> extends ToXContentToBytes implements NamedWriteable, ToXContent {
public abstract class AggregationBuilder<AB extends AggregationBuilder<AB>>
        extends ToXContentToBytes
        implements NamedWriteable, ToXContent {

    protected String name;
    protected Type type;

@@ -44,12 +46,12 @@ public abstract class AggregatorBuilder<AB extends AggregatorBuilder<AB>> extend
    protected Map<String, Object> metaData;

    /**
     * Constructs a new aggregator factory.
     * Constructs a new aggregation builder.
     *
     * @param name The aggregation name
     * @param type The aggregation type
     */
    public AggregatorBuilder(String name, Type type) {
    public AggregationBuilder(String name, Type type) {
        if (name == null) {
            throw new IllegalArgumentException("[name] must not be null: [" + name + "]");
        }

@@ -63,7 +65,7 @@ public abstract class AggregatorBuilder<AB extends AggregatorBuilder<AB>> extend
    /**
     * Read from a stream.
     */
    protected AggregatorBuilder(StreamInput in, Type type) throws IOException {
    protected AggregationBuilder(StreamInput in, Type type) throws IOException {
        name = in.readString();
        this.type = type;
        factoriesBuilder = new AggregatorFactories.Builder(in);

@@ -84,7 +86,7 @@ public abstract class AggregatorBuilder<AB extends AggregatorBuilder<AB>> extend
     * Add a sub aggregation to this aggregation.
     */
    @SuppressWarnings("unchecked")
    public AB subAggregation(AggregatorBuilder<?> aggregation) {
    public AB subAggregation(AggregationBuilder<?> aggregation) {
        if (aggregation == null) {
            throw new IllegalArgumentException("[aggregation] must not be null: [" + name + "]");
        }

@@ -178,7 +180,7 @@ public abstract class AggregatorBuilder<AB extends AggregatorBuilder<AB>> extend
        if (getClass() != obj.getClass())
            return false;
        @SuppressWarnings("unchecked")
        AggregatorBuilder<AB> other = (AggregatorBuilder<AB>) obj;
        AggregationBuilder<AB> other = (AggregationBuilder<AB>) obj;
        if (!Objects.equals(name, other.name))
            return false;
        if (!Objects.equals(type, other.type))
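The renamed class keeps the self-referential type parameter AB extends AggregationBuilder<AB>, which is what lets methods like subAggregation return the concrete subtype so fluent chains compile without casts. A minimal standalone sketch of that pattern, with illustrative class names that are not the real Elasticsearch types:

// The "curiously recurring" generic pattern from AggregationBuilder<AB>,
// reduced to its essentials. Builder and TermsBuilder are hypothetical names.
abstract class Builder<AB extends Builder<AB>> {
    protected String name;

    @SuppressWarnings("unchecked")
    public AB name(String name) {
        this.name = name;
        return (AB) this; // safe as long as subclasses close the recursion
    }
}

final class TermsBuilder extends Builder<TermsBuilder> {
    private String field;

    public TermsBuilder field(String field) {
        this.field = field;
        return this;
    }
}

public class RecurringGenericDemo {
    public static void main(String[] args) {
        // name() returns TermsBuilder, not Builder, so field() chains cleanly.
        TermsBuilder b = new TermsBuilder().name("genres").field("genre");
        System.out.println("built a " + b.getClass().getSimpleName());
    }
}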
@@ -22,65 +22,65 @@ import org.elasticsearch.common.geo.GeoDistance;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.search.aggregations.bucket.children.Children;
import org.elasticsearch.search.aggregations.bucket.children.ChildrenAggregatorBuilder;
import org.elasticsearch.search.aggregations.bucket.children.ChildrenAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.filter.Filter;
import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregatorBuilder;
import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.filters.Filters;
import org.elasticsearch.search.aggregations.bucket.filters.FiltersAggregator.KeyedFilter;
import org.elasticsearch.search.aggregations.bucket.filters.FiltersAggregatorBuilder;
import org.elasticsearch.search.aggregations.bucket.geogrid.GeoGridAggregatorBuilder;
import org.elasticsearch.search.aggregations.bucket.filters.FiltersAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.geogrid.GeoGridAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.geogrid.GeoHashGrid;
import org.elasticsearch.search.aggregations.bucket.global.Global;
import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregatorBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregatorBuilder;
import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregatorBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.missing.Missing;
import org.elasticsearch.search.aggregations.bucket.missing.MissingAggregatorBuilder;
import org.elasticsearch.search.aggregations.bucket.missing.MissingAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.nested.Nested;
import org.elasticsearch.search.aggregations.bucket.nested.NestedAggregatorBuilder;
import org.elasticsearch.search.aggregations.bucket.nested.NestedAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.nested.ReverseNested;
import org.elasticsearch.search.aggregations.bucket.nested.ReverseNestedAggregatorBuilder;
import org.elasticsearch.search.aggregations.bucket.nested.ReverseNestedAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.range.Range;
import org.elasticsearch.search.aggregations.bucket.range.RangeAggregatorBuilder;
import org.elasticsearch.search.aggregations.bucket.range.date.DateRangeAggregatorBuilder;
import org.elasticsearch.search.aggregations.bucket.range.geodistance.GeoDistanceAggregatorBuilder;
import org.elasticsearch.search.aggregations.bucket.range.ip.IpRangeAggregatorBuilder;
import org.elasticsearch.search.aggregations.bucket.sampler.DiversifiedAggregatorBuilder;
import org.elasticsearch.search.aggregations.bucket.range.RangeAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.range.date.DateRangeAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.range.geodistance.GeoDistanceAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.range.ip.IpRangeAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.sampler.DiversifiedAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.sampler.Sampler;
import org.elasticsearch.search.aggregations.bucket.sampler.SamplerAggregatorBuilder;
import org.elasticsearch.search.aggregations.bucket.sampler.SamplerAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.significant.SignificantTerms;
import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsAggregatorBuilder;
import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregatorBuilder;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.avg.Avg;
import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregatorBuilder;
import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.cardinality.Cardinality;
import org.elasticsearch.search.aggregations.metrics.cardinality.CardinalityAggregatorBuilder;
import org.elasticsearch.search.aggregations.metrics.cardinality.CardinalityAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.geobounds.GeoBounds;
import org.elasticsearch.search.aggregations.metrics.geobounds.GeoBoundsAggregatorBuilder;
import org.elasticsearch.search.aggregations.metrics.geobounds.GeoBoundsAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.geocentroid.GeoCentroid;
import org.elasticsearch.search.aggregations.metrics.geocentroid.GeoCentroidAggregatorBuilder;
import org.elasticsearch.search.aggregations.metrics.geocentroid.GeoCentroidAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.max.Max;
import org.elasticsearch.search.aggregations.metrics.max.MaxAggregatorBuilder;
import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.min.Min;
import org.elasticsearch.search.aggregations.metrics.min.MinAggregatorBuilder;
import org.elasticsearch.search.aggregations.metrics.min.MinAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanks;
import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanksAggregatorBuilder;
import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanksAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.percentiles.Percentiles;
import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesAggregatorBuilder;
import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.scripted.ScriptedMetric;
import org.elasticsearch.search.aggregations.metrics.scripted.ScriptedMetricAggregatorBuilder;
import org.elasticsearch.search.aggregations.metrics.scripted.ScriptedMetricAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.stats.Stats;
import org.elasticsearch.search.aggregations.metrics.stats.StatsAggregatorBuilder;
import org.elasticsearch.search.aggregations.metrics.stats.StatsAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStats;
import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStatsAggregatorBuilder;
import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStatsAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.sum.Sum;
import org.elasticsearch.search.aggregations.metrics.sum.SumAggregatorBuilder;
import org.elasticsearch.search.aggregations.metrics.sum.SumAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.tophits.TopHits;
import org.elasticsearch.search.aggregations.metrics.tophits.TopHitsAggregatorBuilder;
import org.elasticsearch.search.aggregations.metrics.tophits.TopHitsAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCount;
import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCountAggregatorBuilder;
import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCountAggregationBuilder;

/**
 * Utility class to create aggregations.

@@ -93,234 +93,234 @@ public class AggregationBuilders {
    /**
     * Create a new {@link ValueCount} aggregation with the given name.
     */
    public static ValueCountAggregatorBuilder count(String name) {
        return new ValueCountAggregatorBuilder(name, null);
    public static ValueCountAggregationBuilder count(String name) {
        return new ValueCountAggregationBuilder(name, null);
    }

    /**
     * Create a new {@link Avg} aggregation with the given name.
     */
    public static AvgAggregatorBuilder avg(String name) {
        return new AvgAggregatorBuilder(name);
    public static AvgAggregationBuilder avg(String name) {
        return new AvgAggregationBuilder(name);
    }

    /**
     * Create a new {@link Max} aggregation with the given name.
     */
    public static MaxAggregatorBuilder max(String name) {
        return new MaxAggregatorBuilder(name);
    public static MaxAggregationBuilder max(String name) {
        return new MaxAggregationBuilder(name);
    }

    /**
     * Create a new {@link Min} aggregation with the given name.
     */
    public static MinAggregatorBuilder min(String name) {
        return new MinAggregatorBuilder(name);
    public static MinAggregationBuilder min(String name) {
        return new MinAggregationBuilder(name);
    }

    /**
     * Create a new {@link Sum} aggregation with the given name.
     */
    public static SumAggregatorBuilder sum(String name) {
        return new SumAggregatorBuilder(name);
    public static SumAggregationBuilder sum(String name) {
        return new SumAggregationBuilder(name);
    }

    /**
     * Create a new {@link Stats} aggregation with the given name.
     */
    public static StatsAggregatorBuilder stats(String name) {
        return new StatsAggregatorBuilder(name);
    public static StatsAggregationBuilder stats(String name) {
        return new StatsAggregationBuilder(name);
    }

    /**
     * Create a new {@link ExtendedStats} aggregation with the given name.
     */
    public static ExtendedStatsAggregatorBuilder extendedStats(String name) {
        return new ExtendedStatsAggregatorBuilder(name);
    public static ExtendedStatsAggregationBuilder extendedStats(String name) {
        return new ExtendedStatsAggregationBuilder(name);
    }

    /**
     * Create a new {@link Filter} aggregation with the given name.
     */
    public static FilterAggregatorBuilder filter(String name, QueryBuilder filter) {
        return new FilterAggregatorBuilder(name, filter);
    public static FilterAggregationBuilder filter(String name, QueryBuilder filter) {
        return new FilterAggregationBuilder(name, filter);
    }

    /**
     * Create a new {@link Filters} aggregation with the given name.
     */
    public static FiltersAggregatorBuilder filters(String name, KeyedFilter... filters) {
        return new FiltersAggregatorBuilder(name, filters);
    public static FiltersAggregationBuilder filters(String name, KeyedFilter... filters) {
        return new FiltersAggregationBuilder(name, filters);
    }

    /**
     * Create a new {@link Filters} aggregation with the given name.
     */
    public static FiltersAggregatorBuilder filters(String name, QueryBuilder... filters) {
        return new FiltersAggregatorBuilder(name, filters);
    public static FiltersAggregationBuilder filters(String name, QueryBuilder... filters) {
        return new FiltersAggregationBuilder(name, filters);
    }

    /**
     * Create a new {@link Sampler} aggregation with the given name.
     */
    public static SamplerAggregatorBuilder sampler(String name) {
        return new SamplerAggregatorBuilder(name);
    public static SamplerAggregationBuilder sampler(String name) {
        return new SamplerAggregationBuilder(name);
    }

    /**
     * Create a new {@link Sampler} aggregation with the given name.
     */
    public static DiversifiedAggregatorBuilder diversifiedSampler(String name) {
        return new DiversifiedAggregatorBuilder(name);
    public static DiversifiedAggregationBuilder diversifiedSampler(String name) {
        return new DiversifiedAggregationBuilder(name);
    }

    /**
     * Create a new {@link Global} aggregation with the given name.
     */
    public static GlobalAggregatorBuilder global(String name) {
        return new GlobalAggregatorBuilder(name);
    public static GlobalAggregationBuilder global(String name) {
        return new GlobalAggregationBuilder(name);
    }

    /**
     * Create a new {@link Missing} aggregation with the given name.
     */
    public static MissingAggregatorBuilder missing(String name) {
        return new MissingAggregatorBuilder(name, null);
    public static MissingAggregationBuilder missing(String name) {
        return new MissingAggregationBuilder(name, null);
    }

    /**
     * Create a new {@link Nested} aggregation with the given name.
     */
    public static NestedAggregatorBuilder nested(String name, String path) {
        return new NestedAggregatorBuilder(name, path);
    public static NestedAggregationBuilder nested(String name, String path) {
        return new NestedAggregationBuilder(name, path);
    }

    /**
     * Create a new {@link ReverseNested} aggregation with the given name.
     */
    public static ReverseNestedAggregatorBuilder reverseNested(String name) {
        return new ReverseNestedAggregatorBuilder(name);
    public static ReverseNestedAggregationBuilder reverseNested(String name) {
        return new ReverseNestedAggregationBuilder(name);
    }

    /**
     * Create a new {@link Children} aggregation with the given name.
     */
    public static ChildrenAggregatorBuilder children(String name, String childType) {
        return new ChildrenAggregatorBuilder(name, childType);
    public static ChildrenAggregationBuilder children(String name, String childType) {
        return new ChildrenAggregationBuilder(name, childType);
    }

    /**
     * Create a new {@link GeoDistance} aggregation with the given name.
     */
    public static GeoDistanceAggregatorBuilder geoDistance(String name, GeoPoint origin) {
        return new GeoDistanceAggregatorBuilder(name, origin);
    public static GeoDistanceAggregationBuilder geoDistance(String name, GeoPoint origin) {
        return new GeoDistanceAggregationBuilder(name, origin);
    }

    /**
     * Create a new {@link Histogram} aggregation with the given name.
     */
    public static HistogramAggregatorBuilder histogram(String name) {
        return new HistogramAggregatorBuilder(name);
    public static HistogramAggregationBuilder histogram(String name) {
        return new HistogramAggregationBuilder(name);
    }

    /**
     * Create a new {@link GeoHashGrid} aggregation with the given name.
     */
    public static GeoGridAggregatorBuilder geohashGrid(String name) {
        return new GeoGridAggregatorBuilder(name);
    public static GeoGridAggregationBuilder geohashGrid(String name) {
        return new GeoGridAggregationBuilder(name);
    }

    /**
     * Create a new {@link SignificantTerms} aggregation with the given name.
     */
    public static SignificantTermsAggregatorBuilder significantTerms(String name) {
        return new SignificantTermsAggregatorBuilder(name, null);
    public static SignificantTermsAggregationBuilder significantTerms(String name) {
        return new SignificantTermsAggregationBuilder(name, null);
    }

    /**
     * Create a new {@link DateHistogramAggregatorBuilder} aggregation with the given
     * Create a new {@link DateHistogramAggregationBuilder} aggregation with the given
     * name.
     */
    public static DateHistogramAggregatorBuilder dateHistogram(String name) {
        return new DateHistogramAggregatorBuilder(name);
    public static DateHistogramAggregationBuilder dateHistogram(String name) {
        return new DateHistogramAggregationBuilder(name);
    }

    /**
     * Create a new {@link Range} aggregation with the given name.
     */
    public static RangeAggregatorBuilder range(String name) {
        return new RangeAggregatorBuilder(name);
    public static RangeAggregationBuilder range(String name) {
        return new RangeAggregationBuilder(name);
    }

    /**
     * Create a new {@link DateRangeAggregatorBuilder} aggregation with the
     * Create a new {@link DateRangeAggregationBuilder} aggregation with the
     * given name.
     */
    public static DateRangeAggregatorBuilder dateRange(String name) {
        return new DateRangeAggregatorBuilder(name);
    public static DateRangeAggregationBuilder dateRange(String name) {
        return new DateRangeAggregationBuilder(name);
    }

    /**
     * Create a new {@link IpRangeAggregatorBuilder} aggregation with the
     * Create a new {@link IpRangeAggregationBuilder} aggregation with the
     * given name.
     */
    public static IpRangeAggregatorBuilder ipRange(String name) {
        return new IpRangeAggregatorBuilder(name);
    public static IpRangeAggregationBuilder ipRange(String name) {
        return new IpRangeAggregationBuilder(name);
    }

    /**
     * Create a new {@link Terms} aggregation with the given name.
     */
    public static TermsAggregatorBuilder terms(String name) {
        return new TermsAggregatorBuilder(name, null);
    public static TermsAggregationBuilder terms(String name) {
        return new TermsAggregationBuilder(name, null);
    }

    /**
     * Create a new {@link Percentiles} aggregation with the given name.
     */
    public static PercentilesAggregatorBuilder percentiles(String name) {
        return new PercentilesAggregatorBuilder(name);
    public static PercentilesAggregationBuilder percentiles(String name) {
        return new PercentilesAggregationBuilder(name);
    }

    /**
     * Create a new {@link PercentileRanks} aggregation with the given name.
     */
    public static PercentileRanksAggregatorBuilder percentileRanks(String name) {
        return new PercentileRanksAggregatorBuilder(name);
    public static PercentileRanksAggregationBuilder percentileRanks(String name) {
        return new PercentileRanksAggregationBuilder(name);
    }

    /**
     * Create a new {@link Cardinality} aggregation with the given name.
     */
    public static CardinalityAggregatorBuilder cardinality(String name) {
        return new CardinalityAggregatorBuilder(name, null);
    public static CardinalityAggregationBuilder cardinality(String name) {
        return new CardinalityAggregationBuilder(name, null);
    }

    /**
     * Create a new {@link TopHits} aggregation with the given name.
     */
    public static TopHitsAggregatorBuilder topHits(String name) {
        return new TopHitsAggregatorBuilder(name);
    public static TopHitsAggregationBuilder topHits(String name) {
        return new TopHitsAggregationBuilder(name);
    }

    /**
     * Create a new {@link GeoBounds} aggregation with the given name.
     */
    public static GeoBoundsAggregatorBuilder geoBounds(String name) {
        return new GeoBoundsAggregatorBuilder(name);
    public static GeoBoundsAggregationBuilder geoBounds(String name) {
        return new GeoBoundsAggregationBuilder(name);
    }

    /**
     * Create a new {@link GeoCentroid} aggregation with the given name.
     */
    public static GeoCentroidAggregatorBuilder geoCentroid(String name) {
        return new GeoCentroidAggregatorBuilder(name);
    public static GeoCentroidAggregationBuilder geoCentroid(String name) {
        return new GeoCentroidAggregationBuilder(name);
    }

    /**
     * Create a new {@link ScriptedMetric} aggregation with the given name.
     */
    public static ScriptedMetricAggregatorBuilder scriptedMetric(String name) {
        return new ScriptedMetricAggregatorBuilder(name);
    public static ScriptedMetricAggregationBuilder scriptedMetric(String name) {
        return new ScriptedMetricAggregationBuilder(name);
    }
}
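Client code keeps using the same static factories after the rename; only the returned types change to the *AggregationBuilder names. A short usage sketch against the renamed API, where the field names and the enclosing demo class are illustrative:

import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;

public class AggregationBuildersUsage {
    public static void main(String[] args) {
        // A terms bucket over a hypothetical "genre" field, with an average
        // sub-aggregation; terms() and avg() come straight from the factories above.
        TermsAggregationBuilder byGenre = AggregationBuilders.terms("by_genre")
                .field("genre")
                .subAggregation(AggregationBuilders.avg("avg_rating").field("rating"));
        System.out.println(byGenre); // renders the aggregation as JSON
    }
}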
@@ -42,7 +42,7 @@ public abstract class Aggregator extends BucketCollector implements Releasable {
    /**
     * Parses the aggregation request and creates the appropriate aggregator factory for it.
     *
     * @see AggregatorBuilder
     * @see AggregationBuilder
     */
    @FunctionalInterface
    public interface Parser {

@@ -55,7 +55,7 @@ public abstract class Aggregator extends BucketCollector implements Releasable {
         * @return The resolved aggregator factory or {@code null} in case the aggregation should be skipped
         * @throws java.io.IOException When parsing fails
         */
        AggregatorBuilder<?> parse(String aggregationName, QueryParseContext context) throws IOException;
        AggregationBuilder<?> parse(String aggregationName, QueryParseContext context) throws IOException;
    }

    /**
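Because Parser is a @FunctionalInterface with a single parse method, registration sites can pass plain method references such as GlobalAggregationBuilder::parse, as seen in configureAggs() above. A self-contained sketch of that pattern; the Parser stand-in below uses simplified types (String in place of QueryParseContext, Object in place of AggregationBuilder<?>):

import java.io.IOException;

public class ParserReferenceDemo {

    // Stand-in for Aggregator.Parser: one abstract method, so any method
    // reference or lambda with a matching signature implements it.
    @FunctionalInterface
    interface Parser {
        Object parse(String aggregationName, String context) throws IOException;
    }

    static Object parseGlobal(String aggregationName, String context) {
        return "global aggregation named [" + aggregationName + "]";
    }

    public static void main(String[] args) throws IOException {
        Parser parser = ParserReferenceDemo::parseGlobal; // like GlobalAggregationBuilder::parse
        System.out.println(parser.parse("all_docs", "{}"));
    }
}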
@@ -124,7 +124,7 @@ public class AggregatorFactories {

    public static class Builder extends ToXContentToBytes implements Writeable {
        private final Set<String> names = new HashSet<>();
        private final List<AggregatorBuilder<?>> aggregatorBuilders = new ArrayList<>();
        private final List<AggregationBuilder<?>> aggregationBuilders = new ArrayList<>();
        private final List<PipelineAggregatorBuilder<?>> pipelineAggregatorBuilders = new ArrayList<>();
        private boolean skipResolveOrder;

@@ -140,7 +140,7 @@ public class AggregatorFactories {
        public Builder(StreamInput in) throws IOException {
            int factoriesSize = in.readVInt();
            for (int i = 0; i < factoriesSize; i++) {
                addAggregator(in.readNamedWriteable(AggregatorBuilder.class));
                addAggregator(in.readNamedWriteable(AggregationBuilder.class));
            }
            int pipelineFactoriesSize = in.readVInt();
            for (int i = 0; i < pipelineFactoriesSize; i++) {

@@ -150,8 +150,8 @@ public class AggregatorFactories {

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            out.writeVInt(this.aggregatorBuilders.size());
            for (AggregatorBuilder<?> factory : aggregatorBuilders) {
            out.writeVInt(this.aggregationBuilders.size());
            for (AggregationBuilder<?> factory : aggregationBuilders) {
                out.writeNamedWriteable(factory);
            }
            out.writeVInt(this.pipelineAggregatorBuilders.size());

@@ -164,11 +164,11 @@ public class AggregatorFactories {
            throw new UnsupportedOperationException("This needs to be removed");
        }

        public Builder addAggregator(AggregatorBuilder<?> factory) {
        public Builder addAggregator(AggregationBuilder<?> factory) {
            if (!names.add(factory.name)) {
                throw new IllegalArgumentException("Two sibling aggregations cannot have the same name: [" + factory.name + "]");
            }
            aggregatorBuilders.add(factory);
            aggregationBuilders.add(factory);
            return this;
        }

@@ -186,30 +186,30 @@ public class AggregatorFactories {
        }

        public AggregatorFactories build(AggregationContext context, AggregatorFactory<?> parent) throws IOException {
            if (aggregatorBuilders.isEmpty() && pipelineAggregatorBuilders.isEmpty()) {
            if (aggregationBuilders.isEmpty() && pipelineAggregatorBuilders.isEmpty()) {
                return EMPTY;
            }
            List<PipelineAggregatorBuilder<?>> orderedpipelineAggregators = null;
            if (skipResolveOrder) {
                orderedpipelineAggregators = new ArrayList<>(pipelineAggregatorBuilders);
            } else {
                orderedpipelineAggregators = resolvePipelineAggregatorOrder(this.pipelineAggregatorBuilders, this.aggregatorBuilders);
                orderedpipelineAggregators = resolvePipelineAggregatorOrder(this.pipelineAggregatorBuilders, this.aggregationBuilders);
            }
            AggregatorFactory<?>[] aggFactories = new AggregatorFactory<?>[aggregatorBuilders.size()];
            for (int i = 0; i < aggregatorBuilders.size(); i++) {
                aggFactories[i] = aggregatorBuilders.get(i).build(context, parent);
            AggregatorFactory<?>[] aggFactories = new AggregatorFactory<?>[aggregationBuilders.size()];
            for (int i = 0; i < aggregationBuilders.size(); i++) {
                aggFactories[i] = aggregationBuilders.get(i).build(context, parent);
            }
            return new AggregatorFactories(parent, aggFactories, orderedpipelineAggregators);
        }

        private List<PipelineAggregatorBuilder<?>> resolvePipelineAggregatorOrder(
                List<PipelineAggregatorBuilder<?>> pipelineAggregatorBuilders, List<AggregatorBuilder<?>> aggBuilders) {
                List<PipelineAggregatorBuilder<?>> pipelineAggregatorBuilders, List<AggregationBuilder<?>> aggBuilders) {
            Map<String, PipelineAggregatorBuilder<?>> pipelineAggregatorBuildersMap = new HashMap<>();
            for (PipelineAggregatorBuilder<?> builder : pipelineAggregatorBuilders) {
                pipelineAggregatorBuildersMap.put(builder.getName(), builder);
            }
            Map<String, AggregatorBuilder<?>> aggBuildersMap = new HashMap<>();
            for (AggregatorBuilder<?> aggBuilder : aggBuilders) {
            Map<String, AggregationBuilder<?>> aggBuildersMap = new HashMap<>();
            for (AggregationBuilder<?> aggBuilder : aggBuilders) {
                aggBuildersMap.put(aggBuilder.name, aggBuilder);
            }
            List<PipelineAggregatorBuilder<?>> orderedPipelineAggregatorrs = new LinkedList<>();

@@ -223,7 +223,7 @@ public class AggregatorFactories {
            return orderedPipelineAggregatorrs;
        }

        private void resolvePipelineAggregatorOrder(Map<String, AggregatorBuilder<?>> aggBuildersMap,
        private void resolvePipelineAggregatorOrder(Map<String, AggregationBuilder<?>> aggBuildersMap,
                Map<String, PipelineAggregatorBuilder<?>> pipelineAggregatorBuildersMap,
                List<PipelineAggregatorBuilder<?>> orderedPipelineAggregators, List<PipelineAggregatorBuilder<?>> unmarkedBuilders,
                Set<PipelineAggregatorBuilder<?>> temporarilyMarked, PipelineAggregatorBuilder<?> builder) {

@@ -238,7 +238,7 @@ public class AggregatorFactories {
            if (bucketsPath.equals("_count") || bucketsPath.equals("_key")) {
                continue;
            } else if (aggBuildersMap.containsKey(firstAggName)) {
                AggregatorBuilder<?> aggBuilder = aggBuildersMap.get(firstAggName);
                AggregationBuilder<?> aggBuilder = aggBuildersMap.get(firstAggName);
                for (int i = 1; i < bucketsPathElements.size(); i++) {
                    PathElement pathElement = bucketsPathElements.get(i);
                    String aggName = pathElement.name;

@@ -247,9 +247,9 @@ public class AggregatorFactories {
                    } else {
                        // Check the non-pipeline sub-aggregator
                        // factories
                        AggregatorBuilder<?>[] subBuilders = aggBuilder.factoriesBuilder.getAggregatorFactories();
                        AggregationBuilder<?>[] subBuilders = aggBuilder.factoriesBuilder.getAggregatorFactories();
                        boolean foundSubBuilder = false;
                        for (AggregatorBuilder<?> subBuilder : subBuilders) {
                        for (AggregationBuilder<?> subBuilder : subBuilders) {
                            if (aggName.equals(subBuilder.name)) {
                                aggBuilder = subBuilder;
                                foundSubBuilder = true;

@@ -289,8 +289,8 @@ public class AggregatorFactories {
            }
        }

        AggregatorBuilder<?>[] getAggregatorFactories() {
            return this.aggregatorBuilders.toArray(new AggregatorBuilder<?>[this.aggregatorBuilders.size()]);
        AggregationBuilder<?>[] getAggregatorFactories() {
            return this.aggregationBuilders.toArray(new AggregationBuilder<?>[this.aggregationBuilders.size()]);
        }

        List<PipelineAggregatorBuilder<?>> getPipelineAggregatorFactories() {

@@ -298,14 +298,14 @@ public class AggregatorFactories {
        }

        public int count() {
            return aggregatorBuilders.size() + pipelineAggregatorBuilders.size();
            return aggregationBuilders.size() + pipelineAggregatorBuilders.size();
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            builder.startObject();
            if (aggregatorBuilders != null) {
                for (AggregatorBuilder<?> subAgg : aggregatorBuilders) {
            if (aggregationBuilders != null) {
                for (AggregationBuilder<?> subAgg : aggregationBuilders) {
                    subAgg.toXContent(builder, params);
                }
            }

@@ -320,7 +320,7 @@ public class AggregatorFactories {

        @Override
        public int hashCode() {
            return Objects.hash(aggregatorBuilders, pipelineAggregatorBuilders);
            return Objects.hash(aggregationBuilders, pipelineAggregatorBuilders);
        }

        @Override

@@ -330,7 +330,7 @@ public class AggregatorFactories {
            if (getClass() != obj.getClass())
                return false;
            Builder other = (Builder) obj;
            if (!Objects.equals(aggregatorBuilders, other.aggregatorBuilders))
            if (!Objects.equals(aggregationBuilders, other.aggregationBuilders))
                return false;
            if (!Objects.equals(pipelineAggregatorBuilders, other.pipelineAggregatorBuilders))
                return false;
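resolvePipelineAggregatorOrder is a depth-first dependency ordering over buckets_path references, with the temporarilyMarked set guarding against cycles. A stripped-down, self-contained sketch of that idea, with names and data shapes simplified (a plain dependency map stands in for the builder maps above):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;

public class PipelineOrderDemo {

    // Orders names so every entry appears after the entries it depends on,
    // mirroring the buckets_path resolution in AggregatorFactories.Builder.
    static List<String> order(Map<String, List<String>> dependsOn) {
        List<String> ordered = new ArrayList<>();
        Set<String> done = new HashSet<>();
        Set<String> temporarilyMarked = new HashSet<>(); // cycle detection
        for (String name : dependsOn.keySet()) {
            visit(name, dependsOn, ordered, done, temporarilyMarked);
        }
        return ordered;
    }

    static void visit(String name, Map<String, List<String>> dependsOn, List<String> ordered,
            Set<String> done, Set<String> temporarilyMarked) {
        if (done.contains(name)) {
            return;
        }
        if (!temporarilyMarked.add(name)) {
            throw new IllegalStateException("cyclic buckets_path involving [" + name + "]");
        }
        for (String dependency : dependsOn.getOrDefault(name, Collections.emptyList())) {
            visit(dependency, dependsOn, ordered, done, temporarilyMarked);
        }
        temporarilyMarked.remove(name);
        done.add(name);
        ordered.add(name); // all dependencies are already in the list
    }

    public static void main(String[] args) {
        Map<String, List<String>> deps = new LinkedHashMap<>();
        deps.put("second_derivative", Arrays.asList("first_derivative"));
        deps.put("first_derivative", Collections.emptyList());
        System.out.println(order(deps)); // [first_derivative, second_derivative]
    }
}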
@@ -104,7 +104,7 @@ public class AggregatorParsers {
                    + token + "], expected a [" + XContentParser.Token.START_OBJECT + "].");
        }

        AggregatorBuilder<?> aggFactory = null;
        AggregationBuilder<?> aggFactory = null;
        PipelineAggregatorBuilder<?> pipelineAggregatorFactory = null;
        AggregatorFactories.Builder subFactories = null;
@@ -36,7 +36,7 @@ import org.elasticsearch.search.aggregations.support.AggregationContext;
 import org.elasticsearch.search.aggregations.support.FieldContext;
 import org.elasticsearch.search.aggregations.support.ValueType;
 import org.elasticsearch.search.aggregations.support.ValuesSource.Bytes.ParentChild;
-import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder;
+import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
 import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
 import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
 import org.elasticsearch.search.aggregations.support.ValuesSourceType;
@@ -44,7 +44,7 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceType;
 import java.io.IOException;
 import java.util.Objects;

-public class ChildrenAggregatorBuilder extends ValuesSourceAggregatorBuilder<ParentChild, ChildrenAggregatorBuilder> {
+public class ChildrenAggregationBuilder extends ValuesSourceAggregationBuilder<ParentChild, ChildrenAggregationBuilder> {
     public static final String NAME = InternalChildren.TYPE.name();
     public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME);

@@ -59,7 +59,7 @@ public class ChildrenAggregatorBuilder extends ValuesSourceAggregatorBuilder<Par
      * @param childType
      *            the type of children documents
      */
-    public ChildrenAggregatorBuilder(String name, String childType) {
+    public ChildrenAggregationBuilder(String name, String childType) {
         super(name, InternalChildren.TYPE, ValuesSourceType.BYTES, ValueType.STRING);
         if (childType == null) {
             throw new IllegalArgumentException("[childType] must not be null: [" + name + "]");
@@ -70,7 +70,7 @@ public class ChildrenAggregatorBuilder extends ValuesSourceAggregatorBuilder<Par
     /**
      * Read from a stream.
      */
-    public ChildrenAggregatorBuilder(StreamInput in) throws IOException {
+    public ChildrenAggregationBuilder(StreamInput in) throws IOException {
         super(in, InternalChildren.TYPE, ValuesSourceType.BYTES, ValueType.STRING);
         childType = in.readString();
     }
@@ -121,7 +121,7 @@ public class ChildrenAggregatorBuilder extends ValuesSourceAggregatorBuilder<Par
         return builder;
     }

-    public static ChildrenAggregatorBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
+    public static ChildrenAggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
         String childType = null;

         XContentParser.Token token;
@@ -148,7 +148,7 @@ public class ChildrenAggregatorBuilder extends ValuesSourceAggregatorBuilder<Par
         }


-        return new ChildrenAggregatorBuilder(aggregationName, childType);
+        return new ChildrenAggregationBuilder(aggregationName, childType);
     }

     @Override
@@ -158,7 +158,7 @@ public class ChildrenAggregatorBuilder extends ValuesSourceAggregatorBuilder<Par

     @Override
     protected boolean innerEquals(Object obj) {
-        ChildrenAggregatorBuilder other = (ChildrenAggregatorBuilder) obj;
+        ChildrenAggregationBuilder other = (ChildrenAggregationBuilder) obj;
         return Objects.equals(childType, other.childType);
     }

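For reference, a minimal usage sketch of the class under its new name; the aggregation name and child type below are hypothetical, not taken from the commit:

    // Hypothetical names; the (name, childType) constructor is the one shown above.
    ChildrenAggregationBuilder children = new ChildrenAggregationBuilder("to_answers", "answer");
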
@@ -24,12 +24,11 @@ import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.index.query.EmptyQueryBuilder;
 import org.elasticsearch.index.query.MatchAllQueryBuilder;
 import org.elasticsearch.index.query.QueryBuilder;
 import org.elasticsearch.index.query.QueryParseContext;
-import org.elasticsearch.search.aggregations.AggregatorBuilder;
+import org.elasticsearch.search.aggregations.AggregationBuilder;
 import org.elasticsearch.search.aggregations.AggregatorFactories;
 import org.elasticsearch.search.aggregations.AggregatorFactory;
 import org.elasticsearch.search.aggregations.support.AggregationContext;
@@ -37,7 +36,7 @@ import org.elasticsearch.search.aggregations.support.AggregationContext;
 import java.io.IOException;
 import java.util.Objects;

-public class FilterAggregatorBuilder extends AggregatorBuilder<FilterAggregatorBuilder> {
+public class FilterAggregationBuilder extends AggregationBuilder<FilterAggregationBuilder> {
     public static final String NAME = InternalFilter.TYPE.name();
     public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME);

@@ -51,7 +50,7 @@ public class FilterAggregatorBuilder extends AggregatorBuilder<FilterAggregatorB
      *            filter will fall into the bucket defined by this
      *            {@link Filter} aggregation.
      */
-    public FilterAggregatorBuilder(String name, QueryBuilder filter) {
+    public FilterAggregationBuilder(String name, QueryBuilder filter) {
         super(name, InternalFilter.TYPE);
         if (filter == null) {
             throw new IllegalArgumentException("[filter] must not be null: [" + name + "]");
@@ -66,7 +65,7 @@ public class FilterAggregatorBuilder extends AggregatorBuilder<FilterAggregatorB
     /**
      * Read from a stream.
      */
-    public FilterAggregatorBuilder(StreamInput in) throws IOException {
+    public FilterAggregationBuilder(StreamInput in) throws IOException {
         super(in, InternalFilter.TYPE);
         filter = in.readNamedWriteable(QueryBuilder.class);
     }
@@ -90,7 +89,7 @@ public class FilterAggregatorBuilder extends AggregatorBuilder<FilterAggregatorB
         return builder;
     }

-    public static FilterAggregatorBuilder parse(String aggregationName, QueryParseContext context)
+    public static FilterAggregationBuilder parse(String aggregationName, QueryParseContext context)
             throws IOException {
         QueryBuilder filter = context.parseInnerQueryBuilder();

@@ -98,7 +97,7 @@ public class FilterAggregatorBuilder extends AggregatorBuilder<FilterAggregatorB
             throw new ParsingException(null, "filter cannot be null in filter aggregation [{}]", aggregationName);
         }

-        return new FilterAggregatorBuilder(aggregationName, filter);
+        return new FilterAggregationBuilder(aggregationName, filter);
     }


@@ -109,7 +108,7 @@ public class FilterAggregatorBuilder extends AggregatorBuilder<FilterAggregatorB

     @Override
     protected boolean doEquals(Object obj) {
-        FilterAggregatorBuilder other = (FilterAggregatorBuilder) obj;
+        FilterAggregationBuilder other = (FilterAggregationBuilder) obj;
         return Objects.equals(filter, other.filter);
     }

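A minimal usage sketch of the renamed builder, assuming QueryBuilders.termQuery for the filter query; field and value are hypothetical:

    // Bucket all documents matching a term filter, under the new class name.
    FilterAggregationBuilder errors = new FilterAggregationBuilder("errors",
            QueryBuilders.termQuery("level", "error"));
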
@@ -28,7 +28,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.query.QueryBuilder;
 import org.elasticsearch.index.query.QueryBuilders;
 import org.elasticsearch.index.query.QueryParseContext;
-import org.elasticsearch.search.aggregations.AggregatorBuilder;
+import org.elasticsearch.search.aggregations.AggregationBuilder;
 import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
 import org.elasticsearch.search.aggregations.AggregatorFactory;
 import org.elasticsearch.search.aggregations.bucket.filters.FiltersAggregator.KeyedFilter;
@@ -43,7 +43,7 @@ import java.util.Objects;

 import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;

-public class FiltersAggregatorBuilder extends AggregatorBuilder<FiltersAggregatorBuilder> {
+public class FiltersAggregationBuilder extends AggregationBuilder<FiltersAggregationBuilder> {
     public static final String NAME = InternalFilters.TYPE.name();
     public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME);

@@ -62,11 +62,11 @@ public class FiltersAggregatorBuilder extends AggregatorBuilder<FiltersAggregato
      * @param filters
      *            the KeyedFilters to use with this aggregation.
      */
-    public FiltersAggregatorBuilder(String name, KeyedFilter... filters) {
+    public FiltersAggregationBuilder(String name, KeyedFilter... filters) {
         this(name, Arrays.asList(filters));
     }

-    private FiltersAggregatorBuilder(String name, List<KeyedFilter> filters) {
+    private FiltersAggregationBuilder(String name, List<KeyedFilter> filters) {
         super(name, InternalFilters.TYPE);
         // internally we want to have a fixed order of filters, regardless of the order of the filters in the request
         this.filters = new ArrayList<>(filters);
@@ -80,7 +80,7 @@ public class FiltersAggregatorBuilder extends AggregatorBuilder<FiltersAggregato
      * @param filters
      *            the filters to use with this aggregation
      */
-    public FiltersAggregatorBuilder(String name, QueryBuilder... filters) {
+    public FiltersAggregationBuilder(String name, QueryBuilder... filters) {
         super(name, InternalFilters.TYPE);
         List<KeyedFilter> keyedFilters = new ArrayList<>(filters.length);
         for (int i = 0; i < filters.length; i++) {
@@ -93,7 +93,7 @@ public class FiltersAggregatorBuilder extends AggregatorBuilder<FiltersAggregato
     /**
      * Read from a stream.
      */
-    public FiltersAggregatorBuilder(StreamInput in) throws IOException {
+    public FiltersAggregationBuilder(StreamInput in) throws IOException {
         super(in, InternalFilters.TYPE);
         keyed = in.readBoolean();
         int filtersSize = in.readVInt();
@@ -131,7 +131,7 @@ public class FiltersAggregatorBuilder extends AggregatorBuilder<FiltersAggregato
     /**
      * Set whether to include a bucket for documents not matching any filter
      */
-    public FiltersAggregatorBuilder otherBucket(boolean otherBucket) {
+    public FiltersAggregationBuilder otherBucket(boolean otherBucket) {
         this.otherBucket = otherBucket;
         return this;
     }
@@ -154,7 +154,7 @@ public class FiltersAggregatorBuilder extends AggregatorBuilder<FiltersAggregato
      * Set the key to use for the bucket for documents not matching any
      * filter.
      */
-    public FiltersAggregatorBuilder otherBucketKey(String otherBucketKey) {
+    public FiltersAggregationBuilder otherBucketKey(String otherBucketKey) {
         if (otherBucketKey == null) {
             throw new IllegalArgumentException("[otherBucketKey] must not be null: [" + name + "]");
         }
@@ -199,7 +199,7 @@ public class FiltersAggregatorBuilder extends AggregatorBuilder<FiltersAggregato
         return builder;
     }

-    public static FiltersAggregatorBuilder parse(String aggregationName, QueryParseContext context)
+    public static FiltersAggregationBuilder parse(String aggregationName, QueryParseContext context)
             throws IOException {
         XContentParser parser = context.parser();

@@ -264,12 +264,12 @@ public class FiltersAggregatorBuilder extends AggregatorBuilder<FiltersAggregato
             otherBucketKey = "_other_";
         }

-        FiltersAggregatorBuilder factory;
+        FiltersAggregationBuilder factory;
         if (keyedFilters != null) {
-            factory = new FiltersAggregatorBuilder(aggregationName,
+            factory = new FiltersAggregationBuilder(aggregationName,
                     keyedFilters.toArray(new FiltersAggregator.KeyedFilter[keyedFilters.size()]));
         } else {
-            factory = new FiltersAggregatorBuilder(aggregationName,
+            factory = new FiltersAggregationBuilder(aggregationName,
                     nonKeyedFilters.toArray(new QueryBuilder[nonKeyedFilters.size()]));
         }
         if (otherBucket != null) {
@@ -288,7 +288,7 @@ public class FiltersAggregatorBuilder extends AggregatorBuilder<FiltersAggregato

     @Override
     protected boolean doEquals(Object obj) {
-        FiltersAggregatorBuilder other = (FiltersAggregatorBuilder) obj;
+        FiltersAggregationBuilder other = (FiltersAggregationBuilder) obj;
         return Objects.equals(filters, other.filters)
                 && Objects.equals(keyed, other.keyed)
                 && Objects.equals(otherBucket, other.otherBucket)

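A usage sketch of the renamed keyed-filters builder, combining the constructor with the otherBucket setters shown above; it assumes KeyedFilter's (key, filter) constructor, and all names and values are hypothetical:

    FiltersAggregationBuilder byLevel = new FiltersAggregationBuilder("by_level",
            new FiltersAggregator.KeyedFilter("errors", QueryBuilders.termQuery("level", "error")),
            new FiltersAggregator.KeyedFilter("warnings", QueryBuilders.termQuery("level", "warn")))
            .otherBucket(true)        // also collect documents matching no filter
            .otherBucketKey("other"); // under this bucket key
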
@@ -37,7 +37,7 @@ import org.elasticsearch.search.aggregations.bucket.BucketUtils;
 import org.elasticsearch.search.aggregations.support.AggregationContext;
 import org.elasticsearch.search.aggregations.support.ValueType;
 import org.elasticsearch.search.aggregations.support.ValuesSource;
-import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder;
+import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
 import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
 import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
 import org.elasticsearch.search.aggregations.support.ValuesSourceType;
@@ -45,7 +45,7 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceType;
 import java.io.IOException;
 import java.util.Objects;

-public class GeoGridAggregatorBuilder extends ValuesSourceAggregatorBuilder<ValuesSource.GeoPoint, GeoGridAggregatorBuilder> {
+public class GeoGridAggregationBuilder extends ValuesSourceAggregationBuilder<ValuesSource.GeoPoint, GeoGridAggregationBuilder> {
     public static final String NAME = InternalGeoHashGrid.TYPE.name();
     public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME);

@@ -53,14 +53,14 @@ public class GeoGridAggregatorBuilder extends ValuesSourceAggregatorBuilder<Valu
     private int requiredSize = GeoHashGridParser.DEFAULT_MAX_NUM_CELLS;
     private int shardSize = -1;

-    public GeoGridAggregatorBuilder(String name) {
+    public GeoGridAggregationBuilder(String name) {
         super(name, InternalGeoHashGrid.TYPE, ValuesSourceType.GEOPOINT, ValueType.GEOPOINT);
     }

     /**
      * Read from a stream.
      */
-    public GeoGridAggregatorBuilder(StreamInput in) throws IOException {
+    public GeoGridAggregationBuilder(StreamInput in) throws IOException {
         super(in, InternalGeoHashGrid.TYPE, ValuesSourceType.GEOPOINT, ValueType.GEOPOINT);
         precision = in.readVInt();
         requiredSize = in.readVInt();
@@ -74,7 +74,7 @@ public class GeoGridAggregatorBuilder extends ValuesSourceAggregatorBuilder<Valu
         out.writeVInt(shardSize);
     }

-    public GeoGridAggregatorBuilder precision(int precision) {
+    public GeoGridAggregationBuilder precision(int precision) {
         this.precision = GeoHashGridParams.checkPrecision(precision);
         return this;
     }
@@ -83,7 +83,7 @@ public class GeoGridAggregatorBuilder extends ValuesSourceAggregatorBuilder<Valu
         return precision;
     }

-    public GeoGridAggregatorBuilder size(int size) {
+    public GeoGridAggregationBuilder size(int size) {
         if (size < -1) {
             throw new IllegalArgumentException(
                     "[size] must be greater than or equal to 0. Found [" + shardSize + "] in [" + name + "]");
@@ -96,7 +96,7 @@ public class GeoGridAggregatorBuilder extends ValuesSourceAggregatorBuilder<Valu
         return requiredSize;
     }

-    public GeoGridAggregatorBuilder shardSize(int shardSize) {
+    public GeoGridAggregationBuilder shardSize(int shardSize) {
         if (shardSize < -1) {
             throw new IllegalArgumentException(
                     "[shardSize] must be greater than or equal to 0. Found [" + shardSize + "] in [" + name + "]");
@@ -145,7 +145,7 @@ public class GeoGridAggregatorBuilder extends ValuesSourceAggregatorBuilder<Valu

     @Override
     protected boolean innerEquals(Object obj) {
-        GeoGridAggregatorBuilder other = (GeoGridAggregatorBuilder) obj;
+        GeoGridAggregationBuilder other = (GeoGridAggregationBuilder) obj;
         if (precision != other.precision) {
             return false;
         }

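A sketch chaining the fluent setters shown above; the values are hypothetical, and precision is validated by GeoHashGridParams.checkPrecision:

    GeoGridAggregationBuilder grid = new GeoGridAggregationBuilder("cells")
            .precision(5)     // geohash precision, checked by GeoHashGridParams
            .size(100)        // buckets to return
            .shardSize(500);  // buckets collected per shard before the reduce phase
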
@@ -46,10 +46,10 @@ public class GeoHashGridAggregator extends BucketsAggregator {

     private final int requiredSize;
     private final int shardSize;
-    private final GeoGridAggregatorBuilder.CellIdSource valuesSource;
+    private final GeoGridAggregationBuilder.CellIdSource valuesSource;
     private final LongHash bucketOrds;

-    public GeoHashGridAggregator(String name, AggregatorFactories factories, GeoGridAggregatorBuilder.CellIdSource valuesSource,
+    public GeoHashGridAggregator(String name, AggregatorFactories factories, GeoGridAggregationBuilder.CellIdSource valuesSource,
             int requiredSize, int shardSize, AggregationContext aggregationContext, Aggregator parent, List<PipelineAggregator> pipelineAggregators,
             Map<String, Object> metaData) throws IOException {
         super(name, factories, aggregationContext, parent, pipelineAggregators, metaData);

@@ -25,7 +25,7 @@ import org.elasticsearch.search.aggregations.AggregatorFactory;
 import org.elasticsearch.search.aggregations.InternalAggregation;
 import org.elasticsearch.search.aggregations.InternalAggregation.Type;
 import org.elasticsearch.search.aggregations.NonCollectingAggregator;
-import org.elasticsearch.search.aggregations.bucket.geogrid.GeoGridAggregatorBuilder.CellIdSource;
+import org.elasticsearch.search.aggregations.bucket.geogrid.GeoGridAggregationBuilder.CellIdSource;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 import org.elasticsearch.search.aggregations.support.AggregationContext;
 import org.elasticsearch.search.aggregations.support.ValuesSource;
@@ -45,10 +45,10 @@ public class GeoHashGridParser extends GeoPointValuesSourceParser {
     }

     @Override
-    protected GeoGridAggregatorBuilder createFactory(
+    protected GeoGridAggregationBuilder createFactory(
             String aggregationName, ValuesSourceType valuesSourceType,
             ValueType targetValueType, Map<ParseField, Object> otherOptions) {
-        GeoGridAggregatorBuilder factory = new GeoGridAggregatorBuilder(aggregationName);
+        GeoGridAggregationBuilder factory = new GeoGridAggregationBuilder(aggregationName);
         Integer precision = (Integer) otherOptions.get(GeoHashGridParams.FIELD_PRECISION);
         if (precision != null) {
             factory.precision(precision);

@@ -24,25 +24,25 @@ import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.index.query.QueryParseContext;
-import org.elasticsearch.search.aggregations.AggregatorBuilder;
+import org.elasticsearch.search.aggregations.AggregationBuilder;
 import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
 import org.elasticsearch.search.aggregations.AggregatorFactory;
 import org.elasticsearch.search.aggregations.support.AggregationContext;

 import java.io.IOException;

-public class GlobalAggregatorBuilder extends AggregatorBuilder<GlobalAggregatorBuilder> {
+public class GlobalAggregationBuilder extends AggregationBuilder<GlobalAggregationBuilder> {
     public static final String NAME = InternalGlobal.TYPE.name();
     public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME);

-    public GlobalAggregatorBuilder(String name) {
+    public GlobalAggregationBuilder(String name) {
         super(name, InternalGlobal.TYPE);
     }

     /**
      * Read from a stream.
      */
-    public GlobalAggregatorBuilder(StreamInput in) throws IOException {
+    public GlobalAggregationBuilder(StreamInput in) throws IOException {
         super(in, InternalGlobal.TYPE);
     }

@@ -64,9 +64,9 @@ public class GlobalAggregatorBuilder extends AggregatorBuilder<GlobalAggregatorB
         return builder;
     }

-    public static GlobalAggregatorBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
+    public static GlobalAggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
         context.parser().nextToken();
-        return new GlobalAggregatorBuilder(aggregationName);
+        return new GlobalAggregationBuilder(aggregationName);
     }

     @Override

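The renamed global aggregation takes only a name; a one-line sketch with a hypothetical name:

    GlobalAggregationBuilder all = new GlobalAggregationBuilder("all_docs");
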
@@ -24,14 +24,14 @@ import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.rounding.Rounding;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.search.aggregations.support.ValuesSource;
-import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder;
+import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
 import org.elasticsearch.search.aggregations.support.ValuesSourceType;

 import java.io.IOException;
 import java.util.Objects;

 public abstract class AbstractHistogramBuilder<AB extends AbstractHistogramBuilder<AB>>
-        extends ValuesSourceAggregatorBuilder<ValuesSource.Numeric, AB> {
+        extends ValuesSourceAggregationBuilder<ValuesSource.Numeric, AB> {

     protected long interval;
     protected long offset = 0;

@@ -33,21 +33,21 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
 import java.io.IOException;
 import java.util.Objects;

-public class DateHistogramAggregatorBuilder extends AbstractHistogramBuilder<DateHistogramAggregatorBuilder> {
+public class DateHistogramAggregationBuilder extends AbstractHistogramBuilder<DateHistogramAggregationBuilder> {

     public static final String NAME = InternalDateHistogram.TYPE.name();
     public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME);

     private DateHistogramInterval dateHistogramInterval;

-    public DateHistogramAggregatorBuilder(String name) {
+    public DateHistogramAggregationBuilder(String name) {
         super(name, InternalDateHistogram.HISTOGRAM_FACTORY);
     }

     /**
      * Read from a stream.
      */
-    public DateHistogramAggregatorBuilder(StreamInput in) throws IOException {
+    public DateHistogramAggregationBuilder(StreamInput in) throws IOException {
         super(in, InternalDateHistogram.HISTOGRAM_FACTORY);
         dateHistogramInterval = in.readOptionalWriteable(DateHistogramInterval::new);
     }
@@ -61,7 +61,7 @@ public class DateHistogramAggregatorBuilder extends AbstractHistogramBuilder<Dat
     /**
      * Set the interval.
      */
-    public DateHistogramAggregatorBuilder dateHistogramInterval(DateHistogramInterval dateHistogramInterval) {
+    public DateHistogramAggregationBuilder dateHistogramInterval(DateHistogramInterval dateHistogramInterval) {
         if (dateHistogramInterval == null) {
             throw new IllegalArgumentException("[dateHistogramInterval] must not be null: [" + name + "]");
         }
@@ -69,7 +69,7 @@ public class DateHistogramAggregatorBuilder extends AbstractHistogramBuilder<Dat
         return this;
     }

-    public DateHistogramAggregatorBuilder offset(String offset) {
+    public DateHistogramAggregationBuilder offset(String offset) {
         if (offset == null) {
             throw new IllegalArgumentException("[offset] must not be null: [" + name + "]");
         }
@@ -79,12 +79,12 @@ public class DateHistogramAggregatorBuilder extends AbstractHistogramBuilder<Dat
     protected static long parseStringOffset(String offset) {
         if (offset.charAt(0) == '-') {
             return -TimeValue
-                    .parseTimeValue(offset.substring(1), null, DateHistogramAggregatorBuilder.class.getSimpleName() + ".parseOffset")
+                    .parseTimeValue(offset.substring(1), null, DateHistogramAggregationBuilder.class.getSimpleName() + ".parseOffset")
                     .millis();
         }
         int beginIndex = offset.charAt(0) == '+' ? 1 : 0;
         return TimeValue
-                .parseTimeValue(offset.substring(beginIndex), null, DateHistogramAggregatorBuilder.class.getSimpleName() + ".parseOffset")
+                .parseTimeValue(offset.substring(beginIndex), null, DateHistogramAggregationBuilder.class.getSimpleName() + ".parseOffset")
                 .millis();
     }

@@ -121,7 +121,7 @@ public class DateHistogramAggregatorBuilder extends AbstractHistogramBuilder<Dat

     @Override
     protected boolean innerEquals(Object obj) {
-        DateHistogramAggregatorBuilder other = (DateHistogramAggregatorBuilder) obj;
+        DateHistogramAggregationBuilder other = (DateHistogramAggregationBuilder) obj;
         return super.innerEquals(obj) && Objects.equals(dateHistogramInterval, other.dateHistogramInterval);
     }
 }

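A sketch of the renamed date-histogram builder using the interval and offset setters above; DateHistogramInterval.DAY and the "+6h" offset are illustrative values, not from the commit:

    DateHistogramAggregationBuilder perDay = new DateHistogramAggregationBuilder("per_day")
            .dateHistogramInterval(DateHistogramInterval.DAY)
            .offset("+6h"); // parsed by parseStringOffset into milliseconds
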
@@ -42,9 +42,9 @@ public class DateHistogramParser extends HistogramParser {
     }

     @Override
-    protected DateHistogramAggregatorBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType,
+    protected DateHistogramAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType,
             ValueType targetValueType, Map<ParseField, Object> otherOptions) {
-        DateHistogramAggregatorBuilder factory = new DateHistogramAggregatorBuilder(aggregationName);
+        DateHistogramAggregationBuilder factory = new DateHistogramAggregationBuilder(aggregationName);
         Object interval = otherOptions.get(Rounding.Interval.INTERVAL_FIELD);
         if (interval == null) {
             throw new ParsingException(null, "Missing required field [interval] for histogram aggregation [" + aggregationName + "]");
@@ -89,6 +89,6 @@ public class DateHistogramParser extends HistogramParser {

     @Override
     protected long parseStringOffset(String offset) throws IOException {
-        return DateHistogramAggregatorBuilder.parseStringOffset(offset);
+        return DateHistogramAggregationBuilder.parseStringOffset(offset);
     }
 }

@@ -29,18 +29,18 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;

 import java.io.IOException;

-public class HistogramAggregatorBuilder extends AbstractHistogramBuilder<HistogramAggregatorBuilder> {
+public class HistogramAggregationBuilder extends AbstractHistogramBuilder<HistogramAggregationBuilder> {
     public static final String NAME = InternalHistogram.TYPE.name();
     public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME);

-    public HistogramAggregatorBuilder(String name) {
+    public HistogramAggregationBuilder(String name) {
         super(name, InternalHistogram.HISTOGRAM_FACTORY);
     }

     /**
      * Read from a stream.
      */
-    public HistogramAggregatorBuilder(StreamInput in) throws IOException {
+    public HistogramAggregationBuilder(StreamInput in) throws IOException {
         super(in, InternalHistogram.HISTOGRAM_FACTORY);
     }

@@ -47,7 +47,7 @@ public class HistogramParser extends NumericValuesSourceParser {
     @Override
     protected AbstractHistogramBuilder<?> createFactory(String aggregationName, ValuesSourceType valuesSourceType,
             ValueType targetValueType, Map<ParseField, Object> otherOptions) {
-        HistogramAggregatorBuilder factory = new HistogramAggregatorBuilder(aggregationName);
+        HistogramAggregationBuilder factory = new HistogramAggregationBuilder(aggregationName);
         Long interval = (Long) otherOptions.get(Rounding.Interval.INTERVAL_FIELD);
         if (interval == null) {
             throw new ParsingException(null, "Missing required field [interval] for histogram aggregation [" + aggregationName + "]");

@@ -28,25 +28,25 @@ import org.elasticsearch.search.aggregations.AggregatorFactory;
 import org.elasticsearch.search.aggregations.support.AggregationContext;
 import org.elasticsearch.search.aggregations.support.ValueType;
 import org.elasticsearch.search.aggregations.support.ValuesSource;
-import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder;
+import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
 import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
 import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
 import org.elasticsearch.search.aggregations.support.ValuesSourceType;

 import java.io.IOException;

-public class MissingAggregatorBuilder extends ValuesSourceAggregatorBuilder<ValuesSource, MissingAggregatorBuilder> {
+public class MissingAggregationBuilder extends ValuesSourceAggregationBuilder<ValuesSource, MissingAggregationBuilder> {
     public static final String NAME = InternalMissing.TYPE.name();
     public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME);

-    public MissingAggregatorBuilder(String name, ValueType targetValueType) {
+    public MissingAggregationBuilder(String name, ValueType targetValueType) {
         super(name, InternalMissing.TYPE, ValuesSourceType.ANY, targetValueType);
     }

     /**
      * Read from a stream.
      */
-    public MissingAggregatorBuilder(StreamInput in) throws IOException {
+    public MissingAggregationBuilder(StreamInput in) throws IOException {
         super(in, InternalMissing.TYPE, ValuesSourceType.ANY);
     }

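A sketch of the renamed missing aggregation; the name and value type below are illustrative:

    MissingAggregationBuilder noPrice = new MissingAggregationBuilder("no_price", ValueType.DOUBLE);
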
@@ -41,8 +41,8 @@ public class MissingParser extends AnyValuesSourceParser {
     }

     @Override
-    protected MissingAggregatorBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType,
+    protected MissingAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType,
             ValueType targetValueType, Map<ParseField, Object> otherOptions) {
-        return new MissingAggregatorBuilder(aggregationName, targetValueType);
+        return new MissingAggregationBuilder(aggregationName, targetValueType);
     }
 }

@@ -26,7 +26,7 @@ import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.query.QueryParseContext;
-import org.elasticsearch.search.aggregations.AggregatorBuilder;
+import org.elasticsearch.search.aggregations.AggregationBuilder;
 import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
 import org.elasticsearch.search.aggregations.AggregatorFactory;
 import org.elasticsearch.search.aggregations.support.AggregationContext;
@@ -34,7 +34,7 @@ import org.elasticsearch.search.aggregations.support.AggregationContext;
 import java.io.IOException;
 import java.util.Objects;

-public class NestedAggregatorBuilder extends AggregatorBuilder<NestedAggregatorBuilder> {
+public class NestedAggregationBuilder extends AggregationBuilder<NestedAggregationBuilder> {
     public static final String NAME = InternalNested.TYPE.name();
     public static final ParseField AGGREGATION_FIELD_NAME = new ParseField(NAME);

@@ -47,7 +47,7 @@ public class NestedAggregatorBuilder extends AggregatorBuilder<NestedAggregatorB
      *            the path to use for this nested aggregation. The path must
      *            match the path to a nested object in the mappings.
      */
-    public NestedAggregatorBuilder(String name, String path) {
+    public NestedAggregationBuilder(String name, String path) {
         super(name, InternalNested.TYPE);
         if (path == null) {
             throw new IllegalArgumentException("[path] must not be null: [" + name + "]");
@@ -58,7 +58,7 @@ public class NestedAggregatorBuilder extends AggregatorBuilder<NestedAggregatorB
     /**
      * Read from a stream.
      */
-    public NestedAggregatorBuilder(StreamInput in) throws IOException {
+    public NestedAggregationBuilder(StreamInput in) throws IOException {
         super(in, InternalNested.TYPE);
         path = in.readString();
     }
@@ -89,7 +89,7 @@ public class NestedAggregatorBuilder extends AggregatorBuilder<NestedAggregatorB
         return builder;
     }

-    public static NestedAggregatorBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
+    public static NestedAggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
         String path = null;

         XContentParser.Token token;
@@ -115,7 +115,7 @@ public class NestedAggregatorBuilder extends AggregatorBuilder<NestedAggregatorB
             throw new ParsingException(parser.getTokenLocation(), "Missing [path] field for nested aggregation [" + aggregationName + "]");
         }

-        return new NestedAggregatorBuilder(aggregationName, path);
+        return new NestedAggregationBuilder(aggregationName, path);
     }


@@ -126,7 +126,7 @@ public class NestedAggregatorBuilder extends AggregatorBuilder<NestedAggregatorB

     @Override
     protected boolean doEquals(Object obj) {
-        NestedAggregatorBuilder other = (NestedAggregatorBuilder) obj;
+        NestedAggregationBuilder other = (NestedAggregationBuilder) obj;
         return Objects.equals(path, other.path);
     }

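A sketch of the renamed nested aggregation; per the Javadoc above, the path must match a nested object path in the mappings ("comments" is hypothetical):

    NestedAggregationBuilder nested = new NestedAggregationBuilder("comments_nested", "comments");
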
@@ -26,7 +26,7 @@ import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.query.QueryParseContext;
-import org.elasticsearch.search.aggregations.AggregatorBuilder;
+import org.elasticsearch.search.aggregations.AggregationBuilder;
 import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
 import org.elasticsearch.search.aggregations.AggregatorFactory;
 import org.elasticsearch.search.aggregations.support.AggregationContext;
@@ -34,20 +34,20 @@ import org.elasticsearch.search.aggregations.support.AggregationContext;
 import java.io.IOException;
 import java.util.Objects;

-public class ReverseNestedAggregatorBuilder extends AggregatorBuilder<ReverseNestedAggregatorBuilder> {
+public class ReverseNestedAggregationBuilder extends AggregationBuilder<ReverseNestedAggregationBuilder> {
     public static final String NAME = InternalReverseNested.TYPE.name();
     public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME);

     private String path;

-    public ReverseNestedAggregatorBuilder(String name) {
+    public ReverseNestedAggregationBuilder(String name) {
         super(name, InternalReverseNested.TYPE);
     }

     /**
      * Read from a stream.
      */
-    public ReverseNestedAggregatorBuilder(StreamInput in) throws IOException {
+    public ReverseNestedAggregationBuilder(StreamInput in) throws IOException {
         super(in, InternalReverseNested.TYPE);
         path = in.readOptionalString();
     }
@@ -62,7 +62,7 @@ public class ReverseNestedAggregatorBuilder extends AggregatorBuilder<ReverseNes
      *            the path to a nested object in the mappings. If it is not specified
      *            then this aggregation will go back to the root document.
      */
-    public ReverseNestedAggregatorBuilder path(String path) {
+    public ReverseNestedAggregationBuilder path(String path) {
         if (path == null) {
             throw new IllegalArgumentException("[path] must not be null: [" + name + "]");
         }
@@ -93,7 +93,7 @@ public class ReverseNestedAggregatorBuilder extends AggregatorBuilder<ReverseNes
         return builder;
     }

-    public static ReverseNestedAggregatorBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
+    public static ReverseNestedAggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
         String path = null;

         XContentParser.Token token;
@@ -114,7 +114,7 @@ public class ReverseNestedAggregatorBuilder extends AggregatorBuilder<ReverseNes
             }
         }

-        ReverseNestedAggregatorBuilder factory = new ReverseNestedAggregatorBuilder(
+        ReverseNestedAggregationBuilder factory = new ReverseNestedAggregationBuilder(
                 aggregationName);
         if (path != null) {
             factory.path(path);
@@ -130,7 +130,7 @@ public class ReverseNestedAggregatorBuilder extends AggregatorBuilder<ReverseNes

     @Override
     protected boolean doEquals(Object obj) {
-        ReverseNestedAggregatorBuilder other = (ReverseNestedAggregatorBuilder) obj;
+        ReverseNestedAggregationBuilder other = (ReverseNestedAggregationBuilder) obj;
         return Objects.equals(path, other.path);
     }

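A sketch of the renamed reverse-nested aggregation; calling path() is optional and, per the Javadoc above, joins back to an intermediate level instead of the root document (the path is hypothetical):

    ReverseNestedAggregationBuilder back = new ReverseNestedAggregationBuilder("back_to_root");
    back.path("comments"); // hypothetical nested path
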
@@ -25,7 +25,7 @@ import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator.Range;
 import org.elasticsearch.search.aggregations.support.ValuesSource;
-import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder;
+import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;

 import java.io.IOException;
 import java.util.ArrayList;
@@ -33,7 +33,7 @@ import java.util.List;
 import java.util.Objects;

 public abstract class AbstractRangeBuilder<AB extends AbstractRangeBuilder<AB, R>, R extends Range>
-        extends ValuesSourceAggregatorBuilder<ValuesSource.Numeric, AB> {
+        extends ValuesSourceAggregationBuilder<ValuesSource.Numeric, AB> {

     protected final InternalRange.Factory<?, ?> rangeFactory;
     protected List<R> ranges = new ArrayList<>();

@@ -30,18 +30,18 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;

 import java.io.IOException;

-public class RangeAggregatorBuilder extends AbstractRangeBuilder<RangeAggregatorBuilder, Range> {
+public class RangeAggregationBuilder extends AbstractRangeBuilder<RangeAggregationBuilder, Range> {
     public static final String NAME = InternalRange.TYPE.name();
     public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME);

-    public RangeAggregatorBuilder(String name) {
+    public RangeAggregationBuilder(String name) {
         super(name, InternalRange.FACTORY);
     }

     /**
      * Read from a stream.
      */
-    public RangeAggregatorBuilder(StreamInput in) throws IOException {
+    public RangeAggregationBuilder(StreamInput in) throws IOException {
         super(in, InternalRange.FACTORY, Range::new);
     }

@@ -55,7 +55,7 @@ public class RangeAggregatorBuilder extends AbstractRangeBuilder<RangeAggregator
      * @param to
      *            the upper bound on the distances, exclusive
      */
-    public RangeAggregatorBuilder addRange(String key, double from, double to) {
+    public RangeAggregationBuilder addRange(String key, double from, double to) {
         addRange(new Range(key, from, to));
         return this;
     }
@@ -65,7 +65,7 @@ public class RangeAggregatorBuilder extends AbstractRangeBuilder<RangeAggregator
      * automatically generated based on <code>from</code> and
      * <code>to</code>.
      */
-    public RangeAggregatorBuilder addRange(double from, double to) {
+    public RangeAggregationBuilder addRange(double from, double to) {
         return addRange(null, from, to);
     }

@@ -77,7 +77,7 @@ public class RangeAggregatorBuilder extends AbstractRangeBuilder<RangeAggregator
      * @param to
      *            the upper bound on the distances, exclusive
      */
-    public RangeAggregatorBuilder addUnboundedTo(String key, double to) {
+    public RangeAggregationBuilder addUnboundedTo(String key, double to) {
         addRange(new Range(key, null, to));
         return this;
     }
@@ -86,7 +86,7 @@ public class RangeAggregatorBuilder extends AbstractRangeBuilder<RangeAggregator
     * Same as {@link #addUnboundedTo(String, double)} but the key will be
     * computed automatically.
     */
-    public RangeAggregatorBuilder addUnboundedTo(double to) {
+    public RangeAggregationBuilder addUnboundedTo(double to) {
         return addUnboundedTo(null, to);
     }

@@ -98,7 +98,7 @@ public class RangeAggregatorBuilder extends AbstractRangeBuilder<RangeAggregator
     * @param from
     *            the lower bound on the distances, inclusive
     */
-    public RangeAggregatorBuilder addUnboundedFrom(String key, double from) {
+    public RangeAggregationBuilder addUnboundedFrom(String key, double from) {
         addRange(new Range(key, from, null));
         return this;
     }
@@ -107,7 +107,7 @@ public class RangeAggregatorBuilder extends AbstractRangeBuilder<RangeAggregator
     * Same as {@link #addUnboundedFrom(String, double)} but the key will be
    * computed automatically.
     */
-    public RangeAggregatorBuilder addUnboundedFrom(double from) {
+    public RangeAggregationBuilder addUnboundedFrom(double from) {
         return addUnboundedFrom(null, from);
     }

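A sketch chaining the range setters shown above into a three-bucket range aggregation; keys and bounds are illustrative:

    RangeAggregationBuilder prices = new RangeAggregationBuilder("price_ranges")
            .addUnboundedTo("cheap", 10.0)          // *-10, exclusive upper bound
            .addRange("mid", 10.0, 100.0)           // 10-100
            .addUnboundedFrom("expensive", 100.0);  // 100-*, inclusive lower bound
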
@@ -51,7 +51,7 @@ public class RangeParser extends NumericValuesSourceParser {
     @Override
     protected AbstractRangeBuilder<?, ?> createFactory(String aggregationName, ValuesSourceType valuesSourceType,
             ValueType targetValueType, Map<ParseField, Object> otherOptions) {
-        RangeAggregatorBuilder factory = new RangeAggregatorBuilder(aggregationName);
+        RangeAggregationBuilder factory = new RangeAggregationBuilder(aggregationName);
         @SuppressWarnings("unchecked")
         List<? extends Range> ranges = (List<? extends Range>) otherOptions.get(RangeAggregator.RANGES_FIELD);
         for (Range range : ranges) {

@@ -33,18 +33,18 @@ import org.joda.time.DateTime;

 import java.io.IOException;

-public class DateRangeAggregatorBuilder extends AbstractRangeBuilder<DateRangeAggregatorBuilder, RangeAggregator.Range> {
+public class DateRangeAggregationBuilder extends AbstractRangeBuilder<DateRangeAggregationBuilder, RangeAggregator.Range> {
     public static final String NAME = InternalDateRange.TYPE.name();
     public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME);

-    public DateRangeAggregatorBuilder(String name) {
+    public DateRangeAggregationBuilder(String name) {
         super(name, InternalDateRange.FACTORY);
     }

     /**
      * Read from a stream.
      */
-    public DateRangeAggregatorBuilder(StreamInput in) throws IOException {
+    public DateRangeAggregationBuilder(StreamInput in) throws IOException {
         super(in, InternalDateRange.FACTORY, Range::new);
     }

@@ -63,7 +63,7 @@ public class DateRangeAggregatorBuilder extends AbstractRangeBuilder<DateRangeAg
      * @param to
      *            the upper bound on the dates, exclusive
      */
-    public DateRangeAggregatorBuilder addRange(String key, String from, String to) {
+    public DateRangeAggregationBuilder addRange(String key, String from, String to) {
         addRange(new Range(key, from, to));
         return this;
     }
@@ -72,7 +72,7 @@ public class DateRangeAggregatorBuilder extends AbstractRangeBuilder<DateRangeAg
      * Same as {@link #addRange(String, String, String)} but the key will be
      * automatically generated based on <code>from</code> and <code>to</code>.
      */
-    public DateRangeAggregatorBuilder addRange(String from, String to) {
+    public DateRangeAggregationBuilder addRange(String from, String to) {
         return addRange(null, from, to);
     }

@@ -84,7 +84,7 @@ public class DateRangeAggregatorBuilder extends AbstractRangeBuilder<DateRangeAg
      * @param to
      *            the upper bound on the dates, exclusive
      */
-    public DateRangeAggregatorBuilder addUnboundedTo(String key, String to) {
+    public DateRangeAggregationBuilder addUnboundedTo(String key, String to) {
         addRange(new Range(key, null, to));
         return this;
     }
@@ -93,7 +93,7 @@ public class DateRangeAggregatorBuilder extends AbstractRangeBuilder<DateRangeAg
      * Same as {@link #addUnboundedTo(String, String)} but the key will be
      * computed automatically.
      */
-    public DateRangeAggregatorBuilder addUnboundedTo(String to) {
+    public DateRangeAggregationBuilder addUnboundedTo(String to) {
         return addUnboundedTo(null, to);
     }

@@ -105,7 +105,7 @@ public class DateRangeAggregatorBuilder extends AbstractRangeBuilder<DateRangeAg
      * @param from
      *            the lower bound on the distances, inclusive
      */
-    public DateRangeAggregatorBuilder addUnboundedFrom(String key, String from) {
+    public DateRangeAggregationBuilder addUnboundedFrom(String key, String from) {
         addRange(new Range(key, from, null));
         return this;
     }
@@ -114,7 +114,7 @@ public class DateRangeAggregatorBuilder extends AbstractRangeBuilder<DateRangeAg
      * Same as {@link #addUnboundedFrom(String, String)} but the key will be
      * computed automatically.
      */
-    public DateRangeAggregatorBuilder addUnboundedFrom(String from) {
+    public DateRangeAggregationBuilder addUnboundedFrom(String from) {
         return addUnboundedFrom(null, from);
     }

@@ -128,7 +128,7 @@ public class DateRangeAggregatorBuilder extends AbstractRangeBuilder<DateRangeAg
      * @param to
      *            the upper bound on the dates, exclusive
      */
-    public DateRangeAggregatorBuilder addRange(String key, double from, double to) {
+    public DateRangeAggregationBuilder addRange(String key, double from, double to) {
         addRange(new Range(key, from, to));
         return this;
     }
@@ -137,7 +137,7 @@ public class DateRangeAggregatorBuilder extends AbstractRangeBuilder<DateRangeAg
      * Same as {@link #addRange(String, double, double)} but the key will be
      * automatically generated based on <code>from</code> and <code>to</code>.
      */
-    public DateRangeAggregatorBuilder addRange(double from, double to) {
+    public DateRangeAggregationBuilder addRange(double from, double to) {
         return addRange(null, from, to);
     }

@@ -149,7 +149,7 @@ public class DateRangeAggregatorBuilder extends AbstractRangeBuilder<DateRangeAg
      * @param to
      *            the upper bound on the dates, exclusive
      */
-    public DateRangeAggregatorBuilder addUnboundedTo(String key, double to) {
+    public DateRangeAggregationBuilder addUnboundedTo(String key, double to) {
         addRange(new Range(key, null, to));
         return this;
     }
@@ -158,7 +158,7 @@ public class DateRangeAggregatorBuilder extends AbstractRangeBuilder<DateRangeAg
      * Same as {@link #addUnboundedTo(String, double)} but the key will be
      * computed automatically.
      */
-    public DateRangeAggregatorBuilder addUnboundedTo(double to) {
+    public DateRangeAggregationBuilder addUnboundedTo(double to) {
         return addUnboundedTo(null, to);
     }

@@ -170,7 +170,7 @@ public class DateRangeAggregatorBuilder extends AbstractRangeBuilder<DateRangeAg
      * @param from
      *            the lower bound on the distances, inclusive
      */
-    public DateRangeAggregatorBuilder addUnboundedFrom(String key, double from) {
+    public DateRangeAggregationBuilder addUnboundedFrom(String key, double from) {
         addRange(new Range(key, from, null));
         return this;
     }
@@ -179,7 +179,7 @@ public class DateRangeAggregatorBuilder extends AbstractRangeBuilder<DateRangeAg
      * Same as {@link #addUnboundedFrom(String, double)} but the key will be
      * computed automatically.
      */
-    public DateRangeAggregatorBuilder addUnboundedFrom(double from) {
+    public DateRangeAggregationBuilder addUnboundedFrom(double from) {
         return addUnboundedFrom(null, from);
     }

@@ -193,7 +193,7 @@ public class DateRangeAggregatorBuilder extends AbstractRangeBuilder<DateRangeAg
      * @param to
      *            the upper bound on the dates, exclusive
      */
-    public DateRangeAggregatorBuilder addRange(String key, DateTime from, DateTime to) {
+    public DateRangeAggregationBuilder addRange(String key, DateTime from, DateTime to) {
         addRange(new Range(key, convertDateTime(from), convertDateTime(to)));
         return this;
     }
@@ -210,7 +210,7 @@ public class DateRangeAggregatorBuilder extends AbstractRangeBuilder<DateRangeAg
      * Same as {@link #addRange(String, DateTime, DateTime)} but the key will be
      * automatically generated based on <code>from</code> and <code>to</code>.
      */
-    public DateRangeAggregatorBuilder addRange(DateTime from, DateTime to) {
+    public DateRangeAggregationBuilder addRange(DateTime from, DateTime to) {
         return addRange(null, from, to);
     }

@@ -222,7 +222,7 @@ public class DateRangeAggregatorBuilder extends AbstractRangeBuilder<DateRangeAg
      * @param to
      *            the upper bound on the dates, exclusive
      */
-    public DateRangeAggregatorBuilder addUnboundedTo(String key, DateTime to) {
+    public DateRangeAggregationBuilder addUnboundedTo(String key, DateTime to) {
         addRange(new Range(key, null, convertDateTime(to)));
         return this;
     }
@@ -231,7 +231,7 @@ public class DateRangeAggregatorBuilder extends AbstractRangeBuilder<DateRangeAg
      * Same as {@link #addUnboundedTo(String, DateTime)} but the key will be
      * computed automatically.
      */
-    public DateRangeAggregatorBuilder addUnboundedTo(DateTime to) {
+    public DateRangeAggregationBuilder addUnboundedTo(DateTime to) {
         return addUnboundedTo(null, to);
     }

@@ -243,7 +243,7 @@ public class DateRangeAggregatorBuilder extends AbstractRangeBuilder<DateRangeAg
      * @param from
      *            the lower bound on the distances, inclusive
      */
-    public DateRangeAggregatorBuilder addUnboundedFrom(String key, DateTime from) {
+    public DateRangeAggregationBuilder addUnboundedFrom(String key, DateTime from) {
         addRange(new Range(key, convertDateTime(from), null));
         return this;
     }
@@ -252,7 +252,7 @@ public class DateRangeAggregatorBuilder extends AbstractRangeBuilder<DateRangeAg
      * Same as {@link #addUnboundedFrom(String, DateTime)} but the key will be
      * computed automatically.
      */
-    public DateRangeAggregatorBuilder addUnboundedFrom(DateTime from) {
+    public DateRangeAggregationBuilder addUnboundedFrom(DateTime from) {
         return addUnboundedFrom(null, from);
     }

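As the overloads above show, the date variant accepts String, double (millis), or DateTime bounds; a sketch with date-math strings (all values illustrative):

    DateRangeAggregationBuilder published = new DateRangeAggregationBuilder("publication")
            .addUnboundedTo("archive", "now-1y")
            .addRange("recent", "now-1y", "now");
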
@@ -38,9 +38,9 @@ public class DateRangeParser extends RangeParser {
     }

     @Override
-    protected DateRangeAggregatorBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType,
+    protected DateRangeAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType,
             ValueType targetValueType, Map<ParseField, Object> otherOptions) {
-        DateRangeAggregatorBuilder factory = new DateRangeAggregatorBuilder(aggregationName);
+        DateRangeAggregationBuilder factory = new DateRangeAggregationBuilder(aggregationName);
         @SuppressWarnings("unchecked")
         List<Range> ranges = (List<Range>) otherOptions.get(RangeAggregator.RANGES_FIELD);
         for (Range range : ranges) {

@ -33,7 +33,7 @@ import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator;
|
|||
import org.elasticsearch.search.aggregations.bucket.range.geodistance.GeoDistanceParser.Range;
|
||||
import org.elasticsearch.search.aggregations.support.AggregationContext;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSource;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
|
||||
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
|
||||
|
||||
|
@ -42,7 +42,7 @@ import java.util.ArrayList;
|
|||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
|
||||
public class GeoDistanceAggregatorBuilder extends ValuesSourceAggregatorBuilder<ValuesSource.GeoPoint, GeoDistanceAggregatorBuilder> {
|
||||
public class GeoDistanceAggregationBuilder extends ValuesSourceAggregationBuilder<ValuesSource.GeoPoint, GeoDistanceAggregationBuilder> {
|
||||
public static final String NAME = InternalGeoDistance.TYPE.name();
|
||||
public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME);
|
||||
|
||||
|
@ -52,11 +52,11 @@ public class GeoDistanceAggregatorBuilder extends ValuesSourceAggregatorBuilder<
|
|||
private GeoDistance distanceType = GeoDistance.DEFAULT;
|
||||
private boolean keyed = false;
|
||||
|
||||
public GeoDistanceAggregatorBuilder(String name, GeoPoint origin) {
|
||||
public GeoDistanceAggregationBuilder(String name, GeoPoint origin) {
|
||||
this(name, origin, InternalGeoDistance.FACTORY);
|
||||
}
|
||||
|
||||
private GeoDistanceAggregatorBuilder(String name, GeoPoint origin,
|
||||
private GeoDistanceAggregationBuilder(String name, GeoPoint origin,
             InternalRange.Factory<InternalGeoDistance.Bucket, InternalGeoDistance> rangeFactory) {
         super(name, rangeFactory.type(), rangeFactory.getValueSourceType(), rangeFactory.getValueType());
         if (origin == null) {
@@ -68,7 +68,7 @@ public class GeoDistanceAggregatorBuilder extends ValuesSourceAggregatorBuilder<
     /**
      * Read from a stream.
      */
-    public GeoDistanceAggregatorBuilder(StreamInput in) throws IOException {
+    public GeoDistanceAggregationBuilder(StreamInput in) throws IOException {
         super(in, InternalGeoDistance.FACTORY.type(), InternalGeoDistance.FACTORY.getValueSourceType(),
                 InternalGeoDistance.FACTORY.getValueType());
         origin = new GeoPoint(in.readDouble(), in.readDouble());
@@ -95,7 +95,7 @@ public class GeoDistanceAggregatorBuilder extends ValuesSourceAggregatorBuilder<
         unit.writeTo(out);
     }
 
-    public GeoDistanceAggregatorBuilder addRange(Range range) {
+    public GeoDistanceAggregationBuilder addRange(Range range) {
         if (range == null) {
             throw new IllegalArgumentException("[range] must not be null: [" + name + "]");
         }
@@ -113,7 +113,7 @@ public class GeoDistanceAggregatorBuilder extends ValuesSourceAggregatorBuilder<
      * @param to
      *            the upper bound on the distances, exclusive
      */
-    public GeoDistanceAggregatorBuilder addRange(String key, double from, double to) {
+    public GeoDistanceAggregationBuilder addRange(String key, double from, double to) {
         ranges.add(new Range(key, from, to));
         return this;
     }
@@ -123,7 +123,7 @@ public class GeoDistanceAggregatorBuilder extends ValuesSourceAggregatorBuilder<
      * automatically generated based on <code>from</code> and
      * <code>to</code>.
      */
-    public GeoDistanceAggregatorBuilder addRange(double from, double to) {
+    public GeoDistanceAggregationBuilder addRange(double from, double to) {
         return addRange(null, from, to);
     }
 
@@ -135,7 +135,7 @@ public class GeoDistanceAggregatorBuilder extends ValuesSourceAggregatorBuilder<
      * @param to
      *            the upper bound on the distances, exclusive
      */
-    public GeoDistanceAggregatorBuilder addUnboundedTo(String key, double to) {
+    public GeoDistanceAggregationBuilder addUnboundedTo(String key, double to) {
         ranges.add(new Range(key, null, to));
         return this;
     }
@@ -144,7 +144,7 @@ public class GeoDistanceAggregatorBuilder extends ValuesSourceAggregatorBuilder<
     * Same as {@link #addUnboundedTo(String, double)} but the key will be
     * computed automatically.
     */
-    public GeoDistanceAggregatorBuilder addUnboundedTo(double to) {
+    public GeoDistanceAggregationBuilder addUnboundedTo(double to) {
         return addUnboundedTo(null, to);
     }
 
@@ -156,7 +156,7 @@ public class GeoDistanceAggregatorBuilder extends ValuesSourceAggregatorBuilder<
      * @param from
      *            the lower bound on the distances, inclusive
      */
-    public GeoDistanceAggregatorBuilder addUnboundedFrom(String key, double from) {
+    public GeoDistanceAggregationBuilder addUnboundedFrom(String key, double from) {
         addRange(new Range(key, from, null));
         return this;
     }
@@ -165,7 +165,7 @@ public class GeoDistanceAggregatorBuilder extends ValuesSourceAggregatorBuilder<
     * Same as {@link #addUnboundedFrom(String, double)} but the key will be
     * computed automatically.
     */
-    public GeoDistanceAggregatorBuilder addUnboundedFrom(double from) {
+    public GeoDistanceAggregationBuilder addUnboundedFrom(double from) {
         return addUnboundedFrom(null, from);
     }
 
@@ -178,7 +178,7 @@ public class GeoDistanceAggregatorBuilder extends ValuesSourceAggregatorBuilder<
         return NAME;
     }
 
-    public GeoDistanceAggregatorBuilder unit(DistanceUnit unit) {
+    public GeoDistanceAggregationBuilder unit(DistanceUnit unit) {
         if (unit == null) {
             throw new IllegalArgumentException("[unit] must not be null: [" + name + "]");
         }
@@ -190,7 +190,7 @@ public class GeoDistanceAggregatorBuilder extends ValuesSourceAggregatorBuilder<
         return unit;
     }
 
-    public GeoDistanceAggregatorBuilder distanceType(GeoDistance distanceType) {
+    public GeoDistanceAggregationBuilder distanceType(GeoDistance distanceType) {
         if (distanceType == null) {
             throw new IllegalArgumentException("[distanceType] must not be null: [" + name + "]");
         }
@@ -202,7 +202,7 @@ public class GeoDistanceAggregatorBuilder extends ValuesSourceAggregatorBuilder<
         return distanceType;
     }
 
-    public GeoDistanceAggregatorBuilder keyed(boolean keyed) {
+    public GeoDistanceAggregationBuilder keyed(boolean keyed) {
         this.keyed = keyed;
         return this;
     }
@@ -236,7 +236,7 @@ public class GeoDistanceAggregatorBuilder extends ValuesSourceAggregatorBuilder<
 
     @Override
     protected boolean innerEquals(Object obj) {
-        GeoDistanceAggregatorBuilder other = (GeoDistanceAggregatorBuilder) obj;
+        GeoDistanceAggregationBuilder other = (GeoDistanceAggregationBuilder) obj;
         return Objects.equals(origin, other.origin)
                 && Objects.equals(ranges, other.ranges)
                 && Objects.equals(keyed, other.keyed)
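Migration note for the hunks above: only the class name changes; the fluent API is identical. A minimal usage sketch in Java (illustrative only — the aggregation name, origin, and distances are hypothetical, but every method shown appears in this diff):

    GeoDistanceAggregationBuilder rings =
            new GeoDistanceAggregationBuilder("rings", new GeoPoint(52.3760, 4.894))
                    .unit(DistanceUnit.KILOMETERS) // setters still return the builder, so calls chain
                    .addUnboundedTo(100)           // distances below 100
                    .addRange(100, 300)            // key generated from the bounds
                    .addUnboundedFrom(300);        // distances of 300 and above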
@@ -85,10 +85,10 @@ public class GeoDistanceParser extends GeoPointValuesSourceParser {
     }
 
     @Override
-    protected GeoDistanceAggregatorBuilder createFactory(
+    protected GeoDistanceAggregationBuilder createFactory(
             String aggregationName, ValuesSourceType valuesSourceType, ValueType targetValueType, Map<ParseField, Object> otherOptions) {
         GeoPoint origin = (GeoPoint) otherOptions.get(ORIGIN_FIELD);
-        GeoDistanceAggregatorBuilder factory = new GeoDistanceAggregatorBuilder(aggregationName, origin);
+        GeoDistanceAggregationBuilder factory = new GeoDistanceAggregationBuilder(aggregationName, origin);
         @SuppressWarnings("unchecked")
         List<Range> ranges = (List<Range>) otherOptions.get(RangeAggregator.RANGES_FIELD);
         for (Range range : ranges) {
@@ -44,14 +44,14 @@ import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator;
 import org.elasticsearch.search.aggregations.support.AggregationContext;
 import org.elasticsearch.search.aggregations.support.ValueType;
 import org.elasticsearch.search.aggregations.support.ValuesSource;
-import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder;
+import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
 import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
 import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
 import org.elasticsearch.search.aggregations.support.ValuesSourceType;
 
 
-public final class IpRangeAggregatorBuilder
-        extends ValuesSourceAggregatorBuilder<ValuesSource.Bytes, IpRangeAggregatorBuilder> {
+public final class IpRangeAggregationBuilder
+        extends ValuesSourceAggregationBuilder<ValuesSource.Bytes, IpRangeAggregationBuilder> {
     private static final String NAME = "ip_range";
     public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME);
     private static final InternalAggregation.Type TYPE = new InternalAggregation.Type(NAME);
@@ -163,7 +163,7 @@ public final class IpRangeAggregatorBuilder
     private boolean keyed = false;
     private List<Range> ranges = new ArrayList<>();
 
-    public IpRangeAggregatorBuilder(String name) {
+    public IpRangeAggregationBuilder(String name) {
         super(name, TYPE, ValuesSourceType.BYTES, ValueType.IP);
     }
 
@@ -172,7 +172,7 @@ public final class IpRangeAggregatorBuilder
         return NAME;
     }
 
-    public IpRangeAggregatorBuilder keyed(boolean keyed) {
+    public IpRangeAggregationBuilder keyed(boolean keyed) {
         this.keyed = keyed;
         return this;
     }
@@ -187,7 +187,7 @@ public final class IpRangeAggregatorBuilder
     }
 
     /** Add a new {@link Range} to this aggregation. */
-    public IpRangeAggregatorBuilder addRange(Range range) {
+    public IpRangeAggregationBuilder addRange(Range range) {
         ranges.add(range);
         return this;
     }
@@ -202,7 +202,7 @@ public final class IpRangeAggregatorBuilder
      * @param to
      *            the upper bound on the distances, exclusive
      */
-    public IpRangeAggregatorBuilder addRange(String key, String from, String to) {
+    public IpRangeAggregationBuilder addRange(String key, String from, String to) {
         addRange(new Range(key, from, to));
         return this;
     }
@@ -210,7 +210,7 @@ public final class IpRangeAggregatorBuilder
     /**
      * Add a new range to this aggregation using the CIDR notation.
      */
-    public IpRangeAggregatorBuilder addMaskRange(String key, String mask) {
+    public IpRangeAggregationBuilder addMaskRange(String key, String mask) {
         return addRange(new Range(key, mask));
     }
 
@@ -218,7 +218,7 @@ public final class IpRangeAggregatorBuilder
     * Same as {@link #addMaskRange(String, String)} but uses the mask itself as
     * a key.
    */
-    public IpRangeAggregatorBuilder addMaskRange(String mask) {
+    public IpRangeAggregationBuilder addMaskRange(String mask) {
         return addRange(new Range(mask, mask));
     }
 
@@ -226,7 +226,7 @@ public final class IpRangeAggregatorBuilder
     * Same as {@link #addRange(String, String, String)} but the key will be
     * automatically generated.
    */
-    public IpRangeAggregatorBuilder addRange(String from, String to) {
+    public IpRangeAggregationBuilder addRange(String from, String to) {
         return addRange(null, from, to);
     }
 
@@ -234,7 +234,7 @@ public final class IpRangeAggregatorBuilder
     * Same as {@link #addRange(String, String, String)} but there will be no
     * lower bound.
    */
-    public IpRangeAggregatorBuilder addUnboundedTo(String key, String to) {
+    public IpRangeAggregationBuilder addUnboundedTo(String key, String to) {
         addRange(new Range(key, null, to));
         return this;
     }
@@ -243,7 +243,7 @@ public final class IpRangeAggregatorBuilder
     * Same as {@link #addUnboundedTo(String, String)} but the key will be
     * generated automatically.
    */
-    public IpRangeAggregatorBuilder addUnboundedTo(String to) {
+    public IpRangeAggregationBuilder addUnboundedTo(String to) {
         return addUnboundedTo(null, to);
     }
 
@@ -251,13 +251,13 @@ public final class IpRangeAggregatorBuilder
     * Same as {@link #addRange(String, String, String)} but there will be no
     * upper bound.
    */
-    public IpRangeAggregatorBuilder addUnboundedFrom(String key, String from) {
+    public IpRangeAggregationBuilder addUnboundedFrom(String key, String from) {
         addRange(new Range(key, from, null));
         return this;
     }
 
     @Override
-    public IpRangeAggregatorBuilder script(Script script) {
+    public IpRangeAggregationBuilder script(Script script) {
         throw new IllegalArgumentException("[ip_range] does not support scripts");
     }
 
@@ -265,11 +265,11 @@ public final class IpRangeAggregatorBuilder
     * Same as {@link #addUnboundedFrom(String, String)} but the key will be
     * generated automatically.
    */
-    public IpRangeAggregatorBuilder addUnboundedFrom(String from) {
+    public IpRangeAggregationBuilder addUnboundedFrom(String from) {
         return addUnboundedFrom(null, from);
     }
 
-    public IpRangeAggregatorBuilder(StreamInput in) throws IOException {
+    public IpRangeAggregationBuilder(StreamInput in) throws IOException {
         super(in, TYPE, ValuesSourceType.BYTES, ValueType.IP);
         final int numRanges = in.readVInt();
         for (int i = 0; i < numRanges; ++i) {
@@ -323,7 +323,7 @@ public final class IpRangeAggregatorBuilder
 
     @Override
     protected boolean innerEquals(Object obj) {
-        IpRangeAggregatorBuilder that = (IpRangeAggregatorBuilder) obj;
+        IpRangeAggregationBuilder that = (IpRangeAggregationBuilder) obj;
         return keyed == that.keyed
                 && ranges.equals(that.ranges);
     }
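Migration note: the ip_range builder keeps the same range API under its new name. A sketch (hypothetical aggregation name and addresses; the calls shown are the ones renamed above):

    IpRangeAggregationBuilder clients = new IpRangeAggregationBuilder("clients")
            .addUnboundedTo("10.0.0.255")  // key generated automatically
            .addMaskRange("10.0.1.0/24")   // CIDR notation; the mask doubles as the key
            .addUnboundedFrom("10.0.2.0");
    // script(...) is rejected for ip_range and throws IllegalArgumentException, per the hunk above.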
@@ -30,10 +30,10 @@ import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentParser.Token;
 import org.elasticsearch.search.aggregations.support.AbstractValuesSourceParser.BytesValuesSourceParser;
 import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator;
-import org.elasticsearch.search.aggregations.bucket.range.ip.IpRangeAggregatorBuilder.Range;
+import org.elasticsearch.search.aggregations.bucket.range.ip.IpRangeAggregationBuilder.Range;
 import org.elasticsearch.search.aggregations.support.ValueType;
 import org.elasticsearch.search.aggregations.support.ValuesSource;
-import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder;
+import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
 import org.elasticsearch.search.aggregations.support.ValuesSourceType;
 
 /**
@@ -48,10 +48,10 @@ public class IpRangeParser extends BytesValuesSourceParser {
     }
 
     @Override
-    protected ValuesSourceAggregatorBuilder<ValuesSource.Bytes, ?> createFactory(
+    protected ValuesSourceAggregationBuilder<ValuesSource.Bytes, ?> createFactory(
             String aggregationName, ValuesSourceType valuesSourceType,
             ValueType targetValueType, Map<ParseField, Object> otherOptions) {
-        IpRangeAggregatorBuilder range = new IpRangeAggregatorBuilder(aggregationName);
+        IpRangeAggregationBuilder range = new IpRangeAggregationBuilder(aggregationName);
         @SuppressWarnings("unchecked")
         Iterable<Range> ranges = (Iterable<Range>) otherOptions.get(RangeAggregator.RANGES_FIELD);
         if (otherOptions.containsKey(RangeAggregator.RANGES_FIELD)) {
@@ -28,7 +28,7 @@ import org.elasticsearch.search.aggregations.AggregatorFactory;
 import org.elasticsearch.search.aggregations.InternalAggregation.Type;
 import org.elasticsearch.search.aggregations.support.AggregationContext;
 import org.elasticsearch.search.aggregations.support.ValuesSource;
-import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder;
+import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
 import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
 import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
 import org.elasticsearch.search.aggregations.support.ValuesSourceType;
@@ -36,25 +36,25 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceType;
 import java.io.IOException;
 import java.util.Objects;
 
-public class DiversifiedAggregatorBuilder extends ValuesSourceAggregatorBuilder<ValuesSource, DiversifiedAggregatorBuilder> {
+public class DiversifiedAggregationBuilder extends ValuesSourceAggregationBuilder<ValuesSource, DiversifiedAggregationBuilder> {
     public static final String NAME = "diversified_sampler";
     public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME);
     public static final Type TYPE = new Type(NAME);
 
     public static final int MAX_DOCS_PER_VALUE_DEFAULT = 1;
 
-    private int shardSize = SamplerAggregatorBuilder.DEFAULT_SHARD_SAMPLE_SIZE;
+    private int shardSize = SamplerAggregationBuilder.DEFAULT_SHARD_SAMPLE_SIZE;
     private int maxDocsPerValue = MAX_DOCS_PER_VALUE_DEFAULT;
     private String executionHint = null;
 
-    public DiversifiedAggregatorBuilder(String name) {
+    public DiversifiedAggregationBuilder(String name) {
         super(name, TYPE, ValuesSourceType.ANY, null);
     }
 
     /**
      * Read from a stream.
      */
-    public DiversifiedAggregatorBuilder(StreamInput in) throws IOException {
+    public DiversifiedAggregationBuilder(StreamInput in) throws IOException {
         super(in, TYPE, ValuesSourceType.ANY, null);
         shardSize = in.readVInt();
         maxDocsPerValue = in.readVInt();
@@ -71,7 +71,7 @@ public class DiversifiedAggregatorBuilder extends ValuesSourceAggregatorBuilder<
     /**
      * Set the max num docs to be returned from each shard.
      */
-    public DiversifiedAggregatorBuilder shardSize(int shardSize) {
+    public DiversifiedAggregationBuilder shardSize(int shardSize) {
         if (shardSize < 0) {
             throw new IllegalArgumentException(
                     "[shardSize] must be greater than or equal to 0. Found [" + shardSize + "] in [" + name + "]");
@@ -90,7 +90,7 @@ public class DiversifiedAggregatorBuilder extends ValuesSourceAggregatorBuilder<
     /**
      * Set the max num docs to be returned per value.
      */
-    public DiversifiedAggregatorBuilder maxDocsPerValue(int maxDocsPerValue) {
+    public DiversifiedAggregationBuilder maxDocsPerValue(int maxDocsPerValue) {
         if (maxDocsPerValue < 0) {
             throw new IllegalArgumentException(
                     "[maxDocsPerValue] must be greater than or equal to 0. Found [" + maxDocsPerValue + "] in [" + name + "]");
@@ -109,7 +109,7 @@ public class DiversifiedAggregatorBuilder extends ValuesSourceAggregatorBuilder<
     /**
      * Set the execution hint.
      */
-    public DiversifiedAggregatorBuilder executionHint(String executionHint) {
+    public DiversifiedAggregationBuilder executionHint(String executionHint) {
         this.executionHint = executionHint;
         return this;
     }
@@ -145,7 +145,7 @@ public class DiversifiedAggregatorBuilder extends ValuesSourceAggregatorBuilder<
 
     @Override
     protected boolean innerEquals(Object obj) {
-        DiversifiedAggregatorBuilder other = (DiversifiedAggregatorBuilder) obj;
+        DiversifiedAggregationBuilder other = (DiversifiedAggregationBuilder) obj;
         return Objects.equals(shardSize, other.shardSize)
                 && Objects.equals(maxDocsPerValue, other.maxDocsPerValue)
                 && Objects.equals(executionHint, other.executionHint);
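Migration note: the diversified sampler rename is equally mechanical, and the setters above still validate that sizes are non-negative. A sketch (values are hypothetical):

    DiversifiedAggregationBuilder diverse = new DiversifiedAggregationBuilder("diverse_sample")
            .shardSize(200)                     // max docs collected per shard
            .maxDocsPerValue(3)                 // de-duplication cap per value
            .executionHint("global_ordinals");  // expert setting, stored as-is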
@@ -38,9 +38,9 @@ public class DiversifiedSamplerParser extends AnyValuesSourceParser {
     }
 
     @Override
-    protected DiversifiedAggregatorBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType,
+    protected DiversifiedAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType,
             ValueType targetValueType, Map<ParseField, Object> otherOptions) {
-        DiversifiedAggregatorBuilder factory = new DiversifiedAggregatorBuilder(aggregationName);
+        DiversifiedAggregationBuilder factory = new DiversifiedAggregationBuilder(aggregationName);
         Integer shardSize = (Integer) otherOptions.get(SamplerAggregator.SHARD_SIZE_FIELD);
         if (shardSize != null) {
             factory.shardSize(shardSize);
@@ -26,7 +26,7 @@ import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.query.QueryParseContext;
-import org.elasticsearch.search.aggregations.AggregatorBuilder;
+import org.elasticsearch.search.aggregations.AggregationBuilder;
 import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
 import org.elasticsearch.search.aggregations.AggregatorFactory;
 import org.elasticsearch.search.aggregations.support.AggregationContext;
@@ -34,7 +34,7 @@ import org.elasticsearch.search.aggregations.support.AggregationContext;
 import java.io.IOException;
 import java.util.Objects;
 
-public class SamplerAggregatorBuilder extends AggregatorBuilder<SamplerAggregatorBuilder> {
+public class SamplerAggregationBuilder extends AggregationBuilder<SamplerAggregationBuilder> {
     public static final String NAME = InternalSampler.TYPE.name();
     public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME);
 
@@ -42,14 +42,14 @@ public class SamplerAggregatorBuilder extends AggregatorBuilder<SamplerAggregato
 
     private int shardSize = DEFAULT_SHARD_SAMPLE_SIZE;
 
-    public SamplerAggregatorBuilder(String name) {
+    public SamplerAggregationBuilder(String name) {
         super(name, InternalSampler.TYPE);
     }
 
     /**
      * Read from a stream.
      */
-    public SamplerAggregatorBuilder(StreamInput in) throws IOException {
+    public SamplerAggregationBuilder(StreamInput in) throws IOException {
         super(in, InternalSampler.TYPE);
         shardSize = in.readVInt();
     }
@@ -62,7 +62,7 @@ public class SamplerAggregatorBuilder extends AggregatorBuilder<SamplerAggregato
     /**
      * Set the max num docs to be returned from each shard.
      */
-    public SamplerAggregatorBuilder shardSize(int shardSize) {
+    public SamplerAggregationBuilder shardSize(int shardSize) {
         this.shardSize = shardSize;
         return this;
     }
@@ -88,7 +88,7 @@ public class SamplerAggregatorBuilder extends AggregatorBuilder<SamplerAggregato
         return builder;
     }
 
-    public static SamplerAggregatorBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
+    public static SamplerAggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
         XContentParser.Token token;
         String currentFieldName = null;
         Integer shardSize = null;
@@ -110,7 +110,7 @@ public class SamplerAggregatorBuilder extends AggregatorBuilder<SamplerAggregato
             }
         }
 
-        SamplerAggregatorBuilder factory = new SamplerAggregatorBuilder(aggregationName);
+        SamplerAggregationBuilder factory = new SamplerAggregationBuilder(aggregationName);
         if (shardSize != null) {
             factory.shardSize(shardSize);
         }
@@ -124,7 +124,7 @@ public class SamplerAggregatorBuilder extends AggregatorBuilder<SamplerAggregato
 
     @Override
     protected boolean doEquals(Object obj) {
-        SamplerAggregatorBuilder other = (SamplerAggregatorBuilder) obj;
+        SamplerAggregationBuilder other = (SamplerAggregationBuilder) obj;
         return Objects.equals(shardSize, other.shardSize);
     }
 
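Migration note: likewise for the plain sampler. A sketch (hypothetical name and size):

    SamplerAggregationBuilder sample = new SamplerAggregationBuilder("sample")
            .shardSize(100); // serialized with writeVInt/readVInt, as the stream constructor above shows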
@@ -29,12 +29,12 @@ import org.elasticsearch.search.aggregations.bucket.significant.heuristics.JLHSc
 import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic;
 import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregator;
 import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregator.BucketCountThresholds;
-import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregatorBuilder;
+import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
 import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude;
 import org.elasticsearch.search.aggregations.support.AggregationContext;
 import org.elasticsearch.search.aggregations.support.ValueType;
 import org.elasticsearch.search.aggregations.support.ValuesSource;
-import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder;
+import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
 import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
 import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
 import org.elasticsearch.search.aggregations.support.ValuesSourceType;
@@ -45,7 +45,7 @@ import java.util.Objects;
 /**
  *
  */
-public class SignificantTermsAggregatorBuilder extends ValuesSourceAggregatorBuilder<ValuesSource, SignificantTermsAggregatorBuilder> {
+public class SignificantTermsAggregationBuilder extends ValuesSourceAggregationBuilder<ValuesSource, SignificantTermsAggregationBuilder> {
     public static final String NAME = SignificantStringTerms.TYPE.name();
     public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME);
 
@@ -62,14 +62,14 @@ public class SignificantTermsAggregatorBuilder extends ValuesSourceAggregatorBui
     private TermsAggregator.BucketCountThresholds bucketCountThresholds = new BucketCountThresholds(DEFAULT_BUCKET_COUNT_THRESHOLDS);
     private SignificanceHeuristic significanceHeuristic = DEFAULT_SIGNIFICANCE_HEURISTIC;
 
-    public SignificantTermsAggregatorBuilder(String name, ValueType valueType) {
+    public SignificantTermsAggregationBuilder(String name, ValueType valueType) {
         super(name, SignificantStringTerms.TYPE, ValuesSourceType.ANY, valueType);
     }
 
     /**
      * Read from a Stream.
      */
-    public SignificantTermsAggregatorBuilder(StreamInput in) throws IOException {
+    public SignificantTermsAggregationBuilder(StreamInput in) throws IOException {
         super(in, SignificantStringTerms.TYPE, ValuesSourceType.ANY);
         bucketCountThresholds = new BucketCountThresholds(in);
         executionHint = in.readOptionalString();
@@ -100,7 +100,7 @@ public class SignificantTermsAggregatorBuilder extends ValuesSourceAggregatorBui
         return bucketCountThresholds;
     }
 
-    public SignificantTermsAggregatorBuilder bucketCountThresholds(TermsAggregator.BucketCountThresholds bucketCountThresholds) {
+    public SignificantTermsAggregationBuilder bucketCountThresholds(TermsAggregator.BucketCountThresholds bucketCountThresholds) {
         if (bucketCountThresholds == null) {
             throw new IllegalArgumentException("[bucketCountThresholds] must not be null: [" + name + "]");
         }
@@ -112,7 +112,7 @@ public class SignificantTermsAggregatorBuilder extends ValuesSourceAggregatorBui
     * Sets the size - indicating how many term buckets should be returned
     * (defaults to 10)
    */
-    public SignificantTermsAggregatorBuilder size(int size) {
+    public SignificantTermsAggregationBuilder size(int size) {
         if (size < 0) {
             throw new IllegalArgumentException("[size] must be greater than or equal to 0. Found [" + size + "] in [" + name + "]");
         }
@@ -126,7 +126,7 @@ public class SignificantTermsAggregatorBuilder extends ValuesSourceAggregatorBui
     * search execution). The higher the shard size is, the more accurate the
     * results are.
    */
-    public SignificantTermsAggregatorBuilder shardSize(int shardSize) {
+    public SignificantTermsAggregationBuilder shardSize(int shardSize) {
         if (shardSize < 0) {
             throw new IllegalArgumentException(
                     "[shardSize] must be greater than or equal to 0. Found [" + shardSize + "] in [" + name + "]");
@@ -139,7 +139,7 @@ public class SignificantTermsAggregatorBuilder extends ValuesSourceAggregatorBui
     * Set the minimum document count terms should have in order to appear in
     * the response.
    */
-    public SignificantTermsAggregatorBuilder minDocCount(long minDocCount) {
+    public SignificantTermsAggregationBuilder minDocCount(long minDocCount) {
         if (minDocCount < 0) {
             throw new IllegalArgumentException(
                     "[minDocCount] must be greater than or equal to 0. Found [" + minDocCount + "] in [" + name + "]");
@@ -152,7 +152,7 @@ public class SignificantTermsAggregatorBuilder extends ValuesSourceAggregatorBui
     * Set the minimum document count terms should have on the shard in order to
     * appear in the response.
    */
-    public SignificantTermsAggregatorBuilder shardMinDocCount(long shardMinDocCount) {
+    public SignificantTermsAggregationBuilder shardMinDocCount(long shardMinDocCount) {
         if (shardMinDocCount < 0) {
             throw new IllegalArgumentException(
                     "[shardMinDocCount] must be greater than or equal to 0. Found [" + shardMinDocCount + "] in [" + name + "]");
@@ -164,7 +164,7 @@ public class SignificantTermsAggregatorBuilder extends ValuesSourceAggregatorBui
     /**
     * Expert: sets an execution hint to the aggregation.
    */
-    public SignificantTermsAggregatorBuilder executionHint(String executionHint) {
+    public SignificantTermsAggregationBuilder executionHint(String executionHint) {
         this.executionHint = executionHint;
         return this;
     }
@@ -176,7 +176,7 @@ public class SignificantTermsAggregatorBuilder extends ValuesSourceAggregatorBui
         return executionHint;
     }
 
-    public SignificantTermsAggregatorBuilder backgroundFilter(QueryBuilder backgroundFilter) {
+    public SignificantTermsAggregationBuilder backgroundFilter(QueryBuilder backgroundFilter) {
         if (backgroundFilter == null) {
             throw new IllegalArgumentException("[backgroundFilter] must not be null: [" + name + "]");
         }
@@ -191,7 +191,7 @@ public class SignificantTermsAggregatorBuilder extends ValuesSourceAggregatorBui
     /**
     * Set terms to include and exclude from the aggregation results
    */
-    public SignificantTermsAggregatorBuilder includeExclude(IncludeExclude includeExclude) {
+    public SignificantTermsAggregationBuilder includeExclude(IncludeExclude includeExclude) {
         this.includeExclude = includeExclude;
         return this;
     }
@@ -203,7 +203,7 @@ public class SignificantTermsAggregatorBuilder extends ValuesSourceAggregatorBui
         return includeExclude;
     }
 
-    public SignificantTermsAggregatorBuilder significanceHeuristic(SignificanceHeuristic significanceHeuristic) {
+    public SignificantTermsAggregationBuilder significanceHeuristic(SignificanceHeuristic significanceHeuristic) {
         if (significanceHeuristic == null) {
             throw new IllegalArgumentException("[significanceHeuristic] must not be null: [" + name + "]");
         }
@@ -226,7 +226,7 @@ public class SignificantTermsAggregatorBuilder extends ValuesSourceAggregatorBui
     protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
         bucketCountThresholds.toXContent(builder, params);
         if (executionHint != null) {
-            builder.field(TermsAggregatorBuilder.EXECUTION_HINT_FIELD_NAME.getPreferredName(), executionHint);
+            builder.field(TermsAggregationBuilder.EXECUTION_HINT_FIELD_NAME.getPreferredName(), executionHint);
         }
         if (filterBuilder != null) {
             builder.field(BACKGROUND_FILTER.getPreferredName(), filterBuilder);
@@ -245,7 +245,7 @@ public class SignificantTermsAggregatorBuilder extends ValuesSourceAggregatorBui
 
     @Override
     protected boolean innerEquals(Object obj) {
-        SignificantTermsAggregatorBuilder other = (SignificantTermsAggregatorBuilder) obj;
+        SignificantTermsAggregationBuilder other = (SignificantTermsAggregationBuilder) obj;
         return Objects.equals(bucketCountThresholds, other.bucketCountThresholds)
                 && Objects.equals(executionHint, other.executionHint)
                 && Objects.equals(filterBuilder, other.filterBuilder)
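Migration note: significant_terms callers swap the class name the same way. A sketch (field names and thresholds are hypothetical; QueryBuilders.termQuery is assumed for the background filter):

    SignificantTermsAggregationBuilder keywords =
            new SignificantTermsAggregationBuilder("keywords", ValueType.STRING)
                    .size(10)        // term buckets to return
                    .minDocCount(5)  // the setters above reject negative values
                    .backgroundFilter(QueryBuilders.termQuery("status", "published"));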
@@ -178,7 +178,7 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac
 
         numberOfAggregatorsCreated++;
         BucketCountThresholds bucketCountThresholds = new BucketCountThresholds(this.bucketCountThresholds);
-        if (bucketCountThresholds.getShardSize() == SignificantTermsAggregatorBuilder.DEFAULT_BUCKET_COUNT_THRESHOLDS.getShardSize()) {
+        if (bucketCountThresholds.getShardSize() == SignificantTermsAggregationBuilder.DEFAULT_BUCKET_COUNT_THRESHOLDS.getShardSize()) {
             // The user has not made a shardSize selection .
             // Use default heuristic to avoid any wrong-ranking caused by
             // distributed counting
@@ -211,7 +211,14 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac
             }
         }
         assert execution != null;
-        return execution.create(name, factories, valuesSource, config.format(), bucketCountThresholds, includeExclude, context, parent,
+
+        DocValueFormat format = config.format();
+        if ((includeExclude != null) && (includeExclude.isRegexBased()) && format != DocValueFormat.RAW) {
+            throw new AggregationExecutionException("Aggregation [" + name + "] cannot support regular expression style include/exclude "
+                    + "settings as they can only be applied to string fields. Use an array of values for include/exclude clauses");
+        }
+
+        return execution.create(name, factories, valuesSource, format, bucketCountThresholds, includeExclude, context, parent,
                 significanceHeuristic, this, pipelineAggregators, metaData);
     }
 
@@ -227,7 +234,7 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac
         }
         IncludeExclude.LongFilter longFilter = null;
         if (includeExclude != null) {
-            longFilter = includeExclude.convertToLongFilter();
+            longFilter = includeExclude.convertToLongFilter(config.format());
         }
         return new SignificantLongTermsAggregator(name, factories, (ValuesSource.Numeric) valuesSource, config.format(),
                 bucketCountThresholds, context, parent, significanceHeuristic, this, longFilter, pipelineAggregators,
@@ -248,7 +255,7 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac
             AggregationContext aggregationContext, Aggregator parent, SignificanceHeuristic significanceHeuristic,
             SignificantTermsAggregatorFactory termsAggregatorFactory, List<PipelineAggregator> pipelineAggregators,
             Map<String, Object> metaData) throws IOException {
-        final IncludeExclude.StringFilter filter = includeExclude == null ? null : includeExclude.convertToStringFilter();
+        final IncludeExclude.StringFilter filter = includeExclude == null ? null : includeExclude.convertToStringFilter(format);
         return new SignificantStringTermsAggregator(name, factories, valuesSource, format, bucketCountThresholds, filter,
                 aggregationContext, parent, significanceHeuristic, termsAggregatorFactory, pipelineAggregators, metaData);
     }
@@ -262,7 +269,7 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac
             AggregationContext aggregationContext, Aggregator parent, SignificanceHeuristic significanceHeuristic,
             SignificantTermsAggregatorFactory termsAggregatorFactory, List<PipelineAggregator> pipelineAggregators,
             Map<String, Object> metaData) throws IOException {
-        final IncludeExclude.OrdinalsFilter filter = includeExclude == null ? null : includeExclude.convertToOrdinalsFilter();
+        final IncludeExclude.OrdinalsFilter filter = includeExclude == null ? null : includeExclude.convertToOrdinalsFilter(format);
         return new GlobalOrdinalsSignificantTermsAggregator(name, factories,
                 (ValuesSource.Bytes.WithOrdinals.FieldData) valuesSource, format, bucketCountThresholds, filter,
                 aggregationContext, parent, significanceHeuristic, termsAggregatorFactory, pipelineAggregators, metaData);
@@ -277,7 +284,7 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac
             AggregationContext aggregationContext, Aggregator parent, SignificanceHeuristic significanceHeuristic,
             SignificantTermsAggregatorFactory termsAggregatorFactory, List<PipelineAggregator> pipelineAggregators,
             Map<String, Object> metaData) throws IOException {
-        final IncludeExclude.OrdinalsFilter filter = includeExclude == null ? null : includeExclude.convertToOrdinalsFilter();
+        final IncludeExclude.OrdinalsFilter filter = includeExclude == null ? null : includeExclude.convertToOrdinalsFilter(format);
         return new GlobalOrdinalsSignificantTermsAggregator.WithHash(name, factories,
                 (ValuesSource.Bytes.WithOrdinals.FieldData) valuesSource, format, bucketCountThresholds, filter,
                 aggregationContext, parent, significanceHeuristic, termsAggregatorFactory, pipelineAggregators, metaData);
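Behavioural note: the factory hunks above are not just a rename — regex-style include/exclude is now rejected at aggregator-creation time unless the field's DocValueFormat is RAW (a string field), instead of failing obscurely later. A sketch of the distinction (hypothetical names; IncludeExclude's regex-string constructor is assumed):

    // Fine: a regex include on a string field.
    new SignificantTermsAggregationBuilder("sig_tags", ValueType.STRING)
            .includeExclude(new IncludeExclude("user_.*", null));

    // Now fails fast with AggregationExecutionException, because a numeric
    // field cannot apply a regex; an array of exact values must be used instead.
    new SignificantTermsAggregationBuilder("sig_codes", ValueType.LONG)
            .includeExclude(new IncludeExclude("4\\d\\d", null));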
@@ -53,10 +53,11 @@ public class SignificantTermsParser extends AbstractTermsParser {
     }
 
     @Override
-    protected SignificantTermsAggregatorBuilder doCreateFactory(String aggregationName, ValuesSourceType valuesSourceType,
-            ValueType targetValueType, BucketCountThresholds bucketCountThresholds, SubAggCollectionMode collectMode, String executionHint,
+    protected SignificantTermsAggregationBuilder doCreateFactory(String aggregationName, ValuesSourceType valuesSourceType,
+            ValueType targetValueType, BucketCountThresholds bucketCountThresholds,
+            SubAggCollectionMode collectMode, String executionHint,
             IncludeExclude incExc, Map<ParseField, Object> otherOptions) {
-        SignificantTermsAggregatorBuilder factory = new SignificantTermsAggregatorBuilder(aggregationName, targetValueType);
+        SignificantTermsAggregationBuilder factory = new SignificantTermsAggregationBuilder(aggregationName, targetValueType);
         if (bucketCountThresholds != null) {
             factory.bucketCountThresholds(bucketCountThresholds);
         }
@@ -66,11 +67,12 @@ public class SignificantTermsParser extends AbstractTermsParser {
         if (incExc != null) {
             factory.includeExclude(incExc);
         }
-        QueryBuilder backgroundFilter = (QueryBuilder) otherOptions.get(SignificantTermsAggregatorBuilder.BACKGROUND_FILTER);
+        QueryBuilder backgroundFilter = (QueryBuilder) otherOptions.get(SignificantTermsAggregationBuilder.BACKGROUND_FILTER);
         if (backgroundFilter != null) {
             factory.backgroundFilter(backgroundFilter);
         }
-        SignificanceHeuristic significanceHeuristic = (SignificanceHeuristic) otherOptions.get(SignificantTermsAggregatorBuilder.HEURISTIC);
+        SignificanceHeuristic significanceHeuristic =
+                (SignificanceHeuristic) otherOptions.get(SignificantTermsAggregationBuilder.HEURISTIC);
         if (significanceHeuristic != null) {
             factory.significanceHeuristic(significanceHeuristic);
         }
@@ -85,12 +87,12 @@ public class SignificantTermsParser extends AbstractTermsParser {
                     .lookupReturningNullIfNotFound(currentFieldName, parseFieldMatcher);
             if (significanceHeuristicParser != null) {
                 SignificanceHeuristic significanceHeuristic = significanceHeuristicParser.parse(parser, parseFieldMatcher);
-                otherOptions.put(SignificantTermsAggregatorBuilder.HEURISTIC, significanceHeuristic);
+                otherOptions.put(SignificantTermsAggregationBuilder.HEURISTIC, significanceHeuristic);
                 return true;
-            } else if (parseFieldMatcher.match(currentFieldName, SignificantTermsAggregatorBuilder.BACKGROUND_FILTER)) {
+            } else if (parseFieldMatcher.match(currentFieldName, SignificantTermsAggregationBuilder.BACKGROUND_FILTER)) {
                 QueryParseContext queryParseContext = new QueryParseContext(queriesRegistry, parser, parseFieldMatcher);
                 QueryBuilder filter = queryParseContext.parseInnerQueryBuilder();
-                otherOptions.put(SignificantTermsAggregatorBuilder.BACKGROUND_FILTER, filter);
+                otherOptions.put(SignificantTermsAggregationBuilder.BACKGROUND_FILTER, filter);
                 return true;
             }
         }
@@ -99,6 +101,6 @@ public class SignificantTermsParser extends AbstractTermsParser {
 
     @Override
     protected BucketCountThresholds getDefaultBucketCountThresholds() {
-        return new TermsAggregator.BucketCountThresholds(SignificantTermsAggregatorBuilder.DEFAULT_BUCKET_COUNT_THRESHOLDS);
+        return new TermsAggregator.BucketCountThresholds(SignificantTermsAggregationBuilder.DEFAULT_BUCKET_COUNT_THRESHOLDS);
     }
 }
@@ -60,7 +60,7 @@ public class UnmappedSignificantTerms extends InternalSignificantTerms<UnmappedS
     public UnmappedSignificantTerms(String name, int requiredSize, long minDocCount, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
         //We pass zero for index/subset sizes because for the purpose of significant term analysis
         // we assume an unmapped index's size is irrelevant to the proceedings.
-        super(0, 0, name, DocValueFormat.RAW, requiredSize, minDocCount, SignificantTermsAggregatorBuilder.DEFAULT_SIGNIFICANCE_HEURISTIC,
+        super(0, 0, name, DocValueFormat.RAW, requiredSize, minDocCount, SignificantTermsAggregationBuilder.DEFAULT_SIGNIFICANCE_HEURISTIC,
                 BUCKETS, pipelineAggregators, metaData);
     }
 
@@ -29,7 +29,7 @@ import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude
 import org.elasticsearch.search.aggregations.support.AbstractValuesSourceParser.AnyValuesSourceParser;
 import org.elasticsearch.search.aggregations.support.ValueType;
 import org.elasticsearch.search.aggregations.support.ValuesSource;
-import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder;
+import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
 import org.elasticsearch.search.aggregations.support.ValuesSourceType;
 
 import java.io.IOException;
@@ -50,8 +50,10 @@ public abstract class AbstractTermsParser extends AnyValuesSourceParser {
     }
 
     @Override
-    protected final ValuesSourceAggregatorBuilder<ValuesSource, ?> createFactory(String aggregationName, ValuesSourceType valuesSourceType,
-            ValueType targetValueType, Map<ParseField, Object> otherOptions) {
+    protected final ValuesSourceAggregationBuilder<ValuesSource, ?> createFactory(String aggregationName,
+                                                                                  ValuesSourceType valuesSourceType,
+                                                                                  ValueType targetValueType,
+                                                                                  Map<ParseField, Object> otherOptions) {
         BucketCountThresholds bucketCountThresholds = getDefaultBucketCountThresholds();
         Integer requiredSize = (Integer) otherOptions.get(REQUIRED_SIZE_FIELD_NAME);
         if (requiredSize != null && requiredSize != -1) {
@@ -77,10 +79,14 @@ public abstract class AbstractTermsParser extends AnyValuesSourceParser {
                 otherOptions);
     }
 
-    protected abstract ValuesSourceAggregatorBuilder<ValuesSource, ?> doCreateFactory(String aggregationName,
+    protected abstract ValuesSourceAggregationBuilder<ValuesSource, ?> doCreateFactory(String aggregationName,
                                                                                        ValuesSourceType valuesSourceType,
-            ValueType targetValueType, BucketCountThresholds bucketCountThresholds, SubAggCollectionMode collectMode, String executionHint,
-            IncludeExclude incExc, Map<ParseField, Object> otherOptions);
+                                                                                       ValueType targetValueType,
+                                                                                       BucketCountThresholds bucketCountThresholds,
+                                                                                       SubAggCollectionMode collectMode,
+                                                                                       String executionHint,
+                                                                                       IncludeExclude incExc,
+                                                                                       Map<ParseField, Object> otherOptions);
 
     @Override
     protected boolean token(String aggregationName, String currentFieldName, Token token, XContentParser parser,
@@ -30,7 +30,7 @@ import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude
 import org.elasticsearch.search.aggregations.support.AggregationContext;
 import org.elasticsearch.search.aggregations.support.ValueType;
 import org.elasticsearch.search.aggregations.support.ValuesSource;
-import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder;
+import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
 import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
 import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
 import org.elasticsearch.search.aggregations.support.ValuesSourceType;
@@ -38,7 +38,7 @@ import java.io.IOException;
 import java.util.List;
 import java.util.Objects;
 
-public class TermsAggregatorBuilder extends ValuesSourceAggregatorBuilder<ValuesSource, TermsAggregatorBuilder> {
+public class TermsAggregationBuilder extends ValuesSourceAggregationBuilder<ValuesSource, TermsAggregationBuilder> {
     public static final String NAME = StringTerms.TYPE.name();
     public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME);
 
@@ -61,14 +61,14 @@ public class TermsAggregatorBuilder extends ValuesSourceAggregatorBuilder<Values
             DEFAULT_BUCKET_COUNT_THRESHOLDS);
     private boolean showTermDocCountError = false;
 
-    public TermsAggregatorBuilder(String name, ValueType valueType) {
+    public TermsAggregationBuilder(String name, ValueType valueType) {
         super(name, StringTerms.TYPE, ValuesSourceType.ANY, valueType);
     }
 
     /**
      * Read from a stream.
      */
-    public TermsAggregatorBuilder(StreamInput in) throws IOException {
+    public TermsAggregationBuilder(StreamInput in) throws IOException {
         super(in, StringTerms.TYPE, ValuesSourceType.ANY);
         bucketCountThresholds = new BucketCountThresholds(in);
         collectMode = SubAggCollectionMode.readFromStream(in);
@@ -97,7 +97,7 @@ public class TermsAggregatorBuilder extends ValuesSourceAggregatorBuilder<Values
         return bucketCountThresholds;
     }
 
-    public TermsAggregatorBuilder bucketCountThresholds(TermsAggregator.BucketCountThresholds bucketCountThresholds) {
+    public TermsAggregationBuilder bucketCountThresholds(TermsAggregator.BucketCountThresholds bucketCountThresholds) {
         if (bucketCountThresholds == null) {
             throw new IllegalArgumentException("[bucketCountThresholds] must not be null: [" + name + "]");
         }
@@ -109,7 +109,7 @@ public class TermsAggregatorBuilder extends ValuesSourceAggregatorBuilder<Values
     * Sets the size - indicating how many term buckets should be returned
     * (defaults to 10)
    */
-    public TermsAggregatorBuilder size(int size) {
+    public TermsAggregationBuilder size(int size) {
         if (size < 0) {
             throw new IllegalArgumentException("[size] must be greater than or equal to 0. Found [" + size + "] in [" + name + "]");
         }
@@ -123,7 +123,7 @@ public class TermsAggregatorBuilder extends ValuesSourceAggregatorBuilder<Values
     * search execution). The higher the shard size is, the more accurate the
     * results are.
    */
-    public TermsAggregatorBuilder shardSize(int shardSize) {
+    public TermsAggregationBuilder shardSize(int shardSize) {
         if (shardSize < 0) {
             throw new IllegalArgumentException(
                     "[shardSize] must be greater than or equal to 0. Found [" + shardSize + "] in [" + name + "]");
@@ -136,7 +136,7 @@ public class TermsAggregatorBuilder extends ValuesSourceAggregatorBuilder<Values
     * Set the minimum document count terms should have in order to appear in
     * the response.
    */
-    public TermsAggregatorBuilder minDocCount(long minDocCount) {
+    public TermsAggregationBuilder minDocCount(long minDocCount) {
         if (minDocCount < 0) {
             throw new IllegalArgumentException(
                     "[minDocCount] must be greater than or equal to 0. Found [" + minDocCount + "] in [" + name + "]");
@@ -149,7 +149,7 @@ public class TermsAggregatorBuilder extends ValuesSourceAggregatorBuilder<Values
     * Set the minimum document count terms should have on the shard in order to
     * appear in the response.
    */
-    public TermsAggregatorBuilder shardMinDocCount(long shardMinDocCount) {
+    public TermsAggregationBuilder shardMinDocCount(long shardMinDocCount) {
         if (shardMinDocCount < 0) {
             throw new IllegalArgumentException(
                     "[shardMinDocCount] must be greater than or equal to 0. Found [" + shardMinDocCount + "] in [" + name + "]");
@@ -161,7 +161,7 @@ public class TermsAggregatorBuilder extends ValuesSourceAggregatorBuilder<Values
     /**
     * Sets the order in which the buckets will be returned.
    */
-    public TermsAggregatorBuilder order(Terms.Order order) {
+    public TermsAggregationBuilder order(Terms.Order order) {
         if (order == null) {
             throw new IllegalArgumentException("[order] must not be null: [" + name + "]");
         }
@@ -172,7 +172,7 @@ public class TermsAggregatorBuilder extends ValuesSourceAggregatorBuilder<Values
     /**
     * Sets the order in which the buckets will be returned.
    */
-    public TermsAggregatorBuilder order(List<Terms.Order> orders) {
+    public TermsAggregationBuilder order(List<Terms.Order> orders) {
         if (orders == null) {
             throw new IllegalArgumentException("[orders] must not be null: [" + name + "]");
         }
@@ -190,7 +190,7 @@ public class TermsAggregatorBuilder extends ValuesSourceAggregatorBuilder<Values
     /**
     * Expert: sets an execution hint to the aggregation.
    */
-    public TermsAggregatorBuilder executionHint(String executionHint) {
+    public TermsAggregationBuilder executionHint(String executionHint) {
         this.executionHint = executionHint;
         return this;
     }
@@ -205,7 +205,7 @@ public class TermsAggregatorBuilder extends ValuesSourceAggregatorBuilder<Values
     /**
     * Expert: set the collection mode.
    */
-    public TermsAggregatorBuilder collectMode(SubAggCollectionMode collectMode) {
+    public TermsAggregationBuilder collectMode(SubAggCollectionMode collectMode) {
         if (collectMode == null) {
             throw new IllegalArgumentException("[collectMode] must not be null: [" + name + "]");
         }
@@ -223,7 +223,7 @@ public class TermsAggregatorBuilder extends ValuesSourceAggregatorBuilder<Values
     /**
     * Set terms to include and exclude from the aggregation results
    */
-    public TermsAggregatorBuilder includeExclude(IncludeExclude includeExclude) {
+    public TermsAggregationBuilder includeExclude(IncludeExclude includeExclude) {
         this.includeExclude = includeExclude;
         return this;
     }
@@ -245,7 +245,7 @@ public class TermsAggregatorBuilder extends ValuesSourceAggregatorBuilder<Values
     /**
     * Set whether doc count error will be return for individual terms
    */
-    public TermsAggregatorBuilder showTermDocCountError(boolean showTermDocCountError) {
+    public TermsAggregationBuilder showTermDocCountError(boolean showTermDocCountError) {
         this.showTermDocCountError = showTermDocCountError;
         return this;
     }
@@ -262,7 +262,7 @@ public class TermsAggregatorBuilder extends ValuesSourceAggregatorBuilder<Values
         bucketCountThresholds.toXContent(builder, params);
         builder.field(SHOW_TERM_DOC_COUNT_ERROR.getPreferredName(), showTermDocCountError);
         if (executionHint != null) {
-            builder.field(TermsAggregatorBuilder.EXECUTION_HINT_FIELD_NAME.getPreferredName(), executionHint);
+            builder.field(TermsAggregationBuilder.EXECUTION_HINT_FIELD_NAME.getPreferredName(), executionHint);
         }
         builder.field(ORDER_FIELD.getPreferredName());
         order.toXContent(builder, params);
@@ -280,7 +280,7 @@ public class TermsAggregatorBuilder extends ValuesSourceAggregatorBuilder<Values
 
     @Override
     protected boolean innerEquals(Object obj) {
-        TermsAggregatorBuilder other = (TermsAggregatorBuilder) obj;
+        TermsAggregationBuilder other = (TermsAggregationBuilder) obj;
         return Objects.equals(bucketCountThresholds, other.bucketCountThresholds)
                 && Objects.equals(collectMode, other.collectMode)
                 && Objects.equals(executionHint, other.executionHint)
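Migration note: the terms builder follows the same pattern, with every fluent setter now returning TermsAggregationBuilder. A sketch (hypothetical values; Terms.Order.count and SubAggCollectionMode.BREADTH_FIRST are assumed to exist as in the surrounding codebase):

    TermsAggregationBuilder genres = new TermsAggregationBuilder("genres", ValueType.STRING)
            .size(20)
            .shardSize(100)                  // both guarded to be >= 0 by the setters above
            .order(Terms.Order.count(false)) // most frequent first
            .collectMode(SubAggCollectionMode.BREADTH_FIRST);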
@@ -139,10 +139,10 @@ public abstract class TermsAggregator extends BucketsAggregator {
 
     @Override
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.field(TermsAggregatorBuilder.REQUIRED_SIZE_FIELD_NAME.getPreferredName(), requiredSize);
-        builder.field(TermsAggregatorBuilder.SHARD_SIZE_FIELD_NAME.getPreferredName(), shardSize);
-        builder.field(TermsAggregatorBuilder.MIN_DOC_COUNT_FIELD_NAME.getPreferredName(), minDocCount);
-        builder.field(TermsAggregatorBuilder.SHARD_MIN_DOC_COUNT_FIELD_NAME.getPreferredName(), shardMinDocCount);
+        builder.field(TermsAggregationBuilder.REQUIRED_SIZE_FIELD_NAME.getPreferredName(), requiredSize);
+        builder.field(TermsAggregationBuilder.SHARD_SIZE_FIELD_NAME.getPreferredName(), shardSize);
+        builder.field(TermsAggregationBuilder.MIN_DOC_COUNT_FIELD_NAME.getPreferredName(), minDocCount);
+        builder.field(TermsAggregationBuilder.SHARD_MIN_DOC_COUNT_FIELD_NAME.getPreferredName(), shardMinDocCount);
         return builder;
     }
 
@@ -93,7 +93,7 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory<Values
         }
         BucketCountThresholds bucketCountThresholds = new BucketCountThresholds(this.bucketCountThresholds);
         if (!(order == InternalOrder.TERM_ASC || order == InternalOrder.TERM_DESC)
-                && bucketCountThresholds.getShardSize() == TermsAggregatorBuilder.DEFAULT_BUCKET_COUNT_THRESHOLDS.getShardSize()) {
+                && bucketCountThresholds.getShardSize() == TermsAggregationBuilder.DEFAULT_BUCKET_COUNT_THRESHOLDS.getShardSize()) {
             // The user has not made a shardSize selection. Use default
             // heuristic to avoid any wrong-ranking caused by distributed
             // counting
@@ -150,8 +150,13 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory<Values
                 }
             }
         }
+        DocValueFormat format = config.format();
+        if ((includeExclude != null) && (includeExclude.isRegexBased()) && format != DocValueFormat.RAW) {
+            throw new AggregationExecutionException("Aggregation [" + name + "] cannot support regular expression style include/exclude "
+                    + "settings as they can only be applied to string fields. Use an array of values for include/exclude clauses");
+        }
 
-        return execution.create(name, factories, valuesSource, order, config.format(), bucketCountThresholds, includeExclude, context, parent,
+        return execution.create(name, factories, valuesSource, order, format, bucketCountThresholds, includeExclude, context, parent,
                 collectMode, showTermDocCountError, pipelineAggregators, metaData);
     }
 
@@ -171,7 +176,7 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory<Values
                     pipelineAggregators, metaData);
         }
         if (includeExclude != null) {
-            longFilter = includeExclude.convertToLongFilter();
+            longFilter = includeExclude.convertToLongFilter(config.format());
        }
         return new LongTermsAggregator(name, factories, (ValuesSource.Numeric) valuesSource, config.format(), order,
                 bucketCountThresholds, context, parent, collectMode, showTermDocCountError, longFilter, pipelineAggregators,
@@ -192,7 +197,7 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory<Values
             AggregationContext aggregationContext, Aggregator parent, SubAggCollectionMode subAggCollectMode,
             boolean showTermDocCountError, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData)
             throws IOException {
-        final IncludeExclude.StringFilter filter = includeExclude == null ? null : includeExclude.convertToStringFilter();
+        final IncludeExclude.StringFilter filter = includeExclude == null ? null : includeExclude.convertToStringFilter(format);
         return new StringTermsAggregator(name, factories, valuesSource, order, format, bucketCountThresholds, filter,
                 aggregationContext, parent, subAggCollectMode, showTermDocCountError, pipelineAggregators, metaData);
     }
@@ -211,7 +216,7 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory<Values
             AggregationContext aggregationContext, Aggregator parent, SubAggCollectionMode subAggCollectMode,
             boolean showTermDocCountError, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData)
             throws IOException {
-        final IncludeExclude.OrdinalsFilter filter = includeExclude == null ? null : includeExclude.convertToOrdinalsFilter();
+        final IncludeExclude.OrdinalsFilter filter = includeExclude == null ? null : includeExclude.convertToOrdinalsFilter(format);
         return new GlobalOrdinalsStringTermsAggregator(name, factories, (ValuesSource.Bytes.WithOrdinals) valuesSource, order,
                 format, bucketCountThresholds, filter, aggregationContext, parent, subAggCollectMode, showTermDocCountError,
                 pipelineAggregators, metaData);
@@ -231,7 +236,7 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory<Values
             AggregationContext aggregationContext, Aggregator parent, SubAggCollectionMode subAggCollectMode,
             boolean showTermDocCountError, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData)
             throws IOException {
-        final IncludeExclude.OrdinalsFilter filter = includeExclude == null ? null : includeExclude.convertToOrdinalsFilter();
+        final IncludeExclude.OrdinalsFilter filter = includeExclude == null ? null : includeExclude.convertToOrdinalsFilter(format);
         return new GlobalOrdinalsStringTermsAggregator.WithHash(name, factories, (ValuesSource.Bytes.WithOrdinals) valuesSource,
                 order, format, bucketCountThresholds, filter, aggregationContext, parent, subAggCollectMode, showTermDocCountError,
                 pipelineAggregators, metaData);
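Behavioural note: as in the significant_terms factory, include/exclude filters are now built against the field's DocValueFormat, so exact values are parsed the way the field formats them, and regex filters are refused for non-string fields. A sketch of the new conversion shape (hypothetical values; the exact-values IncludeExclude constructor is assumed):

    IncludeExclude incExc = new IncludeExclude(new String[] { "alpha", "beta" }, null);
    IncludeExclude.StringFilter filter = incExc.convertToStringFilter(DocValueFormat.RAW);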
@ -41,12 +41,13 @@ import java.util.Map;
|
|||
*/
|
||||
public class TermsParser extends AbstractTermsParser {
|
||||
@Override
|
||||
protected TermsAggregatorBuilder doCreateFactory(String aggregationName, ValuesSourceType valuesSourceType,
|
||||
ValueType targetValueType, BucketCountThresholds bucketCountThresholds, SubAggCollectionMode collectMode, String executionHint,
|
||||
protected TermsAggregationBuilder doCreateFactory(String aggregationName, ValuesSourceType valuesSourceType,
|
||||
ValueType targetValueType, BucketCountThresholds bucketCountThresholds,
|
||||
SubAggCollectionMode collectMode, String executionHint,
|
||||
IncludeExclude incExc, Map<ParseField, Object> otherOptions) {
|
||||
TermsAggregatorBuilder factory = new TermsAggregatorBuilder(aggregationName, targetValueType);
|
||||
TermsAggregationBuilder factory = new TermsAggregationBuilder(aggregationName, targetValueType);
|
||||
@SuppressWarnings("unchecked")
|
||||
List<OrderElement> orderElements = (List<OrderElement>) otherOptions.get(TermsAggregatorBuilder.ORDER_FIELD);
|
||||
List<OrderElement> orderElements = (List<OrderElement>) otherOptions.get(TermsAggregationBuilder.ORDER_FIELD);
|
||||
if (orderElements != null) {
|
||||
List<Terms.Order> orders = new ArrayList<>(orderElements.size());
|
||||
for (OrderElement orderElement : orderElements) {
|
||||
|
@ -66,7 +67,7 @@ public class TermsParser extends AbstractTermsParser {
|
|||
if (incExc != null) {
|
||||
factory.includeExclude(incExc);
|
||||
}
|
||||
Boolean showTermDocCountError = (Boolean) otherOptions.get(TermsAggregatorBuilder.SHOW_TERM_DOC_COUNT_ERROR);
|
||||
Boolean showTermDocCountError = (Boolean) otherOptions.get(TermsAggregationBuilder.SHOW_TERM_DOC_COUNT_ERROR);
|
||||
if (showTermDocCountError != null) {
|
||||
factory.showTermDocCountError(showTermDocCountError);
|
||||
}
|
||||
|
@ -77,12 +78,12 @@ public class TermsParser extends AbstractTermsParser {
|
|||
public boolean parseSpecial(String aggregationName, XContentParser parser, ParseFieldMatcher parseFieldMatcher, Token token,
|
||||
String currentFieldName, Map<ParseField, Object> otherOptions) throws IOException {
|
||||
if (token == XContentParser.Token.START_OBJECT) {
|
||||
if (parseFieldMatcher.match(currentFieldName, TermsAggregatorBuilder.ORDER_FIELD)) {
|
||||
otherOptions.put(TermsAggregatorBuilder.ORDER_FIELD, Collections.singletonList(parseOrderParam(aggregationName, parser)));
|
||||
if (parseFieldMatcher.match(currentFieldName, TermsAggregationBuilder.ORDER_FIELD)) {
|
||||
otherOptions.put(TermsAggregationBuilder.ORDER_FIELD, Collections.singletonList(parseOrderParam(aggregationName, parser)));
|
||||
return true;
|
||||
}
|
||||
} else if (token == XContentParser.Token.START_ARRAY) {
|
||||
if (parseFieldMatcher.match(currentFieldName, TermsAggregatorBuilder.ORDER_FIELD)) {
|
||||
if (parseFieldMatcher.match(currentFieldName, TermsAggregationBuilder.ORDER_FIELD)) {
|
||||
List<OrderElement> orderElements = new ArrayList<>();
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
|
||||
if (token == XContentParser.Token.START_OBJECT) {
|
||||
|
@ -93,12 +94,12 @@ public class TermsParser extends AbstractTermsParser {
|
|||
"Order elements must be of type object in [" + aggregationName + "] found token of type [" + token + "].");
|
||||
}
|
||||
}
|
||||
otherOptions.put(TermsAggregatorBuilder.ORDER_FIELD, orderElements);
|
||||
otherOptions.put(TermsAggregationBuilder.ORDER_FIELD, orderElements);
|
||||
return true;
|
||||
}
|
||||
} else if (token == XContentParser.Token.VALUE_BOOLEAN) {
|
||||
if (parseFieldMatcher.match(currentFieldName, TermsAggregatorBuilder.SHOW_TERM_DOC_COUNT_ERROR)) {
|
||||
otherOptions.put(TermsAggregatorBuilder.SHOW_TERM_DOC_COUNT_ERROR, parser.booleanValue());
|
||||
if (parseFieldMatcher.match(currentFieldName, TermsAggregationBuilder.SHOW_TERM_DOC_COUNT_ERROR)) {
|
||||
otherOptions.put(TermsAggregationBuilder.SHOW_TERM_DOC_COUNT_ERROR, parser.booleanValue());
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
@ -158,7 +159,7 @@ public class TermsParser extends AbstractTermsParser {
|
|||
|
||||
@Override
|
||||
public TermsAggregator.BucketCountThresholds getDefaultBucketCountThresholds() {
|
||||
return new TermsAggregator.BucketCountThresholds(TermsAggregatorBuilder.DEFAULT_BUCKET_COUNT_THRESHOLDS);
|
||||
return new TermsAggregator.BucketCountThresholds(TermsAggregationBuilder.DEFAULT_BUCKET_COUNT_THRESHOLDS);
|
||||
}
|
||||
|
||||
static Terms.Order resolveOrder(String key, boolean asc) {
|
||||
|
|
|
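For reference, a minimal caller-side sketch of the renamed terms API. This is a sketch against the 5.0.0-alpha Java API as shown in this diff; the aggregation name, field, and regex patterns are hypothetical:

    import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
    import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude;
    import org.elasticsearch.search.aggregations.support.ValueType;

    public class TermsRenameSketch {
        public static void main(String[] args) {
            // TermsAggregatorBuilder is now TermsAggregationBuilder; the fluent API is unchanged.
            TermsAggregationBuilder genres = new TermsAggregationBuilder("genres", ValueType.STRING);
            genres.field("genre"); // hypothetical field name
            // Regex-based include/exclude bypasses the new format-aware term parsing above.
            genres.includeExclude(new IncludeExclude("rock.*", "jazz.*"));
        }
    }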
@@ -43,6 +43,7 @@ import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSource.Bytes.WithOrdinals;

@@ -135,7 +136,8 @@ public class IncludeExclude implements Writeable, ToXContent {
}

public static abstract class OrdinalsFilter {
public abstract LongBitSet acceptedGlobalOrdinals(RandomAccessOrds globalOrdinals, ValuesSource.Bytes.WithOrdinals valueSource) throws IOException;
public abstract LongBitSet acceptedGlobalOrdinals(RandomAccessOrds globalOrdinals, ValuesSource.Bytes.WithOrdinals valueSource)
throws IOException;

}

@@ -152,7 +154,8 @@ public class IncludeExclude implements Writeable, ToXContent {
*
*/
@Override
public LongBitSet acceptedGlobalOrdinals(RandomAccessOrds globalOrdinals, ValuesSource.Bytes.WithOrdinals valueSource) throws IOException {
public LongBitSet acceptedGlobalOrdinals(RandomAccessOrds globalOrdinals, ValuesSource.Bytes.WithOrdinals valueSource)
throws IOException {
LongBitSet acceptedGlobalOrdinals = new LongBitSet(globalOrdinals.getValueCount());
TermsEnum globalTermsEnum;
Terms globalTerms = new DocValuesTerms(globalOrdinals);

@@ -179,7 +182,7 @@ public class IncludeExclude implements Writeable, ToXContent {
@Override
public LongBitSet acceptedGlobalOrdinals(RandomAccessOrds globalOrdinals, WithOrdinals valueSource) throws IOException {
LongBitSet acceptedGlobalOrdinals = new LongBitSet(globalOrdinals.getValueCount());
if(includeValues!=null){
if (includeValues != null) {
for (BytesRef term : includeValues) {
long ord = globalOrdinals.lookupTerm(term);
if (ord >= 0) {

@@ -534,33 +537,46 @@ public class IncludeExclude implements Writeable, ToXContent {
return a;
}

public StringFilter convertToStringFilter() {
public StringFilter convertToStringFilter(DocValueFormat format) {
if (isRegexBased()) {
return new AutomatonBackedStringFilter(toAutomaton());
}
return new TermListBackedStringFilter(includeValues, excludeValues);
return new TermListBackedStringFilter(parseForDocValues(includeValues, format), parseForDocValues(excludeValues, format));
}

public OrdinalsFilter convertToOrdinalsFilter() {
private static SortedSet<BytesRef> parseForDocValues(SortedSet<BytesRef> endUserFormattedValues, DocValueFormat format) {
SortedSet<BytesRef> result = endUserFormattedValues;
if (endUserFormattedValues != null) {
if (format != DocValueFormat.RAW) {
result = new TreeSet<>();
for (BytesRef formattedVal : endUserFormattedValues) {
result.add(format.parseBytesRef(formattedVal.utf8ToString()));
}
}
}
return result;
}

public OrdinalsFilter convertToOrdinalsFilter(DocValueFormat format) {

if (isRegexBased()) {
return new AutomatonBackedOrdinalsFilter(toAutomaton());
}
return new TermListBackedOrdinalsFilter(includeValues, excludeValues);
return new TermListBackedOrdinalsFilter(parseForDocValues(includeValues, format), parseForDocValues(excludeValues, format));
}

public LongFilter convertToLongFilter() {
public LongFilter convertToLongFilter(DocValueFormat format) {
int numValids = includeValues == null ? 0 : includeValues.size();
int numInvalids = excludeValues == null ? 0 : excludeValues.size();
LongFilter result = new LongFilter(numValids, numInvalids);
if (includeValues != null) {
for (BytesRef val : includeValues) {
result.addAccept(Long.parseLong(val.utf8ToString()));
result.addAccept(format.parseLong(val.utf8ToString(), false, null));
}
}
if (excludeValues != null) {
for (BytesRef val : excludeValues) {
result.addReject(Long.parseLong(val.utf8ToString()));
result.addReject(format.parseLong(val.utf8ToString(), false, null));
}
}
return result;

@@ -572,13 +588,13 @@ public class IncludeExclude implements Writeable, ToXContent {
LongFilter result = new LongFilter(numValids, numInvalids);
if (includeValues != null) {
for (BytesRef val : includeValues) {
double dval=Double.parseDouble(val.utf8ToString());
double dval = Double.parseDouble(val.utf8ToString());
result.addAccept(NumericUtils.doubleToSortableLong(dval));
}
}
if (excludeValues != null) {
for (BytesRef val : excludeValues) {
double dval=Double.parseDouble(val.utf8ToString());
double dval = Double.parseDouble(val.utf8ToString());
result.addReject(NumericUtils.doubleToSortableLong(dval));
}
}
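The change above threads a DocValueFormat through every include/exclude filter so values supplied in the field's end-user format are parsed back before matching. A minimal sketch of that contract, mirroring the convertToLongFilter(format) call site (the null argument stands for the unused now-supplier, exactly as in the diff):

    import org.elasticsearch.search.DocValueFormat;

    public class DocValueFormatSketch {
        public static void main(String[] args) {
            // RAW is the no-op format; other formats reverse their own formatting first.
            DocValueFormat format = DocValueFormat.RAW;
            long accepted = format.parseLong("42", false, null); // 42L
            System.out.println(accepted);
        }
    }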
@@ -29,24 +29,24 @@ import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;

import java.io.IOException;

public class AvgAggregatorBuilder extends ValuesSourceAggregatorBuilder.LeafOnly<ValuesSource.Numeric, AvgAggregatorBuilder> {
public class AvgAggregationBuilder extends ValuesSourceAggregationBuilder.LeafOnly<ValuesSource.Numeric, AvgAggregationBuilder> {
public static final String NAME = InternalAvg.TYPE.name();
public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME);

public AvgAggregatorBuilder(String name) {
public AvgAggregationBuilder(String name) {
super(name, InternalAvg.TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC);
}

/**
* Read from a stream.
*/
public AvgAggregatorBuilder(StreamInput in) throws IOException {
public AvgAggregationBuilder(StreamInput in) throws IOException {
super(in, InternalAvg.TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC);
}

@@ -44,8 +44,8 @@ public class AvgParser extends NumericValuesSourceParser {
}

@Override
protected AvgAggregatorBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType,
protected AvgAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType,
ValueType targetValueType, Map<ParseField, Object> otherOptions) {
return new AvgAggregatorBuilder(aggregationName);
return new AvgAggregationBuilder(aggregationName);
}
}
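The avg rename is representative of the pattern repeated for the other metrics below. A minimal sketch assuming the renamed 5.0.0-alpha API, with a hypothetical aggregation name and field:

    import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregationBuilder;

    public class AvgRenameSketch {
        public static void main(String[] args) {
            // Only the class name changes; construction and configuration are as before.
            AvgAggregationBuilder avgPrice = new AvgAggregationBuilder("avg_price");
            avgPrice.field("price"); // hypothetical field name
        }
    }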
@@ -28,14 +28,16 @@ import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;

import java.io.IOException;
import java.util.Objects;

public final class CardinalityAggregatorBuilder extends ValuesSourceAggregatorBuilder.LeafOnly<ValuesSource, CardinalityAggregatorBuilder> {
public final class CardinalityAggregationBuilder
extends ValuesSourceAggregationBuilder.LeafOnly<ValuesSource, CardinalityAggregationBuilder> {

public static final String NAME = InternalCardinality.TYPE.name();
public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME);

@@ -43,14 +45,14 @@ public final class CardinalityAggregatorBuilder extends ValuesSourceAggregatorBu
private Long precisionThreshold = null;

public CardinalityAggregatorBuilder(String name, ValueType targetValueType) {
public CardinalityAggregationBuilder(String name, ValueType targetValueType) {
super(name, InternalCardinality.TYPE, ValuesSourceType.ANY, targetValueType);
}

/**
* Read from a stream.
*/
public CardinalityAggregatorBuilder(StreamInput in) throws IOException {
public CardinalityAggregationBuilder(StreamInput in) throws IOException {
super(in, InternalCardinality.TYPE, ValuesSourceType.ANY);
if (in.readBoolean()) {
precisionThreshold = in.readLong();

@@ -75,7 +77,7 @@ public final class CardinalityAggregatorBuilder extends ValuesSourceAggregatorBu
* Set a precision threshold. Higher values improve accuracy but also
* increase memory usage.
*/
public CardinalityAggregatorBuilder precisionThreshold(long precisionThreshold) {
public CardinalityAggregationBuilder precisionThreshold(long precisionThreshold) {
if (precisionThreshold < 0) {
throw new IllegalArgumentException(
"[precisionThreshold] must be greater than or equal to 0. Found [" + precisionThreshold + "] in [" + name + "]");

@@ -122,7 +124,7 @@ public final class CardinalityAggregatorBuilder extends ValuesSourceAggregatorBu
@Override
protected boolean innerEquals(Object obj) {
CardinalityAggregatorBuilder other = (CardinalityAggregatorBuilder) obj;
CardinalityAggregationBuilder other = (CardinalityAggregationBuilder) obj;
return Objects.equals(precisionThreshold, other.precisionThreshold);
}

@@ -40,10 +40,10 @@ public class CardinalityParser extends AnyValuesSourceParser {
}

@Override
protected CardinalityAggregatorBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType,
protected CardinalityAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType,
ValueType targetValueType, Map<ParseField, Object> otherOptions) {
CardinalityAggregatorBuilder factory = new CardinalityAggregatorBuilder(aggregationName, targetValueType);
Long precisionThreshold = (Long) otherOptions.get(CardinalityAggregatorBuilder.PRECISION_THRESHOLD_FIELD);
CardinalityAggregationBuilder factory = new CardinalityAggregationBuilder(aggregationName, targetValueType);
Long precisionThreshold = (Long) otherOptions.get(CardinalityAggregationBuilder.PRECISION_THRESHOLD_FIELD);
if (precisionThreshold != null) {
factory.precisionThreshold(precisionThreshold);
}

@@ -54,8 +54,8 @@ public class CardinalityParser extends AnyValuesSourceParser {
protected boolean token(String aggregationName, String currentFieldName, Token token, XContentParser parser,
ParseFieldMatcher parseFieldMatcher, Map<ParseField, Object> otherOptions) throws IOException {
if (token.isValue()) {
if (parseFieldMatcher.match(currentFieldName, CardinalityAggregatorBuilder.PRECISION_THRESHOLD_FIELD)) {
otherOptions.put(CardinalityAggregatorBuilder.PRECISION_THRESHOLD_FIELD, parser.longValue());
if (parseFieldMatcher.match(currentFieldName, CardinalityAggregationBuilder.PRECISION_THRESHOLD_FIELD)) {
otherOptions.put(CardinalityAggregationBuilder.PRECISION_THRESHOLD_FIELD, parser.longValue());
return true;
} else if (parseFieldMatcher.match(currentFieldName, REHASH)) {
// ignore
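A cardinality sketch under the same assumptions (renamed 5.0.0-alpha API, hypothetical names), showing the precisionThreshold knob whose validation appears above:

    import org.elasticsearch.search.aggregations.metrics.cardinality.CardinalityAggregationBuilder;
    import org.elasticsearch.search.aggregations.support.ValueType;

    public class CardinalitySketch {
        public static void main(String[] args) {
            CardinalityAggregationBuilder uniqueUsers =
                    new CardinalityAggregationBuilder("unique_users", ValueType.STRING);
            uniqueUsers.field("user_id");          // hypothetical field name
            uniqueUsers.precisionThreshold(10000); // must be >= 0; higher trades memory for accuracy
        }
    }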
@@ -28,27 +28,27 @@ import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;

import java.io.IOException;
import java.util.Objects;

public class GeoBoundsAggregatorBuilder extends ValuesSourceAggregatorBuilder<ValuesSource.GeoPoint, GeoBoundsAggregatorBuilder> {
public class GeoBoundsAggregationBuilder extends ValuesSourceAggregationBuilder<ValuesSource.GeoPoint, GeoBoundsAggregationBuilder> {
public static final String NAME = InternalGeoBounds.TYPE.name();
public static final ParseField AGGREGATION_NAME_FIED = new ParseField(NAME);

private boolean wrapLongitude = true;

public GeoBoundsAggregatorBuilder(String name) {
public GeoBoundsAggregationBuilder(String name) {
super(name, InternalGeoBounds.TYPE, ValuesSourceType.GEOPOINT, ValueType.GEOPOINT);
}

/**
* Read from a stream.
*/
public GeoBoundsAggregatorBuilder(StreamInput in) throws IOException {
public GeoBoundsAggregationBuilder(StreamInput in) throws IOException {
super(in, InternalGeoBounds.TYPE, ValuesSourceType.GEOPOINT, ValueType.GEOPOINT);
wrapLongitude = in.readBoolean();
}

@@ -61,7 +61,7 @@ public class GeoBoundsAggregatorBuilder extends ValuesSourceAggregatorBuilder<Va
/**
* Set whether to wrap longitudes. Defaults to true.
*/
public GeoBoundsAggregatorBuilder wrapLongitude(boolean wrapLongitude) {
public GeoBoundsAggregationBuilder wrapLongitude(boolean wrapLongitude) {
this.wrapLongitude = wrapLongitude;
return this;
}

@@ -92,7 +92,7 @@ public class GeoBoundsAggregatorBuilder extends ValuesSourceAggregatorBuilder<Va
@Override
protected boolean innerEquals(Object obj) {
GeoBoundsAggregatorBuilder other = (GeoBoundsAggregatorBuilder) obj;
GeoBoundsAggregationBuilder other = (GeoBoundsAggregationBuilder) obj;
return Objects.equals(wrapLongitude, other.wrapLongitude);
}

@@ -37,9 +37,9 @@ public class GeoBoundsParser extends GeoPointValuesSourceParser {
}

@Override
protected GeoBoundsAggregatorBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType,
protected GeoBoundsAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType,
ValueType targetValueType, Map<ParseField, Object> otherOptions) {
GeoBoundsAggregatorBuilder factory = new GeoBoundsAggregatorBuilder(aggregationName);
GeoBoundsAggregationBuilder factory = new GeoBoundsAggregationBuilder(aggregationName);
Boolean wrapLongitude = (Boolean) otherOptions.get(GeoBoundsAggregator.WRAP_LONGITUDE_FIELD);
if (wrapLongitude != null) {
factory.wrapLongitude(wrapLongitude);
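A geo_bounds sketch under the same assumptions (renamed API, hypothetical names):

    import org.elasticsearch.search.aggregations.metrics.geobounds.GeoBoundsAggregationBuilder;

    public class GeoBoundsSketch {
        public static void main(String[] args) {
            GeoBoundsAggregationBuilder viewport = new GeoBoundsAggregationBuilder("viewport");
            viewport.field("location");   // hypothetical geo_point field
            viewport.wrapLongitude(true); // the default, per the javadoc above
        }
    }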
@@ -28,25 +28,25 @@ import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;

import java.io.IOException;

public class GeoCentroidAggregatorBuilder
extends ValuesSourceAggregatorBuilder.LeafOnly<ValuesSource.GeoPoint, GeoCentroidAggregatorBuilder> {
public class GeoCentroidAggregationBuilder
extends ValuesSourceAggregationBuilder.LeafOnly<ValuesSource.GeoPoint, GeoCentroidAggregationBuilder> {
public static final String NAME = InternalGeoCentroid.TYPE.name();
public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME);

public GeoCentroidAggregatorBuilder(String name) {
public GeoCentroidAggregationBuilder(String name) {
super(name, InternalGeoCentroid.TYPE, ValuesSourceType.GEOPOINT, ValueType.GEOPOINT);
}

/**
* Read from a stream.
*/
public GeoCentroidAggregatorBuilder(StreamInput in) throws IOException {
public GeoCentroidAggregationBuilder(StreamInput in) throws IOException {
super(in, InternalGeoCentroid.TYPE, ValuesSourceType.GEOPOINT, ValueType.GEOPOINT);
}

@@ -46,8 +46,8 @@ public class GeoCentroidParser extends GeoPointValuesSourceParser {
}

@Override
protected GeoCentroidAggregatorBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType,
protected GeoCentroidAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType,
ValueType targetValueType, Map<ParseField, Object> otherOptions) {
return new GeoCentroidAggregatorBuilder(aggregationName);
return new GeoCentroidAggregationBuilder(aggregationName);
}
}

@@ -29,24 +29,24 @@ import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;

import java.io.IOException;

public class MaxAggregatorBuilder extends ValuesSourceAggregatorBuilder.LeafOnly<ValuesSource.Numeric, MaxAggregatorBuilder> {
public class MaxAggregationBuilder extends ValuesSourceAggregationBuilder.LeafOnly<ValuesSource.Numeric, MaxAggregationBuilder> {
public static final String NAME = InternalMax.TYPE.name();
public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME);

public MaxAggregatorBuilder(String name) {
public MaxAggregationBuilder(String name) {
super(name, InternalMax.TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC);
}

/**
* Read from a stream.
*/
public MaxAggregatorBuilder(StreamInput in) throws IOException {
public MaxAggregationBuilder(StreamInput in) throws IOException {
super(in, InternalMax.TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC);
}

@@ -44,8 +44,8 @@ public class MaxParser extends NumericValuesSourceParser {
}

@Override
protected MaxAggregatorBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType,
protected MaxAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType,
ValueType targetValueType, Map<ParseField, Object> otherOptions) {
return new MaxAggregatorBuilder(aggregationName);
return new MaxAggregationBuilder(aggregationName);
}
}

@@ -29,24 +29,24 @@ import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;

import java.io.IOException;

public class MinAggregatorBuilder extends ValuesSourceAggregatorBuilder.LeafOnly<ValuesSource.Numeric, MinAggregatorBuilder> {
public class MinAggregationBuilder extends ValuesSourceAggregationBuilder.LeafOnly<ValuesSource.Numeric, MinAggregationBuilder> {
public static final String NAME = InternalMin.TYPE.name();
public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME);

public MinAggregatorBuilder(String name) {
public MinAggregationBuilder(String name) {
super(name, InternalMin.TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC);
}

/**
* Read from a stream.
*/
public MinAggregatorBuilder(StreamInput in) throws IOException {
public MinAggregationBuilder(StreamInput in) throws IOException {
super(in, InternalMin.TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC);
}

@@ -45,8 +45,8 @@ public class MinParser extends NumericValuesSourceParser {
}

@Override
protected MinAggregatorBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType,
protected MinAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType,
ValueType targetValueType, Map<ParseField, Object> otherOptions) {
return new MinAggregatorBuilder(aggregationName);
return new MinAggregationBuilder(aggregationName);
}
}

@@ -27,7 +27,7 @@ import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.search.aggregations.support.AbstractValuesSourceParser.NumericValuesSourceParser;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;

import java.io.IOException;

@@ -115,7 +115,7 @@ public abstract class AbstractPercentilesParser extends NumericValuesSourceParse
}

@Override
protected ValuesSourceAggregatorBuilder<Numeric, ?> createFactory(String aggregationName, ValuesSourceType valuesSourceType,
protected ValuesSourceAggregationBuilder<Numeric, ?> createFactory(String aggregationName, ValuesSourceType valuesSourceType,
ValueType targetValueType, Map<ParseField, Object> otherOptions) {
PercentilesMethod method = (PercentilesMethod) otherOptions.getOrDefault(METHOD_FIELD, PercentilesMethod.TDIGEST);

@@ -126,7 +126,7 @@ public abstract class AbstractPercentilesParser extends NumericValuesSourceParse
return buildFactory(aggregationName, cdfValues, method, compression, numberOfSignificantValueDigits, keyed);
}

protected abstract ValuesSourceAggregatorBuilder<Numeric, ?> buildFactory(String aggregationName, double[] cdfValues,
protected abstract ValuesSourceAggregationBuilder<Numeric, ?> buildFactory(String aggregationName, double[] cdfValues,
PercentilesMethod method,
Double compression,
Integer numberOfSignificantValueDigits, Boolean keyed);
@@ -32,7 +32,7 @@ import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder.LeafOnly;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder.LeafOnly;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;

@@ -41,7 +41,7 @@ import java.io.IOException;
import java.util.Arrays;
import java.util.Objects;

public class PercentileRanksAggregatorBuilder extends LeafOnly<ValuesSource.Numeric, PercentileRanksAggregatorBuilder> {
public class PercentileRanksAggregationBuilder extends LeafOnly<ValuesSource.Numeric, PercentileRanksAggregationBuilder> {
public static final String NAME = InternalTDigestPercentileRanks.TYPE.name();
public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME);

@@ -51,14 +51,14 @@ public class PercentileRanksAggregatorBuilder extends LeafOnly<ValuesSource.Nume
private double compression = 100.0;
private boolean keyed = true;

public PercentileRanksAggregatorBuilder(String name) {
public PercentileRanksAggregationBuilder(String name) {
super(name, InternalTDigestPercentileRanks.TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC);
}

/**
* Read from a stream.
*/
public PercentileRanksAggregatorBuilder(StreamInput in) throws IOException {
public PercentileRanksAggregationBuilder(StreamInput in) throws IOException {
super(in, InternalTDigestPercentileRanks.TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC);
values = in.readDoubleArray();
keyed = in.readBoolean();

@@ -79,7 +79,7 @@ public class PercentileRanksAggregatorBuilder extends LeafOnly<ValuesSource.Nume
/**
* Set the values to compute percentiles from.
*/
public PercentileRanksAggregatorBuilder values(double... values) {
public PercentileRanksAggregationBuilder values(double... values) {
if (values == null) {
throw new IllegalArgumentException("[values] must not be null: [" + name + "]");
}

@@ -99,7 +99,7 @@ public class PercentileRanksAggregatorBuilder extends LeafOnly<ValuesSource.Nume
/**
* Set whether the XContent response should be keyed
*/
public PercentileRanksAggregatorBuilder keyed(boolean keyed) {
public PercentileRanksAggregationBuilder keyed(boolean keyed) {
this.keyed = keyed;
return this;
}

@@ -115,7 +115,7 @@ public class PercentileRanksAggregatorBuilder extends LeafOnly<ValuesSource.Nume
* Expert: set the number of significant digits in the values. Only relevant
* when using {@link PercentilesMethod#HDR}.
*/
public PercentileRanksAggregatorBuilder numberOfSignificantValueDigits(int numberOfSignificantValueDigits) {
public PercentileRanksAggregationBuilder numberOfSignificantValueDigits(int numberOfSignificantValueDigits) {
if (numberOfSignificantValueDigits < 0 || numberOfSignificantValueDigits > 5) {
throw new IllegalArgumentException("[numberOfSignificantValueDigits] must be between 0 and 5: [" + name + "]");
}

@@ -135,7 +135,7 @@ public class PercentileRanksAggregatorBuilder extends LeafOnly<ValuesSource.Nume
* Expert: set the compression. Higher values improve accuracy but also
* memory usage. Only relevant when using {@link PercentilesMethod#TDIGEST}.
*/
public PercentileRanksAggregatorBuilder compression(double compression) {
public PercentileRanksAggregationBuilder compression(double compression) {
if (compression < 0.0) {
throw new IllegalArgumentException(
"[compression] must be greater than or equal to 0. Found [" + compression + "] in [" + name + "]");

@@ -152,7 +152,7 @@ public class PercentileRanksAggregatorBuilder extends LeafOnly<ValuesSource.Nume
return compression;
}

public PercentileRanksAggregatorBuilder method(PercentilesMethod method) {
public PercentileRanksAggregationBuilder method(PercentilesMethod method) {
if (method == null) {
throw new IllegalArgumentException("[method] must not be null: [" + name + "]");
}

@@ -195,7 +195,7 @@ public class PercentileRanksAggregatorBuilder extends LeafOnly<ValuesSource.Nume
@Override
protected boolean innerEquals(Object obj) {
PercentileRanksAggregatorBuilder other = (PercentileRanksAggregatorBuilder) obj;
PercentileRanksAggregationBuilder other = (PercentileRanksAggregationBuilder) obj;
if (!Objects.equals(method, other.method)) {
return false;
}

@@ -20,7 +20,7 @@ package org.elasticsearch.search.aggregations.metrics.percentiles;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;

/**
*

@@ -39,9 +39,10 @@ public class PercentileRanksParser extends AbstractPercentilesParser {
}

@Override
protected ValuesSourceAggregatorBuilder<Numeric, ?> buildFactory(String aggregationName, double[] keys, PercentilesMethod method,
Double compression, Integer numberOfSignificantValueDigits, Boolean keyed) {
PercentileRanksAggregatorBuilder factory = new PercentileRanksAggregatorBuilder(aggregationName);
protected ValuesSourceAggregationBuilder<Numeric, ?> buildFactory(String aggregationName, double[] keys, PercentilesMethod method,
Double compression, Integer numberOfSignificantValueDigits,
Boolean keyed) {
PercentileRanksAggregationBuilder factory = new PercentileRanksAggregationBuilder(aggregationName);
if (keys != null) {
factory.values(keys);
}
@@ -32,7 +32,7 @@ import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder.LeafOnly;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder.LeafOnly;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;

@@ -41,7 +41,7 @@ import java.io.IOException;
import java.util.Arrays;
import java.util.Objects;

public class PercentilesAggregatorBuilder extends LeafOnly<ValuesSource.Numeric, PercentilesAggregatorBuilder> {
public class PercentilesAggregationBuilder extends LeafOnly<ValuesSource.Numeric, PercentilesAggregationBuilder> {
public static final String NAME = InternalTDigestPercentiles.TYPE.name();
public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME);

@@ -51,14 +51,14 @@ public class PercentilesAggregatorBuilder extends LeafOnly<ValuesSource.Numeric,
private double compression = 100.0;
private boolean keyed = true;

public PercentilesAggregatorBuilder(String name) {
public PercentilesAggregationBuilder(String name) {
super(name, InternalTDigestPercentiles.TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC);
}

/**
* Read from a stream.
*/
public PercentilesAggregatorBuilder(StreamInput in) throws IOException {
public PercentilesAggregationBuilder(StreamInput in) throws IOException {
super(in, InternalTDigestPercentiles.TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC);
percents = in.readDoubleArray();
keyed = in.readBoolean();

@@ -79,7 +79,7 @@ public class PercentilesAggregatorBuilder extends LeafOnly<ValuesSource.Numeric,
/**
* Set the values to compute percentiles from.
*/
public PercentilesAggregatorBuilder percentiles(double... percents) {
public PercentilesAggregationBuilder percentiles(double... percents) {
if (percents == null) {
throw new IllegalArgumentException("[percents] must not be null: [" + name + "]");
}

@@ -99,7 +99,7 @@ public class PercentilesAggregatorBuilder extends LeafOnly<ValuesSource.Numeric,
/**
* Set whether the XContent response should be keyed
*/
public PercentilesAggregatorBuilder keyed(boolean keyed) {
public PercentilesAggregationBuilder keyed(boolean keyed) {
this.keyed = keyed;
return this;
}

@@ -115,7 +115,7 @@ public class PercentilesAggregatorBuilder extends LeafOnly<ValuesSource.Numeric,
* Expert: set the number of significant digits in the values. Only relevant
* when using {@link PercentilesMethod#HDR}.
*/
public PercentilesAggregatorBuilder numberOfSignificantValueDigits(int numberOfSignificantValueDigits) {
public PercentilesAggregationBuilder numberOfSignificantValueDigits(int numberOfSignificantValueDigits) {
if (numberOfSignificantValueDigits < 0 || numberOfSignificantValueDigits > 5) {
throw new IllegalArgumentException("[numberOfSignificantValueDigits] must be between 0 and 5: [" + name + "]");
}

@@ -135,7 +135,7 @@ public class PercentilesAggregatorBuilder extends LeafOnly<ValuesSource.Numeric,
* Expert: set the compression. Higher values improve accuracy but also
* memory usage. Only relevant when using {@link PercentilesMethod#TDIGEST}.
*/
public PercentilesAggregatorBuilder compression(double compression) {
public PercentilesAggregationBuilder compression(double compression) {
if (compression < 0.0) {
throw new IllegalArgumentException(
"[compression] must be greater than or equal to 0. Found [" + compression + "] in [" + name + "]");

@@ -152,7 +152,7 @@ public class PercentilesAggregatorBuilder extends LeafOnly<ValuesSource.Numeric,
return compression;
}

public PercentilesAggregatorBuilder method(PercentilesMethod method) {
public PercentilesAggregationBuilder method(PercentilesMethod method) {
if (method == null) {
throw new IllegalArgumentException("[method] must not be null: [" + name + "]");
}

@@ -195,7 +195,7 @@ public class PercentilesAggregatorBuilder extends LeafOnly<ValuesSource.Numeric,
@Override
protected boolean innerEquals(Object obj) {
PercentilesAggregatorBuilder other = (PercentilesAggregatorBuilder) obj;
PercentilesAggregationBuilder other = (PercentilesAggregationBuilder) obj;
if (!Objects.equals(method, other.method)) {
return false;
}

@@ -20,7 +20,7 @@ package org.elasticsearch.search.aggregations.metrics.percentiles;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;

/**
*

@@ -41,9 +41,10 @@ public class PercentilesParser extends AbstractPercentilesParser {
}

@Override
protected ValuesSourceAggregatorBuilder<Numeric, ?> buildFactory(String aggregationName, double[] keys, PercentilesMethod method,
Double compression, Integer numberOfSignificantValueDigits, Boolean keyed) {
PercentilesAggregatorBuilder factory = new PercentilesAggregatorBuilder(aggregationName);
protected ValuesSourceAggregationBuilder<Numeric, ?> buildFactory(String aggregationName, double[] keys, PercentilesMethod method,
Double compression, Integer numberOfSignificantValueDigits,
Boolean keyed) {
PercentilesAggregationBuilder factory = new PercentilesAggregationBuilder(aggregationName);
if (keys != null) {
factory.percentiles(keys);
}
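A percentiles sketch under the same assumptions; per the expert setters above, compression only applies to the t-digest method and significant digits to HDR:

    import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesAggregationBuilder;
    import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesMethod;

    public class PercentilesSketch {
        public static void main(String[] args) {
            PercentilesAggregationBuilder loadTime = new PercentilesAggregationBuilder("load_time");
            loadTime.field("latency_ms"); // hypothetical field name
            loadTime.percentiles(50, 95, 99);
            loadTime.method(PercentilesMethod.TDIGEST);
            loadTime.compression(200); // must be >= 0
        }
    }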
@@ -29,7 +29,7 @@ import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptParameterParser;
import org.elasticsearch.script.ScriptParameterParser.ScriptParameterValue;
import org.elasticsearch.search.aggregations.AggregatorBuilder;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.support.AggregationContext;

@@ -40,7 +40,7 @@ import java.util.Map;
import java.util.Objects;
import java.util.Set;

public class ScriptedMetricAggregatorBuilder extends AggregatorBuilder<ScriptedMetricAggregatorBuilder> {
public class ScriptedMetricAggregationBuilder extends AggregationBuilder<ScriptedMetricAggregationBuilder> {

public static final String NAME = InternalScriptedMetric.TYPE.name();
public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME);

@@ -58,14 +58,14 @@ public class ScriptedMetricAggregatorBuilder extends AggregatorBuilder<ScriptedM
private Script reduceScript;
private Map<String, Object> params;

public ScriptedMetricAggregatorBuilder(String name) {
public ScriptedMetricAggregationBuilder(String name) {
super(name, InternalScriptedMetric.TYPE);
}

/**
* Read from a stream.
*/
public ScriptedMetricAggregatorBuilder(StreamInput in) throws IOException {
public ScriptedMetricAggregationBuilder(StreamInput in) throws IOException {
super(in, InternalScriptedMetric.TYPE);
initScript = in.readOptionalWriteable(Script::new);
mapScript = in.readOptionalWriteable(Script::new);

@@ -92,7 +92,7 @@ public class ScriptedMetricAggregatorBuilder extends AggregatorBuilder<ScriptedM
/**
* Set the <tt>init</tt> script.
*/
public ScriptedMetricAggregatorBuilder initScript(Script initScript) {
public ScriptedMetricAggregationBuilder initScript(Script initScript) {
if (initScript == null) {
throw new IllegalArgumentException("[initScript] must not be null: [" + name + "]");
}

@@ -110,7 +110,7 @@ public class ScriptedMetricAggregatorBuilder extends AggregatorBuilder<ScriptedM
/**
* Set the <tt>map</tt> script.
*/
public ScriptedMetricAggregatorBuilder mapScript(Script mapScript) {
public ScriptedMetricAggregationBuilder mapScript(Script mapScript) {
if (mapScript == null) {
throw new IllegalArgumentException("[mapScript] must not be null: [" + name + "]");
}

@@ -128,7 +128,7 @@ public class ScriptedMetricAggregatorBuilder extends AggregatorBuilder<ScriptedM
/**
* Set the <tt>combine</tt> script.
*/
public ScriptedMetricAggregatorBuilder combineScript(Script combineScript) {
public ScriptedMetricAggregationBuilder combineScript(Script combineScript) {
if (combineScript == null) {
throw new IllegalArgumentException("[combineScript] must not be null: [" + name + "]");
}

@@ -146,7 +146,7 @@ public class ScriptedMetricAggregatorBuilder extends AggregatorBuilder<ScriptedM
/**
* Set the <tt>reduce</tt> script.
*/
public ScriptedMetricAggregatorBuilder reduceScript(Script reduceScript) {
public ScriptedMetricAggregationBuilder reduceScript(Script reduceScript) {
if (reduceScript == null) {
throw new IllegalArgumentException("[reduceScript] must not be null: [" + name + "]");
}

@@ -165,7 +165,7 @@ public class ScriptedMetricAggregatorBuilder extends AggregatorBuilder<ScriptedM
* Set parameters that will be available in the <tt>init</tt>,
* <tt>map</tt> and <tt>combine</tt> phases.
*/
public ScriptedMetricAggregatorBuilder params(Map<String, Object> params) {
public ScriptedMetricAggregationBuilder params(Map<String, Object> params) {
if (params == null) {
throw new IllegalArgumentException("[params] must not be null: [" + name + "]");
}

@@ -214,7 +214,7 @@ public class ScriptedMetricAggregatorBuilder extends AggregatorBuilder<ScriptedM
return builder;
}

public static ScriptedMetricAggregatorBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
public static ScriptedMetricAggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
Script initScript = null;
Script mapScript = null;
Script combineScript = null;

@@ -305,7 +305,7 @@ public class ScriptedMetricAggregatorBuilder extends AggregatorBuilder<ScriptedM
throw new ParsingException(parser.getTokenLocation(), "map_script field is required in [" + aggregationName + "].");
}

ScriptedMetricAggregatorBuilder factory = new ScriptedMetricAggregatorBuilder(aggregationName);
ScriptedMetricAggregationBuilder factory = new ScriptedMetricAggregationBuilder(aggregationName);
if (initScript != null) {
factory.initScript(initScript);
}

@@ -336,7 +336,7 @@ public class ScriptedMetricAggregatorBuilder extends AggregatorBuilder<ScriptedM
@Override
protected boolean doEquals(Object obj) {
ScriptedMetricAggregatorBuilder other = (ScriptedMetricAggregatorBuilder) obj;
ScriptedMetricAggregationBuilder other = (ScriptedMetricAggregationBuilder) obj;
return Objects.equals(initScript, other.initScript)
&& Objects.equals(mapScript, other.mapScript)
&& Objects.equals(combineScript, other.combineScript)
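A scripted_metric sketch, same assumptions; the script bodies are hypothetical Groovy-style examples, and only map_script is required by the parser above:

    import java.util.Collections;
    import org.elasticsearch.script.Script;
    import org.elasticsearch.search.aggregations.metrics.scripted.ScriptedMetricAggregationBuilder;

    public class ScriptedMetricSketch {
        public static void main(String[] args) {
            ScriptedMetricAggregationBuilder profit = new ScriptedMetricAggregationBuilder("profit");
            profit.initScript(new Script("_agg.total = 0"));
            profit.mapScript(new Script("_agg.total += doc['amount'].value")); // the only required script
            profit.combineScript(new Script("return _agg.total"));
            profit.reduceScript(new Script("double t = 0; for (a in _aggs) { t += a }; return t"));
            profit.params(Collections.singletonMap("factor", 1.0)); // visible in init/map/combine
        }
    }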
@@ -29,24 +29,24 @@ import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;

import java.io.IOException;

public class StatsAggregatorBuilder extends ValuesSourceAggregatorBuilder.LeafOnly<ValuesSource.Numeric, StatsAggregatorBuilder> {
public class StatsAggregationBuilder extends ValuesSourceAggregationBuilder.LeafOnly<ValuesSource.Numeric, StatsAggregationBuilder> {
public static final String NAME = InternalStats.TYPE.name();
public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME);

public StatsAggregatorBuilder(String name) {
public StatsAggregationBuilder(String name) {
super(name, InternalStats.TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC);
}

/**
* Read from a stream.
*/
public StatsAggregatorBuilder(StreamInput in) throws IOException {
public StatsAggregationBuilder(StreamInput in) throws IOException {
super(in, InternalStats.TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC);
}

@@ -44,8 +44,8 @@ public class StatsParser extends NumericValuesSourceParser {
}

@Override
protected StatsAggregatorBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType,
protected StatsAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType,
ValueType targetValueType, Map<ParseField, Object> otherOptions) {
return new StatsAggregatorBuilder(aggregationName);
return new StatsAggregationBuilder(aggregationName);
}
}

@@ -29,28 +29,28 @@ import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;

import java.io.IOException;
import java.util.Objects;

public class ExtendedStatsAggregatorBuilder
extends ValuesSourceAggregatorBuilder.LeafOnly<ValuesSource.Numeric, ExtendedStatsAggregatorBuilder> {
public class ExtendedStatsAggregationBuilder
extends ValuesSourceAggregationBuilder.LeafOnly<ValuesSource.Numeric, ExtendedStatsAggregationBuilder> {
public static final String NAME = InternalExtendedStats.TYPE.name();
public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME);

private double sigma = 2.0;

public ExtendedStatsAggregatorBuilder(String name) {
public ExtendedStatsAggregationBuilder(String name) {
super(name, InternalExtendedStats.TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC);
}

/**
* Read from a stream.
*/
public ExtendedStatsAggregatorBuilder(StreamInput in) throws IOException {
public ExtendedStatsAggregationBuilder(StreamInput in) throws IOException {
super(in, InternalExtendedStats.TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC);
sigma = in.readDouble();
}

@@ -60,7 +60,7 @@ public class ExtendedStatsAggregatorBuilder
out.writeDouble(sigma);
}

public ExtendedStatsAggregatorBuilder sigma(double sigma) {
public ExtendedStatsAggregationBuilder sigma(double sigma) {
if (sigma < 0.0) {
throw new IllegalArgumentException("[sigma] must be greater than or equal to 0. Found [" + sigma + "] in [" + name + "]");
}

@@ -91,7 +91,7 @@ public class ExtendedStatsAggregatorBuilder
@Override
protected boolean innerEquals(Object obj) {
ExtendedStatsAggregatorBuilder other = (ExtendedStatsAggregatorBuilder) obj;
ExtendedStatsAggregationBuilder other = (ExtendedStatsAggregationBuilder) obj;
return Objects.equals(sigma, other.sigma);
}

@@ -50,9 +50,9 @@ public class ExtendedStatsParser extends NumericValuesSourceParser {
}

@Override
protected ExtendedStatsAggregatorBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType,
protected ExtendedStatsAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType,
ValueType targetValueType, Map<ParseField, Object> otherOptions) {
ExtendedStatsAggregatorBuilder factory = new ExtendedStatsAggregatorBuilder(aggregationName);
ExtendedStatsAggregationBuilder factory = new ExtendedStatsAggregationBuilder(aggregationName);
Double sigma = (Double) otherOptions.get(ExtendedStatsAggregator.SIGMA_FIELD);
if (sigma != null) {
factory.sigma(sigma);

@@ -29,24 +29,24 @@ import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;

import java.io.IOException;

public class SumAggregatorBuilder extends ValuesSourceAggregatorBuilder.LeafOnly<ValuesSource.Numeric, SumAggregatorBuilder> {
public class SumAggregationBuilder extends ValuesSourceAggregationBuilder.LeafOnly<ValuesSource.Numeric, SumAggregationBuilder> {
public static final String NAME = InternalSum.TYPE.name();
public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME);

public SumAggregatorBuilder(String name) {
public SumAggregationBuilder(String name) {
super(name, InternalSum.TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC);
}

/**
* Read from a stream.
*/
public SumAggregatorBuilder(StreamInput in) throws IOException {
public SumAggregationBuilder(StreamInput in) throws IOException {
super(in, InternalSum.TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC);
}

@@ -44,8 +44,8 @@ public class SumParser extends NumericValuesSourceParser {
}

@Override
protected SumAggregatorBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType,
protected SumAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType,
ValueType targetValueType, Map<ParseField, Object> otherOptions) {
return new SumAggregatorBuilder(aggregationName);
return new SumAggregationBuilder(aggregationName);
}
}
@@ -30,7 +30,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.query.QueryParseContext;
 import org.elasticsearch.script.Script;
 import org.elasticsearch.search.aggregations.AggregationInitializationException;
-import org.elasticsearch.search.aggregations.AggregatorBuilder;
+import org.elasticsearch.search.aggregations.AggregationBuilder;
 import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
 import org.elasticsearch.search.aggregations.AggregatorFactory;
 import org.elasticsearch.search.aggregations.support.AggregationContext;
@@ -51,7 +51,7 @@ import java.util.List;
 import java.util.Objects;
 import java.util.Set;
 
-public class TopHitsAggregatorBuilder extends AggregatorBuilder<TopHitsAggregatorBuilder> {
+public class TopHitsAggregationBuilder extends AggregationBuilder<TopHitsAggregationBuilder> {
     public static final String NAME = InternalTopHits.TYPE.name();
     public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME);
 
@@ -67,14 +67,14 @@ public class TopHitsAggregatorBuilder extends AggregatorBuilder<TopHitsAggregato
     private Set<ScriptField> scriptFields;
     private FetchSourceContext fetchSourceContext;
 
-    public TopHitsAggregatorBuilder(String name) {
+    public TopHitsAggregationBuilder(String name) {
         super(name, InternalTopHits.TYPE);
     }
 
     /**
      * Read from a stream.
      */
-    public TopHitsAggregatorBuilder(StreamInput in) throws IOException {
+    public TopHitsAggregationBuilder(StreamInput in) throws IOException {
         super(in, InternalTopHits.TYPE);
         explain = in.readBoolean();
         fetchSourceContext = in.readOptionalStreamable(FetchSourceContext::new);
@@ -159,7 +159,7 @@ public class TopHitsAggregatorBuilder extends AggregatorBuilder<TopHitsAggregato
     /**
     * From index to start the search from. Defaults to <tt>0</tt>.
     */
-    public TopHitsAggregatorBuilder from(int from) {
+    public TopHitsAggregationBuilder from(int from) {
        if (from < 0) {
            throw new IllegalArgumentException("[from] must be greater than or equal to 0. Found [" + from + "] in [" + name + "]");
        }
@@ -177,7 +177,7 @@ public class TopHitsAggregatorBuilder extends AggregatorBuilder<TopHitsAggregato
     /**
     * The number of search hits to return. Defaults to <tt>10</tt>.
     */
-    public TopHitsAggregatorBuilder size(int size) {
+    public TopHitsAggregationBuilder size(int size) {
        if (size < 0) {
            throw new IllegalArgumentException("[size] must be greater than or equal to 0. Found [" + size + "] in [" + name + "]");
        }
@@ -200,7 +200,7 @@ public class TopHitsAggregatorBuilder extends AggregatorBuilder<TopHitsAggregato
     * @param order
     *            The sort ordering
     */
-    public TopHitsAggregatorBuilder sort(String name, SortOrder order) {
+    public TopHitsAggregationBuilder sort(String name, SortOrder order) {
        if (name == null) {
            throw new IllegalArgumentException("sort [name] must not be null: [" + name + "]");
        }
@@ -220,7 +220,7 @@ public class TopHitsAggregatorBuilder extends AggregatorBuilder<TopHitsAggregato
     * @param name
     *            The name of the field to sort by
     */
-    public TopHitsAggregatorBuilder sort(String name) {
+    public TopHitsAggregationBuilder sort(String name) {
        if (name == null) {
            throw new IllegalArgumentException("sort [name] must not be null: [" + name + "]");
        }
@@ -234,7 +234,7 @@ public class TopHitsAggregatorBuilder extends AggregatorBuilder<TopHitsAggregato
     /**
     * Adds a sort builder.
     */
-    public TopHitsAggregatorBuilder sort(SortBuilder<?> sort) {
+    public TopHitsAggregationBuilder sort(SortBuilder<?> sort) {
        if (sort == null) {
            throw new IllegalArgumentException("[sort] must not be null: [" + name + "]");
        }
@@ -248,7 +248,7 @@ public class TopHitsAggregatorBuilder extends AggregatorBuilder<TopHitsAggregato
     /**
     * Adds a sort builder.
     */
-    public TopHitsAggregatorBuilder sorts(List<SortBuilder<?>> sorts) {
+    public TopHitsAggregationBuilder sorts(List<SortBuilder<?>> sorts) {
        if (sorts == null) {
            throw new IllegalArgumentException("[sorts] must not be null: [" + name + "]");
        }
@@ -271,7 +271,7 @@ public class TopHitsAggregatorBuilder extends AggregatorBuilder<TopHitsAggregato
     /**
     * Adds highlight to perform as part of the search.
     */
-    public TopHitsAggregatorBuilder highlighter(HighlightBuilder highlightBuilder) {
+    public TopHitsAggregationBuilder highlighter(HighlightBuilder highlightBuilder) {
        if (highlightBuilder == null) {
            throw new IllegalArgumentException("[highlightBuilder] must not be null: [" + name + "]");
        }
@@ -290,7 +290,7 @@ public class TopHitsAggregatorBuilder extends AggregatorBuilder<TopHitsAggregato
     * Indicates whether the response should contain the stored _source for
     * every hit
     */
-    public TopHitsAggregatorBuilder fetchSource(boolean fetch) {
+    public TopHitsAggregationBuilder fetchSource(boolean fetch) {
        if (this.fetchSourceContext == null) {
            this.fetchSourceContext = new FetchSourceContext(fetch);
        } else {
@@ -311,7 +311,7 @@ public class TopHitsAggregatorBuilder extends AggregatorBuilder<TopHitsAggregato
     * An optional exclude (optionally wildcarded) pattern to
     * filter the returned _source
     */
-    public TopHitsAggregatorBuilder fetchSource(@Nullable String include, @Nullable String exclude) {
+    public TopHitsAggregationBuilder fetchSource(@Nullable String include, @Nullable String exclude) {
        fetchSource(include == null ? Strings.EMPTY_ARRAY : new String[] { include },
                exclude == null ? Strings.EMPTY_ARRAY : new String[] { exclude });
        return this;
@@ -329,7 +329,7 @@ public class TopHitsAggregatorBuilder extends AggregatorBuilder<TopHitsAggregato
     * An optional list of exclude (optionally wildcarded)
     * pattern to filter the returned _source
     */
-    public TopHitsAggregatorBuilder fetchSource(@Nullable String[] includes, @Nullable String[] excludes) {
+    public TopHitsAggregationBuilder fetchSource(@Nullable String[] includes, @Nullable String[] excludes) {
        fetchSourceContext = new FetchSourceContext(includes, excludes);
        return this;
    }
@@ -337,7 +337,7 @@ public class TopHitsAggregatorBuilder extends AggregatorBuilder<TopHitsAggregato
     /**
     * Indicate how the _source should be fetched.
     */
-    public TopHitsAggregatorBuilder fetchSource(@Nullable FetchSourceContext fetchSourceContext) {
+    public TopHitsAggregationBuilder fetchSource(@Nullable FetchSourceContext fetchSourceContext) {
        if (fetchSourceContext == null) {
            throw new IllegalArgumentException("[fetchSourceContext] must not be null: [" + name + "]");
        }
@@ -358,7 +358,7 @@ public class TopHitsAggregatorBuilder extends AggregatorBuilder<TopHitsAggregato
     * the search request. If none are specified, the source of the document
     * will be return.
     */
-    public TopHitsAggregatorBuilder field(String field) {
+    public TopHitsAggregationBuilder field(String field) {
        if (field == null) {
            throw new IllegalArgumentException("[field] must not be null: [" + name + "]");
        }
@@ -373,7 +373,7 @@ public class TopHitsAggregatorBuilder extends AggregatorBuilder<TopHitsAggregato
     * Sets the fields to load and return as part of the search request. If
     * none are specified, the source of the document will be returned.
     */
-    public TopHitsAggregatorBuilder fields(List<String> fields) {
+    public TopHitsAggregationBuilder fields(List<String> fields) {
        if (fields == null) {
            throw new IllegalArgumentException("[fields] must not be null: [" + name + "]");
        }
@@ -385,7 +385,7 @@ public class TopHitsAggregatorBuilder extends AggregatorBuilder<TopHitsAggregato
     * Sets no fields to be loaded, resulting in only id and type to be
     * returned per field.
     */
-    public TopHitsAggregatorBuilder noFields() {
+    public TopHitsAggregationBuilder noFields() {
        this.fieldNames = Collections.emptyList();
        return this;
    }
@@ -401,7 +401,7 @@ public class TopHitsAggregatorBuilder extends AggregatorBuilder<TopHitsAggregato
     * Adds a field to load from the field data cache and return as part of
     * the search request.
     */
-    public TopHitsAggregatorBuilder fieldDataField(String fieldDataField) {
+    public TopHitsAggregationBuilder fieldDataField(String fieldDataField) {
        if (fieldDataField == null) {
            throw new IllegalArgumentException("[fieldDataField] must not be null: [" + name + "]");
        }
@@ -416,7 +416,7 @@ public class TopHitsAggregatorBuilder extends AggregatorBuilder<TopHitsAggregato
     * Adds fields to load from the field data cache and return as part of
     * the search request.
     */
-    public TopHitsAggregatorBuilder fieldDataFields(List<String> fieldDataFields) {
+    public TopHitsAggregationBuilder fieldDataFields(List<String> fieldDataFields) {
        if (fieldDataFields == null) {
            throw new IllegalArgumentException("[fieldDataFields] must not be null: [" + name + "]");
        }
@@ -442,7 +442,7 @@ public class TopHitsAggregatorBuilder extends AggregatorBuilder<TopHitsAggregato
     * @param script
     *            The script
     */
-    public TopHitsAggregatorBuilder scriptField(String name, Script script) {
+    public TopHitsAggregationBuilder scriptField(String name, Script script) {
        if (name == null) {
            throw new IllegalArgumentException("scriptField [name] must not be null: [" + name + "]");
        }
@@ -461,7 +461,7 @@ public class TopHitsAggregatorBuilder extends AggregatorBuilder<TopHitsAggregato
     * @param script
     *            The script
     */
-    public TopHitsAggregatorBuilder scriptField(String name, Script script, boolean ignoreFailure) {
+    public TopHitsAggregationBuilder scriptField(String name, Script script, boolean ignoreFailure) {
        if (name == null) {
            throw new IllegalArgumentException("scriptField [name] must not be null: [" + name + "]");
        }
@@ -475,7 +475,7 @@ public class TopHitsAggregatorBuilder extends AggregatorBuilder<TopHitsAggregato
        return this;
    }
 
-    public TopHitsAggregatorBuilder scriptFields(List<ScriptField> scriptFields) {
+    public TopHitsAggregationBuilder scriptFields(List<ScriptField> scriptFields) {
        if (scriptFields == null) {
            throw new IllegalArgumentException("[scriptFields] must not be null: [" + name + "]");
        }
@@ -497,7 +497,7 @@ public class TopHitsAggregatorBuilder extends AggregatorBuilder<TopHitsAggregato
     * Should each {@link org.elasticsearch.search.SearchHit} be returned
     * with an explanation of the hit (ranking).
     */
-    public TopHitsAggregatorBuilder explain(boolean explain) {
+    public TopHitsAggregationBuilder explain(boolean explain) {
        this.explain = explain;
        return this;
    }
@@ -514,7 +514,7 @@ public class TopHitsAggregatorBuilder extends AggregatorBuilder<TopHitsAggregato
     * Should each {@link org.elasticsearch.search.SearchHit} be returned
     * with a version associated with it.
     */
-    public TopHitsAggregatorBuilder version(boolean version) {
+    public TopHitsAggregationBuilder version(boolean version) {
        this.version = version;
        return this;
    }
@@ -531,7 +531,7 @@ public class TopHitsAggregatorBuilder extends AggregatorBuilder<TopHitsAggregato
     * Applies when sorting, and controls if scores will be tracked as well.
     * Defaults to <tt>false</tt>.
     */
-    public TopHitsAggregatorBuilder trackScores(boolean trackScores) {
+    public TopHitsAggregationBuilder trackScores(boolean trackScores) {
        this.trackScores = trackScores;
        return this;
    }
@@ -544,7 +544,7 @@ public class TopHitsAggregatorBuilder extends AggregatorBuilder<TopHitsAggregato
    }
 
    @Override
-    public TopHitsAggregatorBuilder subAggregations(Builder subFactories) {
+    public TopHitsAggregationBuilder subAggregations(Builder subFactories) {
        throw new AggregationInitializationException("Aggregator [" + name + "] of type [" + type + "] cannot accept sub-aggregations");
    }
 
@@ -607,8 +607,8 @@ public class TopHitsAggregatorBuilder extends AggregatorBuilder<TopHitsAggregato
        return builder;
    }
 
-    public static TopHitsAggregatorBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
-        TopHitsAggregatorBuilder factory = new TopHitsAggregatorBuilder(aggregationName);
+    public static TopHitsAggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
+        TopHitsAggregationBuilder factory = new TopHitsAggregationBuilder(aggregationName);
        XContentParser.Token token;
        String currentFieldName = null;
        XContentParser parser = context.parser();
@@ -741,7 +741,7 @@ public class TopHitsAggregatorBuilder extends AggregatorBuilder<TopHitsAggregato
 
    @Override
    protected boolean doEquals(Object obj) {
-        TopHitsAggregatorBuilder other = (TopHitsAggregatorBuilder) obj;
+        TopHitsAggregationBuilder other = (TopHitsAggregationBuilder) obj;
        return Objects.equals(explain, other.explain)
                && Objects.equals(fetchSourceContext, other.fetchSourceContext)
                && Objects.equals(fieldDataFields, other.fieldDataFields)
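
For orientation, a minimal usage sketch of the renamed builder (not part of this commit; the package path and the "top_tags" name are assumed for illustration). Every setter shown in the hunks above validates its argument and returns `this`, so calls chain:

import org.elasticsearch.search.aggregations.metrics.tophits.TopHitsAggregationBuilder;
import org.elasticsearch.search.sort.SortOrder;

class TopHitsUsageSketch {
    static TopHitsAggregationBuilder topTags() {
        return new TopHitsAggregationBuilder("top_tags") // aggregation name
                .from(0)                                 // must be >= 0, else IllegalArgumentException
                .size(3)                                 // must be >= 0
                .sort("timestamp", SortOrder.DESC)       // field sort with explicit order
                .explain(true)                           // attach a ranking explanation to each hit
                .fetchSource("title", null);             // one include pattern, no exclude
    }
}
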
@@ -28,24 +28,24 @@ import org.elasticsearch.search.aggregations.AggregatorFactory;
 import org.elasticsearch.search.aggregations.support.AggregationContext;
 import org.elasticsearch.search.aggregations.support.ValueType;
 import org.elasticsearch.search.aggregations.support.ValuesSource;
-import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder;
+import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
 import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
 import org.elasticsearch.search.aggregations.support.ValuesSourceType;
 
 import java.io.IOException;
 
-public class ValueCountAggregatorBuilder extends ValuesSourceAggregatorBuilder.LeafOnly<ValuesSource, ValueCountAggregatorBuilder> {
+public class ValueCountAggregationBuilder extends ValuesSourceAggregationBuilder.LeafOnly<ValuesSource, ValueCountAggregationBuilder> {
     public static final String NAME = InternalValueCount.TYPE.name();
     public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME);
 
-    public ValueCountAggregatorBuilder(String name, ValueType targetValueType) {
+    public ValueCountAggregationBuilder(String name, ValueType targetValueType) {
         super(name, InternalValueCount.TYPE, ValuesSourceType.ANY, targetValueType);
     }
 
     /**
      * Read from a stream.
      */
-    public ValueCountAggregatorBuilder(StreamInput in) throws IOException {
+    public ValueCountAggregationBuilder(StreamInput in) throws IOException {
         super(in, InternalValueCount.TYPE, ValuesSourceType.ANY);
     }
 
@@ -24,7 +24,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.search.aggregations.support.AbstractValuesSourceParser.AnyValuesSourceParser;
 import org.elasticsearch.search.aggregations.support.ValueType;
 import org.elasticsearch.search.aggregations.support.ValuesSource;
-import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder;
+import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
 import org.elasticsearch.search.aggregations.support.ValuesSourceType;
 
 import java.io.IOException;
@@ -46,8 +46,8 @@ public class ValueCountParser extends AnyValuesSourceParser {
     }
 
     @Override
-    protected ValuesSourceAggregatorBuilder<ValuesSource, ValueCountAggregatorBuilder> createFactory(
+    protected ValuesSourceAggregationBuilder<ValuesSource, ValueCountAggregationBuilder> createFactory(
             String aggregationName, ValuesSourceType valuesSourceType, ValueType targetValueType, Map<ParseField, Object> otherOptions) {
-        return new ValueCountAggregatorBuilder(aggregationName, targetValueType);
+        return new ValueCountAggregationBuilder(aggregationName, targetValueType);
     }
 }
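
A small sketch of constructing the renamed value_count builder directly (illustrative only; the aggregation and field names are made up). The parser above simply passes through whatever target ValueType it parsed, possibly null:

import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCountAggregationBuilder;

class ValueCountUsageSketch {
    static ValueCountAggregationBuilder idCount() {
        // LeafOnly: a value_count aggregation cannot carry sub-aggregations.
        // The second argument is the target ValueType; null is permitted,
        // mirroring what the parser passes when none was specified.
        return new ValueCountAggregationBuilder("id_count", null)
                .field("user.id"); // field(...) is inherited from ValuesSourceAggregationBuilder
    }
}
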
@@ -84,7 +84,7 @@ public abstract class AbstractValuesSourceParser<VS extends ValuesSource>
     }
 
     @Override
-    public final ValuesSourceAggregatorBuilder<VS, ?> parse(String aggregationName, QueryParseContext context)
+    public final ValuesSourceAggregationBuilder<VS, ?> parse(String aggregationName, QueryParseContext context)
             throws IOException {
 
         XContentParser parser = context.parser();
@@ -147,7 +147,7 @@ public abstract class AbstractValuesSourceParser<VS extends ValuesSource>
             }
         }
 
-        ValuesSourceAggregatorBuilder<VS, ?> factory = createFactory(aggregationName, this.valuesSourceType, this.targetValueType,
+        ValuesSourceAggregationBuilder<VS, ?> factory = createFactory(aggregationName, this.valuesSourceType, this.targetValueType,
                 otherOptions);
         if (field != null) {
             factory.field(field);
@@ -171,7 +171,7 @@ public abstract class AbstractValuesSourceParser<VS extends ValuesSource>
     }
 
     /**
-     * Creates a {@link ValuesSourceAggregatorBuilder} from the information
+     * Creates a {@link ValuesSourceAggregationBuilder} from the information
      * gathered by the subclass. Options parsed in
      * {@link AbstractValuesSourceParser} itself will be added to the factory
      * after it has been returned by this method.
@@ -189,7 +189,7 @@ public abstract class AbstractValuesSourceParser<VS extends ValuesSource>
      *            method
      * @return the created factory
      */
-    protected abstract ValuesSourceAggregatorBuilder<VS, ?> createFactory(String aggregationName, ValuesSourceType valuesSourceType,
+    protected abstract ValuesSourceAggregationBuilder<VS, ?> createFactory(String aggregationName, ValuesSourceType valuesSourceType,
             ValueType targetValueType, Map<ParseField, Object> otherOptions);
 
     /**

@@ -31,7 +31,7 @@ import org.elasticsearch.script.ScriptContext;
 import org.elasticsearch.script.SearchScript;
 import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.AggregationInitializationException;
-import org.elasticsearch.search.aggregations.AggregatorBuilder;
+import org.elasticsearch.search.aggregations.AggregationBuilder;
 import org.elasticsearch.search.aggregations.AggregatorFactories;
 import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
 import org.elasticsearch.search.aggregations.AggregatorFactory;
@@ -46,11 +46,11 @@ import java.util.Objects;
 /**
  *
  */
-public abstract class ValuesSourceAggregatorBuilder<VS extends ValuesSource, AB extends ValuesSourceAggregatorBuilder<VS, AB>>
-        extends AggregatorBuilder<AB> {
+public abstract class ValuesSourceAggregationBuilder<VS extends ValuesSource, AB extends ValuesSourceAggregationBuilder<VS, AB>>
+        extends AggregationBuilder<AB> {
 
-    public static abstract class LeafOnly<VS extends ValuesSource, AB extends ValuesSourceAggregatorBuilder<VS, AB>>
-            extends ValuesSourceAggregatorBuilder<VS, AB> {
+    public static abstract class LeafOnly<VS extends ValuesSource, AB extends ValuesSourceAggregationBuilder<VS, AB>>
+            extends ValuesSourceAggregationBuilder<VS, AB> {
 
         protected LeafOnly(String name, Type type, ValuesSourceType valuesSourceType, ValueType targetValueType) {
             super(name, type, valuesSourceType, targetValueType);
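
The two-parameter signature above is an F-bounded ("self-type") generic: `AB` names the concrete subclass, so setters defined once in the base class can return that subclass and keep fluent chains fully typed. A standalone sketch of the pattern, with hypothetical names and no Elasticsearch dependency:

// Minimal illustration of the self-type pattern used by
// ValuesSourceAggregationBuilder<VS, AB extends ValuesSourceAggregationBuilder<VS, AB>>.
abstract class FluentBuilder<AB extends FluentBuilder<AB>> {
    String field;

    @SuppressWarnings("unchecked")
    AB field(String field) {     // shared setter, defined once in the base class
        this.field = field;
        return (AB) this;        // cast is safe by the F-bounded declaration
    }
}

final class CountBuilder extends FluentBuilder<CountBuilder> {
    int precision;

    CountBuilder precision(int precision) {  // subclass-specific setter
        this.precision = precision;
        return this;
    }
}

class SelfTypeDemo {
    public static void main(String[] args) {
        // field(...) already returns CountBuilder, so the chain stays typed:
        CountBuilder b = new CountBuilder().field("user.id").precision(3);
        System.out.println(b.field + " / " + b.precision);
    }
}
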
@@ -87,7 +87,7 @@ public abstract class ValuesSourceAggregatorBuilder<VS extends ValuesSource, AB
     private DateTimeZone timeZone = null;
     protected ValuesSourceConfig<VS> config;
 
-    protected ValuesSourceAggregatorBuilder(String name, Type type, ValuesSourceType valuesSourceType, ValueType targetValueType) {
+    protected ValuesSourceAggregationBuilder(String name, Type type, ValuesSourceType valuesSourceType, ValueType targetValueType) {
         super(name, type);
         if (valuesSourceType == null) {
             throw new IllegalArgumentException("[valuesSourceType] must not be null: [" + name + "]");
@@ -99,7 +99,7 @@ public abstract class ValuesSourceAggregatorBuilder<VS extends ValuesSource, AB
     /**
      * Read an aggregation from a stream that does not serialize its targetValueType. This should be used by most subclasses.
      */
-    protected ValuesSourceAggregatorBuilder(StreamInput in, Type type, ValuesSourceType valuesSourceType, ValueType targetValueType)
+    protected ValuesSourceAggregationBuilder(StreamInput in, Type type, ValuesSourceType valuesSourceType, ValueType targetValueType)
             throws IOException {
         super(in, type);
         assert false == serializeTargetValueType() : "Wrong read constructor called for subclass that provides its targetValueType";
@@ -112,7 +112,7 @@ public abstract class ValuesSourceAggregatorBuilder<VS extends ValuesSource, AB
      * Read an aggregation from a stream that serializes its targetValueType. This should only be used by subclasses that override
      * {@link #serializeTargetValueType()} to return true.
      */
-    protected ValuesSourceAggregatorBuilder(StreamInput in, Type type, ValuesSourceType valuesSourceType) throws IOException {
+    protected ValuesSourceAggregationBuilder(StreamInput in, Type type, ValuesSourceType valuesSourceType) throws IOException {
         super(in, type);
         assert serializeTargetValueType() : "Wrong read constructor called for subclass that serializes its targetValueType";
         this.valuesSourceType = valuesSourceType;
@@ -430,7 +430,7 @@ public abstract class ValuesSourceAggregatorBuilder<VS extends ValuesSource, AB
 
     @Override
     protected final boolean doEquals(Object obj) {
-        ValuesSourceAggregatorBuilder<?, ?> other = (ValuesSourceAggregatorBuilder<?, ?>) obj;
+        ValuesSourceAggregationBuilder<?, ?> other = (ValuesSourceAggregationBuilder<?, ?>) obj;
         if (!Objects.equals(field, other.field))
             return false;
         if (!Objects.equals(format, other.format))

@@ -41,7 +41,7 @@ import org.elasticsearch.index.query.QueryBuilder;
 import org.elasticsearch.index.query.QueryParseContext;
 import org.elasticsearch.index.query.QueryShardContext;
 import org.elasticsearch.script.Script;
-import org.elasticsearch.search.aggregations.AggregatorBuilder;
+import org.elasticsearch.search.aggregations.AggregationBuilder;
 import org.elasticsearch.search.aggregations.AggregatorFactories;
 import org.elasticsearch.search.aggregations.AggregatorParsers;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilder;
@@ -600,7 +600,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
     /**
      * Add an aggregation to perform as part of the search.
      */
-    public SearchSourceBuilder aggregation(AggregatorBuilder<?> aggregation) {
+    public SearchSourceBuilder aggregation(AggregationBuilder<?> aggregation) {
         if (aggregations == null) {
             aggregations = AggregatorFactories.builder();
         }
@@ -1033,6 +1033,9 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
                     suggestBuilder = SuggestBuilder.fromXContent(context, suggesters);
                 } else if (context.getParseFieldMatcher().match(currentFieldName, SORT_FIELD)) {
                     sorts = new ArrayList<>(SortBuilder.fromXContent(context));
                 } else if (context.getParseFieldMatcher().match(currentFieldName, RESCORE_FIELD)) {
                     rescoreBuilders = new ArrayList<>();
                     rescoreBuilders.add(RescoreBuilder.parseFromXContent(context));
+                } else if (context.getParseFieldMatcher().match(currentFieldName, EXT_FIELD)) {
+                    XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().copyCurrentStructure(parser);
+                    ext = xContentBuilder.bytes();

@@ -0,0 +1,66 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.cluster.metadata;
+
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.index.IndexService;
+import org.elasticsearch.index.mapper.DocumentMapper;
+import org.elasticsearch.test.ESSingleNodeTestCase;
+
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.is;
+
+public class MetaDataMappingServiceTests extends ESSingleNodeTestCase {
+
+    // Tests _parent meta field logic, because part of the validation is in MetaDataMappingService
+    public void testAddChildTypePointingToAlreadyExistingType() throws Exception {
+        createIndex("test", Settings.EMPTY, "type", "field", "type=keyword");
+
+        // Shouldn't be able the add the _parent field pointing to an already existing type, which isn't a parent type
+        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> client().admin()
+                .indices()
+                .preparePutMapping("test")
+                .setType("child")
+                .setSource("_parent", "type=type")
+                .get());
+        assertThat(e.getMessage(),
+                equalTo("can't add a _parent field that points to an already existing type, that isn't already a parent"));
+    }
+
+    // Tests _parent meta field logic, because part of the validation is in MetaDataMappingService
+    public void testAddExtraChildTypePointingToAlreadyParentExistingType() throws Exception {
+        IndexService indexService = createIndex("test", client().admin().indices().prepareCreate("test")
+                .addMapping("parent")
+                .addMapping("child1", "_parent", "type=parent")
+        );
+
+        // adding the extra child type that points to an already existing parent type is allowed:
+        client().admin()
+                .indices()
+                .preparePutMapping("test")
+                .setType("child2")
+                .setSource("_parent", "type=parent")
+                .get();
+
+        DocumentMapper documentMapper = indexService.mapperService().documentMapper("child2");
+        assertThat(documentMapper.parentFieldMapper().type(), equalTo("parent"));
+        assertThat(documentMapper.parentFieldMapper().active(), is(true));
+    }
+
+}

@@ -23,7 +23,7 @@ import org.elasticsearch.common.blobstore.fs.FsBlobStore;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.ByteSizeUnit;
 import org.elasticsearch.common.unit.ByteSizeValue;
-import org.elasticsearch.test.ESBlobStoreContainerTestCase;
+import org.elasticsearch.repositories.ESBlobStoreContainerTestCase;
 
 import java.io.IOException;
 import java.nio.file.Path;
@@ -23,7 +23,7 @@ import org.elasticsearch.common.blobstore.fs.FsBlobStore;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.ByteSizeUnit;
 import org.elasticsearch.common.unit.ByteSizeValue;
-import org.elasticsearch.test.ESBlobStoreTestCase;
+import org.elasticsearch.repositories.ESBlobStoreTestCase;
 
 import java.io.IOException;
 import java.nio.file.Path;

@@ -0,0 +1,75 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.common.rounding;
+
+import org.elasticsearch.test.ESTestCase;
+
+import static org.elasticsearch.common.rounding.DateTimeUnit.WEEK_OF_WEEKYEAR;
+import static org.elasticsearch.common.rounding.DateTimeUnit.YEAR_OF_CENTURY;
+import static org.elasticsearch.common.rounding.DateTimeUnit.QUARTER;
+import static org.elasticsearch.common.rounding.DateTimeUnit.MONTH_OF_YEAR;
+import static org.elasticsearch.common.rounding.DateTimeUnit.DAY_OF_MONTH;
+import static org.elasticsearch.common.rounding.DateTimeUnit.HOUR_OF_DAY;
+import static org.elasticsearch.common.rounding.DateTimeUnit.MINUTES_OF_HOUR;
+import static org.elasticsearch.common.rounding.DateTimeUnit.SECOND_OF_MINUTE;
+
+public class DateTimeUnitTests extends ESTestCase {
+
+    /**
+     * test that we don't accidentally change enum ids
+     */
+    public void testEnumIds() {
+        assertEquals(1, WEEK_OF_WEEKYEAR.id());
+        assertEquals(WEEK_OF_WEEKYEAR, DateTimeUnit.resolve((byte) 1));
+
+        assertEquals(2, YEAR_OF_CENTURY.id());
+        assertEquals(YEAR_OF_CENTURY, DateTimeUnit.resolve((byte) 2));
+
+        assertEquals(3, QUARTER.id());
+        assertEquals(QUARTER, DateTimeUnit.resolve((byte) 3));
+
+        assertEquals(4, MONTH_OF_YEAR.id());
+        assertEquals(MONTH_OF_YEAR, DateTimeUnit.resolve((byte) 4));
+
+        assertEquals(5, DAY_OF_MONTH.id());
+        assertEquals(DAY_OF_MONTH, DateTimeUnit.resolve((byte) 5));
+
+        assertEquals(6, HOUR_OF_DAY.id());
+        assertEquals(HOUR_OF_DAY, DateTimeUnit.resolve((byte) 6));
+
+        assertEquals(7, MINUTES_OF_HOUR.id());
+        assertEquals(MINUTES_OF_HOUR, DateTimeUnit.resolve((byte) 7));
+
+        assertEquals(8, SECOND_OF_MINUTE.id());
+        assertEquals(SECOND_OF_MINUTE, DateTimeUnit.resolve((byte) 8));
+    }
+
+    public void testIsDayOrLonger() {
+        for (DateTimeUnit unit : DateTimeUnit.values()) {
+            if (DateTimeUnit.isDayOrLonger(unit)) {
+                assertTrue(unit == DAY_OF_MONTH ||
+                        unit == MONTH_OF_YEAR ||
+                        unit == QUARTER ||
+                        unit == YEAR_OF_CENTURY ||
+                        unit == WEEK_OF_WEEKYEAR);
+            }
+        }
+    }
+
+}
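
The enum-id test above matters because the id is what crosses the wire. A minimal round-trip sketch, using only the `id()` and `resolve(byte)` methods the test already exercises:

import org.elasticsearch.common.rounding.DateTimeUnit;

class DateTimeUnitWireSketch {
    public static void main(String[] args) {
        // A unit is serialized as its byte id and resolved back on the
        // receiving node; if the ids pinned by testEnumIds() ever changed,
        // mixed-version clusters would silently decode the wrong unit.
        byte onWire = DateTimeUnit.HOUR_OF_DAY.id();
        DateTimeUnit decoded = DateTimeUnit.resolve(onWire);
        System.out.println(decoded == DateTimeUnit.HOUR_OF_DAY); // true
    }
}
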
@@ -25,6 +25,7 @@ import org.joda.time.DateTime;
 import org.joda.time.DateTimeZone;
 import org.joda.time.format.ISODateTimeFormat;
 
+import java.util.ArrayList;
 import java.util.concurrent.TimeUnit;
 
 import static org.hamcrest.Matchers.equalTo;
@@ -147,21 +148,37 @@ public class TimeZoneRoundingTests extends ESTestCase {
        Rounding tzRounding;
        // testing savings to non savings switch
        tzRounding = TimeZoneRounding.builder(DateTimeUnit.HOUR_OF_DAY).timeZone(DateTimeZone.forID("UTC")).build();
        assertThat(tzRounding.round(time("2014-10-26T01:01:01", DateTimeZone.forID("CET"))),
                equalTo(time("2014-10-26T01:00:00", DateTimeZone.forID("CET"))));
        assertThat(tzRounding.round(time("2014-10-26T01:01:01", DateTimeZone.forOffsetHours(2))), // CEST = UTC+2
                equalTo(time("2014-10-26T01:00:00", DateTimeZone.forOffsetHours(2))));
        assertThat(tzRounding.nextRoundingValue(time("2014-10-26T01:00:00", DateTimeZone.forOffsetHours(2))),
                equalTo(time("2014-10-26T02:00:00", DateTimeZone.forOffsetHours(2))));
        assertThat(tzRounding.nextRoundingValue(time("2014-10-26T02:00:00", DateTimeZone.forOffsetHours(2))),
                equalTo(time("2014-10-26T03:00:00", DateTimeZone.forOffsetHours(2))));

        tzRounding = TimeZoneRounding.builder(DateTimeUnit.HOUR_OF_DAY).timeZone(DateTimeZone.forID("CET")).build();
        assertThat(tzRounding.round(time("2014-10-26T01:01:01", DateTimeZone.forID("CET"))),
                equalTo(time("2014-10-26T01:00:00", DateTimeZone.forID("CET"))));
        assertThat(tzRounding.round(time("2014-10-26T01:01:01", DateTimeZone.forOffsetHours(2))), // CEST = UTC+2
                equalTo(time("2014-10-26T01:00:00", DateTimeZone.forOffsetHours(2))));
        assertThat(tzRounding.nextRoundingValue(time("2014-10-26T01:00:00", DateTimeZone.forOffsetHours(2))),
                equalTo(time("2014-10-26T02:00:00", DateTimeZone.forOffsetHours(2))));
        assertThat(tzRounding.nextRoundingValue(time("2014-10-26T02:00:00", DateTimeZone.forOffsetHours(2))),
                equalTo(time("2014-10-26T03:00:00", DateTimeZone.forOffsetHours(2))));

        // testing non savings to savings switch
        tzRounding = TimeZoneRounding.builder(DateTimeUnit.HOUR_OF_DAY).timeZone(DateTimeZone.forID("UTC")).build();
        assertThat(tzRounding.round(time("2014-03-30T01:01:01", DateTimeZone.forID("CET"))),
                equalTo(time("2014-03-30T01:00:00", DateTimeZone.forID("CET"))));
        assertThat(tzRounding.round(time("2014-03-30T01:01:01", DateTimeZone.forOffsetHours(1))), // CET = UTC+1
                equalTo(time("2014-03-30T01:00:00", DateTimeZone.forOffsetHours(1))));
        assertThat(tzRounding.nextRoundingValue(time("2014-03-30T01:00:00", DateTimeZone.forOffsetHours(1))),
                equalTo(time("2014-03-30T02:00:00", DateTimeZone.forOffsetHours(1))));
        assertThat(tzRounding.nextRoundingValue(time("2014-03-30T02:00:00", DateTimeZone.forOffsetHours(1))),
                equalTo(time("2014-03-30T03:00:00", DateTimeZone.forOffsetHours(1))));

        tzRounding = TimeZoneRounding.builder(DateTimeUnit.HOUR_OF_DAY).timeZone(DateTimeZone.forID("CET")).build();
        assertThat(tzRounding.round(time("2014-03-30T01:01:01", DateTimeZone.forID("CET"))),
                equalTo(time("2014-03-30T01:00:00", DateTimeZone.forID("CET"))));
        assertThat(tzRounding.round(time("2014-03-30T01:01:01", DateTimeZone.forOffsetHours(1))), // CET = UTC+1
                equalTo(time("2014-03-30T01:00:00", DateTimeZone.forOffsetHours(1))));
        assertThat(tzRounding.nextRoundingValue(time("2014-03-30T01:00:00", DateTimeZone.forOffsetHours(1))),
                equalTo(time("2014-03-30T02:00:00", DateTimeZone.forOffsetHours(1))));
        assertThat(tzRounding.nextRoundingValue(time("2014-03-30T02:00:00", DateTimeZone.forOffsetHours(1))),
                equalTo(time("2014-03-30T03:00:00", DateTimeZone.forOffsetHours(1))));

        // testing non savings to savings switch (America/Chicago)
        tzRounding = TimeZoneRounding.builder(DateTimeUnit.HOUR_OF_DAY).timeZone(DateTimeZone.forID("UTC")).build();
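
A compact sketch of the behavior these assertions pin down (only the builder API the test itself uses; the millisecond constant is the UTC instant for 2014-10-26T00:00:00Z, i.e. 02:00 CEST just before the fall-back):

import org.elasticsearch.common.rounding.DateTimeUnit;
import org.elasticsearch.common.rounding.Rounding;
import org.elasticsearch.common.rounding.TimeZoneRounding;
import org.joda.time.DateTimeZone;

class DstRoundingSketch {
    public static void main(String[] args) {
        // Hour rounding in CET around the 2014-10-26 fall-back: the wall
        // clock repeats 02:00-03:00, so two distinct UTC instants can carry
        // the same local label while remaining one hour apart in UTC.
        Rounding rounding = TimeZoneRounding.builder(DateTimeUnit.HOUR_OF_DAY)
                .timeZone(DateTimeZone.forID("CET"))
                .build();
        long t = 1414281600000L; // 2014-10-26T00:00:00Z
        System.out.println(rounding.round(t));
        System.out.println(rounding.nextRoundingValue(rounding.round(t)));
    }
}
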
@@ -210,6 +227,31 @@ public class TimeZoneRoundingTests extends ESTestCase {
         }
     }
 
+    /**
+     * Test that nextRoundingValue() for hour rounding (and smaller) is equally spaced (see #18326)
+     * Start at a random date in a random time zone, then find the next zone offset transition (if any).
+     * From there, check that when we advance by using rounding#nextRoundingValue(), we always advance by the same
+     * amount of milliseconds.
+     */
+    public void testSubHourNextRoundingEquallySpaced() {
+        String timeZone = randomFrom(new ArrayList<>(DateTimeZone.getAvailableIDs()));
+        DateTimeUnit unit = randomFrom(new DateTimeUnit[] { DateTimeUnit.HOUR_OF_DAY, DateTimeUnit.MINUTES_OF_HOUR,
+                DateTimeUnit.SECOND_OF_MINUTE });
+        DateTimeZone tz = DateTimeZone.forID(timeZone);
+        TimeZoneRounding rounding = new TimeZoneRounding.TimeUnitRounding(unit, tz);
+        // move the random date to transition for timezones that have offset change due to dst transition
+        long nextTransition = tz.nextTransition(Math.abs(randomLong() % ((long) 10e11)));
+        final long millisPerUnit = unit.field().getDurationField().getUnitMillis();
+        // start ten units before transition
+        long roundedDate = rounding.round(nextTransition - (10 * millisPerUnit));
+        while (roundedDate < nextTransition + 10 * millisPerUnit) {
+            long delta = rounding.nextRoundingValue(roundedDate) - roundedDate;
+            assertEquals("Difference between rounded values not equally spaced for [" + unit.name() + "], [" + timeZone + "] at "
+                    + new DateTime(roundedDate), millisPerUnit, delta);
+            roundedDate = rounding.nextRoundingValue(roundedDate);
+        }
+    }
+
     /**
      * randomized test on TimeIntervalRounding with random interval and time zone offsets
      */
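
The per-unit step used by the loop above comes straight from Joda-Time. A tiny standalone sketch of that lookup, using only methods the test already exercises:

import org.elasticsearch.common.rounding.DateTimeUnit;

class UnitMillisSketch {
    public static void main(String[] args) {
        // unit.field() exposes the Joda DateTimeField behind the enum; its
        // duration field yields the fixed unit length in milliseconds that
        // the test's equal-spacing invariant is checked against.
        long hourMillis = DateTimeUnit.HOUR_OF_DAY.field().getDurationField().getUnitMillis();
        System.out.println(hourMillis); // 3600000
    }
}
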
Some files were not shown because too many files have changed in this diff.