Merge branch 'master' into clean
commit 3381f50e07
@@ -201,7 +201,7 @@ gradle test -Dtests.timeoutSuite=5000! ...
 
 Change the logging level of ES (not gradle)
 
 --------------------------------
-gradle test -Des.logger.level=DEBUG
+gradle test -Dtests.logger.level=DEBUG
 --------------------------------
 
 Print all the logging output from the test runs to the commandline
@@ -456,7 +456,7 @@ class BuildPlugin implements Plugin<Project> {
 // default test sysprop values
 systemProperty 'tests.ifNoTests', 'fail'
 // TODO: remove setting logging level via system property
-systemProperty 'es.logger.level', 'WARN'
+systemProperty 'tests.logger.level', 'WARN'
 for (Map.Entry<String, String> property : System.properties.entrySet()) {
 if (property.getKey().startsWith('tests.') ||
 property.getKey().startsWith('es.')) {
@@ -129,7 +129,7 @@ class NodeInfo {
 }
 
 env = [ 'JAVA_HOME' : project.javaHome ]
-args.addAll("-E", "es.node.portsfile=true")
+args.addAll("-E", "node.portsfile=true")
 String collectedSystemProperties = config.systemProperties.collect { key, value -> "-D${key}=${value}" }.join(" ")
 String esJavaOpts = config.jvmArgs.isEmpty() ? collectedSystemProperties : collectedSystemProperties + " " + config.jvmArgs
 env.put('ES_JAVA_OPTS', esJavaOpts)
@@ -140,7 +140,7 @@ class NodeInfo {
 }
 }
 env.put('ES_JVM_OPTIONS', new File(confDir, 'jvm.options'))
-args.addAll("-E", "es.path.conf=${confDir}")
+args.addAll("-E", "path.conf=${confDir}")
 if (Os.isFamily(Os.FAMILY_WINDOWS)) {
 args.add('"') // end the entire command, quoted
 }
@@ -1,2 +1,2 @@
-#!/bin/sh -e
+#!/bin/bash -e
 <% commands.each {command -> %><%= command %><% } %>
@@ -1,2 +1,2 @@
-#!/bin/sh -e
+#!/bin/bash -e
 <% commands.each {command -> %><%= command %><% } %>
@@ -69,6 +69,8 @@ public class Version {
 public static final Version V_2_3_1 = new Version(V_2_3_1_ID, org.apache.lucene.util.Version.LUCENE_5_5_0);
 public static final int V_2_3_2_ID = 2030299;
 public static final Version V_2_3_2 = new Version(V_2_3_2_ID, org.apache.lucene.util.Version.LUCENE_5_5_0);
+public static final int V_2_3_3_ID = 2030399;
+public static final Version V_2_3_3 = new Version(V_2_3_3_ID, org.apache.lucene.util.Version.LUCENE_5_5_0);
 public static final int V_5_0_0_alpha1_ID = 5000001;
 public static final Version V_5_0_0_alpha1 = new Version(V_5_0_0_alpha1_ID, org.apache.lucene.util.Version.LUCENE_6_0_0);
 public static final int V_5_0_0_alpha2_ID = 5000002;
@@ -94,6 +96,8 @@ public class Version {
 return V_5_0_0_alpha2;
 case V_5_0_0_alpha1_ID:
 return V_5_0_0_alpha1;
+case V_2_3_3_ID:
+return V_2_3_3;
 case V_2_3_2_ID:
 return V_2_3_2;
 case V_2_3_1_ID:
@@ -54,7 +54,7 @@ public class TransportClearIndicesCacheAction extends TransportBroadcastByNodeAc
 TransportService transportService, IndicesService indicesService, ActionFilters actionFilters,
 IndexNameExpressionResolver indexNameExpressionResolver) {
 super(settings, ClearIndicesCacheAction.NAME, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver,
-ClearIndicesCacheRequest::new, ThreadPool.Names.MANAGEMENT);
+ClearIndicesCacheRequest::new, ThreadPool.Names.MANAGEMENT, false);
 this.indicesService = indicesService;
 }
 
@@ -27,7 +27,7 @@ import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.query.QueryBuilder;
-import org.elasticsearch.search.aggregations.AggregatorBuilder;
+import org.elasticsearch.search.aggregations.AggregationBuilder;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilder;
 import org.elasticsearch.search.highlight.HighlightBuilder;
 import org.elasticsearch.search.sort.SortBuilder;
@@ -165,9 +165,9 @@ public class PercolateRequestBuilder extends ActionRequestBuilder<PercolateReque
 
 /**
 * Delegates to
-* {@link PercolateSourceBuilder#addAggregation(AggregatorBuilder)}
+* {@link PercolateSourceBuilder#addAggregation(AggregationBuilder)}
 */
-public PercolateRequestBuilder addAggregation(AggregatorBuilder<?> aggregationBuilder) {
+public PercolateRequestBuilder addAggregation(AggregationBuilder<?> aggregationBuilder) {
 sourceBuilder().addAggregation(aggregationBuilder);
 return this;
 }
@@ -29,7 +29,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.query.QueryBuilder;
-import org.elasticsearch.search.aggregations.AggregatorBuilder;
+import org.elasticsearch.search.aggregations.AggregationBuilder;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilder;
 import org.elasticsearch.search.builder.SearchSourceBuilder;
 import org.elasticsearch.search.highlight.HighlightBuilder;
@@ -53,7 +53,7 @@ public class PercolateSourceBuilder extends ToXContentToBytes {
 private List<SortBuilder<?>> sorts;
 private Boolean trackScores;
 private HighlightBuilder highlightBuilder;
-private List<AggregatorBuilder<?>> aggregationBuilders;
+private List<AggregationBuilder<?>> aggregationBuilders;
 private List<PipelineAggregatorBuilder<?>> pipelineAggregationBuilders;
 
 /**
@@ -126,7 +126,7 @@ public class PercolateSourceBuilder extends ToXContentToBytes {
 /**
 * Add an aggregation definition.
 */
-public PercolateSourceBuilder addAggregation(AggregatorBuilder<?> aggregationBuilder) {
+public PercolateSourceBuilder addAggregation(AggregationBuilder<?> aggregationBuilder) {
 if (aggregationBuilders == null) {
 aggregationBuilders = new ArrayList<>();
 }
@@ -175,7 +175,7 @@ public class PercolateSourceBuilder extends ToXContentToBytes {
 builder.field("aggregations");
 builder.startObject();
 if (aggregationBuilders != null) {
-for (AggregatorBuilder<?> aggregation : aggregationBuilders) {
+for (AggregationBuilder<?> aggregation : aggregationBuilders) {
 aggregation.toXContent(builder, params);
 }
 }
@@ -28,7 +28,7 @@ import org.elasticsearch.index.query.QueryBuilder;
 import org.elasticsearch.script.Script;
 import org.elasticsearch.script.Template;
 import org.elasticsearch.search.Scroll;
-import org.elasticsearch.search.aggregations.AggregatorBuilder;
+import org.elasticsearch.search.aggregations.AggregationBuilder;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilder;
 import org.elasticsearch.search.builder.SearchSourceBuilder;
 import org.elasticsearch.search.highlight.HighlightBuilder;
@@ -373,7 +373,7 @@ public class SearchRequestBuilder extends ActionRequestBuilder<SearchRequest, Se
 /**
 * Adds an aggregation to the search operation.
 */
-public SearchRequestBuilder addAggregation(AggregatorBuilder<?> aggregation) {
+public SearchRequestBuilder addAggregation(AggregationBuilder<?> aggregation) {
 sourceBuilder().aggregation(aggregation);
 return this;
 }
@@ -84,6 +84,20 @@ public abstract class TransportBroadcastByNodeAction<Request extends BroadcastRe
 
 final String transportNodeBroadcastAction;
 
+public TransportBroadcastByNodeAction(
+Settings settings,
+String actionName,
+ThreadPool threadPool,
+ClusterService clusterService,
+TransportService transportService,
+ActionFilters actionFilters,
+IndexNameExpressionResolver indexNameExpressionResolver,
+Supplier<Request> request,
+String executor) {
+this(settings, actionName, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver, request,
+executor, true);
+}
+
 public TransportBroadcastByNodeAction(
 Settings settings,
 String actionName,
@@ -93,7 +107,8 @@ public abstract class TransportBroadcastByNodeAction<Request extends BroadcastRe
 ActionFilters actionFilters,
 IndexNameExpressionResolver indexNameExpressionResolver,
 Supplier<Request> request,
-String executor) {
+String executor,
+boolean canTripCircuitBreaker) {
 super(settings, actionName, threadPool, transportService, actionFilters, indexNameExpressionResolver, request);
 
 this.clusterService = clusterService;
@@ -101,7 +116,8 @@ public abstract class TransportBroadcastByNodeAction<Request extends BroadcastRe
 
 transportNodeBroadcastAction = actionName + "[n]";
 
-transportService.registerRequestHandler(transportNodeBroadcastAction, NodeRequest::new, executor, new BroadcastByNodeTransportRequestHandler());
+transportService.registerRequestHandler(transportNodeBroadcastAction, NodeRequest::new, executor, false, canTripCircuitBreaker,
+new BroadcastByNodeTransportRequestHandler());
 }
 
 private Response newResponse(
@@ -177,15 +177,7 @@ final class Bootstrap {
 // install SM after natives, shutdown hooks, etc.
 Security.configure(environment, BootstrapSettings.SECURITY_FILTER_BAD_DEFAULTS_SETTING.get(settings));
 
-// We do not need to reload system properties here as we have already applied them in building the settings and
-// reloading could cause multiple prompts to the user for values if a system property was specified with a prompt
-// placeholder
-Settings nodeSettings = Settings.builder()
-.put(settings)
-.put(InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING.getKey(), true)
-.build();
-
-node = new Node(nodeSettings) {
+node = new Node(settings) {
 @Override
 protected void validateNodeBeforeAcceptingRequests(Settings settings, BoundTransportAddress boundTransportAddress) {
 BootstrapCheck.check(settings, boundTransportAddress);
@@ -193,13 +185,13 @@ final class Bootstrap {
 };
 }
 
-private static Environment initialSettings(boolean foreground, String pidFile) {
+private static Environment initialSettings(boolean foreground, String pidFile, Map<String, String> esSettings) {
 Terminal terminal = foreground ? Terminal.DEFAULT : null;
 Settings.Builder builder = Settings.builder();
 if (Strings.hasLength(pidFile)) {
 builder.put(Environment.PIDFILE_SETTING.getKey(), pidFile);
 }
-return InternalSettingsPreparer.prepareEnvironment(builder.build(), terminal);
+return InternalSettingsPreparer.prepareEnvironment(builder.build(), terminal, esSettings);
 }
 
 private void start() {
@@ -233,11 +225,13 @@ final class Bootstrap {
 // Set the system property before anything has a chance to trigger its use
 initLoggerPrefix();
 
-elasticsearchSettings(esSettings);
+// force the class initializer for BootstrapInfo to run before
+// the security manager is installed
+BootstrapInfo.init();
 
 INSTANCE = new Bootstrap();
 
-Environment environment = initialSettings(foreground, pidFile);
+Environment environment = initialSettings(foreground, pidFile, esSettings);
 Settings settings = environment.settings();
 LogConfigurator.configure(settings, true);
 checkForCustomConfFile();
@@ -295,13 +289,6 @@ final class Bootstrap {
 }
 }
 
-@SuppressForbidden(reason = "Sets system properties passed as CLI parameters")
-private static void elasticsearchSettings(Map<String, String> esSettings) {
-for (Map.Entry<String, String> esSetting : esSettings.entrySet()) {
-System.setProperty(esSetting.getKey(), esSetting.getValue());
-}
-}
-
 @SuppressForbidden(reason = "System#out")
 private static void closeSystOut() {
 System.out.close();
@@ -120,4 +120,8 @@ public final class BootstrapInfo {
 }
 return SYSTEM_PROPERTIES;
 }
+
+public static void init() {
+}
+
 }
@@ -21,28 +21,25 @@ package org.elasticsearch.bootstrap;
 
 import joptsimple.OptionSet;
 import joptsimple.OptionSpec;
-import joptsimple.util.KeyValuePair;
 import org.elasticsearch.Build;
-import org.elasticsearch.cli.Command;
 import org.elasticsearch.cli.ExitCodes;
+import org.elasticsearch.cli.SettingCommand;
 import org.elasticsearch.cli.Terminal;
 import org.elasticsearch.cli.UserError;
 import org.elasticsearch.monitor.jvm.JvmInfo;
 
 import java.io.IOException;
 import java.util.Arrays;
-import java.util.HashMap;
 import java.util.Map;
 
 /**
 * This class starts elasticsearch.
 */
-class Elasticsearch extends Command {
+class Elasticsearch extends SettingCommand {
 
 private final OptionSpec<Void> versionOption;
 private final OptionSpec<Void> daemonizeOption;
 private final OptionSpec<String> pidfileOption;
-private final OptionSpec<KeyValuePair> propertyOption;
 
 // visible for testing
 Elasticsearch() {
@@ -56,7 +53,6 @@ class Elasticsearch extends Command {
 pidfileOption = parser.acceptsAll(Arrays.asList("p", "pidfile"),
 "Creates a pid file in the specified path on start")
 .withRequiredArg();
-propertyOption = parser.accepts("E", "Configure an Elasticsearch setting").withRequiredArg().ofType(KeyValuePair.class);
 }
 
 /**
@@ -75,7 +71,7 @@ class Elasticsearch extends Command {
 }
 
 @Override
-protected void execute(Terminal terminal, OptionSet options) throws Exception {
+protected void execute(Terminal terminal, OptionSet options, Map<String, String> settings) throws Exception {
 if (options.nonOptionArguments().isEmpty() == false) {
 throw new UserError(ExitCodes.USAGE, "Positional arguments not allowed, found " + options.nonOptionArguments());
 }
@@ -84,26 +80,15 @@ class Elasticsearch extends Command {
 throw new UserError(ExitCodes.USAGE, "Elasticsearch version option is mutually exclusive with any other option");
 }
 terminal.println("Version: " + org.elasticsearch.Version.CURRENT
-+ ", Build: " + Build.CURRENT.shortHash() + "/" + Build.CURRENT.date()
-+ ", JVM: " + JvmInfo.jvmInfo().version());
++ ", Build: " + Build.CURRENT.shortHash() + "/" + Build.CURRENT.date()
++ ", JVM: " + JvmInfo.jvmInfo().version());
 return;
 }
 
 final boolean daemonize = options.has(daemonizeOption);
 final String pidFile = pidfileOption.value(options);
 
-final Map<String, String> esSettings = new HashMap<>();
-for (final KeyValuePair kvp : propertyOption.values(options)) {
-if (!kvp.key.startsWith("es.")) {
-throw new UserError(ExitCodes.USAGE, "Elasticsearch settings must be prefixed with [es.] but was [" + kvp.key + "]");
-}
-if (kvp.value.isEmpty()) {
-throw new UserError(ExitCodes.USAGE, "Elasticsearch setting [" + kvp.key + "] must not be empty");
-}
-esSettings.put(kvp.key, kvp.value);
-}
-
-init(daemonize, pidFile, esSettings);
+init(daemonize, pidFile, settings);
 }
 
 void init(final boolean daemonize, final String pidFile, final Map<String, String> esSettings) {
@@ -19,15 +19,15 @@
 
 package org.elasticsearch.cli;
 
-import java.io.IOException;
-import java.util.Arrays;
-
 import joptsimple.OptionException;
 import joptsimple.OptionParser;
 import joptsimple.OptionSet;
 import joptsimple.OptionSpec;
 import org.elasticsearch.common.SuppressForbidden;
 
+import java.io.IOException;
+import java.util.Arrays;
+
 /**
 * An action to execute within a cli.
 */
@@ -112,4 +112,5 @@ public abstract class Command {
 *
 * Any runtime user errors (like an input file that does not exist), should throw a {@link UserError}. */
 protected abstract void execute(Terminal terminal, OptionSet options) throws Exception;
+
 }
@@ -0,0 +1,77 @@
+/*
+* Licensed to Elasticsearch under one or more contributor
+* license agreements. See the NOTICE file distributed with
+* this work for additional information regarding copyright
+* ownership. Elasticsearch licenses this file to you under
+* the Apache License, Version 2.0 (the "License"); you may
+* not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing,
+* software distributed under the License is distributed on an
+* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+* KIND, either express or implied. See the License for the
+* specific language governing permissions and limitations
+* under the License.
+*/
+
+package org.elasticsearch.cli;
+
+import joptsimple.OptionSet;
+import joptsimple.OptionSpec;
+import joptsimple.util.KeyValuePair;
+
+import java.util.HashMap;
+import java.util.Locale;
+import java.util.Map;
+
+public abstract class SettingCommand extends Command {
+
+private final OptionSpec<KeyValuePair> settingOption;
+
+public SettingCommand(String description) {
+super(description);
+this.settingOption = parser.accepts("E", "Configure a setting").withRequiredArg().ofType(KeyValuePair.class);
+}
+
+@Override
+protected void execute(Terminal terminal, OptionSet options) throws Exception {
+final Map<String, String> settings = new HashMap<>();
+for (final KeyValuePair kvp : settingOption.values(options)) {
+if (kvp.value.isEmpty()) {
+throw new UserError(ExitCodes.USAGE, "Setting [" + kvp.key + "] must not be empty");
+}
+settings.put(kvp.key, kvp.value);
+}
+
+putSystemPropertyIfSettingIsMissing(settings, "path.conf", "es.path.conf");
+putSystemPropertyIfSettingIsMissing(settings, "path.data", "es.path.data");
+putSystemPropertyIfSettingIsMissing(settings, "path.home", "es.path.home");
+putSystemPropertyIfSettingIsMissing(settings, "path.logs", "es.path.logs");
+
+execute(terminal, options, settings);
+}
+
+protected static void putSystemPropertyIfSettingIsMissing(final Map<String, String> settings, final String setting, final String key) {
+final String value = System.getProperty(key);
+if (value != null) {
+if (settings.containsKey(setting)) {
+final String message =
+String.format(
+Locale.ROOT,
+"duplicate setting [%s] found via command-line [%s] and system property [%s]",
+setting,
+settings.get(setting),
+value);
+throw new IllegalArgumentException(message);
+} else {
+settings.put(setting, value);
+}
+}
+}
+
+protected abstract void execute(Terminal terminal, OptionSet options, Map<String, String> settings) throws Exception;
+
+}
@@ -281,8 +281,11 @@ public class MetaDataMappingService extends AbstractComponent {
 // Also the order of the mappings may be backwards.
 if (newMapper.parentFieldMapper().active()) {
 for (ObjectCursor<MappingMetaData> mapping : indexMetaData.getMappings().values()) {
-if (newMapper.parentFieldMapper().type().equals(mapping.value.type())) {
-throw new IllegalArgumentException("can't add a _parent field that points to an already existing type");
+String parentType = newMapper.parentFieldMapper().type();
+if (parentType.equals(mapping.value.type()) &&
+indexService.mapperService().getParentTypes().contains(parentType) == false) {
+throw new IllegalArgumentException("can't add a _parent field that points to an " +
+"already existing type, that isn't already a parent");
 }
 }
 }
@@ -21,7 +21,6 @@ package org.elasticsearch.common.logging;
 
 import org.apache.log4j.PropertyConfigurator;
 import org.elasticsearch.ElasticsearchException;
-import org.elasticsearch.bootstrap.BootstrapInfo;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.settings.SettingsException;
 import org.elasticsearch.env.Environment;
@@ -93,8 +92,7 @@ public class LogConfigurator {
 
 /**
 * Consolidates settings and converts them into actual log4j settings, then initializes loggers and appenders.
 *
-* @param settings custom settings that should be applied
+* @param settings custom settings that should be applied
 * @param resolveConfig controls whether the logging conf file should be read too or not.
 */
 public static void configure(Settings settings, boolean resolveConfig) {
@@ -109,7 +107,7 @@ public class LogConfigurator {
 if (resolveConfig) {
 resolveConfig(environment, settingsBuilder);
 }
-settingsBuilder.putProperties("es.", BootstrapInfo.getSystemProperties());
+
 // add custom settings after config was added so that they are not overwritten by config
 settingsBuilder.put(settings);
 settingsBuilder.replacePropertyPlaceholders();
@@ -53,6 +53,15 @@ public enum DateTimeUnit {
 return field;
 }
 
+/**
+* @param unit the {@link DateTimeUnit} to check
+* @return true if the unit is a day or longer
+*/
+public static boolean isDayOrLonger(DateTimeUnit unit) {
+return (unit == DateTimeUnit.HOUR_OF_DAY || unit == DateTimeUnit.MINUTES_OF_HOUR
+|| unit == DateTimeUnit.SECOND_OF_MINUTE) == false;
+}
+
 public static DateTimeUnit resolve(byte id) {
 switch (id) {
 case 1: return WEEK_OF_WEEKYEAR;
@@ -46,8 +46,8 @@ public abstract class TimeZoneRounding extends Rounding {
 
 public static class Builder {
 
-private DateTimeUnit unit;
-private long interval = -1;
+private final DateTimeUnit unit;
+private final long interval;
 
 private DateTimeZone timeZone = DateTimeZone.UTC;
 
@@ -142,10 +142,15 @@ public abstract class TimeZoneRounding extends Rounding {
 
 @Override
 public long nextRoundingValue(long time) {
-long timeLocal = time;
-timeLocal = timeZone.convertUTCToLocal(time);
-long nextInLocalTime = durationField.add(timeLocal, 1);
-return timeZone.convertLocalToUTC(nextInLocalTime, false);
+if (DateTimeUnit.isDayOrLonger(unit)) {
+time = timeZone.convertUTCToLocal(time);
+}
+long next = durationField.add(time, 1);
+if (DateTimeUnit.isDayOrLonger(unit)) {
+return timeZone.convertLocalToUTC(next, false);
+} else {
+return next;
+}
 }
 
 @Override
@@ -161,12 +166,12 @@ public abstract class TimeZoneRounding extends Rounding {
 out.writeByte(unit.id());
 out.writeString(timeZone.getID());
 }
 
 @Override
 public int hashCode() {
 return Objects.hash(unit, timeZone);
 }
 
 @Override
 public boolean equals(Object obj) {
 if (obj == null) {
@@ -236,12 +241,12 @@ public abstract class TimeZoneRounding extends Rounding {
 out.writeVLong(interval);
 out.writeString(timeZone.getID());
 }
 
 @Override
 public int hashCode() {
 return Objects.hash(interval, timeZone);
 }
 
 @Override
 public boolean equals(Object obj) {
 if (obj == null) {
@@ -87,6 +87,7 @@ import org.elasticsearch.repositories.fs.FsRepository;
 import org.elasticsearch.repositories.uri.URLRepository;
 import org.elasticsearch.rest.BaseRestHandler;
 import org.elasticsearch.script.ScriptService;
+import org.elasticsearch.search.SearchModule;
 import org.elasticsearch.search.SearchService;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.Transport;
@@ -374,7 +375,6 @@ public final class ClusterSettings extends AbstractScopedSettings {
 BaseRestHandler.MULTI_ALLOW_EXPLICIT_INDEX,
 ClusterName.CLUSTER_NAME_SETTING,
 Client.CLIENT_TYPE_SETTING_S,
-InternalSettingsPreparer.IGNORE_SYSTEM_PROPERTIES_SETTING,
 ClusterModule.SHARDS_ALLOCATOR_TYPE_SETTING,
 EsExecutors.PROCESSORS_SETTING,
 ThreadContext.DEFAULT_HEADERS_SETTING,
@@ -397,6 +397,9 @@ public final class ClusterSettings extends AbstractScopedSettings {
 JvmGcMonitorService.ENABLED_SETTING,
 JvmGcMonitorService.REFRESH_INTERVAL_SETTING,
 JvmGcMonitorService.GC_SETTING,
+JvmGcMonitorService.GC_OVERHEAD_WARN_SETTING,
+JvmGcMonitorService.GC_OVERHEAD_INFO_SETTING,
+JvmGcMonitorService.GC_OVERHEAD_DEBUG_SETTING,
 PageCacheRecycler.LIMIT_HEAP_SETTING,
 PageCacheRecycler.WEIGHT_BYTES_SETTING,
 PageCacheRecycler.WEIGHT_INT_SETTING,
@@ -417,6 +420,7 @@ public final class ClusterSettings extends AbstractScopedSettings {
 ResourceWatcherService.ENABLED,
 ResourceWatcherService.RELOAD_INTERVAL_HIGH,
 ResourceWatcherService.RELOAD_INTERVAL_MEDIUM,
-ResourceWatcherService.RELOAD_INTERVAL_LOW
+ResourceWatcherService.RELOAD_INTERVAL_LOW,
+SearchModule.INDICES_MAX_CLAUSE_COUNT_SETTING
 )));
 }
@@ -18,19 +18,6 @@
 */
 package org.elasticsearch.common.settings;
 
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.EnumSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Objects;
-import java.util.function.BiConsumer;
-import java.util.function.Consumer;
-import java.util.function.Function;
-import java.util.regex.Pattern;
-import java.util.stream.Collectors;
-
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.action.support.ToXContentToBytes;
@@ -50,6 +37,19 @@ import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentType;
 
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.EnumSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.function.BiConsumer;
+import java.util.function.Consumer;
+import java.util.function.Function;
+import java.util.regex.Pattern;
+import java.util.stream.Collectors;
+
 /**
 * A setting. Encapsulates typical stuff like default value, parsing, and scope.
 * Some (SettingsProperty.Dynamic) can by modified at run time using the API.
@@ -504,7 +504,7 @@ public class Setting<T> extends ToXContentToBytes {
 throw new IllegalArgumentException("Failed to parse value [" + s + "] for setting [" + key + "] must be >= " + minValue);
 }
 if (value > maxValue) {
-throw new IllegalArgumentException("Failed to parse value [" + s + "] for setting [" + key + "] must be =< " + maxValue);
+throw new IllegalArgumentException("Failed to parse value [" + s + "] for setting [" + key + "] must be <= " + maxValue);
 }
 return value;
 }
@@ -572,7 +572,7 @@ public class Setting<T> extends ToXContentToBytes {
 throw new IllegalArgumentException("Failed to parse value [" + s + "] for setting [" + key + "] must be >= " + minValue);
 }
 if (value.bytes() > maxValue.bytes()) {
-throw new IllegalArgumentException("Failed to parse value [" + s + "] for setting [" + key + "] must be =< " + maxValue);
+throw new IllegalArgumentException("Failed to parse value [" + s + "] for setting [" + key + "] must be <= " + maxValue);
 }
 return value;
 }
|
@ -58,9 +58,11 @@ import java.util.Set;
|
|||
import java.util.SortedMap;
|
||||
import java.util.TreeMap;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.function.Function;
|
||||
import java.util.function.Predicate;
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import static org.elasticsearch.common.unit.ByteSizeValue.parseBytesSizeValue;
|
||||
import static org.elasticsearch.common.unit.SizeValue.parseSizeValue;
|
||||
|
@ -942,66 +944,27 @@ public final class Settings implements ToXContent {
|
|||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Puts all the properties with keys starting with the provided <tt>prefix</tt>.
|
||||
*
|
||||
* @param prefix The prefix to filter property key by
|
||||
* @param properties The properties to put
|
||||
* @return The builder
|
||||
*/
|
||||
public Builder putProperties(String prefix, Dictionary<Object, Object> properties) {
|
||||
for (Object property : Collections.list(properties.keys())) {
|
||||
String key = Objects.toString(property);
|
||||
String value = Objects.toString(properties.get(property));
|
||||
if (key.startsWith(prefix)) {
|
||||
map.put(key.substring(prefix.length()), value);
|
||||
public Builder putProperties(Map<String, String> esSettings, Predicate<String> keyPredicate, Function<String, String> keyFunction) {
|
||||
for (final Map.Entry<String, String> esSetting : esSettings.entrySet()) {
|
||||
final String key = esSetting.getKey();
|
||||
if (keyPredicate.test(key)) {
|
||||
map.put(keyFunction.apply(key), esSetting.getValue());
|
||||
}
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Puts all the properties with keys starting with the provided <tt>prefix</tt>.
|
||||
*
|
||||
* @param prefix The prefix to filter property key by
|
||||
* @param properties The properties to put
|
||||
* @return The builder
|
||||
*/
|
||||
public Builder putProperties(String prefix, Dictionary<Object, Object> properties, String ignorePrefix) {
|
||||
for (Object property : Collections.list(properties.keys())) {
|
||||
String key = Objects.toString(property);
|
||||
String value = Objects.toString(properties.get(property));
|
||||
if (key.startsWith(prefix)) {
|
||||
if (!key.startsWith(ignorePrefix)) {
|
||||
map.put(key.substring(prefix.length()), value);
|
||||
}
|
||||
}
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Runs across all the settings set on this builder and replaces <tt>${...}</tt> elements in the
|
||||
* each setting value according to the following logic:
|
||||
* <p>
|
||||
* First, tries to resolve it against a System property ({@link System#getProperty(String)}), next,
|
||||
* tries and resolve it against an environment variable ({@link System#getenv(String)}), and last, tries
|
||||
* and replace it with another setting already set on this builder.
|
||||
* Runs across all the settings set on this builder and
|
||||
* replaces <tt>${...}</tt> elements in each setting with
|
||||
* another setting already set on this builder.
|
||||
*/
|
||||
public Builder replacePropertyPlaceholders() {
|
||||
PropertyPlaceholder propertyPlaceholder = new PropertyPlaceholder("${", "}", false);
|
||||
PropertyPlaceholder.PlaceholderResolver placeholderResolver = new PropertyPlaceholder.PlaceholderResolver() {
|
||||
@Override
|
||||
public String resolvePlaceholder(String placeholderName) {
|
||||
if (placeholderName.startsWith("env.")) {
|
||||
// explicit env var prefix
|
||||
return System.getenv(placeholderName.substring("env.".length()));
|
||||
}
|
||||
String value = System.getProperty(placeholderName);
|
||||
if (value != null) {
|
||||
return value;
|
||||
}
|
||||
value = System.getenv(placeholderName);
|
||||
final String value = System.getenv(placeholderName);
|
||||
if (value != null) {
|
||||
return value;
|
||||
}
|
||||
|
@ -1010,8 +973,7 @@ public final class Settings implements ToXContent {
|
|||
|
||||
@Override
|
||||
public boolean shouldIgnoreMissing(String placeholderName) {
|
||||
// if its an explicit env var, we are ok with not having a value for it and treat it as optional
|
||||
if (placeholderName.startsWith("env.") || placeholderName.startsWith("prompt.")) {
|
||||
if (placeholderName.startsWith("prompt.")) {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
|
|
|
@@ -65,7 +65,12 @@ public class SettingsModule extends AbstractModule {
 protected void configure() {
 final IndexScopedSettings indexScopedSettings = new IndexScopedSettings(settings, new HashSet<>(this.indexSettings.values()));
 final ClusterSettings clusterSettings = new ClusterSettings(settings, new HashSet<>(this.nodeSettings.values()));
-Settings indexSettings = settings.filter((s) -> s.startsWith("index.") && clusterSettings.get(s) == null);
+Settings indexSettings = settings.filter((s) -> (s.startsWith("index.") &&
+// special case - we want to get Did you mean indices.query.bool.max_clause_count
+// which means we need to by-pass this check for this setting
+// TODO remove in 6.0!!
+"index.query.bool.max_clause_count".equals(s) == false)
+&& clusterSettings.get(s) == null);
 if (indexSettings.isEmpty() == false) {
 try {
 String separator = IntStream.range(0, 85).mapToObj(s -> "*").collect(Collectors.joining("")).trim();
@@ -1,629 +0,0 @@
-/*
-* Licensed to Elasticsearch under one or more contributor
-* license agreements. See the NOTICE file distributed with
-* this work for additional information regarding copyright
-* ownership. Elasticsearch licenses this file to you under
-* the Apache License, Version 2.0 (the "License"); you may
-* not use this file except in compliance with the License.
-* You may obtain a copy of the License at
-*
-* http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing,
-* software distributed under the License is distributed on an
-* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-* KIND, either express or implied. See the License for the
-* specific language governing permissions and limitations
-* under the License.
-*/
-package org.elasticsearch.common.util;
-
-import org.apache.lucene.store.DataInput;
-import org.apache.lucene.store.DataOutput;
-import org.apache.lucene.store.IndexInput;
-import org.apache.lucene.util.BytesRef;
-import org.apache.lucene.util.RamUsageEstimator;
-import org.elasticsearch.common.Nullable;
-import org.elasticsearch.common.hash.MurmurHash3;
-import org.elasticsearch.common.io.stream.StreamInput;
-import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.common.unit.SizeValue;
-
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.Comparator;
-
-/**
-* A bloom filter. Inspired by Guava bloom filter implementation though with some optimizations.
-*/
-public class BloomFilter {
-
-/**
-* A factory that can use different fpp based on size.
-*/
-public static class Factory {
-
-public static final Factory DEFAULT = buildDefault();
-
-private static Factory buildDefault() {
-// Some numbers:
-// 10k =0.001: 140.4kb , 10 Hashes
-// 10k =0.01 : 93.6kb , 6 Hashes
-// 100k=0.01 : 936.0kb , 6 Hashes
-// 100k=0.03 : 712.7kb , 5 Hashes
-// 500k=0.01 : 4.5mb , 6 Hashes
-// 500k=0.03 : 3.4mb , 5 Hashes
-// 500k=0.05 : 2.9mb , 4 Hashes
-// 1m=0.01 : 9.1mb , 6 Hashes
-// 1m=0.03 : 6.9mb , 5 Hashes
-// 1m=0.05 : 5.9mb , 4 Hashes
-// 5m=0.01 : 45.7mb , 6 Hashes
-// 5m=0.03 : 34.8mb , 5 Hashes
-// 5m=0.05 : 29.7mb , 4 Hashes
-// 50m=0.01 : 457.0mb , 6 Hashes
-// 50m=0.03 : 297.3mb , 4 Hashes
-// 50m=0.10 : 228.5mb , 3 Hashes
-return buildFromString("10k=0.01,1m=0.03");
-}
-
-/**
-* Supports just passing fpp, as in "0.01", and also ranges, like "50k=0.01,1m=0.05". If
-* its null, returns {@link #buildDefault()}.
-*/
-public static Factory buildFromString(@Nullable String config) {
-if (config == null) {
-return buildDefault();
-}
-String[] sEntries = config.split(",");
-if (sEntries.length == 0) {
-if (config.length() > 0) {
-return new Factory(new Entry[]{new Entry(0, Double.parseDouble(config))});
-}
-return buildDefault();
-}
-Entry[] entries = new Entry[sEntries.length];
-for (int i = 0; i < sEntries.length; i++) {
-int index = sEntries[i].indexOf('=');
-entries[i] = new Entry(
-(int) SizeValue.parseSizeValue(sEntries[i].substring(0, index).trim()).singles(),
-Double.parseDouble(sEntries[i].substring(index + 1).trim())
-);
-}
-return new Factory(entries);
-}
-
-private final Entry[] entries;
-
-public Factory(Entry[] entries) {
-this.entries = entries;
-// the order is from the upper most expected insertions to the lowest
-Arrays.sort(this.entries, new Comparator<Entry>() {
-@Override
-public int compare(Entry o1, Entry o2) {
-return o2.expectedInsertions - o1.expectedInsertions;
-}
-});
-}
-
-public BloomFilter createFilter(int expectedInsertions) {
-for (Entry entry : entries) {
-if (expectedInsertions > entry.expectedInsertions) {
-return BloomFilter.create(expectedInsertions, entry.fpp);
-}
-}
-return BloomFilter.create(expectedInsertions, 0.03);
-}
-
-public static class Entry {
-public final int expectedInsertions;
-public final double fpp;
-
-Entry(int expectedInsertions, double fpp) {
-this.expectedInsertions = expectedInsertions;
-this.fpp = fpp;
-}
-}
-}
-
-/**
-* Creates a bloom filter based on the with the expected number
-* of insertions and expected false positive probability.
-*
-* @param expectedInsertions the number of expected insertions to the constructed
-* @param fpp the desired false positive probability (must be positive and less than 1.0)
-*/
-public static BloomFilter create(int expectedInsertions, double fpp) {
-return create(expectedInsertions, fpp, -1);
-}
-
-/**
-* Creates a bloom filter based on the expected number of insertions, expected false positive probability,
-* and number of hash functions.
-*
-* @param expectedInsertions the number of expected insertions to the constructed
-* @param fpp the desired false positive probability (must be positive and less than 1.0)
-* @param numHashFunctions the number of hash functions to use (must be less than or equal to 255)
-*/
-public static BloomFilter create(int expectedInsertions, double fpp, int numHashFunctions) {
-if (expectedInsertions == 0) {
-expectedInsertions = 1;
-}
-/*
-* TODO(user): Put a warning in the javadoc about tiny fpp values,
-* since the resulting size is proportional to -log(p), but there is not
-* much of a point after all, e.g. optimalM(1000, 0.0000000000000001) = 76680
-* which is less that 10kb. Who cares!
-*/
-long numBits = optimalNumOfBits(expectedInsertions, fpp);
-
-// calculate the optimal number of hash functions
-if (numHashFunctions == -1) {
-numHashFunctions = optimalNumOfHashFunctions(expectedInsertions, numBits);
-}
-
-try {
-return new BloomFilter(new BitArray(numBits), numHashFunctions, Hashing.DEFAULT);
-} catch (IllegalArgumentException e) {
-throw new IllegalArgumentException("Could not create BloomFilter of " + numBits + " bits", e);
-}
-}
-
-public static void skipBloom(IndexInput in) throws IOException {
-int version = in.readInt(); // we do nothing with this now..., defaults to 0
-final int numLongs = in.readInt();
-in.seek(in.getFilePointer() + (numLongs * 8) + 4 + 4); // filter + numberOfHashFunctions + hashType
-}
-
-public static BloomFilter deserialize(DataInput in) throws IOException {
-int version = in.readInt(); // we do nothing with this now..., defaults to 0
-int numLongs = in.readInt();
-long[] data = new long[numLongs];
-for (int i = 0; i < numLongs; i++) {
-data[i] = in.readLong();
-}
-int numberOfHashFunctions = in.readInt();
-int hashType = in.readInt();
-return new BloomFilter(new BitArray(data), numberOfHashFunctions, Hashing.fromType(hashType));
-}
-
-public static void serilaize(BloomFilter filter, DataOutput out) throws IOException {
-out.writeInt(0); // version
-BitArray bits = filter.bits;
-out.writeInt(bits.data.length);
-for (long l : bits.data) {
-out.writeLong(l);
-}
-out.writeInt(filter.numHashFunctions);
-out.writeInt(filter.hashing.type()); // hashType
-}
-
-public static BloomFilter readFrom(StreamInput in) throws IOException {
-int version = in.readVInt(); // we do nothing with this now..., defaults to 0
-int numLongs = in.readVInt();
-long[] data = new long[numLongs];
-for (int i = 0; i < numLongs; i++) {
-data[i] = in.readLong();
-}
-int numberOfHashFunctions = in.readVInt();
-int hashType = in.readVInt(); // again, nothing to do now...
-return new BloomFilter(new BitArray(data), numberOfHashFunctions, Hashing.fromType(hashType));
-}
-
-public static void writeTo(BloomFilter filter, StreamOutput out) throws IOException {
-out.writeVInt(0); // version
-BitArray bits = filter.bits;
-out.writeVInt(bits.data.length);
-for (long l : bits.data) {
-out.writeLong(l);
-}
-out.writeVInt(filter.numHashFunctions);
-out.writeVInt(filter.hashing.type()); // hashType
-}
-
-/**
-* The bit set of the BloomFilter (not necessarily power of 2!)
-*/
-final BitArray bits;
-/**
-* Number of hashes per element
-*/
-final int numHashFunctions;
-
-final Hashing hashing;
-
-BloomFilter(BitArray bits, int numHashFunctions, Hashing hashing) {
-this.bits = bits;
-this.numHashFunctions = numHashFunctions;
-this.hashing = hashing;
-/*
-* This only exists to forbid BFs that cannot use the compact persistent representation.
-* If it ever throws, at a user who was not intending to use that representation, we should
-* reconsider
-*/
-if (numHashFunctions > 255) {
-throw new IllegalArgumentException("Currently we don't allow BloomFilters that would use more than 255 hash functions");
-}
-}
-
-public boolean put(BytesRef value) {
-return hashing.put(value, numHashFunctions, bits);
-}
-
-public boolean mightContain(BytesRef value) {
-return hashing.mightContain(value, numHashFunctions, bits);
-}
-
-public int getNumHashFunctions() {
-return this.numHashFunctions;
-}
-
-public long getSizeInBytes() {
-return bits.ramBytesUsed();
-}
-
-@Override
-public int hashCode() {
-return bits.hashCode() + numHashFunctions;
-}
-
-/*
-* Cheat sheet:
-*
-* m: total bits
-* n: expected insertions
-* b: m/n, bits per insertion
-
-* p: expected false positive probability
-*
-* 1) Optimal k = b * ln2
-* 2) p = (1 - e ^ (-kn/m))^k
-* 3) For optimal k: p = 2 ^ (-k) ~= 0.6185^b
-* 4) For optimal k: m = -nlnp / ((ln2) ^ 2)
-*/
-
-/**
-* Computes the optimal k (number of hashes per element inserted in Bloom filter), given the
-* expected insertions and total number of bits in the Bloom filter.
-* <p>
-* See http://en.wikipedia.org/wiki/File:Bloom_filter_fp_probability.svg for the formula.
-*
-* @param n expected insertions (must be positive)
-* @param m total number of bits in Bloom filter (must be positive)
-*/
-static int optimalNumOfHashFunctions(long n, long m) {
-return Math.max(1, (int) Math.round(m / n * Math.log(2)));
-}
-
-/**
-* Computes m (total bits of Bloom filter) which is expected to achieve, for the specified
-* expected insertions, the required false positive probability.
-* <p>
-* See http://en.wikipedia.org/wiki/Bloom_filter#Probability_of_false_positives for the formula.
-*
-* @param n expected insertions (must be positive)
-* @param p false positive rate (must be 0 < p < 1)
-*/
-static long optimalNumOfBits(long n, double p) {
-if (p == 0) {
-p = Double.MIN_VALUE;
-}
-return (long) (-n * Math.log(p) / (Math.log(2) * Math.log(2)));
-}
-
-// Note: We use this instead of java.util.BitSet because we need access to the long[] data field
-static final class BitArray {
-final long[] data;
-final long bitSize;
-long bitCount;
-
-BitArray(long bits) {
-this(new long[size(bits)]);
-}
-
-private static int size(long bits) {
-long quotient = bits / 64;
-long remainder = bits - quotient * 64;
-return Math.toIntExact(remainder == 0 ? quotient : 1 + quotient);
-}
-
-// Used by serialization
-BitArray(long[] data) {
-this.data = data;
-long bitCount = 0;
-for (long value : data) {
-bitCount += Long.bitCount(value);
-}
-this.bitCount = bitCount;
-this.bitSize = data.length * Long.SIZE;
-}
-
-/** Returns true if the bit changed value. */
-boolean set(long index) {
-if (!get(index)) {
-data[(int) (index >>> 6)] |= (1L << index);
-bitCount++;
-return true;
-}
-return false;
-}
-
-boolean get(long index) {
-return (data[(int) (index >>> 6)] & (1L << index)) != 0;
-}
-
-/** Number of bits */
-long bitSize() {
-return bitSize;
-}
-
-/** Number of set bits (1s) */
-long bitCount() {
-return bitCount;
-}
-
-BitArray copy() {
-return new BitArray(data.clone());
-}
-
-/** Combines the two BitArrays using bitwise OR. */
-void putAll(BitArray array) {
-bitCount = 0;
-for (int i = 0; i < data.length; i++) {
-data[i] |= array.data[i];
-bitCount += Long.bitCount(data[i]);
-}
-}
-
-@Override public boolean equals(Object o) {
-if (o instanceof BitArray) {
-BitArray bitArray = (BitArray) o;
-return Arrays.equals(data, bitArray.data);
-}
-return false;
-}
-
-@Override public int hashCode() {
-return Arrays.hashCode(data);
-}
-
-public long ramBytesUsed() {
-return Long.BYTES * data.length + RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + 16;
-}
-}
-
-static enum Hashing {
-
-V0() {
-@Override
-protected boolean put(BytesRef value, int numHashFunctions, BitArray bits) {
-long bitSize = bits.bitSize();
-long hash64 = hash3_x64_128(value.bytes, value.offset, value.length, 0);
-int hash1 = (int) hash64;
-int hash2 = (int) (hash64 >>> 32);
-boolean bitsChanged = false;
-for (int i = 1; i <= numHashFunctions; i++) {
-int nextHash = hash1 + i * hash2;
-if (nextHash < 0) {
-nextHash = ~nextHash;
-}
-bitsChanged |= bits.set(nextHash % bitSize);
-}
-return bitsChanged;
-}
-
-@Override
-protected boolean mightContain(BytesRef value, int numHashFunctions, BitArray bits) {
-long bitSize = bits.bitSize();
-long hash64 = hash3_x64_128(value.bytes, value.offset, value.length, 0);
-int hash1 = (int) hash64;
-int hash2 = (int) (hash64 >>> 32);
-for (int i = 1; i <= numHashFunctions; i++) {
-int nextHash = hash1 + i * hash2;
-if (nextHash < 0) {
-nextHash = ~nextHash;
-}
-if (!bits.get(nextHash % bitSize)) {
-return false;
-}
-}
-return true;
-}
-
-@Override
-protected int type() {
-return 0;
-}
-},
-V1() {
-@Override
-protected boolean put(BytesRef value, int numHashFunctions, BitArray bits) {
-long bitSize = bits.bitSize();
-MurmurHash3.Hash128 hash128 = MurmurHash3.hash128(value.bytes, value.offset, value.length, 0, new MurmurHash3.Hash128());
-
-boolean bitsChanged = false;
-long combinedHash = hash128.h1;
-for (int i = 0; i < numHashFunctions; i++) {
-// Make the combined hash positive and indexable
-bitsChanged |= bits.set((combinedHash & Long.MAX_VALUE) % bitSize);
-combinedHash += hash128.h2;
-}
-return bitsChanged;
-}
-
-@Override
-protected boolean mightContain(BytesRef value, int numHashFunctions, BitArray bits) {
-long bitSize = bits.bitSize();
-MurmurHash3.Hash128 hash128 = MurmurHash3.hash128(value.bytes, value.offset, value.length, 0, new MurmurHash3.Hash128());
-
-long combinedHash = hash128.h1;
-for (int i = 0; i < numHashFunctions; i++) {
-// Make the combined hash positive and indexable
-if (!bits.get((combinedHash & Long.MAX_VALUE) % bitSize)) {
-return false;
-}
-combinedHash += hash128.h2;
-}
-return true;
-}
-
-@Override
-protected int type() {
-return 1;
-}
-}
-;
-
-protected abstract boolean put(BytesRef value, int numHashFunctions, BitArray bits);
-
-protected abstract boolean mightContain(BytesRef value, int numHashFunctions, BitArray bits);
-
-protected abstract int type();
-
-public static final Hashing DEFAULT = Hashing.V1;
-
-public static Hashing fromType(int type) {
-if (type == 0) {
-return Hashing.V0;
-} if (type == 1) {
-return Hashing.V1;
-} else {
-throw new IllegalArgumentException("no hashing type matching " + type);
-}
-}
-}
-
-// START : MURMUR 3_128 USED FOR Hashing.V0
-// NOTE: don't replace this code with the o.e.common.hashing.MurmurHash3 method which returns a different hash
-
-protected static long getblock(byte[] key, int offset, int index) {
-int i_8 = index << 3;
-int blockOffset = offset + i_8;
-return ((long) key[blockOffset + 0] & 0xff) + (((long) key[blockOffset + 1] & 0xff) << 8) +
-(((long) key[blockOffset + 2] & 0xff) << 16) + (((long) key[blockOffset + 3] & 0xff) << 24) +
-(((long) key[blockOffset + 4] & 0xff) << 32) + (((long) key[blockOffset + 5] & 0xff) << 40) +
-(((long) key[blockOffset + 6] & 0xff) << 48) + (((long) key[blockOffset + 7] & 0xff) << 56);
-}
-
-protected static long rotl64(long v, int n) {
-return ((v << n) | (v >>> (64 - n)));
-}
-
-protected static long fmix(long k) {
-k ^= k >>> 33;
-k *= 0xff51afd7ed558ccdL;
-k ^= k >>> 33;
-k *= 0xc4ceb9fe1a85ec53L;
-k ^= k >>> 33;
-
-return k;
-}
-
-@SuppressWarnings("fallthrough") // Uses fallthrough to implement a well know hashing algorithm
-public static long hash3_x64_128(byte[] key, int offset, int length, long seed) {
-final int nblocks = length >> 4; // Process as 128-bit blocks.
-
-long h1 = seed;
-long h2 = seed;
-
-long c1 = 0x87c37b91114253d5L;
-long c2 = 0x4cf5ad432745937fL;
-
-//----------
-// body
-
-for (int i = 0; i < nblocks; i++) {
-long k1 = getblock(key, offset, i * 2 + 0);
-long k2 = getblock(key, offset, i * 2 + 1);
-
-k1 *= c1;
-k1 = rotl64(k1, 31);
-k1 *= c2;
-h1 ^= k1;
-
-h1 = rotl64(h1, 27);
-h1 += h2;
-h1 = h1 * 5 + 0x52dce729;
-
-k2 *= c2;
-k2 = rotl64(k2, 33);
-k2 *= c1;
-h2 ^= k2;
-
-h2 = rotl64(h2, 31);
-h2 += h1;
-h2 = h2 * 5 + 0x38495ab5;
-}
-
-//----------
-// tail
-
-// Advance offset to the unprocessed tail of the data.
-offset += nblocks * 16;
-
-long k1 = 0;
-long k2 = 0;
-
-switch (length & 15) {
-case 15:
-k2 ^= ((long) key[offset + 14]) << 48;
-case 14:
-k2 ^= ((long) key[offset + 13]) << 40;
-case 13:
-k2 ^= ((long) key[offset + 12]) << 32;
-case 12:
-k2 ^= ((long) key[offset + 11]) << 24;
-case 11:
-k2 ^= ((long) key[offset + 10]) << 16;
-case 10:
-k2 ^= ((long) key[offset + 9]) << 8;
-case 9:
-k2 ^= ((long) key[offset + 8]) << 0;
-k2 *= c2;
-k2 = rotl64(k2, 33);
-k2 *= c1;
-h2 ^= k2;
-
-case 8:
-k1 ^= ((long) key[offset + 7]) << 56;
-case 7:
-k1 ^= ((long) key[offset + 6]) << 48;
-case 6:
-k1 ^= ((long) key[offset + 5]) << 40;
-case 5:
-k1 ^= ((long) key[offset + 4]) << 32;
-case 4:
-k1 ^= ((long) key[offset + 3]) << 24;
-case 3:
-k1 ^= ((long) key[offset + 2]) << 16;
-case 2:
-k1 ^= ((long) key[offset + 1]) << 8;
-case 1:
-k1 ^= (key[offset]);
-k1 *= c1;
-k1 = rotl64(k1, 31);
-k1 *= c2;
-h1 ^= k1;
-}
-
-//----------
-// finalization
-
-h1 ^= length;
-h2 ^= length;
-
-h1 += h2;
-h2 += h1;
-
-h1 = fmix(h1);
-h2 = fmix(h2);
-
-h1 += h2;
-h2 += h1;
-
-//return (new long[]{h1, h2});
-// SAME AS GUAVA, they take the first long out of the 128bit
-return h1;
-}
-
-// END: MURMUR 3_128
-}
@@ -321,7 +321,7 @@ public final class AnalysisRegistry implements Closeable {
 if (currentSettings.get("tokenizer") != null) {
 factory = (T) new CustomAnalyzerProvider(settings, name, currentSettings);
 } else {
-throw new IllegalArgumentException(toBuild + " [" + name + "] must have a type associated with it");
+throw new IllegalArgumentException(toBuild + " [" + name + "] must specify either an analyzer type, or a tokenizer");
 }
 } else if (typeName.equals("custom")) {
 factory = (T) new CustomAnalyzerProvider(settings, name, currentSettings);
@@ -335,7 +335,7 @@ public final class AnalysisRegistry implements Closeable {
 factories.put(name, factory);
 } else {
 if (typeName == null) {
-throw new IllegalArgumentException(toBuild + " [" + name + "] must have a type associated with it");
+throw new IllegalArgumentException(toBuild + " [" + name + "] must specify either an analyzer type, or a tokenizer");
 }
 AnalysisModule.AnalysisProvider<T> type = providerMap.get(typeName);
 if (type == null) {
@@ -33,13 +33,11 @@ import org.apache.lucene.analysis.util.CharArraySet;
public final class FingerprintAnalyzer extends Analyzer {
    private final char separator;
    private final int maxOutputSize;
    private final boolean preserveOriginal;
    private final CharArraySet stopWords;

    public FingerprintAnalyzer(CharArraySet stopWords, char separator, int maxOutputSize, boolean preserveOriginal) {
    public FingerprintAnalyzer(CharArraySet stopWords, char separator, int maxOutputSize) {
        this.separator = separator;
        this.maxOutputSize = maxOutputSize;
        this.preserveOriginal = preserveOriginal;
        this.stopWords = stopWords;
    }

@@ -48,7 +46,7 @@ public final class FingerprintAnalyzer extends Analyzer {
        final Tokenizer tokenizer = new StandardTokenizer();
        TokenStream stream = tokenizer;
        stream = new LowerCaseFilter(stream);
        stream = new ASCIIFoldingFilter(stream, preserveOriginal);
        stream = new ASCIIFoldingFilter(stream, false);
        stream = new StopFilter(stream, stopWords);
        stream = new FingerprintFilter(stream, maxOutputSize, separator);
        return new TokenStreamComponents(tokenizer, stream);
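A minimal consumption sketch for the analyzer above, using the standard Lucene token-stream protocol; the FingerprintAnalyzer import is assumed to resolve to the class in this hunk, and the new three-argument constructor is used since preserveOriginal is no longer a knob.

--------------------------------
import java.io.IOException;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.analysis.util.CharArraySet;

public class FingerprintDemo {
    public static void main(String[] args) throws IOException {
        Analyzer analyzer = new FingerprintAnalyzer(CharArraySet.EMPTY_SET, ' ', 255);
        try (TokenStream ts = analyzer.tokenStream("f", "Zürich and zurich AND Zurich")) {
            CharTermAttribute term = ts.addAttribute(CharTermAttribute.class);
            ts.reset();
            while (ts.incrementToken()) {
                // The fingerprint filter emits a single lowercased, ASCII-folded,
                // deduplicated, sorted token; here: "and zurich".
                System.out.println(term.toString());
            }
            ts.end();
        }
    }
}
--------------------------------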
@@ -34,10 +34,8 @@ import org.elasticsearch.index.IndexSettings;
public class FingerprintAnalyzerProvider extends AbstractIndexAnalyzerProvider<Analyzer> {

    public static ParseField MAX_OUTPUT_SIZE = FingerprintTokenFilterFactory.MAX_OUTPUT_SIZE;
    public static ParseField PRESERVE_ORIGINAL = ASCIIFoldingTokenFilterFactory.PRESERVE_ORIGINAL;

    public static int DEFAULT_MAX_OUTPUT_SIZE = FingerprintTokenFilterFactory.DEFAULT_MAX_OUTPUT_SIZE;
    public static boolean DEFAULT_PRESERVE_ORIGINAL = ASCIIFoldingTokenFilterFactory.DEFAULT_PRESERVE_ORIGINAL;
    public static CharArraySet DEFAULT_STOP_WORDS = CharArraySet.EMPTY_SET;

    private final FingerprintAnalyzer analyzer;

@@ -47,10 +45,9 @@ public class FingerprintAnalyzerProvider extends AbstractIndexAnalyzerProvider<A

        char separator = FingerprintTokenFilterFactory.parseSeparator(settings);
        int maxOutputSize = settings.getAsInt(MAX_OUTPUT_SIZE.getPreferredName(), DEFAULT_MAX_OUTPUT_SIZE);
        boolean preserveOriginal = settings.getAsBoolean(PRESERVE_ORIGINAL.getPreferredName(), DEFAULT_PRESERVE_ORIGINAL);
        CharArraySet stopWords = Analysis.parseStopWords(env, settings, DEFAULT_STOP_WORDS);

        this.analyzer = new FingerprintAnalyzer(stopWords, separator, maxOutputSize, preserveOriginal);
        this.analyzer = new FingerprintAnalyzer(stopWords, separator, maxOutputSize);
    }

    @Override
@@ -267,7 +267,7 @@ public class MatchQueryBuilder extends AbstractQueryBuilder<MatchQueryBuilder> {
     */
    public MatchQueryBuilder prefixLength(int prefixLength) {
        if (prefixLength < 0) {
            throw new IllegalArgumentException("No negative prefix length allowed.");
            throw new IllegalArgumentException("[" + NAME + "] requires prefix length to be non-negative.");
        }
        this.prefixLength = prefixLength;
        return this;

@@ -284,8 +284,8 @@ public class MatchQueryBuilder extends AbstractQueryBuilder<MatchQueryBuilder> {
     * When using fuzzy or prefix type query, the number of term expansions to use.
     */
    public MatchQueryBuilder maxExpansions(int maxExpansions) {
        if (maxExpansions < 0) {
            throw new IllegalArgumentException("No negative maxExpansions allowed.");
        if (maxExpansions <= 0) {
            throw new IllegalArgumentException("[" + NAME + "] requires maxExpansions to be positive.");
        }
        this.maxExpansions = maxExpansions;
        return this;
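The second hunk tightens the contract, not just the message: maxExpansions now rejects zero as well as negatives. A small sketch of the new behavior:

--------------------------------
import org.elasticsearch.index.query.MatchQueryBuilder;

public class MatchValidationDemo {
    public static void main(String[] args) {
        MatchQueryBuilder builder = new MatchQueryBuilder("title", "elasticsearch");
        builder.prefixLength(0);   // fine: zero is non-negative
        builder.maxExpansions(50); // fine: strictly positive
        try {
            builder.maxExpansions(0); // rejected since this change
        } catch (IllegalArgumentException e) {
            System.out.println(e.getMessage()); // "[match] requires maxExpansions to be positive."
        }
    }
}
--------------------------------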
@@ -576,16 +576,22 @@ public class IndexShard extends AbstractIndexShardComponent {
            long bytes = getEngine().getIndexBufferRAMBytesUsed();
            writingBytes.addAndGet(bytes);
            try {
                logger.debug("refresh with source [{}] indexBufferRAMBytesUsed [{}]", source, new ByteSizeValue(bytes));
                if (logger.isTraceEnabled()) {
                    logger.trace("refresh with source [{}] indexBufferRAMBytesUsed [{}]", source, new ByteSizeValue(bytes));
                }
                long time = System.nanoTime();
                getEngine().refresh(source);
                refreshMetric.inc(System.nanoTime() - time);
            } finally {
                logger.debug("remove [{}] writing bytes for shard [{}]", new ByteSizeValue(bytes), shardId());
                if (logger.isTraceEnabled()) {
                    logger.trace("remove [{}] writing bytes for shard [{}]", new ByteSizeValue(bytes), shardId());
                }
                writingBytes.addAndGet(-bytes);
            }
        } else {
            logger.debug("refresh with source [{}]", source);
            if (logger.isTraceEnabled()) {
                logger.trace("refresh with source [{}]", source);
            }
            long time = System.nanoTime();
            getEngine().refresh(source);
            refreshMetric.inc(System.nanoTime() - time);
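Why guard a parameterized log call at all? The placeholders defer only the string formatting; argument expressions such as new ByteSizeValue(bytes) are still evaluated on every call. A fragment reusing the names from the hunk:

--------------------------------
// Parameterized logging defers only the formatting; the ByteSizeValue is
// allocated on every refresh even when TRACE is disabled:
logger.trace("indexBufferRAMBytesUsed [{}]", new ByteSizeValue(bytes));

// Guarding the call defers the argument construction as well:
if (logger.isTraceEnabled()) {
    logger.trace("indexBufferRAMBytesUsed [{}]", new ByteSizeValue(bytes));
}
--------------------------------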
@@ -39,6 +39,7 @@ import java.nio.file.OpenOption;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.locks.ReentrantLock;

public class TranslogWriter extends BaseTranslogReader implements Closeable {

@@ -60,7 +61,8 @@ public class TranslogWriter extends BaseTranslogReader implements Closeable {
    private volatile long totalOffset;

    protected final AtomicBoolean closed = new AtomicBoolean(false);

    // lock order synchronized(syncLock) -> synchronized(this)
    private final Object syncLock = new Object();

    public TranslogWriter(ShardId shardId, long generation, FileChannel channel, Path path, ByteSizeValue bufferSize) throws IOException {
        super(generation, channel, path, channel.position());

@@ -146,23 +148,7 @@ public class TranslogWriter extends BaseTranslogReader implements Closeable {
     * raising the exception.
     */
    public void sync() throws IOException {
        if (syncNeeded()) {
            synchronized (this) {
                ensureOpen();
                final long offsetToSync;
                final int opsCounter;
                try {
                    outputStream.flush();
                    offsetToSync = totalOffset;
                    opsCounter = operationCounter;
                    checkpoint(offsetToSync, opsCounter, generation, channel, path);
                } catch (Throwable ex) {
                    closeWithTragicEvent(ex);
                    throw ex;
                }
                lastSyncedOffset = offsetToSync;
            }
        }
        syncUpTo(Long.MAX_VALUE);
    }

    /**

@@ -229,9 +215,38 @@ public class TranslogWriter extends BaseTranslogReader implements Closeable {
     * @return <code>true</code> if this call caused an actual sync operation
     */
    public boolean syncUpTo(long offset) throws IOException {
        if (lastSyncedOffset < offset) {
            sync();
            return true;
        if (lastSyncedOffset < offset && syncNeeded()) {
            synchronized (syncLock) { // only one sync/checkpoint should happen concurrently but we wait
                if (lastSyncedOffset < offset && syncNeeded()) {
                    // double checked locking - we don't want to fsync unless we have to and now that we have
                    // the lock we should check again since if this code is busy we might have fsynced enough already
                    final long offsetToSync;
                    final int opsCounter;
                    synchronized (this) {
                        ensureOpen();
                        try {
                            outputStream.flush();
                            offsetToSync = totalOffset;
                            opsCounter = operationCounter;
                        } catch (Throwable ex) {
                            closeWithTragicEvent(ex);
                            throw ex;
                        }
                    }
                    // now do the actual fsync outside of the synchronized block such that
                    // we can continue writing to the buffer etc.
                    try {
                        channel.force(false);
                        writeCheckpoint(offsetToSync, opsCounter, path.getParent(), generation, StandardOpenOption.WRITE);
                    } catch (Throwable ex) {
                        closeWithTragicEvent(ex);
                        throw ex;
                    }
                    assert lastSyncedOffset <= offsetToSync : "illegal state: " + lastSyncedOffset + " <= " + offsetToSync;
                    lastSyncedOffset = offsetToSync; // write protected by syncLock
                    return true;
                }
            }
        }
        return false;
    }

@@ -254,11 +269,6 @@ public class TranslogWriter extends BaseTranslogReader implements Closeable {
        Channels.readFromFileChannelWithEofException(channel, position, targetBuffer);
    }

    private synchronized void checkpoint(long lastSyncPosition, int operationCounter, long generation, FileChannel translogFileChannel, Path translogFilePath) throws IOException {
        translogFileChannel.force(false);
        writeCheckpoint(lastSyncPosition, operationCounter, translogFilePath.getParent(), generation, StandardOpenOption.WRITE);
    }

    private static void writeCheckpoint(long syncPosition, int numOperations, Path translogFile, long generation, OpenOption... options) throws IOException {
        final Path checkpointFile = translogFile.resolve(Translog.CHECKPOINT_FILE_NAME);
        Checkpoint checkpoint = new Checkpoint(syncPosition, numOperations, generation);

@@ -269,7 +279,7 @@ public class TranslogWriter extends BaseTranslogReader implements Closeable {

        static final ChannelFactory DEFAULT = new ChannelFactory();

        // only for testing until we have a disk-full FileSystemt
        // only for testing until we have a disk-full FileSystem
        public FileChannel open(Path file) throws IOException {
            return FileChannel.open(file, StandardOpenOption.WRITE, StandardOpenOption.READ, StandardOpenOption.CREATE_NEW);
        }
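The rewritten syncUpTo is double-checked locking around an fsync: the cheap lastSyncedOffset comparison is repeated after acquiring syncLock, because a concurrent caller may already have synced past the requested offset, letting this thread return without touching the disk. A stripped-down, self-contained sketch of that batching idea (field names follow the hunk; everything else is illustrative, not the real TranslogWriter):

--------------------------------
import java.io.IOException;

// Many concurrent writers ask for durability up to some offset; only one of
// them pays for the fsync, the rest piggyback on its result.
class BatchedSyncer {
    private volatile long lastSyncedOffset = 0;
    private volatile long totalOffset = 0;
    private final Object syncLock = new Object();

    boolean syncUpTo(long offset) throws IOException {
        if (lastSyncedOffset < offset) {          // cheap, lock-free first check
            synchronized (syncLock) {
                if (lastSyncedOffset < offset) {  // re-check: someone may have synced already
                    long offsetToSync = totalOffset; // capture before the slow part
                    force();                         // the expensive fsync, done once
                    lastSyncedOffset = offsetToSync; // write protected by syncLock
                    return true;
                }
            }
        }
        return false;  // durability already provided by another thread's fsync
    }

    void force() throws IOException {
        // channel.force(false) in the real code
    }
}
--------------------------------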
@@ -396,12 +396,14 @@ public class FsInfo implements Iterable<FsInfo.Path>, Writeable, ToXContent {
                builder.endObject();
            }
            builder.endArray();

            builder.startObject("total");
            builder.field(OPERATIONS, totalOperations);
            builder.field(READ_OPERATIONS, totalReadOperations);
            builder.field(WRITE_OPERATIONS, totalWriteOperations);
            builder.field(READ_KILOBYTES, totalReadKilobytes);
            builder.field(WRITE_KILOBYTES, totalWriteKilobytes);
            builder.endObject();
        }
        return builder;
    }
@@ -31,6 +31,7 @@ import org.elasticsearch.monitor.jvm.JvmStats.GarbageCollector;
import org.elasticsearch.threadpool.ThreadPool;

import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.ScheduledFuture;

@@ -45,6 +46,7 @@ public class JvmGcMonitorService extends AbstractLifecycleComponent<JvmGcMonitor
    private final boolean enabled;
    private final TimeValue interval;
    private final Map<String, GcThreshold> gcThresholds;
    private final GcOverheadThreshold gcOverheadThreshold;

    private volatile ScheduledFuture scheduledFuture;

@@ -57,6 +59,27 @@ public class JvmGcMonitorService extends AbstractLifecycleComponent<JvmGcMonitor
    private static String GC_COLLECTOR_PREFIX = "monitor.jvm.gc.collector.";
    public final static Setting<Settings> GC_SETTING = Setting.groupSetting(GC_COLLECTOR_PREFIX, Property.NodeScope);

    public final static Setting<Integer> GC_OVERHEAD_WARN_SETTING =
        Setting.intSetting("monitor.jvm.gc.overhead.warn", 50, 0, 100, Property.NodeScope);
    public final static Setting<Integer> GC_OVERHEAD_INFO_SETTING =
        Setting.intSetting("monitor.jvm.gc.overhead.info", 25, 0, 100, Property.NodeScope);
    public final static Setting<Integer> GC_OVERHEAD_DEBUG_SETTING =
        Setting.intSetting("monitor.jvm.gc.overhead.debug", 10, 0, 100, Property.NodeScope);

    static class GcOverheadThreshold {
        final int warnThreshold;
        final int infoThreshold;
        final int debugThreshold;

        public GcOverheadThreshold(final int warnThreshold, final int infoThreshold, final int debugThreshold) {
            this.warnThreshold = warnThreshold;
            this.infoThreshold = infoThreshold;
            this.debugThreshold = debugThreshold;
        }
    }

    static class GcThreshold {
        public final String name;
        public final long warnThreshold;

@@ -102,7 +125,42 @@ public class JvmGcMonitorService extends AbstractLifecycleComponent<JvmGcMonitor
        gcThresholds.putIfAbsent("default", new GcThreshold("default", 10000, 5000, 2000));
        this.gcThresholds = unmodifiableMap(gcThresholds);

        logger.debug("enabled [{}], interval [{}], gc_threshold [{}]", enabled, interval, this.gcThresholds);
        if (GC_OVERHEAD_WARN_SETTING.get(settings) <= GC_OVERHEAD_INFO_SETTING.get(settings)) {
            final String message =
                String.format(
                    Locale.ROOT,
                    "[%s] must be greater than [%s] [%d] but was [%d]",
                    GC_OVERHEAD_WARN_SETTING.getKey(),
                    GC_OVERHEAD_INFO_SETTING.getKey(),
                    GC_OVERHEAD_INFO_SETTING.get(settings),
                    GC_OVERHEAD_WARN_SETTING.get(settings));
            throw new IllegalArgumentException(message);
        }
        if (GC_OVERHEAD_INFO_SETTING.get(settings) <= GC_OVERHEAD_DEBUG_SETTING.get(settings)) {
            final String message =
                String.format(
                    Locale.ROOT,
                    "[%s] must be greater than [%s] [%d] but was [%d]",
                    GC_OVERHEAD_INFO_SETTING.getKey(),
                    GC_OVERHEAD_DEBUG_SETTING.getKey(),
                    GC_OVERHEAD_DEBUG_SETTING.get(settings),
                    GC_OVERHEAD_INFO_SETTING.get(settings));
            throw new IllegalArgumentException(message);
        }

        this.gcOverheadThreshold = new GcOverheadThreshold(
            GC_OVERHEAD_WARN_SETTING.get(settings),
            GC_OVERHEAD_INFO_SETTING.get(settings),
            GC_OVERHEAD_DEBUG_SETTING.get(settings));

        logger.debug(
            "enabled [{}], interval [{}], gc_threshold [{}], overhead [{}, {}, {}]",
            this.enabled,
            this.interval,
            this.gcThresholds,
            this.gcOverheadThreshold.warnThreshold,
            this.gcOverheadThreshold.infoThreshold,
            this.gcOverheadThreshold.debugThreshold);
    }

    private static TimeValue getValidThreshold(Settings settings, String key, String level) {

@@ -120,15 +178,12 @@ public class JvmGcMonitorService extends AbstractLifecycleComponent<JvmGcMonitor
        return GC_COLLECTOR_PREFIX + key + "." + level;
    }

    private static final String LOG_MESSAGE =
        "[gc][{}][{}][{}] duration [{}], collections [{}]/[{}], total [{}]/[{}], memory [{}]->[{}]/[{}], all_pools {}";

    @Override
    protected void doStart() {
        if (!enabled) {
            return;
        }
        scheduledFuture = threadPool.scheduleWithFixedDelay(new JvmMonitor(gcThresholds) {
        scheduledFuture = threadPool.scheduleWithFixedDelay(new JvmMonitor(gcThresholds, gcOverheadThreshold) {
            @Override
            void onMonitorFailure(Throwable t) {
                logger.debug("failed to monitor", t);

@@ -138,9 +193,17 @@ public class JvmGcMonitorService extends AbstractLifecycleComponent<JvmGcMonitor
            void onSlowGc(final Threshold threshold, final long seq, final SlowGcEvent slowGcEvent) {
                logSlowGc(logger, threshold, seq, slowGcEvent, JvmGcMonitorService::buildPools);
            }

            @Override
            void onGcOverhead(final Threshold threshold, final long current, final long elapsed, final long seq) {
                logGcOverhead(logger, threshold, current, elapsed, seq);
            }
        }, interval);
    }

    private static final String SLOW_GC_LOG_MESSAGE =
        "[gc][{}][{}][{}] duration [{}], collections [{}]/[{}], total [{}]/[{}], memory [{}]->[{}]/[{}], all_pools {}";

    static void logSlowGc(
        final ESLogger logger,
        final JvmMonitor.Threshold threshold,

@@ -162,7 +225,7 @@ public class JvmGcMonitorService extends AbstractLifecycleComponent<JvmGcMonitor
            case WARN:
                if (logger.isWarnEnabled()) {
                    logger.warn(
                        LOG_MESSAGE,
                        SLOW_GC_LOG_MESSAGE,
                        name,
                        seq,
                        totalGcCollectionCount,

@@ -180,7 +243,7 @@ public class JvmGcMonitorService extends AbstractLifecycleComponent<JvmGcMonitor
            case INFO:
                if (logger.isInfoEnabled()) {
                    logger.info(
                        LOG_MESSAGE,
                        SLOW_GC_LOG_MESSAGE,
                        name,
                        seq,
                        totalGcCollectionCount,

@@ -198,7 +261,7 @@ public class JvmGcMonitorService extends AbstractLifecycleComponent<JvmGcMonitor
            case DEBUG:
                if (logger.isDebugEnabled()) {
                    logger.debug(
                        LOG_MESSAGE,
                        SLOW_GC_LOG_MESSAGE,
                        name,
                        seq,
                        totalGcCollectionCount,

@@ -239,6 +302,33 @@ public class JvmGcMonitorService extends AbstractLifecycleComponent<JvmGcMonitor
        return sb.toString();
    }

    private static final String OVERHEAD_LOG_MESSAGE = "[gc][{}] overhead, spent [{}] collecting in the last [{}]";

    static void logGcOverhead(
        final ESLogger logger,
        final JvmMonitor.Threshold threshold,
        final long current,
        final long elapsed,
        final long seq) {
        switch (threshold) {
            case WARN:
                if (logger.isWarnEnabled()) {
                    logger.warn(OVERHEAD_LOG_MESSAGE, seq, TimeValue.timeValueMillis(current), TimeValue.timeValueMillis(elapsed));
                }
                break;
            case INFO:
                if (logger.isInfoEnabled()) {
                    logger.info(OVERHEAD_LOG_MESSAGE, seq, TimeValue.timeValueMillis(current), TimeValue.timeValueMillis(elapsed));
                }
                break;
            case DEBUG:
                if (logger.isDebugEnabled()) {
                    logger.debug(OVERHEAD_LOG_MESSAGE, seq, TimeValue.timeValueMillis(current), TimeValue.timeValueMillis(elapsed));
                }
                break;
        }
    }

    @Override
    protected void doStop() {
        if (!enabled) {

@@ -287,16 +377,18 @@ public class JvmGcMonitorService extends AbstractLifecycleComponent<JvmGcMonitor
        private long lastTime = now();
        private JvmStats lastJvmStats = jvmStats();
        private long seq = 0;
        private final Map<String, GcThreshold> gcThresholds;
        private final Map<String, JvmGcMonitorService.GcThreshold> gcThresholds;
        final GcOverheadThreshold gcOverheadThreshold;

        public JvmMonitor(Map<String, GcThreshold> gcThresholds) {
        public JvmMonitor(final Map<String, GcThreshold> gcThresholds, final GcOverheadThreshold gcOverheadThreshold) {
            this.gcThresholds = Objects.requireNonNull(gcThresholds);
            this.gcOverheadThreshold = Objects.requireNonNull(gcOverheadThreshold);
        }

        @Override
        public void run() {
            try {
                monitorLongGc();
                monitorGc();
            } catch (Throwable t) {
                onMonitorFailure(t);
            }

@@ -304,12 +396,21 @@ public class JvmGcMonitorService extends AbstractLifecycleComponent<JvmGcMonitor

        abstract void onMonitorFailure(Throwable t);

        synchronized void monitorLongGc() {
        synchronized void monitorGc() {
            seq++;
            final long currentTime = now();
            JvmStats currentJvmStats = jvmStats();

            final long elapsed = TimeUnit.NANOSECONDS.toMillis(currentTime - lastTime);

            monitorSlowGc(currentJvmStats, elapsed);
            monitorGcOverhead(currentJvmStats, elapsed);

            lastTime = currentTime;
            lastJvmStats = currentJvmStats;
        }

        final void monitorSlowGc(JvmStats currentJvmStats, long elapsed) {
            for (int i = 0; i < currentJvmStats.getGc().getCollectors().length; i++) {
                GarbageCollector gc = currentJvmStats.getGc().getCollectors()[i];
                GarbageCollector prevGc = lastJvmStats.getGc().getCollectors()[i];

@@ -350,8 +451,31 @@ public class JvmGcMonitorService extends AbstractLifecycleComponent<JvmGcMonitor
                        JvmInfo.jvmInfo().getMem().getHeapMax()));
                }
            }
            lastTime = currentTime;
            lastJvmStats = currentJvmStats;
        }

        final void monitorGcOverhead(final JvmStats currentJvmStats, final long elapsed) {
            long current = 0;
            for (int i = 0; i < currentJvmStats.getGc().getCollectors().length; i++) {
                GarbageCollector gc = currentJvmStats.getGc().getCollectors()[i];
                GarbageCollector prevGc = lastJvmStats.getGc().getCollectors()[i];
                current += gc.getCollectionTime().millis() - prevGc.getCollectionTime().millis();
            }
            checkGcOverhead(current, elapsed, seq);
        }

        void checkGcOverhead(final long current, final long elapsed, final long seq) {
            final int fraction = (int) ((100 * current) / (double) elapsed);
            Threshold overheadThreshold = null;
            if (fraction >= gcOverheadThreshold.warnThreshold) {
                overheadThreshold = Threshold.WARN;
            } else if (fraction >= gcOverheadThreshold.infoThreshold) {
                overheadThreshold = Threshold.INFO;
            } else if (fraction >= gcOverheadThreshold.debugThreshold) {
                overheadThreshold = Threshold.DEBUG;
            }
            if (overheadThreshold != null) {
                onGcOverhead(overheadThreshold, current, elapsed, seq);
            }
        }

        JvmStats jvmStats() {

@@ -364,6 +488,8 @@ public class JvmGcMonitorService extends AbstractLifecycleComponent<JvmGcMonitor

        abstract void onSlowGc(final Threshold threshold, final long seq, final SlowGcEvent slowGcEvent);

        abstract void onGcOverhead(final Threshold threshold, final long total, final long elapsed, final long seq);

    }

}
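checkGcOverhead reduces each monitoring window to a single percentage, GC time over wall-clock time, and maps it onto the highest threshold it clears (defaults from the settings above: warn 50, info 25, debug 10). A standalone sketch of that arithmetic:

--------------------------------
public class GcOverheadDemo {
    // Default thresholds from the hunk above: warn=50, info=25, debug=10.
    static String overheadLevel(long gcMillis, long elapsedMillis) {
        final int fraction = (int) ((100 * gcMillis) / (double) elapsedMillis);
        if (fraction >= 50) {
            return "WARN";
        } else if (fraction >= 25) {
            return "INFO";
        } else if (fraction >= 10) {
            return "DEBUG";
        }
        return null; // below every threshold: nothing is logged
    }

    public static void main(String[] args) {
        System.out.println(overheadLevel(600, 1000)); // WARN: 60% of the window in GC
        System.out.println(overheadLevel(300, 1000)); // INFO
        System.out.println(overheadLevel(50, 1000));  // null: 5% is below debug
    }
}
--------------------------------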
@@ -19,14 +19,11 @@

package org.elasticsearch.node.internal;

import org.elasticsearch.bootstrap.BootstrapInfo;
import org.elasticsearch.cli.Terminal;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.common.Randomness;
import org.elasticsearch.common.Strings;
import org.elasticsearch.cli.Terminal;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsException;
import org.elasticsearch.env.Environment;

@@ -39,10 +36,13 @@ import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;
import java.util.function.Predicate;

import static org.elasticsearch.common.Strings.cleanPath;

@@ -52,20 +52,18 @@ import static org.elasticsearch.common.Strings.cleanPath;
public class InternalSettingsPreparer {

    private static final String[] ALLOWED_SUFFIXES = {".yml", ".yaml", ".json", ".properties"};
    static final String PROPERTY_PREFIX = "es.";
    static final String PROPERTY_DEFAULTS_PREFIX = "es.default.";
    static final String PROPERTY_DEFAULTS_PREFIX = "default.";
    static final Predicate<String> PROPERTY_DEFAULTS_PREDICATE = key -> key.startsWith(PROPERTY_DEFAULTS_PREFIX);

    public static final String SECRET_PROMPT_VALUE = "${prompt.secret}";
    public static final String TEXT_PROMPT_VALUE = "${prompt.text}";
    public static final Setting<Boolean> IGNORE_SYSTEM_PROPERTIES_SETTING =
        Setting.boolSetting("config.ignore_system_properties", false, Property.NodeScope);

    /**
     * Prepares the settings by gathering all elasticsearch system properties and setting defaults.
     */
    public static Settings prepareSettings(Settings input) {
        Settings.Builder output = Settings.builder();
        initializeSettings(output, input, true);
        initializeSettings(output, input, true, Collections.emptyMap());
        finalizeSettings(output, null, null);
        return output.build();
    }

@@ -80,9 +78,23 @@ public class InternalSettingsPreparer {
     * @return the {@link Settings} and {@link Environment} as a {@link Tuple}
     */
    public static Environment prepareEnvironment(Settings input, Terminal terminal) {
        return prepareEnvironment(input, terminal, Collections.emptyMap());
    }

    /**
     * Prepares the settings by gathering all elasticsearch system properties, optionally loading the configuration settings,
     * and then replacing all property placeholders. If a {@link Terminal} is provided and configuration settings are loaded,
     * settings with a value of <code>${prompt.text}</code> or <code>${prompt.secret}</code> will result in a prompt for
     * the setting to the user.
     * @param input The custom settings to use. These are not overwritten by settings in the configuration file.
     * @param terminal the Terminal to use for input/output
     * @param properties Map of properties key/value pairs (usually from the command-line)
     * @return the {@link Settings} and {@link Environment} as a {@link Tuple}
     */
    public static Environment prepareEnvironment(Settings input, Terminal terminal, Map<String, String> properties) {
        // just create enough settings to build the environment, to get the config dir
        Settings.Builder output = Settings.builder();
        initializeSettings(output, input, true, properties);
        Environment environment = new Environment(output.build());

        boolean settingsFileFound = false;

@@ -103,7 +115,7 @@ public class InternalSettingsPreparer {

        // re-initialize settings now that the config file has been loaded
        // TODO: only re-initialize if a config file was actually loaded
        initializeSettings(output, input, false);
        initializeSettings(output, input, false, properties);
        finalizeSettings(output, terminal, environment.configFile());

        environment = new Environment(output.build());

@@ -113,22 +125,16 @@ public class InternalSettingsPreparer {
        return new Environment(output.build());
    }

    private static boolean useSystemProperties(Settings input) {
        return !IGNORE_SYSTEM_PROPERTIES_SETTING.get(input);
    }

    /**
     * Initializes the builder with the given input settings, and loads system properties settings if allowed.
     * If loadDefaults is true, system property default settings are loaded.
     */
    private static void initializeSettings(Settings.Builder output, Settings input, boolean loadDefaults) {
    private static void initializeSettings(Settings.Builder output, Settings input, boolean loadDefaults, Map<String, String> esSettings) {
        output.put(input);
        if (useSystemProperties(input)) {
            if (loadDefaults) {
                output.putProperties(PROPERTY_DEFAULTS_PREFIX, BootstrapInfo.getSystemProperties());
            }
            output.putProperties(PROPERTY_PREFIX, BootstrapInfo.getSystemProperties(), PROPERTY_DEFAULTS_PREFIX);
        if (loadDefaults) {
            output.putProperties(esSettings, PROPERTY_DEFAULTS_PREDICATE, key -> key.substring(PROPERTY_DEFAULTS_PREFIX.length()));
        }
        output.putProperties(esSettings, PROPERTY_DEFAULTS_PREDICATE.negate(), Function.identity());
        output.replacePropertyPlaceholders();
    }
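The new initializeSettings no longer scrapes es.-prefixed system properties itself; the caller hands in a plain map, and the two putProperties calls split it with the predicate: keys starting with "default." are stripped of the prefix and applied only when defaults are loaded, while everything else is applied verbatim and wins on clashes. A JDK-only sketch of that split (a plain map stands in for Settings.Builder):

--------------------------------
import java.util.HashMap;
import java.util.Map;
import java.util.function.Predicate;

public class SettingsSplitDemo {
    static final String PROPERTY_DEFAULTS_PREFIX = "default.";
    static final Predicate<String> PROPERTY_DEFAULTS_PREDICATE =
        key -> key.startsWith(PROPERTY_DEFAULTS_PREFIX);

    public static void main(String[] args) {
        Map<String, String> esSettings = new HashMap<>();
        esSettings.put("default.path.conf", "/etc/elasticsearch"); // a default, may be overridden
        esSettings.put("path.home", "/opt/elasticsearch");         // a hard setting

        Map<String, String> output = new HashMap<>();
        // Defaults first, with the "default." prefix stripped ...
        esSettings.entrySet().stream()
            .filter(e -> PROPERTY_DEFAULTS_PREDICATE.test(e.getKey()))
            .forEach(e -> output.put(
                e.getKey().substring(PROPERTY_DEFAULTS_PREFIX.length()), e.getValue()));
        // ... then everything else verbatim, winning over defaults on key clashes.
        esSettings.entrySet().stream()
            .filter(e -> PROPERTY_DEFAULTS_PREDICATE.negate().test(e.getKey()))
            .forEach(e -> output.put(e.getKey(), e.getValue()));

        System.out.println(output); // path.conf=/etc/elasticsearch, path.home=/opt/elasticsearch
    }
}
--------------------------------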
@@ -27,11 +27,14 @@ import org.elasticsearch.Version;
import org.elasticsearch.bootstrap.JarHell;
import org.elasticsearch.cli.Command;
import org.elasticsearch.cli.ExitCodes;
import org.elasticsearch.cli.SettingCommand;
import org.elasticsearch.cli.Terminal;
import org.elasticsearch.cli.UserError;
import org.elasticsearch.common.hash.MessageDigests;
import org.elasticsearch.common.io.FileSystemUtils;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.node.internal.InternalSettingsPreparer;

import java.io.BufferedReader;
import java.io.IOException;

@@ -56,6 +59,7 @@ import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.zip.ZipEntry;

@@ -95,7 +99,7 @@ import static org.elasticsearch.common.util.set.Sets.newHashSet;
 * elasticsearch config directory, using the name of the plugin. If any files to be installed
 * already exist, they will be skipped.
 */
class InstallPluginCommand extends Command {
class InstallPluginCommand extends SettingCommand {

    private static final String PROPERTY_SUPPORT_STAGING_URLS = "es.plugins.staging";

@@ -126,12 +130,12 @@ class InstallPluginCommand extends Command {
        "mapper-murmur3",
        "mapper-size",
        "repository-azure",
        "repository-gcs",
        "repository-hdfs",
        "repository-s3",
        "store-smb",
        "x-pack")));

    private final Environment env;
    private final OptionSpec<Void> batchOption;
    private final OptionSpec<String> arguments;

@@ -159,9 +163,8 @@ class InstallPluginCommand extends Command {
        FILE_PERMS = Collections.unmodifiableSet(filePerms);
    }

    InstallPluginCommand(Environment env) {
    InstallPluginCommand() {
        super("Install a plugin");
        this.env = env;
        this.batchOption = parser.acceptsAll(Arrays.asList("b", "batch"),
            "Enable batch mode explicitly, automatic confirmation of security permission");
        this.arguments = parser.nonOptions("plugin id");

@@ -177,7 +180,7 @@ class InstallPluginCommand extends Command {
    }

    @Override
    protected void execute(Terminal terminal, OptionSet options) throws Exception {
    protected void execute(Terminal terminal, OptionSet options, Map<String, String> settings) throws Exception {
        // TODO: in jopt-simple 5.0 we can enforce a min/max number of positional args
        List<String> args = arguments.values(options);
        if (args.size() != 1) {

@@ -185,12 +188,12 @@ class InstallPluginCommand extends Command {
        }
        String pluginId = args.get(0);
        boolean isBatch = options.has(batchOption) || System.console() == null;
        execute(terminal, pluginId, isBatch);
        execute(terminal, pluginId, isBatch, settings);
    }

    // pkg private for testing
    void execute(Terminal terminal, String pluginId, boolean isBatch) throws Exception {
    void execute(Terminal terminal, String pluginId, boolean isBatch, Map<String, String> settings) throws Exception {
        final Environment env = InternalSettingsPreparer.prepareEnvironment(Settings.EMPTY, terminal, settings);
        // TODO: remove this leniency!! is it needed anymore?
        if (Files.exists(env.pluginsFile()) == false) {
            terminal.println("Plugins directory [" + env.pluginsFile() + "] does not exist. Creating...");

@@ -199,7 +202,7 @@ class InstallPluginCommand extends Command {

        Path pluginZip = download(terminal, pluginId, env.tmpFile());
        Path extractedZip = unzip(pluginZip, env.pluginsFile());
        install(terminal, isBatch, extractedZip);
        install(terminal, isBatch, extractedZip, env);
    }

    /** Downloads the plugin and returns the file it was downloaded to. */

@@ -348,7 +351,7 @@ class InstallPluginCommand extends Command {
    }

    /** Load information about the plugin, and verify it can be installed with no errors. */
    private PluginInfo verify(Terminal terminal, Path pluginRoot, boolean isBatch) throws Exception {
    private PluginInfo verify(Terminal terminal, Path pluginRoot, boolean isBatch, Environment env) throws Exception {
        // read and validate the plugin descriptor
        PluginInfo info = PluginInfo.readFromProperties(pluginRoot);
        terminal.println(VERBOSE, info.toString());

@@ -397,12 +400,12 @@ class InstallPluginCommand extends Command {
     * Installs the plugin from {@code tmpRoot} into the plugins dir.
     * If the plugin has a bin dir and/or a config dir, those are copied.
     */
    private void install(Terminal terminal, boolean isBatch, Path tmpRoot) throws Exception {
    private void install(Terminal terminal, boolean isBatch, Path tmpRoot, Environment env) throws Exception {
        List<Path> deleteOnFailure = new ArrayList<>();
        deleteOnFailure.add(tmpRoot);

        try {
            PluginInfo info = verify(terminal, tmpRoot, isBatch);
            PluginInfo info = verify(terminal, tmpRoot, isBatch, env);

            final Path destination = env.pluginsFile().resolve(info.getName());
            if (Files.exists(destination)) {
@@ -19,6 +19,13 @@

package org.elasticsearch.plugins;

import joptsimple.OptionSet;
import org.elasticsearch.cli.SettingCommand;
import org.elasticsearch.cli.Terminal;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.node.internal.InternalSettingsPreparer;

import java.io.IOException;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;

@@ -26,26 +33,20 @@ import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import joptsimple.OptionSet;
import org.elasticsearch.cli.Command;
import org.elasticsearch.cli.Terminal;
import org.elasticsearch.env.Environment;
import java.util.Map;

/**
 * A command for the plugin cli to list plugins installed in elasticsearch.
 */
class ListPluginsCommand extends Command {
class ListPluginsCommand extends SettingCommand {

    private final Environment env;

    ListPluginsCommand(Environment env) {
    ListPluginsCommand() {
        super("Lists installed elasticsearch plugins");
        this.env = env;
    }

    @Override
    protected void execute(Terminal terminal, OptionSet options) throws Exception {
    protected void execute(Terminal terminal, OptionSet options, Map<String, String> settings) throws Exception {
        final Environment env = InternalSettingsPreparer.prepareEnvironment(Settings.EMPTY, terminal, settings);
        if (Files.exists(env.pluginsFile()) == false) {
            throw new IOException("Plugins directory missing: " + env.pluginsFile());
        }
@@ -26,21 +26,24 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.node.internal.InternalSettingsPreparer;

import java.util.Collections;

/**
 * A cli tool for adding, removing and listing plugins for elasticsearch.
 */
public class PluginCli extends MultiCommand {

    public PluginCli(Environment env) {
    public PluginCli() {
        super("A tool for managing installed elasticsearch plugins");
        subcommands.put("list", new ListPluginsCommand(env));
        subcommands.put("install", new InstallPluginCommand(env));
        subcommands.put("remove", new RemovePluginCommand(env));
        subcommands.put("list", new ListPluginsCommand());
        subcommands.put("install", new InstallPluginCommand());
        subcommands.put("remove", new RemovePluginCommand());
    }

    public static void main(String[] args) throws Exception {
        // initialize default for es.logger.level because we will not read the logging.yml
        String loggerLevel = System.getProperty("es.logger.level", "INFO");
        String pathHome = System.getProperty("es.path.home");
        // Set the appender for all potential log files to terminal so that other components that use the logger print out the
        // same terminal.
        // The reason for this is that the plugin cli cannot be configured with a file appender because when the plugin command is

@@ -48,12 +51,14 @@ public class PluginCli extends MultiCommand {
        // is run as service then the logs should be at /var/log/elasticsearch but when started from the tar they should be at es.home/logs.
        // Therefore we print to Terminal.
        Environment loggingEnvironment = InternalSettingsPreparer.prepareEnvironment(Settings.builder()
                .put("path.home", pathHome)
                .put("appender.terminal.type", "terminal")
                .put("rootLogger", "${es.logger.level}, terminal")
                .put("es.logger.level", loggerLevel)
                .put("rootLogger", "${logger.level}, terminal")
                .put("logger.level", loggerLevel)
                .build(), Terminal.DEFAULT);
        LogConfigurator.configure(loggingEnvironment.settings(), false);
        Environment env = InternalSettingsPreparer.prepareEnvironment(Settings.EMPTY, Terminal.DEFAULT);
        exit(new PluginCli(env).main(args, Terminal.DEFAULT));

        exit(new PluginCli().main(args, Terminal.DEFAULT));
    }

}
@@ -24,45 +24,49 @@ import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import joptsimple.OptionSet;
import joptsimple.OptionSpec;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.cli.Command;
import org.elasticsearch.cli.ExitCodes;
import org.elasticsearch.cli.SettingCommand;
import org.elasticsearch.cli.UserError;
import org.elasticsearch.common.Strings;
import org.elasticsearch.cli.Terminal;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.node.internal.InternalSettingsPreparer;

import static org.elasticsearch.cli.Terminal.Verbosity.VERBOSE;

/**
 * A command for the plugin cli to remove a plugin from elasticsearch.
 */
class RemovePluginCommand extends Command {
class RemovePluginCommand extends SettingCommand {

    private final Environment env;
    private final OptionSpec<String> arguments;

    RemovePluginCommand(Environment env) {
    RemovePluginCommand() {
        super("Removes a plugin from elasticsearch");
        this.env = env;
        this.arguments = parser.nonOptions("plugin name");
    }

    @Override
    protected void execute(Terminal terminal, OptionSet options) throws Exception {
    protected void execute(Terminal terminal, OptionSet options, Map<String, String> settings) throws Exception {
        // TODO: in jopt-simple 5.0 we can enforce a min/max number of positional args
        List<String> args = arguments.values(options);
        if (args.size() != 1) {
            throw new UserError(ExitCodes.USAGE, "Must supply a single plugin id argument");
        }
        execute(terminal, args.get(0));
        execute(terminal, args.get(0), settings);
    }

    // pkg private for testing
    void execute(Terminal terminal, String pluginName) throws Exception {
    void execute(Terminal terminal, String pluginName, Map<String, String> settings) throws Exception {
        final Environment env = InternalSettingsPreparer.prepareEnvironment(Settings.EMPTY, terminal, settings);

        terminal.println("-> Removing " + Strings.coalesceToEmpty(pluginName) + "...");

        Path pluginDir = env.pluginsFile().resolve(pluginName);
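All three plugin commands follow the same refactoring: the Environment is no longer injected at construction but rebuilt inside execute from the per-invocation settings map that the new SettingCommand base class supplies, so settings passed on the command line are honored at the point of use. A JDK-only sketch of that inversion (the class names here are illustrative, not the real cli types):

--------------------------------
import java.util.Map;
import java.util.function.Function;

// Illustrative stand-in for SettingCommand: settings arrive per invocation.
abstract class SettingAwareCommand {
    final void run(Map<String, String> settings) throws Exception {
        execute(settings);
    }

    protected abstract void execute(Map<String, String> settings) throws Exception;
}

class RemoveSketch extends SettingAwareCommand {
    private final Function<Map<String, String>, String> environmentFactory;

    RemoveSketch(Function<Map<String, String>, String> environmentFactory) {
        this.environmentFactory = environmentFactory;
    }

    @Override
    protected void execute(Map<String, String> settings) {
        // The environment is derived here, not frozen at construction time,
        // so per-invocation settings can change paths such as the plugins dir.
        String pluginsDir = environmentFactory.apply(settings);
        System.out.println("-> Removing from " + pluginsDir);
    }
}
--------------------------------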
@ -29,6 +29,7 @@ import org.elasticsearch.common.io.stream.NamedWriteable;
|
|||
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
|
||||
import org.elasticsearch.common.io.stream.Writeable;
|
||||
import org.elasticsearch.common.lucene.search.function.ScoreFunction;
|
||||
import org.elasticsearch.common.settings.Setting;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.ParseFieldRegistry;
|
||||
import org.elasticsearch.index.percolator.PercolatorHighlightSubFetchPhase;
|
||||
|
@ -97,51 +98,51 @@ import org.elasticsearch.indices.query.IndicesQueriesRegistry;
|
|||
import org.elasticsearch.search.action.SearchTransportService;
|
||||
import org.elasticsearch.search.aggregations.AggregationPhase;
|
||||
import org.elasticsearch.search.aggregations.Aggregator;
|
||||
import org.elasticsearch.search.aggregations.AggregatorBuilder;
|
||||
import org.elasticsearch.search.aggregations.AggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.AggregatorParsers;
|
||||
import org.elasticsearch.search.aggregations.bucket.children.ChildrenAggregatorBuilder;
|
||||
import org.elasticsearch.search.aggregations.bucket.children.ChildrenAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.bucket.children.InternalChildren;
|
||||
import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregatorBuilder;
|
||||
import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.bucket.filter.InternalFilter;
|
||||
import org.elasticsearch.search.aggregations.bucket.filters.FiltersAggregatorBuilder;
|
||||
import org.elasticsearch.search.aggregations.bucket.filters.FiltersAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.bucket.filters.InternalFilters;
|
||||
import org.elasticsearch.search.aggregations.bucket.geogrid.GeoGridAggregatorBuilder;
|
||||
import org.elasticsearch.search.aggregations.bucket.geogrid.GeoGridAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.bucket.geogrid.GeoHashGridParser;
|
||||
import org.elasticsearch.search.aggregations.bucket.geogrid.InternalGeoHashGrid;
|
||||
import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregatorBuilder;
|
||||
import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.bucket.global.InternalGlobal;
|
||||
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregatorBuilder;
|
||||
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramParser;
|
||||
import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregatorBuilder;
|
||||
import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.bucket.histogram.HistogramParser;
|
||||
import org.elasticsearch.search.aggregations.bucket.histogram.InternalHistogram;
|
||||
import org.elasticsearch.search.aggregations.bucket.missing.InternalMissing;
|
||||
import org.elasticsearch.search.aggregations.bucket.missing.MissingAggregatorBuilder;
|
||||
import org.elasticsearch.search.aggregations.bucket.missing.MissingAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.bucket.missing.MissingParser;
|
||||
import org.elasticsearch.search.aggregations.bucket.nested.InternalNested;
|
||||
import org.elasticsearch.search.aggregations.bucket.nested.InternalReverseNested;
|
||||
import org.elasticsearch.search.aggregations.bucket.nested.NestedAggregatorBuilder;
|
||||
import org.elasticsearch.search.aggregations.bucket.nested.ReverseNestedAggregatorBuilder;
|
||||
import org.elasticsearch.search.aggregations.bucket.nested.NestedAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.bucket.nested.ReverseNestedAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.bucket.range.InternalRange;
|
||||
import org.elasticsearch.search.aggregations.bucket.range.RangeAggregatorBuilder;
|
||||
import org.elasticsearch.search.aggregations.bucket.range.RangeAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.bucket.range.RangeParser;
|
||||
import org.elasticsearch.search.aggregations.bucket.range.date.DateRangeAggregatorBuilder;
|
||||
import org.elasticsearch.search.aggregations.bucket.range.date.DateRangeAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.bucket.range.date.DateRangeParser;
|
||||
import org.elasticsearch.search.aggregations.bucket.range.date.InternalDateRange;
|
||||
import org.elasticsearch.search.aggregations.bucket.range.geodistance.GeoDistanceAggregatorBuilder;
|
||||
import org.elasticsearch.search.aggregations.bucket.range.geodistance.GeoDistanceAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.bucket.range.geodistance.GeoDistanceParser;
|
||||
import org.elasticsearch.search.aggregations.bucket.range.geodistance.InternalGeoDistance;
|
||||
import org.elasticsearch.search.aggregations.bucket.range.ip.IpRangeAggregatorBuilder;
|
||||
import org.elasticsearch.search.aggregations.bucket.range.ip.IpRangeAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.bucket.range.InternalBinaryRange;
|
||||
import org.elasticsearch.search.aggregations.bucket.range.ip.IpRangeParser;
|
||||
import org.elasticsearch.search.aggregations.bucket.sampler.DiversifiedAggregatorBuilder;
|
||||
import org.elasticsearch.search.aggregations.bucket.sampler.DiversifiedAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.bucket.sampler.DiversifiedSamplerParser;
|
||||
import org.elasticsearch.search.aggregations.bucket.sampler.InternalSampler;
|
||||
import org.elasticsearch.search.aggregations.bucket.sampler.SamplerAggregatorBuilder;
|
||||
import org.elasticsearch.search.aggregations.bucket.sampler.SamplerAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.bucket.sampler.UnmappedSampler;
|
||||
import org.elasticsearch.search.aggregations.bucket.significant.SignificantLongTerms;
|
||||
import org.elasticsearch.search.aggregations.bucket.significant.SignificantStringTerms;
|
||||
import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsAggregatorBuilder;
|
||||
import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsParser;
|
||||
import org.elasticsearch.search.aggregations.bucket.significant.UnmappedSignificantTerms;
|
||||
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.ChiSquare;
|
||||
|
@ -155,50 +156,50 @@ import org.elasticsearch.search.aggregations.bucket.significant.heuristics.Signi
|
|||
import org.elasticsearch.search.aggregations.bucket.terms.DoubleTerms;
|
||||
import org.elasticsearch.search.aggregations.bucket.terms.LongTerms;
|
||||
import org.elasticsearch.search.aggregations.bucket.terms.StringTerms;
|
||||
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregatorBuilder;
|
||||
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.bucket.terms.TermsParser;
|
||||
import org.elasticsearch.search.aggregations.bucket.terms.UnmappedTerms;
|
||||
import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregatorBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.avg.AvgParser;
|
||||
import org.elasticsearch.search.aggregations.metrics.avg.InternalAvg;
|
||||
import org.elasticsearch.search.aggregations.metrics.cardinality.CardinalityAggregatorBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.cardinality.CardinalityAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.cardinality.CardinalityParser;
|
||||
import org.elasticsearch.search.aggregations.metrics.cardinality.InternalCardinality;
|
||||
import org.elasticsearch.search.aggregations.metrics.geobounds.GeoBoundsAggregatorBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.geobounds.GeoBoundsAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.geobounds.GeoBoundsParser;
|
||||
import org.elasticsearch.search.aggregations.metrics.geobounds.InternalGeoBounds;
|
||||
import org.elasticsearch.search.aggregations.metrics.geocentroid.GeoCentroidAggregatorBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.geocentroid.GeoCentroidAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.geocentroid.GeoCentroidParser;
|
||||
import org.elasticsearch.search.aggregations.metrics.geocentroid.InternalGeoCentroid;
|
||||
import org.elasticsearch.search.aggregations.metrics.max.InternalMax;
|
||||
import org.elasticsearch.search.aggregations.metrics.max.MaxAggregatorBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.max.MaxParser;
|
||||
import org.elasticsearch.search.aggregations.metrics.min.InternalMin;
|
||||
import org.elasticsearch.search.aggregations.metrics.min.MinAggregatorBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.min.MinAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.min.MinParser;
|
||||
import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanksAggregatorBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanksAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanksParser;
|
||||
import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesAggregatorBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesParser;
|
||||
import org.elasticsearch.search.aggregations.metrics.percentiles.hdr.InternalHDRPercentileRanks;
|
||||
import org.elasticsearch.search.aggregations.metrics.percentiles.hdr.InternalHDRPercentiles;
|
||||
import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.InternalTDigestPercentileRanks;
|
||||
import org.elasticsearch.search.aggregations.metrics.percentiles.tdigest.InternalTDigestPercentiles;
|
||||
import org.elasticsearch.search.aggregations.metrics.scripted.InternalScriptedMetric;
|
||||
import org.elasticsearch.search.aggregations.metrics.scripted.ScriptedMetricAggregatorBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.scripted.ScriptedMetricAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.stats.InternalStats;
|
||||
import org.elasticsearch.search.aggregations.metrics.stats.StatsAggregatorBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.stats.StatsAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.stats.StatsParser;
|
||||
import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStatsAggregatorBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStatsAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStatsParser;
|
||||
import org.elasticsearch.search.aggregations.metrics.stats.extended.InternalExtendedStats;
|
||||
import org.elasticsearch.search.aggregations.metrics.sum.InternalSum;
|
||||
import org.elasticsearch.search.aggregations.metrics.sum.SumAggregatorBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.sum.SumAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.sum.SumParser;
|
||||
import org.elasticsearch.search.aggregations.metrics.tophits.InternalTopHits;
|
||||
import org.elasticsearch.search.aggregations.metrics.tophits.TopHitsAggregatorBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.tophits.TopHitsAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.valuecount.InternalValueCount;
|
||||
import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCountAggregatorBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCountAggregationBuilder;
|
||||
import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCountParser;
|
||||
import org.elasticsearch.search.aggregations.pipeline.InternalSimpleValue;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
||||
|
@ -290,6 +291,8 @@ public class SearchModule extends AbstractModule {
|
|||
|
||||
private final Settings settings;
|
||||
private final NamedWriteableRegistry namedWriteableRegistry;
|
||||
public static final Setting<Integer> INDICES_MAX_CLAUSE_COUNT_SETTING = Setting.intSetting("indices.query.bool.max_clause_count",
|
||||
1024, 1, Integer.MAX_VALUE, Setting.Property.NodeScope);
|
||||
|
||||
// pkg private so tests can mock
|
||||
Class<? extends SearchService> searchServiceImpl = SearchService.class;
|
||||
|
@ -421,10 +424,10 @@ public class SearchModule extends AbstractModule {
|
|||
* @param aggregationName names by which the aggregation may be parsed. The first name is special because it is the name that the reader
|
||||
* is registered under.
|
||||
*/
|
||||
public <AB extends AggregatorBuilder<AB>> void registerAggregation(Writeable.Reader<AB> reader, Aggregator.Parser aggregationParser,
|
||||
ParseField aggregationName) {
|
||||
public <AB extends AggregationBuilder<AB>> void registerAggregation(Writeable.Reader<AB> reader, Aggregator.Parser aggregationParser,
|
||||
ParseField aggregationName) {
|
||||
aggregationParserRegistry.register(aggregationParser, aggregationName);
|
||||
namedWriteableRegistry.register(AggregatorBuilder.class, aggregationName.getPreferredName(), reader);
|
||||
namedWriteableRegistry.register(AggregationBuilder.class, aggregationName.getPreferredName(), reader);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -478,55 +481,57 @@ public class SearchModule extends AbstractModule {
}

protected void configureAggs() {
registerAggregation(AvgAggregatorBuilder::new, new AvgParser(), AvgAggregatorBuilder.AGGREGATION_NAME_FIELD);
registerAggregation(SumAggregatorBuilder::new, new SumParser(), SumAggregatorBuilder.AGGREGATION_NAME_FIELD);
registerAggregation(MinAggregatorBuilder::new, new MinParser(), MinAggregatorBuilder.AGGREGATION_NAME_FIELD);
registerAggregation(MaxAggregatorBuilder::new, new MaxParser(), MaxAggregatorBuilder.AGGREGATION_NAME_FIELD);
registerAggregation(StatsAggregatorBuilder::new, new StatsParser(), StatsAggregatorBuilder.AGGREGATION_NAME_FIELD);
registerAggregation(ExtendedStatsAggregatorBuilder::new, new ExtendedStatsParser(),
ExtendedStatsAggregatorBuilder.AGGREGATION_NAME_FIELD);
registerAggregation(ValueCountAggregatorBuilder::new, new ValueCountParser(), ValueCountAggregatorBuilder.AGGREGATION_NAME_FIELD);
registerAggregation(PercentilesAggregatorBuilder::new, new PercentilesParser(),
PercentilesAggregatorBuilder.AGGREGATION_NAME_FIELD);
registerAggregation(PercentileRanksAggregatorBuilder::new, new PercentileRanksParser(),
PercentileRanksAggregatorBuilder.AGGREGATION_NAME_FIELD);
registerAggregation(CardinalityAggregatorBuilder::new, new CardinalityParser(),
CardinalityAggregatorBuilder.AGGREGATION_NAME_FIELD);
registerAggregation(GlobalAggregatorBuilder::new, GlobalAggregatorBuilder::parse, GlobalAggregatorBuilder.AGGREGATION_NAME_FIELD);
registerAggregation(MissingAggregatorBuilder::new, new MissingParser(), MissingAggregatorBuilder.AGGREGATION_NAME_FIELD);
registerAggregation(FilterAggregatorBuilder::new, FilterAggregatorBuilder::parse, FilterAggregatorBuilder.AGGREGATION_NAME_FIELD);
registerAggregation(FiltersAggregatorBuilder::new, FiltersAggregatorBuilder::parse,
FiltersAggregatorBuilder.AGGREGATION_NAME_FIELD);
registerAggregation(SamplerAggregatorBuilder::new, SamplerAggregatorBuilder::parse,
SamplerAggregatorBuilder.AGGREGATION_NAME_FIELD);
registerAggregation(DiversifiedAggregatorBuilder::new, new DiversifiedSamplerParser(),
DiversifiedAggregatorBuilder.AGGREGATION_NAME_FIELD);
registerAggregation(TermsAggregatorBuilder::new, new TermsParser(), TermsAggregatorBuilder.AGGREGATION_NAME_FIELD);
registerAggregation(SignificantTermsAggregatorBuilder::new,
registerAggregation(AvgAggregationBuilder::new, new AvgParser(), AvgAggregationBuilder.AGGREGATION_NAME_FIELD);
registerAggregation(SumAggregationBuilder::new, new SumParser(), SumAggregationBuilder.AGGREGATION_NAME_FIELD);
registerAggregation(MinAggregationBuilder::new, new MinParser(), MinAggregationBuilder.AGGREGATION_NAME_FIELD);
registerAggregation(MaxAggregationBuilder::new, new MaxParser(), MaxAggregationBuilder.AGGREGATION_NAME_FIELD);
registerAggregation(StatsAggregationBuilder::new, new StatsParser(), StatsAggregationBuilder.AGGREGATION_NAME_FIELD);
registerAggregation(ExtendedStatsAggregationBuilder::new, new ExtendedStatsParser(),
ExtendedStatsAggregationBuilder.AGGREGATION_NAME_FIELD);
registerAggregation(ValueCountAggregationBuilder::new, new ValueCountParser(), ValueCountAggregationBuilder.AGGREGATION_NAME_FIELD);
registerAggregation(PercentilesAggregationBuilder::new, new PercentilesParser(),
PercentilesAggregationBuilder.AGGREGATION_NAME_FIELD);
registerAggregation(PercentileRanksAggregationBuilder::new, new PercentileRanksParser(),
PercentileRanksAggregationBuilder.AGGREGATION_NAME_FIELD);
registerAggregation(CardinalityAggregationBuilder::new, new CardinalityParser(),
CardinalityAggregationBuilder.AGGREGATION_NAME_FIELD);
registerAggregation(GlobalAggregationBuilder::new, GlobalAggregationBuilder::parse,
GlobalAggregationBuilder.AGGREGATION_NAME_FIELD);
registerAggregation(MissingAggregationBuilder::new, new MissingParser(), MissingAggregationBuilder.AGGREGATION_NAME_FIELD);
registerAggregation(FilterAggregationBuilder::new, FilterAggregationBuilder::parse,
FilterAggregationBuilder.AGGREGATION_NAME_FIELD);
registerAggregation(FiltersAggregationBuilder::new, FiltersAggregationBuilder::parse,
FiltersAggregationBuilder.AGGREGATION_NAME_FIELD);
registerAggregation(SamplerAggregationBuilder::new, SamplerAggregationBuilder::parse,
SamplerAggregationBuilder.AGGREGATION_NAME_FIELD);
registerAggregation(DiversifiedAggregationBuilder::new, new DiversifiedSamplerParser(),
DiversifiedAggregationBuilder.AGGREGATION_NAME_FIELD);
registerAggregation(TermsAggregationBuilder::new, new TermsParser(), TermsAggregationBuilder.AGGREGATION_NAME_FIELD);
registerAggregation(SignificantTermsAggregationBuilder::new,
new SignificantTermsParser(significanceHeuristicParserRegistry, queryParserRegistry),
SignificantTermsAggregatorBuilder.AGGREGATION_NAME_FIELD);
registerAggregation(RangeAggregatorBuilder::new, new RangeParser(), RangeAggregatorBuilder.AGGREGATION_NAME_FIELD);
registerAggregation(DateRangeAggregatorBuilder::new, new DateRangeParser(), DateRangeAggregatorBuilder.AGGREGATION_NAME_FIELD);
registerAggregation(IpRangeAggregatorBuilder::new, new IpRangeParser(), IpRangeAggregatorBuilder.AGGREGATION_NAME_FIELD);
registerAggregation(HistogramAggregatorBuilder::new, new HistogramParser(), HistogramAggregatorBuilder.AGGREGATION_NAME_FIELD);
registerAggregation(DateHistogramAggregatorBuilder::new, new DateHistogramParser(),
DateHistogramAggregatorBuilder.AGGREGATION_NAME_FIELD);
registerAggregation(GeoDistanceAggregatorBuilder::new, new GeoDistanceParser(),
GeoDistanceAggregatorBuilder.AGGREGATION_NAME_FIELD);
registerAggregation(GeoGridAggregatorBuilder::new, new GeoHashGridParser(), GeoGridAggregatorBuilder.AGGREGATION_NAME_FIELD);
registerAggregation(NestedAggregatorBuilder::new, NestedAggregatorBuilder::parse, NestedAggregatorBuilder.AGGREGATION_FIELD_NAME);
registerAggregation(ReverseNestedAggregatorBuilder::new, ReverseNestedAggregatorBuilder::parse,
ReverseNestedAggregatorBuilder.AGGREGATION_NAME_FIELD);
registerAggregation(TopHitsAggregatorBuilder::new, TopHitsAggregatorBuilder::parse,
TopHitsAggregatorBuilder.AGGREGATION_NAME_FIELD);
registerAggregation(GeoBoundsAggregatorBuilder::new, new GeoBoundsParser(), GeoBoundsAggregatorBuilder.AGGREGATION_NAME_FIED);
registerAggregation(GeoCentroidAggregatorBuilder::new, new GeoCentroidParser(),
GeoCentroidAggregatorBuilder.AGGREGATION_NAME_FIELD);
registerAggregation(ScriptedMetricAggregatorBuilder::new, ScriptedMetricAggregatorBuilder::parse,
ScriptedMetricAggregatorBuilder.AGGREGATION_NAME_FIELD);
registerAggregation(ChildrenAggregatorBuilder::new, ChildrenAggregatorBuilder::parse,
ChildrenAggregatorBuilder.AGGREGATION_NAME_FIELD);

SignificantTermsAggregationBuilder.AGGREGATION_NAME_FIELD);
registerAggregation(RangeAggregationBuilder::new, new RangeParser(), RangeAggregationBuilder.AGGREGATION_NAME_FIELD);
registerAggregation(DateRangeAggregationBuilder::new, new DateRangeParser(), DateRangeAggregationBuilder.AGGREGATION_NAME_FIELD);
registerAggregation(IpRangeAggregationBuilder::new, new IpRangeParser(), IpRangeAggregationBuilder.AGGREGATION_NAME_FIELD);
registerAggregation(HistogramAggregationBuilder::new, new HistogramParser(), HistogramAggregationBuilder.AGGREGATION_NAME_FIELD);
registerAggregation(DateHistogramAggregationBuilder::new, new DateHistogramParser(),
DateHistogramAggregationBuilder.AGGREGATION_NAME_FIELD);
registerAggregation(GeoDistanceAggregationBuilder::new, new GeoDistanceParser(),
GeoDistanceAggregationBuilder.AGGREGATION_NAME_FIELD);
registerAggregation(GeoGridAggregationBuilder::new, new GeoHashGridParser(), GeoGridAggregationBuilder.AGGREGATION_NAME_FIELD);
registerAggregation(NestedAggregationBuilder::new, NestedAggregationBuilder::parse,
NestedAggregationBuilder.AGGREGATION_FIELD_NAME);
registerAggregation(ReverseNestedAggregationBuilder::new, ReverseNestedAggregationBuilder::parse,
ReverseNestedAggregationBuilder.AGGREGATION_NAME_FIELD);
registerAggregation(TopHitsAggregationBuilder::new, TopHitsAggregationBuilder::parse,
TopHitsAggregationBuilder.AGGREGATION_NAME_FIELD);
registerAggregation(GeoBoundsAggregationBuilder::new, new GeoBoundsParser(), GeoBoundsAggregationBuilder.AGGREGATION_NAME_FIED);
registerAggregation(GeoCentroidAggregationBuilder::new, new GeoCentroidParser(),
GeoCentroidAggregationBuilder.AGGREGATION_NAME_FIELD);
registerAggregation(ScriptedMetricAggregationBuilder::new, ScriptedMetricAggregationBuilder::parse,
ScriptedMetricAggregationBuilder.AGGREGATION_NAME_FIELD);
registerAggregation(ChildrenAggregationBuilder::new, ChildrenAggregationBuilder::parse,
ChildrenAggregationBuilder.AGGREGATION_NAME_FIELD);
registerPipelineAggregation(DerivativePipelineAggregatorBuilder::new, DerivativePipelineAggregatorBuilder::parse,
DerivativePipelineAggregatorBuilder.AGGREGATION_NAME_FIELD);
registerPipelineAggregation(MaxBucketPipelineAggregatorBuilder::new, MaxBucketPipelineAggregatorBuilder.PARSER,
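Every entry above follows the same three-argument shape, so a plugin-style registration of a custom aggregation only needs the new class names. A minimal sketch, assuming a hypothetical MyAggregationBuilder that follows the same conventions as the builders in this list (a StreamInput constructor, a static parse method, and an AGGREGATION_NAME_FIELD constant):

--------------------------------
// Hypothetical example, not part of this commit: MyAggregationBuilder is assumed
// to define a stream constructor, a static parse method and a name field.
registerAggregation(MyAggregationBuilder::new,        // Writeable.Reader<MyAggregationBuilder>
        MyAggregationBuilder::parse,                  // Aggregator.Parser
        MyAggregationBuilder.AGGREGATION_NAME_FIELD);
--------------------------------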
@ -650,8 +655,7 @@ public class SearchModule extends AbstractModule {
registerQuery(MatchAllQueryBuilder::new, MatchAllQueryBuilder::fromXContent, MatchAllQueryBuilder.QUERY_NAME_FIELD);
registerQuery(QueryStringQueryBuilder::new, QueryStringQueryBuilder::fromXContent, QueryStringQueryBuilder.QUERY_NAME_FIELD);
registerQuery(BoostingQueryBuilder::new, BoostingQueryBuilder::fromXContent, BoostingQueryBuilder.QUERY_NAME_FIELD);
BooleanQuery.setMaxClauseCount(settings.getAsInt("index.query.bool.max_clause_count",
settings.getAsInt("indices.query.bool.max_clause_count", BooleanQuery.getMaxClauseCount())));
BooleanQuery.setMaxClauseCount(INDICES_MAX_CLAUSE_COUNT_SETTING.get(settings));
registerQuery(BoolQueryBuilder::new, BoolQueryBuilder::fromXContent, BoolQueryBuilder.QUERY_NAME_FIELD);
registerQuery(TermQueryBuilder::new, TermQueryBuilder::fromXContent, TermQueryBuilder.QUERY_NAME_FIELD);
registerQuery(TermsQueryBuilder::new, TermsQueryBuilder::fromXContent, TermsQueryBuilder.QUERY_NAME_FIELD);
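The replacement line reads the clause limit through a registered setting constant instead of two raw settings.getAsInt lookups with a fallback key. The stray fragment at the top of this section (1024, 1, Integer.MAX_VALUE, Setting.Property.NodeScope) is consistent with a declaration along these lines; treat this as a sketch rather than the verbatim source:

--------------------------------
// Node-scope integer setting: default 1024, minimum 1, maximum Integer.MAX_VALUE.
public static final Setting<Integer> INDICES_MAX_CLAUSE_COUNT_SETTING =
        Setting.intSetting("indices.query.bool.max_clause_count",
                1024, 1, Integer.MAX_VALUE, Setting.Property.NodeScope);
--------------------------------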
@ -36,7 +36,9 @@ import java.util.Objects;
/**
* A factory that knows how to create an {@link Aggregator} of a specific type.
*/
public abstract class AggregatorBuilder<AB extends AggregatorBuilder<AB>> extends ToXContentToBytes implements NamedWriteable, ToXContent {
public abstract class AggregationBuilder<AB extends AggregationBuilder<AB>>
extends ToXContentToBytes
implements NamedWriteable, ToXContent {

protected String name;
protected Type type;

@ -44,12 +46,12 @@ public abstract class AggregatorBuilder<AB extends AggregatorBuilder<AB>> extend
protected Map<String, Object> metaData;

/**
* Constructs a new aggregator factory.
* Constructs a new aggregation builder.
*
* @param name The aggregation name
* @param type The aggregation type
*/
public AggregatorBuilder(String name, Type type) {
public AggregationBuilder(String name, Type type) {
if (name == null) {
throw new IllegalArgumentException("[name] must not be null: [" + name + "]");
}

@ -63,7 +65,7 @@ public abstract class AggregatorBuilder<AB extends AggregatorBuilder<AB>> extend
/**
* Read from a stream.
*/
protected AggregatorBuilder(StreamInput in, Type type) throws IOException {
protected AggregationBuilder(StreamInput in, Type type) throws IOException {
name = in.readString();
this.type = type;
factoriesBuilder = new AggregatorFactories.Builder(in);

@ -84,7 +86,7 @@ public abstract class AggregatorBuilder<AB extends AggregatorBuilder<AB>> extend
* Add a sub aggregation to this aggregation.
*/
@SuppressWarnings("unchecked")
public AB subAggregation(AggregatorBuilder<?> aggregation) {
public AB subAggregation(AggregationBuilder<?> aggregation) {
if (aggregation == null) {
throw new IllegalArgumentException("[aggregation] must not be null: [" + name + "]");
}

@ -178,7 +180,7 @@ public abstract class AggregatorBuilder<AB extends AggregatorBuilder<AB>> extend
if (getClass() != obj.getClass())
return false;
@SuppressWarnings("unchecked")
AggregatorBuilder<AB> other = (AggregatorBuilder<AB>) obj;
AggregationBuilder<AB> other = (AggregationBuilder<AB>) obj;
if (!Objects.equals(name, other.name))
return false;
if (!Objects.equals(type, other.type))
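Apart from the rename, subAggregation keeps its contract: it accepts any AggregationBuilder<?> and returns the concrete builder type AB, so nesting still chains fluently. A usage sketch (the field names are illustrative, not part of this commit):

--------------------------------
// Builds a terms aggregation with an average sub-aggregation.
AggregationBuilder<?> byCategory = AggregationBuilders.terms("by_category")
        .field("category")
        .subAggregation(AggregationBuilders.avg("avg_price").field("price"));
--------------------------------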
@ -22,65 +22,65 @@ import org.elasticsearch.common.geo.GeoDistance;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.search.aggregations.bucket.children.Children;
import org.elasticsearch.search.aggregations.bucket.children.ChildrenAggregatorBuilder;
import org.elasticsearch.search.aggregations.bucket.children.ChildrenAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.filter.Filter;
import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregatorBuilder;
import org.elasticsearch.search.aggregations.bucket.filter.FilterAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.filters.Filters;
import org.elasticsearch.search.aggregations.bucket.filters.FiltersAggregator.KeyedFilter;
import org.elasticsearch.search.aggregations.bucket.filters.FiltersAggregatorBuilder;
import org.elasticsearch.search.aggregations.bucket.geogrid.GeoGridAggregatorBuilder;
import org.elasticsearch.search.aggregations.bucket.filters.FiltersAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.geogrid.GeoGridAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.geogrid.GeoHashGrid;
import org.elasticsearch.search.aggregations.bucket.global.Global;
import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregatorBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregatorBuilder;
import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregatorBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.missing.Missing;
import org.elasticsearch.search.aggregations.bucket.missing.MissingAggregatorBuilder;
import org.elasticsearch.search.aggregations.bucket.missing.MissingAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.nested.Nested;
import org.elasticsearch.search.aggregations.bucket.nested.NestedAggregatorBuilder;
import org.elasticsearch.search.aggregations.bucket.nested.NestedAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.nested.ReverseNested;
import org.elasticsearch.search.aggregations.bucket.nested.ReverseNestedAggregatorBuilder;
import org.elasticsearch.search.aggregations.bucket.nested.ReverseNestedAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.range.Range;
import org.elasticsearch.search.aggregations.bucket.range.RangeAggregatorBuilder;
import org.elasticsearch.search.aggregations.bucket.range.date.DateRangeAggregatorBuilder;
import org.elasticsearch.search.aggregations.bucket.range.geodistance.GeoDistanceAggregatorBuilder;
import org.elasticsearch.search.aggregations.bucket.range.ip.IpRangeAggregatorBuilder;
import org.elasticsearch.search.aggregations.bucket.sampler.DiversifiedAggregatorBuilder;
import org.elasticsearch.search.aggregations.bucket.range.RangeAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.range.date.DateRangeAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.range.geodistance.GeoDistanceAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.range.ip.IpRangeAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.sampler.DiversifiedAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.sampler.Sampler;
import org.elasticsearch.search.aggregations.bucket.sampler.SamplerAggregatorBuilder;
import org.elasticsearch.search.aggregations.bucket.sampler.SamplerAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.significant.SignificantTerms;
import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsAggregatorBuilder;
import org.elasticsearch.search.aggregations.bucket.significant.SignificantTermsAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregatorBuilder;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.avg.Avg;
import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregatorBuilder;
import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.cardinality.Cardinality;
import org.elasticsearch.search.aggregations.metrics.cardinality.CardinalityAggregatorBuilder;
import org.elasticsearch.search.aggregations.metrics.cardinality.CardinalityAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.geobounds.GeoBounds;
import org.elasticsearch.search.aggregations.metrics.geobounds.GeoBoundsAggregatorBuilder;
import org.elasticsearch.search.aggregations.metrics.geobounds.GeoBoundsAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.geocentroid.GeoCentroid;
import org.elasticsearch.search.aggregations.metrics.geocentroid.GeoCentroidAggregatorBuilder;
import org.elasticsearch.search.aggregations.metrics.geocentroid.GeoCentroidAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.max.Max;
import org.elasticsearch.search.aggregations.metrics.max.MaxAggregatorBuilder;
import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.min.Min;
import org.elasticsearch.search.aggregations.metrics.min.MinAggregatorBuilder;
import org.elasticsearch.search.aggregations.metrics.min.MinAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanks;
import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanksAggregatorBuilder;
import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanksAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.percentiles.Percentiles;
import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesAggregatorBuilder;
import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.scripted.ScriptedMetric;
import org.elasticsearch.search.aggregations.metrics.scripted.ScriptedMetricAggregatorBuilder;
import org.elasticsearch.search.aggregations.metrics.scripted.ScriptedMetricAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.stats.Stats;
import org.elasticsearch.search.aggregations.metrics.stats.StatsAggregatorBuilder;
import org.elasticsearch.search.aggregations.metrics.stats.StatsAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStats;
import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStatsAggregatorBuilder;
import org.elasticsearch.search.aggregations.metrics.stats.extended.ExtendedStatsAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.sum.Sum;
import org.elasticsearch.search.aggregations.metrics.sum.SumAggregatorBuilder;
import org.elasticsearch.search.aggregations.metrics.sum.SumAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.tophits.TopHits;
import org.elasticsearch.search.aggregations.metrics.tophits.TopHitsAggregatorBuilder;
import org.elasticsearch.search.aggregations.metrics.tophits.TopHitsAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCount;
import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCountAggregatorBuilder;
import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCountAggregationBuilder;

/**
* Utility class to create aggregations.
@ -93,234 +93,234 @@ public class AggregationBuilders {
/**
* Create a new {@link ValueCount} aggregation with the given name.
*/
public static ValueCountAggregatorBuilder count(String name) {
return new ValueCountAggregatorBuilder(name, null);
public static ValueCountAggregationBuilder count(String name) {
return new ValueCountAggregationBuilder(name, null);
}

/**
* Create a new {@link Avg} aggregation with the given name.
*/
public static AvgAggregatorBuilder avg(String name) {
return new AvgAggregatorBuilder(name);
public static AvgAggregationBuilder avg(String name) {
return new AvgAggregationBuilder(name);
}

/**
* Create a new {@link Max} aggregation with the given name.
*/
public static MaxAggregatorBuilder max(String name) {
return new MaxAggregatorBuilder(name);
public static MaxAggregationBuilder max(String name) {
return new MaxAggregationBuilder(name);
}

/**
* Create a new {@link Min} aggregation with the given name.
*/
public static MinAggregatorBuilder min(String name) {
return new MinAggregatorBuilder(name);
public static MinAggregationBuilder min(String name) {
return new MinAggregationBuilder(name);
}

/**
* Create a new {@link Sum} aggregation with the given name.
*/
public static SumAggregatorBuilder sum(String name) {
return new SumAggregatorBuilder(name);
public static SumAggregationBuilder sum(String name) {
return new SumAggregationBuilder(name);
}

/**
* Create a new {@link Stats} aggregation with the given name.
*/
public static StatsAggregatorBuilder stats(String name) {
return new StatsAggregatorBuilder(name);
public static StatsAggregationBuilder stats(String name) {
return new StatsAggregationBuilder(name);
}

/**
* Create a new {@link ExtendedStats} aggregation with the given name.
*/
public static ExtendedStatsAggregatorBuilder extendedStats(String name) {
return new ExtendedStatsAggregatorBuilder(name);
public static ExtendedStatsAggregationBuilder extendedStats(String name) {
return new ExtendedStatsAggregationBuilder(name);
}

/**
* Create a new {@link Filter} aggregation with the given name.
*/
public static FilterAggregatorBuilder filter(String name, QueryBuilder filter) {
return new FilterAggregatorBuilder(name, filter);
public static FilterAggregationBuilder filter(String name, QueryBuilder filter) {
return new FilterAggregationBuilder(name, filter);
}

/**
* Create a new {@link Filters} aggregation with the given name.
*/
public static FiltersAggregatorBuilder filters(String name, KeyedFilter... filters) {
return new FiltersAggregatorBuilder(name, filters);
public static FiltersAggregationBuilder filters(String name, KeyedFilter... filters) {
return new FiltersAggregationBuilder(name, filters);
}

/**
* Create a new {@link Filters} aggregation with the given name.
*/
public static FiltersAggregatorBuilder filters(String name, QueryBuilder... filters) {
return new FiltersAggregatorBuilder(name, filters);
public static FiltersAggregationBuilder filters(String name, QueryBuilder... filters) {
return new FiltersAggregationBuilder(name, filters);
}

/**
* Create a new {@link Sampler} aggregation with the given name.
*/
public static SamplerAggregatorBuilder sampler(String name) {
return new SamplerAggregatorBuilder(name);
public static SamplerAggregationBuilder sampler(String name) {
return new SamplerAggregationBuilder(name);
}

/**
* Create a new {@link Sampler} aggregation with the given name.
*/
public static DiversifiedAggregatorBuilder diversifiedSampler(String name) {
return new DiversifiedAggregatorBuilder(name);
public static DiversifiedAggregationBuilder diversifiedSampler(String name) {
return new DiversifiedAggregationBuilder(name);
}

/**
* Create a new {@link Global} aggregation with the given name.
*/
public static GlobalAggregatorBuilder global(String name) {
return new GlobalAggregatorBuilder(name);
public static GlobalAggregationBuilder global(String name) {
return new GlobalAggregationBuilder(name);
}

/**
* Create a new {@link Missing} aggregation with the given name.
*/
public static MissingAggregatorBuilder missing(String name) {
return new MissingAggregatorBuilder(name, null);
public static MissingAggregationBuilder missing(String name) {
return new MissingAggregationBuilder(name, null);
}

/**
* Create a new {@link Nested} aggregation with the given name.
*/
public static NestedAggregatorBuilder nested(String name, String path) {
return new NestedAggregatorBuilder(name, path);
public static NestedAggregationBuilder nested(String name, String path) {
return new NestedAggregationBuilder(name, path);
}

/**
* Create a new {@link ReverseNested} aggregation with the given name.
*/
public static ReverseNestedAggregatorBuilder reverseNested(String name) {
return new ReverseNestedAggregatorBuilder(name);
public static ReverseNestedAggregationBuilder reverseNested(String name) {
return new ReverseNestedAggregationBuilder(name);
}

/**
* Create a new {@link Children} aggregation with the given name.
*/
public static ChildrenAggregatorBuilder children(String name, String childType) {
return new ChildrenAggregatorBuilder(name, childType);
public static ChildrenAggregationBuilder children(String name, String childType) {
return new ChildrenAggregationBuilder(name, childType);
}

/**
* Create a new {@link GeoDistance} aggregation with the given name.
*/
public static GeoDistanceAggregatorBuilder geoDistance(String name, GeoPoint origin) {
return new GeoDistanceAggregatorBuilder(name, origin);
public static GeoDistanceAggregationBuilder geoDistance(String name, GeoPoint origin) {
return new GeoDistanceAggregationBuilder(name, origin);
}

/**
* Create a new {@link Histogram} aggregation with the given name.
*/
public static HistogramAggregatorBuilder histogram(String name) {
return new HistogramAggregatorBuilder(name);
public static HistogramAggregationBuilder histogram(String name) {
return new HistogramAggregationBuilder(name);
}

/**
* Create a new {@link GeoHashGrid} aggregation with the given name.
*/
public static GeoGridAggregatorBuilder geohashGrid(String name) {
return new GeoGridAggregatorBuilder(name);
public static GeoGridAggregationBuilder geohashGrid(String name) {
return new GeoGridAggregationBuilder(name);
}

/**
* Create a new {@link SignificantTerms} aggregation with the given name.
*/
public static SignificantTermsAggregatorBuilder significantTerms(String name) {
return new SignificantTermsAggregatorBuilder(name, null);
public static SignificantTermsAggregationBuilder significantTerms(String name) {
return new SignificantTermsAggregationBuilder(name, null);
}

/**
* Create a new {@link DateHistogramAggregatorBuilder} aggregation with the given
* Create a new {@link DateHistogramAggregationBuilder} aggregation with the given
* name.
*/
public static DateHistogramAggregatorBuilder dateHistogram(String name) {
return new DateHistogramAggregatorBuilder(name);
public static DateHistogramAggregationBuilder dateHistogram(String name) {
return new DateHistogramAggregationBuilder(name);
}

/**
* Create a new {@link Range} aggregation with the given name.
*/
public static RangeAggregatorBuilder range(String name) {
return new RangeAggregatorBuilder(name);
public static RangeAggregationBuilder range(String name) {
return new RangeAggregationBuilder(name);
}

/**
* Create a new {@link DateRangeAggregatorBuilder} aggregation with the
* Create a new {@link DateRangeAggregationBuilder} aggregation with the
* given name.
*/
public static DateRangeAggregatorBuilder dateRange(String name) {
return new DateRangeAggregatorBuilder(name);
public static DateRangeAggregationBuilder dateRange(String name) {
return new DateRangeAggregationBuilder(name);
}

/**
* Create a new {@link IpRangeAggregatorBuilder} aggregation with the
* Create a new {@link IpRangeAggregationBuilder} aggregation with the
* given name.
*/
public static IpRangeAggregatorBuilder ipRange(String name) {
return new IpRangeAggregatorBuilder(name);
public static IpRangeAggregationBuilder ipRange(String name) {
return new IpRangeAggregationBuilder(name);
}

/**
* Create a new {@link Terms} aggregation with the given name.
*/
public static TermsAggregatorBuilder terms(String name) {
return new TermsAggregatorBuilder(name, null);
public static TermsAggregationBuilder terms(String name) {
return new TermsAggregationBuilder(name, null);
}

/**
* Create a new {@link Percentiles} aggregation with the given name.
*/
public static PercentilesAggregatorBuilder percentiles(String name) {
return new PercentilesAggregatorBuilder(name);
public static PercentilesAggregationBuilder percentiles(String name) {
return new PercentilesAggregationBuilder(name);
}

/**
* Create a new {@link PercentileRanks} aggregation with the given name.
*/
public static PercentileRanksAggregatorBuilder percentileRanks(String name) {
return new PercentileRanksAggregatorBuilder(name);
public static PercentileRanksAggregationBuilder percentileRanks(String name) {
return new PercentileRanksAggregationBuilder(name);
}

/**
* Create a new {@link Cardinality} aggregation with the given name.
*/
public static CardinalityAggregatorBuilder cardinality(String name) {
return new CardinalityAggregatorBuilder(name, null);
public static CardinalityAggregationBuilder cardinality(String name) {
return new CardinalityAggregationBuilder(name, null);
}

/**
* Create a new {@link TopHits} aggregation with the given name.
*/
public static TopHitsAggregatorBuilder topHits(String name) {
return new TopHitsAggregatorBuilder(name);
public static TopHitsAggregationBuilder topHits(String name) {
return new TopHitsAggregationBuilder(name);
}

/**
* Create a new {@link GeoBounds} aggregation with the given name.
*/
public static GeoBoundsAggregatorBuilder geoBounds(String name) {
return new GeoBoundsAggregatorBuilder(name);
public static GeoBoundsAggregationBuilder geoBounds(String name) {
return new GeoBoundsAggregationBuilder(name);
}

/**
* Create a new {@link GeoCentroid} aggregation with the given name.
*/
public static GeoCentroidAggregatorBuilder geoCentroid(String name) {
return new GeoCentroidAggregatorBuilder(name);
public static GeoCentroidAggregationBuilder geoCentroid(String name) {
return new GeoCentroidAggregationBuilder(name);
}

/**
* Create a new {@link ScriptedMetric} aggregation with the given name.
*/
public static ScriptedMetricAggregatorBuilder scriptedMetric(String name) {
return new ScriptedMetricAggregatorBuilder(name);
public static ScriptedMetricAggregationBuilder scriptedMetric(String name) {
return new ScriptedMetricAggregationBuilder(name);
}
}
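These static factories are the usual entry point for client code, so most callers pick up the rename transparently. A short sketch of composing a request with them, assuming SearchSourceBuilder#aggregation accepts the renamed type (index field names are illustrative):

--------------------------------
SearchSourceBuilder source = new SearchSourceBuilder()
        .aggregation(AggregationBuilders.terms("genres").field("genre")
                .subAggregation(AggregationBuilders.max("top_rating").field("rating")));
--------------------------------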
@ -42,7 +42,7 @@ public abstract class Aggregator extends BucketCollector implements Releasable {
/**
* Parses the aggregation request and creates the appropriate aggregator factory for it.
*
* @see AggregatorBuilder
* @see AggregationBuilder
*/
@FunctionalInterface
public interface Parser {

@ -55,7 +55,7 @@ public abstract class Aggregator extends BucketCollector implements Releasable {
* @return The resolved aggregator factory or {@code null} in case the aggregation should be skipped
* @throws java.io.IOException When parsing fails
*/
AggregatorBuilder<?> parse(String aggregationName, QueryParseContext context) throws IOException;
AggregationBuilder<?> parse(String aggregationName, QueryParseContext context) throws IOException;
}

/**
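Because Parser is a @FunctionalInterface around this single parse method, registrations can pass a parser object (new AvgParser()) or a static method reference (GlobalAggregationBuilder::parse), as configureAggs() does above. An inline lambda satisfying the same contract would also work; a purely illustrative sketch:

--------------------------------
// Illustrative only: ignores the request body and always builds a global aggregation.
Aggregator.Parser parser = (aggregationName, context) -> {
    // a real parser would consume the aggregation body via context.parser()
    return new GlobalAggregationBuilder(aggregationName);
};
--------------------------------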
@ -124,7 +124,7 @@ public class AggregatorFactories {

public static class Builder extends ToXContentToBytes implements Writeable {
private final Set<String> names = new HashSet<>();
private final List<AggregatorBuilder<?>> aggregatorBuilders = new ArrayList<>();
private final List<AggregationBuilder<?>> aggregationBuilders = new ArrayList<>();
private final List<PipelineAggregatorBuilder<?>> pipelineAggregatorBuilders = new ArrayList<>();
private boolean skipResolveOrder;

@ -140,7 +140,7 @@ public class AggregatorFactories {
public Builder(StreamInput in) throws IOException {
int factoriesSize = in.readVInt();
for (int i = 0; i < factoriesSize; i++) {
addAggregator(in.readNamedWriteable(AggregatorBuilder.class));
addAggregator(in.readNamedWriteable(AggregationBuilder.class));
}
int pipelineFactoriesSize = in.readVInt();
for (int i = 0; i < pipelineFactoriesSize; i++) {

@ -150,8 +150,8 @@ public class AggregatorFactories {

@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeVInt(this.aggregatorBuilders.size());
for (AggregatorBuilder<?> factory : aggregatorBuilders) {
out.writeVInt(this.aggregationBuilders.size());
for (AggregationBuilder<?> factory : aggregationBuilders) {
out.writeNamedWriteable(factory);
}
out.writeVInt(this.pipelineAggregatorBuilders.size());

@ -164,11 +164,11 @@ public class AggregatorFactories {
throw new UnsupportedOperationException("This needs to be removed");
}

public Builder addAggregator(AggregatorBuilder<?> factory) {
public Builder addAggregator(AggregationBuilder<?> factory) {
if (!names.add(factory.name)) {
throw new IllegalArgumentException("Two sibling aggregations cannot have the same name: [" + factory.name + "]");
}
aggregatorBuilders.add(factory);
aggregationBuilders.add(factory);
return this;
}
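addAggregator tracks sibling names in the names set, so a duplicate is rejected when the request is composed rather than when it executes. For example:

--------------------------------
AggregatorFactories.Builder builder = new AggregatorFactories.Builder();
builder.addAggregator(AggregationBuilders.avg("price"));
// throws IllegalArgumentException: two sibling aggregations share the name "price"
builder.addAggregator(AggregationBuilders.max("price"));
--------------------------------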
@ -186,30 +186,30 @@ public class AggregatorFactories {
}

public AggregatorFactories build(AggregationContext context, AggregatorFactory<?> parent) throws IOException {
if (aggregatorBuilders.isEmpty() && pipelineAggregatorBuilders.isEmpty()) {
if (aggregationBuilders.isEmpty() && pipelineAggregatorBuilders.isEmpty()) {
return EMPTY;
}
List<PipelineAggregatorBuilder<?>> orderedpipelineAggregators = null;
if (skipResolveOrder) {
orderedpipelineAggregators = new ArrayList<>(pipelineAggregatorBuilders);
} else {
orderedpipelineAggregators = resolvePipelineAggregatorOrder(this.pipelineAggregatorBuilders, this.aggregatorBuilders);
orderedpipelineAggregators = resolvePipelineAggregatorOrder(this.pipelineAggregatorBuilders, this.aggregationBuilders);
}
AggregatorFactory<?>[] aggFactories = new AggregatorFactory<?>[aggregatorBuilders.size()];
for (int i = 0; i < aggregatorBuilders.size(); i++) {
aggFactories[i] = aggregatorBuilders.get(i).build(context, parent);
AggregatorFactory<?>[] aggFactories = new AggregatorFactory<?>[aggregationBuilders.size()];
for (int i = 0; i < aggregationBuilders.size(); i++) {
aggFactories[i] = aggregationBuilders.get(i).build(context, parent);
}
return new AggregatorFactories(parent, aggFactories, orderedpipelineAggregators);
}

private List<PipelineAggregatorBuilder<?>> resolvePipelineAggregatorOrder(
List<PipelineAggregatorBuilder<?>> pipelineAggregatorBuilders, List<AggregatorBuilder<?>> aggBuilders) {
List<PipelineAggregatorBuilder<?>> pipelineAggregatorBuilders, List<AggregationBuilder<?>> aggBuilders) {
Map<String, PipelineAggregatorBuilder<?>> pipelineAggregatorBuildersMap = new HashMap<>();
for (PipelineAggregatorBuilder<?> builder : pipelineAggregatorBuilders) {
pipelineAggregatorBuildersMap.put(builder.getName(), builder);
}
Map<String, AggregatorBuilder<?>> aggBuildersMap = new HashMap<>();
for (AggregatorBuilder<?> aggBuilder : aggBuilders) {
Map<String, AggregationBuilder<?>> aggBuildersMap = new HashMap<>();
for (AggregationBuilder<?> aggBuilder : aggBuilders) {
aggBuildersMap.put(aggBuilder.name, aggBuilder);
}
List<PipelineAggregatorBuilder<?>> orderedPipelineAggregatorrs = new LinkedList<>();

@ -223,7 +223,7 @@ public class AggregatorFactories {
return orderedPipelineAggregatorrs;
}

private void resolvePipelineAggregatorOrder(Map<String, AggregatorBuilder<?>> aggBuildersMap,
private void resolvePipelineAggregatorOrder(Map<String, AggregationBuilder<?>> aggBuildersMap,
Map<String, PipelineAggregatorBuilder<?>> pipelineAggregatorBuildersMap,
List<PipelineAggregatorBuilder<?>> orderedPipelineAggregators, List<PipelineAggregatorBuilder<?>> unmarkedBuilders,
Set<PipelineAggregatorBuilder<?>> temporarilyMarked, PipelineAggregatorBuilder<?> builder) {
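resolvePipelineAggregatorOrder is a depth-first topological sort over buckets_path references; temporarilyMarked is the classic temporary mark that detects cycles. Stripped of the aggregation types, the underlying pattern looks like this (a generic sketch, not the ES code):

--------------------------------
import java.util.*;

class TopoSortSketch {
    // DFS topological ordering with cycle detection via temporary marks.
    static <T> void visit(T node, Map<T, List<T>> deps, Set<T> tempMarked,
            Set<T> done, List<T> ordered) {
        if (done.contains(node)) {
            return;                     // already emitted
        }
        if (!tempMarked.add(node)) {
            throw new IllegalStateException("cyclic dependency on [" + node + "]");
        }
        for (T dep : deps.getOrDefault(node, Collections.<T>emptyList())) {
            visit(dep, deps, tempMarked, done, ordered);
        }
        tempMarked.remove(node);
        done.add(node);
        ordered.add(node);              // dependencies first, then the node itself
    }
}
--------------------------------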
@ -238,7 +238,7 @@ public class AggregatorFactories {
if (bucketsPath.equals("_count") || bucketsPath.equals("_key")) {
continue;
} else if (aggBuildersMap.containsKey(firstAggName)) {
AggregatorBuilder<?> aggBuilder = aggBuildersMap.get(firstAggName);
AggregationBuilder<?> aggBuilder = aggBuildersMap.get(firstAggName);
for (int i = 1; i < bucketsPathElements.size(); i++) {
PathElement pathElement = bucketsPathElements.get(i);
String aggName = pathElement.name;

@ -247,9 +247,9 @@ public class AggregatorFactories {
} else {
// Check the non-pipeline sub-aggregator
// factories
AggregatorBuilder<?>[] subBuilders = aggBuilder.factoriesBuilder.getAggregatorFactories();
AggregationBuilder<?>[] subBuilders = aggBuilder.factoriesBuilder.getAggregatorFactories();
boolean foundSubBuilder = false;
for (AggregatorBuilder<?> subBuilder : subBuilders) {
for (AggregationBuilder<?> subBuilder : subBuilders) {
if (aggName.equals(subBuilder.name)) {
aggBuilder = subBuilder;
foundSubBuilder = true;

@ -289,8 +289,8 @@ public class AggregatorFactories {
}
}

AggregatorBuilder<?>[] getAggregatorFactories() {
return this.aggregatorBuilders.toArray(new AggregatorBuilder<?>[this.aggregatorBuilders.size()]);
AggregationBuilder<?>[] getAggregatorFactories() {
return this.aggregationBuilders.toArray(new AggregationBuilder<?>[this.aggregationBuilders.size()]);
}

List<PipelineAggregatorBuilder<?>> getPipelineAggregatorFactories() {

@ -298,14 +298,14 @@ public class AggregatorFactories {
}

public int count() {
return aggregatorBuilders.size() + pipelineAggregatorBuilders.size();
return aggregationBuilders.size() + pipelineAggregatorBuilders.size();
}

@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
if (aggregatorBuilders != null) {
for (AggregatorBuilder<?> subAgg : aggregatorBuilders) {
if (aggregationBuilders != null) {
for (AggregationBuilder<?> subAgg : aggregationBuilders) {
subAgg.toXContent(builder, params);
}
}

@ -320,7 +320,7 @@ public class AggregatorFactories {

@Override
public int hashCode() {
return Objects.hash(aggregatorBuilders, pipelineAggregatorBuilders);
return Objects.hash(aggregationBuilders, pipelineAggregatorBuilders);
}

@Override

@ -330,7 +330,7 @@ public class AggregatorFactories {
if (getClass() != obj.getClass())
return false;
Builder other = (Builder) obj;
if (!Objects.equals(aggregatorBuilders, other.aggregatorBuilders))
if (!Objects.equals(aggregationBuilders, other.aggregationBuilders))
return false;
if (!Objects.equals(pipelineAggregatorBuilders, other.pipelineAggregatorBuilders))
return false;

@ -104,7 +104,7 @@ public class AggregatorParsers {
+ token + "], expected a [" + XContentParser.Token.START_OBJECT + "].");
}

AggregatorBuilder<?> aggFactory = null;
AggregationBuilder<?> aggFactory = null;
PipelineAggregatorBuilder<?> pipelineAggregatorFactory = null;
AggregatorFactories.Builder subFactories = null;
@ -36,7 +36,7 @@ import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.FieldContext;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSource.Bytes.ParentChild;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;

@ -44,7 +44,7 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceType;
import java.io.IOException;
import java.util.Objects;

public class ChildrenAggregatorBuilder extends ValuesSourceAggregatorBuilder<ParentChild, ChildrenAggregatorBuilder> {
public class ChildrenAggregationBuilder extends ValuesSourceAggregationBuilder<ParentChild, ChildrenAggregationBuilder> {
public static final String NAME = InternalChildren.TYPE.name();
public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME);

@ -59,7 +59,7 @@ public class ChildrenAggregatorBuilder extends ValuesSourceAggregatorBuilder<Par
* @param childType
* the type of children documents
*/
public ChildrenAggregatorBuilder(String name, String childType) {
public ChildrenAggregationBuilder(String name, String childType) {
super(name, InternalChildren.TYPE, ValuesSourceType.BYTES, ValueType.STRING);
if (childType == null) {
throw new IllegalArgumentException("[childType] must not be null: [" + name + "]");

@ -70,7 +70,7 @@ public class ChildrenAggregatorBuilder extends ValuesSourceAggregatorBuilder<Par
/**
* Read from a stream.
*/
public ChildrenAggregatorBuilder(StreamInput in) throws IOException {
public ChildrenAggregationBuilder(StreamInput in) throws IOException {
super(in, InternalChildren.TYPE, ValuesSourceType.BYTES, ValueType.STRING);
childType = in.readString();
}

@ -121,7 +121,7 @@ public class ChildrenAggregatorBuilder extends ValuesSourceAggregatorBuilder<Par
return builder;
}

public static ChildrenAggregatorBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
public static ChildrenAggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
String childType = null;

XContentParser.Token token;

@ -148,7 +148,7 @@ public class ChildrenAggregatorBuilder extends ValuesSourceAggregatorBuilder<Par
}

return new ChildrenAggregatorBuilder(aggregationName, childType);
return new ChildrenAggregationBuilder(aggregationName, childType);
}

@Override

@ -158,7 +158,7 @@ public class ChildrenAggregatorBuilder extends ValuesSourceAggregatorBuilder<Par

@Override
protected boolean innerEquals(Object obj) {
ChildrenAggregatorBuilder other = (ChildrenAggregatorBuilder) obj;
ChildrenAggregationBuilder other = (ChildrenAggregationBuilder) obj;
return Objects.equals(childType, other.childType);
}

@ -166,4 +166,4 @@ public class ChildrenAggregatorBuilder extends ValuesSourceAggregatorBuilder<Par
public String getWriteableName() {
return NAME;
}
}
}
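With the rename, the children factory in AggregationBuilders wires up the parent/child join by child type; a usage sketch (the type and field names are illustrative):

--------------------------------
// Aggregate over "answer" children of the matched parents, then bucket their tags.
ChildrenAggregationBuilder toAnswers = AggregationBuilders.children("to_answers", "answer")
        .subAggregation(AggregationBuilders.terms("top_tags").field("tag"));
--------------------------------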
@ -24,12 +24,11 @@ import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.EmptyQueryBuilder;
import org.elasticsearch.index.query.MatchAllQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.aggregations.AggregatorBuilder;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.support.AggregationContext;

@ -37,7 +36,7 @@ import org.elasticsearch.search.aggregations.support.AggregationContext;
import java.io.IOException;
import java.util.Objects;

public class FilterAggregatorBuilder extends AggregatorBuilder<FilterAggregatorBuilder> {
public class FilterAggregationBuilder extends AggregationBuilder<FilterAggregationBuilder> {
public static final String NAME = InternalFilter.TYPE.name();
public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME);

@ -51,7 +50,7 @@ public class FilterAggregatorBuilder extends AggregatorBuilder<FilterAggregatorB
* filter will fall into the bucket defined by this
* {@link Filter} aggregation.
*/
public FilterAggregatorBuilder(String name, QueryBuilder filter) {
public FilterAggregationBuilder(String name, QueryBuilder filter) {
super(name, InternalFilter.TYPE);
if (filter == null) {
throw new IllegalArgumentException("[filter] must not be null: [" + name + "]");

@ -66,7 +65,7 @@ public class FilterAggregatorBuilder extends AggregatorBuilder<FilterAggregatorB
/**
* Read from a stream.
*/
public FilterAggregatorBuilder(StreamInput in) throws IOException {
public FilterAggregationBuilder(StreamInput in) throws IOException {
super(in, InternalFilter.TYPE);
filter = in.readNamedWriteable(QueryBuilder.class);
}

@ -90,7 +89,7 @@ public class FilterAggregatorBuilder extends AggregatorBuilder<FilterAggregatorB
return builder;
}

public static FilterAggregatorBuilder parse(String aggregationName, QueryParseContext context)
public static FilterAggregationBuilder parse(String aggregationName, QueryParseContext context)
throws IOException {
QueryBuilder filter = context.parseInnerQueryBuilder();

@ -98,7 +97,7 @@ public class FilterAggregatorBuilder extends AggregatorBuilder<FilterAggregatorB
throw new ParsingException(null, "filter cannot be null in filter aggregation [{}]", aggregationName);
}

return new FilterAggregatorBuilder(aggregationName, filter);
return new FilterAggregationBuilder(aggregationName, filter);
}

@ -109,7 +108,7 @@ public class FilterAggregatorBuilder extends AggregatorBuilder<FilterAggregatorB

@Override
protected boolean doEquals(Object obj) {
FilterAggregatorBuilder other = (FilterAggregatorBuilder) obj;
FilterAggregationBuilder other = (FilterAggregationBuilder) obj;
return Objects.equals(filter, other.filter);
}

@ -117,4 +116,4 @@ public class FilterAggregatorBuilder extends AggregatorBuilder<FilterAggregatorB
public String getWriteableName() {
return NAME;
}
}
}

@ -28,7 +28,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.aggregations.AggregatorBuilder;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.bucket.filters.FiltersAggregator.KeyedFilter;

@ -43,7 +43,7 @@ import java.util.Objects;

import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;

public class FiltersAggregatorBuilder extends AggregatorBuilder<FiltersAggregatorBuilder> {
public class FiltersAggregationBuilder extends AggregationBuilder<FiltersAggregationBuilder> {
public static final String NAME = InternalFilters.TYPE.name();
public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME);

@ -62,11 +62,11 @@ public class FiltersAggregatorBuilder extends AggregatorBuilder<FiltersAggregato
* @param filters
* the KeyedFilters to use with this aggregation.
*/
public FiltersAggregatorBuilder(String name, KeyedFilter... filters) {
public FiltersAggregationBuilder(String name, KeyedFilter... filters) {
this(name, Arrays.asList(filters));
}

private FiltersAggregatorBuilder(String name, List<KeyedFilter> filters) {
private FiltersAggregationBuilder(String name, List<KeyedFilter> filters) {
super(name, InternalFilters.TYPE);
// internally we want to have a fixed order of filters, regardless of the order of the filters in the request
this.filters = new ArrayList<>(filters);

@ -80,7 +80,7 @@ public class FiltersAggregatorBuilder extends AggregatorBuilder<FiltersAggregato
* @param filters
* the filters to use with this aggregation
*/
public FiltersAggregatorBuilder(String name, QueryBuilder... filters) {
public FiltersAggregationBuilder(String name, QueryBuilder... filters) {
super(name, InternalFilters.TYPE);
List<KeyedFilter> keyedFilters = new ArrayList<>(filters.length);
for (int i = 0; i < filters.length; i++) {

@ -93,7 +93,7 @@ public class FiltersAggregatorBuilder extends AggregatorBuilder<FiltersAggregato
/**
* Read from a stream.
*/
public FiltersAggregatorBuilder(StreamInput in) throws IOException {
public FiltersAggregationBuilder(StreamInput in) throws IOException {
super(in, InternalFilters.TYPE);
keyed = in.readBoolean();
int filtersSize = in.readVInt();

@ -131,7 +131,7 @@ public class FiltersAggregatorBuilder extends AggregatorBuilder<FiltersAggregato
/**
* Set whether to include a bucket for documents not matching any filter
*/
public FiltersAggregatorBuilder otherBucket(boolean otherBucket) {
public FiltersAggregationBuilder otherBucket(boolean otherBucket) {
this.otherBucket = otherBucket;
return this;
}

@ -154,7 +154,7 @@ public class FiltersAggregatorBuilder extends AggregatorBuilder<FiltersAggregato
* Set the key to use for the bucket for documents not matching any
* filter.
*/
public FiltersAggregatorBuilder otherBucketKey(String otherBucketKey) {
public FiltersAggregationBuilder otherBucketKey(String otherBucketKey) {
if (otherBucketKey == null) {
throw new IllegalArgumentException("[otherBucketKey] must not be null: [" + name + "]");
}

@ -199,7 +199,7 @@ public class FiltersAggregatorBuilder extends AggregatorBuilder<FiltersAggregato
return builder;
}

public static FiltersAggregatorBuilder parse(String aggregationName, QueryParseContext context)
public static FiltersAggregationBuilder parse(String aggregationName, QueryParseContext context)
throws IOException {
XContentParser parser = context.parser();

@ -264,12 +264,12 @@ public class FiltersAggregatorBuilder extends AggregatorBuilder<FiltersAggregato
otherBucketKey = "_other_";
}

FiltersAggregatorBuilder factory;
FiltersAggregationBuilder factory;
if (keyedFilters != null) {
factory = new FiltersAggregatorBuilder(aggregationName,
factory = new FiltersAggregationBuilder(aggregationName,
keyedFilters.toArray(new FiltersAggregator.KeyedFilter[keyedFilters.size()]));
} else {
factory = new FiltersAggregatorBuilder(aggregationName,
factory = new FiltersAggregationBuilder(aggregationName,
nonKeyedFilters.toArray(new QueryBuilder[nonKeyedFilters.size()]));
}
if (otherBucket != null) {
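The two parse branches above mirror the two public constructors: keyed filters yield named buckets, anonymous filters positional ones, with otherBucket/otherBucketKey controlling the catch-all bucket. A usage sketch with keyed filters (the query fields are illustrative):

--------------------------------
FiltersAggregationBuilder byLevel = new FiltersAggregationBuilder("by_level",
        new FiltersAggregator.KeyedFilter("errors", QueryBuilders.termQuery("level", "error")),
        new FiltersAggregator.KeyedFilter("warnings", QueryBuilders.termQuery("level", "warn")))
        .otherBucket(true)              // also count documents matching neither filter
        .otherBucketKey("everything_else");
--------------------------------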
@ -288,7 +288,7 @@ public class FiltersAggregatorBuilder extends AggregatorBuilder<FiltersAggregato

@Override
protected boolean doEquals(Object obj) {
FiltersAggregatorBuilder other = (FiltersAggregatorBuilder) obj;
FiltersAggregationBuilder other = (FiltersAggregationBuilder) obj;
return Objects.equals(filters, other.filters)
&& Objects.equals(keyed, other.keyed)
&& Objects.equals(otherBucket, other.otherBucket)

@ -299,4 +299,4 @@ public class FiltersAggregatorBuilder extends AggregatorBuilder<FiltersAggregato
public String getWriteableName() {
return NAME;
}
}
}

@ -37,7 +37,7 @@ import org.elasticsearch.search.aggregations.bucket.BucketUtils;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;

@ -45,7 +45,7 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceType;
import java.io.IOException;
import java.util.Objects;

public class GeoGridAggregatorBuilder extends ValuesSourceAggregatorBuilder<ValuesSource.GeoPoint, GeoGridAggregatorBuilder> {
public class GeoGridAggregationBuilder extends ValuesSourceAggregationBuilder<ValuesSource.GeoPoint, GeoGridAggregationBuilder> {
public static final String NAME = InternalGeoHashGrid.TYPE.name();
public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME);

@ -53,14 +53,14 @@ public class GeoGridAggregatorBuilder extends ValuesSourceAggregatorBuilder<Valu
private int requiredSize = GeoHashGridParser.DEFAULT_MAX_NUM_CELLS;
private int shardSize = -1;

public GeoGridAggregatorBuilder(String name) {
public GeoGridAggregationBuilder(String name) {
super(name, InternalGeoHashGrid.TYPE, ValuesSourceType.GEOPOINT, ValueType.GEOPOINT);
}

/**
* Read from a stream.
*/
public GeoGridAggregatorBuilder(StreamInput in) throws IOException {
public GeoGridAggregationBuilder(StreamInput in) throws IOException {
super(in, InternalGeoHashGrid.TYPE, ValuesSourceType.GEOPOINT, ValueType.GEOPOINT);
precision = in.readVInt();
requiredSize = in.readVInt();

@ -74,7 +74,7 @@ public class GeoGridAggregatorBuilder extends ValuesSourceAggregatorBuilder<Valu
out.writeVInt(shardSize);
}

public GeoGridAggregatorBuilder precision(int precision) {
public GeoGridAggregationBuilder precision(int precision) {
this.precision = GeoHashGridParams.checkPrecision(precision);
return this;
}

@ -83,7 +83,7 @@ public class GeoGridAggregatorBuilder extends ValuesSourceAggregatorBuilder<Valu
return precision;
}

public GeoGridAggregatorBuilder size(int size) {
public GeoGridAggregationBuilder size(int size) {
if (size < -1) {
throw new IllegalArgumentException(
"[size] must be greater than or equal to 0. Found [" + shardSize + "] in [" + name + "]");

@ -96,7 +96,7 @@ public class GeoGridAggregatorBuilder extends ValuesSourceAggregatorBuilder<Valu
return requiredSize;
}

public GeoGridAggregatorBuilder shardSize(int shardSize) {
public GeoGridAggregationBuilder shardSize(int shardSize) {
if (shardSize < -1) {
throw new IllegalArgumentException(
"[shardSize] must be greater than or equal to 0. Found [" + shardSize + "] in [" + name + "]");
@ -145,7 +145,7 @@ public class GeoGridAggregatorBuilder extends ValuesSourceAggregatorBuilder<Valu
|
|||
|
||||
@Override
|
||||
protected boolean innerEquals(Object obj) {
|
||||
GeoGridAggregatorBuilder other = (GeoGridAggregatorBuilder) obj;
|
||||
GeoGridAggregationBuilder other = (GeoGridAggregationBuilder) obj;
|
||||
if (precision != other.precision) {
|
||||
return false;
|
||||
}
|
|
@@ -46,10 +46,10 @@ public class GeoHashGridAggregator extends BucketsAggregator {

     private final int requiredSize;
     private final int shardSize;
-    private final GeoGridAggregatorBuilder.CellIdSource valuesSource;
+    private final GeoGridAggregationBuilder.CellIdSource valuesSource;
     private final LongHash bucketOrds;

-    public GeoHashGridAggregator(String name, AggregatorFactories factories, GeoGridAggregatorBuilder.CellIdSource valuesSource,
+    public GeoHashGridAggregator(String name, AggregatorFactories factories, GeoGridAggregationBuilder.CellIdSource valuesSource,
             int requiredSize, int shardSize, AggregationContext aggregationContext, Aggregator parent, List<PipelineAggregator> pipelineAggregators,
             Map<String, Object> metaData) throws IOException {
         super(name, factories, aggregationContext, parent, pipelineAggregators, metaData);
@@ -25,7 +25,7 @@ import org.elasticsearch.search.aggregations.AggregatorFactory;
 import org.elasticsearch.search.aggregations.InternalAggregation;
 import org.elasticsearch.search.aggregations.InternalAggregation.Type;
 import org.elasticsearch.search.aggregations.NonCollectingAggregator;
-import org.elasticsearch.search.aggregations.bucket.geogrid.GeoGridAggregatorBuilder.CellIdSource;
+import org.elasticsearch.search.aggregations.bucket.geogrid.GeoGridAggregationBuilder.CellIdSource;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 import org.elasticsearch.search.aggregations.support.AggregationContext;
 import org.elasticsearch.search.aggregations.support.ValuesSource;
@@ -45,10 +45,10 @@ public class GeoHashGridParser extends GeoPointValuesSourceParser {
     }

     @Override
-    protected GeoGridAggregatorBuilder createFactory(
+    protected GeoGridAggregationBuilder createFactory(
             String aggregationName, ValuesSourceType valuesSourceType,
             ValueType targetValueType, Map<ParseField, Object> otherOptions) {
-        GeoGridAggregatorBuilder factory = new GeoGridAggregatorBuilder(aggregationName);
+        GeoGridAggregationBuilder factory = new GeoGridAggregationBuilder(aggregationName);
         Integer precision = (Integer) otherOptions.get(GeoHashGridParams.FIELD_PRECISION);
         if (precision != null) {
             factory.precision(precision);
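Note: the geohash grid builder keeps its fluent setters under the new name. A minimal sketch built from the setters in the hunks above; field(String) is an assumed setter inherited from ValuesSourceAggregationBuilder, which this diff does not show:

--------------------------------
import org.elasticsearch.search.aggregations.bucket.geogrid.GeoGridAggregationBuilder;

GeoGridAggregationBuilder grid = new GeoGridAggregationBuilder("cells")
        .precision(5)     // validated by GeoHashGridParams.checkPrecision
        .size(1000)       // maximum number of buckets to return
        .shardSize(5000); // buckets collected per shard before reduction
grid.field("location");   // assumed inherited setter
--------------------------------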
@@ -24,25 +24,25 @@ import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.index.query.QueryParseContext;
-import org.elasticsearch.search.aggregations.AggregatorBuilder;
+import org.elasticsearch.search.aggregations.AggregationBuilder;
 import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
 import org.elasticsearch.search.aggregations.AggregatorFactory;
 import org.elasticsearch.search.aggregations.support.AggregationContext;

 import java.io.IOException;

-public class GlobalAggregatorBuilder extends AggregatorBuilder<GlobalAggregatorBuilder> {
+public class GlobalAggregationBuilder extends AggregationBuilder<GlobalAggregationBuilder> {
     public static final String NAME = InternalGlobal.TYPE.name();
     public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME);

-    public GlobalAggregatorBuilder(String name) {
+    public GlobalAggregationBuilder(String name) {
         super(name, InternalGlobal.TYPE);
     }

     /**
      * Read from a stream.
      */
-    public GlobalAggregatorBuilder(StreamInput in) throws IOException {
+    public GlobalAggregationBuilder(StreamInput in) throws IOException {
         super(in, InternalGlobal.TYPE);
     }

@@ -64,9 +64,9 @@ public class GlobalAggregatorBuilder extends AggregatorBuilder<GlobalAggregatorB
         return builder;
     }

-    public static GlobalAggregatorBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
+    public static GlobalAggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
         context.parser().nextToken();
-        return new GlobalAggregatorBuilder(aggregationName);
+        return new GlobalAggregationBuilder(aggregationName);
     }

     @Override
@@ -83,4 +83,4 @@ public class GlobalAggregatorBuilder extends AggregatorBuilder<GlobalAggregatorB
     public String getWriteableName() {
         return NAME;
     }
-}
+}
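Note: GlobalAggregationBuilder keeps its single-argument constructor, so call sites only change the class name. A sketch; the package path and the subAggregation(...) method on the AggregationBuilder base class are assumptions:

--------------------------------
import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregationBuilder;

// Runs its sub-aggregations over all documents in the search context,
// regardless of the main query scope.
GlobalAggregationBuilder global = new GlobalAggregationBuilder("all_docs");
--------------------------------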
@@ -24,14 +24,14 @@ import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.rounding.Rounding;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.search.aggregations.support.ValuesSource;
-import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder;
+import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
 import org.elasticsearch.search.aggregations.support.ValuesSourceType;

 import java.io.IOException;
 import java.util.Objects;

 public abstract class AbstractHistogramBuilder<AB extends AbstractHistogramBuilder<AB>>
-        extends ValuesSourceAggregatorBuilder<ValuesSource.Numeric, AB> {
+        extends ValuesSourceAggregationBuilder<ValuesSource.Numeric, AB> {

     protected long interval;
     protected long offset = 0;
@@ -200,4 +200,4 @@ public abstract class AbstractHistogramBuilder<AB extends AbstractHistogramBuild
                 && Objects.equals(minDocCount, other.minDocCount)
                 && Objects.equals(extendedBounds, other.extendedBounds);
     }
-}
+}
@@ -33,21 +33,21 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
 import java.io.IOException;
 import java.util.Objects;

-public class DateHistogramAggregatorBuilder extends AbstractHistogramBuilder<DateHistogramAggregatorBuilder> {
+public class DateHistogramAggregationBuilder extends AbstractHistogramBuilder<DateHistogramAggregationBuilder> {

     public static final String NAME = InternalDateHistogram.TYPE.name();
     public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME);

     private DateHistogramInterval dateHistogramInterval;

-    public DateHistogramAggregatorBuilder(String name) {
+    public DateHistogramAggregationBuilder(String name) {
         super(name, InternalDateHistogram.HISTOGRAM_FACTORY);
     }

     /**
      * Read from a stream.
      */
-    public DateHistogramAggregatorBuilder(StreamInput in) throws IOException {
+    public DateHistogramAggregationBuilder(StreamInput in) throws IOException {
         super(in, InternalDateHistogram.HISTOGRAM_FACTORY);
         dateHistogramInterval = in.readOptionalWriteable(DateHistogramInterval::new);
     }
@@ -61,7 +61,7 @@ public class DateHistogramAggregatorBuilder extends AbstractHistogramBuilder<Dat
     /**
      * Set the interval.
      */
-    public DateHistogramAggregatorBuilder dateHistogramInterval(DateHistogramInterval dateHistogramInterval) {
+    public DateHistogramAggregationBuilder dateHistogramInterval(DateHistogramInterval dateHistogramInterval) {
         if (dateHistogramInterval == null) {
             throw new IllegalArgumentException("[dateHistogramInterval] must not be null: [" + name + "]");
         }
@@ -69,7 +69,7 @@ public class DateHistogramAggregatorBuilder extends AbstractHistogramBuilder<Dat
         return this;
     }

-    public DateHistogramAggregatorBuilder offset(String offset) {
+    public DateHistogramAggregationBuilder offset(String offset) {
         if (offset == null) {
             throw new IllegalArgumentException("[offset] must not be null: [" + name + "]");
         }
@@ -79,12 +79,12 @@ public class DateHistogramAggregatorBuilder extends AbstractHistogramBuilder<Dat
     protected static long parseStringOffset(String offset) {
         if (offset.charAt(0) == '-') {
             return -TimeValue
-                    .parseTimeValue(offset.substring(1), null, DateHistogramAggregatorBuilder.class.getSimpleName() + ".parseOffset")
+                    .parseTimeValue(offset.substring(1), null, DateHistogramAggregationBuilder.class.getSimpleName() + ".parseOffset")
                     .millis();
         }
         int beginIndex = offset.charAt(0) == '+' ? 1 : 0;
         return TimeValue
-                .parseTimeValue(offset.substring(beginIndex), null, DateHistogramAggregatorBuilder.class.getSimpleName() + ".parseOffset")
+                .parseTimeValue(offset.substring(beginIndex), null, DateHistogramAggregationBuilder.class.getSimpleName() + ".parseOffset")
                 .millis();
     }

@@ -121,7 +121,7 @@ public class DateHistogramAggregatorBuilder extends AbstractHistogramBuilder<Dat

     @Override
     protected boolean innerEquals(Object obj) {
-        DateHistogramAggregatorBuilder other = (DateHistogramAggregatorBuilder) obj;
+        DateHistogramAggregationBuilder other = (DateHistogramAggregationBuilder) obj;
         return super.innerEquals(obj) && Objects.equals(dateHistogramInterval, other.dateHistogramInterval);
     }
 }
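Note: parseStringOffset above accepts a leading '+' or '-' on a time value. A sketch of the renamed builder exercising it; the String constructor of DateHistogramInterval and the package path are assumptions:

--------------------------------
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;

DateHistogramAggregationBuilder histo = new DateHistogramAggregationBuilder("per_day")
        .dateHistogramInterval(new DateHistogramInterval("1d")); // assumed String ctor
histo.offset("+6h"); // parsed by parseStringOffset; "-6h" negates the millis
--------------------------------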
@@ -42,9 +42,9 @@ public class DateHistogramParser extends HistogramParser {
     }

     @Override
-    protected DateHistogramAggregatorBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType,
-            ValueType targetValueType, Map<ParseField, Object> otherOptions) {
-        DateHistogramAggregatorBuilder factory = new DateHistogramAggregatorBuilder(aggregationName);
+    protected DateHistogramAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType,
+            ValueType targetValueType, Map<ParseField, Object> otherOptions) {
+        DateHistogramAggregationBuilder factory = new DateHistogramAggregationBuilder(aggregationName);
         Object interval = otherOptions.get(Rounding.Interval.INTERVAL_FIELD);
         if (interval == null) {
             throw new ParsingException(null, "Missing required field [interval] for histogram aggregation [" + aggregationName + "]");
@@ -89,6 +89,6 @@ public class DateHistogramParser extends HistogramParser {

     @Override
     protected long parseStringOffset(String offset) throws IOException {
-        return DateHistogramAggregatorBuilder.parseStringOffset(offset);
+        return DateHistogramAggregationBuilder.parseStringOffset(offset);
     }
 }
@@ -29,18 +29,18 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;

 import java.io.IOException;

-public class HistogramAggregatorBuilder extends AbstractHistogramBuilder<HistogramAggregatorBuilder> {
+public class HistogramAggregationBuilder extends AbstractHistogramBuilder<HistogramAggregationBuilder> {
     public static final String NAME = InternalHistogram.TYPE.name();
     public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME);

-    public HistogramAggregatorBuilder(String name) {
+    public HistogramAggregationBuilder(String name) {
         super(name, InternalHistogram.HISTOGRAM_FACTORY);
     }

     /**
      * Read from a stream.
      */
-    public HistogramAggregatorBuilder(StreamInput in) throws IOException {
+    public HistogramAggregationBuilder(StreamInput in) throws IOException {
         super(in, InternalHistogram.HISTOGRAM_FACTORY);
     }

@@ -55,4 +55,4 @@ public class HistogramAggregatorBuilder extends AbstractHistogramBuilder<Histogr
     public String getWriteableName() {
         return NAME;
     }
-}
+}
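Note: a sketch of the plain histogram counterpart. interval(long) and field(String) are assumed setters from AbstractHistogramBuilder and ValuesSourceAggregationBuilder; only the constructors appear in this diff:

--------------------------------
import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder;

HistogramAggregationBuilder histo = new HistogramAggregationBuilder("price_buckets");
histo.interval(50);   // assumed setter backing the protected `interval` field above
histo.field("price"); // assumed inherited setter
--------------------------------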
@@ -47,7 +47,7 @@ public class HistogramParser extends NumericValuesSourceParser {
     @Override
     protected AbstractHistogramBuilder<?> createFactory(String aggregationName, ValuesSourceType valuesSourceType,
             ValueType targetValueType, Map<ParseField, Object> otherOptions) {
-        HistogramAggregatorBuilder factory = new HistogramAggregatorBuilder(aggregationName);
+        HistogramAggregationBuilder factory = new HistogramAggregationBuilder(aggregationName);
         Long interval = (Long) otherOptions.get(Rounding.Interval.INTERVAL_FIELD);
         if (interval == null) {
             throw new ParsingException(null, "Missing required field [interval] for histogram aggregation [" + aggregationName + "]");
@@ -28,25 +28,25 @@ import org.elasticsearch.search.aggregations.AggregatorFactory;
 import org.elasticsearch.search.aggregations.support.AggregationContext;
 import org.elasticsearch.search.aggregations.support.ValueType;
 import org.elasticsearch.search.aggregations.support.ValuesSource;
-import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder;
+import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
 import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
 import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
 import org.elasticsearch.search.aggregations.support.ValuesSourceType;

 import java.io.IOException;

-public class MissingAggregatorBuilder extends ValuesSourceAggregatorBuilder<ValuesSource, MissingAggregatorBuilder> {
+public class MissingAggregationBuilder extends ValuesSourceAggregationBuilder<ValuesSource, MissingAggregationBuilder> {
     public static final String NAME = InternalMissing.TYPE.name();
     public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME);

-    public MissingAggregatorBuilder(String name, ValueType targetValueType) {
+    public MissingAggregationBuilder(String name, ValueType targetValueType) {
         super(name, InternalMissing.TYPE, ValuesSourceType.ANY, targetValueType);
     }

     /**
      * Read from a stream.
      */
-    public MissingAggregatorBuilder(StreamInput in) throws IOException {
+    public MissingAggregationBuilder(StreamInput in) throws IOException {
         super(in, InternalMissing.TYPE, ValuesSourceType.ANY);
     }

@@ -85,4 +85,4 @@ public class MissingAggregatorBuilder extends ValuesSourceAggregatorBuilder<Valu
     public String getWriteableName() {
         return NAME;
     }
-}
+}
@@ -41,8 +41,8 @@ public class MissingParser extends AnyValuesSourceParser {
     }

     @Override
-    protected MissingAggregatorBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType,
-            ValueType targetValueType, Map<ParseField, Object> otherOptions) {
-        return new MissingAggregatorBuilder(aggregationName, targetValueType);
+    protected MissingAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType,
+            ValueType targetValueType, Map<ParseField, Object> otherOptions) {
+        return new MissingAggregationBuilder(aggregationName, targetValueType);
     }
 }
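Note: a sketch of the renamed missing aggregation. The two-argument constructor is shown above; the specific ValueType constant, the package path, and field(String) are assumptions:

--------------------------------
import org.elasticsearch.search.aggregations.bucket.missing.MissingAggregationBuilder;
import org.elasticsearch.search.aggregations.support.ValueType;

// Counts documents that have no value for the target field.
MissingAggregationBuilder missing = new MissingAggregationBuilder("no_price", ValueType.STRING);
missing.field("price"); // assumed inherited setter
--------------------------------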
@@ -26,7 +26,7 @@ import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.query.QueryParseContext;
-import org.elasticsearch.search.aggregations.AggregatorBuilder;
+import org.elasticsearch.search.aggregations.AggregationBuilder;
 import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
 import org.elasticsearch.search.aggregations.AggregatorFactory;
 import org.elasticsearch.search.aggregations.support.AggregationContext;
@@ -34,7 +34,7 @@ import org.elasticsearch.search.aggregations.support.AggregationContext;
 import java.io.IOException;
 import java.util.Objects;

-public class NestedAggregatorBuilder extends AggregatorBuilder<NestedAggregatorBuilder> {
+public class NestedAggregationBuilder extends AggregationBuilder<NestedAggregationBuilder> {
     public static final String NAME = InternalNested.TYPE.name();
     public static final ParseField AGGREGATION_FIELD_NAME = new ParseField(NAME);

@@ -47,7 +47,7 @@ public class NestedAggregatorBuilder extends AggregatorBuilder<NestedAggregatorB
      *            the path to use for this nested aggregation. The path must
      *            match the path to a nested object in the mappings.
      */
-    public NestedAggregatorBuilder(String name, String path) {
+    public NestedAggregationBuilder(String name, String path) {
         super(name, InternalNested.TYPE);
         if (path == null) {
             throw new IllegalArgumentException("[path] must not be null: [" + name + "]");
@@ -58,7 +58,7 @@ public class NestedAggregatorBuilder extends AggregatorBuilder<NestedAggregatorB
     /**
      * Read from a stream.
      */
-    public NestedAggregatorBuilder(StreamInput in) throws IOException {
+    public NestedAggregationBuilder(StreamInput in) throws IOException {
         super(in, InternalNested.TYPE);
         path = in.readString();
     }
@@ -89,7 +89,7 @@ public class NestedAggregatorBuilder extends AggregatorBuilder<NestedAggregatorB
         return builder;
     }

-    public static NestedAggregatorBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
+    public static NestedAggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
         String path = null;

         XContentParser.Token token;
@@ -115,7 +115,7 @@ public class NestedAggregatorBuilder extends AggregatorBuilder<NestedAggregatorB
             throw new ParsingException(parser.getTokenLocation(), "Missing [path] field for nested aggregation [" + aggregationName + "]");
         }

-        return new NestedAggregatorBuilder(aggregationName, path);
+        return new NestedAggregationBuilder(aggregationName, path);
     }


@@ -126,7 +126,7 @@ public class NestedAggregatorBuilder extends AggregatorBuilder<NestedAggregatorB

     @Override
     protected boolean doEquals(Object obj) {
-        NestedAggregatorBuilder other = (NestedAggregatorBuilder) obj;
+        NestedAggregationBuilder other = (NestedAggregationBuilder) obj;
         return Objects.equals(path, other.path);
     }

@@ -134,4 +134,4 @@ public class NestedAggregatorBuilder extends AggregatorBuilder<NestedAggregatorB
     public String getWriteableName() {
         return NAME;
     }
-}
+}
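Note: the nested builder still takes the mapping path in its constructor. A sketch; the package path is an assumption:

--------------------------------
import org.elasticsearch.search.aggregations.bucket.nested.NestedAggregationBuilder;

// `path` must point at a `nested` object in the index mappings; a null path
// trips the constructor check shown above.
NestedAggregationBuilder comments = new NestedAggregationBuilder("by_comment", "comments");
--------------------------------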
@@ -26,7 +26,7 @@ import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.query.QueryParseContext;
-import org.elasticsearch.search.aggregations.AggregatorBuilder;
+import org.elasticsearch.search.aggregations.AggregationBuilder;
 import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
 import org.elasticsearch.search.aggregations.AggregatorFactory;
 import org.elasticsearch.search.aggregations.support.AggregationContext;
@@ -34,20 +34,20 @@ import org.elasticsearch.search.aggregations.support.AggregationContext;
 import java.io.IOException;
 import java.util.Objects;

-public class ReverseNestedAggregatorBuilder extends AggregatorBuilder<ReverseNestedAggregatorBuilder> {
+public class ReverseNestedAggregationBuilder extends AggregationBuilder<ReverseNestedAggregationBuilder> {
     public static final String NAME = InternalReverseNested.TYPE.name();
     public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME);

     private String path;

-    public ReverseNestedAggregatorBuilder(String name) {
+    public ReverseNestedAggregationBuilder(String name) {
         super(name, InternalReverseNested.TYPE);
     }

     /**
      * Read from a stream.
      */
-    public ReverseNestedAggregatorBuilder(StreamInput in) throws IOException {
+    public ReverseNestedAggregationBuilder(StreamInput in) throws IOException {
         super(in, InternalReverseNested.TYPE);
         path = in.readOptionalString();
     }
@@ -62,7 +62,7 @@ public class ReverseNestedAggregatorBuilder extends AggregatorBuilder<ReverseNes
      *            the path to a nested object in the mappings. If it is not specified
      *            then this aggregation will go back to the root document.
      */
-    public ReverseNestedAggregatorBuilder path(String path) {
+    public ReverseNestedAggregationBuilder path(String path) {
         if (path == null) {
             throw new IllegalArgumentException("[path] must not be null: [" + name + "]");
         }
@@ -93,7 +93,7 @@ public class ReverseNestedAggregatorBuilder extends AggregatorBuilder<ReverseNes
         return builder;
     }

-    public static ReverseNestedAggregatorBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
+    public static ReverseNestedAggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
         String path = null;

         XContentParser.Token token;
@@ -114,7 +114,7 @@ public class ReverseNestedAggregatorBuilder extends AggregatorBuilder<ReverseNes
             }
         }

-        ReverseNestedAggregatorBuilder factory = new ReverseNestedAggregatorBuilder(
+        ReverseNestedAggregationBuilder factory = new ReverseNestedAggregationBuilder(
                 aggregationName);
         if (path != null) {
             factory.path(path);
@@ -130,7 +130,7 @@ public class ReverseNestedAggregatorBuilder extends AggregatorBuilder<ReverseNes

     @Override
     protected boolean doEquals(Object obj) {
-        ReverseNestedAggregatorBuilder other = (ReverseNestedAggregatorBuilder) obj;
+        ReverseNestedAggregationBuilder other = (ReverseNestedAggregationBuilder) obj;
         return Objects.equals(path, other.path);
     }

@@ -138,4 +138,4 @@ public class ReverseNestedAggregatorBuilder extends AggregatorBuilder<ReverseNes
     public String getWriteableName() {
         return NAME;
     }
-}
+}
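Note: the reverse-nested counterpart joins back out of the nested scope. A sketch; the package path is an assumption:

--------------------------------
import org.elasticsearch.search.aggregations.bucket.nested.ReverseNestedAggregationBuilder;

ReverseNestedAggregationBuilder back = new ReverseNestedAggregationBuilder("back_to_root");
// With no path(...) the aggregation returns to the root document, as the
// javadoc above states; path("comments") would target an intermediate level.
--------------------------------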
@@ -25,7 +25,7 @@ import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator.Range;
 import org.elasticsearch.search.aggregations.support.ValuesSource;
-import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder;
+import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;

 import java.io.IOException;
 import java.util.ArrayList;
@@ -33,7 +33,7 @@ import java.util.List;
 import java.util.Objects;

 public abstract class AbstractRangeBuilder<AB extends AbstractRangeBuilder<AB, R>, R extends Range>
-        extends ValuesSourceAggregatorBuilder<ValuesSource.Numeric, AB> {
+        extends ValuesSourceAggregationBuilder<ValuesSource.Numeric, AB> {

     protected final InternalRange.Factory<?, ?> rangeFactory;
     protected List<R> ranges = new ArrayList<>();
@@ -103,4 +103,4 @@ public abstract class AbstractRangeBuilder<AB extends AbstractRangeBuilder<AB, R
         return Objects.equals(ranges, other.ranges)
                 && Objects.equals(keyed, other.keyed);
     }
-}
+}
@@ -30,18 +30,18 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;

 import java.io.IOException;

-public class RangeAggregatorBuilder extends AbstractRangeBuilder<RangeAggregatorBuilder, Range> {
+public class RangeAggregationBuilder extends AbstractRangeBuilder<RangeAggregationBuilder, Range> {
     public static final String NAME = InternalRange.TYPE.name();
     public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME);

-    public RangeAggregatorBuilder(String name) {
+    public RangeAggregationBuilder(String name) {
         super(name, InternalRange.FACTORY);
     }

     /**
      * Read from a stream.
      */
-    public RangeAggregatorBuilder(StreamInput in) throws IOException {
+    public RangeAggregationBuilder(StreamInput in) throws IOException {
         super(in, InternalRange.FACTORY, Range::new);
     }

@@ -55,7 +55,7 @@ public class RangeAggregatorBuilder extends AbstractRangeBuilder<RangeAggregator
      * @param to
      *            the upper bound on the distances, exclusive
      */
-    public RangeAggregatorBuilder addRange(String key, double from, double to) {
+    public RangeAggregationBuilder addRange(String key, double from, double to) {
         addRange(new Range(key, from, to));
         return this;
     }
@@ -65,7 +65,7 @@ public class RangeAggregatorBuilder extends AbstractRangeBuilder<RangeAggregator
      * automatically generated based on <code>from</code> and
      * <code>to</code>.
      */
-    public RangeAggregatorBuilder addRange(double from, double to) {
+    public RangeAggregationBuilder addRange(double from, double to) {
         return addRange(null, from, to);
     }

@@ -77,7 +77,7 @@ public class RangeAggregatorBuilder extends AbstractRangeBuilder<RangeAggregator
      * @param to
      *            the upper bound on the distances, exclusive
      */
-    public RangeAggregatorBuilder addUnboundedTo(String key, double to) {
+    public RangeAggregationBuilder addUnboundedTo(String key, double to) {
         addRange(new Range(key, null, to));
         return this;
     }
@@ -86,7 +86,7 @@ public class RangeAggregatorBuilder extends AbstractRangeBuilder<RangeAggregator
      * Same as {@link #addUnboundedTo(String, double)} but the key will be
      * computed automatically.
      */
-    public RangeAggregatorBuilder addUnboundedTo(double to) {
+    public RangeAggregationBuilder addUnboundedTo(double to) {
         return addUnboundedTo(null, to);
     }

@@ -98,7 +98,7 @@ public class RangeAggregatorBuilder extends AbstractRangeBuilder<RangeAggregator
      * @param from
      *            the lower bound on the distances, inclusive
      */
-    public RangeAggregatorBuilder addUnboundedFrom(String key, double from) {
+    public RangeAggregationBuilder addUnboundedFrom(String key, double from) {
         addRange(new Range(key, from, null));
         return this;
     }
@@ -107,7 +107,7 @@ public class RangeAggregatorBuilder extends AbstractRangeBuilder<RangeAggregator
      * Same as {@link #addUnboundedFrom(String, double)} but the key will be
      * computed automatically.
      */
-    public RangeAggregatorBuilder addUnboundedFrom(double from) {
+    public RangeAggregationBuilder addUnboundedFrom(double from) {
         return addUnboundedFrom(null, from);
     }

@@ -122,4 +122,4 @@ public class RangeAggregatorBuilder extends AbstractRangeBuilder<RangeAggregator
     public String getWriteableName() {
         return NAME;
     }
-}
+}
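Note: the add* methods above all return the builder, so ranges chain naturally. A sketch; field(String) is an assumed inherited setter:

--------------------------------
import org.elasticsearch.search.aggregations.bucket.range.RangeAggregationBuilder;

RangeAggregationBuilder prices = new RangeAggregationBuilder("price_ranges")
        .addUnboundedTo("cheap", 50)          // (*, 50)
        .addRange("mid", 50, 100)             // [50, 100)
        .addUnboundedFrom("expensive", 100);  // [100, *)
prices.field("price"); // assumed inherited setter
--------------------------------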
@@ -51,7 +51,7 @@ public class RangeParser extends NumericValuesSourceParser {
     @Override
     protected AbstractRangeBuilder<?, ?> createFactory(String aggregationName, ValuesSourceType valuesSourceType,
             ValueType targetValueType, Map<ParseField, Object> otherOptions) {
-        RangeAggregatorBuilder factory = new RangeAggregatorBuilder(aggregationName);
+        RangeAggregationBuilder factory = new RangeAggregationBuilder(aggregationName);
         @SuppressWarnings("unchecked")
         List<? extends Range> ranges = (List<? extends Range>) otherOptions.get(RangeAggregator.RANGES_FIELD);
         for (Range range : ranges) {
@@ -33,18 +33,18 @@ import org.joda.time.DateTime;

 import java.io.IOException;

-public class DateRangeAggregatorBuilder extends AbstractRangeBuilder<DateRangeAggregatorBuilder, RangeAggregator.Range> {
+public class DateRangeAggregationBuilder extends AbstractRangeBuilder<DateRangeAggregationBuilder, RangeAggregator.Range> {
     public static final String NAME = InternalDateRange.TYPE.name();
     public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME);

-    public DateRangeAggregatorBuilder(String name) {
+    public DateRangeAggregationBuilder(String name) {
         super(name, InternalDateRange.FACTORY);
     }

     /**
      * Read from a stream.
      */
-    public DateRangeAggregatorBuilder(StreamInput in) throws IOException {
+    public DateRangeAggregationBuilder(StreamInput in) throws IOException {
         super(in, InternalDateRange.FACTORY, Range::new);
     }

@@ -63,7 +63,7 @@ public class DateRangeAggregatorBuilder extends AbstractRangeBuilder<DateRangeAg
      * @param to
      *            the upper bound on the dates, exclusive
      */
-    public DateRangeAggregatorBuilder addRange(String key, String from, String to) {
+    public DateRangeAggregationBuilder addRange(String key, String from, String to) {
         addRange(new Range(key, from, to));
         return this;
     }
@@ -72,7 +72,7 @@ public class DateRangeAggregatorBuilder extends AbstractRangeBuilder<DateRangeAg
      * Same as {@link #addRange(String, String, String)} but the key will be
      * automatically generated based on <code>from</code> and <code>to</code>.
      */
-    public DateRangeAggregatorBuilder addRange(String from, String to) {
+    public DateRangeAggregationBuilder addRange(String from, String to) {
         return addRange(null, from, to);
     }

@@ -84,7 +84,7 @@ public class DateRangeAggregatorBuilder extends AbstractRangeBuilder<DateRangeAg
      * @param to
      *            the upper bound on the dates, exclusive
      */
-    public DateRangeAggregatorBuilder addUnboundedTo(String key, String to) {
+    public DateRangeAggregationBuilder addUnboundedTo(String key, String to) {
         addRange(new Range(key, null, to));
         return this;
     }
@@ -93,7 +93,7 @@ public class DateRangeAggregatorBuilder extends AbstractRangeBuilder<DateRangeAg
      * Same as {@link #addUnboundedTo(String, String)} but the key will be
      * computed automatically.
      */
-    public DateRangeAggregatorBuilder addUnboundedTo(String to) {
+    public DateRangeAggregationBuilder addUnboundedTo(String to) {
         return addUnboundedTo(null, to);
     }

@@ -105,7 +105,7 @@ public class DateRangeAggregatorBuilder extends AbstractRangeBuilder<DateRangeAg
      * @param from
      *            the lower bound on the distances, inclusive
      */
-    public DateRangeAggregatorBuilder addUnboundedFrom(String key, String from) {
+    public DateRangeAggregationBuilder addUnboundedFrom(String key, String from) {
         addRange(new Range(key, from, null));
         return this;
     }
@@ -114,7 +114,7 @@ public class DateRangeAggregatorBuilder extends AbstractRangeBuilder<DateRangeAg
      * Same as {@link #addUnboundedFrom(String, String)} but the key will be
      * computed automatically.
      */
-    public DateRangeAggregatorBuilder addUnboundedFrom(String from) {
+    public DateRangeAggregationBuilder addUnboundedFrom(String from) {
         return addUnboundedFrom(null, from);
     }

@@ -128,7 +128,7 @@ public class DateRangeAggregatorBuilder extends AbstractRangeBuilder<DateRangeAg
      * @param to
      *            the upper bound on the dates, exclusive
      */
-    public DateRangeAggregatorBuilder addRange(String key, double from, double to) {
+    public DateRangeAggregationBuilder addRange(String key, double from, double to) {
         addRange(new Range(key, from, to));
         return this;
     }
@@ -137,7 +137,7 @@ public class DateRangeAggregatorBuilder extends AbstractRangeBuilder<DateRangeAg
      * Same as {@link #addRange(String, double, double)} but the key will be
      * automatically generated based on <code>from</code> and <code>to</code>.
      */
-    public DateRangeAggregatorBuilder addRange(double from, double to) {
+    public DateRangeAggregationBuilder addRange(double from, double to) {
         return addRange(null, from, to);
     }

@@ -149,7 +149,7 @@ public class DateRangeAggregatorBuilder extends AbstractRangeBuilder<DateRangeAg
      * @param to
      *            the upper bound on the dates, exclusive
      */
-    public DateRangeAggregatorBuilder addUnboundedTo(String key, double to) {
+    public DateRangeAggregationBuilder addUnboundedTo(String key, double to) {
         addRange(new Range(key, null, to));
         return this;
     }
@@ -158,7 +158,7 @@ public class DateRangeAggregatorBuilder extends AbstractRangeBuilder<DateRangeAg
      * Same as {@link #addUnboundedTo(String, double)} but the key will be
      * computed automatically.
      */
-    public DateRangeAggregatorBuilder addUnboundedTo(double to) {
+    public DateRangeAggregationBuilder addUnboundedTo(double to) {
         return addUnboundedTo(null, to);
     }

@@ -170,7 +170,7 @@ public class DateRangeAggregatorBuilder extends AbstractRangeBuilder<DateRangeAg
      * @param from
      *            the lower bound on the distances, inclusive
      */
-    public DateRangeAggregatorBuilder addUnboundedFrom(String key, double from) {
+    public DateRangeAggregationBuilder addUnboundedFrom(String key, double from) {
         addRange(new Range(key, from, null));
         return this;
     }
@@ -179,7 +179,7 @@ public class DateRangeAggregatorBuilder extends AbstractRangeBuilder<DateRangeAg
      * Same as {@link #addUnboundedFrom(String, double)} but the key will be
      * computed automatically.
      */
-    public DateRangeAggregatorBuilder addUnboundedFrom(double from) {
+    public DateRangeAggregationBuilder addUnboundedFrom(double from) {
         return addUnboundedFrom(null, from);
     }

@@ -193,7 +193,7 @@ public class DateRangeAggregatorBuilder extends AbstractRangeBuilder<DateRangeAg
      * @param to
      *            the upper bound on the dates, exclusive
      */
-    public DateRangeAggregatorBuilder addRange(String key, DateTime from, DateTime to) {
+    public DateRangeAggregationBuilder addRange(String key, DateTime from, DateTime to) {
         addRange(new Range(key, convertDateTime(from), convertDateTime(to)));
         return this;
     }
@@ -210,7 +210,7 @@ public class DateRangeAggregatorBuilder extends AbstractRangeBuilder<DateRangeAg
      * Same as {@link #addRange(String, DateTime, DateTime)} but the key will be
      * automatically generated based on <code>from</code> and <code>to</code>.
      */
-    public DateRangeAggregatorBuilder addRange(DateTime from, DateTime to) {
+    public DateRangeAggregationBuilder addRange(DateTime from, DateTime to) {
         return addRange(null, from, to);
     }

@@ -222,7 +222,7 @@ public class DateRangeAggregatorBuilder extends AbstractRangeBuilder<DateRangeAg
      * @param to
      *            the upper bound on the dates, exclusive
      */
-    public DateRangeAggregatorBuilder addUnboundedTo(String key, DateTime to) {
+    public DateRangeAggregationBuilder addUnboundedTo(String key, DateTime to) {
         addRange(new Range(key, null, convertDateTime(to)));
         return this;
     }
@@ -231,7 +231,7 @@ public class DateRangeAggregatorBuilder extends AbstractRangeBuilder<DateRangeAg
      * Same as {@link #addUnboundedTo(String, DateTime)} but the key will be
      * computed automatically.
      */
-    public DateRangeAggregatorBuilder addUnboundedTo(DateTime to) {
+    public DateRangeAggregationBuilder addUnboundedTo(DateTime to) {
         return addUnboundedTo(null, to);
     }

@@ -243,7 +243,7 @@ public class DateRangeAggregatorBuilder extends AbstractRangeBuilder<DateRangeAg
      * @param from
      *            the lower bound on the distances, inclusive
      */
-    public DateRangeAggregatorBuilder addUnboundedFrom(String key, DateTime from) {
+    public DateRangeAggregationBuilder addUnboundedFrom(String key, DateTime from) {
         addRange(new Range(key, convertDateTime(from), null));
         return this;
     }
@@ -252,7 +252,7 @@ public class DateRangeAggregatorBuilder extends AbstractRangeBuilder<DateRangeAg
      * Same as {@link #addUnboundedFrom(String, DateTime)} but the key will be
      * computed automatically.
      */
-    public DateRangeAggregatorBuilder addUnboundedFrom(DateTime from) {
+    public DateRangeAggregationBuilder addUnboundedFrom(DateTime from) {
         return addUnboundedFrom(null, from);
     }
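Note: the date variant mirrors the numeric one but adds String and DateTime overloads, all renamed the same way. A sketch using the String overloads shown above; the package path and date-math expressions like "now-1y" are assumptions:

--------------------------------
import org.elasticsearch.search.aggregations.bucket.range.date.DateRangeAggregationBuilder;

DateRangeAggregationBuilder ages = new DateRangeAggregationBuilder("age_ranges")
        .addUnboundedTo("older", "now-1y")
        .addRange("last_year", "now-1y", "now"); // from inclusive, to exclusive
--------------------------------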
@@ -38,9 +38,9 @@ public class DateRangeParser extends RangeParser {
     }

     @Override
-    protected DateRangeAggregatorBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType,
-            ValueType targetValueType, Map<ParseField, Object> otherOptions) {
-        DateRangeAggregatorBuilder factory = new DateRangeAggregatorBuilder(aggregationName);
+    protected DateRangeAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType,
+            ValueType targetValueType, Map<ParseField, Object> otherOptions) {
+        DateRangeAggregationBuilder factory = new DateRangeAggregationBuilder(aggregationName);
         @SuppressWarnings("unchecked")
         List<Range> ranges = (List<Range>) otherOptions.get(RangeAggregator.RANGES_FIELD);
         for (Range range : ranges) {
@@ -33,7 +33,7 @@ import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator;
 import org.elasticsearch.search.aggregations.bucket.range.geodistance.GeoDistanceParser.Range;
 import org.elasticsearch.search.aggregations.support.AggregationContext;
 import org.elasticsearch.search.aggregations.support.ValuesSource;
-import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder;
+import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
 import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
 import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;

@@ -42,7 +42,7 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.Objects;

-public class GeoDistanceAggregatorBuilder extends ValuesSourceAggregatorBuilder<ValuesSource.GeoPoint, GeoDistanceAggregatorBuilder> {
+public class GeoDistanceAggregationBuilder extends ValuesSourceAggregationBuilder<ValuesSource.GeoPoint, GeoDistanceAggregationBuilder> {
     public static final String NAME = InternalGeoDistance.TYPE.name();
     public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME);

@@ -52,12 +52,12 @@ public class GeoDistanceAggregatorBuilder extends ValuesSourceAggregatorBuilder<
     private GeoDistance distanceType = GeoDistance.DEFAULT;
     private boolean keyed = false;

-    public GeoDistanceAggregatorBuilder(String name, GeoPoint origin) {
+    public GeoDistanceAggregationBuilder(String name, GeoPoint origin) {
         this(name, origin, InternalGeoDistance.FACTORY);
     }

-    private GeoDistanceAggregatorBuilder(String name, GeoPoint origin,
-            InternalRange.Factory<InternalGeoDistance.Bucket, InternalGeoDistance> rangeFactory) {
+    private GeoDistanceAggregationBuilder(String name, GeoPoint origin,
+            InternalRange.Factory<InternalGeoDistance.Bucket, InternalGeoDistance> rangeFactory) {
         super(name, rangeFactory.type(), rangeFactory.getValueSourceType(), rangeFactory.getValueType());
         if (origin == null) {
             throw new IllegalArgumentException("[origin] must not be null: [" + name + "]");
@@ -68,7 +68,7 @@ public class GeoDistanceAggregatorBuilder extends ValuesSourceAggregatorBuilder<
     /**
      * Read from a stream.
      */
-    public GeoDistanceAggregatorBuilder(StreamInput in) throws IOException {
+    public GeoDistanceAggregationBuilder(StreamInput in) throws IOException {
         super(in, InternalGeoDistance.FACTORY.type(), InternalGeoDistance.FACTORY.getValueSourceType(),
                 InternalGeoDistance.FACTORY.getValueType());
         origin = new GeoPoint(in.readDouble(), in.readDouble());
@@ -95,7 +95,7 @@ public class GeoDistanceAggregatorBuilder extends ValuesSourceAggregatorBuilder<
         unit.writeTo(out);
     }

-    public GeoDistanceAggregatorBuilder addRange(Range range) {
+    public GeoDistanceAggregationBuilder addRange(Range range) {
         if (range == null) {
             throw new IllegalArgumentException("[range] must not be null: [" + name + "]");
         }
@@ -113,7 +113,7 @@ public class GeoDistanceAggregatorBuilder extends ValuesSourceAggregatorBuilder<
      * @param to
      *            the upper bound on the distances, exclusive
      */
-    public GeoDistanceAggregatorBuilder addRange(String key, double from, double to) {
+    public GeoDistanceAggregationBuilder addRange(String key, double from, double to) {
         ranges.add(new Range(key, from, to));
         return this;
     }
@@ -123,7 +123,7 @@ public class GeoDistanceAggregatorBuilder extends ValuesSourceAggregatorBuilder<
      * automatically generated based on <code>from</code> and
      * <code>to</code>.
      */
-    public GeoDistanceAggregatorBuilder addRange(double from, double to) {
+    public GeoDistanceAggregationBuilder addRange(double from, double to) {
         return addRange(null, from, to);
     }

@@ -135,7 +135,7 @@ public class GeoDistanceAggregatorBuilder extends ValuesSourceAggregatorBuilder<
      * @param to
      *            the upper bound on the distances, exclusive
      */
-    public GeoDistanceAggregatorBuilder addUnboundedTo(String key, double to) {
+    public GeoDistanceAggregationBuilder addUnboundedTo(String key, double to) {
         ranges.add(new Range(key, null, to));
         return this;
     }
@@ -144,7 +144,7 @@ public class GeoDistanceAggregatorBuilder extends ValuesSourceAggregatorBuilder<
      * Same as {@link #addUnboundedTo(String, double)} but the key will be
      * computed automatically.
      */
-    public GeoDistanceAggregatorBuilder addUnboundedTo(double to) {
+    public GeoDistanceAggregationBuilder addUnboundedTo(double to) {
         return addUnboundedTo(null, to);
     }

@@ -156,7 +156,7 @@ public class GeoDistanceAggregatorBuilder extends ValuesSourceAggregatorBuilder<
      * @param from
      *            the lower bound on the distances, inclusive
      */
-    public GeoDistanceAggregatorBuilder addUnboundedFrom(String key, double from) {
+    public GeoDistanceAggregationBuilder addUnboundedFrom(String key, double from) {
         addRange(new Range(key, from, null));
         return this;
     }
@@ -165,7 +165,7 @@ public class GeoDistanceAggregatorBuilder extends ValuesSourceAggregatorBuilder<
      * Same as {@link #addUnboundedFrom(String, double)} but the key will be
      * computed automatically.
      */
-    public GeoDistanceAggregatorBuilder addUnboundedFrom(double from) {
+    public GeoDistanceAggregationBuilder addUnboundedFrom(double from) {
         return addUnboundedFrom(null, from);
     }

@@ -178,7 +178,7 @@ public class GeoDistanceAggregatorBuilder extends ValuesSourceAggregatorBuilder<
         return NAME;
     }

-    public GeoDistanceAggregatorBuilder unit(DistanceUnit unit) {
+    public GeoDistanceAggregationBuilder unit(DistanceUnit unit) {
         if (unit == null) {
             throw new IllegalArgumentException("[unit] must not be null: [" + name + "]");
         }
@@ -190,7 +190,7 @@ public class GeoDistanceAggregatorBuilder extends ValuesSourceAggregatorBuilder<
         return unit;
     }

-    public GeoDistanceAggregatorBuilder distanceType(GeoDistance distanceType) {
+    public GeoDistanceAggregationBuilder distanceType(GeoDistance distanceType) {
         if (distanceType == null) {
             throw new IllegalArgumentException("[distanceType] must not be null: [" + name + "]");
         }
@@ -202,7 +202,7 @@ public class GeoDistanceAggregatorBuilder extends ValuesSourceAggregatorBuilder<
         return distanceType;
     }

-    public GeoDistanceAggregatorBuilder keyed(boolean keyed) {
+    public GeoDistanceAggregationBuilder keyed(boolean keyed) {
         this.keyed = keyed;
         return this;
     }
@@ -236,7 +236,7 @@ public class GeoDistanceAggregatorBuilder extends ValuesSourceAggregatorBuilder<

     @Override
     protected boolean innerEquals(Object obj) {
-        GeoDistanceAggregatorBuilder other = (GeoDistanceAggregatorBuilder) obj;
+        GeoDistanceAggregationBuilder other = (GeoDistanceAggregationBuilder) obj;
         return Objects.equals(origin, other.origin)
                 && Objects.equals(ranges, other.ranges)
                 && Objects.equals(keyed, other.keyed)
@@ -244,4 +244,4 @@ public class GeoDistanceAggregatorBuilder extends ValuesSourceAggregatorBuilder<
                 && Objects.equals(unit, other.unit);
     }

-}
+}
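Note: a sketch combining the renamed setters above. GeoPoint's (lat, lon) constructor matches the stream-read form in this diff; the DistanceUnit constant and import paths outside the aggregations package are assumptions:

--------------------------------
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.search.aggregations.bucket.range.geodistance.GeoDistanceAggregationBuilder;

GeoDistanceAggregationBuilder rings = new GeoDistanceAggregationBuilder(
        "rings", new GeoPoint(52.3760, 4.8940))
        .unit(DistanceUnit.KILOMETERS) // assumed constant
        .addUnboundedTo(100)
        .addRange(100, 300)
        .addUnboundedFrom(300);
--------------------------------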
@@ -85,10 +85,10 @@ public class GeoDistanceParser extends GeoPointValuesSourceParser {
     }

     @Override
-    protected GeoDistanceAggregatorBuilder createFactory(
+    protected GeoDistanceAggregationBuilder createFactory(
             String aggregationName, ValuesSourceType valuesSourceType, ValueType targetValueType, Map<ParseField, Object> otherOptions) {
         GeoPoint origin = (GeoPoint) otherOptions.get(ORIGIN_FIELD);
-        GeoDistanceAggregatorBuilder factory = new GeoDistanceAggregatorBuilder(aggregationName, origin);
+        GeoDistanceAggregationBuilder factory = new GeoDistanceAggregationBuilder(aggregationName, origin);
         @SuppressWarnings("unchecked")
         List<Range> ranges = (List<Range>) otherOptions.get(RangeAggregator.RANGES_FIELD);
         for (Range range : ranges) {
@@ -171,4 +171,4 @@ public class GeoDistanceParser extends GeoPointValuesSourceParser {
         }
         return false;
     }
-}
+}
@@ -44,14 +44,14 @@ import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator;
 import org.elasticsearch.search.aggregations.support.AggregationContext;
 import org.elasticsearch.search.aggregations.support.ValueType;
 import org.elasticsearch.search.aggregations.support.ValuesSource;
-import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder;
+import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
 import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
 import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
 import org.elasticsearch.search.aggregations.support.ValuesSourceType;


-public final class IpRangeAggregatorBuilder
-        extends ValuesSourceAggregatorBuilder<ValuesSource.Bytes, IpRangeAggregatorBuilder> {
+public final class IpRangeAggregationBuilder
+        extends ValuesSourceAggregationBuilder<ValuesSource.Bytes, IpRangeAggregationBuilder> {
     private static final String NAME = "ip_range";
     public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME);
     private static final InternalAggregation.Type TYPE = new InternalAggregation.Type(NAME);
@@ -163,7 +163,7 @@ public final class IpRangeAggregatorBuilder
     private boolean keyed = false;
     private List<Range> ranges = new ArrayList<>();

-    public IpRangeAggregatorBuilder(String name) {
+    public IpRangeAggregationBuilder(String name) {
         super(name, TYPE, ValuesSourceType.BYTES, ValueType.IP);
     }

@@ -172,7 +172,7 @@ public final class IpRangeAggregatorBuilder
         return NAME;
     }

-    public IpRangeAggregatorBuilder keyed(boolean keyed) {
+    public IpRangeAggregationBuilder keyed(boolean keyed) {
         this.keyed = keyed;
         return this;
     }
@@ -187,7 +187,7 @@ public final class IpRangeAggregatorBuilder
     }

     /** Add a new {@link Range} to this aggregation. */
-    public IpRangeAggregatorBuilder addRange(Range range) {
+    public IpRangeAggregationBuilder addRange(Range range) {
         ranges.add(range);
         return this;
     }
@@ -202,7 +202,7 @@ public final class IpRangeAggregatorBuilder
      * @param to
      *            the upper bound on the distances, exclusive
      */
-    public IpRangeAggregatorBuilder addRange(String key, String from, String to) {
+    public IpRangeAggregationBuilder addRange(String key, String from, String to) {
         addRange(new Range(key, from, to));
         return this;
     }
@@ -210,7 +210,7 @@ public final class IpRangeAggregatorBuilder
     /**
      * Add a new range to this aggregation using the CIDR notation.
      */
-    public IpRangeAggregatorBuilder addMaskRange(String key, String mask) {
+    public IpRangeAggregationBuilder addMaskRange(String key, String mask) {
         return addRange(new Range(key, mask));
     }

@@ -218,7 +218,7 @@ public final class IpRangeAggregatorBuilder
      * Same as {@link #addMaskRange(String, String)} but uses the mask itself as
      * a key.
      */
-    public IpRangeAggregatorBuilder addMaskRange(String mask) {
+    public IpRangeAggregationBuilder addMaskRange(String mask) {
         return addRange(new Range(mask, mask));
     }

@@ -226,7 +226,7 @@ public final class IpRangeAggregatorBuilder
      * Same as {@link #addRange(String, String, String)} but the key will be
      * automatically generated.
      */
-    public IpRangeAggregatorBuilder addRange(String from, String to) {
+    public IpRangeAggregationBuilder addRange(String from, String to) {
         return addRange(null, from, to);
     }

@@ -234,7 +234,7 @@ public final class IpRangeAggregatorBuilder
      * Same as {@link #addRange(String, String, String)} but there will be no
      * lower bound.
      */
-    public IpRangeAggregatorBuilder addUnboundedTo(String key, String to) {
+    public IpRangeAggregationBuilder addUnboundedTo(String key, String to) {
         addRange(new Range(key, null, to));
         return this;
     }
@@ -243,7 +243,7 @@ public final class IpRangeAggregatorBuilder
      * Same as {@link #addUnboundedTo(String, String)} but the key will be
      * generated automatically.
      */
-    public IpRangeAggregatorBuilder addUnboundedTo(String to) {
+    public IpRangeAggregationBuilder addUnboundedTo(String to) {
         return addUnboundedTo(null, to);
     }

@@ -251,13 +251,13 @@ public final class IpRangeAggregatorBuilder
      * Same as {@link #addRange(String, String, String)} but there will be no
      * upper bound.
      */
-    public IpRangeAggregatorBuilder addUnboundedFrom(String key, String from) {
+    public IpRangeAggregationBuilder addUnboundedFrom(String key, String from) {
         addRange(new Range(key, from, null));
         return this;
     }

     @Override
-    public IpRangeAggregatorBuilder script(Script script) {
+    public IpRangeAggregationBuilder script(Script script) {
         throw new IllegalArgumentException("[ip_range] does not support scripts");
     }

@@ -265,11 +265,11 @@ public final class IpRangeAggregatorBuilder
      * Same as {@link #addUnboundedFrom(String, String)} but the key will be
      * generated automatically.
      */
-    public IpRangeAggregatorBuilder addUnboundedFrom(String from) {
+    public IpRangeAggregationBuilder addUnboundedFrom(String from) {
         return addUnboundedFrom(null, from);
     }

-    public IpRangeAggregatorBuilder(StreamInput in) throws IOException {
+    public IpRangeAggregationBuilder(StreamInput in) throws IOException {
         super(in, TYPE, ValuesSourceType.BYTES, ValueType.IP);
         final int numRanges = in.readVInt();
         for (int i = 0; i < numRanges; ++i) {
@@ -323,7 +323,7 @@ public final class IpRangeAggregatorBuilder

     @Override
     protected boolean innerEquals(Object obj) {
-        IpRangeAggregatorBuilder that = (IpRangeAggregatorBuilder) obj;
+        IpRangeAggregationBuilder that = (IpRangeAggregationBuilder) obj;
         return keyed == that.keyed
                 && ranges.equals(that.ranges);
     }
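Note: a sketch of the renamed IP range builder; the script(...) override above deliberately throws, so only field-based use is possible. The package path is confirmed by the parser import below:

--------------------------------
import org.elasticsearch.search.aggregations.bucket.range.ip.IpRangeAggregationBuilder;

IpRangeAggregationBuilder ips = new IpRangeAggregationBuilder("ip_ranges")
        .addMaskRange("10.0.0.0/8")          // CIDR notation; mask doubles as key
        .addUnboundedTo("low", "10.0.0.127")
        .keyed(true);
--------------------------------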
@@ -30,10 +30,10 @@ import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentParser.Token;
 import org.elasticsearch.search.aggregations.support.AbstractValuesSourceParser.BytesValuesSourceParser;
 import org.elasticsearch.search.aggregations.bucket.range.RangeAggregator;
-import org.elasticsearch.search.aggregations.bucket.range.ip.IpRangeAggregatorBuilder.Range;
+import org.elasticsearch.search.aggregations.bucket.range.ip.IpRangeAggregationBuilder.Range;
 import org.elasticsearch.search.aggregations.support.ValueType;
 import org.elasticsearch.search.aggregations.support.ValuesSource;
-import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder;
+import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
 import org.elasticsearch.search.aggregations.support.ValuesSourceType;

 /**
@@ -48,10 +48,10 @@ public class IpRangeParser extends BytesValuesSourceParser {
     }

     @Override
-    protected ValuesSourceAggregatorBuilder<ValuesSource.Bytes, ?> createFactory(
+    protected ValuesSourceAggregationBuilder<ValuesSource.Bytes, ?> createFactory(
             String aggregationName, ValuesSourceType valuesSourceType,
             ValueType targetValueType, Map<ParseField, Object> otherOptions) {
-        IpRangeAggregatorBuilder range = new IpRangeAggregatorBuilder(aggregationName);
+        IpRangeAggregationBuilder range = new IpRangeAggregationBuilder(aggregationName);
         @SuppressWarnings("unchecked")
         Iterable<Range> ranges = (Iterable<Range>) otherOptions.get(RangeAggregator.RANGES_FIELD);
         if (otherOptions.containsKey(RangeAggregator.RANGES_FIELD)) {
@ -28,7 +28,7 @@ import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.InternalAggregation.Type;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;

@ -36,25 +36,25 @@ import org.elasticsearch.search.aggregations.support.ValuesSourceType;
import java.io.IOException;
import java.util.Objects;

public class DiversifiedAggregatorBuilder extends ValuesSourceAggregatorBuilder<ValuesSource, DiversifiedAggregatorBuilder> {
public class DiversifiedAggregationBuilder extends ValuesSourceAggregationBuilder<ValuesSource, DiversifiedAggregationBuilder> {
public static final String NAME = "diversified_sampler";
public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME);
public static final Type TYPE = new Type(NAME);

public static final int MAX_DOCS_PER_VALUE_DEFAULT = 1;

private int shardSize = SamplerAggregatorBuilder.DEFAULT_SHARD_SAMPLE_SIZE;
private int shardSize = SamplerAggregationBuilder.DEFAULT_SHARD_SAMPLE_SIZE;
private int maxDocsPerValue = MAX_DOCS_PER_VALUE_DEFAULT;
private String executionHint = null;

public DiversifiedAggregatorBuilder(String name) {
public DiversifiedAggregationBuilder(String name) {
super(name, TYPE, ValuesSourceType.ANY, null);
}

/**
* Read from a stream.
*/
public DiversifiedAggregatorBuilder(StreamInput in) throws IOException {
public DiversifiedAggregationBuilder(StreamInput in) throws IOException {
super(in, TYPE, ValuesSourceType.ANY, null);
shardSize = in.readVInt();
maxDocsPerValue = in.readVInt();

@ -71,7 +71,7 @@ public class DiversifiedAggregatorBuilder extends ValuesSourceAggregatorBuilder<
/**
* Set the max num docs to be returned from each shard.
*/
public DiversifiedAggregatorBuilder shardSize(int shardSize) {
public DiversifiedAggregationBuilder shardSize(int shardSize) {
if (shardSize < 0) {
throw new IllegalArgumentException(
"[shardSize] must be greater than or equal to 0. Found [" + shardSize + "] in [" + name + "]");

@ -90,7 +90,7 @@ public class DiversifiedAggregatorBuilder extends ValuesSourceAggregatorBuilder<
/**
* Set the max num docs to be returned per value.
*/
public DiversifiedAggregatorBuilder maxDocsPerValue(int maxDocsPerValue) {
public DiversifiedAggregationBuilder maxDocsPerValue(int maxDocsPerValue) {
if (maxDocsPerValue < 0) {
throw new IllegalArgumentException(
"[maxDocsPerValue] must be greater than or equal to 0. Found [" + maxDocsPerValue + "] in [" + name + "]");

@ -109,7 +109,7 @@ public class DiversifiedAggregatorBuilder extends ValuesSourceAggregatorBuilder<
/**
* Set the execution hint.
*/
public DiversifiedAggregatorBuilder executionHint(String executionHint) {
public DiversifiedAggregationBuilder executionHint(String executionHint) {
this.executionHint = executionHint;
return this;
}

@ -145,7 +145,7 @@ public class DiversifiedAggregatorBuilder extends ValuesSourceAggregatorBuilder<
@Override
protected boolean innerEquals(Object obj) {
DiversifiedAggregatorBuilder other = (DiversifiedAggregatorBuilder) obj;
DiversifiedAggregationBuilder other = (DiversifiedAggregationBuilder) obj;
return Objects.equals(shardSize, other.shardSize)
&& Objects.equals(maxDocsPerValue, other.maxDocsPerValue)
&& Objects.equals(executionHint, other.executionHint);

@ -155,4 +155,4 @@ public class DiversifiedAggregatorBuilder extends ValuesSourceAggregatorBuilder<
public String getWriteableName() {
return NAME;
}
}
}
@ -38,9 +38,9 @@ public class DiversifiedSamplerParser extends AnyValuesSourceParser {
}

@Override
protected DiversifiedAggregatorBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType,
ValueType targetValueType, Map<ParseField, Object> otherOptions) {
DiversifiedAggregatorBuilder factory = new DiversifiedAggregatorBuilder(aggregationName);
protected DiversifiedAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType,
ValueType targetValueType, Map<ParseField, Object> otherOptions) {
DiversifiedAggregationBuilder factory = new DiversifiedAggregationBuilder(aggregationName);
Integer shardSize = (Integer) otherOptions.get(SamplerAggregator.SHARD_SIZE_FIELD);
if (shardSize != null) {
factory.shardSize(shardSize);
@ -26,7 +26,7 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.search.aggregations.AggregatorBuilder;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.support.AggregationContext;

@ -34,7 +34,7 @@ import org.elasticsearch.search.aggregations.support.AggregationContext;
import java.io.IOException;
import java.util.Objects;

public class SamplerAggregatorBuilder extends AggregatorBuilder<SamplerAggregatorBuilder> {
public class SamplerAggregationBuilder extends AggregationBuilder<SamplerAggregationBuilder> {
public static final String NAME = InternalSampler.TYPE.name();
public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME);

@ -42,14 +42,14 @@ public class SamplerAggregatorBuilder extends AggregatorBuilder<SamplerAggregato
private int shardSize = DEFAULT_SHARD_SAMPLE_SIZE;

public SamplerAggregatorBuilder(String name) {
public SamplerAggregationBuilder(String name) {
super(name, InternalSampler.TYPE);
}

/**
* Read from a stream.
*/
public SamplerAggregatorBuilder(StreamInput in) throws IOException {
public SamplerAggregationBuilder(StreamInput in) throws IOException {
super(in, InternalSampler.TYPE);
shardSize = in.readVInt();
}

@ -62,7 +62,7 @@ public class SamplerAggregatorBuilder extends AggregatorBuilder<SamplerAggregato
/**
* Set the max num docs to be returned from each shard.
*/
public SamplerAggregatorBuilder shardSize(int shardSize) {
public SamplerAggregationBuilder shardSize(int shardSize) {
this.shardSize = shardSize;
return this;
}

@ -88,7 +88,7 @@ public class SamplerAggregatorBuilder extends AggregatorBuilder<SamplerAggregato
return builder;
}

public static SamplerAggregatorBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
public static SamplerAggregationBuilder parse(String aggregationName, QueryParseContext context) throws IOException {
XContentParser.Token token;
String currentFieldName = null;
Integer shardSize = null;

@ -110,7 +110,7 @@ public class SamplerAggregatorBuilder extends AggregatorBuilder<SamplerAggregato
}
}

SamplerAggregatorBuilder factory = new SamplerAggregatorBuilder(aggregationName);
SamplerAggregationBuilder factory = new SamplerAggregationBuilder(aggregationName);
if (shardSize != null) {
factory.shardSize(shardSize);
}

@ -124,7 +124,7 @@ public class SamplerAggregatorBuilder extends AggregatorBuilder<SamplerAggregato
@Override
protected boolean doEquals(Object obj) {
SamplerAggregatorBuilder other = (SamplerAggregatorBuilder) obj;
SamplerAggregationBuilder other = (SamplerAggregationBuilder) obj;
return Objects.equals(shardSize, other.shardSize);
}

@ -132,4 +132,4 @@ public class SamplerAggregatorBuilder extends AggregatorBuilder<SamplerAggregato
public String getWriteableName() {
return NAME;
}
}
}
@ -29,12 +29,12 @@ import org.elasticsearch.search.aggregations.bucket.significant.heuristics.JLHSc
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregator;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregator.BucketCountThresholds;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregatorBuilder;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;

@ -45,7 +45,7 @@ import java.util.Objects;
/**
*
*/
public class SignificantTermsAggregatorBuilder extends ValuesSourceAggregatorBuilder<ValuesSource, SignificantTermsAggregatorBuilder> {
public class SignificantTermsAggregationBuilder extends ValuesSourceAggregationBuilder<ValuesSource, SignificantTermsAggregationBuilder> {
public static final String NAME = SignificantStringTerms.TYPE.name();
public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME);

@ -62,14 +62,14 @@ public class SignificantTermsAggregatorBuilder extends ValuesSourceAggregatorBui
private TermsAggregator.BucketCountThresholds bucketCountThresholds = new BucketCountThresholds(DEFAULT_BUCKET_COUNT_THRESHOLDS);
private SignificanceHeuristic significanceHeuristic = DEFAULT_SIGNIFICANCE_HEURISTIC;

public SignificantTermsAggregatorBuilder(String name, ValueType valueType) {
public SignificantTermsAggregationBuilder(String name, ValueType valueType) {
super(name, SignificantStringTerms.TYPE, ValuesSourceType.ANY, valueType);
}

/**
* Read from a Stream.
*/
public SignificantTermsAggregatorBuilder(StreamInput in) throws IOException {
public SignificantTermsAggregationBuilder(StreamInput in) throws IOException {
super(in, SignificantStringTerms.TYPE, ValuesSourceType.ANY);
bucketCountThresholds = new BucketCountThresholds(in);
executionHint = in.readOptionalString();

@ -100,7 +100,7 @@ public class SignificantTermsAggregatorBuilder extends ValuesSourceAggregatorBui
return bucketCountThresholds;
}

public SignificantTermsAggregatorBuilder bucketCountThresholds(TermsAggregator.BucketCountThresholds bucketCountThresholds) {
public SignificantTermsAggregationBuilder bucketCountThresholds(TermsAggregator.BucketCountThresholds bucketCountThresholds) {
if (bucketCountThresholds == null) {
throw new IllegalArgumentException("[bucketCountThresholds] must not be null: [" + name + "]");
}

@ -112,7 +112,7 @@ public class SignificantTermsAggregatorBuilder extends ValuesSourceAggregatorBui
* Sets the size - indicating how many term buckets should be returned
* (defaults to 10)
*/
public SignificantTermsAggregatorBuilder size(int size) {
public SignificantTermsAggregationBuilder size(int size) {
if (size < 0) {
throw new IllegalArgumentException("[size] must be greater than or equal to 0. Found [" + size + "] in [" + name + "]");
}

@ -126,7 +126,7 @@ public class SignificantTermsAggregatorBuilder extends ValuesSourceAggregatorBui
* search execution). The higher the shard size is, the more accurate the
* results are.
*/
public SignificantTermsAggregatorBuilder shardSize(int shardSize) {
public SignificantTermsAggregationBuilder shardSize(int shardSize) {
if (shardSize < 0) {
throw new IllegalArgumentException(
"[shardSize] must be greater than or equal to 0. Found [" + shardSize + "] in [" + name + "]");

@ -139,7 +139,7 @@ public class SignificantTermsAggregatorBuilder extends ValuesSourceAggregatorBui
* Set the minimum document count terms should have in order to appear in
* the response.
*/
public SignificantTermsAggregatorBuilder minDocCount(long minDocCount) {
public SignificantTermsAggregationBuilder minDocCount(long minDocCount) {
if (minDocCount < 0) {
throw new IllegalArgumentException(
"[minDocCount] must be greater than or equal to 0. Found [" + minDocCount + "] in [" + name + "]");

@ -152,7 +152,7 @@ public class SignificantTermsAggregatorBuilder extends ValuesSourceAggregatorBui
* Set the minimum document count terms should have on the shard in order to
* appear in the response.
*/
public SignificantTermsAggregatorBuilder shardMinDocCount(long shardMinDocCount) {
public SignificantTermsAggregationBuilder shardMinDocCount(long shardMinDocCount) {
if (shardMinDocCount < 0) {
throw new IllegalArgumentException(
"[shardMinDocCount] must be greater than or equal to 0. Found [" + shardMinDocCount + "] in [" + name + "]");

@ -164,7 +164,7 @@ public class SignificantTermsAggregatorBuilder extends ValuesSourceAggregatorBui
/**
* Expert: sets an execution hint to the aggregation.
*/
public SignificantTermsAggregatorBuilder executionHint(String executionHint) {
public SignificantTermsAggregationBuilder executionHint(String executionHint) {
this.executionHint = executionHint;
return this;
}

@ -176,7 +176,7 @@ public class SignificantTermsAggregatorBuilder extends ValuesSourceAggregatorBui
return executionHint;
}

public SignificantTermsAggregatorBuilder backgroundFilter(QueryBuilder backgroundFilter) {
public SignificantTermsAggregationBuilder backgroundFilter(QueryBuilder backgroundFilter) {
if (backgroundFilter == null) {
throw new IllegalArgumentException("[backgroundFilter] must not be null: [" + name + "]");
}

@ -191,7 +191,7 @@ public class SignificantTermsAggregatorBuilder extends ValuesSourceAggregatorBui
/**
* Set terms to include and exclude from the aggregation results
*/
public SignificantTermsAggregatorBuilder includeExclude(IncludeExclude includeExclude) {
public SignificantTermsAggregationBuilder includeExclude(IncludeExclude includeExclude) {
this.includeExclude = includeExclude;
return this;
}

@ -203,7 +203,7 @@ public class SignificantTermsAggregatorBuilder extends ValuesSourceAggregatorBui
return includeExclude;
}

public SignificantTermsAggregatorBuilder significanceHeuristic(SignificanceHeuristic significanceHeuristic) {
public SignificantTermsAggregationBuilder significanceHeuristic(SignificanceHeuristic significanceHeuristic) {
if (significanceHeuristic == null) {
throw new IllegalArgumentException("[significanceHeuristic] must not be null: [" + name + "]");
}

@ -226,7 +226,7 @@ public class SignificantTermsAggregatorBuilder extends ValuesSourceAggregatorBui
protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
bucketCountThresholds.toXContent(builder, params);
if (executionHint != null) {
builder.field(TermsAggregatorBuilder.EXECUTION_HINT_FIELD_NAME.getPreferredName(), executionHint);
builder.field(TermsAggregationBuilder.EXECUTION_HINT_FIELD_NAME.getPreferredName(), executionHint);
}
if (filterBuilder != null) {
builder.field(BACKGROUND_FILTER.getPreferredName(), filterBuilder);

@ -245,7 +245,7 @@ public class SignificantTermsAggregatorBuilder extends ValuesSourceAggregatorBui
@Override
protected boolean innerEquals(Object obj) {
SignificantTermsAggregatorBuilder other = (SignificantTermsAggregatorBuilder) obj;
SignificantTermsAggregationBuilder other = (SignificantTermsAggregationBuilder) obj;
return Objects.equals(bucketCountThresholds, other.bucketCountThresholds)
&& Objects.equals(executionHint, other.executionHint)
&& Objects.equals(filterBuilder, other.filterBuilder)
@ -178,7 +178,7 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac
numberOfAggregatorsCreated++;
BucketCountThresholds bucketCountThresholds = new BucketCountThresholds(this.bucketCountThresholds);
if (bucketCountThresholds.getShardSize() == SignificantTermsAggregatorBuilder.DEFAULT_BUCKET_COUNT_THRESHOLDS.getShardSize()) {
if (bucketCountThresholds.getShardSize() == SignificantTermsAggregationBuilder.DEFAULT_BUCKET_COUNT_THRESHOLDS.getShardSize()) {
// The user has not made a shardSize selection .
// Use default heuristic to avoid any wrong-ranking caused by
// distributed counting

@ -211,7 +211,14 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac
}
}
assert execution != null;
return execution.create(name, factories, valuesSource, config.format(), bucketCountThresholds, includeExclude, context, parent,

DocValueFormat format = config.format();
if ((includeExclude != null) && (includeExclude.isRegexBased()) && format != DocValueFormat.RAW) {
throw new AggregationExecutionException("Aggregation [" + name + "] cannot support regular expression style include/exclude "
+ "settings as they can only be applied to string fields. Use an array of values for include/exclude clauses");
}

return execution.create(name, factories, valuesSource, format, bucketCountThresholds, includeExclude, context, parent,
significanceHeuristic, this, pipelineAggregators, metaData);
}

@ -227,7 +234,7 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac
}
IncludeExclude.LongFilter longFilter = null;
if (includeExclude != null) {
longFilter = includeExclude.convertToLongFilter();
longFilter = includeExclude.convertToLongFilter(config.format());
}
return new SignificantLongTermsAggregator(name, factories, (ValuesSource.Numeric) valuesSource, config.format(),
bucketCountThresholds, context, parent, significanceHeuristic, this, longFilter, pipelineAggregators,

@ -248,7 +255,7 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac
AggregationContext aggregationContext, Aggregator parent, SignificanceHeuristic significanceHeuristic,
SignificantTermsAggregatorFactory termsAggregatorFactory, List<PipelineAggregator> pipelineAggregators,
Map<String, Object> metaData) throws IOException {
final IncludeExclude.StringFilter filter = includeExclude == null ? null : includeExclude.convertToStringFilter();
final IncludeExclude.StringFilter filter = includeExclude == null ? null : includeExclude.convertToStringFilter(format);
return new SignificantStringTermsAggregator(name, factories, valuesSource, format, bucketCountThresholds, filter,
aggregationContext, parent, significanceHeuristic, termsAggregatorFactory, pipelineAggregators, metaData);
}

@ -262,7 +269,7 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac
AggregationContext aggregationContext, Aggregator parent, SignificanceHeuristic significanceHeuristic,
SignificantTermsAggregatorFactory termsAggregatorFactory, List<PipelineAggregator> pipelineAggregators,
Map<String, Object> metaData) throws IOException {
final IncludeExclude.OrdinalsFilter filter = includeExclude == null ? null : includeExclude.convertToOrdinalsFilter();
final IncludeExclude.OrdinalsFilter filter = includeExclude == null ? null : includeExclude.convertToOrdinalsFilter(format);
return new GlobalOrdinalsSignificantTermsAggregator(name, factories,
(ValuesSource.Bytes.WithOrdinals.FieldData) valuesSource, format, bucketCountThresholds, filter,
aggregationContext, parent, significanceHeuristic, termsAggregatorFactory, pipelineAggregators, metaData);

@ -277,7 +284,7 @@ public class SignificantTermsAggregatorFactory extends ValuesSourceAggregatorFac
AggregationContext aggregationContext, Aggregator parent, SignificanceHeuristic significanceHeuristic,
SignificantTermsAggregatorFactory termsAggregatorFactory, List<PipelineAggregator> pipelineAggregators,
Map<String, Object> metaData) throws IOException {
final IncludeExclude.OrdinalsFilter filter = includeExclude == null ? null : includeExclude.convertToOrdinalsFilter();
final IncludeExclude.OrdinalsFilter filter = includeExclude == null ? null : includeExclude.convertToOrdinalsFilter(format);
return new GlobalOrdinalsSignificantTermsAggregator.WithHash(name, factories,
(ValuesSource.Bytes.WithOrdinals.FieldData) valuesSource, format, bucketCountThresholds, filter,
aggregationContext, parent, significanceHeuristic, termsAggregatorFactory, pipelineAggregators, metaData);
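The new fail-fast guard above (mirrored in TermsAggregatorFactory further down) is easiest to read behaviorally; a hedged illustration with hypothetical request values:

--------------------------------
// Illustration only, not part of the commit. Regex-style include/exclude can
// only match string terms, so on a field whose DocValueFormat is not RAW
// (e.g. a long or date field) the factory now throws up front:
//   "include": "user_.*"       -> AggregationExecutionException
// Exact values still work, because they are parsed through the field's format
// via the convertTo*Filter(format) call sites in the hunks above and below.
--------------------------------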
@ -53,10 +53,11 @@ public class SignificantTermsParser extends AbstractTermsParser {
}

@Override
protected SignificantTermsAggregatorBuilder doCreateFactory(String aggregationName, ValuesSourceType valuesSourceType,
ValueType targetValueType, BucketCountThresholds bucketCountThresholds, SubAggCollectionMode collectMode, String executionHint,
IncludeExclude incExc, Map<ParseField, Object> otherOptions) {
SignificantTermsAggregatorBuilder factory = new SignificantTermsAggregatorBuilder(aggregationName, targetValueType);
protected SignificantTermsAggregationBuilder doCreateFactory(String aggregationName, ValuesSourceType valuesSourceType,
ValueType targetValueType, BucketCountThresholds bucketCountThresholds,
SubAggCollectionMode collectMode, String executionHint,
IncludeExclude incExc, Map<ParseField, Object> otherOptions) {
SignificantTermsAggregationBuilder factory = new SignificantTermsAggregationBuilder(aggregationName, targetValueType);
if (bucketCountThresholds != null) {
factory.bucketCountThresholds(bucketCountThresholds);
}

@ -66,11 +67,12 @@ public class SignificantTermsParser extends AbstractTermsParser {
if (incExc != null) {
factory.includeExclude(incExc);
}
QueryBuilder backgroundFilter = (QueryBuilder) otherOptions.get(SignificantTermsAggregatorBuilder.BACKGROUND_FILTER);
QueryBuilder backgroundFilter = (QueryBuilder) otherOptions.get(SignificantTermsAggregationBuilder.BACKGROUND_FILTER);
if (backgroundFilter != null) {
factory.backgroundFilter(backgroundFilter);
}
SignificanceHeuristic significanceHeuristic = (SignificanceHeuristic) otherOptions.get(SignificantTermsAggregatorBuilder.HEURISTIC);
SignificanceHeuristic significanceHeuristic =
(SignificanceHeuristic) otherOptions.get(SignificantTermsAggregationBuilder.HEURISTIC);
if (significanceHeuristic != null) {
factory.significanceHeuristic(significanceHeuristic);
}

@ -85,12 +87,12 @@ public class SignificantTermsParser extends AbstractTermsParser {
.lookupReturningNullIfNotFound(currentFieldName, parseFieldMatcher);
if (significanceHeuristicParser != null) {
SignificanceHeuristic significanceHeuristic = significanceHeuristicParser.parse(parser, parseFieldMatcher);
otherOptions.put(SignificantTermsAggregatorBuilder.HEURISTIC, significanceHeuristic);
otherOptions.put(SignificantTermsAggregationBuilder.HEURISTIC, significanceHeuristic);
return true;
} else if (parseFieldMatcher.match(currentFieldName, SignificantTermsAggregatorBuilder.BACKGROUND_FILTER)) {
} else if (parseFieldMatcher.match(currentFieldName, SignificantTermsAggregationBuilder.BACKGROUND_FILTER)) {
QueryParseContext queryParseContext = new QueryParseContext(queriesRegistry, parser, parseFieldMatcher);
QueryBuilder filter = queryParseContext.parseInnerQueryBuilder();
otherOptions.put(SignificantTermsAggregatorBuilder.BACKGROUND_FILTER, filter);
otherOptions.put(SignificantTermsAggregationBuilder.BACKGROUND_FILTER, filter);
return true;
}
}

@ -99,6 +101,6 @@ public class SignificantTermsParser extends AbstractTermsParser {
@Override
protected BucketCountThresholds getDefaultBucketCountThresholds() {
return new TermsAggregator.BucketCountThresholds(SignificantTermsAggregatorBuilder.DEFAULT_BUCKET_COUNT_THRESHOLDS);
return new TermsAggregator.BucketCountThresholds(SignificantTermsAggregationBuilder.DEFAULT_BUCKET_COUNT_THRESHOLDS);
}
}
@ -60,7 +60,7 @@ public class UnmappedSignificantTerms extends InternalSignificantTerms<UnmappedS
public UnmappedSignificantTerms(String name, int requiredSize, long minDocCount, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
//We pass zero for index/subset sizes because for the purpose of significant term analysis
// we assume an unmapped index's size is irrelevant to the proceedings.
super(0, 0, name, DocValueFormat.RAW, requiredSize, minDocCount, SignificantTermsAggregatorBuilder.DEFAULT_SIGNIFICANCE_HEURISTIC,
super(0, 0, name, DocValueFormat.RAW, requiredSize, minDocCount, SignificantTermsAggregationBuilder.DEFAULT_SIGNIFICANCE_HEURISTIC,
BUCKETS, pipelineAggregators, metaData);
}
@ -29,7 +29,7 @@ import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude
import org.elasticsearch.search.aggregations.support.AbstractValuesSourceParser.AnyValuesSourceParser;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;

import java.io.IOException;

@ -50,8 +50,10 @@ public abstract class AbstractTermsParser extends AnyValuesSourceParser {
}

@Override
protected final ValuesSourceAggregatorBuilder<ValuesSource, ?> createFactory(String aggregationName, ValuesSourceType valuesSourceType,
ValueType targetValueType, Map<ParseField, Object> otherOptions) {
protected final ValuesSourceAggregationBuilder<ValuesSource, ?> createFactory(String aggregationName,
ValuesSourceType valuesSourceType,
ValueType targetValueType,
Map<ParseField, Object> otherOptions) {
BucketCountThresholds bucketCountThresholds = getDefaultBucketCountThresholds();
Integer requiredSize = (Integer) otherOptions.get(REQUIRED_SIZE_FIELD_NAME);
if (requiredSize != null && requiredSize != -1) {

@ -77,10 +79,14 @@ public abstract class AbstractTermsParser extends AnyValuesSourceParser {
otherOptions);
}

protected abstract ValuesSourceAggregatorBuilder<ValuesSource, ?> doCreateFactory(String aggregationName,
ValuesSourceType valuesSourceType,
ValueType targetValueType, BucketCountThresholds bucketCountThresholds, SubAggCollectionMode collectMode, String executionHint,
IncludeExclude incExc, Map<ParseField, Object> otherOptions);
protected abstract ValuesSourceAggregationBuilder<ValuesSource, ?> doCreateFactory(String aggregationName,
ValuesSourceType valuesSourceType,
ValueType targetValueType,
BucketCountThresholds bucketCountThresholds,
SubAggCollectionMode collectMode,
String executionHint,
IncludeExclude incExc,
Map<ParseField, Object> otherOptions);

@Override
protected boolean token(String aggregationName, String currentFieldName, Token token, XContentParser parser,
@ -30,7 +30,7 @@ import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;

@ -38,7 +38,7 @@ import java.io.IOException;
import java.util.List;
import java.util.Objects;

public class TermsAggregatorBuilder extends ValuesSourceAggregatorBuilder<ValuesSource, TermsAggregatorBuilder> {
public class TermsAggregationBuilder extends ValuesSourceAggregationBuilder<ValuesSource, TermsAggregationBuilder> {
public static final String NAME = StringTerms.TYPE.name();
public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME);

@ -61,14 +61,14 @@ public class TermsAggregatorBuilder extends ValuesSourceAggregatorBuilder<Values
DEFAULT_BUCKET_COUNT_THRESHOLDS);
private boolean showTermDocCountError = false;

public TermsAggregatorBuilder(String name, ValueType valueType) {
public TermsAggregationBuilder(String name, ValueType valueType) {
super(name, StringTerms.TYPE, ValuesSourceType.ANY, valueType);
}

/**
* Read from a stream.
*/
public TermsAggregatorBuilder(StreamInput in) throws IOException {
public TermsAggregationBuilder(StreamInput in) throws IOException {
super(in, StringTerms.TYPE, ValuesSourceType.ANY);
bucketCountThresholds = new BucketCountThresholds(in);
collectMode = SubAggCollectionMode.readFromStream(in);

@ -97,7 +97,7 @@ public class TermsAggregatorBuilder extends ValuesSourceAggregatorBuilder<Values
return bucketCountThresholds;
}

public TermsAggregatorBuilder bucketCountThresholds(TermsAggregator.BucketCountThresholds bucketCountThresholds) {
public TermsAggregationBuilder bucketCountThresholds(TermsAggregator.BucketCountThresholds bucketCountThresholds) {
if (bucketCountThresholds == null) {
throw new IllegalArgumentException("[bucketCountThresholds] must not be null: [" + name + "]");
}

@ -109,7 +109,7 @@ public class TermsAggregatorBuilder extends ValuesSourceAggregatorBuilder<Values
* Sets the size - indicating how many term buckets should be returned
* (defaults to 10)
*/
public TermsAggregatorBuilder size(int size) {
public TermsAggregationBuilder size(int size) {
if (size < 0) {
throw new IllegalArgumentException("[size] must be greater than or equal to 0. Found [" + size + "] in [" + name + "]");
}

@ -123,7 +123,7 @@ public class TermsAggregatorBuilder extends ValuesSourceAggregatorBuilder<Values
* search execution). The higher the shard size is, the more accurate the
* results are.
*/
public TermsAggregatorBuilder shardSize(int shardSize) {
public TermsAggregationBuilder shardSize(int shardSize) {
if (shardSize < 0) {
throw new IllegalArgumentException(
"[shardSize] must be greater than or equal to 0. Found [" + shardSize + "] in [" + name + "]");

@ -136,7 +136,7 @@ public class TermsAggregatorBuilder extends ValuesSourceAggregatorBuilder<Values
* Set the minimum document count terms should have in order to appear in
* the response.
*/
public TermsAggregatorBuilder minDocCount(long minDocCount) {
public TermsAggregationBuilder minDocCount(long minDocCount) {
if (minDocCount < 0) {
throw new IllegalArgumentException(
"[minDocCount] must be greater than or equal to 0. Found [" + minDocCount + "] in [" + name + "]");

@ -149,7 +149,7 @@ public class TermsAggregatorBuilder extends ValuesSourceAggregatorBuilder<Values
* Set the minimum document count terms should have on the shard in order to
* appear in the response.
*/
public TermsAggregatorBuilder shardMinDocCount(long shardMinDocCount) {
public TermsAggregationBuilder shardMinDocCount(long shardMinDocCount) {
if (shardMinDocCount < 0) {
throw new IllegalArgumentException(
"[shardMinDocCount] must be greater than or equal to 0. Found [" + shardMinDocCount + "] in [" + name + "]");

@ -161,7 +161,7 @@ public class TermsAggregatorBuilder extends ValuesSourceAggregatorBuilder<Values
/**
* Sets the order in which the buckets will be returned.
*/
public TermsAggregatorBuilder order(Terms.Order order) {
public TermsAggregationBuilder order(Terms.Order order) {
if (order == null) {
throw new IllegalArgumentException("[order] must not be null: [" + name + "]");
}

@ -172,7 +172,7 @@ public class TermsAggregatorBuilder extends ValuesSourceAggregatorBuilder<Values
/**
* Sets the order in which the buckets will be returned.
*/
public TermsAggregatorBuilder order(List<Terms.Order> orders) {
public TermsAggregationBuilder order(List<Terms.Order> orders) {
if (orders == null) {
throw new IllegalArgumentException("[orders] must not be null: [" + name + "]");
}

@ -190,7 +190,7 @@ public class TermsAggregatorBuilder extends ValuesSourceAggregatorBuilder<Values
/**
* Expert: sets an execution hint to the aggregation.
*/
public TermsAggregatorBuilder executionHint(String executionHint) {
public TermsAggregationBuilder executionHint(String executionHint) {
this.executionHint = executionHint;
return this;
}

@ -205,7 +205,7 @@ public class TermsAggregatorBuilder extends ValuesSourceAggregatorBuilder<Values
/**
* Expert: set the collection mode.
*/
public TermsAggregatorBuilder collectMode(SubAggCollectionMode collectMode) {
public TermsAggregationBuilder collectMode(SubAggCollectionMode collectMode) {
if (collectMode == null) {
throw new IllegalArgumentException("[collectMode] must not be null: [" + name + "]");
}

@ -223,7 +223,7 @@ public class TermsAggregatorBuilder extends ValuesSourceAggregatorBuilder<Values
/**
* Set terms to include and exclude from the aggregation results
*/
public TermsAggregatorBuilder includeExclude(IncludeExclude includeExclude) {
public TermsAggregationBuilder includeExclude(IncludeExclude includeExclude) {
this.includeExclude = includeExclude;
return this;
}

@ -245,7 +245,7 @@ public class TermsAggregatorBuilder extends ValuesSourceAggregatorBuilder<Values
/**
* Set whether doc count error will be return for individual terms
*/
public TermsAggregatorBuilder showTermDocCountError(boolean showTermDocCountError) {
public TermsAggregationBuilder showTermDocCountError(boolean showTermDocCountError) {
this.showTermDocCountError = showTermDocCountError;
return this;
}

@ -262,7 +262,7 @@ public class TermsAggregatorBuilder extends ValuesSourceAggregatorBuilder<Values
bucketCountThresholds.toXContent(builder, params);
builder.field(SHOW_TERM_DOC_COUNT_ERROR.getPreferredName(), showTermDocCountError);
if (executionHint != null) {
builder.field(TermsAggregatorBuilder.EXECUTION_HINT_FIELD_NAME.getPreferredName(), executionHint);
builder.field(TermsAggregationBuilder.EXECUTION_HINT_FIELD_NAME.getPreferredName(), executionHint);
}
builder.field(ORDER_FIELD.getPreferredName());
order.toXContent(builder, params);

@ -280,7 +280,7 @@ public class TermsAggregatorBuilder extends ValuesSourceAggregatorBuilder<Values
@Override
protected boolean innerEquals(Object obj) {
TermsAggregatorBuilder other = (TermsAggregatorBuilder) obj;
TermsAggregationBuilder other = (TermsAggregationBuilder) obj;
return Objects.equals(bucketCountThresholds, other.bucketCountThresholds)
&& Objects.equals(collectMode, other.collectMode)
&& Objects.equals(executionHint, other.executionHint)
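For reference, a brief sketch of the renamed terms builder in use; the aggregation and field names are hypothetical, and Terms.Order.count(boolean) is assumed from the pre-existing API rather than shown in this diff:

--------------------------------
// Hedged sketch, not part of the commit.
TermsAggregationBuilder genres = new TermsAggregationBuilder("genres", ValueType.STRING);
genres.field("genre")                   // hypothetical keyword field
      .size(10)                         // defaults to 10, per the Javadoc above
      .order(Terms.Order.count(false))  // most frequent terms first
      .showTermDocCountError(true);
--------------------------------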
@ -139,10 +139,10 @@ public abstract class TermsAggregator extends BucketsAggregator {
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.field(TermsAggregatorBuilder.REQUIRED_SIZE_FIELD_NAME.getPreferredName(), requiredSize);
builder.field(TermsAggregatorBuilder.SHARD_SIZE_FIELD_NAME.getPreferredName(), shardSize);
builder.field(TermsAggregatorBuilder.MIN_DOC_COUNT_FIELD_NAME.getPreferredName(), minDocCount);
builder.field(TermsAggregatorBuilder.SHARD_MIN_DOC_COUNT_FIELD_NAME.getPreferredName(), shardMinDocCount);
builder.field(TermsAggregationBuilder.REQUIRED_SIZE_FIELD_NAME.getPreferredName(), requiredSize);
builder.field(TermsAggregationBuilder.SHARD_SIZE_FIELD_NAME.getPreferredName(), shardSize);
builder.field(TermsAggregationBuilder.MIN_DOC_COUNT_FIELD_NAME.getPreferredName(), minDocCount);
builder.field(TermsAggregationBuilder.SHARD_MIN_DOC_COUNT_FIELD_NAME.getPreferredName(), shardMinDocCount);
return builder;
}
@ -93,7 +93,7 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory<Values
}
BucketCountThresholds bucketCountThresholds = new BucketCountThresholds(this.bucketCountThresholds);
if (!(order == InternalOrder.TERM_ASC || order == InternalOrder.TERM_DESC)
&& bucketCountThresholds.getShardSize() == TermsAggregatorBuilder.DEFAULT_BUCKET_COUNT_THRESHOLDS.getShardSize()) {
&& bucketCountThresholds.getShardSize() == TermsAggregationBuilder.DEFAULT_BUCKET_COUNT_THRESHOLDS.getShardSize()) {
// The user has not made a shardSize selection. Use default
// heuristic to avoid any wrong-ranking caused by distributed
// counting

@ -150,8 +150,13 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory<Values
}
}
}
DocValueFormat format = config.format();
if ((includeExclude != null) && (includeExclude.isRegexBased()) && format != DocValueFormat.RAW) {
throw new AggregationExecutionException("Aggregation [" + name + "] cannot support regular expression style include/exclude "
+ "settings as they can only be applied to string fields. Use an array of values for include/exclude clauses");
}

return execution.create(name, factories, valuesSource, order, config.format(), bucketCountThresholds, includeExclude, context, parent,
return execution.create(name, factories, valuesSource, order, format, bucketCountThresholds, includeExclude, context, parent,
collectMode, showTermDocCountError, pipelineAggregators, metaData);
}

@ -171,7 +176,7 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory<Values
pipelineAggregators, metaData);
}
if (includeExclude != null) {
longFilter = includeExclude.convertToLongFilter();
longFilter = includeExclude.convertToLongFilter(config.format());
}
return new LongTermsAggregator(name, factories, (ValuesSource.Numeric) valuesSource, config.format(), order,
bucketCountThresholds, context, parent, collectMode, showTermDocCountError, longFilter, pipelineAggregators,

@ -192,7 +197,7 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory<Values
AggregationContext aggregationContext, Aggregator parent, SubAggCollectionMode subAggCollectMode,
boolean showTermDocCountError, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData)
throws IOException {
final IncludeExclude.StringFilter filter = includeExclude == null ? null : includeExclude.convertToStringFilter();
final IncludeExclude.StringFilter filter = includeExclude == null ? null : includeExclude.convertToStringFilter(format);
return new StringTermsAggregator(name, factories, valuesSource, order, format, bucketCountThresholds, filter,
aggregationContext, parent, subAggCollectMode, showTermDocCountError, pipelineAggregators, metaData);
}

@ -211,7 +216,7 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory<Values
AggregationContext aggregationContext, Aggregator parent, SubAggCollectionMode subAggCollectMode,
boolean showTermDocCountError, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData)
throws IOException {
final IncludeExclude.OrdinalsFilter filter = includeExclude == null ? null : includeExclude.convertToOrdinalsFilter();
final IncludeExclude.OrdinalsFilter filter = includeExclude == null ? null : includeExclude.convertToOrdinalsFilter(format);
return new GlobalOrdinalsStringTermsAggregator(name, factories, (ValuesSource.Bytes.WithOrdinals) valuesSource, order,
format, bucketCountThresholds, filter, aggregationContext, parent, subAggCollectMode, showTermDocCountError,
pipelineAggregators, metaData);

@ -231,7 +236,7 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory<Values
AggregationContext aggregationContext, Aggregator parent, SubAggCollectionMode subAggCollectMode,
boolean showTermDocCountError, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData)
throws IOException {
final IncludeExclude.OrdinalsFilter filter = includeExclude == null ? null : includeExclude.convertToOrdinalsFilter();
final IncludeExclude.OrdinalsFilter filter = includeExclude == null ? null : includeExclude.convertToOrdinalsFilter(format);
return new GlobalOrdinalsStringTermsAggregator.WithHash(name, factories, (ValuesSource.Bytes.WithOrdinals) valuesSource,
order, format, bucketCountThresholds, filter, aggregationContext, parent, subAggCollectMode, showTermDocCountError,
pipelineAggregators, metaData);
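The practical effect of threading config.format() into convertToLongFilter is easiest to see with a date field; a hedged illustration, with hypothetical values:

--------------------------------
// Illustration only, not part of the commit.
// Old behavior: include/exclude values had to be the raw doc-values longs:
//   Long.parseLong("2015-01-01")                -> NumberFormatException
// New behavior: values are parsed through the field's DocValueFormat, so the
// user-facing form works directly (assuming a date format in UTC):
//   format.parseLong("2015-01-01", false, null) -> 1420070400000 (epoch millis)
--------------------------------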
@ -41,12 +41,13 @@ import java.util.Map;
*/
public class TermsParser extends AbstractTermsParser {
@Override
protected TermsAggregatorBuilder doCreateFactory(String aggregationName, ValuesSourceType valuesSourceType,
ValueType targetValueType, BucketCountThresholds bucketCountThresholds, SubAggCollectionMode collectMode, String executionHint,
IncludeExclude incExc, Map<ParseField, Object> otherOptions) {
TermsAggregatorBuilder factory = new TermsAggregatorBuilder(aggregationName, targetValueType);
protected TermsAggregationBuilder doCreateFactory(String aggregationName, ValuesSourceType valuesSourceType,
ValueType targetValueType, BucketCountThresholds bucketCountThresholds,
SubAggCollectionMode collectMode, String executionHint,
IncludeExclude incExc, Map<ParseField, Object> otherOptions) {
TermsAggregationBuilder factory = new TermsAggregationBuilder(aggregationName, targetValueType);
@SuppressWarnings("unchecked")
List<OrderElement> orderElements = (List<OrderElement>) otherOptions.get(TermsAggregatorBuilder.ORDER_FIELD);
List<OrderElement> orderElements = (List<OrderElement>) otherOptions.get(TermsAggregationBuilder.ORDER_FIELD);
if (orderElements != null) {
List<Terms.Order> orders = new ArrayList<>(orderElements.size());
for (OrderElement orderElement : orderElements) {

@ -66,7 +67,7 @@ public class TermsParser extends AbstractTermsParser {
if (incExc != null) {
factory.includeExclude(incExc);
}
Boolean showTermDocCountError = (Boolean) otherOptions.get(TermsAggregatorBuilder.SHOW_TERM_DOC_COUNT_ERROR);
Boolean showTermDocCountError = (Boolean) otherOptions.get(TermsAggregationBuilder.SHOW_TERM_DOC_COUNT_ERROR);
if (showTermDocCountError != null) {
factory.showTermDocCountError(showTermDocCountError);
}

@ -77,12 +78,12 @@ public class TermsParser extends AbstractTermsParser {
public boolean parseSpecial(String aggregationName, XContentParser parser, ParseFieldMatcher parseFieldMatcher, Token token,
String currentFieldName, Map<ParseField, Object> otherOptions) throws IOException {
if (token == XContentParser.Token.START_OBJECT) {
if (parseFieldMatcher.match(currentFieldName, TermsAggregatorBuilder.ORDER_FIELD)) {
otherOptions.put(TermsAggregatorBuilder.ORDER_FIELD, Collections.singletonList(parseOrderParam(aggregationName, parser)));
if (parseFieldMatcher.match(currentFieldName, TermsAggregationBuilder.ORDER_FIELD)) {
otherOptions.put(TermsAggregationBuilder.ORDER_FIELD, Collections.singletonList(parseOrderParam(aggregationName, parser)));
return true;
}
} else if (token == XContentParser.Token.START_ARRAY) {
if (parseFieldMatcher.match(currentFieldName, TermsAggregatorBuilder.ORDER_FIELD)) {
if (parseFieldMatcher.match(currentFieldName, TermsAggregationBuilder.ORDER_FIELD)) {
List<OrderElement> orderElements = new ArrayList<>();
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
if (token == XContentParser.Token.START_OBJECT) {

@ -93,12 +94,12 @@ public class TermsParser extends AbstractTermsParser {
"Order elements must be of type object in [" + aggregationName + "] found token of type [" + token + "].");
}
}
otherOptions.put(TermsAggregatorBuilder.ORDER_FIELD, orderElements);
otherOptions.put(TermsAggregationBuilder.ORDER_FIELD, orderElements);
return true;
}
} else if (token == XContentParser.Token.VALUE_BOOLEAN) {
if (parseFieldMatcher.match(currentFieldName, TermsAggregatorBuilder.SHOW_TERM_DOC_COUNT_ERROR)) {
otherOptions.put(TermsAggregatorBuilder.SHOW_TERM_DOC_COUNT_ERROR, parser.booleanValue());
if (parseFieldMatcher.match(currentFieldName, TermsAggregationBuilder.SHOW_TERM_DOC_COUNT_ERROR)) {
otherOptions.put(TermsAggregationBuilder.SHOW_TERM_DOC_COUNT_ERROR, parser.booleanValue());
return true;
}
}

@ -158,7 +159,7 @@ public class TermsParser extends AbstractTermsParser {
@Override
public TermsAggregator.BucketCountThresholds getDefaultBucketCountThresholds() {
return new TermsAggregator.BucketCountThresholds(TermsAggregatorBuilder.DEFAULT_BUCKET_COUNT_THRESHOLDS);
return new TermsAggregator.BucketCountThresholds(TermsAggregationBuilder.DEFAULT_BUCKET_COUNT_THRESHOLDS);
}

static Terms.Order resolveOrder(String key, boolean asc) {
@ -43,6 +43,7 @@ import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSource.Bytes.WithOrdinals;

@ -135,7 +136,8 @@ public class IncludeExclude implements Writeable, ToXContent {
}

public static abstract class OrdinalsFilter {
public abstract LongBitSet acceptedGlobalOrdinals(RandomAccessOrds globalOrdinals, ValuesSource.Bytes.WithOrdinals valueSource) throws IOException;
public abstract LongBitSet acceptedGlobalOrdinals(RandomAccessOrds globalOrdinals, ValuesSource.Bytes.WithOrdinals valueSource)
throws IOException;

}

@ -152,7 +154,8 @@ public class IncludeExclude implements Writeable, ToXContent {
*
*/
@Override
public LongBitSet acceptedGlobalOrdinals(RandomAccessOrds globalOrdinals, ValuesSource.Bytes.WithOrdinals valueSource) throws IOException {
public LongBitSet acceptedGlobalOrdinals(RandomAccessOrds globalOrdinals, ValuesSource.Bytes.WithOrdinals valueSource)
throws IOException {
LongBitSet acceptedGlobalOrdinals = new LongBitSet(globalOrdinals.getValueCount());
TermsEnum globalTermsEnum;
Terms globalTerms = new DocValuesTerms(globalOrdinals);

@ -179,7 +182,7 @@ public class IncludeExclude implements Writeable, ToXContent {
@Override
public LongBitSet acceptedGlobalOrdinals(RandomAccessOrds globalOrdinals, WithOrdinals valueSource) throws IOException {
LongBitSet acceptedGlobalOrdinals = new LongBitSet(globalOrdinals.getValueCount());
if(includeValues!=null){
if (includeValues != null) {
for (BytesRef term : includeValues) {
long ord = globalOrdinals.lookupTerm(term);
if (ord >= 0) {

@ -534,33 +537,46 @@ public class IncludeExclude implements Writeable, ToXContent {
return a;
}

public StringFilter convertToStringFilter() {
public StringFilter convertToStringFilter(DocValueFormat format) {
if (isRegexBased()) {
return new AutomatonBackedStringFilter(toAutomaton());
}
return new TermListBackedStringFilter(includeValues, excludeValues);
return new TermListBackedStringFilter(parseForDocValues(includeValues, format), parseForDocValues(excludeValues, format));
}

public OrdinalsFilter convertToOrdinalsFilter() {
private static SortedSet<BytesRef> parseForDocValues(SortedSet<BytesRef> endUserFormattedValues, DocValueFormat format) {
SortedSet<BytesRef> result = endUserFormattedValues;
if (endUserFormattedValues != null) {
if (format != DocValueFormat.RAW) {
result = new TreeSet<>();
for (BytesRef formattedVal : endUserFormattedValues) {
result.add(format.parseBytesRef(formattedVal.utf8ToString()));
}
}
}
return result;
}

public OrdinalsFilter convertToOrdinalsFilter(DocValueFormat format) {

if (isRegexBased()) {
return new AutomatonBackedOrdinalsFilter(toAutomaton());
}
return new TermListBackedOrdinalsFilter(includeValues, excludeValues);
return new TermListBackedOrdinalsFilter(parseForDocValues(includeValues, format), parseForDocValues(excludeValues, format));
}

public LongFilter convertToLongFilter() {
public LongFilter convertToLongFilter(DocValueFormat format) {
int numValids = includeValues == null ? 0 : includeValues.size();
int numInvalids = excludeValues == null ? 0 : excludeValues.size();
LongFilter result = new LongFilter(numValids, numInvalids);
if (includeValues != null) {
for (BytesRef val : includeValues) {
result.addAccept(Long.parseLong(val.utf8ToString()));
result.addAccept(format.parseLong(val.utf8ToString(), false, null));
}
}
if (excludeValues != null) {
for (BytesRef val : excludeValues) {
result.addReject(Long.parseLong(val.utf8ToString()));
result.addReject(format.parseLong(val.utf8ToString(), false, null));
}
}
return result;

@ -572,13 +588,13 @@ public class IncludeExclude implements Writeable, ToXContent {
LongFilter result = new LongFilter(numValids, numInvalids);
if (includeValues != null) {
for (BytesRef val : includeValues) {
double dval=Double.parseDouble(val.utf8ToString());
double dval = Double.parseDouble(val.utf8ToString());
result.addAccept(NumericUtils.doubleToSortableLong(dval));
}
}
if (excludeValues != null) {
for (BytesRef val : excludeValues) {
double dval=Double.parseDouble(val.utf8ToString());
double dval = Double.parseDouble(val.utf8ToString());
result.addReject(NumericUtils.doubleToSortableLong(dval));
}
}
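Taken together, the IncludeExclude changes mean callers resolve the field's DocValueFormat once and pass it down so user-supplied terms are parsed into the doc-values representation. A minimal sketch; the fieldType lookup and the IP example are assumptions, not part of this diff:

--------------------------------
// Hedged sketch, not part of the commit.
DocValueFormat format = fieldType.docValueFormat(null, null); // hypothetically DocValueFormat.IP for an ip field
IncludeExclude.StringFilter filter = includeExclude.convertToStringFilter(format);
// For DocValueFormat.RAW, parseForDocValues(...) is a no-op; otherwise each
// value goes through format.parseBytesRef(...) before the
// TermListBackedStringFilter is built, per the hunk above.
--------------------------------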
@ -29,24 +29,24 @@ import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;

import java.io.IOException;

public class AvgAggregatorBuilder extends ValuesSourceAggregatorBuilder.LeafOnly<ValuesSource.Numeric, AvgAggregatorBuilder> {
public class AvgAggregationBuilder extends ValuesSourceAggregationBuilder.LeafOnly<ValuesSource.Numeric, AvgAggregationBuilder> {
public static final String NAME = InternalAvg.TYPE.name();
public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME);

public AvgAggregatorBuilder(String name) {
public AvgAggregationBuilder(String name) {
super(name, InternalAvg.TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC);
}

/**
* Read from a stream.
*/
public AvgAggregatorBuilder(StreamInput in) throws IOException {
public AvgAggregationBuilder(StreamInput in) throws IOException {
super(in, InternalAvg.TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC);
}

@ -80,4 +80,4 @@ public class AvgAggregatorBuilder extends ValuesSourceAggregatorBuilder.LeafOnly
public String getWriteableName() {
return NAME;
}
}
}
@ -44,8 +44,8 @@ public class AvgParser extends NumericValuesSourceParser {
}

@Override
protected AvgAggregatorBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType,
ValueType targetValueType, Map<ParseField, Object> otherOptions) {
return new AvgAggregatorBuilder(aggregationName);
protected AvgAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType,
ValueType targetValueType, Map<ParseField, Object> otherOptions) {
return new AvgAggregationBuilder(aggregationName);
}
}
@ -28,14 +28,16 @@ import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder;
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;

import java.io.IOException;
import java.util.Objects;

public final class CardinalityAggregatorBuilder extends ValuesSourceAggregatorBuilder.LeafOnly<ValuesSource, CardinalityAggregatorBuilder> {
public final class CardinalityAggregationBuilder
extends ValuesSourceAggregationBuilder.LeafOnly<ValuesSource, CardinalityAggregationBuilder> {

public static final String NAME = InternalCardinality.TYPE.name();
public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME);

@ -43,14 +45,14 @@ public final class CardinalityAggregatorBuilder extends ValuesSourceAggregatorBu
private Long precisionThreshold = null;

public CardinalityAggregatorBuilder(String name, ValueType targetValueType) {
public CardinalityAggregationBuilder(String name, ValueType targetValueType) {
super(name, InternalCardinality.TYPE, ValuesSourceType.ANY, targetValueType);
}

/**
* Read from a stream.
*/
public CardinalityAggregatorBuilder(StreamInput in) throws IOException {
public CardinalityAggregationBuilder(StreamInput in) throws IOException {
super(in, InternalCardinality.TYPE, ValuesSourceType.ANY);
if (in.readBoolean()) {
precisionThreshold = in.readLong();

@ -75,7 +77,7 @@ public final class CardinalityAggregatorBuilder extends ValuesSourceAggregatorBu
* Set a precision threshold. Higher values improve accuracy but also
* increase memory usage.
*/
public CardinalityAggregatorBuilder precisionThreshold(long precisionThreshold) {
public CardinalityAggregationBuilder precisionThreshold(long precisionThreshold) {
if (precisionThreshold < 0) {
throw new IllegalArgumentException(
"[precisionThreshold] must be greater than or equal to 0. Found [" + precisionThreshold + "] in [" + name + "]");

@ -122,7 +124,7 @@ public final class CardinalityAggregatorBuilder extends ValuesSourceAggregatorBu
@Override
protected boolean innerEquals(Object obj) {
CardinalityAggregatorBuilder other = (CardinalityAggregatorBuilder) obj;
CardinalityAggregationBuilder other = (CardinalityAggregationBuilder) obj;
return Objects.equals(precisionThreshold, other.precisionThreshold);
}
@ -40,10 +40,10 @@ public class CardinalityParser extends AnyValuesSourceParser {
     }

     @Override
-    protected CardinalityAggregatorBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType,
-            ValueType targetValueType, Map<ParseField, Object> otherOptions) {
-        CardinalityAggregatorBuilder factory = new CardinalityAggregatorBuilder(aggregationName, targetValueType);
-        Long precisionThreshold = (Long) otherOptions.get(CardinalityAggregatorBuilder.PRECISION_THRESHOLD_FIELD);
+    protected CardinalityAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType,
+            ValueType targetValueType, Map<ParseField, Object> otherOptions) {
+        CardinalityAggregationBuilder factory = new CardinalityAggregationBuilder(aggregationName, targetValueType);
+        Long precisionThreshold = (Long) otherOptions.get(CardinalityAggregationBuilder.PRECISION_THRESHOLD_FIELD);
         if (precisionThreshold != null) {
             factory.precisionThreshold(precisionThreshold);
         }

@ -54,8 +54,8 @@ public class CardinalityParser extends AnyValuesSourceParser {
     protected boolean token(String aggregationName, String currentFieldName, Token token, XContentParser parser,
             ParseFieldMatcher parseFieldMatcher, Map<ParseField, Object> otherOptions) throws IOException {
         if (token.isValue()) {
-            if (parseFieldMatcher.match(currentFieldName, CardinalityAggregatorBuilder.PRECISION_THRESHOLD_FIELD)) {
-                otherOptions.put(CardinalityAggregatorBuilder.PRECISION_THRESHOLD_FIELD, parser.longValue());
+            if (parseFieldMatcher.match(currentFieldName, CardinalityAggregationBuilder.PRECISION_THRESHOLD_FIELD)) {
+                otherOptions.put(CardinalityAggregationBuilder.PRECISION_THRESHOLD_FIELD, parser.longValue());
                 return true;
             } else if (parseFieldMatcher.match(currentFieldName, REHASH)) {
                 // ignore

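Taken together, the two files above give the post-rename shape of the cardinality API: construct the builder, then tune it with its fluent setters. A usage sketch under stated assumptions: the package path and the inherited field(String) setter come from context, not from these hunks.

    import org.elasticsearch.search.aggregations.metrics.cardinality.CardinalityAggregationBuilder; // assumed path
    import org.elasticsearch.search.aggregations.support.ValueType;

    class CardinalityExample {
        static CardinalityAggregationBuilder uniqueUsers() {
            return new CardinalityAggregationBuilder("unique_users", ValueType.STRING)
                    .field("user_id")              // assumed base-class setter
                    .precisionThreshold(1000);     // >= 0, else IllegalArgumentException
        }
    }

Higher thresholds buy accuracy at the cost of memory, per the javadoc in the builder hunk above.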
@ -28,27 +28,27 @@ import org.elasticsearch.search.aggregations.AggregatorFactory;
 import org.elasticsearch.search.aggregations.support.AggregationContext;
 import org.elasticsearch.search.aggregations.support.ValueType;
 import org.elasticsearch.search.aggregations.support.ValuesSource;
-import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder;
+import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
 import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
 import org.elasticsearch.search.aggregations.support.ValuesSourceType;

 import java.io.IOException;
 import java.util.Objects;

-public class GeoBoundsAggregatorBuilder extends ValuesSourceAggregatorBuilder<ValuesSource.GeoPoint, GeoBoundsAggregatorBuilder> {
+public class GeoBoundsAggregationBuilder extends ValuesSourceAggregationBuilder<ValuesSource.GeoPoint, GeoBoundsAggregationBuilder> {
     public static final String NAME = InternalGeoBounds.TYPE.name();
     public static final ParseField AGGREGATION_NAME_FIED = new ParseField(NAME);

     private boolean wrapLongitude = true;

-    public GeoBoundsAggregatorBuilder(String name) {
+    public GeoBoundsAggregationBuilder(String name) {
         super(name, InternalGeoBounds.TYPE, ValuesSourceType.GEOPOINT, ValueType.GEOPOINT);
     }

     /**
      * Read from a stream.
      */
-    public GeoBoundsAggregatorBuilder(StreamInput in) throws IOException {
+    public GeoBoundsAggregationBuilder(StreamInput in) throws IOException {
         super(in, InternalGeoBounds.TYPE, ValuesSourceType.GEOPOINT, ValueType.GEOPOINT);
         wrapLongitude = in.readBoolean();
     }

@ -61,7 +61,7 @@ public class GeoBoundsAggregatorBuilder extends ValuesSourceAggregatorBuilder<Va
     /**
      * Set whether to wrap longitudes. Defaults to true.
      */
-    public GeoBoundsAggregatorBuilder wrapLongitude(boolean wrapLongitude) {
+    public GeoBoundsAggregationBuilder wrapLongitude(boolean wrapLongitude) {
         this.wrapLongitude = wrapLongitude;
         return this;
     }

@ -92,7 +92,7 @@ public class GeoBoundsAggregatorBuilder extends ValuesSourceAggregatorBuilder<Va

     @Override
     protected boolean innerEquals(Object obj) {
-        GeoBoundsAggregatorBuilder other = (GeoBoundsAggregatorBuilder) obj;
+        GeoBoundsAggregationBuilder other = (GeoBoundsAggregationBuilder) obj;
         return Objects.equals(wrapLongitude, other.wrapLongitude);
     }

@ -100,4 +100,4 @@ public class GeoBoundsAggregatorBuilder extends ValuesSourceAggregatorBuilder<Va
     public String getWriteableName() {
         return NAME;
     }
-}
+}

@ -37,9 +37,9 @@ public class GeoBoundsParser extends GeoPointValuesSourceParser {
     }

     @Override
-    protected GeoBoundsAggregatorBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType,
-            ValueType targetValueType, Map<ParseField, Object> otherOptions) {
-        GeoBoundsAggregatorBuilder factory = new GeoBoundsAggregatorBuilder(aggregationName);
+    protected GeoBoundsAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType,
+            ValueType targetValueType, Map<ParseField, Object> otherOptions) {
+        GeoBoundsAggregationBuilder factory = new GeoBoundsAggregationBuilder(aggregationName);
         Boolean wrapLongitude = (Boolean) otherOptions.get(GeoBoundsAggregator.WRAP_LONGITUDE_FIELD);
         if (wrapLongitude != null) {
             factory.wrapLongitude(wrapLongitude);

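The geo_bounds builder follows the same pattern; the only knob specific to it is wrapLongitude, which defaults to true. A sketch under the same field(String) and package-path assumptions:

    import org.elasticsearch.search.aggregations.metrics.geobounds.GeoBoundsAggregationBuilder; // assumed path

    class GeoBoundsExample {
        static GeoBoundsAggregationBuilder viewport() {
            return new GeoBoundsAggregationBuilder("viewport")
                    .field("location")        // assumed base-class setter
                    .wrapLongitude(true);     // the default; lets boxes cross the dateline
        }
    }
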
@ -28,25 +28,25 @@ import org.elasticsearch.search.aggregations.AggregatorFactory;
 import org.elasticsearch.search.aggregations.support.AggregationContext;
 import org.elasticsearch.search.aggregations.support.ValueType;
 import org.elasticsearch.search.aggregations.support.ValuesSource;
-import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder;
+import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
 import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
 import org.elasticsearch.search.aggregations.support.ValuesSourceType;

 import java.io.IOException;

-public class GeoCentroidAggregatorBuilder
-        extends ValuesSourceAggregatorBuilder.LeafOnly<ValuesSource.GeoPoint, GeoCentroidAggregatorBuilder> {
+public class GeoCentroidAggregationBuilder
+        extends ValuesSourceAggregationBuilder.LeafOnly<ValuesSource.GeoPoint, GeoCentroidAggregationBuilder> {
     public static final String NAME = InternalGeoCentroid.TYPE.name();
     public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME);

-    public GeoCentroidAggregatorBuilder(String name) {
+    public GeoCentroidAggregationBuilder(String name) {
         super(name, InternalGeoCentroid.TYPE, ValuesSourceType.GEOPOINT, ValueType.GEOPOINT);
     }

     /**
      * Read from a stream.
      */
-    public GeoCentroidAggregatorBuilder(StreamInput in) throws IOException {
+    public GeoCentroidAggregationBuilder(StreamInput in) throws IOException {
         super(in, InternalGeoCentroid.TYPE, ValuesSourceType.GEOPOINT, ValueType.GEOPOINT);
     }

@ -80,4 +80,4 @@ public class GeoCentroidAggregatorBuilder
     public String getWriteableName() {
         return NAME;
     }
-}
+}

@ -46,8 +46,8 @@ public class GeoCentroidParser extends GeoPointValuesSourceParser {
     }

     @Override
-    protected GeoCentroidAggregatorBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType,
-            ValueType targetValueType, Map<ParseField, Object> otherOptions) {
-        return new GeoCentroidAggregatorBuilder(aggregationName);
+    protected GeoCentroidAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType,
+            ValueType targetValueType, Map<ParseField, Object> otherOptions) {
+        return new GeoCentroidAggregationBuilder(aggregationName);
     }
 }

@ -29,24 +29,24 @@ import org.elasticsearch.search.aggregations.support.AggregationContext;
 import org.elasticsearch.search.aggregations.support.ValueType;
 import org.elasticsearch.search.aggregations.support.ValuesSource;
 import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
-import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder;
+import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
 import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
 import org.elasticsearch.search.aggregations.support.ValuesSourceType;

 import java.io.IOException;

-public class MaxAggregatorBuilder extends ValuesSourceAggregatorBuilder.LeafOnly<ValuesSource.Numeric, MaxAggregatorBuilder> {
+public class MaxAggregationBuilder extends ValuesSourceAggregationBuilder.LeafOnly<ValuesSource.Numeric, MaxAggregationBuilder> {
     public static final String NAME = InternalMax.TYPE.name();
     public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME);

-    public MaxAggregatorBuilder(String name) {
+    public MaxAggregationBuilder(String name) {
         super(name, InternalMax.TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC);
     }

     /**
      * Read from a stream.
      */
-    public MaxAggregatorBuilder(StreamInput in) throws IOException {
+    public MaxAggregationBuilder(StreamInput in) throws IOException {
         super(in, InternalMax.TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC);
     }

@ -80,4 +80,4 @@ public class MaxAggregatorBuilder extends ValuesSourceAggregatorBuilder.LeafOnly
     public String getWriteableName() {
         return NAME;
     }
-}
+}

@ -44,8 +44,8 @@ public class MaxParser extends NumericValuesSourceParser {
     }

     @Override
-    protected MaxAggregatorBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType,
-            ValueType targetValueType, Map<ParseField, Object> otherOptions) {
-        return new MaxAggregatorBuilder(aggregationName);
+    protected MaxAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType,
+            ValueType targetValueType, Map<ParseField, Object> otherOptions) {
+        return new MaxAggregationBuilder(aggregationName);
     }
 }

@ -29,24 +29,24 @@ import org.elasticsearch.search.aggregations.support.AggregationContext;
 import org.elasticsearch.search.aggregations.support.ValueType;
 import org.elasticsearch.search.aggregations.support.ValuesSource;
 import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
-import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder;
+import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
 import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
 import org.elasticsearch.search.aggregations.support.ValuesSourceType;

 import java.io.IOException;

-public class MinAggregatorBuilder extends ValuesSourceAggregatorBuilder.LeafOnly<ValuesSource.Numeric, MinAggregatorBuilder> {
+public class MinAggregationBuilder extends ValuesSourceAggregationBuilder.LeafOnly<ValuesSource.Numeric, MinAggregationBuilder> {
     public static final String NAME = InternalMin.TYPE.name();
     public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME);

-    public MinAggregatorBuilder(String name) {
+    public MinAggregationBuilder(String name) {
         super(name, InternalMin.TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC);
     }

     /**
      * Read from a stream.
      */
-    public MinAggregatorBuilder(StreamInput in) throws IOException {
+    public MinAggregationBuilder(StreamInput in) throws IOException {
         super(in, InternalMin.TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC);
     }

@ -80,4 +80,4 @@ public class MinAggregatorBuilder extends ValuesSourceAggregatorBuilder.LeafOnly
     public String getWriteableName() {
         return NAME;
     }
-}
+}

@ -45,8 +45,8 @@ public class MinParser extends NumericValuesSourceParser {
     }

     @Override
-    protected MinAggregatorBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType,
-            ValueType targetValueType, Map<ParseField, Object> otherOptions) {
-        return new MinAggregatorBuilder(aggregationName);
+    protected MinAggregationBuilder createFactory(String aggregationName, ValuesSourceType valuesSourceType,
+            ValueType targetValueType, Map<ParseField, Object> otherOptions) {
+        return new MinAggregationBuilder(aggregationName);
     }
 }

@ -27,7 +27,7 @@ import org.elasticsearch.common.xcontent.XContentParser.Token;
 import org.elasticsearch.search.aggregations.support.AbstractValuesSourceParser.NumericValuesSourceParser;
 import org.elasticsearch.search.aggregations.support.ValueType;
 import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
-import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder;
+import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
 import org.elasticsearch.search.aggregations.support.ValuesSourceType;

 import java.io.IOException;

@ -115,8 +115,8 @@ public abstract class AbstractPercentilesParser extends NumericValuesSourceParse
     }

     @Override
-    protected ValuesSourceAggregatorBuilder<Numeric, ?> createFactory(String aggregationName, ValuesSourceType valuesSourceType,
-            ValueType targetValueType, Map<ParseField, Object> otherOptions) {
+    protected ValuesSourceAggregationBuilder<Numeric, ?> createFactory(String aggregationName, ValuesSourceType valuesSourceType,
+            ValueType targetValueType, Map<ParseField, Object> otherOptions) {
         PercentilesMethod method = (PercentilesMethod) otherOptions.getOrDefault(METHOD_FIELD, PercentilesMethod.TDIGEST);

         double[] cdfValues = (double[]) otherOptions.get(keysField());

@ -126,10 +126,10 @@ public abstract class AbstractPercentilesParser extends NumericValuesSourceParse
         return buildFactory(aggregationName, cdfValues, method, compression, numberOfSignificantValueDigits, keyed);
     }

-    protected abstract ValuesSourceAggregatorBuilder<Numeric, ?> buildFactory(String aggregationName, double[] cdfValues,
-                                                                              PercentilesMethod method,
-                                                                              Double compression,
-                                                                              Integer numberOfSignificantValueDigits, Boolean keyed);
+    protected abstract ValuesSourceAggregationBuilder<Numeric, ?> buildFactory(String aggregationName, double[] cdfValues,
+                                                                               PercentilesMethod method,
+                                                                               Double compression,
+                                                                               Integer numberOfSignificantValueDigits, Boolean keyed);

     protected abstract ParseField keysField();

@ -32,7 +32,7 @@ import org.elasticsearch.search.aggregations.support.AggregationContext;
 import org.elasticsearch.search.aggregations.support.ValueType;
 import org.elasticsearch.search.aggregations.support.ValuesSource;
 import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
-import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder.LeafOnly;
+import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder.LeafOnly;
 import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
 import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
 import org.elasticsearch.search.aggregations.support.ValuesSourceType;

@ -41,7 +41,7 @@ import java.io.IOException;
 import java.util.Arrays;
 import java.util.Objects;

-public class PercentileRanksAggregatorBuilder extends LeafOnly<ValuesSource.Numeric, PercentileRanksAggregatorBuilder> {
+public class PercentileRanksAggregationBuilder extends LeafOnly<ValuesSource.Numeric, PercentileRanksAggregationBuilder> {
     public static final String NAME = InternalTDigestPercentileRanks.TYPE.name();
     public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME);

@ -51,14 +51,14 @@ public class PercentileRanksAggregatorBuilder extends LeafOnly<ValuesSource.Nume
     private double compression = 100.0;
     private boolean keyed = true;

-    public PercentileRanksAggregatorBuilder(String name) {
+    public PercentileRanksAggregationBuilder(String name) {
         super(name, InternalTDigestPercentileRanks.TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC);
     }

     /**
      * Read from a stream.
      */
-    public PercentileRanksAggregatorBuilder(StreamInput in) throws IOException {
+    public PercentileRanksAggregationBuilder(StreamInput in) throws IOException {
         super(in, InternalTDigestPercentileRanks.TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC);
         values = in.readDoubleArray();
         keyed = in.readBoolean();

@ -79,7 +79,7 @@ public class PercentileRanksAggregatorBuilder extends LeafOnly<ValuesSource.Nume
     /**
      * Set the values to compute percentiles from.
      */
-    public PercentileRanksAggregatorBuilder values(double... values) {
+    public PercentileRanksAggregationBuilder values(double... values) {
         if (values == null) {
             throw new IllegalArgumentException("[values] must not be null: [" + name + "]");
         }

@ -99,7 +99,7 @@ public class PercentileRanksAggregatorBuilder extends LeafOnly<ValuesSource.Nume
     /**
      * Set whether the XContent response should be keyed
      */
-    public PercentileRanksAggregatorBuilder keyed(boolean keyed) {
+    public PercentileRanksAggregationBuilder keyed(boolean keyed) {
         this.keyed = keyed;
         return this;
     }

@ -115,7 +115,7 @@ public class PercentileRanksAggregatorBuilder extends LeafOnly<ValuesSource.Nume
      * Expert: set the number of significant digits in the values. Only relevant
      * when using {@link PercentilesMethod#HDR}.
      */
-    public PercentileRanksAggregatorBuilder numberOfSignificantValueDigits(int numberOfSignificantValueDigits) {
+    public PercentileRanksAggregationBuilder numberOfSignificantValueDigits(int numberOfSignificantValueDigits) {
         if (numberOfSignificantValueDigits < 0 || numberOfSignificantValueDigits > 5) {
             throw new IllegalArgumentException("[numberOfSignificantValueDigits] must be between 0 and 5: [" + name + "]");
         }

@ -135,7 +135,7 @@ public class PercentileRanksAggregatorBuilder extends LeafOnly<ValuesSource.Nume
      * Expert: set the compression. Higher values improve accuracy but also
      * memory usage. Only relevant when using {@link PercentilesMethod#TDIGEST}.
      */
-    public PercentileRanksAggregatorBuilder compression(double compression) {
+    public PercentileRanksAggregationBuilder compression(double compression) {
         if (compression < 0.0) {
             throw new IllegalArgumentException(
                     "[compression] must be greater than or equal to 0. Found [" + compression + "] in [" + name + "]");

@ -152,7 +152,7 @@ public class PercentileRanksAggregatorBuilder extends LeafOnly<ValuesSource.Nume
         return compression;
     }

-    public PercentileRanksAggregatorBuilder method(PercentilesMethod method) {
+    public PercentileRanksAggregationBuilder method(PercentilesMethod method) {
         if (method == null) {
             throw new IllegalArgumentException("[method] must not be null: [" + name + "]");
         }

@ -195,7 +195,7 @@ public class PercentileRanksAggregatorBuilder extends LeafOnly<ValuesSource.Nume

     @Override
     protected boolean innerEquals(Object obj) {
-        PercentileRanksAggregatorBuilder other = (PercentileRanksAggregatorBuilder) obj;
+        PercentileRanksAggregationBuilder other = (PercentileRanksAggregationBuilder) obj;
         if (!Objects.equals(method, other.method)) {
             return false;
         }

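The percentile_ranks builder keeps its fluent setters through the rename, and each setter validates its argument as the hunks above show. A configuration sketch for the HDR method, with the package paths and the inherited field(String) setter assumed from context:

    import org.elasticsearch.search.aggregations.metrics.percentiles.PercentileRanksAggregationBuilder; // assumed path
    import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesMethod;                 // assumed path

    class PercentileRanksExample {
        static PercentileRanksAggregationBuilder loadRanks() {
            return new PercentileRanksAggregationBuilder("load_ranks")
                    .field("load_time")                    // assumed base-class setter
                    .values(500.0, 600.0)                  // must not be null
                    .method(PercentilesMethod.HDR)         // TDIGEST is the default
                    .numberOfSignificantValueDigits(3);    // HDR only; must be in [0, 5]
        }
    }
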
@ -20,7 +20,7 @@ package org.elasticsearch.search.aggregations.metrics.percentiles;

 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
-import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder;
+import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;

 /**
  *

@ -39,9 +39,10 @@ public class PercentileRanksParser extends AbstractPercentilesParser {
     }

     @Override
-    protected ValuesSourceAggregatorBuilder<Numeric, ?> buildFactory(String aggregationName, double[] keys, PercentilesMethod method,
-            Double compression, Integer numberOfSignificantValueDigits, Boolean keyed) {
-        PercentileRanksAggregatorBuilder factory = new PercentileRanksAggregatorBuilder(aggregationName);
+    protected ValuesSourceAggregationBuilder<Numeric, ?> buildFactory(String aggregationName, double[] keys, PercentilesMethod method,
+            Double compression, Integer numberOfSignificantValueDigits,
+            Boolean keyed) {
+        PercentileRanksAggregationBuilder factory = new PercentileRanksAggregationBuilder(aggregationName);
         if (keys != null) {
             factory.values(keys);
         }

@ -32,7 +32,7 @@ import org.elasticsearch.search.aggregations.support.AggregationContext;
 import org.elasticsearch.search.aggregations.support.ValueType;
 import org.elasticsearch.search.aggregations.support.ValuesSource;
 import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
-import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder.LeafOnly;
+import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder.LeafOnly;
 import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory;
 import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
 import org.elasticsearch.search.aggregations.support.ValuesSourceType;

@ -41,7 +41,7 @@ import java.io.IOException;
 import java.util.Arrays;
 import java.util.Objects;

-public class PercentilesAggregatorBuilder extends LeafOnly<ValuesSource.Numeric, PercentilesAggregatorBuilder> {
+public class PercentilesAggregationBuilder extends LeafOnly<ValuesSource.Numeric, PercentilesAggregationBuilder> {
     public static final String NAME = InternalTDigestPercentiles.TYPE.name();
     public static final ParseField AGGREGATION_NAME_FIELD = new ParseField(NAME);

@ -51,14 +51,14 @@ public class PercentilesAggregatorBuilder extends LeafOnly<ValuesSource.Numeric,
     private double compression = 100.0;
     private boolean keyed = true;

-    public PercentilesAggregatorBuilder(String name) {
+    public PercentilesAggregationBuilder(String name) {
         super(name, InternalTDigestPercentiles.TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC);
     }

     /**
      * Read from a stream.
      */
-    public PercentilesAggregatorBuilder(StreamInput in) throws IOException {
+    public PercentilesAggregationBuilder(StreamInput in) throws IOException {
         super(in, InternalTDigestPercentiles.TYPE, ValuesSourceType.NUMERIC, ValueType.NUMERIC);
         percents = in.readDoubleArray();
         keyed = in.readBoolean();

@ -79,7 +79,7 @@ public class PercentilesAggregatorBuilder extends LeafOnly<ValuesSource.Numeric,
     /**
      * Set the values to compute percentiles from.
      */
-    public PercentilesAggregatorBuilder percentiles(double... percents) {
+    public PercentilesAggregationBuilder percentiles(double... percents) {
         if (percents == null) {
             throw new IllegalArgumentException("[percents] must not be null: [" + name + "]");
         }

@ -99,7 +99,7 @@ public class PercentilesAggregatorBuilder extends LeafOnly<ValuesSource.Numeric,
     /**
      * Set whether the XContent response should be keyed
      */
-    public PercentilesAggregatorBuilder keyed(boolean keyed) {
+    public PercentilesAggregationBuilder keyed(boolean keyed) {
         this.keyed = keyed;
         return this;
     }

@ -115,7 +115,7 @@ public class PercentilesAggregatorBuilder extends LeafOnly<ValuesSource.Numeric,
      * Expert: set the number of significant digits in the values. Only relevant
      * when using {@link PercentilesMethod#HDR}.
      */
-    public PercentilesAggregatorBuilder numberOfSignificantValueDigits(int numberOfSignificantValueDigits) {
+    public PercentilesAggregationBuilder numberOfSignificantValueDigits(int numberOfSignificantValueDigits) {
         if (numberOfSignificantValueDigits < 0 || numberOfSignificantValueDigits > 5) {
             throw new IllegalArgumentException("[numberOfSignificantValueDigits] must be between 0 and 5: [" + name + "]");
         }

@ -135,7 +135,7 @@ public class PercentilesAggregatorBuilder extends LeafOnly<ValuesSource.Numeric,
      * Expert: set the compression. Higher values improve accuracy but also
      * memory usage. Only relevant when using {@link PercentilesMethod#TDIGEST}.
      */
-    public PercentilesAggregatorBuilder compression(double compression) {
+    public PercentilesAggregationBuilder compression(double compression) {
         if (compression < 0.0) {
             throw new IllegalArgumentException(
                     "[compression] must be greater than or equal to 0. Found [" + compression + "] in [" + name + "]");

@ -152,7 +152,7 @@ public class PercentilesAggregatorBuilder extends LeafOnly<ValuesSource.Numeric,
         return compression;
     }

-    public PercentilesAggregatorBuilder method(PercentilesMethod method) {
+    public PercentilesAggregationBuilder method(PercentilesMethod method) {
         if (method == null) {
             throw new IllegalArgumentException("[method] must not be null: [" + name + "]");
         }

@ -195,7 +195,7 @@ public class PercentilesAggregatorBuilder extends LeafOnly<ValuesSource.Numeric,

     @Override
     protected boolean innerEquals(Object obj) {
-        PercentilesAggregatorBuilder other = (PercentilesAggregatorBuilder) obj;
+        PercentilesAggregationBuilder other = (PercentilesAggregationBuilder) obj;
         if (!Objects.equals(method, other.method)) {
             return false;
         }

@ -20,7 +20,7 @@ package org.elasticsearch.search.aggregations.metrics.percentiles;

 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;
-import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorBuilder;
+import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;

 /**
  *

@ -41,9 +41,10 @@ public class PercentilesParser extends AbstractPercentilesParser {
     }

     @Override
-    protected ValuesSourceAggregatorBuilder<Numeric, ?> buildFactory(String aggregationName, double[] keys, PercentilesMethod method,
-            Double compression, Integer numberOfSignificantValueDigits, Boolean keyed) {
-        PercentilesAggregatorBuilder factory = new PercentilesAggregatorBuilder(aggregationName);
+    protected ValuesSourceAggregationBuilder<Numeric, ?> buildFactory(String aggregationName, double[] keys, PercentilesMethod method,
+            Double compression, Integer numberOfSignificantValueDigits,
+            Boolean keyed) {
+        PercentilesAggregationBuilder factory = new PercentilesAggregationBuilder(aggregationName);
         if (keys != null) {
             factory.percentiles(keys);
         }

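The percentiles builder is its mirror image: it takes the percentile keys through percentiles(double...) instead of values(double...). A matching sketch, same package-path and field(String) assumptions:

    import org.elasticsearch.search.aggregations.metrics.percentiles.PercentilesAggregationBuilder; // assumed path

    class PercentilesExample {
        static PercentilesAggregationBuilder latency() {
            return new PercentilesAggregationBuilder("latency_pcts")
                    .field("latency_ms")         // assumed base-class setter
                    .percentiles(50, 95, 99)     // must not be null
                    .compression(200.0)          // TDIGEST only; must be >= 0
                    .keyed(false);               // array response instead of a keyed map
        }
    }
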
Some files were not shown because too many files have changed in this diff.