Merge branch 'master' into plugin_name_api
Commit 8196cf01e3
@@ -753,7 +753,6 @@
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]client[/\\]AbstractClientHeadersTestCase.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]ClusterHealthIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]ClusterInfoServiceIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]ClusterModuleTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]ClusterStateDiffIT.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]ClusterStateTests.java" checks="LineLength" />
<suppress files="core[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]cluster[/\\]DiskUsageTests.java" checks="LineLength" />

@@ -38,9 +38,9 @@ import org.elasticsearch.common.util.concurrent.AbstractRunnable;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.tasks.PersistedTaskInfo;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.tasks.TaskId;
import org.elasticsearch.tasks.PersistedTaskInfo;
import org.elasticsearch.tasks.TaskPersistenceService;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.BaseTransportResponseHandler;
@@ -51,7 +51,6 @@ import org.elasticsearch.transport.TransportService;
import java.io.IOException;

import static org.elasticsearch.action.admin.cluster.node.tasks.list.TransportListTasksAction.waitForCompletionTimeout;
import static org.elasticsearch.action.admin.cluster.node.tasks.list.TransportListTasksAction.waitForTaskCompletion;

/**
 * Action to get a single task. If the task isn't running then it'll try to request the status from request index.
@@ -148,7 +147,7 @@ public class TransportGetTaskAction extends HandledTransportAction<GetTaskReques
threadPool.generic().execute(new AbstractRunnable() {
@Override
protected void doRun() throws Exception {
waitForTaskCompletion(taskManager, runningTask, waitForCompletionTimeout(request.getTimeout()));
taskManager.waitForTaskCompletion(runningTask, waitForCompletionTimeout(request.getTimeout()));
// TODO look up the task's result from the .tasks index now that it is done
listener.onResponse(
new GetTaskResponse(new PersistedTaskInfo(runningTask.taskInfo(clusterService.localNode(), true))));

@@ -19,8 +19,6 @@

package org.elasticsearch.action.admin.cluster.node.tasks.list;

import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchTimeoutException;
import org.elasticsearch.action.FailedNodeException;
import org.elasticsearch.action.TaskOperationFailure;
import org.elasticsearch.action.support.ActionFilters;
@@ -34,7 +32,6 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.tasks.TaskInfo;
import org.elasticsearch.tasks.TaskManager;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;

@@ -42,26 +39,12 @@ import java.io.IOException;
import java.util.List;
import java.util.function.Consumer;

import static org.elasticsearch.common.unit.TimeValue.timeValueMillis;
import static org.elasticsearch.common.unit.TimeValue.timeValueSeconds;

/**
 *
 */
public class TransportListTasksAction extends TransportTasksAction<Task, ListTasksRequest, ListTasksResponse, TaskInfo> {
public static void waitForTaskCompletion(TaskManager taskManager, Task task, long untilInNanos) {
while (System.nanoTime() - untilInNanos < 0) {
if (taskManager.getTask(task.getId()) == null) {
return;
}
try {
Thread.sleep(WAIT_FOR_COMPLETION_POLL.millis());
} catch (InterruptedException e) {
throw new ElasticsearchException("Interrupted waiting for completion of [{}]", e, task);
}
}
throw new ElasticsearchTimeoutException("Timed out waiting for completion of [{}]", task);
}
public static long waitForCompletionTimeout(TimeValue timeout) {
if (timeout == null) {
timeout = DEFAULT_WAIT_FOR_COMPLETION_TIMEOUT;
@@ -69,7 +52,6 @@ public class TransportListTasksAction extends TransportTasksAction<Task, ListTas
return System.nanoTime() + timeout.nanos();
}

private static final TimeValue WAIT_FOR_COMPLETION_POLL = timeValueMillis(100);
private static final TimeValue DEFAULT_WAIT_FOR_COMPLETION_TIMEOUT = timeValueSeconds(30);

@Inject
@@ -105,7 +87,7 @@ public class TransportListTasksAction extends TransportTasksAction<Task, ListTas
// for itself or one of its child tasks
return;
}
waitForTaskCompletion(taskManager, task, timeoutNanos);
taskManager.waitForTaskCompletion(task, timeoutNanos);
});
}
super.processTasks(request, operation);

@@ -20,6 +20,7 @@
package org.elasticsearch.action.search;

import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.io.stream.StreamInput;
@@ -28,6 +29,7 @@ import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.rest.RestStatus;

import java.io.IOException;
import java.util.Arrays;
@@ -155,8 +157,10 @@ public class MultiSearchResponse extends ActionResponse implements Iterable<Mult
builder.startObject();
if (item.isFailure()) {
ElasticsearchException.renderThrowable(builder, params, item.getFailure());
builder.field(Fields.STATUS, ExceptionsHelper.status(item.getFailure()).getStatus());
} else {
item.getResponse().toXContent(builder, params);
builder.field(Fields.STATUS, item.getResponse().status().getStatus());
}
builder.endObject();
}
@@ -166,6 +170,7 @@ public class MultiSearchResponse extends ActionResponse implements Iterable<Mult

static final class Fields {
static final String RESPONSES = "responses";
static final String STATUS = "status";
static final String ERROR = "error";
static final String ROOT_CAUSE = "root_cause";
}

@@ -37,19 +37,21 @@ import org.elasticsearch.common.inject.ModulesBuilder;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.network.NetworkModule;
import org.elasticsearch.common.network.NetworkService;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsModule;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.indices.breaker.CircuitBreakerModule;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.monitor.MonitorService;
import org.elasticsearch.node.Node;
import org.elasticsearch.node.internal.InternalSettingsPreparer;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.PluginsModule;
import org.elasticsearch.plugins.PluginsService;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.threadpool.ExecutorBuilder;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.threadpool.ThreadPoolModule;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.transport.netty.NettyTransport;

@@ -134,10 +136,9 @@ public class TransportClient extends AbstractClient {
modules.add(pluginModule);
}
modules.add(new PluginsModule(pluginsService));
modules.add(new SettingsModule(settings));
modules.add(new NetworkModule(networkService, settings, true, namedWriteableRegistry));
modules.add(new ClusterNameModule(settings));
modules.add(new ThreadPoolModule(threadPool));
modules.add(b -> b.bind(ThreadPool.class).toInstance(threadPool));
modules.add(new SearchModule(settings, namedWriteableRegistry) {
@Override
protected void configure() {
@@ -145,9 +146,20 @@ public class TransportClient extends AbstractClient {
}
});
modules.add(new ActionModule(false, true));
modules.add(new CircuitBreakerModule(settings));

pluginsService.processModules(modules);
final List<Setting<?>> additionalSettings = new ArrayList<>();
final List<String> additionalSettingsFilter = new ArrayList<>();
additionalSettings.addAll(pluginsService.getPluginSettings());
additionalSettingsFilter.addAll(pluginsService.getPluginSettingsFilter());
for (final ExecutorBuilder<?> builder : threadPool.builders()) {
additionalSettings.addAll(builder.getRegisteredSettings());
}
SettingsModule settingsModule = new SettingsModule(settings, additionalSettings, additionalSettingsFilter);
CircuitBreakerService circuitBreakerService = Node.createCircuitBreakerService(settingsModule.getSettings(),
settingsModule.getClusterSettings());
modules.add(settingsModule);
modules.add((b -> b.bind(CircuitBreakerService.class).toInstance(circuitBreakerService)));

Injector injector = modules.createInjector();
final TransportService transportService = injector.getInstance(TransportService.class);

@@ -420,6 +420,7 @@ public final class ClusterSettings extends AbstractScopedSettings {
ResourceWatcherService.RELOAD_INTERVAL_MEDIUM,
ResourceWatcherService.RELOAD_INTERVAL_LOW,
SearchModule.INDICES_MAX_CLAUSE_COUNT_SETTING,
ThreadPool.ESTIMATED_TIME_INTERVAL_SETTING
ThreadPool.ESTIMATED_TIME_INTERVAL_SETTING,
Node.BREAKER_TYPE_KEY
)));
}

@@ -81,6 +81,11 @@ public class Setting<T> extends ToXContentToBytes {
 */
Filtered,

/**
 * iff this setting is shared with more than one module ie. can be defined multiple times.
 */
Shared,

/**
 * iff this setting can be dynamically updateable
 */
@@ -247,6 +252,13 @@ public class Setting<T> extends ToXContentToBytes {
return properties.contains(Property.Deprecated);
}

/**
 * Returns <code>true</code> if this setting is shared with more than one other module or plugin, otherwise <code>false</code>
 */
public boolean isShared() {
return properties.contains(Property.Shared);
}

/**
 * Returns <code>true</code> iff this setting is a group setting. Group settings represent a set of settings rather than a single value.
 * The key, see {@link #getKey()}, in contrast to non-group settings is a prefix like <tt>cluster.store.</tt> that matches all settings

@@ -19,7 +19,8 @@

package org.elasticsearch.common.settings;

import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.inject.Binder;
import org.elasticsearch.common.inject.Module;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.xcontent.ToXContent;
@@ -28,9 +29,11 @@ import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.tribe.TribeService;

import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Predicate;
@@ -40,7 +43,7 @@ import java.util.stream.IntStream;
/**
 * A module that binds the provided settings to the {@link Settings} interface.
 */
public class SettingsModule extends AbstractModule {
public class SettingsModule implements Module {

private final Settings settings;
private final Set<String> settingsFilterPattern = new HashSet<>();
@@ -49,8 +52,14 @@ public class SettingsModule extends AbstractModule {
private static final Predicate<String> TRIBE_CLIENT_NODE_SETTINGS_PREDICATE = (s) -> s.startsWith("tribe.")
&& TribeService.TRIBE_SETTING_KEYS.contains(s) == false;
private final ESLogger logger;
private final IndexScopedSettings indexScopedSettings;
private final ClusterSettings clusterSettings;

public SettingsModule(Settings settings) {
public SettingsModule(Settings settings, Setting<?>... additionalSettings) {
this(settings, Arrays.asList(additionalSettings), Collections.emptyList());
}

public SettingsModule(Settings settings, List<Setting<?>> additionalSettings, List<String> settingsFilter) {
logger = Loggers.getLogger(getClass(), settings);
this.settings = settings;
for (Setting<?> setting : ClusterSettings.BUILT_IN_CLUSTER_SETTINGS) {
@@ -59,12 +68,16 @@ public class SettingsModule extends AbstractModule {
for (Setting<?> setting : IndexScopedSettings.BUILT_IN_INDEX_SETTINGS) {
registerSetting(setting);
}
}

@Override
protected void configure() {
final IndexScopedSettings indexScopedSettings = new IndexScopedSettings(settings, new HashSet<>(this.indexSettings.values()));
final ClusterSettings clusterSettings = new ClusterSettings(settings, new HashSet<>(this.nodeSettings.values()));
for (Setting<?> setting : additionalSettings) {
registerSetting(setting);
}

for (String filter : settingsFilter) {
registerSettingsFilter(filter);
}
this.indexScopedSettings = new IndexScopedSettings(settings, new HashSet<>(this.indexSettings.values()));
this.clusterSettings = new ClusterSettings(settings, new HashSet<>(this.nodeSettings.values()));
Settings indexSettings = settings.filter((s) -> (s.startsWith("index.") &&
// special case - we want to get Did you mean indices.query.bool.max_clause_count
// which means we need to by-pass this check for this setting
@@ -87,7 +100,7 @@ public class SettingsModule extends AbstractModule {
"In order to upgrade all indices the settings must be updated via the /${index}/_settings API. " +
"Unless all settings are dynamic all indices must be closed in order to apply the upgrade" +
"Indices created in the future should use index templates to set default values."
).split(" ")) {
).split(" ")) {
if (count + word.length() > 85) {
builder.append(System.lineSeparator());
count = 0;
@@ -124,19 +137,23 @@ public class SettingsModule extends AbstractModule {
final Predicate<String> acceptOnlyClusterSettings = TRIBE_CLIENT_NODE_SETTINGS_PREDICATE.negate();
clusterSettings.validate(settings.filter(acceptOnlyClusterSettings));
validateTribeSettings(settings, clusterSettings);
bind(Settings.class).toInstance(settings);
bind(SettingsFilter.class).toInstance(new SettingsFilter(settings, settingsFilterPattern));
}

bind(ClusterSettings.class).toInstance(clusterSettings);
bind(IndexScopedSettings.class).toInstance(indexScopedSettings);
@Override
public void configure(Binder binder) {
binder.bind(Settings.class).toInstance(settings);
binder.bind(SettingsFilter.class).toInstance(new SettingsFilter(settings, settingsFilterPattern));
binder.bind(ClusterSettings.class).toInstance(clusterSettings);
binder.bind(IndexScopedSettings.class).toInstance(indexScopedSettings);
}

/**
 * Registers a new setting. This method should be used by plugins in order to expose any custom settings the plugin defines.
 * Unless a setting is registered the setting is unusable. If a setting is never the less specified the node will reject
 * the setting during startup.
 */
public void registerSetting(Setting<?> setting) {
private void registerSetting(Setting<?> setting) {
if (setting.isFiltered()) {
if (settingsFilterPattern.contains(setting.getKey()) == false) {
registerSettingsFilter(setting.getKey());
@@ -144,13 +161,15 @@ public class SettingsModule extends AbstractModule {
}
if (setting.hasNodeScope() || setting.hasIndexScope()) {
if (setting.hasNodeScope()) {
if (nodeSettings.containsKey(setting.getKey())) {
Setting<?> existingSetting = nodeSettings.get(setting.getKey());
if (existingSetting != null && (setting.isShared() == false || existingSetting.isShared() == false)) {
throw new IllegalArgumentException("Cannot register setting [" + setting.getKey() + "] twice");
}
nodeSettings.put(setting.getKey(), setting);
}
if (setting.hasIndexScope()) {
if (indexSettings.containsKey(setting.getKey())) {
Setting<?> existingSetting = indexSettings.get(setting.getKey());
if (existingSetting != null && (setting.isShared() == false || existingSetting.isShared() == false)) {
throw new IllegalArgumentException("Cannot register setting [" + setting.getKey() + "] twice");
}
indexSettings.put(setting.getKey(), setting);
@@ -164,7 +183,7 @@ public class SettingsModule extends AbstractModule {
 * Registers a settings filter pattern that allows to filter out certain settings that for instance contain sensitive information
 * or if a setting is for internal purposes only. The given pattern must either be a valid settings key or a simple regexp pattern.
 */
public void registerSettingsFilter(String filter) {
private void registerSettingsFilter(String filter) {
if (SettingsFilter.isValidPattern(filter) == false) {
throw new IllegalArgumentException("filter [" + filter +"] is invalid must be either a key or a regex pattern");
}
@@ -174,19 +193,6 @@ public class SettingsModule extends AbstractModule {
settingsFilterPattern.add(filter);
}

/**
 * Check if a setting has already been registered
 */
public boolean exists(Setting<?> setting) {
if (setting.hasNodeScope()) {
return nodeSettings.containsKey(setting.getKey());
}
if (setting.hasIndexScope()) {
return indexSettings.containsKey(setting.getKey());
}
throw new IllegalArgumentException("setting scope is unknown. This should never happen!");
}

private void validateTribeSettings(Settings settings, ClusterSettings clusterSettings) {
Map<String, Settings> groups = settings.filter(TRIBE_CLIENT_NODE_SETTINGS_PREDICATE).getGroups("tribe.", true);
for (Map.Entry<String, Settings> tribeSettings : groups.entrySet()) {
@@ -200,4 +206,16 @@ public class SettingsModule extends AbstractModule {
}
}
}

public Settings getSettings() {
return settings;
}

public IndexScopedSettings getIndexScopedSettings() {
return indexScopedSettings;
}

public ClusterSettings getClusterSettings() {
return clusterSettings;
}
}
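With registerSetting and registerSettingsFilter now private, plugins no longer register settings against SettingsModule directly; the node (or TransportClient) collects them and passes them to the new constructor, as the hunks above show. A minimal sketch of that wiring, assuming a PluginsService and ThreadPool are already constructed (names mirror the hunks above; this is illustrative only, not part of the commit):

// Collect plugin- and thread-pool-provided settings, then hand them to SettingsModule.
List<Setting<?>> additionalSettings = new ArrayList<>(pluginsService.getPluginSettings());
List<String> additionalSettingsFilter = new ArrayList<>(pluginsService.getPluginSettingsFilter());
for (ExecutorBuilder<?> builder : threadPool.builders()) {
    additionalSettings.addAll(builder.getRegisteredSettings());
}
// The module validates the merged settings itself and exposes them via getters.
SettingsModule settingsModule = new SettingsModule(settings, additionalSettings, additionalSettingsFilter);
ClusterSettings clusterSettings = settingsModule.getClusterSettings();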
@@ -20,6 +20,7 @@
package org.elasticsearch.env;

import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.threadpool.ThreadPool;

/**
 *
@@ -27,13 +28,16 @@ import org.elasticsearch.common.inject.AbstractModule;
public class EnvironmentModule extends AbstractModule {

private final Environment environment;
private final ThreadPool threadPool;

public EnvironmentModule(Environment environment) {
public EnvironmentModule(Environment environment, ThreadPool threadPool) {
this.threadPool = threadPool;
this.environment = environment;
}

@Override
protected void configure() {
bind(ThreadPool.class).toInstance(threadPool);
bind(Environment.class).toInstance(environment);
}
}

@@ -29,6 +29,7 @@ public interface IndexNumericFieldData extends IndexFieldData<AtomicNumericField
SHORT(false),
INT(false),
LONG(false),
HALF_FLOAT(true),
FLOAT(true),
DOUBLE(true);

@@ -19,6 +19,7 @@

package org.elasticsearch.index.fielddata.plain;

import org.apache.lucene.document.HalfFloatPoint;
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.LeafReader;
@@ -61,6 +62,7 @@ public class SortedNumericDVIndexFieldData extends DocValuesIndexFieldData imple
@Override
public org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource comparatorSource(Object missingValue, MultiValueMode sortMode, Nested nested) {
switch (numericType) {
case HALF_FLOAT:
case FLOAT:
return new FloatValuesComparatorSource(this, missingValue, sortMode, nested);
case DOUBLE:
@@ -87,6 +89,8 @@ public class SortedNumericDVIndexFieldData extends DocValuesIndexFieldData imple
final String field = fieldName;

switch (numericType) {
case HALF_FLOAT:
return new SortedNumericHalfFloatFieldData(reader, field);
case FLOAT:
return new SortedNumericFloatFieldData(reader, field);
case DOUBLE:
@@ -134,6 +138,95 @@ public class SortedNumericDVIndexFieldData extends DocValuesIndexFieldData imple
}
}

/**
 * FieldData implementation for 16-bit float values.
 * <p>
 * Order of values within a document is consistent with
 * {@link Float#compareTo(Float)}, hence the following reversible
 * transformation is applied at both index and search:
 * {@code bits ^ (bits >> 15) & 0x7fff}
 * <p>
 * Although the API is multi-valued, most codecs in Lucene specialize
 * for the case where documents have at most one value. In this case
 * {@link FieldData#unwrapSingleton(SortedNumericDoubleValues)} will return
 * the underlying single-valued NumericDoubleValues representation, and
 * {@link FieldData#unwrapSingletonBits(SortedNumericDoubleValues)} will return
 * a Bits matching documents that have a real value (as opposed to missing).
 */
static final class SortedNumericHalfFloatFieldData extends AtomicDoubleFieldData {
final LeafReader reader;
final String field;

SortedNumericHalfFloatFieldData(LeafReader reader, String field) {
super(0L);
this.reader = reader;
this.field = field;
}

@Override
public SortedNumericDoubleValues getDoubleValues() {
try {
SortedNumericDocValues raw = DocValues.getSortedNumeric(reader, field);

NumericDocValues single = DocValues.unwrapSingleton(raw);
if (single != null) {
return FieldData.singleton(new SingleHalfFloatValues(single), DocValues.unwrapSingletonBits(raw));
} else {
return new MultiHalfFloatValues(raw);
}
} catch (IOException e) {
throw new IllegalStateException("Cannot load doc values", e);
}
}

@Override
public Collection<Accountable> getChildResources() {
return Collections.emptyList();
}
}

/**
 * Wraps a NumericDocValues and exposes a single 16-bit float per document.
 */
static final class SingleHalfFloatValues extends NumericDoubleValues {
final NumericDocValues in;

SingleHalfFloatValues(NumericDocValues in) {
this.in = in;
}

@Override
public double get(int docID) {
return HalfFloatPoint.sortableShortToHalfFloat((short) in.get(docID));
}
}

/**
 * Wraps a SortedNumericDocValues and exposes multiple 16-bit floats per document.
 */
static final class MultiHalfFloatValues extends SortedNumericDoubleValues {
final SortedNumericDocValues in;

MultiHalfFloatValues(SortedNumericDocValues in) {
this.in = in;
}

@Override
public void setDocument(int doc) {
in.setDocument(doc);
}

@Override
public double valueAt(int index) {
return HalfFloatPoint.sortableShortToHalfFloat((short) in.valueAt(index));
}

@Override
public int count() {
return in.count();
}
}

/**
 * FieldData implementation for 32-bit float values.
 * <p>
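The javadoc above describes storing half floats as sortable 16-bit shorts. A short round-trip sketch, using only the HalfFloatPoint methods that appear in this diff, of how a half_float value is encoded for doc values at index time and decoded again at search time (illustrative only):

float value = 1.5f;
short sortable = HalfFloatPoint.halfFloatToSortableShort(value);    // what goes into SortedNumericDocValues
double decoded = HalfFloatPoint.sortableShortToHalfFloat(sortable); // what SortedNumericHalfFloatFieldData reads back
assert decoded == 1.5; // 1.5 is exactly representable in 16 bits; most values lose precision instead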
@@ -22,6 +22,7 @@ package org.elasticsearch.index.mapper.core;
import org.apache.lucene.document.DoublePoint;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FloatPoint;
import org.apache.lucene.document.HalfFloatPoint;
import org.apache.lucene.document.IntPoint;
import org.apache.lucene.document.LongPoint;
import org.apache.lucene.document.SortedNumericDocValuesField;
@@ -180,6 +181,86 @@ public class NumberFieldMapper extends FieldMapper implements AllFieldMapper.Inc
}

public enum NumberType {
HALF_FLOAT("half_float", NumericType.HALF_FLOAT) {
@Override
Float parse(Object value) {
return (Float) FLOAT.parse(value);
}

@Override
Float parse(XContentParser parser, boolean coerce) throws IOException {
return parser.floatValue(coerce);
}

@Override
Query termQuery(String field, Object value) {
float v = parse(value);
return HalfFloatPoint.newExactQuery(field, v);
}

@Override
Query termsQuery(String field, List<Object> values) {
float[] v = new float[values.size()];
for (int i = 0; i < values.size(); ++i) {
v[i] = parse(values.get(i));
}
return HalfFloatPoint.newSetQuery(field, v);
}

@Override
Query rangeQuery(String field, Object lowerTerm, Object upperTerm,
boolean includeLower, boolean includeUpper) {
float l = Float.NEGATIVE_INFINITY;
float u = Float.POSITIVE_INFINITY;
if (lowerTerm != null) {
l = parse(lowerTerm);
if (includeLower) {
l = Math.nextDown(l);
}
l = HalfFloatPoint.nextUp(l);
}
if (upperTerm != null) {
u = parse(upperTerm);
if (includeUpper) {
u = Math.nextUp(u);
}
u = HalfFloatPoint.nextDown(u);
}
return HalfFloatPoint.newRangeQuery(field, l, u);
}

@Override
public List<Field> createFields(String name, Number value,
boolean indexed, boolean docValued, boolean stored) {
List<Field> fields = new ArrayList<>();
if (indexed) {
fields.add(new HalfFloatPoint(name, value.floatValue()));
}
if (docValued) {
fields.add(new SortedNumericDocValuesField(name,
HalfFloatPoint.halfFloatToSortableShort(value.floatValue())));
}
if (stored) {
fields.add(new StoredField(name, value.floatValue()));
}
return fields;
}

@Override
FieldStats.Double stats(IndexReader reader, String fieldName,
boolean isSearchable, boolean isAggregatable) throws IOException {
long size = XPointValues.size(reader, fieldName);
if (size == 0) {
return null;
}
int docCount = XPointValues.getDocCount(reader, fieldName);
byte[] min = XPointValues.getMinPackedValue(reader, fieldName);
byte[] max = XPointValues.getMaxPackedValue(reader, fieldName);
return new FieldStats.Double(reader.maxDoc(), docCount, -1L, size,
isSearchable, isAggregatable,
HalfFloatPoint.decodeDimension(min, 0), HalfFloatPoint.decodeDimension(max, 0));
}
},
FLOAT("float", NumericType.FLOAT) {
@Override
Float parse(Object value) {

@@ -38,11 +38,4 @@ public interface QueryParser<QB extends QueryBuilder> {
 * @return the new QueryBuilder
 */
Optional<QB> fromXContent(QueryParseContext parseContext) throws IOException;

/**
 * @return an empty {@link QueryBuilder} instance for this parser that can be used for deserialization
 */
default QB getBuilderPrototype() { // TODO remove this when nothing implements it
throw new UnsupportedOperationException();
}
}

@@ -1,48 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.indices.breaker;

import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.settings.Settings;

public class CircuitBreakerModule extends AbstractModule {

public static final String TYPE_KEY = "indices.breaker.type";

private final Settings settings;

public CircuitBreakerModule(Settings settings) {
this.settings = settings;
}

@Override
protected void configure() {
String type = settings.get(TYPE_KEY);
Class<? extends CircuitBreakerService> impl;
if (type == null || type.equals("hierarchy")) {
impl = HierarchyCircuitBreakerService.class;
} else if (type.equals("none")) {
impl = NoneCircuitBreakerService.class;
} else {
throw new IllegalArgumentException("Unknown circuit breaker type [" + type + "]");
}
bind(CircuitBreakerService.class).to(impl).asEagerSingleton();
}
}

@@ -64,4 +64,5 @@ public abstract class CircuitBreakerService extends AbstractLifecycleComponent<C
@Override
protected void doClose() {
}

}

@@ -23,7 +23,6 @@ import org.elasticsearch.common.breaker.ChildMemoryCircuitBreaker;
import org.elasticsearch.common.breaker.CircuitBreaker;
import org.elasticsearch.common.breaker.CircuitBreakingException;
import org.elasticsearch.common.breaker.NoopCircuitBreaker;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Setting;
@@ -79,7 +78,6 @@ public class HierarchyCircuitBreakerService extends CircuitBreakerService {
// Tripped count for when redistribution was attempted but wasn't successful
private final AtomicLong parentTripCount = new AtomicLong(0);

@Inject
public HierarchyCircuitBreakerService(Settings settings, ClusterSettings clusterSettings) {
super(settings);
this.fielddataSettings = new BreakerSettings(CircuitBreaker.FIELDDATA,

@@ -78,7 +78,9 @@ import org.elasticsearch.http.HttpServerTransport;
import org.elasticsearch.indices.IndicesModule;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.indices.analysis.AnalysisModule;
import org.elasticsearch.indices.breaker.CircuitBreakerModule;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
import org.elasticsearch.indices.cluster.IndicesClusterStateService;
import org.elasticsearch.indices.store.IndicesStore;
import org.elasticsearch.indices.ttl.IndicesTTLService;
@@ -89,6 +91,7 @@ import org.elasticsearch.node.service.NodeService;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.PluginsModule;
import org.elasticsearch.plugins.PluginsService;
import org.elasticsearch.plugins.ScriptPlugin;
import org.elasticsearch.repositories.RepositoriesModule;
import org.elasticsearch.rest.RestController;
import org.elasticsearch.script.ScriptModule;
@@ -100,7 +103,6 @@ import org.elasticsearch.snapshots.SnapshotsService;
import org.elasticsearch.tasks.TaskPersistenceService;
import org.elasticsearch.threadpool.ExecutorBuilder;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.threadpool.ThreadPoolModule;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.tribe.TribeModule;
import org.elasticsearch.tribe.TribeService;
@@ -132,6 +134,7 @@ import java.util.function.Function;
 */
public class Node implements Closeable {

public static final Setting<Boolean> WRITE_PORTS_FIELD_SETTING =
Setting.boolSetting("node.portsfile", false, Property.NodeScope);
public static final Setting<Boolean> NODE_DATA_SETTING = Setting.boolSetting("node.data", true, Property.NodeScope);
@@ -145,6 +148,16 @@ public class Node implements Closeable {
Setting.boolSetting("node.ingest", true, Property.NodeScope);
public static final Setting<String> NODE_NAME_SETTING = Setting.simpleString("node.name", Property.NodeScope);
public static final Setting<Settings> NODE_ATTRIBUTES = Setting.groupSetting("node.attr.", Property.NodeScope);
public static final Setting<String> BREAKER_TYPE_KEY = new Setting<>("indices.breaker.type", "hierarchy", (s) -> {
switch (s) {
case "hierarchy":
case "none":
return s;
default:
throw new IllegalArgumentException("indices.breaker.type must be one of [hierarchy, none] but was: " + s);
}
}, Setting.Property.NodeScope);

private static final String CLIENT_TYPE = "node";
@@ -167,8 +180,9 @@ public class Node implements Closeable {
protected Node(Environment tmpEnv, Version version, Collection<Class<? extends Plugin>> classpathPlugins) {
Settings tmpSettings = Settings.builder().put(tmpEnv.settings())
.put(Client.CLIENT_TYPE_SETTING_S.getKey(), CLIENT_TYPE).build();
tmpSettings = TribeService.processSettings(tmpSettings);
final List<Closeable> resourcesToClose = new ArrayList<>(); // register everything we need to release in the case of an error

tmpSettings = TribeService.processSettings(tmpSettings);
ESLogger logger = Loggers.getLogger(Node.class, NODE_NAME_SETTING.get(tmpSettings));
final String displayVersion = version + (Build.CURRENT.isSnapshot() ? "-SNAPSHOT" : "");
final JvmInfo jvmInfo = JvmInfo.jvmInfo();
@@ -202,41 +216,48 @@ public class Node implements Closeable {
this.pluginsService = new PluginsService(tmpSettings, tmpEnv.modulesFile(), tmpEnv.pluginsFile(), classpathPlugins);
this.settings = pluginsService.updatedSettings();
// create the environment based on the finalized (processed) view of the settings
this.environment = new Environment(this.settings());

final NodeEnvironment nodeEnvironment;
try {
nodeEnvironment = new NodeEnvironment(this.settings, this.environment);
} catch (IOException ex) {
throw new IllegalStateException("Failed to created node environment", ex);
}
final NetworkService networkService = new NetworkService(settings);
this.environment = new Environment(this.settings);
final List<ExecutorBuilder<?>> executorBuilders = pluginsService.getExecutorBuilders(settings);
final ThreadPool threadPool = new ThreadPool(settings, executorBuilders.toArray(new ExecutorBuilder[0]));

NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry();
boolean success = false;
try {
final ThreadPool threadPool = new ThreadPool(settings, executorBuilders.toArray(new ExecutorBuilder[0]));
resourcesToClose.add(() -> ThreadPool.terminate(threadPool, 10, TimeUnit.SECONDS));
final List<Setting<?>> additionalSettings = new ArrayList<>();
final List<String> additionalSettingsFilter = new ArrayList<>();
additionalSettings.addAll(pluginsService.getPluginSettings());
additionalSettingsFilter.addAll(pluginsService.getPluginSettingsFilter());
for (final ExecutorBuilder<?> builder : threadPool.builders()) {
additionalSettings.addAll(builder.getRegisteredSettings());
}
final ScriptModule scriptModule = ScriptModule.create(settings, pluginsService.filterPlugins(ScriptPlugin.class));
additionalSettings.addAll(scriptModule.getSettings());
// this is as early as we can validate settings at this point. we already pass them to ScriptModule as well as ThreadPool
// so we might be late here already
final SettingsModule settingsModule = new SettingsModule(this.settings, additionalSettings, additionalSettingsFilter);
final NodeEnvironment nodeEnvironment;
try {
nodeEnvironment = new NodeEnvironment(this.settings, this.environment);
resourcesToClose.add(nodeEnvironment);
} catch (IOException ex) {
throw new IllegalStateException("Failed to created node environment", ex);
}
final NetworkService networkService = new NetworkService(settings);
NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry();
ModulesBuilder modules = new ModulesBuilder();
modules.add(new Version.Module(version));
modules.add(new CircuitBreakerModule(settings));
// plugin modules must be added here, before others or we can get crazy injection errors...
for (Module pluginModule : pluginsService.nodeModules()) {
modules.add(pluginModule);
}
final MonitorService monitorService = new MonitorService(settings, nodeEnvironment, threadPool);
modules.add(new PluginsModule(pluginsService));
SettingsModule settingsModule = new SettingsModule(this.settings);
modules.add(settingsModule);
modules.add(new EnvironmentModule(environment));
modules.add(new EnvironmentModule(environment, threadPool));
modules.add(new NodeModule(this, monitorService));
modules.add(new NetworkModule(networkService, settings, false, namedWriteableRegistry));
ScriptModule scriptModule = new ScriptModule();
modules.add(scriptModule);
modules.add(new NodeEnvironmentModule(nodeEnvironment));
modules.add(new ClusterNameModule(this.settings));
final ThreadPoolModule threadPoolModule = new ThreadPoolModule(threadPool);
modules.add(threadPoolModule);
modules.add(new DiscoveryModule(this.settings));
modules.add(new ClusterModule(this.settings));
modules.add(new IndicesModule());
@@ -248,23 +269,20 @@ public class Node implements Closeable {
modules.add(new RepositoriesModule());
modules.add(new TribeModule());
modules.add(new AnalysisModule(environment));

pluginsService.processModules(modules);

scriptModule.prepareSettings(settingsModule);

threadPoolModule.prepareSettings(settingsModule);

CircuitBreakerService circuitBreakerService = createCircuitBreakerService(settingsModule.getSettings(),
settingsModule.getClusterSettings());
resourcesToClose.add(circuitBreakerService);
modules.add(settingsModule);
modules.add(b -> b.bind(CircuitBreakerService.class).toInstance(circuitBreakerService));
injector = modules.createInjector();

client = injector.getInstance(Client.class);
success = true;
} catch (IOException ex) {
throw new ElasticsearchException("failed to bind service", ex);
} finally {
if (!success) {
nodeEnvironment.close();
ThreadPool.terminate(threadPool, 10, TimeUnit.SECONDS);
IOUtils.closeWhileHandlingException(resourcesToClose);
}
}

@@ -590,4 +608,19 @@ public class Node implements Closeable {
throw new RuntimeException("Failed to rename ports file", e);
}
}

/**
 * Creates a new {@link CircuitBreakerService} based on the settings provided.
 * @see #BREAKER_TYPE_KEY
 */
public static CircuitBreakerService createCircuitBreakerService(Settings settings, ClusterSettings clusterSettings) {
String type = BREAKER_TYPE_KEY.get(settings);
if (type.equals("hierarchy")) {
return new HierarchyCircuitBreakerService(settings, clusterSettings);
} else if (type.equals("none")) {
return new NoneCircuitBreakerService();
} else {
throw new IllegalArgumentException("Unknown circuit breaker type [" + type + "]");
}
}
}
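The new validated BREAKER_TYPE_KEY setting replaces the deleted CircuitBreakerModule, and the breaker service is now created directly instead of being bound by guice. A hedged sketch of selecting the implementation from settings, mirroring Node.createCircuitBreakerService above (illustrative only):

Settings settings = Settings.builder().put("indices.breaker.type", "none").build();
ClusterSettings clusterSettings = new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
CircuitBreakerService breakerService = Node.createCircuitBreakerService(settings, clusterSettings);
// "hierarchy" (the default) selects HierarchyCircuitBreakerService, "none" selects
// NoneCircuitBreakerService, and any other value fails BREAKER_TYPE_KEY validation.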
@@ -21,10 +21,12 @@ package org.elasticsearch.plugins;

import org.elasticsearch.common.component.LifecycleComponent;
import org.elasticsearch.common.inject.Module;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsModule;
import org.elasticsearch.index.IndexModule;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.threadpool.ExecutorBuilder;
import org.elasticsearch.threadpool.ThreadPool;

import java.util.Collection;
import java.util.Collections;
@@ -66,6 +68,16 @@ public abstract class Plugin {
 */
public void onIndexModule(IndexModule indexModule) {}

/**
 * Returns a list of additional {@link Setting} definitions for this plugin.
 */
public List<Setting<?>> getSettings() { return Collections.emptyList(); }

/**
 * Returns a list of additional settings filter for this plugin
 */
public List<String> getSettingsFilter() { return Collections.emptyList(); }

/**
 * Old-style guice index level extension point.
 *
@@ -74,6 +86,23 @@ public abstract class Plugin {
@Deprecated
public final void onModule(IndexModule indexModule) {}

/**
 * Old-style guice settings extension point.
 *
 * @deprecated use #getSettings and #getSettingsFilter instead
 */
@Deprecated
public final void onModule(SettingsModule settingsModule) {}

/**
 * Old-style guice scripting extension point.
 *
 * @deprecated implement {@link ScriptPlugin} instead
 */
@Deprecated
public final void onModule(ScriptModule module) {}

/**
 * Provides the list of this plugin's custom thread pools, empty if
 * none.
@@ -39,7 +39,11 @@ import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsModule;
import org.elasticsearch.index.IndexModule;
import org.elasticsearch.script.NativeScriptFactory;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.ScriptEngineService;
import org.elasticsearch.threadpool.ExecutorBuilder;

import java.io.IOException;
@@ -60,6 +64,7 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;

import static org.elasticsearch.common.io.FileSystemUtils.isAccessibleDirectory;

@@ -78,6 +83,14 @@ public class PluginsService extends AbstractComponent {

private final Map<Plugin, List<OnModuleReference>> onModuleReferences;

public List<Setting<?>> getPluginSettings() {
return plugins.stream().flatMap(p -> p.v2().getSettings().stream()).collect(Collectors.toList());
}

public List<String> getPluginSettingsFilter() {
return plugins.stream().flatMap(p -> p.v2().getSettingsFilter().stream()).collect(Collectors.toList());
}

static class OnModuleReference {
public final Class<? extends Module> moduleClass;
public final Method onModuleMethod;
@@ -283,6 +296,7 @@ public class PluginsService extends AbstractComponent {
plugin.v2().onIndexModule(indexModule);
}
}

/**
 * Get information about plugins and modules
 */
@@ -440,4 +454,9 @@ public class PluginsService extends AbstractComponent {
throw new ElasticsearchException("Failed to load plugin class [" + pluginClass.getName() + "]", e);
}
}

public <T> List<T> filterPlugins(Class<T> type) {
return plugins.stream().filter(x -> type.isAssignableFrom(x.v2().getClass()))
.map(p -> ((T)p.v2())).collect(Collectors.toList());
}
}
@@ -0,0 +1,55 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.plugins;

import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.script.NativeScriptFactory;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.ScriptEngineService;

import java.util.Collections;
import java.util.List;

/**
 * An additional extension point to {@link Plugin}. Plugins extending the scripting functionality must implement this interface
 * to provide access to script engines or script factories.
 */
public interface ScriptPlugin {

/**
 * Returns a {@link ScriptEngineService} instance or <code>null</code> if this plugin doesn't add a new script engine
 */
default ScriptEngineService getScriptEngineService(Settings settings) {
return null;
}

/**
 * Returns a list of {@link NativeScriptFactory} instances.
 */
default List<NativeScriptFactory> getNativeScripts() {
return Collections.emptyList();
}

/**
 * Returns a {@link ScriptContext.Plugin} instance or <code>null</code> if this plugin doesn't add a new script context plugin
 */
default ScriptContext.Plugin getCustomScriptContexts() {
return null;
}
}
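For context, a hypothetical plugin using the new extension point might look like the sketch below; MyNativeScriptFactory is an assumed NativeScriptFactory implementation and is not part of this commit. The node picks such plugins up via pluginsService.filterPlugins(ScriptPlugin.class) and ScriptModule.create(...), as shown in the Node hunk above; the other two methods keep their default null/empty implementations.

public class MyScriptingPlugin extends Plugin implements ScriptPlugin {
    @Override
    public List<NativeScriptFactory> getNativeScripts() {
        // MyNativeScriptFactory is hypothetical; it would implement NativeScriptFactory.
        return Collections.singletonList(new MyNativeScriptFactory());
    }
}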
@ -275,7 +275,7 @@ public class RestActions {
|
||||
|
||||
@Override
|
||||
public RestResponse buildResponse(NodesResponse response, XContentBuilder builder) throws Exception {
|
||||
return RestActions.nodesResponse(builder, ToXContent.EMPTY_PARAMS, response);
|
||||
return RestActions.nodesResponse(builder, channel.request(), response);
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -22,13 +22,10 @@ package org.elasticsearch.script;
|
||||
import org.apache.lucene.index.LeafReaderContext;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.component.AbstractComponent;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.search.lookup.SearchLookup;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import static java.util.Collections.unmodifiableMap;
|
||||
@ -42,7 +39,6 @@ public class NativeScriptEngineService extends AbstractComponent implements Scri
|
||||
|
||||
private final Map<String, NativeScriptFactory> scripts;
|
||||
|
||||
@Inject
|
||||
public NativeScriptEngineService(Settings settings, Map<String, NativeScriptFactory> scripts) {
|
||||
super(settings);
|
||||
this.scripts = unmodifiableMap(scripts);
|
||||
@ -98,4 +94,9 @@ public class NativeScriptEngineService extends AbstractComponent implements Scri
|
||||
public void scriptRemoved(CompiledScript script) {
|
||||
// Nothing to do here
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isInlineScriptEnabled() {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
@ -43,8 +43,13 @@ public interface NativeScriptFactory {
|
||||
|
||||
/**
|
||||
* Indicates if document scores may be needed by the produced scripts.
|
||||
*
|
||||
*
|
||||
* @return {@code true} if scores are needed.
|
||||
*/
|
||||
boolean needsScores();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the name of the script factory
|
||||
*/
|
||||
String getName();
|
||||
}
|
||||
|
@ -19,6 +19,8 @@
|
||||
|
||||
package org.elasticsearch.script;
|
||||
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
@ -30,7 +32,8 @@ import static java.util.Collections.unmodifiableSet;
|
||||
|
||||
/**
|
||||
* Registry for operations that use scripts as part of their execution. Can be standard operations of custom defined ones (via plugin).
|
||||
* Allows plugins to register custom operations that they use scripts for, via {@link ScriptModule#registerScriptContext(org.elasticsearch.script.ScriptContext.Plugin)}.
|
||||
* Allows plugins to register custom operations that they use scripts for,
|
||||
* via {@link org.elasticsearch.plugins.ScriptPlugin}
|
||||
* Scripts can be enabled/disabled via fine-grained settings for each single registered operation.
|
||||
*/
|
||||
public final class ScriptContextRegistry {
|
||||
|
@ -29,29 +29,29 @@ import org.elasticsearch.common.Strings;
|
||||
public class ScriptEngineRegistry {
|
||||
|
||||
private final Map<Class<? extends ScriptEngineService>, String> registeredScriptEngineServices;
|
||||
private final Map<String, Class<? extends ScriptEngineService>> registeredLanguages;
|
||||
private final Map<String, ScriptEngineService> registeredLanguages;
|
||||
private final Map<String, Boolean> defaultInlineScriptEnableds;
|
||||
|
||||
public ScriptEngineRegistry(Iterable<ScriptEngineRegistration> registrations) {
|
||||
public ScriptEngineRegistry(Iterable<ScriptEngineService> registrations) {
|
||||
Objects.requireNonNull(registrations);
|
||||
Map<Class<? extends ScriptEngineService>, String> registeredScriptEngineServices = new HashMap<>();
|
||||
Map<String, Class<? extends ScriptEngineService>> registeredLanguages = new HashMap<>();
|
||||
Map<String, ScriptEngineService> registeredLanguages = new HashMap<>();
|
||||
Map<String, Boolean> inlineScriptEnableds = new HashMap<>();
|
||||
for (ScriptEngineRegistration registration : registrations) {
|
||||
String oldLanguage = registeredScriptEngineServices.putIfAbsent(registration.getScriptEngineService(),
|
||||
registration.getScriptEngineLanguage());
|
||||
for (ScriptEngineService service : registrations) {
|
||||
String oldLanguage = registeredScriptEngineServices.putIfAbsent(service.getClass(),
|
||||
service.getType());
|
||||
if (oldLanguage != null) {
|
||||
throw new IllegalArgumentException("script engine service [" + registration.getScriptEngineService() +
|
||||
throw new IllegalArgumentException("script engine service [" + service.getClass() +
|
||||
"] already registered for language [" + oldLanguage + "]");
|
||||
}
|
||||
String language = registration.getScriptEngineLanguage();
|
||||
Class<? extends ScriptEngineService> scriptEngineServiceClazz =
|
||||
registeredLanguages.putIfAbsent(language, registration.getScriptEngineService());
|
||||
if (scriptEngineServiceClazz != null) {
|
||||
String language = service.getType();
|
||||
ScriptEngineService scriptEngineService =
|
||||
registeredLanguages.putIfAbsent(language, service);
|
||||
if (scriptEngineService != null) {
|
||||
throw new IllegalArgumentException("scripting language [" + language + "] already registered for script engine service [" +
|
||||
scriptEngineServiceClazz.getCanonicalName() + "]");
|
||||
scriptEngineService.getClass().getCanonicalName() + "]");
|
||||
}
|
||||
inlineScriptEnableds.put(language, registration.getDefaultInlineScriptEnabled());
|
||||
inlineScriptEnableds.put(language, service.isInlineScriptEnabled());
|
||||
}
|
||||
|
||||
this.registeredScriptEngineServices = Collections.unmodifiableMap(registeredScriptEngineServices);
|
||||
@ -68,52 +68,12 @@ public class ScriptEngineRegistry {
return registeredScriptEngineServices.get(scriptEngineService);
}

Map<String, Class<? extends ScriptEngineService>> getRegisteredLanguages() {
public Map<String, ScriptEngineService> getRegisteredLanguages() {
return registeredLanguages;
}

Map<String, Boolean> getDefaultInlineScriptEnableds() {
public Map<String, Boolean> getDefaultInlineScriptEnableds() {
return this.defaultInlineScriptEnableds;
}

public static class ScriptEngineRegistration {
private final Class<? extends ScriptEngineService> scriptEngineService;
private final String scriptEngineLanguage;
private final boolean defaultInlineScriptEnabled;

/**
* Register a script engine service with the default of inline scripts disabled
*/
public ScriptEngineRegistration(Class<? extends ScriptEngineService> scriptEngineService, String scriptEngineLanguage) {
this(scriptEngineService, scriptEngineLanguage, false);
}

/**
* Register a script engine service with the given default mode for inline scripts
*/
public ScriptEngineRegistration(Class<? extends ScriptEngineService> scriptEngineService, String scriptEngineLanguage,
boolean defaultInlineScriptEnabled) {
Objects.requireNonNull(scriptEngineService);
if (Strings.hasText(scriptEngineLanguage) == false) {
throw new IllegalArgumentException("languages for script engine service [" +
scriptEngineService.getCanonicalName() + "] should be a non-empty string");
}
this.scriptEngineService = scriptEngineService;
this.scriptEngineLanguage = scriptEngineLanguage;
this.defaultInlineScriptEnabled = defaultInlineScriptEnabled;
}

Class<? extends ScriptEngineService> getScriptEngineService() {
return scriptEngineService;
}

String getScriptEngineLanguage() {
return scriptEngineLanguage;
}

boolean getDefaultInlineScriptEnabled() {
return defaultInlineScriptEnabled;
}
}

}
@ -37,7 +37,7 @@ public interface ScriptEngineService extends Closeable {

/**
* Compiles a script.
* @param scriptName name of the script. {@code null} if it is anonymous (inline).
* For a file script, it's the file name (with extension).
* For a stored script, it's the identifier.
* @param scriptSource actual source of the script
@ -55,4 +55,11 @@ public interface ScriptEngineService extends Closeable {
* The passed script may be null if it has already been garbage collected.
* */
void scriptRemoved(@Nullable CompiledScript script);

/**
* Returns <code>true</code> if this scripting engine can safely accept inline scripts by default. The default is <code>false</code>.
*/
default boolean isInlineScriptEnabled() {
return false;
}
}
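isInlineScriptEnabled() is an interface default method, so existing engine implementations keep compiling and stay opted out of inline scripts unless they override it. A small stand-alone sketch of that pattern; the Engine, LegacyEngine and SandboxedEngine names are made up for illustration.

    public class InlineFlagExample {
        /** Hypothetical engine interface mirroring the default-method pattern. */
        interface Engine {
            String getType();

            // engines that do not override this stay opted out of inline scripts
            default boolean isInlineScriptEnabled() {
                return false;
            }
        }

        static class LegacyEngine implements Engine {
            public String getType() { return "legacy"; }            // inherits the false default
        }

        static class SandboxedEngine implements Engine {
            public String getType() { return "sandboxed"; }
            public boolean isInlineScriptEnabled() { return true; } // explicitly declares inline use as safe
        }

        public static void main(String[] args) {
            System.out.println(new LegacyEngine().isInlineScriptEnabled());    // false
            System.out.println(new SandboxedEngine().isInlineScriptEnabled()); // true
        }
    }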
@ -20,16 +20,18 @@
package org.elasticsearch.script;

import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.inject.multibindings.MapBinder;
import org.elasticsearch.common.inject.multibindings.Multibinder;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.SettingsModule;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.plugins.ScriptPlugin;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.function.Function;
import java.util.stream.Collectors;

/**
* An {@link org.elasticsearch.common.inject.Module} which manages {@link ScriptEngineService}s, as well
@ -37,73 +39,52 @@ import java.util.Objects;
*/
public class ScriptModule extends AbstractModule {

private final List<ScriptEngineRegistry.ScriptEngineRegistration> scriptEngineRegistrations = new ArrayList<>();
protected final ScriptContextRegistry scriptContextRegistry;
protected final ScriptEngineRegistry scriptEngineRegistry;
protected final ScriptSettings scriptSettings;

{
scriptEngineRegistrations.add(new ScriptEngineRegistry.ScriptEngineRegistration(NativeScriptEngineService.class,
NativeScriptEngineService.NAME, true));
public ScriptModule(ScriptEngineService... services) {
this(Arrays.asList(services), Collections.emptyList());
}

private final Map<String, Class<? extends NativeScriptFactory>> scripts = new HashMap<>();

private final List<ScriptContext.Plugin> customScriptContexts = new ArrayList<>();

public void addScriptEngine(ScriptEngineRegistry.ScriptEngineRegistration scriptEngineRegistration) {
Objects.requireNonNull(scriptEngineRegistration);
scriptEngineRegistrations.add(scriptEngineRegistration);
}

public void registerScript(String name, Class<? extends NativeScriptFactory> script) {
scripts.put(name, script);
}

/**
* Registers a custom script context that can be used by plugins to categorize the different operations that they use scripts for.
* Fine-grained settings allow to enable/disable scripts per context.
*/
public void registerScriptContext(ScriptContext.Plugin scriptContext) {
customScriptContexts.add(scriptContext);
public ScriptModule(List<ScriptEngineService> scriptEngineServices,
List<ScriptContext.Plugin> customScriptContexts) {
this.scriptContextRegistry = new ScriptContextRegistry(customScriptContexts);
this.scriptEngineRegistry = new ScriptEngineRegistry(scriptEngineServices);
this.scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry);
}

/**
* This method is called after all modules have been processed but before we actually validate all settings. This allows the
* script extensions to add all their settings.
*/
public void prepareSettings(SettingsModule settingsModule) {
ScriptContextRegistry scriptContextRegistry = new ScriptContextRegistry(customScriptContexts);
ScriptEngineRegistry scriptEngineRegistry = new ScriptEngineRegistry(scriptEngineRegistrations);
ScriptSettings scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry);

scriptSettings.getScriptTypeSettings().forEach(settingsModule::registerSetting);
scriptSettings.getScriptContextSettings().forEach(settingsModule::registerSetting);
scriptSettings.getScriptLanguageSettings().forEach(settingsModule::registerSetting);
settingsModule.registerSetting(scriptSettings.getDefaultScriptLanguageSetting());
public List<Setting<?>> getSettings() {
ArrayList<Setting<?>> settings = new ArrayList<>();
scriptSettings.getScriptTypeSettings().forEach(settings::add);
scriptSettings.getScriptContextSettings().forEach(settings::add);
scriptSettings.getScriptLanguageSettings().forEach(settings::add);
settings.add(scriptSettings.getDefaultScriptLanguageSetting());
return settings;
}

@Override
protected void configure() {
MapBinder<String, NativeScriptFactory> scriptsBinder
= MapBinder.newMapBinder(binder(), String.class, NativeScriptFactory.class);
for (Map.Entry<String, Class<? extends NativeScriptFactory>> entry : scripts.entrySet()) {
scriptsBinder.addBinding(entry.getKey()).to(entry.getValue()).asEagerSingleton();
}

Multibinder<ScriptEngineService> multibinder = Multibinder.newSetBinder(binder(), ScriptEngineService.class);
multibinder.addBinding().to(NativeScriptEngineService.class);

for (ScriptEngineRegistry.ScriptEngineRegistration scriptEngineRegistration : scriptEngineRegistrations) {
multibinder.addBinding().to(scriptEngineRegistration.getScriptEngineService()).asEagerSingleton();
}

ScriptContextRegistry scriptContextRegistry = new ScriptContextRegistry(customScriptContexts);
ScriptEngineRegistry scriptEngineRegistry = new ScriptEngineRegistry(scriptEngineRegistrations);
ScriptSettings scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry);

bind(ScriptContextRegistry.class).toInstance(scriptContextRegistry);
bind(ScriptEngineRegistry.class).toInstance(scriptEngineRegistry);
bind(ScriptSettings.class).toInstance(scriptSettings);
bind(ScriptService.class).asEagerSingleton();
}

public static ScriptModule create(Settings settings, List<ScriptPlugin> scriptPlugins) {
Map<String, NativeScriptFactory> factoryMap = scriptPlugins.stream().flatMap(x -> x.getNativeScripts().stream())
.collect(Collectors.toMap(NativeScriptFactory::getName, Function.identity()));
NativeScriptEngineService nativeScriptEngineService = new NativeScriptEngineService(settings, factoryMap);
List<ScriptEngineService> scriptEngineServices = scriptPlugins.stream().map(x -> x.getScriptEngineService(settings))
.filter(Objects::nonNull).collect(Collectors.toList());
scriptEngineServices.add(nativeScriptEngineService);
return new ScriptModule(scriptEngineServices, scriptPlugins.stream().map(x -> x.getCustomScriptContexts())
.filter(Objects::nonNull).collect(Collectors.toList()));
}
}
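ScriptModule.create collects engines from the plugins with the stream API; a plugin that returns null from getScriptEngineService simply contributes nothing, since nulls are filtered out rather than rejected. A simplified, stand-alone sketch of that collection step, using a hypothetical FakeScriptPlugin type in place of the real plugin interfaces:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;
    import java.util.Objects;
    import java.util.stream.Collectors;

    public class PluginCollectionExample {
        /** Hypothetical stand-in for a plugin that may or may not provide a script engine. */
        interface FakeScriptPlugin {
            String getEngineName(); // may return null when the plugin contributes no engine
        }

        public static void main(String[] args) {
            List<FakeScriptPlugin> plugins = Arrays.asList(
                    () -> "painless",
                    () -> null,          // plugin without an engine
                    () -> "expression");

            // same shape as create(): map each plugin to its engine, drop nulls, append the built-in one
            List<String> engines = plugins.stream()
                    .map(FakeScriptPlugin::getEngineName)
                    .filter(Objects::nonNull)
                    .collect(Collectors.toCollection(ArrayList::new));
            engines.add("native");

            System.out.println(engines); // [painless, expression, native]
        }
    }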
@ -66,6 +66,7 @@ import java.io.InputStreamReader;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.Locale;
|
||||
@ -91,7 +92,7 @@ public class ScriptService extends AbstractComponent implements Closeable {
|
||||
|
||||
private final String defaultLang;
|
||||
|
||||
private final Set<ScriptEngineService> scriptEngines;
|
||||
private final Collection<ScriptEngineService> scriptEngines;
|
||||
private final Map<String, ScriptEngineService> scriptEnginesByLang;
|
||||
private final Map<String, ScriptEngineService> scriptEnginesByExt;
|
||||
|
||||
@ -132,7 +133,7 @@ public class ScriptService extends AbstractComponent implements Closeable {
|
||||
public static final ParseField SCRIPT_INLINE = new ParseField("script");
|
||||
|
||||
@Inject
|
||||
public ScriptService(Settings settings, Environment env, Set<ScriptEngineService> scriptEngines,
|
||||
public ScriptService(Settings settings, Environment env,
|
||||
ResourceWatcherService resourceWatcherService, ScriptEngineRegistry scriptEngineRegistry,
|
||||
ScriptContextRegistry scriptContextRegistry, ScriptSettings scriptSettings) throws IOException {
|
||||
super(settings);
|
||||
@ -145,7 +146,7 @@ public class ScriptService extends AbstractComponent implements Closeable {
|
||||
"Dynamic scripts can be enabled for all languages and all operations by replacing `script.disable_dynamic: false` with `script.inline: true` and `script.stored: true` in elasticsearch.yml");
|
||||
}
|
||||
|
||||
this.scriptEngines = scriptEngines;
|
||||
this.scriptEngines = scriptEngineRegistry.getRegisteredLanguages().values();
|
||||
this.scriptContextRegistry = scriptContextRegistry;
|
||||
int cacheMaxSize = SCRIPT_CACHE_SIZE_SETTING.get(settings);
|
||||
|
||||
|
@ -166,16 +166,16 @@ public class BestBucketsDeferringCollector extends DeferringBucketCollector {
|
||||
int doc = 0;
|
||||
for (long i = 0, end = entry.docDeltas.size(); i < end; ++i) {
|
||||
doc += docDeltaIterator.next();
|
||||
if (needsScores) {
|
||||
if (docIt.docID() < doc) {
|
||||
docIt.advance(doc);
|
||||
}
|
||||
// aggregations should only be replayed on matching documents
|
||||
assert docIt.docID() == doc;
|
||||
}
|
||||
final long bucket = buckets.next();
|
||||
final long rebasedBucket = hash.find(bucket);
|
||||
if (rebasedBucket != -1) {
|
||||
if (needsScores) {
|
||||
if (docIt.docID() < doc) {
|
||||
docIt.advance(doc);
|
||||
}
|
||||
// aggregations should only be replayed on matching documents
|
||||
assert docIt.docID() == doc;
|
||||
}
|
||||
leafCollector.collect(doc, rebasedBucket);
|
||||
}
|
||||
}
|
||||
|
@ -56,7 +56,7 @@ public class TermsAggregationBuilder extends ValuesSourceAggregationBuilder<Valu
|
||||
private Terms.Order order = Terms.Order.compound(Terms.Order.count(false), Terms.Order.term(true));
|
||||
private IncludeExclude includeExclude = null;
|
||||
private String executionHint = null;
|
||||
private SubAggCollectionMode collectMode = SubAggCollectionMode.DEPTH_FIRST;
|
||||
private SubAggCollectionMode collectMode = null;
|
||||
private TermsAggregator.BucketCountThresholds bucketCountThresholds = new TermsAggregator.BucketCountThresholds(
|
||||
DEFAULT_BUCKET_COUNT_THRESHOLDS);
|
||||
private boolean showTermDocCountError = false;
|
||||
@ -71,7 +71,7 @@ public class TermsAggregationBuilder extends ValuesSourceAggregationBuilder<Valu
|
||||
public TermsAggregationBuilder(StreamInput in) throws IOException {
|
||||
super(in, StringTerms.TYPE, ValuesSourceType.ANY);
|
||||
bucketCountThresholds = new BucketCountThresholds(in);
|
||||
collectMode = SubAggCollectionMode.readFromStream(in);
|
||||
collectMode = in.readOptionalWriteable(SubAggCollectionMode::readFromStream);
|
||||
executionHint = in.readOptionalString();
|
||||
includeExclude = in.readOptionalWriteable(IncludeExclude::new);
|
||||
order = InternalOrder.Streams.readOrder(in);
|
||||
@ -86,7 +86,7 @@ public class TermsAggregationBuilder extends ValuesSourceAggregationBuilder<Valu
@Override
protected void innerWriteTo(StreamOutput out) throws IOException {
bucketCountThresholds.writeTo(out);
collectMode.writeTo(out);
out.writeOptionalWriteable(collectMode);
out.writeOptionalString(executionHint);
out.writeOptionalWriteable(includeExclude);
InternalOrder.Streams.writeOrder(order, out);
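Making collectMode nullable means it has to travel over the wire as an optional value: writeOptionalWriteable/readOptionalWriteable put a presence flag in front of the payload. A stand-alone sketch of that wire pattern using plain DataOutput/DataInput streams; the real StreamOutput API is analogous but not reproduced here.

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    public class OptionalFieldWireExample {
        static void writeOptionalString(DataOutputStream out, String value) throws IOException {
            out.writeBoolean(value != null);   // presence flag first
            if (value != null) {
                out.writeUTF(value);           // payload only when present
            }
        }

        static String readOptionalString(DataInputStream in) throws IOException {
            return in.readBoolean() ? in.readUTF() : null;
        }

        public static void main(String[] args) throws IOException {
            ByteArrayOutputStream bytes = new ByteArrayOutputStream();
            DataOutputStream out = new DataOutputStream(bytes);
            writeOptionalString(out, null);            // e.g. collectMode left unset
            writeOptionalString(out, "breadth_first"); // e.g. collectMode set explicitly

            DataInputStream in = new DataInputStream(new ByteArrayInputStream(bytes.toByteArray()));
            System.out.println(readOptionalString(in)); // null
            System.out.println(readOptionalString(in)); // breadth_first
        }
    }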
@ -266,7 +266,9 @@ public class TermsAggregationBuilder extends ValuesSourceAggregationBuilder<Valu
|
||||
}
|
||||
builder.field(ORDER_FIELD.getPreferredName());
|
||||
order.toXContent(builder, params);
|
||||
builder.field(SubAggCollectionMode.KEY.getPreferredName(), collectMode.parseField().getPreferredName());
|
||||
if (collectMode != null) {
|
||||
builder.field(SubAggCollectionMode.KEY.getPreferredName(), collectMode.parseField().getPreferredName());
|
||||
}
|
||||
if (includeExclude != null) {
|
||||
includeExclude.toXContent(builder, params);
|
||||
}
|
||||
|
@ -150,14 +150,22 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory<Values
|
||||
}
|
||||
}
|
||||
}
|
||||
SubAggCollectionMode cm = collectMode;
|
||||
if (cm == null) {
|
||||
cm = SubAggCollectionMode.DEPTH_FIRST;
|
||||
if (factories != AggregatorFactories.EMPTY) {
|
||||
cm = subAggCollectionMode(bucketCountThresholds.getShardSize(), maxOrd);
|
||||
}
|
||||
}
|
||||
|
||||
DocValueFormat format = config.format();
|
||||
if ((includeExclude != null) && (includeExclude.isRegexBased()) && format != DocValueFormat.RAW) {
|
||||
throw new AggregationExecutionException("Aggregation [" + name + "] cannot support regular expression style include/exclude "
|
||||
+ "settings as they can only be applied to string fields. Use an array of values for include/exclude clauses");
|
||||
}
|
||||
|
||||
return execution.create(name, factories, valuesSource, order, format, bucketCountThresholds, includeExclude, context, parent,
|
||||
collectMode, showTermDocCountError, pipelineAggregators, metaData);
|
||||
return execution.create(name, factories, valuesSource, order, format, bucketCountThresholds, includeExclude, context, parent,
|
||||
cm, showTermDocCountError, pipelineAggregators, metaData);
|
||||
}
|
||||
|
||||
if ((includeExclude != null) && (includeExclude.isRegexBased())) {
|
||||
@ -167,19 +175,27 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory<Values
|
||||
|
||||
if (valuesSource instanceof ValuesSource.Numeric) {
|
||||
IncludeExclude.LongFilter longFilter = null;
|
||||
SubAggCollectionMode cm = collectMode;
|
||||
if (cm == null) {
|
||||
if (factories != AggregatorFactories.EMPTY) {
|
||||
cm = subAggCollectionMode(bucketCountThresholds.getShardSize(), -1);
|
||||
} else {
|
||||
cm = SubAggCollectionMode.DEPTH_FIRST;
|
||||
}
|
||||
}
|
||||
if (((ValuesSource.Numeric) valuesSource).isFloatingPoint()) {
|
||||
if (includeExclude != null) {
|
||||
longFilter = includeExclude.convertToDoubleFilter();
|
||||
}
|
||||
return new DoubleTermsAggregator(name, factories, (ValuesSource.Numeric) valuesSource, config.format(), order,
|
||||
bucketCountThresholds, context, parent, collectMode, showTermDocCountError, longFilter,
|
||||
bucketCountThresholds, context, parent, cm, showTermDocCountError, longFilter,
|
||||
pipelineAggregators, metaData);
|
||||
}
|
||||
if (includeExclude != null) {
|
||||
longFilter = includeExclude.convertToLongFilter(config.format());
|
||||
}
|
||||
return new LongTermsAggregator(name, factories, (ValuesSource.Numeric) valuesSource, config.format(), order,
|
||||
bucketCountThresholds, context, parent, collectMode, showTermDocCountError, longFilter, pipelineAggregators,
|
||||
bucketCountThresholds, context, parent, cm, showTermDocCountError, longFilter, pipelineAggregators,
|
||||
metaData);
|
||||
}
|
||||
|
||||
@ -187,6 +203,20 @@ public class TermsAggregatorFactory extends ValuesSourceAggregatorFactory<Values
|
||||
+ "]. It can only be applied to numeric or string fields.");
|
||||
}
|
||||
|
||||
// return the SubAggCollectionMode that this aggregation should use based on the expected size
// and the cardinality of the field
static SubAggCollectionMode subAggCollectionMode(int expectedSize, long maxOrd) {
if (expectedSize == Integer.MAX_VALUE) {
// return all buckets
return SubAggCollectionMode.DEPTH_FIRST;
}
if (maxOrd == -1 || maxOrd > expectedSize) {
// use breadth_first if the cardinality is bigger than the expected size or unknown (-1)
return SubAggCollectionMode.BREADTH_FIRST;
}
return SubAggCollectionMode.DEPTH_FIRST;
}
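The heuristic above only forces breadth_first when a shard may hold more distinct terms than the requested size, or when the cardinality is unknown, since eagerly collecting sub-aggregations for buckets that end up pruned is wasted work. A stand-alone restatement of the same decision table, with an illustrative Mode enum instead of the real SubAggCollectionMode:

    public class CollectModeHeuristicExample {
        enum Mode { DEPTH_FIRST, BREADTH_FIRST }

        // expectedSize: how many buckets the shard is asked to return; maxOrd: distinct terms seen, -1 if unknown
        static Mode choose(int expectedSize, long maxOrd) {
            if (expectedSize == Integer.MAX_VALUE) {
                return Mode.DEPTH_FIRST;            // caller wants every bucket anyway
            }
            if (maxOrd == -1 || maxOrd > expectedSize) {
                return Mode.BREADTH_FIRST;          // many (or unknown) candidate buckets, defer sub-aggs
            }
            return Mode.DEPTH_FIRST;
        }

        public static void main(String[] args) {
            System.out.println(choose(10, 1_000_000));          // BREADTH_FIRST
            System.out.println(choose(10, 5));                  // DEPTH_FIRST
            System.out.println(choose(Integer.MAX_VALUE, -1));  // DEPTH_FIRST
        }
    }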

public enum ExecutionMode {

MAP(new ParseField("map")) {
@ -19,6 +19,8 @@

package org.elasticsearch.tasks;

import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchTimeoutException;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionResponse;
@ -28,6 +30,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.common.util.concurrent.ConcurrentMapLong;
import org.elasticsearch.transport.TransportRequest;
@ -43,10 +46,13 @@ import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.Consumer;

import static org.elasticsearch.common.unit.TimeValue.timeValueMillis;

/**
* Task Manager service for keeping track of currently running tasks on the nodes
*/
public class TaskManager extends AbstractComponent implements ClusterStateListener {
private static final TimeValue WAIT_FOR_COMPLETION_POLL = timeValueMillis(100);

private final ConcurrentMapLong<Task> tasks = ConcurrentCollections.newConcurrentMapLongWithAggressiveConcurrency();

@ -341,6 +347,23 @@ public class TaskManager extends AbstractComponent implements ClusterStateListen
}
}

/**
* Blocks the calling thread, waiting for the task to vanish from the TaskManager.
*/
public void waitForTaskCompletion(Task task, long untilInNanos) {
while (System.nanoTime() - untilInNanos < 0) {
if (getTask(task.getId()) == null) {
return;
}
try {
Thread.sleep(WAIT_FOR_COMPLETION_POLL.millis());
} catch (InterruptedException e) {
throw new ElasticsearchException("Interrupted waiting for completion of [{}]", e, task);
}
}
throw new ElasticsearchTimeoutException("Timed out waiting for completion of [{}]", task);
}

private static class CancellableTaskHolder {

private static final String TASK_FINISHED_MARKER = "task finished";
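waitForTaskCompletion polls rather than registering a callback, and it compares System.nanoTime() by subtraction (System.nanoTime() - untilInNanos < 0) so the check stays correct even if the counter wraps. A self-contained sketch of the same deadline-polling shape, with a generic condition standing in for the task lookup:

    import java.util.concurrent.TimeUnit;
    import java.util.concurrent.TimeoutException;
    import java.util.function.BooleanSupplier;

    public class DeadlinePollingExample {
        static final long POLL_MILLIS = 100;

        static void waitFor(BooleanSupplier done, long timeoutMillis) throws TimeoutException, InterruptedException {
            // compute an absolute deadline once, then compare by subtraction (overflow-safe for nanoTime)
            long deadlineNanos = System.nanoTime() + TimeUnit.MILLISECONDS.toNanos(timeoutMillis);
            while (System.nanoTime() - deadlineNanos < 0) {
                if (done.getAsBoolean()) {
                    return;
                }
                Thread.sleep(POLL_MILLIS);
            }
            throw new TimeoutException("timed out waiting for condition");
        }

        public static void main(String[] args) throws Exception {
            long start = System.currentTimeMillis();
            waitFor(() -> System.currentTimeMillis() - start > 300, 5_000); // finishes after ~300ms
            System.out.println("condition met");
        }
    }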
@ -51,7 +51,7 @@ public abstract class ExecutorBuilder<U extends ExecutorBuilder.ExecutorSettings
*
* @return the list of registered settings
*/
abstract List<Setting<?>> getRegisteredSettings();
public abstract List<Setting<?>> getRegisteredSettings();

/**
* Return an executor settings object from the node-level settings.
@ -86,7 +86,7 @@ public final class FixedExecutorBuilder extends ExecutorBuilder<FixedExecutorBui
}

@Override
List<Setting<?>> getRegisteredSettings() {
public List<Setting<?>> getRegisteredSettings() {
return Arrays.asList(sizeSetting, queueSizeSetting);
}

@ -77,7 +77,7 @@ public final class ScalingExecutorBuilder extends ExecutorBuilder<ScalingExecuto
}

@Override
List<Setting<?>> getRegisteredSettings() {
public List<Setting<?>> getRegisteredSettings() {
return Arrays.asList(coreSetting, maxSetting, keepAliveSetting);
}
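With getRegisteredSettings() public, callers can pull each executor builder's settings directly instead of having a module push them into SettingsModule, which is what the ThreadPoolModule removed below used to do. A stand-alone sketch of the pull-style collection, using a hypothetical Builder interface and plain setting keys:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    public class SettingsCollectionExample {
        /** Hypothetical stand-in for an executor builder exposing its settings. */
        interface Builder {
            List<String> getRegisteredSettings();
        }

        public static void main(String[] args) {
            List<Builder> builders = Arrays.asList(
                    () -> Arrays.asList("thread_pool.search.size", "thread_pool.search.queue_size"),
                    () -> Arrays.asList("thread_pool.generic.core", "thread_pool.generic.max"));

            // the caller can now gather everything itself, no binding module required
            List<String> all = new ArrayList<>();
            for (Builder builder : builders) {
                all.addAll(builder.getRegisteredSettings());
            }
            System.out.println(all);
        }
    }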
@ -1,44 +0,0 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.threadpool;
|
||||
|
||||
import org.elasticsearch.common.inject.AbstractModule;
|
||||
import org.elasticsearch.common.settings.SettingsModule;
|
||||
|
||||
public class ThreadPoolModule extends AbstractModule {
|
||||
|
||||
private final ThreadPool threadPool;
|
||||
|
||||
public ThreadPoolModule(final ThreadPool threadPool) {
|
||||
this.threadPool = threadPool;
|
||||
}
|
||||
|
||||
public void prepareSettings(SettingsModule settingsModule) {
|
||||
for (final ExecutorBuilder<?> builder : threadPool.builders()) {
|
||||
builder.getRegisteredSettings().forEach(settingsModule::registerSetting);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void configure() {
|
||||
bind(ThreadPool.class).toInstance(threadPool);
|
||||
}
|
||||
|
||||
}
|
@ -60,6 +60,10 @@ public class RecordingTaskManagerListener implements MockTaskManagerListener {
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void waitForTaskCompletion(Task task) {
|
||||
}
|
||||
|
||||
public synchronized List<Tuple<Boolean, TaskInfo>> getEvents() {
|
||||
return Collections.unmodifiableList(new ArrayList<>(events));
|
||||
}
|
||||
|
@ -363,6 +363,10 @@ public class TasksIT extends ESIntegTestCase {
|
||||
taskFinishLock.lock();
|
||||
taskFinishLock.unlock();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void waitForTaskCompletion(Task task) {
|
||||
}
|
||||
});
|
||||
}
|
||||
indexFuture = client().prepareIndex("test", "test").setSource("test", "test").execute();
|
||||
@ -470,8 +474,30 @@
// Wait for the task to start
assertBusy(() -> client().admin().cluster().prepareGetTask(taskId).get());

// Spin up a request to wait for that task to finish
// Register listeners so we can be sure the waiting started
CountDownLatch waitForWaitingToStart = new CountDownLatch(1);
for (TransportService transportService : internalCluster().getInstances(TransportService.class)) {
((MockTaskManager) transportService.getTaskManager()).addListener(new MockTaskManagerListener() {
@Override
public void waitForTaskCompletion(Task task) {
}

@Override
public void onTaskRegistered(Task task) {
}

@Override
public void onTaskUnregistered(Task task) {
waitForWaitingToStart.countDown();
}
});
}

// Spin up a request to wait for the test task to finish
waitResponseFuture = wait.apply(taskId);

// Wait for the wait to start
waitForWaitingToStart.await();
} finally {
// Unblock the request so the wait for completion request can finish
TestTaskPlugin.UnblockTestTasksAction.INSTANCE.newRequestBuilder(client()).get();
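The test above uses a CountDownLatch so the main thread only proceeds once the listener has actually fired, instead of sleeping and hoping. The same coordination pattern in a minimal, stand-alone form:

    import java.util.concurrent.CountDownLatch;

    public class LatchCoordinationExample {
        public static void main(String[] args) throws InterruptedException {
            CountDownLatch started = new CountDownLatch(1);

            Thread background = new Thread(() -> {
                // ... do the interesting work, then signal that it happened
                started.countDown();
            });
            background.start();

            // blocks until countDown() has been called, however long that takes
            started.await();
            System.out.println("background work observed");
            background.join();
        }
    }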
@ -160,7 +160,7 @@ public class MultiSearchRequestTests extends ESTestCase {
|
||||
MultiSearchResponse response = new MultiSearchResponse(new MultiSearchResponse.Item[]{new MultiSearchResponse.Item(null, new IllegalStateException("foobar")), new MultiSearchResponse.Item(null, new IllegalStateException("baaaaaazzzz"))});
|
||||
XContentBuilder builder = XContentFactory.jsonBuilder();
|
||||
response.toXContent(builder, ToXContent.EMPTY_PARAMS);
|
||||
assertEquals("\"responses\"[{\"error\":{\"root_cause\":[{\"type\":\"illegal_state_exception\",\"reason\":\"foobar\"}],\"type\":\"illegal_state_exception\",\"reason\":\"foobar\"}},{\"error\":{\"root_cause\":[{\"type\":\"illegal_state_exception\",\"reason\":\"baaaaaazzzz\"}],\"type\":\"illegal_state_exception\",\"reason\":\"baaaaaazzzz\"}}]",
|
||||
assertEquals("\"responses\"[{\"error\":{\"root_cause\":[{\"type\":\"illegal_state_exception\",\"reason\":\"foobar\"}],\"type\":\"illegal_state_exception\",\"reason\":\"foobar\"},\"status\":500},{\"error\":{\"root_cause\":[{\"type\":\"illegal_state_exception\",\"reason\":\"baaaaaazzzz\"}],\"type\":\"illegal_state_exception\",\"reason\":\"baaaaaazzzz\"},\"status\":500}]",
|
||||
builder.string());
|
||||
}
|
||||
|
||||
|
@ -67,32 +67,32 @@ public class ClusterModuleTests extends ModuleTestCase {
|
||||
}
|
||||
|
||||
public void testRegisterClusterDynamicSettingDuplicate() {
|
||||
SettingsModule module = new SettingsModule(Settings.EMPTY);
|
||||
try {
|
||||
module.registerSetting(EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING);
|
||||
new SettingsModule(Settings.EMPTY, EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING);
|
||||
} catch (IllegalArgumentException e) {
|
||||
assertEquals(e.getMessage(), "Cannot register setting [" + EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey() + "] twice");
|
||||
assertEquals(e.getMessage(),
|
||||
"Cannot register setting [" + EnableAllocationDecider.CLUSTER_ROUTING_ALLOCATION_ENABLE_SETTING.getKey() + "] twice");
|
||||
}
|
||||
}
|
||||
|
||||
public void testRegisterClusterDynamicSetting() {
|
||||
SettingsModule module = new SettingsModule(Settings.EMPTY);
|
||||
module.registerSetting(Setting.boolSetting("foo.bar", false, Property.Dynamic, Property.NodeScope));
|
||||
SettingsModule module = new SettingsModule(Settings.EMPTY,
|
||||
Setting.boolSetting("foo.bar", false, Property.Dynamic, Property.NodeScope));
|
||||
assertInstanceBinding(module, ClusterSettings.class, service -> service.hasDynamicSetting("foo.bar"));
|
||||
}
|
||||
|
||||
public void testRegisterIndexDynamicSettingDuplicate() {
|
||||
SettingsModule module = new SettingsModule(Settings.EMPTY);
|
||||
try {
|
||||
module.registerSetting(EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE_SETTING);
|
||||
new SettingsModule(Settings.EMPTY, EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE_SETTING);
|
||||
} catch (IllegalArgumentException e) {
|
||||
assertEquals(e.getMessage(), "Cannot register setting [" + EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE_SETTING.getKey() + "] twice");
|
||||
assertEquals(e.getMessage(),
|
||||
"Cannot register setting [" + EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE_SETTING.getKey() + "] twice");
|
||||
}
|
||||
}
|
||||
|
||||
public void testRegisterIndexDynamicSetting() {
|
||||
SettingsModule module = new SettingsModule(Settings.EMPTY);
|
||||
module.registerSetting(Setting.boolSetting("index.foo.bar", false, Property.Dynamic, Property.IndexScope));
|
||||
SettingsModule module = new SettingsModule(Settings.EMPTY,
|
||||
Setting.boolSetting("index.foo.bar", false, Property.Dynamic, Property.IndexScope));
|
||||
assertInstanceBinding(module, IndexScopedSettings.class, service -> service.hasDynamicSetting("index.foo.bar"));
|
||||
}
|
||||
|
||||
@ -101,7 +101,8 @@ public class ClusterModuleTests extends ModuleTestCase {
|
||||
try {
|
||||
module.registerAllocationDecider(EnableAllocationDecider.class);
|
||||
} catch (IllegalArgumentException e) {
|
||||
assertEquals(e.getMessage(), "Can't register the same [allocation_decider] more than once for [" + EnableAllocationDecider.class.getName() + "]");
|
||||
assertEquals(e.getMessage(),
|
||||
"Can't register the same [allocation_decider] more than once for [" + EnableAllocationDecider.class.getName() + "]");
|
||||
}
|
||||
}
|
||||
|
||||
@ -146,7 +147,8 @@ public class ClusterModuleTests extends ModuleTestCase {
|
||||
module.registerIndexTemplateFilter(FakeIndexTemplateFilter.class);
|
||||
module.registerIndexTemplateFilter(FakeIndexTemplateFilter.class);
|
||||
} catch (IllegalArgumentException e) {
|
||||
assertEquals(e.getMessage(), "Can't register the same [index_template_filter] more than once for [" + FakeIndexTemplateFilter.class.getName() + "]");
|
||||
assertEquals(e.getMessage(),
|
||||
"Can't register the same [index_template_filter] more than once for [" + FakeIndexTemplateFilter.class.getName() + "]");
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -30,7 +30,9 @@ import org.elasticsearch.plugins.Plugin;
|
||||
import org.elasticsearch.test.ESIntegTestCase;
|
||||
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.List;
|
||||
|
||||
import static org.elasticsearch.test.ESIntegTestCase.Scope.SUITE;
|
||||
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
|
||||
@ -56,12 +58,16 @@ public class SettingsFilteringIT extends ESIntegTestCase {
|
||||
return Settings.builder().put("some.node.setting", true).put("some.other.node.setting", true).build();
|
||||
}
|
||||
|
||||
public void onModule(SettingsModule module) {
|
||||
module.registerSetting(SOME_NODE_SETTING);
|
||||
module.registerSetting(SOME_OTHER_NODE_SETTING);
|
||||
module.registerSetting(Setting.groupSetting("index.filter_test.", Property.IndexScope));
|
||||
module.registerSettingsFilter("index.filter_test.foo");
|
||||
module.registerSettingsFilter("index.filter_test.bar*");
|
||||
@Override
|
||||
public List<Setting<?>> getSettings() {
|
||||
return Arrays.asList(SOME_NODE_SETTING,
|
||||
SOME_OTHER_NODE_SETTING,
|
||||
Setting.groupSetting("index.filter_test.", Property.IndexScope));
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<String> getSettingsFilter() {
|
||||
return Arrays.asList("index.filter_test.foo", "index.filter_test.bar*");
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -23,6 +23,9 @@ import org.elasticsearch.common.rounding.TimeZoneRounding.TimeIntervalRounding;
|
||||
import org.elasticsearch.common.rounding.TimeZoneRounding.TimeUnitRounding;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
import org.hamcrest.Description;
|
||||
import org.hamcrest.Matcher;
|
||||
import org.hamcrest.TypeSafeMatcher;
|
||||
import org.joda.time.DateTime;
|
||||
import org.joda.time.DateTimeConstants;
|
||||
import org.joda.time.DateTimeZone;
|
||||
@ -38,67 +41,70 @@ import static org.hamcrest.Matchers.lessThanOrEqualTo;
|
||||
/**
|
||||
*/
|
||||
public class TimeZoneRoundingTests extends ESTestCase {
|
||||
final static DateTimeZone JERUSALEM_TIMEZONE = DateTimeZone.forID("Asia/Jerusalem");
|
||||
|
||||
public void testUTCTimeUnitRounding() {
|
||||
Rounding tzRounding = TimeZoneRounding.builder(DateTimeUnit.MONTH_OF_YEAR).build();
|
||||
assertThat(tzRounding.round(utc("2009-02-03T01:01:01")), equalTo(utc("2009-02-01T00:00:00.000Z")));
|
||||
assertThat(tzRounding.nextRoundingValue(utc("2009-02-01T00:00:00.000Z")), equalTo(utc("2009-03-01T00:00:00.000Z")));
|
||||
DateTimeZone tz = DateTimeZone.UTC;
|
||||
assertThat(tzRounding.round(time("2009-02-03T01:01:01")), isDate(time("2009-02-01T00:00:00.000Z"), tz));
|
||||
assertThat(tzRounding.nextRoundingValue(time("2009-02-01T00:00:00.000Z")), isDate(time("2009-03-01T00:00:00.000Z"), tz));
|
||||
|
||||
tzRounding = TimeZoneRounding.builder(DateTimeUnit.WEEK_OF_WEEKYEAR).build();
|
||||
assertThat(tzRounding.round(utc("2012-01-10T01:01:01")), equalTo(utc("2012-01-09T00:00:00.000Z")));
|
||||
assertThat(tzRounding.nextRoundingValue(utc("2012-01-09T00:00:00.000Z")), equalTo(utc("2012-01-16T00:00:00.000Z")));
|
||||
assertThat(tzRounding.round(time("2012-01-10T01:01:01")), isDate(time("2012-01-09T00:00:00.000Z"), tz));
|
||||
assertThat(tzRounding.nextRoundingValue(time("2012-01-09T00:00:00.000Z")), isDate(time("2012-01-16T00:00:00.000Z"), tz));
|
||||
|
||||
tzRounding = TimeZoneRounding.builder(DateTimeUnit.WEEK_OF_WEEKYEAR).offset(-TimeValue.timeValueHours(24).millis()).build();
|
||||
assertThat(tzRounding.round(utc("2012-01-10T01:01:01")), equalTo(utc("2012-01-08T00:00:00.000Z")));
|
||||
assertThat(tzRounding.nextRoundingValue(utc("2012-01-08T00:00:00.000Z")), equalTo(utc("2012-01-15T00:00:00.000Z")));
|
||||
assertThat(tzRounding.round(time("2012-01-10T01:01:01")), isDate(time("2012-01-08T00:00:00.000Z"), tz));
|
||||
assertThat(tzRounding.nextRoundingValue(time("2012-01-08T00:00:00.000Z")), isDate(time("2012-01-15T00:00:00.000Z"), tz));
|
||||
}
|
||||
|
||||
public void testUTCIntervalRounding() {
|
||||
Rounding tzRounding = TimeZoneRounding.builder(TimeValue.timeValueHours(12)).build();
|
||||
assertThat(tzRounding.round(utc("2009-02-03T01:01:01")), equalTo(utc("2009-02-03T00:00:00.000Z")));
|
||||
long roundKey = tzRounding.roundKey(utc("2009-02-03T01:01:01"));
|
||||
assertThat(roundKey, equalTo(tzRounding.roundKey(utc("2009-02-03T00:00:00.000Z"))));
|
||||
assertThat(tzRounding.valueForKey(roundKey), equalTo(utc("2009-02-03T00:00:00.000Z")));
|
||||
assertThat(tzRounding.nextRoundingValue(utc("2009-02-03T00:00:00.000Z")), equalTo(utc("2009-02-03T12:00:00.000Z")));
|
||||
assertThat(tzRounding.round(utc("2009-02-03T13:01:01")), equalTo(utc("2009-02-03T12:00:00.000Z")));
|
||||
assertThat(tzRounding.nextRoundingValue(utc("2009-02-03T12:00:00.000Z")), equalTo(utc("2009-02-04T00:00:00.000Z")));
|
||||
DateTimeZone tz = DateTimeZone.UTC;
|
||||
assertThat(tzRounding.round(time("2009-02-03T01:01:01")), isDate(time("2009-02-03T00:00:00.000Z"), tz));
|
||||
long roundKey = tzRounding.roundKey(time("2009-02-03T01:01:01"));
|
||||
assertThat(roundKey, isDate(tzRounding.roundKey(time("2009-02-03T00:00:00.000Z")), tz));
|
||||
assertThat(tzRounding.valueForKey(roundKey), isDate(time("2009-02-03T00:00:00.000Z"), tz));
|
||||
assertThat(tzRounding.nextRoundingValue(time("2009-02-03T00:00:00.000Z")), isDate(time("2009-02-03T12:00:00.000Z"), tz));
|
||||
assertThat(tzRounding.round(time("2009-02-03T13:01:01")), isDate(time("2009-02-03T12:00:00.000Z"), tz));
|
||||
assertThat(tzRounding.nextRoundingValue(time("2009-02-03T12:00:00.000Z")), isDate(time("2009-02-04T00:00:00.000Z"), tz));
|
||||
|
||||
tzRounding = TimeZoneRounding.builder(TimeValue.timeValueHours(48)).build();
|
||||
assertThat(tzRounding.round(utc("2009-02-03T01:01:01")), equalTo(utc("2009-02-03T00:00:00.000Z")));
|
||||
assertThat(tzRounding.nextRoundingValue(utc("2009-02-03T00:00:00.000Z")), equalTo(utc("2009-02-05T00:00:00.000Z")));
|
||||
assertThat(tzRounding.round(utc("2009-02-05T13:01:01")), equalTo(utc("2009-02-05T00:00:00.000Z")));
|
||||
assertThat(tzRounding.nextRoundingValue(utc("2009-02-05T00:00:00.000Z")), equalTo(utc("2009-02-07T00:00:00.000Z")));
|
||||
assertThat(tzRounding.round(time("2009-02-03T01:01:01")), isDate(time("2009-02-03T00:00:00.000Z"), tz));
|
||||
assertThat(tzRounding.nextRoundingValue(time("2009-02-03T00:00:00.000Z")), isDate(time("2009-02-05T00:00:00.000Z"), tz));
|
||||
assertThat(tzRounding.round(time("2009-02-05T13:01:01")), isDate(time("2009-02-05T00:00:00.000Z"), tz));
|
||||
assertThat(tzRounding.nextRoundingValue(time("2009-02-05T00:00:00.000Z")), isDate(time("2009-02-07T00:00:00.000Z"), tz));
|
||||
}
|
||||
|
||||
/**
|
||||
* test TimeIntervalTimeZoneRounding, (interval < 12h) with time zone shift
|
||||
*/
|
||||
public void testTimeIntervalTimeZoneRounding() {
|
||||
Rounding tzRounding = TimeZoneRounding.builder(TimeValue.timeValueHours(6)).timeZone(DateTimeZone.forOffsetHours(-1)).build();
|
||||
assertThat(tzRounding.round(utc("2009-02-03T00:01:01")), equalTo(utc("2009-02-02T19:00:00.000Z")));
|
||||
long roundKey = tzRounding.roundKey(utc("2009-02-03T00:01:01"));
|
||||
assertThat(roundKey, equalTo(tzRounding.roundKey(utc("2009-02-02T19:00:00.000Z"))));
|
||||
assertThat(tzRounding.valueForKey(roundKey), equalTo(utc("2009-02-02T19:00:00.000Z")));
|
||||
assertThat(tzRounding.nextRoundingValue(utc("2009-02-02T19:00:00.000Z")), equalTo(utc("2009-02-03T01:00:00.000Z")));
|
||||
DateTimeZone tz = DateTimeZone.forOffsetHours(-1);
|
||||
Rounding tzRounding = TimeZoneRounding.builder(TimeValue.timeValueHours(6)).timeZone(tz).build();
|
||||
assertThat(tzRounding.round(time("2009-02-03T00:01:01")), isDate(time("2009-02-02T19:00:00.000Z"), tz));
|
||||
long roundKey = tzRounding.roundKey(time("2009-02-03T00:01:01"));
|
||||
assertThat(roundKey, equalTo(tzRounding.roundKey(time("2009-02-02T19:00:00.000Z"))));
|
||||
assertThat(tzRounding.valueForKey(roundKey), isDate(time("2009-02-02T19:00:00.000Z"), tz));
|
||||
assertThat(tzRounding.nextRoundingValue(time("2009-02-02T19:00:00.000Z")), isDate(time("2009-02-03T01:00:00.000Z"), tz));
|
||||
|
||||
assertThat(tzRounding.round(utc("2009-02-03T13:01:01")), equalTo(utc("2009-02-03T13:00:00.000Z")));
|
||||
assertThat(tzRounding.nextRoundingValue(utc("2009-02-03T13:00:00.000Z")), equalTo(utc("2009-02-03T19:00:00.000Z")));
|
||||
assertThat(tzRounding.round(time("2009-02-03T13:01:01")), isDate(time("2009-02-03T13:00:00.000Z"), tz));
|
||||
assertThat(tzRounding.nextRoundingValue(time("2009-02-03T13:00:00.000Z")), isDate(time("2009-02-03T19:00:00.000Z"), tz));
|
||||
}
|
||||
|
||||
/**
|
||||
* test DayIntervalTimeZoneRounding, (interval >= 12h) with time zone shift
|
||||
*/
|
||||
public void testDayIntervalTimeZoneRounding() {
|
||||
Rounding tzRounding = TimeZoneRounding.builder(TimeValue.timeValueHours(12)).timeZone(DateTimeZone.forOffsetHours(-8)).build();
|
||||
assertThat(tzRounding.round(utc("2009-02-03T00:01:01")), equalTo(utc("2009-02-02T20:00:00.000Z")));
|
||||
long roundKey = tzRounding.roundKey(utc("2009-02-03T00:01:01"));
|
||||
assertThat(roundKey, equalTo(tzRounding.roundKey(utc("2009-02-02T20:00:00.000Z"))));
|
||||
assertThat(tzRounding.valueForKey(roundKey), equalTo(utc("2009-02-02T20:00:00.000Z")));
|
||||
assertThat(tzRounding.nextRoundingValue(utc("2009-02-02T20:00:00.000Z")), equalTo(utc("2009-02-03T08:00:00.000Z")));
|
||||
DateTimeZone tz = DateTimeZone.forOffsetHours(-8);
|
||||
Rounding tzRounding = TimeZoneRounding.builder(TimeValue.timeValueHours(12)).timeZone(tz).build();
|
||||
assertThat(tzRounding.round(time("2009-02-03T00:01:01")), isDate(time("2009-02-02T20:00:00.000Z"), tz));
|
||||
long roundKey = tzRounding.roundKey(time("2009-02-03T00:01:01"));
|
||||
assertThat(roundKey, isDate(tzRounding.roundKey(time("2009-02-02T20:00:00.000Z")), tz));
|
||||
assertThat(tzRounding.valueForKey(roundKey), isDate(time("2009-02-02T20:00:00.000Z"), tz));
|
||||
assertThat(tzRounding.nextRoundingValue(time("2009-02-02T20:00:00.000Z")), isDate(time("2009-02-03T08:00:00.000Z"), tz));
|
||||
|
||||
assertThat(tzRounding.round(utc("2009-02-03T13:01:01")), equalTo(utc("2009-02-03T08:00:00.000Z")));
|
||||
assertThat(tzRounding.nextRoundingValue(utc("2009-02-03T08:00:00.000Z")), equalTo(utc("2009-02-03T20:00:00.000Z")));
|
||||
assertThat(tzRounding.round(time("2009-02-03T13:01:01")), isDate(time("2009-02-03T08:00:00.000Z"), tz));
|
||||
assertThat(tzRounding.nextRoundingValue(time("2009-02-03T08:00:00.000Z")), isDate(time("2009-02-03T20:00:00.000Z"), tz));
|
||||
}
|
||||
|
||||
public void testDayTimeZoneRounding() {
|
||||
@ -109,106 +115,72 @@ public class TimeZoneRoundingTests extends ESTestCase {
|
||||
assertThat(tzRounding.nextRoundingValue(0L - TimeValue.timeValueHours(24 + timezoneOffset).millis()), equalTo(0L - TimeValue
|
||||
.timeValueHours(timezoneOffset).millis()));
|
||||
|
||||
tzRounding = TimeZoneRounding.builder(DateTimeUnit.DAY_OF_MONTH).timeZone(DateTimeZone.forID("-08:00")).build();
|
||||
assertThat(tzRounding.round(utc("2012-04-01T04:15:30Z")), equalTo(utc("2012-03-31T08:00:00Z")));
|
||||
assertThat(toUTCDateString(tzRounding.nextRoundingValue(utc("2012-03-31T08:00:00Z"))),
|
||||
equalTo(toUTCDateString(utc("2012-04-01T08:0:00Z"))));
|
||||
DateTimeZone tz = DateTimeZone.forID("-08:00");
|
||||
tzRounding = TimeZoneRounding.builder(DateTimeUnit.DAY_OF_MONTH).timeZone(tz).build();
|
||||
assertThat(tzRounding.round(time("2012-04-01T04:15:30Z")), isDate(time("2012-03-31T08:00:00Z"), tz));
|
||||
|
||||
tzRounding = TimeZoneRounding.builder(DateTimeUnit.MONTH_OF_YEAR).timeZone(DateTimeZone.forID("-08:00")).build();
|
||||
assertThat(tzRounding.round(utc("2012-04-01T04:15:30Z")), equalTo(utc("2012-03-01T08:00:00Z")));
|
||||
assertThat(toUTCDateString(tzRounding.nextRoundingValue(utc("2012-03-01T08:00:00Z"))),
|
||||
equalTo(toUTCDateString(utc("2012-04-01T08:0:00Z"))));
|
||||
tzRounding = TimeZoneRounding.builder(DateTimeUnit.MONTH_OF_YEAR).timeZone(tz).build();
|
||||
assertThat(tzRounding.round(time("2012-04-01T04:15:30Z")), equalTo(time("2012-03-01T08:00:00Z")));
|
||||
|
||||
// date in Feb-3rd, but still in Feb-2nd in -02:00 timezone
|
||||
tzRounding = TimeZoneRounding.builder(DateTimeUnit.DAY_OF_MONTH).timeZone(DateTimeZone.forID("-02:00")).build();
|
||||
assertThat(tzRounding.round(utc("2009-02-03T01:01:01")), equalTo(utc("2009-02-02T02:00:00")));
|
||||
long roundKey = tzRounding.roundKey(utc("2009-02-03T01:01:01"));
|
||||
assertThat(roundKey, equalTo(tzRounding.roundKey(utc("2009-02-02T02:00:00.000Z"))));
|
||||
assertThat(tzRounding.valueForKey(roundKey), equalTo(utc("2009-02-02T02:00:00.000Z")));
|
||||
assertThat(tzRounding.nextRoundingValue(utc("2009-02-02T02:00:00")), equalTo(utc("2009-02-03T02:00:00")));
|
||||
tz = DateTimeZone.forID("-02:00");
|
||||
tzRounding = TimeZoneRounding.builder(DateTimeUnit.DAY_OF_MONTH).timeZone(tz).build();
|
||||
assertThat(tzRounding.round(time("2009-02-03T01:01:01")), isDate(time("2009-02-02T02:00:00"), tz));
|
||||
long roundKey = tzRounding.roundKey(time("2009-02-03T01:01:01"));
|
||||
assertThat(roundKey, isDate(tzRounding.roundKey(time("2009-02-02T02:00:00.000Z")), tz));
|
||||
assertThat(tzRounding.valueForKey(roundKey), isDate(time("2009-02-02T02:00:00.000Z"), tz));
|
||||
assertThat(tzRounding.nextRoundingValue(time("2009-02-02T02:00:00")), isDate(time("2009-02-03T02:00:00"), tz));
|
||||
|
||||
// date in Feb-3rd, also in -02:00 timezone
|
||||
tzRounding = TimeZoneRounding.builder(DateTimeUnit.DAY_OF_MONTH).timeZone(DateTimeZone.forID("-02:00")).build();
|
||||
assertThat(tzRounding.round(utc("2009-02-03T02:01:01")), equalTo(utc("2009-02-03T02:00:00")));
|
||||
roundKey = tzRounding.roundKey(utc("2009-02-03T02:01:01"));
|
||||
assertThat(roundKey, equalTo(tzRounding.roundKey(utc("2009-02-03T02:00:00.000Z"))));
|
||||
assertThat(tzRounding.valueForKey(roundKey), equalTo(utc("2009-02-03T02:00:00.000Z")));
|
||||
assertThat(tzRounding.nextRoundingValue(utc("2009-02-03T02:00:00")), equalTo(utc("2009-02-04T02:00:00")));
|
||||
tzRounding = TimeZoneRounding.builder(DateTimeUnit.DAY_OF_MONTH).timeZone(tz).build();
|
||||
assertThat(tzRounding.round(time("2009-02-03T02:01:01")), isDate(time("2009-02-03T02:00:00"), tz));
|
||||
roundKey = tzRounding.roundKey(time("2009-02-03T02:01:01"));
|
||||
assertThat(roundKey, isDate(tzRounding.roundKey(time("2009-02-03T02:00:00.000Z")), tz));
|
||||
assertThat(tzRounding.valueForKey(roundKey), isDate(time("2009-02-03T02:00:00.000Z"), tz));
|
||||
assertThat(tzRounding.nextRoundingValue(time("2009-02-03T02:00:00")), isDate(time("2009-02-04T02:00:00"), tz));
|
||||
}
|
||||
|
||||
public void testTimeTimeZoneRounding() {
|
||||
// hour unit
|
||||
Rounding tzRounding = TimeZoneRounding.builder(DateTimeUnit.HOUR_OF_DAY).timeZone(DateTimeZone.forOffsetHours(-2)).build();
|
||||
DateTimeZone tz = DateTimeZone.forOffsetHours(-2);
|
||||
Rounding tzRounding = TimeZoneRounding.builder(DateTimeUnit.HOUR_OF_DAY).timeZone(tz).build();
|
||||
assertThat(tzRounding.round(0), equalTo(0L));
|
||||
assertThat(tzRounding.nextRoundingValue(0L), equalTo(TimeValue.timeValueHours(1L).getMillis()));
|
||||
|
||||
tzRounding = TimeZoneRounding.builder(DateTimeUnit.HOUR_OF_DAY).timeZone(DateTimeZone.forOffsetHours(-2)).build();
|
||||
assertThat(tzRounding.round(utc("2009-02-03T01:01:01")), equalTo(utc("2009-02-03T01:00:00")));
|
||||
assertThat(tzRounding.nextRoundingValue(utc("2009-02-03T01:00:00")), equalTo(utc("2009-02-03T02:00:00")));
|
||||
assertThat(tzRounding.round(time("2009-02-03T01:01:01")), isDate(time("2009-02-03T01:00:00"), tz));
|
||||
assertThat(tzRounding.nextRoundingValue(time("2009-02-03T01:00:00")), isDate(time("2009-02-03T02:00:00"), tz));
|
||||
}
|
||||
|
||||
public void testTimeUnitRoundingDST() {
|
||||
Rounding tzRounding;
|
||||
// testing savings to non savings switch
|
||||
tzRounding = TimeZoneRounding.builder(DateTimeUnit.HOUR_OF_DAY).timeZone(DateTimeZone.forID("UTC")).build();
|
||||
assertThat(tzRounding.round(time("2014-10-26T01:01:01", DateTimeZone.forOffsetHours(2))), // CEST = UTC+2
|
||||
equalTo(time("2014-10-26T01:00:00", DateTimeZone.forOffsetHours(2))));
|
||||
assertThat(tzRounding.nextRoundingValue(time("2014-10-26T01:00:00", DateTimeZone.forOffsetHours(2))),
|
||||
equalTo(time("2014-10-26T02:00:00", DateTimeZone.forOffsetHours(2))));
|
||||
assertThat(tzRounding.nextRoundingValue(time("2014-10-26T02:00:00", DateTimeZone.forOffsetHours(2))),
|
||||
equalTo(time("2014-10-26T03:00:00", DateTimeZone.forOffsetHours(2))));
|
||||
|
||||
tzRounding = TimeZoneRounding.builder(DateTimeUnit.HOUR_OF_DAY).timeZone(DateTimeZone.forID("CET")).build();
|
||||
assertThat(tzRounding.round(time("2014-10-26T01:01:01", DateTimeZone.forOffsetHours(2))), // CEST = UTC+2
|
||||
equalTo(time("2014-10-26T01:00:00", DateTimeZone.forOffsetHours(2))));
|
||||
assertThat(tzRounding.nextRoundingValue(time("2014-10-26T01:00:00", DateTimeZone.forOffsetHours(2))),
|
||||
equalTo(time("2014-10-26T02:00:00", DateTimeZone.forOffsetHours(2))));
|
||||
assertThat(tzRounding.nextRoundingValue(time("2014-10-26T02:00:00", DateTimeZone.forOffsetHours(2))),
|
||||
equalTo(time("2014-10-26T03:00:00", DateTimeZone.forOffsetHours(2))));
|
||||
DateTimeZone cet = DateTimeZone.forID("CET");
|
||||
tzRounding = TimeZoneRounding.builder(DateTimeUnit.HOUR_OF_DAY).timeZone(cet).build();
|
||||
assertThat(tzRounding.round(time("2014-10-26T01:01:01", cet)), isDate(time("2014-10-26T01:00:00+02:00"), cet));
|
||||
assertThat(tzRounding.nextRoundingValue(time("2014-10-26T01:00:00", cet)),isDate(time("2014-10-26T02:00:00+02:00"), cet));
|
||||
assertThat(tzRounding.nextRoundingValue(time("2014-10-26T02:00:00", cet)), isDate(time("2014-10-26T02:00:00+01:00"), cet));
|
||||
|
||||
// testing non savings to savings switch
|
||||
tzRounding = TimeZoneRounding.builder(DateTimeUnit.HOUR_OF_DAY).timeZone(DateTimeZone.forID("UTC")).build();
|
||||
assertThat(tzRounding.round(time("2014-03-30T01:01:01", DateTimeZone.forOffsetHours(1))), // CET = UTC+1
|
||||
equalTo(time("2014-03-30T01:00:00", DateTimeZone.forOffsetHours(1))));
|
||||
assertThat(tzRounding.nextRoundingValue(time("2014-03-30T01:00:00", DateTimeZone.forOffsetHours(1))),
|
||||
equalTo(time("2014-03-30T02:00:00", DateTimeZone.forOffsetHours(1))));
|
||||
assertThat(tzRounding.nextRoundingValue(time("2014-03-30T02:00:00", DateTimeZone.forOffsetHours(1))),
|
||||
equalTo(time("2014-03-30T03:00:00", DateTimeZone.forOffsetHours(1))));
|
||||
|
||||
tzRounding = TimeZoneRounding.builder(DateTimeUnit.HOUR_OF_DAY).timeZone(DateTimeZone.forID("CET")).build();
|
||||
assertThat(tzRounding.round(time("2014-03-30T01:01:01", DateTimeZone.forOffsetHours(1))), // CET = UTC+1
|
||||
equalTo(time("2014-03-30T01:00:00", DateTimeZone.forOffsetHours(1))));
|
||||
assertThat(tzRounding.nextRoundingValue(time("2014-03-30T01:00:00", DateTimeZone.forOffsetHours(1))),
|
||||
equalTo(time("2014-03-30T02:00:00", DateTimeZone.forOffsetHours(1))));
|
||||
assertThat(tzRounding.nextRoundingValue(time("2014-03-30T02:00:00", DateTimeZone.forOffsetHours(1))),
|
||||
equalTo(time("2014-03-30T03:00:00", DateTimeZone.forOffsetHours(1))));
|
||||
tzRounding = TimeZoneRounding.builder(DateTimeUnit.HOUR_OF_DAY).timeZone(cet).build();
|
||||
assertThat(tzRounding.round(time("2014-03-30T01:01:01", cet)), isDate(time("2014-03-30T01:00:00+01:00"), cet));
|
||||
assertThat(tzRounding.nextRoundingValue(time("2014-03-30T01:00:00", cet)), isDate(time("2014-03-30T03:00:00", cet), cet));
|
||||
assertThat(tzRounding.nextRoundingValue(time("2014-03-30T03:00:00", cet)), isDate(time("2014-03-30T04:00:00", cet), cet));
|
||||
|
||||
// testing non savings to savings switch (America/Chicago)
|
||||
tzRounding = TimeZoneRounding.builder(DateTimeUnit.HOUR_OF_DAY).timeZone(DateTimeZone.forID("UTC")).build();
|
||||
assertThat(tzRounding.round(time("2014-03-09T03:01:01", DateTimeZone.forID("America/Chicago"))),
|
||||
equalTo(time("2014-03-09T03:00:00", DateTimeZone.forID("America/Chicago"))));
|
||||
DateTimeZone chg = DateTimeZone.forID("America/Chicago");
|
||||
Rounding tzRounding_utc = TimeZoneRounding.builder(DateTimeUnit.HOUR_OF_DAY).timeZone(DateTimeZone.UTC).build();
|
||||
assertThat(tzRounding.round(time("2014-03-09T03:01:01", chg)), isDate(time("2014-03-09T03:00:00", chg), chg));
|
||||
|
||||
tzRounding = TimeZoneRounding.builder(DateTimeUnit.HOUR_OF_DAY).timeZone(DateTimeZone.forID("America/Chicago")).build();
|
||||
assertThat(tzRounding.round(time("2014-03-09T03:01:01", DateTimeZone.forID("America/Chicago"))),
|
||||
equalTo(time("2014-03-09T03:00:00", DateTimeZone.forID("America/Chicago"))));
|
||||
Rounding tzRounding_chg = TimeZoneRounding.builder(DateTimeUnit.HOUR_OF_DAY).timeZone(chg).build();
|
||||
assertThat(tzRounding_chg.round(time("2014-03-09T03:01:01", chg)), isDate(time("2014-03-09T03:00:00", chg), chg));
|
||||
|
||||
// testing savings to non savings switch 2013 (America/Chicago)
|
||||
tzRounding = TimeZoneRounding.builder(DateTimeUnit.HOUR_OF_DAY).timeZone(DateTimeZone.forID("UTC")).build();
|
||||
assertThat(tzRounding.round(time("2013-11-03T06:01:01", DateTimeZone.forID("America/Chicago"))),
|
||||
equalTo(time("2013-11-03T06:00:00", DateTimeZone.forID("America/Chicago"))));
|
||||
|
||||
tzRounding = TimeZoneRounding.builder(DateTimeUnit.HOUR_OF_DAY).timeZone(DateTimeZone.forID("America/Chicago")).build();
|
||||
assertThat(tzRounding.round(time("2013-11-03T06:01:01", DateTimeZone.forID("America/Chicago"))),
|
||||
equalTo(time("2013-11-03T06:00:00", DateTimeZone.forID("America/Chicago"))));
|
||||
assertThat(tzRounding_utc.round(time("2013-11-03T06:01:01", chg)), isDate(time("2013-11-03T06:00:00", chg), chg));
|
||||
assertThat(tzRounding_chg.round(time("2013-11-03T06:01:01", chg)), isDate(time("2013-11-03T06:00:00", chg), chg));
|
||||
|
||||
// testing savings to non savings switch 2014 (America/Chicago)
|
||||
tzRounding = TimeZoneRounding.builder(DateTimeUnit.HOUR_OF_DAY).timeZone(DateTimeZone.forID("UTC")).build();
|
||||
assertThat(tzRounding.round(time("2014-11-02T06:01:01", DateTimeZone.forID("America/Chicago"))),
|
||||
equalTo(time("2014-11-02T06:00:00", DateTimeZone.forID("America/Chicago"))));
|
||||
|
||||
tzRounding = TimeZoneRounding.builder(DateTimeUnit.HOUR_OF_DAY).timeZone(DateTimeZone.forID("America/Chicago")).build();
|
||||
assertThat(tzRounding.round(time("2014-11-02T06:01:01", DateTimeZone.forID("America/Chicago"))),
|
||||
equalTo(time("2014-11-02T06:00:00", DateTimeZone.forID("America/Chicago"))));
|
||||
assertThat(tzRounding_utc.round(time("2014-11-02T06:01:01", chg)), isDate(time("2014-11-02T06:00:00", chg), chg));
|
||||
assertThat(tzRounding_chg.round(time("2014-11-02T06:01:01", chg)), isDate(time("2014-11-02T06:00:00", chg), chg));
|
||||
}
|
||||
|
||||
/**
|
||||
@ -217,28 +189,28 @@ public class TimeZoneRoundingTests extends ESTestCase {
|
||||
* test dates that are exactly on or close to offset changes (e.g. DST) in the chosen time zone.
|
||||
*
|
||||
* It rounds the test date down and up and performs various checks on the rounding unit interval that is
|
||||
* defined by this. Assumptions tested are described in {@link #assertInterval(long, long, long, TimeZoneRounding)}
|
||||
* defined by this. Assumptions tested are described in {@link #assertInterval(long, long, long, TimeZoneRounding, DateTimeZone)}
|
||||
*/
|
||||
public void testTimeZoneRoundingRandom() {
|
||||
for (int i = 0; i < 1000; ++i) {
|
||||
DateTimeUnit timeUnit = randomTimeUnit();
|
||||
DateTimeZone timezone = randomDateTimeZone();
|
||||
TimeZoneRounding rounding = new TimeZoneRounding.TimeUnitRounding(timeUnit, timezone);
|
||||
DateTimeZone tz = randomDateTimeZone();
|
||||
TimeZoneRounding rounding = new TimeZoneRounding.TimeUnitRounding(timeUnit, tz);
|
||||
long date = Math.abs(randomLong() % (2 * (long) 10e11)); // 1970-01-01T00:00:00Z - 2033-05-18T05:33:20.000+02:00
|
||||
long unitMillis = timeUnit.field(timezone).getDurationField().getUnitMillis();
|
||||
long unitMillis = timeUnit.field(tz).getDurationField().getUnitMillis();
|
||||
if (randomBoolean()) {
|
||||
nastyDate(date, timezone, unitMillis);
|
||||
nastyDate(date, tz, unitMillis);
|
||||
}
|
||||
final long roundedDate = rounding.round(date);
|
||||
final long nextRoundingValue = rounding.nextRoundingValue(roundedDate);
|
||||
|
||||
assertInterval(roundedDate, date, nextRoundingValue, rounding);
|
||||
assertInterval(roundedDate, date, nextRoundingValue, rounding, tz);
|
||||
|
||||
// check correct unit interval width for units smaller than a day, they should be fixed size except for transitions
|
||||
if (unitMillis <= DateTimeConstants.MILLIS_PER_DAY) {
|
||||
// if the interval defined didn't cross timezone offset transition, it should cover unitMillis width
|
||||
if (timezone.getOffset(roundedDate - 1) == timezone.getOffset(nextRoundingValue + 1)) {
|
||||
assertThat("unit interval width not as expected for [" + timeUnit + "], [" + timezone + "] at "
|
||||
if (tz.getOffset(roundedDate - 1) == tz.getOffset(nextRoundingValue + 1)) {
|
||||
assertThat("unit interval width not as expected for [" + timeUnit + "], [" + tz + "] at "
|
||||
+ new DateTime(roundedDate), nextRoundingValue - roundedDate, equalTo(unitMillis));
|
||||
}
|
||||
}
|
||||
@ -264,17 +236,18 @@ public class TimeZoneRoundingTests extends ESTestCase {
|
||||
*/
|
||||
public void testTimeIntervalCET_DST_End() {
|
||||
long interval = TimeUnit.MINUTES.toMillis(20);
|
||||
TimeZoneRounding rounding = new TimeIntervalRounding(interval, DateTimeZone.forID("CET"));
|
||||
DateTimeZone tz = DateTimeZone.forID("CET");
|
||||
TimeZoneRounding rounding = new TimeIntervalRounding(interval, tz);
|
||||
|
||||
assertThat(rounding.round(time("2015-10-25T01:55:00+02:00")), equalTo(time("2015-10-25T01:40:00+02:00")));
|
||||
assertThat(rounding.round(time("2015-10-25T02:15:00+02:00")), equalTo(time("2015-10-25T02:00:00+02:00")));
|
||||
assertThat(rounding.round(time("2015-10-25T02:35:00+02:00")), equalTo(time("2015-10-25T02:20:00+02:00")));
|
||||
assertThat(rounding.round(time("2015-10-25T02:55:00+02:00")), equalTo(time("2015-10-25T02:40:00+02:00")));
|
||||
assertThat(rounding.round(time("2015-10-25T01:55:00+02:00")), isDate(time("2015-10-25T01:40:00+02:00"), tz));
|
||||
assertThat(rounding.round(time("2015-10-25T02:15:00+02:00")), isDate(time("2015-10-25T02:00:00+02:00"), tz));
|
||||
assertThat(rounding.round(time("2015-10-25T02:35:00+02:00")), isDate(time("2015-10-25T02:20:00+02:00"), tz));
|
||||
assertThat(rounding.round(time("2015-10-25T02:55:00+02:00")), isDate(time("2015-10-25T02:40:00+02:00"), tz));
|
||||
// after DST shift
|
||||
assertThat(rounding.round(time("2015-10-25T02:15:00+01:00")), equalTo(time("2015-10-25T02:00:00+01:00")));
|
||||
assertThat(rounding.round(time("2015-10-25T02:35:00+01:00")), equalTo(time("2015-10-25T02:20:00+01:00")));
|
||||
assertThat(rounding.round(time("2015-10-25T02:55:00+01:00")), equalTo(time("2015-10-25T02:40:00+01:00")));
|
||||
assertThat(rounding.round(time("2015-10-25T03:15:00+01:00")), equalTo(time("2015-10-25T03:00:00+01:00")));
|
||||
assertThat(rounding.round(time("2015-10-25T02:15:00+01:00")), isDate(time("2015-10-25T02:00:00+01:00"), tz));
|
||||
assertThat(rounding.round(time("2015-10-25T02:35:00+01:00")), isDate(time("2015-10-25T02:20:00+01:00"), tz));
|
||||
assertThat(rounding.round(time("2015-10-25T02:55:00+01:00")), isDate(time("2015-10-25T02:40:00+01:00"), tz));
|
||||
assertThat(rounding.round(time("2015-10-25T03:15:00+01:00")), isDate(time("2015-10-25T03:00:00+01:00"), tz));
|
||||
}
|
||||
|
||||
/**
|
||||
@ -283,12 +256,13 @@ public class TimeZoneRoundingTests extends ESTestCase {
|
||||
*/
|
||||
public void testTimeIntervalCET_DST_Start() {
|
||||
long interval = TimeUnit.MINUTES.toMillis(20);
|
||||
TimeZoneRounding rounding = new TimeIntervalRounding(interval, DateTimeZone.forID("CET"));
|
||||
DateTimeZone tz = DateTimeZone.forID("CET");
|
||||
TimeZoneRounding rounding = new TimeIntervalRounding(interval, tz);
|
||||
// test DST start
|
||||
assertThat(rounding.round(time("2016-03-27T01:55:00+01:00")), equalTo(time("2016-03-27T01:40:00+01:00")));
|
||||
assertThat(rounding.round(time("2016-03-27T02:00:00+01:00")), equalTo(time("2016-03-27T03:00:00+02:00")));
|
||||
assertThat(rounding.round(time("2016-03-27T03:15:00+02:00")), equalTo(time("2016-03-27T03:00:00+02:00")));
|
||||
assertThat(rounding.round(time("2016-03-27T03:35:00+02:00")), equalTo(time("2016-03-27T03:20:00+02:00")));
|
||||
assertThat(rounding.round(time("2016-03-27T01:55:00+01:00")), isDate(time("2016-03-27T01:40:00+01:00"), tz));
|
||||
assertThat(rounding.round(time("2016-03-27T02:00:00+01:00")), isDate(time("2016-03-27T03:00:00+02:00"), tz));
|
||||
assertThat(rounding.round(time("2016-03-27T03:15:00+02:00")), isDate(time("2016-03-27T03:00:00+02:00"), tz));
|
||||
assertThat(rounding.round(time("2016-03-27T03:35:00+02:00")), isDate(time("2016-03-27T03:20:00+02:00"), tz));
|
||||
}
|
||||
|
||||
/**
|
||||
@ -299,13 +273,14 @@ public class TimeZoneRoundingTests extends ESTestCase {
|
||||
*/
|
||||
public void testTimeInterval_Kathmandu_DST_Start() {
|
||||
long interval = TimeUnit.MINUTES.toMillis(20);
|
||||
TimeZoneRounding rounding = new TimeIntervalRounding(interval, DateTimeZone.forID("Asia/Kathmandu"));
|
||||
assertThat(rounding.round(time("1985-12-31T23:55:00+05:30")), equalTo(time("1985-12-31T23:40:00+05:30")));
|
||||
assertThat(rounding.round(time("1986-01-01T00:16:00+05:45")), equalTo(time("1986-01-01T00:15:00+05:45")));
|
||||
DateTimeZone tz = DateTimeZone.forID("Asia/Kathmandu");
|
||||
TimeZoneRounding rounding = new TimeIntervalRounding(interval, tz);
|
||||
assertThat(rounding.round(time("1985-12-31T23:55:00+05:30")), isDate(time("1985-12-31T23:40:00+05:30"), tz));
|
||||
assertThat(rounding.round(time("1986-01-01T00:16:00+05:45")), isDate(time("1986-01-01T00:15:00+05:45"), tz));
|
||||
assertThat(time("1986-01-01T00:15:00+05:45") - time("1985-12-31T23:40:00+05:30"), equalTo(TimeUnit.MINUTES.toMillis(20)));
|
||||
assertThat(rounding.round(time("1986-01-01T00:26:00+05:45")), equalTo(time("1986-01-01T00:20:00+05:45")));
|
||||
assertThat(rounding.round(time("1986-01-01T00:26:00+05:45")), isDate(time("1986-01-01T00:20:00+05:45"), tz));
|
||||
assertThat(time("1986-01-01T00:20:00+05:45") - time("1986-01-01T00:15:00+05:45"), equalTo(TimeUnit.MINUTES.toMillis(5)));
|
||||
assertThat(rounding.round(time("1986-01-01T00:46:00+05:45")), equalTo(time("1986-01-01T00:40:00+05:45")));
|
||||
assertThat(rounding.round(time("1986-01-01T00:46:00+05:45")), isDate(time("1986-01-01T00:40:00+05:45"), tz));
|
||||
assertThat(time("1986-01-01T00:40:00+05:45") - time("1986-01-01T00:20:00+05:45"), equalTo(TimeUnit.MINUTES.toMillis(20)));
|
||||
}
|
||||
|
||||
@ -319,13 +294,29 @@ public class TimeZoneRoundingTests extends ESTestCase {
|
||||
long interval = TimeUnit.MINUTES.toMillis(14);
|
||||
TimeZoneRounding rounding = new TimeZoneRounding.TimeIntervalRounding(interval, tz);
|
||||
|
||||
assertThat(rounding.round(time("2016-03-27T01:41:00+01:00")), equalTo(time("2016-03-27T01:30:00+01:00")));
|
||||
assertThat(rounding.round(time("2016-03-27T01:51:00+01:00")), equalTo(time("2016-03-27T01:44:00+01:00")));
|
||||
assertThat(rounding.round(time("2016-03-27T01:59:00+01:00")), equalTo(time("2016-03-27T01:58:00+01:00")));
|
||||
assertThat(rounding.round(time("2016-03-27T03:05:00+02:00")), equalTo(time("2016-03-27T03:00:00+02:00")));
|
||||
assertThat(rounding.round(time("2016-03-27T03:12:00+02:00")), equalTo(time("2016-03-27T03:08:00+02:00")));
|
||||
assertThat(rounding.round(time("2016-03-27T03:25:00+02:00")), equalTo(time("2016-03-27T03:22:00+02:00")));
|
||||
assertThat(rounding.round(time("2016-03-27T03:39:00+02:00")), equalTo(time("2016-03-27T03:36:00+02:00")));
|
||||
assertThat(rounding.round(time("2016-03-27T01:41:00+01:00")), isDate(time("2016-03-27T01:30:00+01:00"), tz));
|
||||
assertThat(rounding.round(time("2016-03-27T01:51:00+01:00")), isDate(time("2016-03-27T01:44:00+01:00"), tz));
|
||||
assertThat(rounding.round(time("2016-03-27T01:59:00+01:00")), isDate(time("2016-03-27T01:58:00+01:00"), tz));
|
||||
assertThat(rounding.round(time("2016-03-27T03:05:00+02:00")), isDate(time("2016-03-27T03:00:00+02:00"), tz));
|
||||
assertThat(rounding.round(time("2016-03-27T03:12:00+02:00")), isDate(time("2016-03-27T03:08:00+02:00"), tz));
|
||||
assertThat(rounding.round(time("2016-03-27T03:25:00+02:00")), isDate(time("2016-03-27T03:22:00+02:00"), tz));
|
||||
assertThat(rounding.round(time("2016-03-27T03:39:00+02:00")), isDate(time("2016-03-27T03:36:00+02:00"), tz));
|
||||
}
|
||||
|
||||
/**
* Test for half day rounding intervals crossing DST.
*/
|
||||
public void testIntervalRounding_HalfDay_DST() {
|
||||
DateTimeZone tz = DateTimeZone.forID("CET");
|
||||
long interval = TimeUnit.HOURS.toMillis(12);
|
||||
TimeZoneRounding rounding = new TimeZoneRounding.TimeIntervalRounding(interval, tz);
|
||||
|
||||
assertThat(rounding.round(time("2016-03-26T01:00:00+01:00")), isDate(time("2016-03-26T00:00:00+01:00"), tz));
|
||||
assertThat(rounding.round(time("2016-03-26T13:00:00+01:00")), isDate(time("2016-03-26T12:00:00+01:00"), tz));
|
||||
assertThat(rounding.round(time("2016-03-27T01:00:00+01:00")), isDate(time("2016-03-27T00:00:00+01:00"), tz));
|
||||
assertThat(rounding.round(time("2016-03-27T13:00:00+02:00")), isDate(time("2016-03-27T12:00:00+02:00"), tz));
|
||||
assertThat(rounding.round(time("2016-03-28T01:00:00+02:00")), isDate(time("2016-03-28T00:00:00+02:00"), tz));
|
||||
assertThat(rounding.round(time("2016-03-28T13:00:00+02:00")), isDate(time("2016-03-28T12:00:00+02:00"), tz));
|
||||
}
|
||||
|
||||
/**
|
||||
@ -365,36 +356,39 @@ public class TimeZoneRoundingTests extends ESTestCase {
|
||||
*/
|
||||
public void testAmbiguousHoursAfterDSTSwitch() {
|
||||
Rounding tzRounding;
|
||||
tzRounding = TimeZoneRounding.builder(DateTimeUnit.HOUR_OF_DAY).timeZone(JERUSALEM_TIMEZONE).build();
|
||||
// Both timestamps "2014-10-25T22:30:00Z" and "2014-10-25T23:30:00Z" are "2014-10-26T01:30:00" in local time because
|
||||
// of DST switch between them. This test checks that they are both returned to their correct UTC time after rounding.
|
||||
assertThat(tzRounding.round(time("2014-10-25T22:30:00", DateTimeZone.UTC)), equalTo(time("2014-10-25T22:00:00", DateTimeZone.UTC)));
|
||||
assertThat(tzRounding.round(time("2014-10-25T23:30:00", DateTimeZone.UTC)), equalTo(time("2014-10-25T23:00:00", DateTimeZone.UTC)));
|
||||
final DateTimeZone tz = DateTimeZone.forID("Asia/Jerusalem");
|
||||
tzRounding = TimeZoneRounding.builder(DateTimeUnit.HOUR_OF_DAY).timeZone(tz).build();
|
||||
assertThat(tzRounding.round(time("2014-10-26T00:30:00+03:00")), isDate(time("2014-10-26T00:00:00+03:00"), tz));
|
||||
assertThat(tzRounding.round(time("2014-10-26T01:30:00+03:00")), isDate(time("2014-10-26T01:00:00+03:00"), tz));
|
||||
// the utc date for "2014-10-26T03:00:00+03:00" and "2014-10-26T02:00:00+02:00" is the same, local time turns back 1h here
assertThat(time("2014-10-26T03:00:00+03:00"), isDate(time("2014-10-26T02:00:00+02:00"), tz));
|
||||
assertThat(tzRounding.round(time("2014-10-26T01:30:00+02:00")), isDate(time("2014-10-26T01:00:00+02:00"), tz));
|
||||
assertThat(tzRounding.round(time("2014-10-26T02:30:00+02:00")), isDate(time("2014-10-26T02:00:00+02:00"), tz));
|
||||
|
||||
// Day interval
|
||||
tzRounding = TimeZoneRounding.builder(DateTimeUnit.DAY_OF_MONTH).timeZone(JERUSALEM_TIMEZONE).build();
|
||||
assertThat(tzRounding.round(time("2014-11-11T17:00:00", JERUSALEM_TIMEZONE)), equalTo(time("2014-11-11T00:00:00", JERUSALEM_TIMEZONE)));
|
||||
tzRounding = TimeZoneRounding.builder(DateTimeUnit.DAY_OF_MONTH).timeZone(tz).build();
|
||||
assertThat(tzRounding.round(time("2014-11-11T17:00:00", tz)), isDate(time("2014-11-11T00:00:00", tz), tz));
|
||||
// DST on
|
||||
assertThat(tzRounding.round(time("2014-08-11T17:00:00", JERUSALEM_TIMEZONE)), equalTo(time("2014-08-11T00:00:00", JERUSALEM_TIMEZONE)));
|
||||
assertThat(tzRounding.round(time("2014-08-11T17:00:00", tz)), isDate(time("2014-08-11T00:00:00", tz), tz));
|
||||
// Day of switching DST on -> off
|
||||
assertThat(tzRounding.round(time("2014-10-26T17:00:00", JERUSALEM_TIMEZONE)), equalTo(time("2014-10-26T00:00:00", JERUSALEM_TIMEZONE)));
|
||||
assertThat(tzRounding.round(time("2014-10-26T17:00:00", tz)), isDate(time("2014-10-26T00:00:00", tz), tz));
|
||||
// Day of switching DST off -> on
|
||||
assertThat(tzRounding.round(time("2015-03-27T17:00:00", JERUSALEM_TIMEZONE)), equalTo(time("2015-03-27T00:00:00", JERUSALEM_TIMEZONE)));
|
||||
assertThat(tzRounding.round(time("2015-03-27T17:00:00", tz)), isDate(time("2015-03-27T00:00:00", tz), tz));
|
||||
|
||||
// Month interval
|
||||
tzRounding = TimeZoneRounding.builder(DateTimeUnit.MONTH_OF_YEAR).timeZone(JERUSALEM_TIMEZONE).build();
|
||||
assertThat(tzRounding.round(time("2014-11-11T17:00:00", JERUSALEM_TIMEZONE)), equalTo(time("2014-11-01T00:00:00", JERUSALEM_TIMEZONE)));
|
||||
tzRounding = TimeZoneRounding.builder(DateTimeUnit.MONTH_OF_YEAR).timeZone(tz).build();
|
||||
assertThat(tzRounding.round(time("2014-11-11T17:00:00", tz)), isDate(time("2014-11-01T00:00:00", tz), tz));
|
||||
// DST on
|
||||
assertThat(tzRounding.round(time("2014-10-10T17:00:00", JERUSALEM_TIMEZONE)), equalTo(time("2014-10-01T00:00:00", JERUSALEM_TIMEZONE)));
|
||||
assertThat(tzRounding.round(time("2014-10-10T17:00:00", tz)), isDate(time("2014-10-01T00:00:00", tz), tz));
|
||||
|
||||
// Year interval
|
||||
tzRounding = TimeZoneRounding.builder(DateTimeUnit.YEAR_OF_CENTURY).timeZone(JERUSALEM_TIMEZONE).build();
|
||||
assertThat(tzRounding.round(time("2014-11-11T17:00:00", JERUSALEM_TIMEZONE)), equalTo(time("2014-01-01T00:00:00", JERUSALEM_TIMEZONE)));
|
||||
tzRounding = TimeZoneRounding.builder(DateTimeUnit.YEAR_OF_CENTURY).timeZone(tz).build();
|
||||
assertThat(tzRounding.round(time("2014-11-11T17:00:00", tz)), isDate(time("2014-01-01T00:00:00", tz), tz));
|
||||
|
||||
// Two timestamps in same year and different timezone offset ("Double buckets" issue - #9491)
|
||||
tzRounding = TimeZoneRounding.builder(DateTimeUnit.YEAR_OF_CENTURY).timeZone(JERUSALEM_TIMEZONE).build();
|
||||
assertThat(tzRounding.round(time("2014-11-11T17:00:00", JERUSALEM_TIMEZONE)),
|
||||
equalTo(tzRounding.round(time("2014-08-11T17:00:00", JERUSALEM_TIMEZONE))));
|
||||
tzRounding = TimeZoneRounding.builder(DateTimeUnit.YEAR_OF_CENTURY).timeZone(tz).build();
|
||||
assertThat(tzRounding.round(time("2014-11-11T17:00:00", tz)),
|
||||
isDate(tzRounding.round(time("2014-08-11T17:00:00", tz)), tz));
|
||||
}
|
||||
|
||||
/**
|
||||
@ -417,20 +411,20 @@ public class TimeZoneRoundingTests extends ESTestCase {
|
||||
{
|
||||
// standard +/-1 hour DST transition, CET
|
||||
DateTimeUnit timeUnit = DateTimeUnit.HOUR_OF_DAY;
|
||||
DateTimeZone timezone = DateTimeZone.forID("CET");
|
||||
TimeZoneRounding rounding = new TimeZoneRounding.TimeUnitRounding(timeUnit, timezone);
|
||||
DateTimeZone tz = DateTimeZone.forID("CET");
|
||||
TimeZoneRounding rounding = new TimeZoneRounding.TimeUnitRounding(timeUnit, tz);
|
||||
|
||||
// 29 Mar 2015 - Daylight Saving Time Started
|
||||
// at 02:00:00 clocks were turned forward 1 hour to 03:00:00
|
||||
assertInterval(time("2015-03-29T00:00:00.000+01:00"), time("2015-03-29T01:00:00.000+01:00"), rounding, 60);
|
||||
assertInterval(time("2015-03-29T01:00:00.000+01:00"), time("2015-03-29T03:00:00.000+02:00"), rounding, 60);
|
||||
assertInterval(time("2015-03-29T03:00:00.000+02:00"), time("2015-03-29T04:00:00.000+02:00"), rounding, 60);
|
||||
assertInterval(time("2015-03-29T00:00:00.000+01:00"), time("2015-03-29T01:00:00.000+01:00"), rounding, 60, tz);
|
||||
assertInterval(time("2015-03-29T01:00:00.000+01:00"), time("2015-03-29T03:00:00.000+02:00"), rounding, 60, tz);
|
||||
assertInterval(time("2015-03-29T03:00:00.000+02:00"), time("2015-03-29T04:00:00.000+02:00"), rounding, 60, tz);
|
||||
|
||||
// 25 Oct 2015 - Daylight Saving Time Ended
|
||||
// at 03:00:00 clocks were turned backward 1 hour to 02:00:00
|
||||
assertInterval(time("2015-10-25T01:00:00.000+02:00"), time("2015-10-25T02:00:00.000+02:00"), rounding, 60);
|
||||
assertInterval(time("2015-10-25T02:00:00.000+02:00"), time("2015-10-25T02:00:00.000+01:00"), rounding, 60);
|
||||
assertInterval(time("2015-10-25T02:00:00.000+01:00"), time("2015-10-25T03:00:00.000+01:00"), rounding, 60);
|
||||
assertInterval(time("2015-10-25T01:00:00.000+02:00"), time("2015-10-25T02:00:00.000+02:00"), rounding, 60, tz);
|
||||
assertInterval(time("2015-10-25T02:00:00.000+02:00"), time("2015-10-25T02:00:00.000+01:00"), rounding, 60, tz);
|
||||
assertInterval(time("2015-10-25T02:00:00.000+01:00"), time("2015-10-25T03:00:00.000+01:00"), rounding, 60, tz);
|
||||
}
|
||||
|
||||
{
|
||||
@ -441,12 +435,12 @@ public class TimeZoneRoundingTests extends ESTestCase {
|
||||
// the interval in between is 105 minutes long because the hour after the transition starts at 00:15
// which is not a round value for hourly rounding
|
||||
DateTimeUnit timeUnit = DateTimeUnit.HOUR_OF_DAY;
|
||||
DateTimeZone timezone = DateTimeZone.forID("Asia/Kathmandu");
|
||||
TimeZoneRounding rounding = new TimeZoneRounding.TimeUnitRounding(timeUnit, timezone);
|
||||
DateTimeZone tz = DateTimeZone.forID("Asia/Kathmandu");
|
||||
TimeZoneRounding rounding = new TimeZoneRounding.TimeUnitRounding(timeUnit, tz);
|
||||
|
||||
assertInterval(time("1985-12-31T22:00:00.000+05:30"), time("1985-12-31T23:00:00.000+05:30"), rounding, 60);
|
||||
assertInterval(time("1985-12-31T23:00:00.000+05:30"), time("1986-01-01T01:00:00.000+05:45"), rounding, 105);
|
||||
assertInterval(time("1986-01-01T01:00:00.000+05:45"), time("1986-01-01T02:00:00.000+05:45"), rounding, 60);
|
||||
assertInterval(time("1985-12-31T22:00:00.000+05:30"), time("1985-12-31T23:00:00.000+05:30"), rounding, 60, tz);
|
||||
assertInterval(time("1985-12-31T23:00:00.000+05:30"), time("1986-01-01T01:00:00.000+05:45"), rounding, 105, tz);
|
||||
assertInterval(time("1986-01-01T01:00:00.000+05:45"), time("1986-01-01T02:00:00.000+05:45"), rounding, 60, tz);
|
||||
}
|
||||
|
||||
{
|
||||
@ -454,19 +448,19 @@ public class TimeZoneRoundingTests extends ESTestCase {
|
||||
// 3 Mar 1991 - Daylight Saving Time Ended
|
||||
// at 02:00:00 clocks were turned backward 0:30 hours to Sunday, 3 March 1991, 01:30:00
|
||||
DateTimeUnit timeUnit = DateTimeUnit.HOUR_OF_DAY;
|
||||
DateTimeZone timezone = DateTimeZone.forID("Australia/Lord_Howe");
|
||||
TimeZoneRounding rounding = new TimeZoneRounding.TimeUnitRounding(timeUnit, timezone);
|
||||
DateTimeZone tz = DateTimeZone.forID("Australia/Lord_Howe");
|
||||
TimeZoneRounding rounding = new TimeZoneRounding.TimeUnitRounding(timeUnit, tz);
|
||||
|
||||
assertInterval(time("1991-03-03T00:00:00.000+11:00"), time("1991-03-03T01:00:00.000+11:00"), rounding, 60);
|
||||
assertInterval(time("1991-03-03T01:00:00.000+11:00"), time("1991-03-03T02:00:00.000+10:30"), rounding, 90);
|
||||
assertInterval(time("1991-03-03T02:00:00.000+10:30"), time("1991-03-03T03:00:00.000+10:30"), rounding, 60);
|
||||
assertInterval(time("1991-03-03T00:00:00.000+11:00"), time("1991-03-03T01:00:00.000+11:00"), rounding, 60, tz);
|
||||
assertInterval(time("1991-03-03T01:00:00.000+11:00"), time("1991-03-03T02:00:00.000+10:30"), rounding, 90, tz);
|
||||
assertInterval(time("1991-03-03T02:00:00.000+10:30"), time("1991-03-03T03:00:00.000+10:30"), rounding, 60, tz);
|
||||
|
||||
// 27 Oct 1991 - Daylight Saving Time Started
|
||||
// at 02:00:00 clocks were turned forward 0:30 hours to 02:30:00
|
||||
assertInterval(time("1991-10-27T00:00:00.000+10:30"), time("1991-10-27T01:00:00.000+10:30"), rounding, 60);
|
||||
assertInterval(time("1991-10-27T00:00:00.000+10:30"), time("1991-10-27T01:00:00.000+10:30"), rounding, 60, tz);
|
||||
// the interval containing the switch time is 90 minutes long
|
||||
assertInterval(time("1991-10-27T01:00:00.000+10:30"), time("1991-10-27T03:00:00.000+11:00"), rounding, 90);
|
||||
assertInterval(time("1991-10-27T03:00:00.000+11:00"), time("1991-10-27T04:00:00.000+11:00"), rounding, 60);
|
||||
assertInterval(time("1991-10-27T01:00:00.000+10:30"), time("1991-10-27T03:00:00.000+11:00"), rounding, 90, tz);
|
||||
assertInterval(time("1991-10-27T03:00:00.000+11:00"), time("1991-10-27T04:00:00.000+11:00"), rounding, 60, tz);
|
||||
}
|
||||
|
||||
{
|
||||
@ -474,24 +468,25 @@ public class TimeZoneRoundingTests extends ESTestCase {
|
||||
// 5 Apr 2015 - Daylight Saving Time Ended
|
||||
// at 03:45:00 clocks were turned backward 1 hour to 02:45:00
|
||||
DateTimeUnit timeUnit = DateTimeUnit.HOUR_OF_DAY;
|
||||
DateTimeZone timezone = DateTimeZone.forID("Pacific/Chatham");
|
||||
TimeZoneRounding rounding = new TimeZoneRounding.TimeUnitRounding(timeUnit, timezone);
|
||||
DateTimeZone tz = DateTimeZone.forID("Pacific/Chatham");
|
||||
TimeZoneRounding rounding = new TimeZoneRounding.TimeUnitRounding(timeUnit, tz);
|
||||
|
||||
assertInterval(time("2015-04-05T02:00:00.000+13:45"), time("2015-04-05T03:00:00.000+13:45"), rounding, 60);
|
||||
assertInterval(time("2015-04-05T03:00:00.000+13:45"), time("2015-04-05T03:00:00.000+12:45"), rounding, 60);
|
||||
assertInterval(time("2015-04-05T03:00:00.000+12:45"), time("2015-04-05T04:00:00.000+12:45"), rounding, 60);
|
||||
assertInterval(time("2015-04-05T02:00:00.000+13:45"), time("2015-04-05T03:00:00.000+13:45"), rounding, 60, tz);
|
||||
assertInterval(time("2015-04-05T03:00:00.000+13:45"), time("2015-04-05T03:00:00.000+12:45"), rounding, 60, tz);
|
||||
assertInterval(time("2015-04-05T03:00:00.000+12:45"), time("2015-04-05T04:00:00.000+12:45"), rounding, 60, tz);
|
||||
|
||||
// 27 Sep 2015 - Daylight Saving Time Started
|
||||
// at 02:45:00 clocks were turned forward 1 hour to 03:45:00
|
||||
|
||||
assertInterval(time("2015-09-27T01:00:00.000+12:45"), time("2015-09-27T02:00:00.000+12:45"), rounding, 60);
|
||||
assertInterval(time("2015-09-27T02:00:00.000+12:45"), time("2015-09-27T04:00:00.000+13:45"), rounding, 60);
|
||||
assertInterval(time("2015-09-27T04:00:00.000+13:45"), time("2015-09-27T05:00:00.000+13:45"), rounding, 60);
|
||||
assertInterval(time("2015-09-27T01:00:00.000+12:45"), time("2015-09-27T02:00:00.000+12:45"), rounding, 60, tz);
|
||||
assertInterval(time("2015-09-27T02:00:00.000+12:45"), time("2015-09-27T04:00:00.000+13:45"), rounding, 60, tz);
|
||||
assertInterval(time("2015-09-27T04:00:00.000+13:45"), time("2015-09-27T05:00:00.000+13:45"), rounding, 60, tz);
|
||||
}
|
||||
}
|
||||
|
||||
private static void assertInterval(long rounded, long nextRoundingValue, TimeZoneRounding rounding, int minutes) {
|
||||
assertInterval(rounded, dateBetween(rounded, nextRoundingValue), nextRoundingValue, rounding);
|
||||
private static void assertInterval(long rounded, long nextRoundingValue, TimeZoneRounding rounding, int minutes,
|
||||
DateTimeZone tz) {
|
||||
assertInterval(rounded, dateBetween(rounded, nextRoundingValue), nextRoundingValue, rounding, tz);
|
||||
assertEquals(DateTimeConstants.MILLIS_PER_MINUTE * minutes, nextRoundingValue - rounded);
|
||||
}
|
||||
|
||||
@ -502,20 +497,21 @@ public class TimeZoneRoundingTests extends ESTestCase {
|
||||
* @param nextRoundingValue the expected upper end of the rounding interval
|
||||
* @param rounding the rounding instance
|
||||
*/
|
||||
private static void assertInterval(long rounded, long unrounded, long nextRoundingValue, TimeZoneRounding rounding) {
|
||||
private static void assertInterval(long rounded, long unrounded, long nextRoundingValue, TimeZoneRounding rounding,
|
||||
DateTimeZone tz) {
|
||||
assert rounded <= unrounded && unrounded <= nextRoundingValue;
|
||||
assertThat("rounding should be idempotent " + rounding, rounded, equalTo(rounding.round(rounded)));
|
||||
assertThat("rounding should be idempotent ", rounding.round(rounded), isDate(rounded, tz));
|
||||
assertThat("rounded value smaller or equal than unrounded" + rounding, rounded, lessThanOrEqualTo(unrounded));
|
||||
assertThat("values less than rounded should round further down" + rounding, rounding.round(rounded - 1), lessThan(rounded));
|
||||
assertThat("nextRounding value should be greater than date" + rounding, nextRoundingValue, greaterThan(unrounded));
|
||||
assertThat("nextRounding value should be a rounded date" + rounding, nextRoundingValue, equalTo(rounding.round(nextRoundingValue)));
|
||||
assertThat("values above nextRounding should round down there" + rounding, rounding.round(nextRoundingValue + 1),
|
||||
equalTo(nextRoundingValue));
|
||||
assertThat("nextRounding value should be a rounded date", rounding.round(nextRoundingValue), isDate(nextRoundingValue, tz));
|
||||
assertThat("values above nextRounding should round down there", rounding.round(nextRoundingValue + 1),
|
||||
isDate(nextRoundingValue, tz));
|
||||
|
||||
long dateBetween = dateBetween(rounded, nextRoundingValue);
|
||||
assertThat("dateBetween should round down to roundedDate" + rounding, rounding.round(dateBetween), equalTo(rounded));
|
||||
assertThat("dateBetween should round up to nextRoundingValue" + rounding, rounding.nextRoundingValue(dateBetween),
|
||||
equalTo(nextRoundingValue));
|
||||
assertThat("dateBetween should round down to roundedDate", rounding.round(dateBetween), isDate(rounded, tz));
|
||||
assertThat("dateBetween should round up to nextRoundingValue", rounding.nextRoundingValue(dateBetween),
|
||||
isDate(nextRoundingValue, tz));
|
||||
}
|
||||
|
||||
private static long dateBetween(long lower, long upper) {
@ -529,19 +525,30 @@ public class TimeZoneRoundingTests extends ESTestCase {
return DateTimeUnit.resolve(id);
}

private static String toUTCDateString(long time) {
return new DateTime(time, DateTimeZone.UTC).toString();
}

private static long utc(String time) {
return time(time, DateTimeZone.UTC);
}

private static long time(String time) {
return ISODateTimeFormat.dateOptionalTimeParser().parseMillis(time);
return time(time, DateTimeZone.UTC);
}

private static long time(String time, DateTimeZone zone) {
return ISODateTimeFormat.dateOptionalTimeParser().withZone(zone).parseMillis(time);
}

private static Matcher<Long> isDate(final long expected, DateTimeZone tz) {
return new TypeSafeMatcher<Long>() {
@Override
public boolean matchesSafely(final Long item) {
return expected == item.longValue();
}

@Override
public void describeTo(Description description) {
description.appendText("Expected: " + new DateTime(expected, tz) + " [" + expected + "] ");
}

@Override
protected void describeMismatchSafely(final Long actual, final Description mismatchDescription) {
mismatchDescription.appendText(" was ").appendValue(new DateTime(actual, tz) + " [" + actual + "]");
}
};
}
}
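A minimal, self-contained sketch (not part of the commit, class name hypothetical) of why the assertions in this class move from equalTo to the zone-aware isDate matcher defined above: a plain equality mismatch only reports epoch millis, while the matcher's description renders both instants in the timezone under test. Only Joda-Time, which the tests already use, is assumed.

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.format.ISODateTimeFormat;

public class WhyZoneAwareMatchers {
    public static void main(String[] args) {
        DateTimeZone tz = DateTimeZone.forID("CET");
        // parse exactly like the time(String, DateTimeZone) helper in the test class above
        long millis = ISODateTimeFormat.dateOptionalTimeParser().withZone(tz).parseMillis("2015-10-25T02:00:00+01:00");
        // an equalTo mismatch would only print this raw epoch value ...
        System.out.println(millis);
        // ... whereas isDate(expected, tz) describes both sides as DateTime instances in the zone under test
        System.out.println(new DateTime(millis, tz));
    }
}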
|
@ -21,6 +21,10 @@ package org.elasticsearch.common.settings;
|
||||
|
||||
import org.elasticsearch.common.inject.ModuleTestCase;
|
||||
import org.elasticsearch.common.settings.Setting.Property;
|
||||
import org.joda.time.MonthDay;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
import static org.hamcrest.Matchers.is;
|
||||
@ -35,18 +39,16 @@ public class SettingsModuleTests extends ModuleTestCase {
|
||||
}
|
||||
{
|
||||
Settings settings = Settings.builder().put("cluster.routing.allocation.balance.shard", "[2.0]").build();
|
||||
SettingsModule module = new SettingsModule(settings);
|
||||
IllegalArgumentException ex = expectThrows(IllegalArgumentException.class,
|
||||
() -> assertInstanceBinding(module, Settings.class, (s) -> s == settings));
|
||||
() -> new SettingsModule(settings));
|
||||
assertEquals("Failed to parse value [[2.0]] for setting [cluster.routing.allocation.balance.shard]", ex.getMessage());
|
||||
}
|
||||
|
||||
{
|
||||
Settings settings = Settings.builder().put("cluster.routing.allocation.balance.shard", "[2.0]")
|
||||
.put("some.foo.bar", 1).build();
|
||||
SettingsModule module = new SettingsModule(settings);
|
||||
IllegalArgumentException ex = expectThrows(IllegalArgumentException.class,
|
||||
() -> assertInstanceBinding(module, Settings.class, (s) -> s == settings));
|
||||
() -> new SettingsModule(settings));
|
||||
assertEquals("Failed to parse value [[2.0]] for setting [cluster.routing.allocation.balance.shard]", ex.getMessage());
|
||||
assertEquals(1, ex.getSuppressed().length);
|
||||
assertEquals("unknown setting [some.foo.bar]", ex.getSuppressed()[0].getMessage());
|
||||
@ -55,9 +57,8 @@ public class SettingsModuleTests extends ModuleTestCase {
|
||||
{
|
||||
Settings settings = Settings.builder().put("index.codec", "default")
|
||||
.put("index.foo.bar", 1).build();
|
||||
SettingsModule module = new SettingsModule(settings);
|
||||
IllegalArgumentException ex = expectThrows(IllegalArgumentException.class,
|
||||
() -> assertInstanceBinding(module, Settings.class, (s) -> s == settings));
|
||||
() -> new SettingsModule(settings));
|
||||
assertEquals("node settings must not contain any index level settings", ex.getMessage());
|
||||
}
|
||||
|
||||
@ -71,16 +72,13 @@ public class SettingsModuleTests extends ModuleTestCase {
|
||||
public void testRegisterSettings() {
|
||||
{
|
||||
Settings settings = Settings.builder().put("some.custom.setting", "2.0").build();
|
||||
SettingsModule module = new SettingsModule(settings);
|
||||
module.registerSetting(Setting.floatSetting("some.custom.setting", 1.0f, Property.NodeScope));
|
||||
SettingsModule module = new SettingsModule(settings, Setting.floatSetting("some.custom.setting", 1.0f, Property.NodeScope));
|
||||
assertInstanceBinding(module, Settings.class, (s) -> s == settings);
|
||||
}
|
||||
{
|
||||
Settings settings = Settings.builder().put("some.custom.setting", "false").build();
|
||||
SettingsModule module = new SettingsModule(settings);
|
||||
module.registerSetting(Setting.floatSetting("some.custom.setting", 1.0f, Property.NodeScope));
|
||||
try {
|
||||
assertInstanceBinding(module, Settings.class, (s) -> s == settings);
|
||||
new SettingsModule(settings, Setting.floatSetting("some.custom.setting", 1.0f, Property.NodeScope));
|
||||
fail();
|
||||
} catch (IllegalArgumentException ex) {
|
||||
assertEquals("Failed to parse value [false] for setting [some.custom.setting]", ex.getMessage());
|
||||
@ -96,9 +94,8 @@ public class SettingsModuleTests extends ModuleTestCase {
|
||||
}
|
||||
{
|
||||
Settings settings = Settings.builder().put("tribe.t1.cluster.routing.allocation.balance.shard", "[2.0]").build();
|
||||
SettingsModule module = new SettingsModule(settings);
|
||||
try {
|
||||
assertInstanceBinding(module, Settings.class, (s) -> s == settings);
|
||||
new SettingsModule(settings);
|
||||
fail();
|
||||
} catch (IllegalArgumentException ex) {
|
||||
assertEquals(
|
||||
@ -116,9 +113,8 @@ public class SettingsModuleTests extends ModuleTestCase {
|
||||
}
|
||||
{
|
||||
Settings settings = Settings.builder().put("tribe.blocks.write", "BOOM").build();
|
||||
SettingsModule module = new SettingsModule(settings);
|
||||
try {
|
||||
assertInstanceBinding(module, Settings.class, (s) -> s == settings);
|
||||
new SettingsModule(settings);
|
||||
fail();
|
||||
} catch (IllegalArgumentException ex) {
|
||||
assertEquals("Failed to parse value [BOOM] cannot be parsed to boolean [ true/1/on/yes OR false/0/off/no ]",
|
||||
@ -127,9 +123,8 @@ public class SettingsModuleTests extends ModuleTestCase {
|
||||
}
|
||||
{
|
||||
Settings settings = Settings.builder().put("tribe.blocks.wtf", "BOOM").build();
|
||||
SettingsModule module = new SettingsModule(settings);
|
||||
try {
|
||||
assertInstanceBinding(module, Settings.class, (s) -> s == settings);
|
||||
new SettingsModule(settings);
|
||||
fail();
|
||||
} catch (IllegalArgumentException ex) {
|
||||
assertEquals("tribe.blocks validation failed: unknown setting [wtf]", ex.getMessage());
|
||||
@ -147,9 +142,8 @@ public class SettingsModuleTests extends ModuleTestCase {
|
||||
|
||||
{
|
||||
Settings settings = Settings.builder().put("logger._root", "BOOM").put("logger.transport", "WOW").build();
|
||||
SettingsModule module = new SettingsModule(settings);
|
||||
try {
|
||||
assertInstanceBinding(module, Settings.class, (s) -> s == settings);
|
||||
new SettingsModule(settings);
|
||||
fail();
|
||||
} catch (IllegalArgumentException ex) {
|
||||
assertEquals("No enum constant org.elasticsearch.common.logging.ESLoggerFactory.LogLevel.BOOM", ex.getMessage());
|
||||
@ -160,18 +154,17 @@ public class SettingsModuleTests extends ModuleTestCase {
|
||||
|
||||
public void testRegisterSettingsFilter() {
|
||||
Settings settings = Settings.builder().put("foo.bar", "false").put("bar.foo", false).put("bar.baz", false).build();
|
||||
SettingsModule module = new SettingsModule(settings);
|
||||
module.registerSetting(Setting.boolSetting("foo.bar", true, Property.NodeScope));
|
||||
module.registerSetting(Setting.boolSetting("bar.foo", true, Property.NodeScope, Property.Filtered));
|
||||
module.registerSetting(Setting.boolSetting("bar.baz", true, Property.NodeScope));
|
||||
|
||||
module.registerSettingsFilter("foo.*");
|
||||
try {
|
||||
module.registerSettingsFilter("bar.foo");
|
||||
new SettingsModule(settings, Arrays.asList(Setting.boolSetting("foo.bar", true, Property.NodeScope),
|
||||
Setting.boolSetting("bar.foo", true, Property.NodeScope, Property.Filtered),
|
||||
Setting.boolSetting("bar.baz", true, Property.NodeScope)), Arrays.asList("foo.*", "bar.foo"));
|
||||
fail();
|
||||
} catch (IllegalArgumentException ex) {
|
||||
assertEquals("filter [bar.foo] has already been registered", ex.getMessage());
|
||||
}
|
||||
SettingsModule module = new SettingsModule(settings, Arrays.asList(Setting.boolSetting("foo.bar", true, Property.NodeScope),
|
||||
Setting.boolSetting("bar.foo", true, Property.NodeScope, Property.Filtered),
|
||||
Setting.boolSetting("bar.baz", true, Property.NodeScope)), Arrays.asList("foo.*"));
|
||||
assertInstanceBinding(module, Settings.class, (s) -> s == settings);
|
||||
assertInstanceBinding(module, SettingsFilter.class, (s) -> s.filter(settings).getAsMap().size() == 1);
|
||||
assertInstanceBinding(module, SettingsFilter.class, (s) -> s.filter(settings).getAsMap().containsKey("bar.baz"));
|
||||
@ -180,29 +173,30 @@ public class SettingsModuleTests extends ModuleTestCase {
|
||||
}
|
||||
|
||||
public void testMutuallyExclusiveScopes() {
|
||||
new SettingsModule(Settings.EMPTY).registerSetting(Setting.simpleString("foo.bar", Property.NodeScope));
|
||||
new SettingsModule(Settings.EMPTY).registerSetting(Setting.simpleString("foo.bar", Property.IndexScope));
|
||||
new SettingsModule(Settings.EMPTY, Setting.simpleString("foo.bar", Property.NodeScope));
|
||||
new SettingsModule(Settings.EMPTY, Setting.simpleString("index.foo.bar", Property.IndexScope));
|
||||
|
||||
// Those should fail
|
||||
try {
|
||||
new SettingsModule(Settings.EMPTY).registerSetting(Setting.simpleString("foo.bar"));
|
||||
new SettingsModule(Settings.EMPTY, Setting.simpleString("foo.bar"));
|
||||
fail("No scope should fail");
|
||||
} catch (IllegalArgumentException e) {
|
||||
assertThat(e.getMessage(), containsString("No scope found for setting"));
|
||||
}
|
||||
// Some settings have both scopes - that's fine too if they have per-node defaults
|
||||
SettingsModule module = new SettingsModule(Settings.EMPTY);
|
||||
module.registerSetting(Setting.simpleString("foo.bar", Property.IndexScope, Property.NodeScope));
|
||||
|
||||
try {
|
||||
module.registerSetting(Setting.simpleString("foo.bar", Property.NodeScope));
|
||||
new SettingsModule(Settings.EMPTY,
|
||||
Setting.simpleString("foo.bar", Property.IndexScope, Property.NodeScope),
|
||||
Setting.simpleString("foo.bar", Property.NodeScope));
|
||||
fail("already registered");
|
||||
} catch (IllegalArgumentException e) {
|
||||
assertThat(e.getMessage(), containsString("Cannot register setting [foo.bar] twice"));
|
||||
}
|
||||
|
||||
try {
|
||||
module.registerSetting(Setting.simpleString("foo.bar", Property.IndexScope));
|
||||
new SettingsModule(Settings.EMPTY,
|
||||
Setting.simpleString("foo.bar", Property.IndexScope, Property.NodeScope),
|
||||
Setting.simpleString("foo.bar", Property.IndexScope));
|
||||
fail("already registered");
|
||||
} catch (IllegalArgumentException e) {
|
||||
assertThat(e.getMessage(), containsString("Cannot register setting [foo.bar] twice"));
|
||||
@ -211,10 +205,28 @@ public class SettingsModuleTests extends ModuleTestCase {
|
||||
|
||||
public void testOldMaxClauseCountSetting() {
|
||||
Settings settings = Settings.builder().put("index.query.bool.max_clause_count", 1024).build();
|
||||
SettingsModule module = new SettingsModule(settings);
|
||||
IllegalArgumentException ex = expectThrows(IllegalArgumentException.class,
|
||||
() -> assertInstanceBinding(module, Settings.class, (s) -> s == settings));
|
||||
() -> new SettingsModule(settings));
|
||||
assertEquals("unknown setting [index.query.bool.max_clause_count] did you mean [indices.query.bool.max_clause_count]?",
|
||||
ex.getMessage());
|
||||
}
|
||||
|
||||
public void testRegisterShared() {
|
||||
Property scope = randomFrom(Property.NodeScope, Property.IndexScope);
|
||||
expectThrows(IllegalArgumentException.class, () ->
|
||||
new SettingsModule(Settings.EMPTY,
|
||||
Setting.simpleString("index.foo.bar", scope), Setting.simpleString("index.foo.bar", scope))
|
||||
);
|
||||
expectThrows(IllegalArgumentException.class, () ->
|
||||
new SettingsModule(Settings.EMPTY,
|
||||
Setting.simpleString("index.foo.bar", scope, Property.Shared), Setting.simpleString("index.foo.bar", scope))
|
||||
);
|
||||
expectThrows(IllegalArgumentException.class, () ->
|
||||
new SettingsModule(Settings.EMPTY,
|
||||
Setting.simpleString("index.foo.bar", scope), Setting.simpleString("index.foo.bar", scope, Property.Shared))
|
||||
);
|
||||
new SettingsModule(Settings.EMPTY,
|
||||
Setting.simpleString("index.foo.bar", scope, Property.Shared),
|
||||
Setting.simpleString("index.foo.bar", scope, Property.Shared));
|
||||
}
|
||||
}
|
||||
|
@ -19,6 +19,7 @@
|
||||
|
||||
package org.elasticsearch.fieldstats;
|
||||
|
||||
import org.apache.lucene.document.HalfFloatPoint;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.elasticsearch.action.ActionRequestValidationException;
|
||||
import org.elasticsearch.action.fieldstats.FieldStats;
|
||||
@ -54,6 +55,7 @@ public class FieldStatsIntegrationIT extends ESIntegTestCase {
|
||||
"string", "type=text",
|
||||
"date", "type=date",
|
||||
"double", "type=double",
|
||||
"half_float", "type=half_float",
|
||||
"float", "type=float",
|
||||
"long", "type=long",
|
||||
"integer", "type=integer",
|
||||
@ -67,6 +69,7 @@ public class FieldStatsIntegrationIT extends ESIntegTestCase {
|
||||
"string", "type=text,index=false",
|
||||
"date", "type=date,index=false",
|
||||
"double", "type=double,index=false",
|
||||
"half_float", "type=half_float",
|
||||
"float", "type=float,index=false",
|
||||
"long", "type=long,index=false",
|
||||
"integer", "type=integer,index=false",
|
||||
@ -81,6 +84,7 @@ public class FieldStatsIntegrationIT extends ESIntegTestCase {
|
||||
"string", "type=text,index=false",
|
||||
"date", "type=date,index=false",
|
||||
"double", "type=double,index=false",
|
||||
"half_float", "type=half_float",
|
||||
"float", "type=float,index=false",
|
||||
"long", "type=long,index=false",
|
||||
"integer", "type=integer,index=false",
|
||||
@ -97,10 +101,12 @@ public class FieldStatsIntegrationIT extends ESIntegTestCase {
|
||||
long maxInt = Integer.MIN_VALUE;
|
||||
long minLong = Long.MAX_VALUE;
|
||||
long maxLong = Long.MIN_VALUE;
|
||||
double minFloat = Float.MAX_VALUE;
|
||||
double maxFloat = Float.MIN_VALUE;
|
||||
double minDouble = Double.MAX_VALUE;
|
||||
double maxDouble = Double.MIN_VALUE;
|
||||
double minHalfFloat = Double.POSITIVE_INFINITY;
|
||||
double maxHalfFloat = Double.NEGATIVE_INFINITY;
|
||||
double minFloat = Double.POSITIVE_INFINITY;
|
||||
double maxFloat = Double.NEGATIVE_INFINITY;
|
||||
double minDouble = Double.POSITIVE_INFINITY;
|
||||
double maxDouble = Double.NEGATIVE_INFINITY;
|
||||
String minString = new String(Character.toChars(1114111));
|
||||
String maxString = "0";
|
||||
|
||||
@ -119,6 +125,10 @@ public class FieldStatsIntegrationIT extends ESIntegTestCase {
|
||||
long l = randomLong();
|
||||
minLong = Math.min(minLong, l);
|
||||
maxLong = Math.max(maxLong, l);
|
||||
float hf = randomFloat();
|
||||
hf = HalfFloatPoint.sortableShortToHalfFloat(HalfFloatPoint.halfFloatToSortableShort(hf));
|
||||
minHalfFloat = Math.min(minHalfFloat, hf);
|
||||
maxHalfFloat = Math.max(maxHalfFloat, hf);
|
||||
float f = randomFloat();
|
||||
minFloat = Math.min(minFloat, f);
|
||||
maxFloat = Math.max(maxFloat, f);
|
||||
@ -138,6 +148,7 @@ public class FieldStatsIntegrationIT extends ESIntegTestCase {
|
||||
"short", s,
|
||||
"integer", i,
|
||||
"long", l,
|
||||
"half_float", hf,
|
||||
"float", f,
|
||||
"double", d,
|
||||
"string", str)
|
||||
@ -147,7 +158,7 @@ public class FieldStatsIntegrationIT extends ESIntegTestCase {
|
||||
|
||||
FieldStatsResponse response = client()
|
||||
.prepareFieldStats()
|
||||
.setFields("byte", "short", "integer", "long", "float", "double", "string").get();
|
||||
.setFields("byte", "short", "integer", "long", "half_float", "float", "double", "string").get();
|
||||
assertAllSuccessful(response);
|
||||
|
||||
for (FieldStats<?> stats : response.getAllFieldStats().values()) {
|
||||
@ -164,6 +175,8 @@ public class FieldStatsIntegrationIT extends ESIntegTestCase {
|
||||
assertThat(response.getAllFieldStats().get("integer").getMaxValue(), equalTo(maxInt));
|
||||
assertThat(response.getAllFieldStats().get("long").getMinValue(), equalTo(minLong));
|
||||
assertThat(response.getAllFieldStats().get("long").getMaxValue(), equalTo(maxLong));
|
||||
assertThat(response.getAllFieldStats().get("half_float").getMinValue(), equalTo(minHalfFloat));
|
||||
assertThat(response.getAllFieldStats().get("half_float").getMaxValue(), equalTo(maxHalfFloat));
|
||||
assertThat(response.getAllFieldStats().get("float").getMinValue(), equalTo(minFloat));
|
||||
assertThat(response.getAllFieldStats().get("float").getMaxValue(), equalTo(maxFloat));
|
||||
assertThat(response.getAllFieldStats().get("double").getMinValue(), equalTo(minDouble));
|
||||
|
@ -108,6 +108,24 @@ public class FieldStatsTests extends ESSingleNodeTestCase {
|
||||
assertThat(result.getAllFieldStats().get(fieldName).getMinValueAsString(), equalTo(Double.toString(-1)));
|
||||
}
|
||||
|
||||
public void testHalfFloat() {
|
||||
String fieldName = "field";
|
||||
createIndex("test", Settings.EMPTY, "test", fieldName, "type=half_float");
|
||||
for (float value = -1; value <= 9; value++) {
|
||||
client().prepareIndex("test", "test").setSource(fieldName, value).get();
|
||||
}
|
||||
client().admin().indices().prepareRefresh().get();
|
||||
|
||||
FieldStatsResponse result = client().prepareFieldStats().setFields(fieldName).get();
|
||||
assertThat(result.getAllFieldStats().get(fieldName).getMaxDoc(), equalTo(11L));
|
||||
assertThat(result.getAllFieldStats().get(fieldName).getDocCount(), equalTo(11L));
|
||||
assertThat(result.getAllFieldStats().get(fieldName).getDensity(), equalTo(100));
|
||||
assertThat(result.getAllFieldStats().get(fieldName).getMinValue(), equalTo(-1d));
|
||||
assertThat(result.getAllFieldStats().get(fieldName).getMaxValue(), equalTo(9d));
|
||||
assertThat(result.getAllFieldStats().get(fieldName).getMinValueAsString(), equalTo(Float.toString(-1)));
|
||||
assertThat(result.getAllFieldStats().get(fieldName).getMaxValueAsString(), equalTo(Float.toString(9)));
|
||||
}
|
||||
|
||||
public void testFloat() {
|
||||
String fieldName = "field";
|
||||
createIndex("test", Settings.EMPTY, "test", fieldName, "type=float");
|
||||
|
@ -39,9 +39,12 @@ import static org.hamcrest.Matchers.greaterThan;
|
||||
import static org.hamcrest.Matchers.hasSize;
|
||||
|
||||
/**
* This test checks that in-flight requests are limited at the HTTP level and that requests that are excluded from limiting can pass.
*
* As the same setting is also used to limit in-flight requests at the transport level, we avoid transport messages by forcing
* a single node "cluster". We also force the test infrastructure to use the node client instead of the transport client for the same reason.
*/
@ClusterScope(scope = Scope.TEST, supportsDedicatedMasters = false, numDataNodes = 1)
@ClusterScope(scope = Scope.TEST, supportsDedicatedMasters = false, numClientNodes = 0, numDataNodes = 1, transportClientRatio = 0)
public class NettyHttpRequestSizeLimitIT extends ESIntegTestCase {
|
||||
private static final ByteSizeValue LIMIT = new ByteSizeValue(2, ByteSizeUnit.KB);
|
||||
|
||||
@ -89,7 +92,6 @@ public class NettyHttpRequestSizeLimitIT extends ESIntegTestCase {
|
||||
}
|
||||
}
|
||||
|
||||
@AwaitsFix(bugUrl = "muted while investigating")
|
||||
public void testDoesNotLimitExcludedRequests() throws Exception {
|
||||
ensureGreen();
|
||||
|
||||
|
@ -114,12 +114,10 @@ public class IndexModuleTests extends ESTestCase {
|
||||
ThreadPool threadPool = new TestThreadPool("test");
|
||||
CircuitBreakerService circuitBreakerService = new NoneCircuitBreakerService();
|
||||
BigArrays bigArrays = new BigArrays(settings, circuitBreakerService);
|
||||
Set<ScriptEngineService> scriptEngines = Collections.emptySet();
|
||||
scriptEngines.addAll(Arrays.asList(scriptEngineServices));
|
||||
ScriptEngineRegistry scriptEngineRegistry = new ScriptEngineRegistry(Collections.emptyList());
|
||||
ScriptEngineRegistry scriptEngineRegistry = new ScriptEngineRegistry(Arrays.asList(scriptEngineServices));
|
||||
ScriptContextRegistry scriptContextRegistry = new ScriptContextRegistry(Collections.emptyList());
|
||||
ScriptSettings scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry);
|
||||
ScriptService scriptService = new ScriptService(settings, environment, scriptEngines, new ResourceWatcherService(settings, threadPool), scriptEngineRegistry, scriptContextRegistry, scriptSettings);
|
||||
ScriptService scriptService = new ScriptService(settings, environment, new ResourceWatcherService(settings, threadPool), scriptEngineRegistry, scriptContextRegistry, scriptSettings);
|
||||
IndicesQueriesRegistry indicesQueriesRegistry = new IndicesQueriesRegistry();
|
||||
ClusterService clusterService = ClusterServiceUtils.createClusterService(threadPool);
|
||||
return new NodeServicesProvider(threadPool, bigArrays, client, scriptService, indicesQueriesRegistry, circuitBreakerService, clusterService);
|
||||
|
@ -23,13 +23,14 @@ import org.elasticsearch.common.inject.Module;
|
||||
import org.elasticsearch.common.settings.Setting;
|
||||
import org.elasticsearch.common.settings.Setting.Property;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.settings.SettingsModule;
|
||||
import org.elasticsearch.plugins.Plugin;
|
||||
import org.elasticsearch.test.ESIntegTestCase;
|
||||
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
import static org.elasticsearch.test.ESIntegTestCase.Scope.SUITE;
|
||||
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
|
||||
@ -47,8 +48,9 @@ public class SettingsListenerIT extends ESIntegTestCase {
|
||||
private static final Setting<Integer> SETTING = Setting.intSetting("index.test.new.setting", 0,
|
||||
Property.Dynamic, Property.IndexScope);
|
||||
|
||||
public void onModule(SettingsModule settingsModule) {
|
||||
settingsModule.registerSetting(SettingsTestingService.VALUE);
|
||||
@Override
|
||||
public List<Setting<?>> getSettings() {
|
||||
return Arrays.asList(SettingsTestingService.VALUE);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -30,6 +30,7 @@ import org.elasticsearch.action.update.UpdateResponse;
|
||||
import org.elasticsearch.common.network.NetworkModule;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.plugins.Plugin;
|
||||
import org.elasticsearch.plugins.ScriptPlugin;
|
||||
import org.elasticsearch.rest.RestStatus;
|
||||
import org.elasticsearch.script.ExecutableScript;
|
||||
import org.elasticsearch.script.NativeScriptFactory;
|
||||
@ -40,6 +41,8 @@ import org.elasticsearch.test.ESIntegTestCase;
|
||||
import org.junit.Before;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.concurrent.ExecutionException;
|
||||
|
||||
@ -171,9 +174,10 @@ public class WaitUntilRefreshIT extends ESIntegTestCase {
|
||||
return singleton(DeletePlzPlugin.class);
|
||||
}
|
||||
|
||||
public static class DeletePlzPlugin extends Plugin {
|
||||
public void onModule(ScriptModule scriptModule) {
|
||||
scriptModule.registerScript("delete_plz", DeletePlzFactory.class);
|
||||
public static class DeletePlzPlugin extends Plugin implements ScriptPlugin {
|
||||
@Override
|
||||
public List<NativeScriptFactory> getNativeScripts() {
|
||||
return Collections.singletonList(new DeletePlzFactory());
|
||||
}
|
||||
}
|
||||
|
||||
@ -203,5 +207,10 @@ public class WaitUntilRefreshIT extends ESIntegTestCase {
|
||||
public boolean needsScores() {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getName() {
|
||||
return "delete_plz";
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -0,0 +1,82 @@
|
||||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index.fielddata.plain;
|
||||
|
||||
import org.apache.lucene.document.Document;
|
||||
import org.apache.lucene.index.DirectoryReader;
|
||||
import org.apache.lucene.index.IndexWriter;
|
||||
import org.apache.lucene.index.IndexWriterConfig;
|
||||
import org.apache.lucene.index.IndexableField;
|
||||
import org.apache.lucene.index.LeafReader;
|
||||
import org.apache.lucene.store.Directory;
|
||||
import org.apache.lucene.util.IOUtils;
|
||||
import org.apache.lucene.util.TestUtil;
|
||||
import org.elasticsearch.index.fielddata.FieldData;
|
||||
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
|
||||
import org.elasticsearch.index.mapper.core.NumberFieldMapper;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
public class HalfFloatFielddataTests extends ESTestCase {
|
||||
|
||||
public void testSingleValued() throws IOException {
|
||||
Directory dir = newDirectory();
|
||||
// we need the default codec to check for singletons
|
||||
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(null).setCodec(TestUtil.getDefaultCodec()));
|
||||
Document doc = new Document();
|
||||
for (IndexableField f : NumberFieldMapper.NumberType.HALF_FLOAT.createFields("half_float", 3f, false, true, false)) {
|
||||
doc.add(f);
|
||||
}
|
||||
w.addDocument(doc);
|
||||
final DirectoryReader dirReader = DirectoryReader.open(w);
|
||||
LeafReader reader = getOnlyLeafReader(dirReader);
|
||||
SortedNumericDoubleValues values = new SortedNumericDVIndexFieldData.SortedNumericHalfFloatFieldData(
|
||||
reader, "half_float").getDoubleValues();
|
||||
assertNotNull(FieldData.unwrapSingleton(values));
|
||||
values.setDocument(0);
|
||||
assertEquals(1, values.count());
|
||||
assertEquals(3f, values.valueAt(0), 0f);
|
||||
IOUtils.close(dirReader, w, dir);
|
||||
}
|
||||
|
||||
public void testMultiValued() throws IOException {
|
||||
Directory dir = newDirectory();
|
||||
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(null));
|
||||
Document doc = new Document();
|
||||
for (IndexableField f : NumberFieldMapper.NumberType.HALF_FLOAT.createFields("half_float", 3f, false, true, false)) {
|
||||
doc.add(f);
|
||||
}
|
||||
for (IndexableField f : NumberFieldMapper.NumberType.HALF_FLOAT.createFields("half_float", 2f, false, true, false)) {
|
||||
doc.add(f);
|
||||
}
|
||||
w.addDocument(doc);
|
||||
final DirectoryReader dirReader = DirectoryReader.open(w);
|
||||
LeafReader reader = getOnlyLeafReader(dirReader);
|
||||
SortedNumericDoubleValues values = new SortedNumericDVIndexFieldData.SortedNumericHalfFloatFieldData(
|
||||
reader, "half_float").getDoubleValues();
|
||||
assertNull(FieldData.unwrapSingleton(values));
|
||||
values.setDocument(0);
|
||||
assertEquals(2, values.count());
|
||||
assertEquals(2f, values.valueAt(0), 0f);
|
||||
assertEquals(3f, values.valueAt(1), 0f);
|
||||
IOUtils.close(dirReader, w, dir);
|
||||
}
|
||||
}
|
@ -21,8 +21,18 @@ package org.elasticsearch.index.mapper.core;
|
||||
|
||||
import com.carrotsearch.randomizedtesting.generators.RandomPicks;
|
||||
|
||||
import org.apache.lucene.document.Document;
|
||||
import org.apache.lucene.document.FloatPoint;
|
||||
import org.apache.lucene.document.HalfFloatPoint;
|
||||
import org.apache.lucene.document.LongPoint;
|
||||
import org.apache.lucene.index.DirectoryReader;
|
||||
import org.apache.lucene.index.IndexOptions;
|
||||
import org.apache.lucene.index.IndexWriter;
|
||||
import org.apache.lucene.index.IndexWriterConfig;
|
||||
import org.apache.lucene.search.IndexSearcher;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.store.Directory;
|
||||
import org.apache.lucene.util.IOUtils;
|
||||
import org.elasticsearch.index.mapper.FieldTypeTestCase;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType.Relation;
|
||||
@ -81,6 +91,7 @@ public class NumberFieldTypeTests extends FieldTypeTestCase {
|
||||
assertEquals((short) 3, NumberType.SHORT.parse(3d));
|
||||
assertEquals(3, NumberType.INTEGER.parse(3d));
|
||||
assertEquals(3L, NumberType.LONG.parse(3d));
|
||||
assertEquals(3f, NumberType.HALF_FLOAT.parse(3d));
|
||||
assertEquals(3f, NumberType.FLOAT.parse(3d));
|
||||
assertEquals(3d, NumberType.DOUBLE.parse(3d));
|
||||
|
||||
@ -103,7 +114,39 @@ public class NumberFieldTypeTests extends FieldTypeTestCase {
|
||||
assertEquals("Value [2147483648] is out of range for an integer", e.getMessage());
|
||||
e = expectThrows(IllegalArgumentException.class, () -> NumberType.LONG.parse(10000000000000000000d));
|
||||
assertEquals("Value [1.0E19] is out of range for a long", e.getMessage());
|
||||
assertEquals(1.1f, NumberType.FLOAT.parse(1.1)); // accuracy loss is expected
|
||||
assertEquals(1.1f, NumberType.HALF_FLOAT.parse(1.1));
|
||||
assertEquals(1.1f, NumberType.FLOAT.parse(1.1));
|
||||
assertEquals(1.1d, NumberType.DOUBLE.parse(1.1));
|
||||
}
|
||||
|
||||
public void testHalfFloatRange() throws IOException {
|
||||
// make sure the accuracy loss of half floats only occurs at index time
|
||||
// this test checks that searching half floats yields the same results as
|
||||
// searching floats that are rounded to the closest half float
|
||||
Directory dir = newDirectory();
|
||||
IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(null));
|
||||
final int numDocs = 10000;
|
||||
for (int i = 0; i < numDocs; ++i) {
|
||||
Document doc = new Document();
|
||||
float value = (randomFloat() * 2 - 1) * 70000;
|
||||
float rounded = HalfFloatPoint.sortableShortToHalfFloat(HalfFloatPoint.halfFloatToSortableShort(value));
|
||||
doc.add(new HalfFloatPoint("half_float", value));
|
||||
doc.add(new FloatPoint("float", rounded));
|
||||
w.addDocument(doc);
|
||||
}
|
||||
final DirectoryReader reader = DirectoryReader.open(w);
|
||||
w.close();
|
||||
IndexSearcher searcher = newSearcher(reader);
|
||||
final int numQueries = 1000;
|
||||
for (int i = 0; i < numQueries; ++i) {
|
||||
float l = (randomFloat() * 2 - 1) * 70000;
|
||||
float u = (randomFloat() * 2 - 1) * 70000;
|
||||
boolean includeLower = randomBoolean();
|
||||
boolean includeUpper = randomBoolean();
|
||||
Query floatQ = NumberFieldMapper.NumberType.FLOAT.rangeQuery("float", l, u, includeLower, includeUpper);
|
||||
Query halfFloatQ = NumberFieldMapper.NumberType.HALF_FLOAT.rangeQuery("half_float", l, u, includeLower, includeUpper);
|
||||
assertEquals(searcher.count(floatQ), searcher.count(halfFloatQ));
|
||||
}
|
||||
IOUtils.close(reader, dir);
|
||||
}
|
||||
}
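A small standalone sketch (not from the commit, class name hypothetical) of the index-time accuracy loss that the half_float assertions above account for; it reuses the same Lucene HalfFloatPoint helpers as the test code to find the nearest half float that would actually be stored.

import org.apache.lucene.document.HalfFloatPoint;

public class HalfFloatRoundingSketch {
    public static void main(String[] args) {
        float value = 3.14159f;
        // round-trip through the sortable short encoding, as testHalfFloatRange does above,
        // to obtain the closest half float representable at index time
        float rounded = HalfFloatPoint.sortableShortToHalfFloat(HalfFloatPoint.halfFloatToSortableShort(value));
        System.out.println(value + " is indexed as " + rounded); // roughly 3.1406
    }
}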
|
@ -50,7 +50,9 @@ import org.elasticsearch.index.IndexNotFoundException;
|
||||
import org.elasticsearch.plugins.Plugin;
|
||||
import org.elasticsearch.test.ESIntegTestCase;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.List;
|
||||
import java.util.function.Function;
|
||||
|
||||
import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE;
|
||||
@ -597,10 +599,9 @@ public class IndicesOptionsIntegrationIT extends ESIntegTestCase {
|
||||
new Setting<>("index.e", "", Function.identity(), Property.IndexScope);
|
||||
|
||||
|
||||
public void onModule(SettingsModule module) {
|
||||
module.registerSetting(INDEX_A);
|
||||
module.registerSetting(INDEX_C);
|
||||
module.registerSetting(INDEX_E);
|
||||
@Override
|
||||
public List<Setting<?>> getSettings() {
|
||||
return Arrays.asList(INDEX_A, INDEX_C, INDEX_E);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -49,6 +49,7 @@ import org.elasticsearch.test.engine.ThrowingLeafReaderWrapper;
|
||||
import java.io.IOException;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collection;
|
||||
import java.util.List;
|
||||
import java.util.Random;
|
||||
import java.util.concurrent.ExecutionException;
|
||||
|
||||
@ -200,9 +201,9 @@ public class RandomExceptionCircuitBreakerIT extends ESIntegTestCase {
|
||||
public static final Setting<Double> EXCEPTION_LOW_LEVEL_RATIO_SETTING =
|
||||
Setting.doubleSetting(EXCEPTION_LOW_LEVEL_RATIO_KEY, 0.1d, 0.0d, Property.IndexScope);
|
||||
public static class TestPlugin extends Plugin {
|
||||
public void onModule(SettingsModule module) {
|
||||
module.registerSetting(EXCEPTION_TOP_LEVEL_RATIO_SETTING);
|
||||
module.registerSetting(EXCEPTION_LOW_LEVEL_RATIO_SETTING);
|
||||
@Override
|
||||
public List<Setting<?>> getSettings() {
|
||||
return Arrays.asList(EXCEPTION_TOP_LEVEL_RATIO_SETTING, EXCEPTION_LOW_LEVEL_RATIO_SETTING);
|
||||
}
|
||||
|
||||
public void onModule(MockEngineFactoryPlugin.MockEngineReaderModule module) {
|
||||
|
@ -19,6 +19,11 @@
package org.elasticsearch.plugins;

import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.List;

import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.settings.Settings;
@ -26,10 +31,6 @@ import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexModule;
import org.elasticsearch.test.ESTestCase;

import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Arrays;

public class PluginsServiceTests extends ESTestCase {
public static class AdditionalSettingsPlugin1 extends Plugin {
@Override
@ -50,6 +51,8 @@ public class PluginsServiceTests extends ESTestCase {
}
}

public static class FilterablePlugin extends Plugin implements ScriptPlugin {}

public static class BrokenModule extends AbstractModule {

@Override
@ -111,4 +114,15 @@ public class PluginsServiceTests extends ESTestCase {
assertTrue(e.getMessage(), e.getMessage().contains("Could not load plugin descriptor for existing plugin"));
}
}

public void testFilterPlugins() {
Settings settings = Settings.builder()
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
.put("my.setting", "test")
.put(IndexModule.INDEX_STORE_TYPE_SETTING.getKey(), IndexModule.Type.SIMPLEFS.getSettingsKey()).build();
PluginsService service = newPluginsService(settings, AdditionalSettingsPlugin1.class, FilterablePlugin.class);
List<ScriptPlugin> scriptPlugins = service.filterPlugins(ScriptPlugin.class);
assertEquals(1, scriptPlugins.size());
assertEquals(FilterablePlugin.class, scriptPlugins.get(0).getClass());
}
}
@ -44,11 +44,10 @@ public class FileScriptTests extends ESTestCase {
.put(ScriptService.SCRIPT_AUTO_RELOAD_ENABLED_SETTING.getKey(), false)
.put(settings)
.build();
Set<ScriptEngineService> engines = new HashSet<>(Collections.singletonList(new MockScriptEngine()));
ScriptEngineRegistry scriptEngineRegistry = new ScriptEngineRegistry(Collections.singletonList(new ScriptEngineRegistry.ScriptEngineRegistration(MockScriptEngine.class, MockScriptEngine.NAME, true)));
ScriptEngineRegistry scriptEngineRegistry = new ScriptEngineRegistry(Collections.singleton(new MockScriptEngine()));
ScriptContextRegistry scriptContextRegistry = new ScriptContextRegistry(Collections.emptyList());
ScriptSettings scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry);
return new ScriptService(settings, new Environment(settings), engines, null, scriptEngineRegistry, scriptContextRegistry, scriptSettings);
return new ScriptService(settings, new Environment(settings), null, scriptEngineRegistry, scriptContextRegistry, scriptSettings);
}

public void testFileScriptFound() throws Exception {
@ -24,20 +24,22 @@ import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.inject.Injector;
import org.elasticsearch.common.inject.ModulesBuilder;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsModule;
import org.elasticsearch.env.Environment;
import org.elasticsearch.env.EnvironmentModule;
import org.elasticsearch.script.ScriptService.ScriptType;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.InternalSettingsPlugin;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.threadpool.ThreadPoolModule;
import org.elasticsearch.watcher.ResourceWatcherService;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
@ -51,14 +53,14 @@ public class NativeScriptTests extends ESTestCase {
.put("node.name", "testNativeScript")
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
.build();
SettingsModule settingsModule = new SettingsModule(settings);
ScriptModule scriptModule = new ScriptModule();
scriptModule.prepareSettings(settingsModule);
scriptModule.registerScript("my", MyNativeScriptFactory.class);
ScriptModule scriptModule = new ScriptModule(new NativeScriptEngineService(settings,
Collections.singletonMap("my", new MyNativeScriptFactory())));
List<Setting<?>> scriptSettings = scriptModule.getSettings();
scriptSettings.add(InternalSettingsPlugin.VERSION_CREATED);
SettingsModule settingsModule = new SettingsModule(settings, scriptSettings, Collections.emptyList());
final ThreadPool threadPool = new ThreadPool(settings);
Injector injector = new ModulesBuilder().add(
new EnvironmentModule(new Environment(settings)),
new ThreadPoolModule(threadPool),
new EnvironmentModule(new Environment(settings), threadPool),
new SettingsModule(settings),
scriptModule).createInjector();
@ -85,11 +87,12 @@ public class NativeScriptTests extends ESTestCase {
ResourceWatcherService resourceWatcherService = new ResourceWatcherService(settings, null);
Map<String, NativeScriptFactory> nativeScriptFactoryMap = new HashMap<>();
nativeScriptFactoryMap.put("my", new MyNativeScriptFactory());
Set<ScriptEngineService> scriptEngineServices = singleton(new NativeScriptEngineService(settings, nativeScriptFactoryMap));
ScriptEngineRegistry scriptEngineRegistry = new ScriptEngineRegistry(Collections.singletonList(new ScriptEngineRegistry.ScriptEngineRegistration(NativeScriptEngineService.class, NativeScriptEngineService.NAME, true)));
ScriptEngineRegistry scriptEngineRegistry = new ScriptEngineRegistry(Collections.singleton(new NativeScriptEngineService(settings,
nativeScriptFactoryMap)));
ScriptContextRegistry scriptContextRegistry = new ScriptContextRegistry(new ArrayList<>());
ScriptSettings scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry);
ScriptService scriptService = new ScriptService(settings, environment, scriptEngineServices, resourceWatcherService, scriptEngineRegistry, scriptContextRegistry, scriptSettings);
ScriptService scriptService = new ScriptService(settings, environment, resourceWatcherService, scriptEngineRegistry,
scriptContextRegistry, scriptSettings);

for (ScriptContext scriptContext : scriptContextRegistry.scriptContexts()) {
assertThat(scriptService.compile(new Script("my", ScriptType.INLINE, NativeScriptEngineService.NAME, null), scriptContext,
@ -107,6 +110,11 @@ public class NativeScriptTests extends ESTestCase {
public boolean needsScores() {
return false;
}

@Override
public String getName() {
return "my";
}
}

static class MyScript extends AbstractExecutableScript {
@ -43,10 +43,7 @@ public class ScriptContextTests extends ESTestCase {
.put("script." + PLUGIN_NAME + "_custom_globally_disabled_op", "false")
.put("script.engine." + MockScriptEngine.NAME + ".inline." + PLUGIN_NAME + "_custom_exp_disabled_op", "false")
.build();
Set<ScriptEngineService> engines = new HashSet<>(Collections.singletonList(new MockScriptEngine()));
ScriptEngineRegistry.ScriptEngineRegistration registration =
new ScriptEngineRegistry.ScriptEngineRegistration(MockScriptEngine.class, MockScriptEngine.NAME, true);
ScriptEngineRegistry scriptEngineRegistry = new ScriptEngineRegistry(Collections.singletonList(registration));
ScriptEngineRegistry scriptEngineRegistry = new ScriptEngineRegistry(Collections.singletonList(new MockScriptEngine()));
List<ScriptContext.Plugin> customContexts = Arrays.asList(
new ScriptContext.Plugin(PLUGIN_NAME, "custom_op"),
new ScriptContext.Plugin(PLUGIN_NAME, "custom_exp_disabled_op"),
@ -54,7 +51,7 @@ public class ScriptContextTests extends ESTestCase {
ScriptContextRegistry scriptContextRegistry = new ScriptContextRegistry(customContexts);
ScriptSettings scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry);

return new ScriptService(settings, new Environment(settings), engines, null, scriptEngineRegistry, scriptContextRegistry, scriptSettings);
return new ScriptService(settings, new Environment(settings), null, scriptEngineRegistry, scriptContextRegistry, scriptSettings);
}

public void testCustomGlobalScriptContextSettings() throws Exception {
@ -23,13 +23,16 @@ import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.ScriptPlugin;
import org.elasticsearch.script.ScriptService.ScriptType;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
import org.elasticsearch.test.ESIntegTestCase.Scope;

import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutionException;
@ -85,6 +88,11 @@ public class ScriptFieldIT extends ESIntegTestCase {
public boolean needsScores() {
return false;
}

@Override
public String getName() {
return "int";
}
}

static class IntScript extends AbstractSearchScript {
@ -104,6 +112,11 @@ public class ScriptFieldIT extends ESIntegTestCase {
public boolean needsScores() {
return false;
}

@Override
public String getName() {
return "long";
}
}

static class LongScript extends AbstractSearchScript {
@ -123,6 +136,11 @@ public class ScriptFieldIT extends ESIntegTestCase {
public boolean needsScores() {
return false;
}

@Override
public String getName() {
return "float";
}
}

static class FloatScript extends AbstractSearchScript {
@ -142,6 +160,11 @@ public class ScriptFieldIT extends ESIntegTestCase {
public boolean needsScores() {
return false;
}

@Override
public String getName() {
return "double";
}
}

static class DoubleScript extends AbstractSearchScript {
@ -151,14 +174,11 @@ public class ScriptFieldIT extends ESIntegTestCase {
}
}

public static class CustomScriptPlugin extends Plugin {

public void onModule(ScriptModule scriptModule) {
scriptModule.registerScript("int", IntArrayScriptFactory.class);
scriptModule.registerScript("long", LongArrayScriptFactory.class);
scriptModule.registerScript("float", FloatArrayScriptFactory.class);
scriptModule.registerScript("double", DoubleArrayScriptFactory.class);
public static class CustomScriptPlugin extends Plugin implements ScriptPlugin {
@Override
public List<NativeScriptFactory> getNativeScripts() {
return Arrays.asList(new IntArrayScriptFactory(), new LongArrayScriptFactory(), new FloatArrayScriptFactory(),
new DoubleArrayScriptFactory());
}

}
}
@ -27,16 +27,13 @@ import org.elasticsearch.test.ESTestCase;
import org.junit.After;
import org.junit.Before;

import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import static java.util.Collections.unmodifiableMap;
import static java.util.Collections.unmodifiableSet;
import static org.elasticsearch.common.util.set.Sets.newHashSet;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.notNullValue;
@ -71,9 +68,7 @@ public class ScriptModesTests extends ESTestCase {
//add the native engine just to make sure it gets filtered out
new NativeScriptEngineService(Settings.EMPTY, Collections.<String, NativeScriptFactory>emptyMap()),
new CustomScriptEngineService()));
ScriptEngineRegistry scriptEngineRegistry = new ScriptEngineRegistry(Arrays.asList(
new ScriptEngineRegistry.ScriptEngineRegistration(NativeScriptEngineService.class, NativeScriptEngineService.NAME),
new ScriptEngineRegistry.ScriptEngineRegistration(CustomScriptEngineService.class, CustomScriptEngineService.NAME)));
ScriptEngineRegistry scriptEngineRegistry = new ScriptEngineRegistry(scriptEngines.values());
scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry);
checkedSettings = new HashSet<>();
assertAllSettingsWereChecked = true;
@ -41,14 +41,11 @@ import org.junit.Before;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.Matchers.equalTo;
@ -61,7 +58,6 @@ public class ScriptServiceTests extends ESTestCase {
private ResourceWatcherService resourceWatcherService;
private ScriptEngineService scriptEngineService;
private ScriptEngineService dangerousScriptEngineService;
private Set<ScriptEngineService> services;
private Map<String, ScriptEngineService> scriptEnginesByLangMap;
private ScriptEngineRegistry scriptEngineRegistry;
private ScriptContextRegistry scriptContextRegistry;
@ -89,9 +85,6 @@ public class ScriptServiceTests extends ESTestCase {
resourceWatcherService = new ResourceWatcherService(baseSettings, null);
scriptEngineService = new TestEngineService();
dangerousScriptEngineService = new TestDangerousEngineService();
services = new HashSet<>(2);
services.add(scriptEngineService);
services.add(dangerousScriptEngineService);
scriptEnginesByLangMap = ScriptModesTests.buildScriptEnginesByLangMap(Collections.singleton(scriptEngineService));
//randomly register custom script contexts
int randomInt = randomIntBetween(0, 3);
@ -109,10 +102,7 @@ public class ScriptServiceTests extends ESTestCase {
String context = plugin + "_" + operation;
contexts.put(context, new ScriptContext.Plugin(plugin, operation));
}
List<ScriptEngineRegistry.ScriptEngineRegistration> registries = new ArrayList<>(2);
registries.add(new ScriptEngineRegistry.ScriptEngineRegistration(TestEngineService.class, TestEngineService.NAME, true));
registries.add(new ScriptEngineRegistry.ScriptEngineRegistration(TestDangerousEngineService.class, TestDangerousEngineService.NAME));
scriptEngineRegistry = new ScriptEngineRegistry(registries);
scriptEngineRegistry = new ScriptEngineRegistry(Arrays.asList(scriptEngineService, dangerousScriptEngineService));
scriptContextRegistry = new ScriptContextRegistry(contexts.values());
scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry);
scriptContexts = scriptContextRegistry.scriptContexts().toArray(new ScriptContext[scriptContextRegistry.scriptContexts().size()]);
@ -124,7 +114,7 @@ public class ScriptServiceTests extends ESTestCase {
private void buildScriptService(Settings additionalSettings) throws IOException {
Settings finalSettings = Settings.builder().put(baseSettings).put(additionalSettings).build();
Environment environment = new Environment(finalSettings);
scriptService = new ScriptService(finalSettings, environment, services, resourceWatcherService, scriptEngineRegistry, scriptContextRegistry, scriptSettings) {
scriptService = new ScriptService(finalSettings, environment, resourceWatcherService, scriptEngineRegistry, scriptContextRegistry, scriptSettings) {
@Override
String getScriptFromClusterState(ClusterState state, String scriptLang, String id) {
//mock the script that gets retrieved from an index
@ -532,6 +522,11 @@ public class ScriptServiceTests extends ESTestCase {
public void scriptRemoved(CompiledScript script) {
// Nothing to do here
}

@Override
public boolean isInlineScriptEnabled() {
return true;
}
}

public static class TestDangerousEngineService implements ScriptEngineService {
@ -20,17 +20,13 @@
package org.elasticsearch.script;

import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.test.ESTestCase;

import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;

import static org.hamcrest.Matchers.anyOf;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
@ -38,7 +34,7 @@ public class ScriptSettingsTests extends ESTestCase {
public void testDefaultLanguageIsGroovy() {
ScriptEngineRegistry scriptEngineRegistry =
new ScriptEngineRegistry(Collections.singletonList(new ScriptEngineRegistry.ScriptEngineRegistration(CustomScriptEngineService.class, CustomScriptEngineService.NAME, true)));
new ScriptEngineRegistry(Collections.singletonList(new CustomScriptEngineService()));
ScriptContextRegistry scriptContextRegistry = new ScriptContextRegistry(Collections.emptyList());
ScriptSettings scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry);
assertThat(scriptSettings.getDefaultScriptLanguageSetting().get(Settings.EMPTY), equalTo("groovy"));
@ -46,7 +42,7 @@ public class ScriptSettingsTests extends ESTestCase {
public void testCustomDefaultLanguage() {
ScriptEngineRegistry scriptEngineRegistry =
new ScriptEngineRegistry(Collections.singletonList(new ScriptEngineRegistry.ScriptEngineRegistration(CustomScriptEngineService.class, CustomScriptEngineService.NAME, true)));
new ScriptEngineRegistry(Collections.singletonList(new CustomScriptEngineService()));
ScriptContextRegistry scriptContextRegistry = new ScriptContextRegistry(Collections.emptyList());
ScriptSettings scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry);
String defaultLanguage = CustomScriptEngineService.NAME;
@ -56,7 +52,7 @@ public class ScriptSettingsTests extends ESTestCase {
public void testInvalidDefaultLanguage() {
ScriptEngineRegistry scriptEngineRegistry =
new ScriptEngineRegistry(Collections.singletonList(new ScriptEngineRegistry.ScriptEngineRegistration(CustomScriptEngineService.class, CustomScriptEngineService.NAME, true)));
new ScriptEngineRegistry(Collections.singletonList(new CustomScriptEngineService()));
ScriptContextRegistry scriptContextRegistry = new ScriptContextRegistry(Collections.emptyList());
ScriptSettings scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry);
Settings settings = Settings.builder().put("script.default_lang", "C++").build();
@ -23,6 +23,7 @@ import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.ScriptPlugin;
import org.elasticsearch.script.AbstractSearchScript;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.NativeScriptFactory;
@ -33,6 +34,7 @@ import org.elasticsearch.test.ESIntegTestCase;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
@ -64,10 +66,10 @@ public class SearchTimeoutIT extends ESIntegTestCase {
assertThat(searchResponse.isTimedOut(), equalTo(true));
}

public static class ScriptedTimeoutPlugin extends Plugin {

public void onModule(ScriptModule module) {
module.registerScript(NativeTestScriptedTimeout.TEST_NATIVE_SCRIPT_TIMEOUT, NativeTestScriptedTimeout.Factory.class);
public static class ScriptedTimeoutPlugin extends Plugin implements ScriptPlugin {
@Override
public List<NativeScriptFactory> getNativeScripts() {
return Collections.singletonList(new NativeTestScriptedTimeout.Factory());
}
}
@ -86,6 +88,11 @@ public class SearchTimeoutIT extends ESIntegTestCase {
public boolean needsScores() {
return false;
}

@Override
public String getName() {
return TEST_NATIVE_SCRIPT_TIMEOUT;
}
}

@Override
@ -29,9 +29,9 @@ import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.inject.Injector;
import org.elasticsearch.common.inject.ModulesBuilder;
import org.elasticsearch.common.inject.multibindings.Multibinder;
import org.elasticsearch.common.inject.util.Providers;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsModule;
import org.elasticsearch.common.xcontent.XContentFactory;
@ -46,31 +46,21 @@ import org.elasticsearch.indices.IndicesModule;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.script.MockScriptEngine;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.ScriptContextRegistry;
import org.elasticsearch.script.ScriptEngineRegistry;
import org.elasticsearch.script.ScriptEngineService;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.ScriptSettings;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.IndexSettingsModule;
import org.elasticsearch.test.InternalSettingsPlugin;
import org.elasticsearch.test.VersionUtils;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.threadpool.ThreadPoolModule;
import org.junit.AfterClass;
import org.junit.BeforeClass;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Random;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@ -115,43 +105,12 @@ public class AggregatorParsingTests extends ESTestCase {
final ClusterService clusterService = createClusterService(threadPool);
setState(clusterService, new ClusterState.Builder(clusterService.state()).metaData(new MetaData.Builder()
.put(new IndexMetaData.Builder(index.getName()).settings(indexSettings).numberOfShards(1).numberOfReplicas(0))));
SettingsModule settingsModule = new SettingsModule(settings);
settingsModule.registerSetting(InternalSettingsPlugin.VERSION_CREATED);
ScriptModule scriptModule = new ScriptModule() {
@Override
protected void configure() {
Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
// no file watching, so we don't need a
// ResourceWatcherService
.put(ScriptService.SCRIPT_AUTO_RELOAD_ENABLED_SETTING.getKey(), false).build();
MockScriptEngine mockScriptEngine = new MockScriptEngine();
Multibinder<ScriptEngineService> multibinder = Multibinder.newSetBinder(binder(), ScriptEngineService.class);
multibinder.addBinding().toInstance(mockScriptEngine);
Set<ScriptEngineService> engines = new HashSet<>();
engines.add(mockScriptEngine);
List<ScriptContext.Plugin> customContexts = new ArrayList<>();
ScriptEngineRegistry scriptEngineRegistry =
new ScriptEngineRegistry(Collections
.singletonList(new ScriptEngineRegistry.ScriptEngineRegistration(MockScriptEngine.class,
MockScriptEngine.NAME,
true)));
bind(ScriptEngineRegistry.class).toInstance(scriptEngineRegistry);
ScriptContextRegistry scriptContextRegistry = new ScriptContextRegistry(customContexts);
bind(ScriptContextRegistry.class).toInstance(scriptContextRegistry);
ScriptSettings scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry);
bind(ScriptSettings.class).toInstance(scriptSettings);
try {
ScriptService scriptService = new ScriptService(settings, new Environment(settings), engines, null,
scriptEngineRegistry, scriptContextRegistry, scriptSettings);
bind(ScriptService.class).toInstance(scriptService);
} catch (IOException e) {
throw new IllegalStateException("error while binding ScriptService", e);
}
}
};
scriptModule.prepareSettings(settingsModule);
injector = new ModulesBuilder().add(new EnvironmentModule(new Environment(settings)), settingsModule,
new ThreadPoolModule(threadPool), scriptModule, new IndicesModule() {
ScriptModule scriptModule = newTestScriptModule();
List<Setting<?>> scriptSettings = scriptModule.getSettings();
scriptSettings.add(InternalSettingsPlugin.VERSION_CREATED);
SettingsModule settingsModule = new SettingsModule(settings, scriptSettings, Collections.emptyList());
injector = new ModulesBuilder().add(new EnvironmentModule(new Environment(settings), threadPool), settingsModule
, scriptModule, new IndicesModule() {

@Override
protected void configure() {
@ -28,12 +28,12 @@ import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.inject.Injector;
import org.elasticsearch.common.inject.ModulesBuilder;
import org.elasticsearch.common.inject.multibindings.Multibinder;
import org.elasticsearch.common.inject.util.Providers;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsModule;
import org.elasticsearch.common.xcontent.ToXContent;
@ -50,30 +50,20 @@ import org.elasticsearch.indices.IndicesModule;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.script.MockScriptEngine;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.ScriptContextRegistry;
import org.elasticsearch.script.ScriptEngineRegistry;
import org.elasticsearch.script.ScriptEngineService;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.ScriptSettings;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.IndexSettingsModule;
import org.elasticsearch.test.InternalSettingsPlugin;
import org.elasticsearch.test.VersionUtils;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.threadpool.ThreadPoolModule;
import org.junit.AfterClass;
import org.junit.BeforeClass;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import static org.elasticsearch.test.ClusterServiceUtils.createClusterService;
import static org.elasticsearch.test.ClusterServiceUtils.setState;
@ -129,45 +119,13 @@ public abstract class BaseAggregationTestCase<AB extends AbstractAggregationBuil
final ClusterService clusterService = createClusterService(threadPool);
setState(clusterService, new ClusterState.Builder(clusterService.state()).metaData(new MetaData.Builder()
.put(new IndexMetaData.Builder(index.getName()).settings(indexSettings).numberOfShards(1).numberOfReplicas(0))));
SettingsModule settingsModule = new SettingsModule(settings);
settingsModule.registerSetting(InternalSettingsPlugin.VERSION_CREATED);
ScriptModule scriptModule = new ScriptModule() {
@Override
protected void configure() {
Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
// no file watching, so we don't need a
// ResourceWatcherService
.put(ScriptService.SCRIPT_AUTO_RELOAD_ENABLED_SETTING.getKey(), false).build();
MockScriptEngine mockScriptEngine = new MockScriptEngine();
Multibinder<ScriptEngineService> multibinder = Multibinder.newSetBinder(binder(), ScriptEngineService.class);
multibinder.addBinding().toInstance(mockScriptEngine);
Set<ScriptEngineService> engines = new HashSet<>();
engines.add(mockScriptEngine);
List<ScriptContext.Plugin> customContexts = new ArrayList<>();
ScriptEngineRegistry scriptEngineRegistry =
new ScriptEngineRegistry(Collections
.singletonList(new ScriptEngineRegistry.ScriptEngineRegistration(MockScriptEngine.class,
MockScriptEngine.NAME,
true)));
bind(ScriptEngineRegistry.class).toInstance(scriptEngineRegistry);
ScriptContextRegistry scriptContextRegistry = new ScriptContextRegistry(customContexts);
bind(ScriptContextRegistry.class).toInstance(scriptContextRegistry);
ScriptSettings scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry);
bind(ScriptSettings.class).toInstance(scriptSettings);
try {
ScriptService scriptService = new ScriptService(settings, new Environment(settings), engines, null,
scriptEngineRegistry, scriptContextRegistry, scriptSettings);
bind(ScriptService.class).toInstance(scriptService);
} catch (IOException e) {
throw new IllegalStateException("error while binding ScriptService", e);
}
}
};
scriptModule.prepareSettings(settingsModule);
ScriptModule scriptModule = newTestScriptModule();
List<Setting<?>> scriptSettings = scriptModule.getSettings();
scriptSettings.add(InternalSettingsPlugin.VERSION_CREATED);
SettingsModule settingsModule = new SettingsModule(settings, scriptSettings, Collections.emptyList());
injector = new ModulesBuilder().add(
new EnvironmentModule(new Environment(settings)),
new EnvironmentModule(new Environment(settings), threadPool),
settingsModule,
new ThreadPoolModule(threadPool),
scriptModule,
new IndicesModule() {
@ -28,12 +28,12 @@ import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.inject.Injector;
import org.elasticsearch.common.inject.ModulesBuilder;
import org.elasticsearch.common.inject.multibindings.Multibinder;
import org.elasticsearch.common.inject.util.Providers;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsModule;
import org.elasticsearch.common.xcontent.ToXContent;
@ -44,37 +44,27 @@ import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.env.Environment;
import org.elasticsearch.env.EnvironmentModule;
import org.elasticsearch.index.Index;
import org.elasticsearch.test.AbstractQueryTestCase;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.indices.IndicesModule;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.script.MockScriptEngine;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.ScriptContextRegistry;
import org.elasticsearch.script.ScriptEngineRegistry;
import org.elasticsearch.script.ScriptEngineService;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.ScriptSettings;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.search.aggregations.pipeline.AbstractPipelineAggregatorBuilder;
import org.elasticsearch.test.AbstractQueryTestCase;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.IndexSettingsModule;
import org.elasticsearch.test.InternalSettingsPlugin;
import org.elasticsearch.test.VersionUtils;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.threadpool.ThreadPoolModule;
import org.junit.AfterClass;
import org.junit.BeforeClass;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import static org.elasticsearch.test.ClusterServiceUtils.createClusterService;
import static org.elasticsearch.test.ClusterServiceUtils.setState;
@ -129,44 +119,13 @@ public abstract class BasePipelineAggregationTestCase<AF extends AbstractPipelin
final ClusterService clusterService = createClusterService(threadPool);
setState(clusterService, new ClusterState.Builder(clusterService.state()).metaData(new MetaData.Builder()
.put(new IndexMetaData.Builder(index.getName()).settings(indexSettings).numberOfShards(1).numberOfReplicas(0))));
SettingsModule settingsModule = new SettingsModule(settings);
settingsModule.registerSetting(InternalSettingsPlugin.VERSION_CREATED);
ScriptModule scriptModule = new ScriptModule() {
@Override
protected void configure() {
Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
// no file watching, so we don't need a
// ResourceWatcherService
.put(ScriptService.SCRIPT_AUTO_RELOAD_ENABLED_SETTING.getKey(), false).build();
MockScriptEngine mockScriptEngine = new MockScriptEngine();
Multibinder<ScriptEngineService> multibinder = Multibinder.newSetBinder(binder(), ScriptEngineService.class);
multibinder.addBinding().toInstance(mockScriptEngine);
Set<ScriptEngineService> engines = new HashSet<>();
engines.add(mockScriptEngine);
List<ScriptContext.Plugin> customContexts = new ArrayList<>();
ScriptEngineRegistry scriptEngineRegistry =
new ScriptEngineRegistry(Collections
.singletonList(new ScriptEngineRegistry.ScriptEngineRegistration(MockScriptEngine.class,
MockScriptEngine.NAME, true)));
bind(ScriptEngineRegistry.class).toInstance(scriptEngineRegistry);
ScriptContextRegistry scriptContextRegistry = new ScriptContextRegistry(customContexts);
bind(ScriptContextRegistry.class).toInstance(scriptContextRegistry);
ScriptSettings scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry);
bind(ScriptSettings.class).toInstance(scriptSettings);
try {
ScriptService scriptService = new ScriptService(settings, new Environment(settings), engines, null,
scriptEngineRegistry, scriptContextRegistry, scriptSettings);
bind(ScriptService.class).toInstance(scriptService);
} catch (IOException e) {
throw new IllegalStateException("error while binding ScriptService", e);
}
}
};
scriptModule.prepareSettings(settingsModule);
ScriptModule scriptModule = newTestScriptModule();
List<Setting<?>> scriptSettings = scriptModule.getSettings();
scriptSettings.add(InternalSettingsPlugin.VERSION_CREATED);
SettingsModule settingsModule = new SettingsModule(settings, scriptSettings, Collections.emptyList());
injector = new ModulesBuilder().add(
new EnvironmentModule(new Environment(settings)),
new EnvironmentModule(new Environment(settings),threadPool),
settingsModule,
new ThreadPoolModule(threadPool),
scriptModule,
new IndicesModule() {
@ -21,6 +21,7 @@ package org.elasticsearch.search.aggregations.bucket;
import org.elasticsearch.common.inject.internal.Nullable;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.ScriptPlugin;
import org.elasticsearch.script.AbstractSearchScript;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.NativeScriptFactory;
@ -28,7 +29,9 @@ import org.elasticsearch.script.ScriptModule;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
@ -39,11 +42,10 @@ public class DateScriptMocks {
/**
* Mock plugin for the {@link DateScriptMocks.ExtractFieldScript} and {@link DateScriptMocks.PlusOneMonthScript}
*/
public static class DateScriptsMockPlugin extends Plugin {

public void onModule(ScriptModule module) {
module.registerScript(ExtractFieldScript.NAME, ExtractFieldScriptFactory.class);
module.registerScript(PlusOneMonthScript.NAME, PlusOneMonthScriptFactory.class);
public static class DateScriptsMockPlugin extends Plugin implements ScriptPlugin {
@Override
public List<NativeScriptFactory> getNativeScripts() {
return Arrays.asList(new ExtractFieldScriptFactory(), new PlusOneMonthScriptFactory());
}
}
@ -56,6 +58,11 @@ public class DateScriptMocks {
public boolean needsScores() {
return false;
}

@Override
public String getName() {
return ExtractFieldScript.NAME;
}
}

public static class ExtractFieldScript extends AbstractSearchScript {
@ -84,6 +91,11 @@ public class DateScriptMocks {
public boolean needsScores() {
return false;
}

@Override
public String getName() {
return PlusOneMonthScript.NAME;
}
}

/**
@ -25,11 +25,13 @@ import static org.hamcrest.Matchers.containsString;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;

import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.inject.internal.Nullable;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.ScriptPlugin;
import org.elasticsearch.script.AbstractSearchScript;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.NativeScriptFactory;
@ -216,10 +218,10 @@ public class IpRangeIT extends ESIntegTestCase {
assertThat(e.getMessage(), containsString("[ip_range] does not support scripts"));
}

public static class DummyScriptPlugin extends Plugin {

public void onModule(ScriptModule module) {
module.registerScript(DummyScript.NAME, DummyScriptFactory.class);
public static class DummyScriptPlugin extends Plugin implements ScriptPlugin {
@Override
public List<NativeScriptFactory> getNativeScripts() {
return Collections.singletonList(new DummyScriptFactory());
}
}
@ -233,6 +235,11 @@ public class IpRangeIT extends ESIntegTestCase {
public boolean needsScores() {
return false;
}

@Override
public String getName() {
return DummyScript.NAME;
}
}

private static class DummyScript extends AbstractSearchScript {
@ -30,6 +30,8 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.QueryShardException;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.ScriptPlugin;
import org.elasticsearch.script.NativeScriptFactory;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.script.ScriptService.ScriptType;
@ -52,6 +54,7 @@ import org.elasticsearch.test.search.aggregations.bucket.SharedSignificantTermsT
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
@ -164,15 +167,15 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase {
}
}

public static class CustomSignificanceHeuristicPlugin extends Plugin {
public static class CustomSignificanceHeuristicPlugin extends Plugin implements ScriptPlugin {

public void onModule(SearchModule searchModule) {
searchModule.registerSignificanceHeuristic(SimpleHeuristic.NAMES_FIELD, SimpleHeuristic::new, SimpleHeuristic::parse);
}

public void onModule(ScriptModule module) {
module.registerScript(NativeSignificanceScoreScriptNoParams.NATIVE_SIGNIFICANCE_SCORE_SCRIPT_NO_PARAMS, NativeSignificanceScoreScriptNoParams.Factory.class);
module.registerScript(NativeSignificanceScoreScriptWithParams.NATIVE_SIGNIFICANCE_SCORE_SCRIPT_WITH_PARAMS, NativeSignificanceScoreScriptWithParams.Factory.class);
@Override
public List<NativeScriptFactory> getNativeScripts() {
return Arrays.asList(new NativeSignificanceScoreScriptNoParams.Factory(), new NativeSignificanceScoreScriptWithParams.Factory());
}
}
@ -0,0 +1,44 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.search.aggregations.bucket.terms;

import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.test.ESSingleNodeTestCase;

import static org.hamcrest.Matchers.equalTo;

public class TermsAggregatorFactoryTests extends ESSingleNodeTestCase {
public void testSubAggCollectMode() throws Exception {
assertThat(TermsAggregatorFactory.subAggCollectionMode(Integer.MAX_VALUE, -1),
equalTo(Aggregator.SubAggCollectionMode.DEPTH_FIRST));
assertThat(TermsAggregatorFactory.subAggCollectionMode(10, -1),
equalTo(Aggregator.SubAggCollectionMode.BREADTH_FIRST));
assertThat(TermsAggregatorFactory.subAggCollectionMode(10, 5),
equalTo(Aggregator.SubAggCollectionMode.DEPTH_FIRST));
assertThat(TermsAggregatorFactory.subAggCollectionMode(10, 10),
equalTo(Aggregator.SubAggCollectionMode.DEPTH_FIRST));
assertThat(TermsAggregatorFactory.subAggCollectionMode(10, 100),
equalTo(Aggregator.SubAggCollectionMode.BREADTH_FIRST));
assertThat(TermsAggregatorFactory.subAggCollectionMode(1, 2),
equalTo(Aggregator.SubAggCollectionMode.BREADTH_FIRST));
assertThat(TermsAggregatorFactory.subAggCollectionMode(1, 100),
equalTo(Aggregator.SubAggCollectionMode.BREADTH_FIRST));
}
}
@ -21,14 +21,14 @@ package org.elasticsearch.search.aggregations.metrics;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.Scorer;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.ScriptPlugin;
import org.elasticsearch.script.CompiledScript;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.LeafSearchScript;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptEngineRegistry;
import org.elasticsearch.script.ScriptEngineService;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.script.ScriptService.ScriptType;
import org.elasticsearch.script.SearchScript;
import org.elasticsearch.search.aggregations.bucket.filter.Filter;
@ -356,11 +356,11 @@ public class AvgIT extends AbstractNumericTestCase {
/**
* Mock plugin for the {@link ExtractFieldScriptEngine}
*/
public static class ExtractFieldScriptPlugin extends Plugin {
public void onModule(ScriptModule module) {
module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(ExtractFieldScriptEngine.class, ExtractFieldScriptEngine.NAME, true));
public static class ExtractFieldScriptPlugin extends Plugin implements ScriptPlugin {
@Override
public ScriptEngineService getScriptEngineService(Settings settings) {
return new ExtractFieldScriptEngine();
}

}

/**
@ -465,11 +465,11 @@ public class AvgIT extends AbstractNumericTestCase {
/**
* Mock plugin for the {@link FieldValueScriptEngine}
*/
public static class FieldValueScriptPlugin extends Plugin {
public void onModule(ScriptModule module) {
module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(FieldValueScriptEngine.class, FieldValueScriptEngine.NAME, true));
public static class FieldValueScriptPlugin extends Plugin implements ScriptPlugin {
@Override
public ScriptEngineService getScriptEngineService(Settings settings) {
return new FieldValueScriptEngine();
}

}

/**
@ -567,5 +567,10 @@ public class AvgIT extends AbstractNumericTestCase {
@Override
public void scriptRemoved(CompiledScript script) {
}

@Override
public boolean isInlineScriptEnabled() {
return true;
}
}
}
@ -21,14 +21,14 @@ package org.elasticsearch.search.aggregations.metrics;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.Scorer;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.ScriptPlugin;
import org.elasticsearch.script.CompiledScript;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.LeafSearchScript;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptEngineRegistry;
import org.elasticsearch.script.ScriptEngineService;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.script.ScriptService.ScriptType;
import org.elasticsearch.script.SearchScript;
import org.elasticsearch.search.aggregations.bucket.filter.Filter;
@ -44,7 +44,6 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@ -351,11 +350,11 @@ public class SumIT extends AbstractNumericTestCase {
/**
* Mock plugin for the {@link ExtractFieldScriptEngine}
*/
public static class ExtractFieldScriptPlugin extends Plugin {
public void onModule(ScriptModule module) {
module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(ExtractFieldScriptEngine.class, ExtractFieldScriptEngine.NAME, true));
public static class ExtractFieldScriptPlugin extends Plugin implements ScriptPlugin {
@Override
public ScriptEngineService getScriptEngineService(Settings settings) {
return new ExtractFieldScriptEngine();
}

}

/**
@ -457,16 +456,21 @@ public class SumIT extends AbstractNumericTestCase {
@Override
public void scriptRemoved(CompiledScript script) {
}

@Override
public boolean isInlineScriptEnabled() {
return true;
}
}

/**
* Mock plugin for the {@link FieldValueScriptEngine}
*/
public static class FieldValueScriptPlugin extends Plugin {
public void onModule(ScriptModule module) {
module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(FieldValueScriptEngine.class, FieldValueScriptEngine.NAME, true));
public static class FieldValueScriptPlugin extends Plugin implements ScriptPlugin {
@Override
public ScriptEngineService getScriptEngineService(Settings settings) {
return new FieldValueScriptEngine();
}

}

/**
@ -572,5 +576,10 @@ public class SumIT extends AbstractNumericTestCase {
@Override
public void scriptRemoved(CompiledScript script) {
}

@Override
public boolean isInlineScriptEnabled() {
return true;
}
}
}
@ -21,7 +21,9 @@ package org.elasticsearch.search.aggregations.metrics;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.Scorer;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.ScriptPlugin;
import org.elasticsearch.script.CompiledScript;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.LeafSearchScript;
@ -210,11 +212,11 @@ public class ValueCountIT extends ESIntegTestCase {
/**
* Mock plugin for the {@link FieldValueScriptEngine}
*/
public static class FieldValueScriptPlugin extends Plugin {
public void onModule(ScriptModule module) {
module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(FieldValueScriptEngine.class, FieldValueScriptEngine.NAME, true));
public static class FieldValueScriptPlugin extends Plugin implements ScriptPlugin {
@Override
public ScriptEngineService getScriptEngineService(Settings settings) {
return new FieldValueScriptEngine();
}

}

/**
@ -319,5 +321,10 @@ public class ValueCountIT extends ESIntegTestCase {
@Override
public void scriptRemoved(CompiledScript script) {
}

@Override
public boolean isInlineScriptEnabled() {
return true;
}
}
}
@ -44,7 +44,9 @@ import org.elasticsearch.test.engine.MockEngineSupport;
import org.elasticsearch.test.engine.ThrowingLeafReaderWrapper;

import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Random;
import java.util.concurrent.ExecutionException;
@ -156,9 +158,9 @@ public class SearchWithRandomExceptionsIT extends ESIntegTestCase {
Setting.doubleSetting(EXCEPTION_TOP_LEVEL_RATIO_KEY, 0.1d, 0.0d, Property.IndexScope);
public static final Setting<Double> EXCEPTION_LOW_LEVEL_RATIO_SETTING =
Setting.doubleSetting(EXCEPTION_LOW_LEVEL_RATIO_KEY, 0.1d, 0.0d, Property.IndexScope);
public void onModule(SettingsModule module) {
module.registerSetting(EXCEPTION_TOP_LEVEL_RATIO_SETTING);
module.registerSetting(EXCEPTION_LOW_LEVEL_RATIO_SETTING);
@Override
public List<Setting<?>> getSettings() {
return Arrays.asList(EXCEPTION_TOP_LEVEL_RATIO_SETTING, EXCEPTION_LOW_LEVEL_RATIO_SETTING);
}
public void onModule(MockEngineFactoryPlugin.MockEngineReaderModule module) {
module.setReaderClass(RandomExceptionDirectoryReaderWrapper.class);
@ -30,12 +30,12 @@ import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.inject.AbstractModule;
import org.elasticsearch.common.inject.Injector;
import org.elasticsearch.common.inject.ModulesBuilder;
import org.elasticsearch.common.inject.multibindings.Multibinder;
import org.elasticsearch.common.inject.util.Providers;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsModule;
import org.elasticsearch.common.text.Text;
@ -54,15 +54,9 @@ import org.elasticsearch.indices.IndicesModule;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.script.MockScriptEngine;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.ScriptContextRegistry;
import org.elasticsearch.script.ScriptEngineRegistry;
import org.elasticsearch.script.ScriptEngineService;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.ScriptSettings;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.AggregatorParsers;
@ -85,16 +79,13 @@ import org.elasticsearch.test.IndexSettingsModule;
|
||||
import org.elasticsearch.test.InternalSettingsPlugin;
|
||||
import org.elasticsearch.test.VersionUtils;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
import org.elasticsearch.threadpool.ThreadPoolModule;
|
||||
import org.junit.AfterClass;
|
||||
import org.junit.BeforeClass;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
import static org.elasticsearch.test.ClusterServiceUtils.createClusterService;
|
||||
@ -136,43 +127,12 @@ public class SearchSourceBuilderTests extends ESTestCase {
|
||||
final ClusterService clusterService = createClusterService(threadPool);
|
||||
setState(clusterService, new ClusterState.Builder(clusterService.state()).metaData(new MetaData.Builder()
|
||||
.put(new IndexMetaData.Builder(index.getName()).settings(indexSettings).numberOfShards(1).numberOfReplicas(0))));
|
||||
SettingsModule settingsModule = new SettingsModule(settings);
|
||||
settingsModule.registerSetting(InternalSettingsPlugin.VERSION_CREATED);
|
||||
ScriptModule scriptModule = new ScriptModule() {
|
||||
@Override
|
||||
protected void configure() {
|
||||
Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
|
||||
// no file watching, so we don't need a
|
||||
// ResourceWatcherService
|
||||
.put(ScriptService.SCRIPT_AUTO_RELOAD_ENABLED_SETTING.getKey(), false).build();
|
||||
MockScriptEngine mockScriptEngine = new MockScriptEngine();
|
||||
Multibinder<ScriptEngineService> multibinder = Multibinder.newSetBinder(binder(), ScriptEngineService.class);
|
||||
multibinder.addBinding().toInstance(mockScriptEngine);
|
||||
Set<ScriptEngineService> engines = new HashSet<>();
|
||||
engines.add(mockScriptEngine);
|
||||
List<ScriptContext.Plugin> customContexts = new ArrayList<>();
|
||||
ScriptEngineRegistry scriptEngineRegistry = new ScriptEngineRegistry(Collections
|
||||
.singletonList(new ScriptEngineRegistry.ScriptEngineRegistration(MockScriptEngine.class,
|
||||
MockScriptEngine.NAME,
|
||||
true)));
|
||||
bind(ScriptEngineRegistry.class).toInstance(scriptEngineRegistry);
|
||||
ScriptContextRegistry scriptContextRegistry = new ScriptContextRegistry(customContexts);
|
||||
bind(ScriptContextRegistry.class).toInstance(scriptContextRegistry);
|
||||
ScriptSettings scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry);
|
||||
bind(ScriptSettings.class).toInstance(scriptSettings);
|
||||
try {
|
||||
ScriptService scriptService = new ScriptService(settings, new Environment(settings), engines, null,
|
||||
scriptEngineRegistry, scriptContextRegistry, scriptSettings);
|
||||
bind(ScriptService.class).toInstance(scriptService);
|
||||
} catch (IOException e) {
|
||||
throw new IllegalStateException("error while binding ScriptService", e);
|
||||
}
|
||||
}
|
||||
};
|
||||
scriptModule.prepareSettings(settingsModule);
|
||||
ScriptModule scriptModule = newTestScriptModule();
|
||||
List<Setting<?>> scriptSettings = scriptModule.getSettings();
|
||||
scriptSettings.add(InternalSettingsPlugin.VERSION_CREATED);
|
||||
SettingsModule settingsModule = new SettingsModule(settings, scriptSettings, Collections.emptyList());
|
||||
injector = new ModulesBuilder().add(
|
||||
new EnvironmentModule(new Environment(settings)), settingsModule,
|
||||
new ThreadPoolModule(threadPool),
|
||||
new EnvironmentModule(new Environment(settings), threadPool), settingsModule,
|
||||
scriptModule, new IndicesModule() {
|
||||
@Override
|
||||
protected void configure() {
|
||||
|
@ -101,6 +101,11 @@ public class ExplainableScriptIT extends ESIntegTestCase {
public boolean needsScores() {
return true;
}

@Override
public String getName() {
return "native_explainable_script";
}
}

static class MyScript extends AbstractDoubleSearchScript implements ExplainableSearchScript, ExecutableScript {
@ -20,10 +20,16 @@
package org.elasticsearch.search.functionscore;

import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.plugins.ScriptPlugin;
import org.elasticsearch.script.NativeScriptFactory;

public class ExplainableScriptPlugin extends Plugin {
public void onModule(ScriptModule module) {
module.registerScript("native_explainable_script", ExplainableScriptIT.MyNativeScriptFactory.class);

import java.util.Collections;
import java.util.List;

public class ExplainableScriptPlugin extends Plugin implements ScriptPlugin {
@Override
public List<NativeScriptFactory> getNativeScripts() {
return Collections.singletonList(new ExplainableScriptIT.MyNativeScriptFactory());
}
}
@ -95,10 +95,9 @@ public abstract class AbstractSortTestCase<T extends SortBuilder<T>> extends EST
.build();
Environment environment = new Environment(baseSettings);
ScriptContextRegistry scriptContextRegistry = new ScriptContextRegistry(Collections.emptyList());
ScriptEngineRegistry scriptEngineRegistry = new ScriptEngineRegistry(Collections.singletonList(new ScriptEngineRegistry
.ScriptEngineRegistration(TestEngineService.class, TestEngineService.NAME)));
ScriptEngineRegistry scriptEngineRegistry = new ScriptEngineRegistry(Collections.singletonList(new TestEngineService()));
ScriptSettings scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry);
scriptService = new ScriptService(baseSettings, environment, Collections.singleton(new TestEngineService()),
scriptService = new ScriptService(baseSettings, environment,
new ResourceWatcherService(baseSettings, null), scriptEngineRegistry, scriptContextRegistry, scriptSettings) {
@Override
public CompiledScript compile(Script script, ScriptContext scriptContext, Map<String, String> params, ClusterState state) {
@ -48,6 +48,7 @@ import java.io.UnsupportedEncodingException;
import java.nio.file.Path;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
@ -66,9 +67,9 @@ public class MockRepository extends FsRepository {
repositoriesModule.registerRepository("mock", MockRepository.class, BlobStoreIndexShardRepository.class);
}

public void onModule(SettingsModule module) {
module.registerSetting(USERNAME_SETTING);
module.registerSetting(PASSWORD_SETTING);
@Override
public List<Setting<?>> getSettings() {
return Arrays.asList(USERNAME_SETTING, PASSWORD_SETTING);
}
}
@ -20,6 +20,7 @@ package org.elasticsearch.update;

import org.elasticsearch.common.Nullable;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.ScriptPlugin;
import org.elasticsearch.script.AbstractExecutableScript;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.NativeScriptEngineService;
@ -32,7 +33,9 @@ import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
import org.elasticsearch.test.ESIntegTestCase.Scope;

import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import static org.hamcrest.Matchers.hasKey;
@ -65,10 +68,11 @@ public class UpdateByNativeScriptIT extends ESIntegTestCase {
assertThat(data.get("foo").toString(), is("SETVALUE"));
}

public static class CustomNativeScriptFactory implements NativeScriptFactory {
public static class TestPlugin extends Plugin {
public void onModule(ScriptModule scriptModule) {
scriptModule.registerScript("custom", CustomNativeScriptFactory.class);
public static class CustomNativeScriptFactory implements NativeScriptFactory {
public static class TestPlugin extends Plugin implements ScriptPlugin {
@Override
public List<NativeScriptFactory> getNativeScripts() {
return Collections.singletonList(new CustomNativeScriptFactory());
}
}
@Override
@ -79,6 +83,11 @@ public class UpdateByNativeScriptIT extends ESIntegTestCase {
public boolean needsScores() {
return false;
}

@Override
public String getName() {
return "custom";
}
}

static class CustomScript extends AbstractExecutableScript {
@ -38,12 +38,11 @@ import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.engine.DocumentMissingException;
import org.elasticsearch.index.engine.VersionConflictEngineException;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.ScriptPlugin;
import org.elasticsearch.script.CompiledScript;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptEngineRegistry;
import org.elasticsearch.script.ScriptEngineService;
import org.elasticsearch.script.ScriptModule;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.SearchScript;
import org.elasticsearch.search.lookup.SearchLookup;
@ -78,11 +77,11 @@ import static org.hamcrest.Matchers.nullValue;

public class UpdateIT extends ESIntegTestCase {

public static class PutFieldValuesScriptPlugin extends Plugin {
public void onModule(ScriptModule module) {
module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(PutFieldValuesScriptEngine.class, PutFieldValuesScriptEngine.NAME, true));
public static class PutFieldValuesScriptPlugin extends Plugin implements ScriptPlugin {
@Override
public ScriptEngineService getScriptEngineService(Settings settings) {
return new PutFieldValuesScriptEngine();
}

}

public static class PutFieldValuesScriptEngine implements ScriptEngineService {
@ -149,13 +148,17 @@ public class UpdateIT extends ESIntegTestCase {
public void scriptRemoved(CompiledScript script) {
}

@Override
public boolean isInlineScriptEnabled() {
return true;
}
}

public static class FieldIncrementScriptPlugin extends Plugin {
public void onModule(ScriptModule module) {
module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(FieldIncrementScriptEngine.class, FieldIncrementScriptEngine.NAME, true));
public static class FieldIncrementScriptPlugin extends Plugin implements ScriptPlugin {
@Override
public ScriptEngineService getScriptEngineService(Settings settings) {
return new FieldIncrementScriptEngine();
}

}

public static class FieldIncrementScriptEngine implements ScriptEngineService {
@ -215,15 +218,20 @@ public class UpdateIT extends ESIntegTestCase {
public void scriptRemoved(CompiledScript script) {
}

}

public static class ScriptedUpsertScriptPlugin extends Plugin {
public void onModule(ScriptModule module) {
module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(ScriptedUpsertScriptEngine.class, ScriptedUpsertScriptEngine.NAME, true));
@Override
public boolean isInlineScriptEnabled() {
return true;
}

}

public static class ScriptedUpsertScriptPlugin extends Plugin implements ScriptPlugin {
@Override
public ScriptEngineService getScriptEngineService(Settings settings) {
return new ScriptedUpsertScriptEngine();
}
}

public static class ScriptedUpsertScriptEngine implements ScriptEngineService {

public static final String NAME = "scripted_upsert";
@ -281,15 +289,20 @@ public class UpdateIT extends ESIntegTestCase {
public void scriptRemoved(CompiledScript script) {
}

}

public static class ExtractContextInSourceScriptPlugin extends Plugin {
public void onModule(ScriptModule module) {
module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(ExtractContextInSourceScriptEngine.class, ExtractContextInSourceScriptEngine.NAME, true));
@Override
public boolean isInlineScriptEnabled() {
return true;
}

}

public static class ExtractContextInSourceScriptPlugin extends Plugin implements ScriptPlugin {
@Override
public ScriptEngineService getScriptEngineService(Settings settings) {
return new ExtractContextInSourceScriptEngine();
}
}

public static class ExtractContextInSourceScriptEngine implements ScriptEngineService {

public static final String NAME = "extract_ctx";
@ -348,6 +361,10 @@ public class UpdateIT extends ESIntegTestCase {
public void scriptRemoved(CompiledScript script) {
}

@Override
public boolean isInlineScriptEnabled() {
return true;
}
}

@Override
@ -37,7 +37,7 @@ be "two hop" operations).
--------------------------------------------------
// on startup

Client client = TransportClient.builder().build()
TransportClient client = TransportClient.builder().build()
.addTransportAddress(new InetSocketTransportAddress(InetAddress.getByName("host1"), 9300))
.addTransportAddress(new InetSocketTransportAddress(InetAddress.getByName("host2"), 9300));

@ -53,7 +53,7 @@ Note that you have to set the cluster name if you use one different than
--------------------------------------------------
Settings settings = Settings.builder()
.put("cluster.name", "myClusterName").build();
Client client = TransportClient.builder().settings(settings).build();
TransportClient client = TransportClient.builder().settings(settings).build();
//Add transport addresses and do something with the client...
--------------------------------------------------
@ -577,7 +577,8 @@ Deferring calculation of child aggregations

For fields with many unique terms and a small number of required results it can be more efficient to delay the calculation
of child aggregations until the top parent-level aggs have been pruned. Ordinarily, all branches of the aggregation tree
are expanded in one depth-first pass and only then any pruning occurs. In some rare scenarios this can be very wasteful and can hit memory constraints.
are expanded in one depth-first pass and only then any pruning occurs.
In some scenarios this can be very wasteful and can hit memory constraints.
An example problem scenario is querying a movie database for the 10 most popular actors and their 5 most common co-stars:

[source,js]
@ -602,10 +603,13 @@ An example problem scenario is querying a movie database for the 10 most popular
}
--------------------------------------------------

Even though the number of movies may be comparatively small and we want only 50 result buckets there is a combinatorial explosion of buckets
during calculation - a single movie will produce n² buckets where n is the number of actors. The sane option would be to first determine
Even though the number of actors may be comparatively small and we want only 50 result buckets there is a combinatorial explosion of buckets
during calculation - a single actor can produce n² buckets where n is the number of actors. The sane option would be to first determine
the 10 most popular actors and only then examine the top co-stars for these 10 actors. This alternative strategy is what we call the `breadth_first` collection
mode as opposed to the default `depth_first` mode:
mode as opposed to the `depth_first` mode.

NOTE: The `breadth_first` is the default mode for fields with a cardinality bigger than the requested size or when the cardinality is unknown (numeric fields or scripts for instance).
It is possible to override the default heuristic and to provide a collect mode directly in the request:

[source,js]
--------------------------------------------------
@ -615,7 +619,7 @@ mode as opposed to the default `depth_first` mode:
"terms" : {
"field" : "actors",
"size" : 10,
"collect_mode" : "breadth_first"
"collect_mode" : "breadth_first" <1>
},
"aggs" : {
"costars" : {
@ -630,12 +634,10 @@ mode as opposed to the default `depth_first` mode:
}
--------------------------------------------------

<1> the possible values are `breadth_first` and `depth_first`

When using `breadth_first` mode the set of documents that fall into the uppermost buckets are
cached for subsequent replay so there is a memory overhead in doing this which is linear with the number of matching documents.
In most requests the volume of buckets generated is smaller than the number of documents that fall into them so the default `depth_first`
collection mode is normally the best bet but occasionally the `breadth_first` strategy can be significantly more efficient. Currently
elasticsearch will always use the `depth_first` collect_mode unless explicitly instructed to use `breadth_first` as in the above example.
Note that the `order` parameter can still be used to refer to data from a child aggregation when using the `breadth_first` setting - the parent
aggregation understands that this child aggregation will need to be called first before any of the other child aggregations.
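For illustration only (an editorial sketch, not part of this commit), that last combination could look like the following, assuming a hypothetical numeric `rating` field alongside the `actors` field used above:

[source,js]
--------------------------------------------------
{
    "aggs" : {
        "actors" : {
            "terms" : {
                "field" : "actors",
                "size" : 10,
                "collect_mode" : "breadth_first",
                "order" : { "max_rating" : "desc" }
            },
            "aggs" : {
                "max_rating" : { "max" : { "field" : "rating" } }
            }
        }
    }
}
--------------------------------------------------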
@ -4,12 +4,13 @@
The following numeric types are supported:

[horizontal]
`long`:: A signed 64-bit integer with a minimum value of +-2^63^+ and a maximum value of +2^63^-1+.
`integer`:: A signed 32-bit integer with a minimum value of +-2^31^+ and a maximum value of +2^31^-1+.
`short`:: A signed 16-bit integer with a minimum value of +-32,768+ and a maximum value of +32,767+.
`byte`:: A signed 8-bit integer with a minimum value of +-128+ and a maximum value of +127+.
`double`:: A double-precision 64-bit IEEE 754 floating point.
`float`:: A single-precision 32-bit IEEE 754 floating point.
`long`:: A signed 64-bit integer with a minimum value of +-2^63^+ and a maximum value of +2^63^-1+.
`integer`:: A signed 32-bit integer with a minimum value of +-2^31^+ and a maximum value of +2^31^-1+.
`short`:: A signed 16-bit integer with a minimum value of +-32,768+ and a maximum value of +32,767+.
`byte`:: A signed 8-bit integer with a minimum value of +-128+ and a maximum value of +127+.
`double`:: A double-precision 64-bit IEEE 754 floating point.
`float`:: A single-precision 32-bit IEEE 754 floating point.
`half_float`:: A half-precision 16-bit IEEE 754 floating point.

Below is an example of configuring a mapping with numeric fields:

@ -33,6 +34,34 @@ PUT my_index
--------------------------------------------------
// CONSOLE

==== Which type should I use?

As far as integer types (`byte`, `short`, `integer` and `long`) are concerned,
you should pick the smallest type which is enough for your use-case. This will
help indexing and searching be more efficient. Note however that given that
storage is optimized based on the actual values that are stored, picking one
type over another one will have no impact on storage requirements.

For floating-point types, picking the smallest type that is enough for the
use-case will still help indexing and searching be more efficient. However,
given that floating-point data is hard to compress, it might also have a
significant impact on storage requirements. Here is a table that compares the
3 floating-point types that are available in order to help make a decision.

[cols="<,<,<,<",options="header",]
|=======================================================================
|Type |Minimum value |Maximum value |Significant bits / digits
|`double`|+2^-1074^+ |+(2-2^-52^)·2^1023^+ |+53+ / +15.95+
|`float`|+2^-149^+ |+(2-2^-23^)·2^127^+ |+24+ / +7.22+
|`half_float`|+2^-24^+ |+65504+ |+11+ / +3.31+
|=======================================================================

When possible, it is often more efficient to store floating-point data into an
integer using a scaling factor. For instance, it is more efficient to store
percentages as integers between 0 and 100 than as floating-point numbers between 0
and 1. Another example would be prices: it will be more efficient to store prices
as a number of cents, which is an integer, than as a floating-point number.
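As a rough illustration of that scaling idea (an editorial sketch, not part of this commit; the `product` type and `price_in_cents` field are made-up names), a price of 10.99 could be indexed as the integer 1099 and converted back to a decimal amount at display time:

[source,js]
--------------------------------------------------
PUT my_index/product/1
{
  "price_in_cents": 1099
}
--------------------------------------------------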

[[number-params]]
==== Parameters for numeric fields
@ -33,6 +33,8 @@ to `painless`.

* Shortcuts for list, map access using the dot `.` operator

* Native support for regular expressions with `/pattern/`, `=~`, and `==~`

[[painless-examples]]
[float]
@ -199,6 +201,79 @@ POST hockey/player/1/_update
----------------------------------------------------------------
// CONSOLE

[float]
=== Regular expressions

Painless's native support for regular expressions uses the following syntax constructs:

* `/pattern/`: Pattern literals create patterns. This is the only way to create
a pattern in painless.
* `=~`: The find operator returns a `boolean`, `true` if a subsequence of the
text matches, `false` otherwise.
* `==~`: The match operator returns a `boolean`, `true` if the text matches,
`false` if it doesn't.

Using the find operator (`=~`) you can update all hockey players with "b" in
their last name:

[source,js]
----------------------------------------------------------------
POST hockey/player/_update_by_query
{
"script": {
"lang": "painless",
"inline": "if (ctx._source.last =~ /b/) {ctx._source.last += \"matched\"} else {ctx.op = 'noop'}"
}
}
----------------------------------------------------------------
// CONSOLE

Using the match operator (`==~`) you can update all the hockey players whose
names start with a consonant and end with a vowel:

[source,js]
----------------------------------------------------------------
POST hockey/player/_update_by_query
{
"script": {
"lang": "painless",
"inline": "if (ctx._source.last ==~ /[^aeiou].*[aeiou]/) {ctx._source.last += \"matched\"} else {ctx.op = 'noop'}"
}
}
----------------------------------------------------------------
// CONSOLE

Or you can use `Pattern.matcher` directly to get a `Matcher` instance and
remove all of the vowels in all of their names:

[source,js]
----------------------------------------------------------------
POST hockey/player/_update_by_query
{
"script": {
"lang": "painless",
"inline": "ctx._source.last = /[aeiou]/.matcher(ctx._source.last).replaceAll('')"
}
}
----------------------------------------------------------------
// CONSOLE

Note: all of the `_update_by_query` examples above could really do with a
`query` to limit the data that they pull back. While you *could* use a
<<query-dsl-script-query>> it wouldn't be as efficient as using any other query
because script queries aren't able to use the inverted index to limit the
documents that they have to check.

The pattern syntax is just
http://docs.oracle.com/javase/8/docs/api/java/util/regex/Pattern.html[Java regular expressions].
We intentionally don't allow scripts to call `Pattern.compile` to get a new
pattern on the fly because building a `Pattern` is (comparatively) slow.
Pattern literals (`/apattern/`) have fancy constant extraction so no matter
where they show up in the painless script they are built only when the script
is first used. It is fairly similar to how `String` literals work in Java.

[[painless-api]]
[float]
== Painless API
@ -42,10 +42,10 @@ Note, the above includes an example of an empty header (can also be just
without any content) which is supported as well.

The response returns a `responses` array, which includes the search
response for each search request matching its order in the original
multi search request. If there was a complete failure for that specific
search request, an object with `error` message will be returned in place
of the actual search response.
response and status code for each search request matching its order in
the original multi search request. If there was a complete failure for that
specific search request, an object with `error` message and corresponding
status code will be returned in place of the actual search response.

The endpoint allows to also search against an index/indices and
type/types in the URI itself, in which case it will be used as the
@ -19,14 +19,17 @@

package org.elasticsearch.script.expression;

import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.ScriptPlugin;
import org.elasticsearch.script.ScriptEngineRegistry;
import org.elasticsearch.script.ScriptEngineService;
import org.elasticsearch.script.ScriptModule;

public class ExpressionPlugin extends Plugin {
public class ExpressionPlugin extends Plugin implements ScriptPlugin {

public void onModule(ScriptModule module) {
module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(ExpressionScriptEngineService.class,
ExpressionScriptEngineService.NAME, true));
@Override
public ScriptEngineService getScriptEngineService(Settings settings) {
return new ExpressionScriptEngineService(settings);
}
}
@ -29,7 +29,6 @@ import org.apache.lucene.search.SortField;
import org.elasticsearch.SpecialPermission;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
@ -51,7 +50,6 @@ import java.security.AccessController;
import java.security.PrivilegedAction;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;

@ -63,7 +61,6 @@ public class ExpressionScriptEngineService extends AbstractComponent implements

public static final String NAME = "expression";

@Inject
public ExpressionScriptEngineService(Settings settings) {
super(settings);
}
@ -122,7 +119,7 @@ public class ExpressionScriptEngineService extends AbstractComponent implements
// instead of complicating SimpleBindings (which should stay simple)
SimpleBindings bindings = new SimpleBindings();
ReplaceableConstValueSource specialValue = null;

for (String variable : expr.variables) {
try {
if (variable.equals("_score")) {
@ -191,10 +188,10 @@ public class ExpressionScriptEngineService extends AbstractComponent implements
}

IndexFieldData<?> fieldData = lookup.doc().fieldDataService().getForField(fieldType);

// delegate valuesource creation based on field's type
// there are three types of "fields" to expressions, and each one has a different "api" of variables and methods.

final ValueSource valueSource;
if (fieldType instanceof BaseGeoPointFieldMapper.GeoPointFieldType) {
// geo
@ -203,7 +200,7 @@ public class ExpressionScriptEngineService extends AbstractComponent implements
} else {
valueSource = GeoField.getMethod(fieldData, fieldname, methodname);
}
} else if (fieldType instanceof LegacyDateFieldMapper.DateFieldType ||
} else if (fieldType instanceof LegacyDateFieldMapper.DateFieldType ||
fieldType instanceof DateFieldMapper.DateFieldType) {
if (dateAccessor) {
// date object
@ -230,7 +227,7 @@ public class ExpressionScriptEngineService extends AbstractComponent implements
} else {
throw new ParseException("Field [" + fieldname + "] must be numeric, date, or geopoint", 5);
}

bindings.add(variable, valueSource);
}
} catch (Exception e) {
@ -238,11 +235,11 @@ public class ExpressionScriptEngineService extends AbstractComponent implements
throw convertToScriptException("link error", expr.sourceText, variable, e);
}
}

final boolean needsScores = expr.getSortField(bindings, false).needsScores();
return new ExpressionSearchScript(compiledScript, bindings, specialValue, needsScores);
}

/**
* converts a ParseException at compile-time or link-time to a ScriptException
*/
@ -273,4 +270,9 @@ public class ExpressionScriptEngineService extends AbstractComponent implements
public void scriptRemoved(CompiledScript script) {
// Nothing to do
}

@Override
public boolean isInlineScriptEnabled() {
return true;
}
}
@ -19,13 +19,17 @@

package org.elasticsearch.script.groovy;

import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.ScriptPlugin;
import org.elasticsearch.script.ScriptEngineRegistry;
import org.elasticsearch.script.ScriptEngineService;
import org.elasticsearch.script.ScriptModule;

public class GroovyPlugin extends Plugin {
public class GroovyPlugin extends Plugin implements ScriptPlugin {

public void onModule(ScriptModule module) {
module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(GroovyScriptEngineService.class, GroovyScriptEngineService.NAME));
@Override
public ScriptEngineService getScriptEngineService(Settings settings) {
return new GroovyScriptEngineService(settings);
}
}
@ -41,7 +41,6 @@ import org.elasticsearch.bootstrap.BootstrapInfo;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.hash.MessageDigests;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.logging.ESLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.script.ClassPermission;
@ -61,9 +60,7 @@ import java.nio.charset.StandardCharsets;
import java.security.AccessControlContext;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
@ -83,7 +80,6 @@ public class GroovyScriptEngineService extends AbstractComponent implements Scri

private final GroovyClassLoader loader;

@Inject
public GroovyScriptEngineService(Settings settings) {
super(settings);
@ -19,14 +19,17 @@

package org.elasticsearch.script.mustache;

import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.ScriptPlugin;
import org.elasticsearch.script.ScriptEngineRegistry;
import org.elasticsearch.script.ScriptEngineService;
import org.elasticsearch.script.ScriptModule;

public class MustachePlugin extends Plugin {
public class MustachePlugin extends Plugin implements ScriptPlugin {

public void onModule(ScriptModule module) {
module.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(MustacheScriptEngineService.class,
MustacheScriptEngineService.NAME, true));
@Override
public ScriptEngineService getScriptEngineService(Settings settings) {
return new MustacheScriptEngineService(settings);
}
}
@ -23,7 +23,6 @@ import com.github.mustachejava.Mustache;
import org.elasticsearch.SpecialPermission;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.FastStringReader;
import org.elasticsearch.common.io.UTF8StreamWriter;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
@ -40,7 +39,6 @@ import java.lang.ref.SoftReference;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.Collections;
import java.util.List;
import java.util.Map;

/**
@ -77,7 +75,6 @@ public final class MustacheScriptEngineService extends AbstractComponent impleme
/**
* @param settings automatically wired by Guice.
* */
@Inject
public MustacheScriptEngineService(Settings settings) {
super(settings);
}
@ -190,4 +187,9 @@ public final class MustacheScriptEngineService extends AbstractComponent impleme
return result.bytes();
}
}

@Override
public boolean isInlineScriptEnabled() {
return true;
}
}
@ -32,6 +32,7 @@ import org.elasticsearch.common.inject.ModulesBuilder;
import org.elasticsearch.common.inject.multibindings.Multibinder;
import org.elasticsearch.common.inject.util.Providers;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsModule;
import org.elasticsearch.common.xcontent.XContentFactory;
@ -67,13 +68,13 @@ import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.IndexSettingsModule;
import org.elasticsearch.test.InternalSettingsPlugin;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.threadpool.ThreadPoolModule;
import org.junit.After;
import org.junit.Before;

import java.io.IOException;
import java.lang.reflect.Proxy;
import java.util.Collections;
import java.util.List;
import java.util.function.Supplier;

import static org.hamcrest.Matchers.containsString;
@ -102,17 +103,15 @@ public class TemplateQueryParserTests extends ESTestCase {
});
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", settings);
Index index = idxSettings.getIndex();
SettingsModule settingsModule = new SettingsModule(settings);
ScriptModule scriptModule = new ScriptModule();
scriptModule.prepareSettings(settingsModule);
// TODO: make this use a mock engine instead of mustache and it will no longer be messy!
scriptModule.addScriptEngine(new ScriptEngineRegistry.ScriptEngineRegistration(MustacheScriptEngineService.class, MustacheScriptEngineService.NAME, true));
settingsModule.registerSetting(InternalSettingsPlugin.VERSION_CREATED);
ScriptModule scriptModule = new ScriptModule(new MustacheScriptEngineService(settings));
List<Setting<?>> scriptSettings = scriptModule.getSettings();
scriptSettings.add(InternalSettingsPlugin.VERSION_CREATED);
SettingsModule settingsModule = new SettingsModule(settings, scriptSettings, Collections.emptyList());
final ThreadPool threadPool = new ThreadPool(settings);
injector = new ModulesBuilder().add(
new EnvironmentModule(new Environment(settings)),
new EnvironmentModule(new Environment(settings), threadPool),
settingsModule,
new ThreadPoolModule(threadPool),
new SearchModule(settings, new NamedWriteableRegistry()) {
@Override
protected void configureSearch() {
@ -80,6 +80,8 @@ COND: '?';
COLON: ':';
REF: '::';
ARROW: '->';
FIND: '=~';
MATCH: '==~';
INCR: '++';
DECR: '--';
@ -49,33 +49,35 @@ COND=48
COLON=49
REF=50
ARROW=51
INCR=52
DECR=53
ASSIGN=54
AADD=55
ASUB=56
AMUL=57
ADIV=58
AREM=59
AAND=60
AXOR=61
AOR=62
ALSH=63
ARSH=64
AUSH=65
OCTAL=66
HEX=67
INTEGER=68
DECIMAL=69
STRING=70
REGEX=71
TRUE=72
FALSE=73
NULL=74
TYPE=75
ID=76
DOTINTEGER=77
DOTID=78
FIND=52
MATCH=53
INCR=54
DECR=55
ASSIGN=56
AADD=57
ASUB=58
AMUL=59
ADIV=60
AREM=61
AAND=62
AXOR=63
AOR=64
ALSH=65
ARSH=66
AUSH=67
OCTAL=68
HEX=69
INTEGER=70
DECIMAL=71
STRING=72
REGEX=73
TRUE=74
FALSE=75
NULL=76
TYPE=77
ID=78
DOTINTEGER=79
DOTID=80
'{'=3
'}'=4
'['=5
@ -125,20 +127,22 @@ DOTID=78
':'=49
'::'=50
'->'=51
'++'=52
'--'=53
'='=54
'+='=55
'-='=56
'*='=57
'/='=58
'%='=59
'&='=60
'^='=61
'|='=62
'<<='=63
'>>='=64
'>>>='=65
'true'=72
'false'=73
'null'=74
'=~'=52
'==~'=53
'++'=54
'--'=55
'='=56
'+='=57
'-='=58
'*='=59
'/='=60
'%='=61
'&='=62
'^='=63
'|='=64
'<<='=65
'>>='=66
'>>>='=67
'true'=74
'false'=75
'null'=76
@ -113,6 +113,7 @@ expression returns [boolean s = true]
: u = unary[false] { $s = $u.s; } # single
| expression ( MUL | DIV | REM ) expression { $s = false; } # binary
| expression ( ADD | SUB ) expression { $s = false; } # binary
| expression ( FIND | MATCH ) expression { $s = false; } # binary
| expression ( LSH | RSH | USH ) expression { $s = false; } # binary
| expression ( LT | LTE | GT | GTE ) expression { $s = false; } # comp
| expression ( EQ | EQR | NE | NER ) expression { $s = false; } # comp
@ -49,33 +49,35 @@ COND=48
COLON=49
REF=50
ARROW=51
INCR=52
DECR=53
ASSIGN=54
AADD=55
ASUB=56
AMUL=57
ADIV=58
AREM=59
AAND=60
AXOR=61
AOR=62
ALSH=63
ARSH=64
AUSH=65
OCTAL=66
HEX=67
INTEGER=68
DECIMAL=69
STRING=70
REGEX=71
TRUE=72
FALSE=73
NULL=74
TYPE=75
ID=76
DOTINTEGER=77
DOTID=78
FIND=52
MATCH=53
INCR=54
DECR=55
ASSIGN=56
AADD=57
ASUB=58
AMUL=59
ADIV=60
AREM=61
AAND=62
AXOR=63
AOR=64
ALSH=65
ARSH=66
AUSH=67
OCTAL=68
HEX=69
INTEGER=70
DECIMAL=71
STRING=72
REGEX=73
TRUE=74
FALSE=75
NULL=76
TYPE=77
ID=78
DOTINTEGER=79
DOTID=80
'{'=3
'}'=4
'['=5
@ -125,20 +127,22 @@ DOTID=78
':'=49
'::'=50
'->'=51
'++'=52
'--'=53
'='=54
'+='=55
'-='=56
'*='=57
'/='=58
'%='=59
'&='=60
'^='=61
'|='=62
'<<='=63
'>>='=64
'>>>='=65
'true'=72
'false'=73
'null'=74
'=~'=52
'==~'=53
'++'=54
'--'=55
'='=56
'+='=57
'-='=58
'*='=59
'/='=60
'%='=61
'&='=62
'^='=63
'|='=64
'<<='=65
'>>='=66
'>>>='=67
'true'=74
'false'=75
'null'=76
@ -215,25 +215,27 @@ public final class Def {
* @param callSiteType callsite's type
* @param receiverClass Class of the object to invoke the method on.
* @param name Name of the method.
* @param args args passed to callsite
* @param recipe bitset marking functional parameters
* @param args bootstrap args passed to callsite
* @return pointer to matching method to invoke. never returns null.
* @throws IllegalArgumentException if no matching whitelisted method was found.
* @throws Throwable if a method reference cannot be converted to an functional interface
*/
static MethodHandle lookupMethod(Lookup lookup, MethodType callSiteType,
Class<?> receiverClass, String name, Object args[], long recipe) throws Throwable {
Class<?> receiverClass, String name, Object args[]) throws Throwable {
long recipe = (Long) args[0];
int numArguments = callSiteType.parameterCount();
// simple case: no lambdas
if (recipe == 0) {
return lookupMethodInternal(receiverClass, name, args.length - 1).handle;
return lookupMethodInternal(receiverClass, name, numArguments - 1).handle;
}

// otherwise: first we have to compute the "real" arity. This is because we have extra arguments:
// e.g. f(a, g(x), b, h(y), i()) looks like f(a, g, x, b, h, y, i).
int arity = args.length - 1;
for (int i = 0; i < args.length; i++) {
int arity = callSiteType.parameterCount() - 1;
int upTo = 1;
for (int i = 0; i < numArguments; i++) {
if ((recipe & (1L << (i - 1))) != 0) {
String signature = (String) args[i];
String signature = (String) args[upTo++];
int numCaptures = Integer.parseInt(signature.substring(signature.indexOf(',')+1));
arity -= numCaptures;
}
@ -245,11 +247,12 @@ public final class Def {
MethodHandle handle = method.handle;

int replaced = 0;
for (int i = 1; i < args.length; i++) {
upTo = 1;
for (int i = 1; i < numArguments; i++) {
// its a functional reference, replace the argument with an impl
if ((recipe & (1L << (i - 1))) != 0) {
// decode signature of form 'type.call,2'
String signature = (String) args[i];
String signature = (String) args[upTo++];
int separator = signature.indexOf('.');
int separator2 = signature.indexOf(',');
String type = signature.substring(1, separator);
@ -125,10 +125,10 @@ public final class DefBootstrap {
/**
* Does a slow lookup against the whitelist.
*/
private MethodHandle lookup(int flavor, String name, Class<?> receiver, Object[] callArgs) throws Throwable {
private MethodHandle lookup(int flavor, String name, Class<?> receiver) throws Throwable {
switch(flavor) {
case METHOD_CALL:
return Def.lookupMethod(lookup, type(), receiver, name, callArgs, (Long) this.args[0]);
return Def.lookupMethod(lookup, type(), receiver, name, args);
case LOAD:
return Def.lookupGetter(receiver, name);
case STORE:
@ -140,7 +140,7 @@ public final class DefBootstrap {
case ITERATOR:
return Def.lookupIterator(receiver);
case REFERENCE:
return Def.lookupReference(lookup, (String) this.args[0], receiver, name);
return Def.lookupReference(lookup, (String) args[0], receiver, name);
default: throw new AssertionError();
}
}
@ -148,27 +148,23 @@ public final class DefBootstrap {
/**
* Creates the {@link MethodHandle} for the megamorphic call site
* using {@link ClassValue} and {@link MethodHandles#exactInvoker(MethodType)}:
* <p>
* TODO: Remove the variable args and just use {@code type()}!
*/
private MethodHandle createMegamorphicHandle(final Object[] callArgs) throws Throwable {
private MethodHandle createMegamorphicHandle() {
final MethodType type = type();
final ClassValue<MethodHandle> megamorphicCache = new ClassValue<MethodHandle>() {
@Override
protected MethodHandle computeValue(Class<?> receiverType) {
// it's too stupid that we cannot throw checked exceptions... (use rethrow puzzler):
try {
return lookup(flavor, name, receiverType, callArgs).asType(type);
return lookup(flavor, name, receiverType).asType(type);
} catch (Throwable t) {
Def.rethrow(t);
throw new AssertionError();
}
}
};
MethodHandle cacheLookup = MEGAMORPHIC_LOOKUP.bindTo(megamorphicCache);
cacheLookup = MethodHandles.dropArguments(cacheLookup,
1, type.parameterList().subList(1, type.parameterCount()));
return MethodHandles.foldArguments(MethodHandles.exactInvoker(type), cacheLookup);
return MethodHandles.foldArguments(MethodHandles.exactInvoker(type),
MEGAMORPHIC_LOOKUP.bindTo(megamorphicCache));
}

/**
@ -180,13 +176,13 @@ public final class DefBootstrap {
Object fallback(final Object[] callArgs) throws Throwable {
if (depth >= MAX_DEPTH) {
// we revert the whole cache and build a new megamorphic one
final MethodHandle target = this.createMegamorphicHandle(callArgs);
final MethodHandle target = this.createMegamorphicHandle();

setTarget(target);
return target.invokeWithArguments(callArgs);
} else {
final Class<?> receiver = callArgs[0].getClass();
final MethodHandle target = lookup(flavor, name, receiver, callArgs).asType(type());
final MethodHandle target = lookup(flavor, name, receiver).asType(type());

MethodHandle test = CHECK_CLASS.bindTo(receiver);
MethodHandle guard = MethodHandles.guardWithTest(test, target, getTarget());
@ -270,7 +266,7 @@ public final class DefBootstrap {
}
}

private MethodHandle lookupGeneric() throws Throwable {
private MethodHandle lookupGeneric() {
if ((flags & OPERATOR_COMPOUND_ASSIGNMENT) != 0) {
return DefMath.lookupGenericWithCast(name);
} else {
@ -279,8 +275,8 @@ public final class DefBootstrap {
}

/**
* Called when a new type is encountered (or, when we have encountered more than {@code MAX_DEPTH}
* types at this call site and given up on caching).
* Called when a new type is encountered or if cached type does not match.
* In that case we revert to a generic, but slower operator handling.
*/
@SuppressForbidden(reason = "slow path")
Object fallback(Object[] args) throws Throwable {
@ -398,16 +394,20 @@ public final class DefBootstrap {
switch(flavor) {
// "function-call" like things get a polymorphic cache
case METHOD_CALL:
if (args.length != 1) {
if (args.length == 0) {
throw new BootstrapMethodError("Invalid number of parameters for method call");
}
if (args[0] instanceof Long == false) {
throw new BootstrapMethodError("Illegal parameter for method call: " + args[0]);
}
long recipe = (Long) args[0];
if (Long.bitCount(recipe) > type.parameterCount()) {
int numLambdas = Long.bitCount(recipe);
if (numLambdas > type.parameterCount()) {
throw new BootstrapMethodError("Illegal recipe for method call: too many bits");
}
if (args.length != numLambdas + 1) {
throw new BootstrapMethodError("Illegal number of parameters: expected " + numLambdas + " references");
}
return new PIC(lookup, name, type, flavor, args);
case LOAD:
case STORE:
@ -87,6 +87,8 @@ public final class Definition {
public static final Type DEF_TYPE = getType("def");
public static final Type STRING_TYPE = getType("String");
public static final Type EXCEPTION_TYPE = getType("Exception");
public static final Type PATTERN_TYPE = getType("Pattern");
public static final Type MATCHER_TYPE = getType("Matcher");

public enum Sort {
VOID( void.class , 0 , true , false , false , false ),
@ -772,7 +774,7 @@ public final class Definition {

if (owner.staticMethods.containsKey(methodKey) || owner.methods.containsKey(methodKey)) {
throw new IllegalArgumentException(
"Duplicate method signature [" + methodKey + "] found within the struct [" + owner.name + "].");
"Duplicate method signature [" + methodKey + "] found within the struct [" + owner.name + "].");
}

final Class<?>[] classes = new Class<?>[args.length];
@ -98,7 +98,7 @@ public class FunctionRef {
implMethod = impl.handle;

// remove any prepended captured arguments for the 'natural' signature.
samMethodType = impl.getMethodType().dropParameterTypes(0, captures.length);
samMethodType = adapt(interfaceMethodType, impl.getMethodType().dropParameterTypes(0, captures.length));
}

/**
@ -119,7 +119,7 @@ public class FunctionRef {
implMethodASM = null;

// remove any prepended captured arguments for the 'natural' signature.
samMethodType = impl.type().dropParameterTypes(0, captures.length);
samMethodType = adapt(interfaceMethodType, impl.type().dropParameterTypes(0, captures.length));
}

/**
@ -171,4 +171,15 @@ public class FunctionRef {
// either way, stuff will fail if its wrong :)
return interfaceMethodType.equals(samMethodType) == false;
}

/**
* If the interface expects a primitive type to be returned, we can't return Object,
* But we can set SAM to the wrapper version, and a cast will take place
*/
private static MethodType adapt(MethodType expected, MethodType actual) {
if (expected.returnType().isPrimitive() && actual.returnType() == Object.class) {
actual = actual.changeReturnType(MethodType.methodType(expected.returnType()).wrap().returnType());
}
return actual;
}
}
Some files were not shown because too many files have changed in this diff.